text stringlengths 8 4.13M |
|---|
extern crate rayon;
use rayon::prelude::*;
/// Returns the sum of squares of all elements, squaring each element in
/// parallel via rayon's parallel iterator.
fn sum_of_square(input: &[i32]) -> i32 {
    input
        .par_iter()
        .map(|&value| value.pow(2))
        .sum()
}
/// Demo entry point: sums the squares of a fixed vector and prints it.
fn main() {
    // `v` is never mutated, so the `mut` binding was unnecessary noise.
    let v = vec![3, 4, 2, 6, 4];
    let sum = sum_of_square(&v);
    println!("the sum of square:{}", sum);
}
|
// Copyright (c) 2021 Ant Group
//
// SPDX-License-Identifier: Apache-2.0
//
//! Because Tokio has removed UnixIncoming since version 0.3,
//! we define the UnixIncoming and implement the Stream for UnixIncoming.
use std::io;
use std::os::unix::io::{AsRawFd, RawFd};
use std::pin::Pin;
use std::task::{Context, Poll};
use futures::{ready, Stream};
use tokio::net::{UnixListener, UnixStream};
/// Stream of listeners
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct UnixIncoming {
    // Wrapped Tokio listener; its `poll_accept` backs the Stream impl below.
    inner: UnixListener,
}
impl UnixIncoming {
pub fn new(listener: UnixListener) -> Self {
Self { inner: listener }
}
}
impl Stream for UnixIncoming {
    type Item = io::Result<UnixStream>;

    // Polls the wrapped listener for the next incoming connection.
    // `ready!` short-circuits with `Poll::Pending` while no connection is
    // available; `?` converts an accept error into `Ready(Some(Err(..)))`.
    // The peer address returned by `poll_accept` is intentionally discarded.
    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let (socket, _) = ready!(self.inner.poll_accept(cx))?;
        Poll::Ready(Some(Ok(socket)))
    }
}
impl AsRawFd for UnixIncoming {
fn as_raw_fd(&self) -> RawFd {
self.inner.as_raw_fd()
}
}
|
extern crate ocl;
extern crate log;
use std::result::Result;
use ocl::{Buffer, MemFlags, ProQue,Error};
use log::info;
// OpenCL kernel source for `multiply_by_scalar`, embedded at compile time.
static MULTIPLY_SRC: &str = include_str!("kernel/multiply.cl");

/// Holds a compiled OpenCL program/queue plus the device buffers used by the
/// scalar-multiply kernel.
pub struct MultiplyKernel
{
    // Program + command queue; owns the compiled kernel source.
    proque: ProQue,
    // Device-resident copy of the host input vector.
    source_buffer: Buffer<f32>,
    // Kernel output buffer; callers read results back from here.
    pub result_buffer: Buffer<f32>
}
impl MultiplyKernel
{
pub fn create(work_size: usize,vec_source:&Vec<f32>) -> Result<MultiplyKernel,Error> {
// let devices = &GPU_NVIDIA_DEVICES;
// if devices.is_empty() {
// return Err(GPUError::Simple("No working GPUs found!"));
// }
// let device = devices[0]; // Select the first device for FFT
let pq = ProQue::builder().src(MULTIPLY_SRC).dims(work_size).build()?;
let source_buffer = Buffer::builder()
.queue(pq.queue().clone())
.flags(MemFlags::new().read_write())
.len(work_size)
.copy_host_slice(vec_source)
.build()?;
let result_buffer: Buffer<f32> = pq.create_buffer()?;
info!("FFT: 1 working device(s) selected.");
info!("FFT: Device 0: {}", pq.device().name()?);
Ok(MultiplyKernel {
proque: pq,
source_buffer: source_buffer,
result_buffer: result_buffer,
})
}
pub fn multiply(&mut self,coeff: f32){
// let kern = self.proque.kernel_builder("multiply_by_scalar")
// .arg(coeff)
// .arg(None::<&Buffer<f32>>)
// .arg_named("result", None::<&Buffer<f32>>)
// .build().unwrap();
let kern = self.proque.kernel_builder("multiply_by_scalar")
.arg(coeff)
.arg(&self.source_buffer)
.arg(&self.result_buffer)
.build().unwrap();
// Set our named argument. The Option<_> wrapper is, well... optional:
// kern.set_arg("result", &self.result_buffer);
// // We can also set arguments (named or not) by index. Just for
// // demonstration, we'll set one using an option:
// kern.set_arg(0, &coeff);
// kern.set_arg(1, Some(&self.source_buffer));
// kern.set_arg(2, &self.result_buffer);
unsafe { kern.enq();}
// Read results from the device into result_buffer's local vector:
}
}
|
use ckb_chain_spec::consensus::Consensus;
use ckb_core::block::Block;
use ckb_core::extras::BlockExt;
use ckb_core::header::{BlockNumber, Header};
use ckb_core::transaction::{Capacity, ProposalShortId, Transaction};
use ckb_core::uncle::UncleBlock;
use numext_fixed_hash::H256;
use numext_fixed_uint::U256;
/// Read-only access to chain data; implementors must be shareable across
/// threads (`Sync + Send`).
pub trait ChainProvider: Sync + Send {
    /// Transactions of the block with the given hash, if known.
    fn block_body(&self, hash: &H256) -> Option<Vec<Transaction>>;
    /// Header of the block with the given hash.
    fn block_header(&self, hash: &H256) -> Option<Header>;
    /// Proposal short-ids carried by the block with the given hash.
    fn block_proposal_txs_ids(&self, hash: &H256) -> Option<Vec<ProposalShortId>>;
    /// Uncle blocks of the block with the given hash.
    fn uncles(&self, hash: &H256) -> Option<Vec<UncleBlock>>;
    /// Hash of the block at `number` on the canonical chain.
    fn block_hash(&self, number: BlockNumber) -> Option<H256>;
    /// Stored extras (BlockExt) for the block with the given hash.
    fn block_ext(&self, hash: &H256) -> Option<BlockExt>;
    /// Height of the block with the given hash.
    fn block_number(&self, hash: &H256) -> Option<BlockNumber>;
    /// Full block for the given hash.
    fn block(&self, hash: &H256) -> Option<Block>;
    /// Hash of the genesis block.
    fn genesis_hash(&self) -> &H256;
    /// Transaction plus an associated hash — presumably the hash of the
    /// containing block; confirm with implementors.
    fn get_transaction(&self, hash: &H256) -> Option<(Transaction, H256)>;
    /// Whether a transaction with this hash is known to the chain.
    fn contain_transaction(&self, hash: &H256) -> bool;
    /// Block reward payable at the given height.
    fn block_reward(&self, block_number: BlockNumber) -> Capacity;
    /// Ancestor of `base` at height `number`, if `base` is known and deep enough.
    fn get_ancestor(&self, base: &H256, number: BlockNumber) -> Option<Header>;
    /// Difficulty for the block following `last`, per the consensus rules.
    fn calculate_difficulty(&self, last: &Header) -> Option<U256>;
    /// Consensus parameters in effect for this chain.
    fn consensus(&self) -> &Consensus;
}
|
/// Aggregate statistics about a values index, persisted alongside it.
#[derive(Serialize, Deserialize, Debug, Default, Clone, Copy, PartialEq)]
pub struct IndexValuesMetadata {
    /// max value on the "right" side key -> value, key -> value ..
    pub max_value_id: u32,
    // Presumably the average number of values joined per key — confirm with
    // the code that writes this metadata.
    pub avg_join_size: f32,
    // Total count of stored values — presumably across all keys.
    pub num_values: u64,
    // Count of distinct ids on the "left" (key) side — TODO confirm.
    pub num_ids: u32,
}
impl IndexValuesMetadata {
pub fn new(max_value_id: u32) -> Self {
IndexValuesMetadata {
max_value_id,
..Default::default()
}
}
}
/// Broad category of an index, controlling how it is interpreted.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Default)]
pub enum IndexCategory {
    Boost,
    // Default category for plain key -> value indices.
    #[default]
    KeyValue,
    AnchorScore,
    Phrase,
}
/// Per-index descriptor persisted with the index files.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default)]
pub struct IndexMetadata {
    /// Location of the index data — presumably relative to the store root.
    pub path: String,
    pub index_category: IndexCategory,
    pub index_cardinality: IndexCardinality,
    // Defaults to false when missing from older serialized metadata.
    #[serde(default)]
    pub is_empty: bool,
    /// Aggregate statistics for the stored values.
    pub metadata: IndexValuesMetadata,
    /// Width of the stored value ids.
    pub data_type: DataType,
}
/// Integer width used for the index's stored values.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default)]
pub enum DataType {
    #[default]
    U32,
    U64,
}
/// Shape of the id-to-value relation stored in an index.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default)]
pub enum IndexCardinality {
    // One id may map to many values (indirect layout).
    #[default]
    IndirectIM,
    // Each id maps to exactly one parent value.
    IndexIdToOneParent,
}
|
use std::fs;
//use std::collections::HashMap;
/// Iterator state for the AoC day 15 "memory game" (Van Eck-style sequence).
#[derive(Debug, Clone)]
pub struct MemoryGame {
    // Complete history of numbers spoken so far, oldest first.
    turns: Vec::<usize>,
}
impl MemoryGame {
    /// Starts a game seeded with the starting numbers, in spoken order.
    ///
    /// Takes `&[usize]` instead of `&Vec<usize>`; existing `&vec` call sites
    /// still compile via deref coercion.
    pub fn new(starting: &[usize]) -> MemoryGame {
        // to_vec() replaces the manual iterate-and-push copy.
        MemoryGame { turns: starting.to_vec() }
    }
}
impl Iterator for MemoryGame {
    type Item = usize;

    /// Speaks the next number: 0 if the previous number was new, otherwise
    /// the gap between its two most recent occurrences.
    ///
    /// O(history) per step — it rescans the whole history each turn; fine
    /// for part 1, slow but workable for part 2's 30M turns.
    fn next(&mut self) -> Option<usize> {
        // `?` returns None for an empty seed instead of panicking.
        let last_spoken = *self.turns.last()?;
        // Indices (newest first) of the most recent occurrences of the
        // last-spoken number; we only ever need the first two.
        let mut occurrences = self
            .turns
            .iter()
            .enumerate()
            .rev()
            .filter(|&(_, &value)| value == last_spoken)
            .map(|(index, _)| index);
        let turn = match (occurrences.next(), occurrences.next()) {
            // Spoken before: distance between the two latest occurrences.
            (Some(newest), Some(previous)) => newest - previous,
            // First occurrence: the next number is 0.
            _ => 0,
        };
        self.turns.push(turn);
        // Progress indicator for the 30M-iteration part 2 run.
        if self.turns.len() % 100_000 == 0 {
            println!("{}", self.turns.len());
        }
        Some(turn)
    }
}
/// AoC 2020 day 15 driver: reads the comma-separated starting numbers from
/// the file named by `args[0]` and prints the 2020th and 30,000,000th
/// numbers spoken. Returns 0 on success, -1 on a usage error.
pub fn day15(args: &[String]) -> i32 {
    println!("Day 15");
    if args.len() != 1 {
        println!("Missing input file");
        return -1;
    }
    let filename = &args[0];
    println!("In file {}", filename);
    let contents = fs::read_to_string(filename)
        .expect("Something went wrong reading the file");
    // Starting numbers are comma-separated on the first line.
    let numbers = contents.lines()
        .next()
        .expect("input file is empty")
        .split(',')
        .map(|i| i.parse::<usize>().expect("starting numbers must be integers"))
        .collect::<Vec<usize>>();
    // The iterator's first yielded value is turn numbers.len() + 1, hence
    // the offset; nth(k) replaces the clippy-flagged skip(k).next() chain.
    println!("Part 1: {}", MemoryGame::new(&numbers).nth(2020 - numbers.len() - 1).unwrap());
    println!("Part 2: {}", MemoryGame::new(&numbers).nth(30_000_000 - numbers.len() - 1).unwrap());
    0
}
// pub fn run(numbers: Vec<usize>, count: usize) -> usize {
// } |
use bevy::prelude::*;
use crate::{player, asset_loader, theater_outside, GameState, level_collision, cutscene, AppState, follow_text::FollowTextEvent, get_colors, Kid};
use bevy::render::pipeline::PrimitiveTopology;
use serde::Deserialize;
use bevy::reflect::{TypeUuid};
use bevy::render::mesh::Indices;
/// Mesh handles shared by all enemies, filled in by `load_assets`.
#[derive(Default)]
pub struct EnemyMeshes {
    // Vision-cone mesh loaded from models/cone.glb.
    pub fov_cone: Handle<Mesh>,
}
/// Uniform scale applied to spawned enemy models.
pub static SCALE: f32 = 0.36;
/// Acceleration factor and velocity cap for enemy movement (see update_enemy).
pub static SPEED: f32 = 0.1;
/// Damping base applied when an enemy closes in on its waypoint.
pub static FRICTION: f32 = 0.1;
/// One enemy spawn entry deserialized from the level asset.
#[derive(Debug, Clone, Deserialize, TypeUuid)]
#[uuid = "31c0df2d-8f17-4ed3-906f-e4e7ca870c2f"]
pub struct EnemySpawnPoint {
    /// Level this spawn belongs to; other levels skip it in spawn_enemies.
    pub level: cutscene::Level,
    /// Spawn position in the X/Z plane (y is set to 0.0 at spawn).
    pub location: Vec2,
    /// Behaviour/appearance variant to spawn.
    pub enemy_type: EnemyType,
    // NOTE(review): not applied in spawn_enemies, which always rotates by PI
    // — confirm whether this field is used elsewhere.
    pub facing: crate::Direction,
}
/// Behaviour variants for enemies.
#[derive(Debug, Clone, Deserialize, TypeUuid, PartialEq)]
#[uuid = "a3da668c-fa5c-402d-ab4f-edf62690827e"]
pub enum EnemyType {
    // Ticket taker; the bool presumably toggles actual ticket checking —
    // confirm with callers (update_enemy only reads is_distracted for it).
    Ticket(bool),
    // Walks the waypoint loop and spots the player via a vision cone.
    Patrol(Vec::<Vec2>),
    // Same waypoint/vision behaviour as Patrol, different model/colors.
    Mom(Vec::<Vec2>),
    // Camera and Dog are not handled by update_enemy/check_for_player here.
    Camera,
    Dog
}
/// Marker component for the vision-cone child entity under an enemy.
pub struct Cone { }
/// Bevy plugin registering all enemy systems.
pub struct EnemyPlugin;
impl Plugin for EnemyPlugin {
fn build(&self, app: &mut AppBuilder) {
app
.init_resource::<EnemyMeshes>()
.add_system_set(
SystemSet::on_enter(crate::AppState::Loading)
.with_system(load_assets.system())
)
.add_system_set(
SystemSet::on_update(crate::AppState::Lobby)
.with_system(print.system())
.with_system(update_enemy.system())
.with_system(scale_cone.system())
.with_system(check_for_player.system())
)
.add_system_set(
SystemSet::on_update(crate::AppState::Movie)
.with_system(print.system())
.with_system(update_enemy.system())
.with_system(scale_cone.system())
.with_system(check_for_player.system())
)
.add_system_set(
SystemSet::on_update(crate::AppState::InGame)
.with_system(print.system())
.with_system(update_enemy.system())
.with_system(scale_cone.system())
.with_system(check_for_player.system())
);
}
}
/// Loads the enemy cone mesh and registers it with the loading tracker so
/// the game can wait for it before leaving the Loading state.
pub fn load_assets(
    asset_server: Res<AssetServer>,
    mut enemy_meshes: ResMut<EnemyMeshes>,
    mut loading: ResMut<asset_loader::AssetsLoading>,
) {
    println!("Adding enemy assets");
    let cone_handle = asset_server.load("models/cone.glb#Mesh0/Primitive0");
    loading.asset_handles.push(cone_handle.clone_untyped());
    enemy_meshes.fov_cone = cone_handle;
}
/// Base reach of an enemy's vision cone in world units; check_for_player
/// shortens it per level before use.
static VIEW_DISTANCE :f32 = 5.7;
/// Half-angle (radians) of the vision cone; narrowed during the Movie level.
static VIEW_ANGLE: f32 = 0.5;
/// Spawns every enemy listed in the level asset for the current level:
/// a parent entity with the `Enemy` component plus child meshes (legs,
/// torso, head/hands, hat or hair, face) and — for Mom/Patrol enemies —
/// a translucent vision-cone child tagged with `Cone`.
pub fn spawn_enemies(
    mut commands: Commands,
    mut materials: ResMut<Assets<StandardMaterial>>,
    // meshes: ResMut<Assets<Mesh>>,
    enemy_meshes: Res<EnemyMeshes>,
    theater_meshes: ResMut<theater_outside::TheaterMeshes>,
    game_state: Res<GameState>,
    level_info_state: Res<asset_loader::LevelInfoState>,
    level_info_assets: ResMut<Assets<asset_loader::LevelInfo>>,
) {
    let leg_color = Color::hex("293241").unwrap();
    let torso_color = Color::hex("e63946").unwrap();
    // NOTE(review): hat uses the same hex as the torso — confirm intended.
    let hat_color = Color::hex("e63946").unwrap();
    let other_colors = get_colors();
    let vision_color = Color::hex("fdffb6").unwrap();
    // Nothing spawns if the level asset isn't loaded yet.
    if let Some(levels_asset) = level_info_assets.get(&level_info_state.handle) {
        for enemy_spawn in levels_asset.enemies.iter() {
            // Only spawn entries belonging to the active level.
            if enemy_spawn.level != game_state.current_level { continue; }
            let skin_color = Color::hex(other_colors.skin.to_string()).unwrap();
            // Mom reuses kid D's skin/hair colors so she matches the family.
            let mom_skin = Color::hex(game_state.kid_colors[&Kid::D].skin.clone()).unwrap();
            let hair_color = Color::hex(game_state.kid_colors[&Kid::D].hair.clone()).unwrap();
            // Spawn location is given in the X/Z plane; y is ground level.
            let mut transform = Transform::from_translation(Vec3::new(enemy_spawn.location.x as f32,
                                                   0.0 as f32,
                                                   enemy_spawn.location.y as f32));
            transform.apply_non_uniform_scale(Vec3::new(SCALE, SCALE, SCALE));
            // do direction
            transform.rotate(Quat::from_axis_angle(Vec3::new(0.0, 1.0, 0.0), std::f32::consts::PI));
            commands.spawn_bundle(PbrBundle {
                transform,
                ..Default::default()
            })
            .insert(Enemy {
                target_waypoint: 0,
                velocity: Vec3::default(),
                is_patroling: true,
                is_distracted: false,
                enemy_spawn: enemy_spawn.clone()
            })
            .with_children(|parent| {
                parent.spawn_bundle(PbrBundle {
                    mesh: theater_meshes.legs.clone(),
                    material: materials.add(leg_color.into()),
                    ..Default::default()
                });
                parent.spawn_bundle(PbrBundle {
                    mesh: theater_meshes.torso.clone(),
                    material: materials.add(torso_color.into()),
                    ..Default::default()
                });
                parent.spawn_bundle(PbrBundle {
                    mesh: theater_meshes.headhand.clone(),
                    // Mom gets the family skin tone; everyone else the default.
                    material: match enemy_spawn.enemy_type {
                        EnemyType::Mom(_) => materials.add(mom_skin.into()),
                        _ => materials.add(skin_color.into()),
                    },
                    ..Default::default()
                });
                // Mom wears hair; every other enemy wears the hat.
                match enemy_spawn.enemy_type {
                    EnemyType::Mom(_) => {
                        parent.spawn_bundle(PbrBundle {
                            mesh: theater_meshes.hairtwo.clone(),
                            material: materials.add(hair_color.into()),
                            ..Default::default()
                        });
                    },
                    _ => {
                        parent.spawn_bundle(PbrBundle {
                            mesh: theater_meshes.hat.clone(),
                            material: materials.add(hat_color.into()),
                            ..Default::default()
                        });
                    }
                }
                parent.spawn_bundle(PbrBundle {
                    mesh: theater_meshes.face.clone(),
                    material: theater_meshes.face_material.clone(),
                    ..Default::default()
                });
                // Only the patrolling variants get a visible vision cone.
                match enemy_spawn.enemy_type {
                    EnemyType::Mom(_) | EnemyType::Patrol(_) => {
                        // Semi-transparent tint of the shared vision color.
                        let color = Color::rgba(vision_color.r(), vision_color.g(), vision_color.b(), 0.7);
                        parent.spawn_bundle(PbrBundle {
                            mesh: enemy_meshes.fov_cone.clone(),
                            material: materials.add(color.into()),
                            visible: Visible {
                                is_visible: true,
                                is_transparent: true,
                            },
                            transform: {
                                // Cone sits below the model origin and is
                                // scaled to the base view distance.
                                let mut t = Transform::from_xyz(0.0, -2.2, 0.0);
                                t.scale = Vec3::new(VIEW_DISTANCE, VIEW_DISTANCE, VIEW_DISTANCE);
                                t
                            },
                            ..Default::default()
                        }).insert(Cone {});
                    },
                    _ => ()
                }
            }).id();
        }
    }
}
/// Per-frame enemy AI: moves Mom/Patrol enemies along their waypoint loops
/// (respecting level collision) and emits a speech bubble for distracted
/// Ticket enemies.
pub fn update_enemy(
    mut enemies: Query<(Entity, &mut Transform, &mut Enemy)>,
    time: Res<Time>,
    mut game_state: ResMut<GameState>,
    mut follow_text_event_writer: EventWriter<FollowTextEvent>,
    level_info_assets: Res<Assets<asset_loader::LevelInfo>>,
    level_info_state: Res<asset_loader::LevelInfoState>,
) {
    for (entity, mut transform, mut enemy) in enemies.iter_mut() {
        match &enemy.enemy_spawn.enemy_type {
            EnemyType::Mom(waypoints) | EnemyType::Patrol(waypoints) => {
                // this is pretty bad but it lets me end the game easier
                // Once the Movie-level guard finishes its route, it is
                // flagged as avoided and all patrol movement stops.
                if enemy.target_waypoint >= waypoints.len() - 1
                    && game_state.current_level == cutscene::Level::Movie
                    && !game_state.has_avoided_movie_guard {
                    game_state.has_avoided_movie_guard = true;
                }
                // NOTE(review): this `return` exits the whole system, skipping
                // any remaining enemies this frame — presumably intentional
                // per the comment above, but `continue` may be what's meant.
                if game_state.has_avoided_movie_guard { return; }
                // Movement happens in the X/Z plane only.
                let current_position = Vec2::new(transform.translation.x, transform.translation.z);
                if let Some(point) = waypoints.get(enemy.target_waypoint) {
                    let distance = current_position.distance(*point);
                    if distance < 0.1 {
                        // Close enough: advance to the next waypoint, looping
                        // back to the start at the end of the route.
                        enemy.target_waypoint
                            = if enemy.target_waypoint >= waypoints.len() - 1 {
                                0
                            } else {
                                enemy.target_waypoint + 1
                            };
                    } else {
                        //let angle = current_position.angle_between(*point);
                        // Accelerate toward the waypoint, capped at SPEED.
                        let move_toward = (*point - current_position).normalize();
                        let move_toward = Vec3::new(move_toward.x, 0.0, move_toward.y);
                        enemy.velocity += (move_toward * SPEED) * time.delta_seconds();
                        enemy.velocity = enemy.velocity.clamp_length_max(SPEED);
                        // Slow down when approaching the waypoint.
                        if distance < 2.0 {
                            enemy.velocity *= FRICTION.powf(time.delta_seconds());
                        }
                        // Movie-level guards walk at half speed.
                        if game_state.current_level == cutscene::Level::Movie {
                            enemy.velocity = enemy.velocity.clamp_length_max(SPEED / 2.0);
                        }
                        let new_translation = transform.translation + enemy.velocity;
                        let levels_asset = level_info_assets.get(&level_info_state.handle);
                        if let Some(level_asset) = levels_asset {
                            // Clamp the step to walkable space; zero out any
                            // velocity component the clamp rejected.
                            let temp_new_translation = new_translation;
                            let new_translation = level_collision::fit_in_level(&level_asset, &game_state, transform.translation, new_translation);
                            if temp_new_translation.x != new_translation.x {
                                enemy.velocity.x = 0.0;
                            }
                            // NOTE(review): movement is in X/Z, so this y
                            // comparison looks like it should never trigger —
                            // confirm whether z was intended here.
                            if temp_new_translation.y != new_translation.y {
                                enemy.velocity.y = 0.0;
                            }
                            if temp_new_translation.z != new_translation.z {
                                enemy.velocity.z = 0.0;
                            }
                            // wow, this actually works?
                            // Face the direction of actual movement.
                            let angle = (-(new_translation.z - transform.translation.z)).atan2(new_translation.x - transform.translation.x);
                            let rotation = Quat::from_axis_angle(Vec3::Y, angle);
                            transform.translation = new_translation;
                            let new_rotation = transform.rotation.lerp(rotation, time.delta_seconds());
                            // don't rotate if we're not moving or if uhh rotation isnt a number
                            if !new_rotation.is_nan() && enemy.velocity.length() > 0.0001 {
                                transform.rotation = rotation;
                            }
                        }
                    }
                }
            },
            EnemyType::Ticket(_actually_checks) => {
                // Distracted ticket takers announce it via a follow-text bubble.
                if enemy.is_distracted {
                    follow_text_event_writer.send(FollowTextEvent {
                        entity,
                        value: "I'm distracted!".to_string(),
                        is_player: false,
                        force: false,
                    });
                }
            },
            _ => ()
        }
    }
}
/// Debug-only tuning of vision cones: Y/H grow and shrink the cone's X/Z
/// scale, T/G nudge it along the local X axis.
fn scale_cone(
    keyboard_input: Res<Input<KeyCode>>,
    mut cones: Query<&mut Transform, With<Cone>>,
) {
    for mut cone_transform in cones.iter_mut() {
        if keyboard_input.just_pressed(KeyCode::Y) {
            cone_transform.scale.z += 0.1;
            cone_transform.scale.x += 0.1;
        }
        if keyboard_input.just_pressed(KeyCode::H) {
            cone_transform.scale.z -= 0.1;
            cone_transform.scale.x -= 0.1;
        }
        if keyboard_input.just_pressed(KeyCode::T) {
            cone_transform.translation.x += 0.1;
        }
        if keyboard_input.just_pressed(KeyCode::G) {
            cone_transform.translation.x -= 0.1;
        }
    }
}
/// Checks whether the player stands inside any patrolling enemy's vision
/// triangle; if so, shows a "Hey!" bubble and pushes a death cutscene.
pub fn check_for_player(
    enemies: Query<(Entity, &Enemy, &Transform, &Children)>,
    // NOTE(review): `cones` and `materials` are unused in this body —
    // presumably leftovers from an earlier cone-highlight effect; confirm
    // before removing, since they shape the system signature.
    mut cones: Query<&mut Handle<StandardMaterial>, With<Cone>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
    mut current_cutscene: ResMut<cutscene::CurrentCutscene>,
    mut state: ResMut<State<AppState>>,
    mut follow_text_event_writer: EventWriter<FollowTextEvent>,
    game_state: Res<GameState>,
    player: Query<&Transform, With<player::Player>>,
) {
    for (entity, enemy, transform, children) in enemies.iter() {
        match enemy.enemy_spawn.enemy_type {
            EnemyType::Patrol(_) | EnemyType::Mom(_) => {
                // Recover a signed facing angle from the quaternion; the axis
                // flips sign depending on rotation direction, so normalize.
                let (axis, mut angle) = transform.rotation.to_axis_angle();
                if axis.y >= -0.0 {
                    angle = -angle;
                }
                // Movie level uses a much narrower cone.
                let view_angle = if game_state.current_level == cutscene::Level::Movie {
                    0.1
                } else {
                    VIEW_ANGLE
                };
                let left_angle = angle - view_angle;
                let right_angle = angle + view_angle;
                // ...and a shorter reach; the base is shortened slightly too.
                let view_distance = if game_state.current_level == cutscene::Level::Movie {
                    VIEW_DISTANCE - 2.7
                } else {
                    VIEW_DISTANCE - 0.7
                };
                // Edge vectors of the vision triangle in the X/Z plane.
                let left_vector = Vec2::new(left_angle.cos(), left_angle.sin()).normalize() * (view_distance);
                let right_vector = Vec2::new(right_angle.cos(), right_angle.sin()).normalize() * (view_distance);
                for p_transform in player.iter() {
                    let enemy_position = Vec2::new(transform.translation.x, transform.translation.z);
                    let player_position = Vec2::new(p_transform.translation.x, p_transform.translation.z);
                    let triangle: (Vec2, Vec2, Vec2) = (enemy_position, enemy_position + left_vector, enemy_position + right_vector);
                    if point_in_triangle(player_position, triangle) {
                        follow_text_event_writer.send(FollowTextEvent {
                            entity,
                            value: "Hey!".to_string(),
                            is_player: false,
                            force: true,
                        });
                        // Caught: queue a random death cutscene and switch state.
                        current_cutscene.trigger(
                            level_collision::random_death_two(&game_state),
                            game_state.current_level
                        );
                        state.push(AppState::Cutscene).unwrap();
                        // println!("TRUE {:?} {:?}", player_position, triangle);
                    } else {
                    }
                }
                //println!("Angle: {} {}", axis, angle);
            },
            _ => ()
        }
    }
}
/// Barycentric containment test: is point `p` strictly inside triangle `t`?
///
/// Computes scaled barycentric coordinates `s` and `t` of `p`; the point is
/// inside when both are positive and `s + t < 2 * |area|`.
fn point_in_triangle(
    p: Vec2,
    t: (Vec2, Vec2, Vec2)
) -> bool {
    // The point p is inside the triangle if 0 <= s <= 1 and 0 <= t <= 1 and s + t <= 1.
    // s,t and 1 - s - t are called the barycentric coordinates of the point p.
    // The parameter `t` used to be shadowed by the second barycentric
    // coordinate below, making the formulas hard to audit; destructure the
    // vertices up front instead. The arithmetic is unchanged.
    let (v0, v1, v2) = t;
    // Signed area of the triangle (times 0.5 folded in).
    let a = 0.5 * (-v1.y * v2.x + v0.y * (-v1.x + v2.x) + v0.x * (v1.y - v2.y) + v1.x * v2.y);
    // Normalize so the comparisons below work for either winding order.
    let sign = if a < 0.0 { -1.0 } else { 1.0 };
    let s = (v0.y * v2.x - v0.x * v2.y + (v2.y - v0.y) * p.x + (v0.x - v2.x) * p.y) * sign;
    let t = (v0.x * v1.y - v0.y * v1.x + (v0.y - v1.y) * p.x + (v1.x - v0.x) * p.y) * sign;
    s > 0.0 && t > 0.0 && (s + t) < 2.0 * a * sign
}
/// Debug dump (press I): logs each enemy's rotation as an axis-angle pair.
pub fn print(
    enemies: Query<(&Enemy, &Transform)>,
    keyboard_input: Res<Input<KeyCode>>,
) {
    if keyboard_input.just_pressed(KeyCode::I) {
        for (_, transform) in enemies.iter() {
            // to_axis_angle() returns (axis: Vec3, angle: f32); the original
            // bound them as (rotation, axis), mislabeling both values. The
            // printed output is unchanged.
            let (axis, angle) = transform.rotation.to_axis_angle();
            println!("Axis: {} {} {} Angle: {},", axis.x, axis.y, axis.z, angle);
        }
    }
}
/// Runtime state attached to every spawned enemy entity.
pub struct Enemy {
    /// Spawn definition this enemy was created from (level, type, waypoints).
    pub enemy_spawn: EnemySpawnPoint,
    /// Index of the waypoint currently being walked toward.
    pub target_waypoint: usize,
    // Set to true at spawn; NOTE(review): never read in this module.
    pub is_patroling: bool,
    /// Current velocity, integrated into the transform each frame.
    pub velocity: Vec3,
    /// When true, Ticket enemies show their distracted speech bubble.
    pub is_distracted: bool,
}
|
use std::iter::FromIterator;
use map::{
RadixMap,
Matches as MapMatches,
Keys as MapKeys,
};
use key::Key;
/// A set based on a [Radix tree](https://en.wikipedia.org/wiki/Radix_tree).
///
/// See [`RadixMap`](../map/struct.RadixMap.html) for an in-depth explanation of the workings of this
/// struct, as it's simply a wrapper around `RadixMap<K, ()>`.
pub struct RadixSet<K: Key + ?Sized> {
    // The backing map; keys carry a unit value.
    map: RadixMap<K, ()>,
}
// Every method is a thin delegation to the backing `RadixMap<K, ()>`.
impl<K: Key + ?Sized> RadixSet<K> {
    /// Makes a new empty RadixSet.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use panoradix::RadixSet;
    ///
    /// let mut set = RadixSet::new();
    ///
    /// // entries can now be inserted into the empty set
    /// set.insert("a");
    /// ```
    pub fn new() -> RadixSet<K> {
        RadixSet { map: RadixMap::new() }
    }
    /// Clears the set, removing all values.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use panoradix::RadixSet;
    ///
    /// let mut set = RadixSet::new();
    /// set.insert("a");
    /// set.clear();
    /// assert!(set.is_empty());
    /// ```
    pub fn clear(&mut self) {
        self.map.clear();
    }
    /// Return the number of elements in the set.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use panoradix::RadixSet;
    ///
    /// let mut s = RadixSet::new();
    /// s.insert("a");
    /// s.insert("b");
    /// s.insert("c");
    /// assert_eq!(s.len(), 3);
    /// ```
    pub fn len(&self) -> usize {
        self.map.len()
    }
    /// Inserts a key into the set.
    ///
    /// If the set did not have this key present, `true` is returned, otherwise `false` is
    /// returned.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use panoradix::RadixSet;
    ///
    /// let mut set = RadixSet::new();
    /// assert_eq!(set.insert("a"), true);
    /// assert_eq!(set.is_empty(), false);
    ///
    /// assert_eq!(set.insert("a"), false);
    /// ```
    pub fn insert(&mut self, key: &K) -> bool {
        // The map returns the previous value (Some(())) when the key existed.
        self.map.insert(key, ()).is_none()
    }
    /// Returns if the key is present in the set.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use panoradix::RadixSet;
    ///
    /// let mut set = RadixSet::new();
    /// set.insert("a");
    /// assert_eq!(set.contains("a"), true);
    /// assert_eq!(set.contains("b"), false);
    /// ```
    pub fn contains(&self, key: &K) -> bool {
        self.map.contains_key(key)
    }
    /// Returns `true` if the set contains no elements.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use panoradix::RadixSet;
    ///
    /// let mut set = RadixSet::new();
    /// assert!(set.is_empty());
    /// set.insert("a");
    /// assert!(!set.is_empty());
    /// ```
    pub fn is_empty(&self) -> bool {
        self.map.is_empty()
    }
    /// Removes a key from the set, returning if the key was previously in the map.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use panoradix::RadixSet;
    ///
    /// let mut set = RadixSet::new();
    /// set.insert("a");
    /// assert_eq!(set.remove("a"), true);
    /// assert_eq!(set.remove("a"), false);
    /// ```
    pub fn remove(&mut self, key: &K) -> bool {
        // The map returns the removed value (Some(())) when the key existed.
        self.map.remove(key).is_some()
    }
    /// Gets an iterator over the keys inserted (sorted).
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use panoradix::RadixSet;
    ///
    /// let mut set = RadixSet::new();
    /// set.insert("c");
    /// set.insert("b");
    /// set.insert("a");
    ///
    /// for key in set.iter() {
    ///     println!("{}", key);
    /// }
    ///
    /// let first_key = set.iter().next().unwrap();
    /// assert_eq!(first_key, "a".to_string());
    /// ```
    pub fn iter(&self) -> Iter<K> {
        self.map.keys()
    }
    /// Gets an iterator over a filtered subset of the set (sorted).
    ///
    /// Note that the full key will be yielded each time, not just the filtered suffix.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use panoradix::RadixSet;
    ///
    /// let mut set = RadixSet::new();
    /// set.insert("abc");
    /// set.insert("acd");
    /// set.insert("abd");
    /// set.insert("bbb");
    /// set.insert("ccc");
    ///
    /// for key in set.find("a") {
    ///     println!("{}", key);
    /// }
    ///
    /// let first_key = set.find("a").next().unwrap();
    /// assert_eq!(first_key, "abc".to_string());
    /// ```
    pub fn find<'a>(&'a self, key: &K) -> Matches<'a, K> {
        // Wrap the map's matches iterator so only keys are yielded.
        Matches {
            iter: self.map.find(key),
        }
    }
}
impl<K: Key + ?Sized> Default for RadixSet<K> {
fn default() -> Self {
Self::new()
}
}
impl<K: Key + ?Sized, T: AsRef<K>> FromIterator<T> for RadixSet<K> {
    /// Builds a set containing every key yielded by `iter`, pairing each key
    /// with the unit value expected by the backing map.
    fn from_iter<It>(iter: It) -> Self
        where It: IntoIterator<Item=T>,
    {
        let pairs = iter.into_iter().map(|key| (key, ()));
        RadixSet {
            map: RadixMap::from_iter(pairs),
        }
    }
}
/// An iterator over a `RadixSet`'s entries.
// Directly reuses the map's key iterator since values are `()`.
pub type Iter<'a, K> = MapKeys<'a, K, ()>;
/// An iterator over the elements matching a call to [`find`].
///
/// [`find`]: struct.RadixSet.html#method.find
pub struct Matches<'a, K: 'a + Key + ?Sized> {
    // Underlying map iterator; its unit values are dropped when yielding.
    iter: MapMatches<'a, K, ()>,
}
impl<'a, K: 'a + Key + ?Sized> Iterator for Matches<'a, K> {
    type Item = K::Owned;

    /// Yields the next matching key, discarding the unit value stored with it.
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next().map(|(key, ())| key)
    }
}
#[cfg(test)]
mod tests {
    // Smoke tests exercising RadixSet through its public API only.
    use std::iter::FromIterator;
    use super::RadixSet;
    #[test]
    fn it_can_be_created() {
        let _: RadixSet<[i32]> = RadixSet::new();
    }
    #[test]
    fn it_accepts_an_empty_element() {
        // The empty key is a valid, distinct entry.
        let mut set: RadixSet<str> = RadixSet::new();
        set.insert("");
        assert!(!set.is_empty());
    }
    #[test]
    fn it_accepts_an_element() {
        let mut set: RadixSet<str> = RadixSet::new();
        set.insert("a");
        assert!(!set.is_empty());
    }
    #[test]
    fn it_accepts_multiple_elements() {
        // Includes prefix-sharing keys ("a", "ab", "ac") to exercise splits.
        let mut set: RadixSet<str> = RadixSet::new();
        set.insert("a");
        set.insert("b");
        set.insert("c");
        set.insert("ac");
        set.insert("ab");
        assert!(!set.is_empty());
    }
    #[test]
    fn it_can_be_built_from_multiple_elements() {
        let items = vec!["a", "ac", "acb", "b", "c", "d"];
        let set: RadixSet<str> = items.iter().collect();
        assert!(items.iter().all(|k| set.contains(k)))
    }
    #[test]
    fn it_has_a_key_iterator() {
        // Keys come back in sorted order regardless of insertion order.
        let mut set = RadixSet::<str>::new();
        set.insert("foo");
        set.insert("bar");
        set.insert("baz");
        let keys: Vec<_> = set.iter().collect();
        assert_eq!(keys, vec!["bar", "baz", "foo"]);
    }
    #[test]
    fn it_can_complete_keys() {
        // find() yields full keys sharing the given prefix, sorted.
        let v = vec!["foo", "bar", "baz"];
        let set: RadixSet<str> = RadixSet::from_iter(v);
        assert_eq!(set.find("ba").collect::<Vec<_>>(), vec!["bar", "baz"]);
    }
    #[test]
    fn it_can_remove_keys() {
        let v = vec!["foo", "bar", "baz"];
        let mut set: RadixSet<str> = RadixSet::from_iter(v);
        set.remove("bar");
        assert!(!set.contains("bar"));
        assert!(set.contains("baz"));
    }
}
|
extern crate rand;
extern crate ecdh_wrapper;
extern crate snow;
extern crate byteorder;
use std::str;
use std::net::TcpListener;
use std::net::TcpStream;
use std::io::Read;
use std::io::Write;
use byteorder::{ByteOrder, BigEndian};
use rand::os::OsRng;
use snow::Builder;
use snow::params::NoiseParams;
use ecdh_wrapper::PrivateKey;
// Hybrid post-quantum Noise pattern: XX with Kyber1024 hybrid forward secrecy.
const NOISE_PARAMS: & str = "Noise_XXhfs_25519+Kyber1024_ChaChaPoly_BLAKE2b";
//const NOISE_PARAMS: & str = "Noise_XX_25519_ChaChaPoly_BLAKE2b";
const NOISE_MESSAGE_MAX_SIZE: usize = 65535;
// Fixed on-the-wire sizes of the three XXhfs handshake messages; the
// commented block below holds the smaller sizes for the plain XX pattern.
const NOISE_HANDSHAKE_MESSAGE1_SIZE: usize = 1600;
const NOISE_HANDSHAKE_MESSAGE2_SIZE: usize = 1680;
const NOISE_HANDSHAKE_MESSAGE3_SIZE: usize = 64;
/*
const NOISE_HANDSHAKE_MESSAGE1_SIZE: usize = 32;
const NOISE_HANDSHAKE_MESSAGE2_SIZE: usize = 96;
const NOISE_HANDSHAKE_MESSAGE3_SIZE: usize = 64;
*/
// ChaChaPoly authentication tag length.
const MAC_SIZE: usize = 16;
// Each frame starts with an encrypted 4-byte length header plus its MAC.
const NOISE_MESSAGE_HEADER_SIZE: usize = MAC_SIZE + 4;
const HEADER_SIZE: usize = 4;
// Runs the responder side of the three-message Noise XXhfs handshake over
// `stream`, mutating `handshake_state` in place, and hands the stream back.
// Message sizes are fixed by the pattern (see the constants above); any
// protocol violation panics via unwrap.
fn do_noise_handshake(mut stream: TcpStream, handshake_state: &mut snow::HandshakeState) -> TcpStream {
    // -> e, e1
    let mut client_handshake1 = [0u8; NOISE_HANDSHAKE_MESSAGE1_SIZE];
    stream.read_exact(&mut client_handshake1).unwrap();
    // Payload buffer is required by the API even though message 1 carries none.
    let mut _msg = [0u8; NOISE_HANDSHAKE_MESSAGE1_SIZE];
    handshake_state.read_message(&client_handshake1, &mut _msg).unwrap();
    // <- e, ee, ekem1, s, es
    let mut mesg = [0u8; NOISE_HANDSHAKE_MESSAGE2_SIZE];
    handshake_state.write_message(b"", &mut mesg).unwrap();
    stream.write_all(&mesg).unwrap();
    // -> s, se
    let mut client_handshake2 = [0u8; NOISE_HANDSHAKE_MESSAGE3_SIZE];
    stream.read_exact(&mut client_handshake2).unwrap();
    let mut _msg2 = [0u8; NOISE_HANDSHAKE_MESSAGE3_SIZE];
    handshake_state.read_message(&client_handshake2, &mut _msg2).unwrap();
    stream
}
/// Serves one client: performs the Noise handshake as responder, then echoes
/// every received frame back until the connection closes.
///
/// Wire format per frame: encrypted 4-byte big-endian payload length
/// (header ciphertext = 4 bytes + 16-byte MAC), then the payload ciphertext
/// (plaintext + 16-byte MAC).
fn handle_client(stream: TcpStream, private_key: PrivateKey) {
    let params: NoiseParams = NOISE_PARAMS.parse().unwrap();
    let mut hs = Builder::new(params)
        .local_private_key(&private_key.to_vec())
        .build_responder()
        .unwrap();
    let mut stream = do_noise_handshake(stream, &mut hs);
    let mut transport = hs.into_transport_mode().unwrap();
    loop {
        let mut message_header_ciphertext = vec![0u8; NOISE_MESSAGE_HEADER_SIZE];
        match stream.read_exact(&mut message_header_ciphertext) {
            Ok(()) => {
            }
            Err(_) => {
                // EOF (or any read error) ends the session.
                println!("connection was closed");
                break
            }
        }
        // Decrypt the length header, then read exactly that much ciphertext.
        let mut header = [0u8; HEADER_SIZE];
        transport.read_message(&message_header_ciphertext, &mut header).unwrap();
        let ciphertext_size = BigEndian::read_u32(&header);
        let mut ciphertext = vec![0u8; ciphertext_size as usize];
        stream.read_exact(&mut ciphertext).unwrap();
        let mut plaintext = vec![0u8; ciphertext_size as usize - MAC_SIZE];
        transport.read_message(&ciphertext, &mut plaintext).unwrap();
        println!("PLAINTEXT is: {}\n", str::from_utf8(&plaintext).unwrap());
        // Use the named constant instead of the magic `MAC_SIZE + 4`.
        let mut send_header_ciphertext = vec![0u8; NOISE_MESSAGE_HEADER_SIZE];
        // reuse "header" because it's the same big endian encoded length
        transport.write_message(&header, &mut send_header_ciphertext).unwrap();
        let mut send_ciphertext = vec![0u8; ciphertext_size as usize];
        transport.write_message(&plaintext, &mut send_ciphertext).unwrap();
        let mut send_message_ciphertext = Vec::new();
        send_message_ciphertext.extend(send_header_ciphertext);
        send_message_ciphertext.extend(send_ciphertext);
        // write() may send only part of the buffer; write_all guarantees the
        // whole echo frame goes out (or errors).
        stream.write_all(&send_message_ciphertext).unwrap();
    }
}
/// Noise echo server: generates a fresh keypair, prints the public key, then
/// serves clients sequentially on 127.0.0.1:36669.
fn main() {
    let mut rng = OsRng::new().unwrap();
    let private_key = PrivateKey::generate(&mut rng).unwrap();
    let public_key = private_key.public_key();
    let server_addr = "127.0.0.1:36669";
    println!("public_key: {}", public_key.to_base64());
    println!("starting noise echo server, listening on {}...\n", server_addr);
    // `&str` is Copy, so the previous `server_addr.clone()` was a no-op
    // clone of the reference (clippy::clone_on_copy).
    let listener = TcpListener::bind(server_addr).unwrap();
    // XXX fix me: how to make it spawn new threads?
    // private_key does not implement Copy trait
    for stream in listener.incoming() {
        match stream {
            Ok(stream) => {
                handle_client(stream, private_key.clone());
            }
            Err(_) => {
                println!("Error");
            }
        }
    }
}
|
use spair::prelude::*;
/// Renders a loading spinner: a container div holding the animated spinner
/// element and a "Loading ..." text element.
pub struct Spinner;
impl<C: Component> spair::Render<C> for Spinner {
    fn render(self, nodes: spair::Nodes<C>) {
        nodes.div(|d| {
            // Outer container; classes are static since they never change.
            d.static_attributes()
                .class("loading_spinner_container")
                .nodes()
                // The spinner graphic itself (styled purely via CSS).
                .div(|d| d.static_attributes().class("loading_spinner").done())
                // Static caption shown beneath/beside the spinner.
                .div(|d| {
                    d.static_attributes()
                        .class("loading_spinner_text")
                        .nodes()
                        .r#static("Loading ...");
                });
        });
    }
}
|
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use cosmwasm_std::{CanonicalAddr, StdResult, Storage};
use cosmwasm_storage::{
singleton, singleton_read, Singleton,
};
// Storage key under which the singleton Config is persisted.
static KEY_CONFIG: &[u8] = b"config";
/// Contract configuration stored as a singleton.
#[derive(Default, Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct Config {
    /// Canonical address of the terraswap factory contract.
    pub terraswap_factory: CanonicalAddr,
}
/// Returns a writable singleton accessor for the stored `Config`.
pub fn config_store<S: Storage>(storage: &mut S) -> Singleton<S, Config> {
    singleton(storage, KEY_CONFIG)
}
/// Loads the stored `Config`; errors if it has never been saved.
pub fn read_config<S: Storage>(storage: &S) -> StdResult<Config> {
    singleton_read(storage, KEY_CONFIG).load()
}
|
use crate::common::*;
/// Values of the `set` directives parsed from a justfile.
#[derive(Debug, PartialEq)]
pub(crate) struct Settings<'src> {
    // None when the justfile doesn't mention dotenv-load at all.
    pub(crate) dotenv_load: Option<bool>,
    pub(crate) export: bool,
    pub(crate) positional_arguments: bool,
    // Shell override from the justfile; command-line shell takes precedence.
    pub(crate) shell: Option<setting::Shell<'src>>,
}
impl<'src> Settings<'src> {
    /// Default settings: no overrides, every flag off.
    pub(crate) fn new() -> Settings<'src> {
        Settings {
            dotenv_load: None,
            export: false,
            positional_arguments: false,
            shell: None,
        }
    }

    /// Builds a `Command` invoking the configured shell with its arguments.
    pub(crate) fn shell_command(&self, config: &Config) -> Command {
        let mut cmd = Command::new(self.shell_binary(config));
        cmd.args(self.shell_arguments(config));
        cmd
    }

    /// The shell binary to run: the justfile's `shell` setting wins unless a
    /// shell was given on the command line (`config.shell_present`).
    pub(crate) fn shell_binary<'a>(&'a self, config: &'a Config) -> &'a str {
        match &self.shell {
            Some(shell) if !config.shell_present => shell.command.cooked.as_ref(),
            _ => &config.shell,
        }
    }

    /// Arguments for the shell binary, chosen with the same precedence rule
    /// as `shell_binary`.
    pub(crate) fn shell_arguments<'a>(&'a self, config: &'a Config) -> Vec<&'a str> {
        match &self.shell {
            Some(shell) if !config.shell_present => shell
                .arguments
                .iter()
                .map(|argument| argument.cooked.as_ref())
                .collect(),
            _ => config.shell_args.iter().map(String::as_ref).collect(),
        }
    }
}
|
use actix_web::{
HttpResponse,
};
use super::super::{
service,
response,
};
/// Handler for the develop index page: fetches trend and popularity data
/// from the develop service and renders the response.
pub fn index() -> HttpResponse {
    let (domain_develop_trends, domain_develop_popularities) = &service::develop::index();
    response::develop_index::response(domain_develop_trends, domain_develop_popularities)
}
pub mod graphql;
mod misc;
pub mod ws {
    use crate::misc::ExtractUserAgent;
    use axum::ws::{Message, WebSocket};

    /// Handles one WebSocket connection: logs the first message, answers a
    /// ping with a pong (any other frame is echoed back), waits 3 seconds,
    /// then sends "Bye." and lets the socket drop.
    pub async fn sub_(mut socket: WebSocket, ExtractUserAgent(user_agent): ExtractUserAgent) {
        println!("`{:?}` connected", user_agent);
        if let Some(msg) = socket.recv().await {
            let msg = msg.unwrap();
            println!("Client says: {:?}", msg);
            // The original `if msg.is_text() {}` was an empty dead branch;
            // text frames simply fall through to the echo arm below.
            if msg.is_ping() {
                socket.send(Message::pong(msg.as_bytes())).await.unwrap();
            } else {
                socket.send(msg).await.unwrap();
            }
            tokio::time::sleep(std::time::Duration::from_secs(3)).await;
            socket.send(Message::text("Bye.")).await.unwrap();
        }
    }
}
pub mod db {
    pub use axum::extract::Extension;
    use http::StatusCode;
    //type ConnectionPool = Pool<PostgresConnectionManager<NoTls>>;
    /// Postgres connection pool, shared with handlers via axum `Extension`.
    pub type PoolPg = sqlx::Pool<sqlx::postgres::Postgres>;
    /// Demo handler: executes `SELECT $1` with a bound constant and returns
    /// the value, proving the pool extension and sqlx round-trip work.
    pub async fn pub_(Extension(pool): Extension<PoolPg>) -> Result<String, (StatusCode, String)> {
        // We cannot get a connection directly via an extractor because
        // `bb8::PooledConnection` contains a reference to the pool and
        // `extract::FromRequest` cannot return types that contain references.
        //
        // So therefore we have to get a connection from the pool manually.
        //let conn = pool.get().await.map_err(internal_error)?;
        let row: (i64,) = sqlx::query_as("SELECT $1")
            .bind(150_i64)
            .fetch_one(&pool)
            .await
            .map_err(internal_error)?;
        assert_eq!(row.0, 150);
        println!("SELECT $1 .bind {}", row.0);
        //let row = conn.query_one("select 1 + 1", &[]) .await .map_err(internal_error)?;
        //let two: i32 = row.try_get(0).map_err(internal_error)?;
        Ok(row.0.to_string())
    }
    /// Utility function for mapping any error into a `500 Internal Server Error`
    /// response.
    fn internal_error<E>(err: E) -> (StatusCode, String)
    where
        E: std::error::Error,
    {
        (StatusCode::INTERNAL_SERVER_ERROR, err.to_string())
    }
}
|
//! Tests auto-converted from "sass-spec/spec/non_conformant/errors/import"
#[allow(unused)]
use super::rsass;
mod file;
mod miss;
mod url;
|
use std::io;
/// Reads a number from stdin and prints its factorial.
/// Unparseable input falls back to 0 (whose factorial is 1).
fn main() {
    println!("Entering a number");
    let mut input1 = String::new();
    io::stdin().read_line(&mut input1).expect("failed to read from stdin");
    // Strip the trailing newline (and surrounding whitespace) before parsing.
    let trim = input1.trim();
    let fact = match trim.parse::<u32>() {
        Ok(fact) => fact,
        Err(_) => 0,
    };
    println!("The factorial of {} is {}",fact, calculate_fact(fact));
}
/// Computes `var!` (factorial); returns 1 for `var == 0`.
/// Like the original multiply loop, this overflows `u32` for `var > 12`
/// (panic in debug builds).
fn calculate_fact(var : u32) -> u32 {
    (1..=var).product()
}
|
//! This example demonstrates usage of [`ron_to_table::to_string`].
/// Builds a value from an inline RON scene description and pretty-prints
/// it with `ron_to_table::to_string`.
fn main() {
    // NOTE(review): the target type of `from_str` is inferred from the
    // `to_string(&scene)` call below — presumably `ron::Value`; confirm.
    let scene = ron::from_str(
        r#"
Scene(
materials: {
"metal": (reflectivity: 1.0),
"plastic": (reflectivity: 0.5),
},
entities: [
(name: "hero", material: "metal"),
(name: "monster", material: "plastic"),
],
)
"#,
    )
    .unwrap();
    println!("{}", ron_to_table::to_string(&scene));
}
|
//! Module containing definitions for BGP
use crate::{AsId, Prefix, RouterId};
/// Bgp Route
/// The following attributes are omitted
/// - ORIGIN: assumed to be always set to IGP
/// - ATOMIC_AGGREGATE: not used
/// - AGGREGATOR: not used
#[derive(Debug, Clone)]
pub struct BgpRoute {
    /// Destination prefix this route announces.
    pub prefix: Prefix,
    /// AS path of the route.
    pub as_path: Vec<AsId>,
    /// Next-hop router for the prefix.
    pub next_hop: RouterId,
    /// LOCAL_PREF attribute; `None` means the default (100) applies.
    pub local_pref: Option<u32>,
    /// MED attribute; `None` means the default (0) applies.
    pub med: Option<u32>,
}
impl BgpRoute {
    /// Applies the default values for any non-mandatory field
    /// (LOCAL_PREF = 100, MED = 0) in place.
    pub fn apply_default(&mut self) {
        // `get_or_insert` keeps an existing Some and fills in None,
        // exactly like `Some(x.unwrap_or(default))`.
        self.local_pref.get_or_insert(100);
        self.med.get_or_insert(0);
    }
    /// returns a clone of self, with the default values applied for any non-mandatory field.
    pub fn clone_default(&self) -> Self {
        let mut route = self.clone();
        route.apply_default();
        route
    }
}
impl PartialEq for BgpRoute {
    /// Equality up to defaults: both sides are normalized with
    /// `clone_default` before comparing, so `local_pref: None` compares
    /// equal to `local_pref: Some(100)` (and `med: None` to `Some(0)`).
    fn eq(&self, other: &Self) -> bool {
        let s = self.clone_default();
        let o = other.clone_default();
        // BUGFIX(consistency): `as_path` previously compared against
        // `other` instead of the normalized `o` — harmless today because
        // `clone_default` never alters `as_path`, but fragile if it ever
        // does. Compare the normalized values uniformly.
        s.prefix == o.prefix
            && s.as_path == o.as_path
            && s.next_hop == o.next_hop
            && s.local_pref == o.local_pref
            && s.med == o.med
    }
}
/// Type of a BGP session between two routers.
// `Eq` added: all variants are unit variants, so structural equality is total.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BgpSessionType {
    /// iBGP session with a regular peer.
    IBgpPeer,
    /// iBGP session with a route-reflector client.
    IBgpClient,
    /// eBGP session (external neighbor).
    EBgp,
}
impl BgpSessionType {
    /// returns true if the session type is EBgp
    pub fn is_ebgp(&self) -> bool {
        // `matches!` replaces the manual `match { EBgp => true, _ => false }`.
        matches!(self, Self::EBgp)
    }
    /// returns true if the session type is IBgp
    pub fn is_ibgp(&self) -> bool {
        !self.is_ebgp()
    }
}
/// Event exchanged over a BGP session.
#[derive(Debug, Clone)]
pub enum BgpEvent {
    /// Withdraw a previously announced prefix.
    Withdraw(Prefix),
    /// Announce or update a route.
    Update(BgpRoute),
}
|
extern crate kernel32;
extern crate user32;
extern crate winapi;
mod utils;
use utils::*;
use winapi::*;
use user32::*;
use kernel32::*;
use std::ptr::{null, null_mut};
use std::mem;
//ALTERNATIVE TO CALLING AN EXTERNAL FUNCTION
/*#[link(name = "d2d1")]
extern "system" {
pub fn D2D1CreateFactory(
factoryType: D2D1_FACTORY_TYPE,
riid: REFIID,
pFactoryOptions: *const D2D1_FACTORY_OPTIONS,
ppIFactory: *mut *mut c_void,
) -> HRESULT;
}
fn create_d2d1_factory(
factory_type: D2D1_FACTORY_TYPE,
riid: REFIID,
p_factory_options: *const D2D1_FACTORY_OPTIONS,
pp_factory: *mut *mut c_void,
) -> HRESULT {
unsafe { D2D1CreateFactory(factory_type, riid, p_factory_options, pp_factory) }
}*/
//STRUCTURES
/// Device-dependent Direct2D resources; raw COM pointers are null until
/// `set_d2d_resources` creates them.
pub struct Resources {
    render_target: *mut ID2D1HwndRenderTarget,
    brush1: *mut ID2D1SolidColorBrush,
    brush2: *mut ID2D1SolidColorBrush,
}
/// Application state; a pointer to it is stored in the window's extra
/// bytes (see `set_window`) so `wndproc` can recover it from the HWND.
pub struct MyApp {
    resources: Resources,
    factory: *mut ID2D1Factory,
    hwnd: HWND,
}
impl MyApp {
    /// Returns an app with every pointer nulled; real initialization
    /// happens later in `main` (window, factory, resources).
    fn initialized() -> Self {
        MyApp {
            factory: null_mut(),
            hwnd: null_mut(),
            resources: Resources {
                render_target: null_mut(),
                brush1: null_mut(),
                brush2: null_mut(),
            },
        }
    }
}
//D2D1 SETUP
/// Creates the device-independent D2D1 factory and stores it in `app`.
/// On failure the factory pointer is left null and an error box is shown.
fn set_d2d1_factory(app: &mut MyApp) {
    let mut factory: *mut c_void = null_mut();
    let factory_options = D2D1_FACTORY_OPTIONS {
        debugLevel: D2D1_DEBUG_LEVEL_NONE,
    };
    // NOTE(review): `create_d2d1_factory` is only defined in the
    // commented-out block above; presumably the active definition comes
    // from `utils::*` — confirm.
    let d2d1_factory = create_d2d1_factory(
        D2D1_FACTORY_TYPE_MULTI_THREADED,
        &UuidOfID2D1Factory,
        &factory_options as *const D2D1_FACTORY_OPTIONS,
        &mut factory,
    );
    if d2d1_factory != S_OK {
        error_msgbox("Could not create D2D1 factory.");
    } else {
        app.factory = factory as *mut ID2D1Factory;
    }
}
/// Lazily (re)creates the device-dependent resources — HWND render target
/// and the two solid-color brushes — for `app.hwnd`. No-op when the render
/// target already exists; requires `set_d2d1_factory` to have run first.
fn set_d2d_resources(app: &mut MyApp) {
    unsafe {
        if !app.resources.render_target.is_null() {
            // Resources already exist; nothing to do.
            return;
        } else if app.factory.is_null() {
            // BUGFIX: this branch fires when the *factory* is missing, but
            // the old message claimed the render target was missing.
            error_msgbox("There is no D2D1 factory!")
        } else {
            let hwnd = app.hwnd;
            let mut rect: RECT = WinStruct::default();
            let mut resources = Resources {
                render_target: null_mut(),
                brush1: null_mut(),
                brush2: null_mut(),
            };
            // Size the render target to the window's client area.
            GetClientRect(hwnd, &mut rect as *mut RECT);
            let d2d_rect = D2D1_SIZE_U {
                width: (rect.right - rect.left) as u32,
                height: (rect.bottom - rect.top) as u32,
            };
            let render_properties: D2D1_RENDER_TARGET_PROPERTIES = WinStruct::default();
            let hwnd_render_properties = D2D1_HWND_RENDER_TARGET_PROPERTIES {
                hwnd: hwnd,
                pixelSize: d2d_rect,
                presentOptions: D2D1_PRESENT_OPTIONS_NONE,
            };
            let gray = Color::solid_color(0.345, 0.423, 0.463);
            let red = Color::solid_color(0.941, 0.353, 0.392);
            let factory: &mut ID2D1Factory = &mut *app.factory;
            if factory.CreateHwndRenderTarget(
                &render_properties,
                &hwnd_render_properties,
                &mut resources.render_target,
            ) != S_OK
            {
                error_msgbox("Could not create render target!");
            }
            // NOTE(review): if CreateHwndRenderTarget failed above, this
            // dereferences a null render target — pre-existing behavior.
            let rt: &mut ID2D1HwndRenderTarget = &mut *resources.render_target;
            if rt.CreateSolidColorBrush(&gray, null(), &mut resources.brush1) != S_OK {
                error_msgbox("Could not create brush!");
            }
            if rt.CreateSolidColorBrush(&red, null(), &mut resources.brush2) != S_OK {
                error_msgbox("Could not create brush!");
            }
            app.resources = resources;
        }
    }
}
//RENDER METHOD
/// Draws the scene: white background, a 10px grid of thin gray lines, and
/// two centered rectangles (filled gray, red outline). Returns the HRESULT
/// of `EndDraw` (the caller checks it against D2DERR_RECREATE_TARGET).
fn on_paint(app: &mut MyApp) -> HRESULT {
    unsafe {
        // Identity transform.
        let d2d1_matrix: D2D1_MATRIX_3X2_F = WinStruct::default();
        // NOTE(review): other colors in this file use 0.0-1.0 components;
        // 255.0 presumably saturates to white — confirm `Color::solid_color`.
        let white = Color::solid_color(255.0, 255.0, 255.0);
        let mut render_size = D2D1_SIZE_F {
            width: 0.0,
            height: 0.0,
        };
        let render = &mut *app.resources.render_target;
        render.BeginDraw();
        render.Clear(&white);
        render.SetTransform(&d2d1_matrix);
        render.GetSize(&mut render_size);
        // Vertical grid lines every 10px.
        let mut count: f32 = 0.0;
        while count < render_size.width {
            render.DrawLine(
                D2D1_POINT_2F { x: count, y: 0.0 },
                D2D1_POINT_2F {
                    x: count,
                    y: render_size.height,
                },
                &mut **app.resources.brush1 as *mut ID2D1Brush,
                0.5,
                null_mut(),
            );
            count += 10.0;
        }
        // Horizontal grid lines every 10px.
        count = 0.0;
        while count < render_size.height {
            render.DrawLine(
                D2D_POINT_2F { x: 0.0, y: count },
                D2D_POINT_2F {
                    x: render_size.width,
                    y: count,
                },
                &mut **app.resources.brush1 as *mut ID2D1Brush,
                0.5,
                null_mut(),
            );
            count += 10.0;
        }
        // Draw two rectangles.
        let rx = render_size.width / 2.0;
        let ry = render_size.height / 2.0;
        let rect1 = D2D1_RECT_F {
            left: rx - 50.0,
            right: rx + 50.0,
            top: ry - 50.0,
            bottom: ry + 50.0,
        };
        let rect2 = D2D1_RECT_F {
            left: rx - 100.0,
            right: rx + 100.0,
            top: ry - 100.0,
            bottom: ry + 100.0,
        };
        render.FillRectangle(&rect1, &mut **app.resources.brush1 as *mut ID2D1Brush);
        render.DrawRectangle(
            &rect2,
            &mut **app.resources.brush2 as *mut ID2D1Brush,
            3.0,
            null_mut(),
        );
        // EndDraw reports device loss; propagate its HRESULT to the caller.
        render.EndDraw(null_mut(), null_mut())
    }
}
//RELEASE RESOURCES
/// Releases all COM objects: device-dependent resources first, then the
/// device-independent factory.
fn release_resources(app: &mut MyApp) {
    unsafe {
        safe_release(app);
        if !app.factory.is_null() {
            (*app.factory).Release();
            app.factory = null_mut();
        }
    }
}
/// Releases the device-dependent resources (brushes + render target) and
/// nulls the pointers; keyed on the render target being non-null.
fn safe_release(app: &mut MyApp) {
    unsafe {
        if !app.resources.render_target.is_null() {
            // NOTE(review): brush1/brush2 are released without their own
            // null checks — if brush creation failed in `set_d2d_resources`
            // these could be null; confirm that case cannot occur here.
            (*app.resources.brush1).Release();
            (*app.resources.brush2).Release();
            (*app.resources.render_target).Release();
            app.resources.brush1 = null_mut();
            app.resources.brush2 = null_mut();
            app.resources.render_target = null_mut();
        }
    }
}
//MESSAGE PROCESSING
/// Window procedure: recovers the `MyApp` pointer stored in the window's
/// extra bytes (slot 0) and dispatches paint/size/destroy messages.
unsafe extern "system" fn wndproc(
    hwnd: HWND,
    message: UINT32,
    wparam: WPARAM,
    lparam: LPARAM,
) -> LRESULT {
    let app_ptr = GetWindowLongPtrW(hwnd, 0);
    // NOTE(review): the stored pointer is dereferenced unconditionally;
    // it is 0 until `set_window` runs (WM_CREATE stores 0), so messages
    // arriving before then would use a null `MyApp` — confirm none do.
    let mut app: &mut MyApp = &mut *(app_ptr as *mut MyApp);
    match message {
        WM_PAINT => {
            // (Re)create device resources if lost, then draw.
            set_d2d_resources(app);
            if on_paint(app) == D2DERR_RECREATE_TARGET {
                // Device lost: drop device-dependent resources so the next
                // WM_PAINT rebuilds them.
                safe_release(app);
            }
            0
        }
        WM_CREATE => {
            // Initialize the extra-bytes slot; the real `MyApp` pointer is
            // stored later by `set_window`.
            SetWindowLongPtrW(hwnd, 0, 0);
            0
        }
        WM_SIZE => {
            if app_ptr != 0 {
                // Resize the render target to the new client size.
                let width = GET_X_LPARAM(lparam) as u32;
                let height = GET_Y_LPARAM(lparam) as u32;
                let render_size = D2D_SIZE_U {
                    width: width,
                    height: height,
                };
                let render = &mut *app.resources.render_target;
                render.Resize(&render_size);
            }
            0
        }
        WM_DESTROY => {
            release_resources(&mut app);
            PostQuitMessage(0);
            0
        }
        _ => DefWindowProcW(hwnd, message, wparam, lparam),
    }
}
//WINDOW CREATION
/// Registers the window class ("direct2d_example"), reserving enough
/// per-window extra bytes to hold a `MyApp` pointer (see `set_window`).
/// Posts WM_QUIT if registration fails.
pub fn init_class() {
    unsafe {
        let class = "direct2d_example".to_wide();
        let wndcl = WNDCLASSEXW {
            cbSize: mem::size_of::<WNDCLASSEXW>() as UINT32,
            style: CS_HREDRAW | CS_VREDRAW,
            lpfnWndProc: Some(wndproc),
            cbClsExtra: 0,
            cbWndExtra: mem::size_of::<MyApp>() as INT32,
            hInstance: GetModuleHandleW(null_mut()),
            hIcon: 0 as HICON,
            hCursor: LoadCursorW(null_mut(), IDC_ARROW),
            hbrBackground: COLOR_WINDOWFRAME as HBRUSH,
            lpszMenuName: null(),
            lpszClassName: class.as_ptr() as *const u16,
            hIconSm: 0 as HICON,
        };
        // BUGFIX: the success path used to call RegisterClassExW a second
        // time (`else { RegisterClassExW(&wndcl); }`); re-registering an
        // already-registered class always fails and served no purpose.
        if RegisterClassExW(&wndcl) == 0 {
            error_msgbox("Could not register class!");
            PostQuitMessage(0);
        }
    }
}
/// Creates the main 600x400 overlapped window for the registered class and
/// stores its handle in `app.hwnd`; posts WM_QUIT on failure.
fn create_window(app: &mut MyApp, class: &[u16], window: &[u16]) {
    unsafe {
        let hwnd = CreateWindowExW(
            WS_EX_COMPOSITED,
            class.as_ptr(),
            window.as_ptr(),
            WS_OVERLAPPEDWINDOW | WS_VISIBLE,
            CW_USEDEFAULT,
            CW_USEDEFAULT,
            600,
            400,
            null_mut(),
            null_mut(),
            GetModuleHandleW(null_mut()),
            null_mut(),
        );
        if hwnd.is_null() {
            error_msgbox("Could not create window!");
            PostQuitMessage(0);
        } else {
            app.hwnd = hwnd;
        }
    }
}
//ASSOCIATE STRUCTURES/DATA
/// Stores a pointer to `app` in the window's extra bytes (slot 0) so that
/// `wndproc` can recover the application state from just an HWND.
fn set_window(app: &mut MyApp) {
    unsafe {
        SetWindowLongPtrW(app.hwnd, 0, app as *mut MyApp as LONG_PTR);
    }
}
/// Entry point: builds the app, registers the class, creates the window,
/// wires the app pointer into the window, creates the D2D1 factory, then
/// runs the classic Win32 message loop until WM_QUIT.
fn main() {
    unsafe {
        let mut app = MyApp::initialized();
        let class = "direct2d_example".to_wide();
        let window = "Hello World!".to_wide();
        init_class();
        create_window(&mut app, &class, &window);
        // Must run after window creation: stores `&mut app` for wndproc.
        set_window(&mut app);
        set_d2d1_factory(&mut app);
        let mut msg: MSG = WinStruct::default();
        while GetMessageW(&mut msg as *mut MSG, 0 as HWND, 0, 0) != 0 {
            TranslateMessage(&msg as *const MSG);
            DispatchMessageW(&msg as *const MSG);
        }
    }
}
|
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
use aws_config::meta::region::RegionProviderChain;
use aws_sdk_lambda::model::Runtime;
use aws_sdk_lambda::{Client, Error, Region, PKG_VERSION};
use structopt::StructOpt;
// Command-line options (plain `//` on purpose: `///` text here would
// change the structopt-generated help output).
#[derive(Debug, StructOpt)]
struct Opt {
    /// The AWS Region.
    #[structopt(short, long)]
    region: Option<String>,
    /// The Lambda function's ARN.
    #[structopt(short, long)]
    arn: String,
    /// Whether to display additional runtime information.
    #[structopt(short, long)]
    verbose: bool,
}
// Change Java runtime in Lambda function.
// snippet-start:[lambda.rust.change-java-runtime]
/// Switches the given function's Java 8/11 runtime to Java 8 on Amazon
/// Linux 2 (`Runtime::Java8al2`, i.e. Corretto). Other functions and
/// non-Java runtimes are left untouched.
async fn set_runtimes(client: &Client, arn: &str) -> Result<(), Error> {
    // Get function's runtime
    let resp = client.list_functions().send().await?;
    for function in resp.functions.unwrap_or_default() {
        // We only change the runtime for the specified function.
        // NOTE(review): `function_arn`/`runtime` are unwrapped — a function
        // missing either field would panic; confirm the API always sets them.
        if arn == function.function_arn.unwrap() {
            let rt = function.runtime.unwrap();
            // We only change the Java runtime.
            if rt == Runtime::Java11 || rt == Runtime::Java8 {
                // Change it to Java8al2 (Java 8 on AL2 / Corretto).
                println!("Original runtime: {:?}", rt);
                let result = client
                    .update_function_configuration()
                    .function_name(function.function_name.unwrap())
                    .runtime(Runtime::Java8al2)
                    .send()
                    .await?;
                let result_rt = result.runtime.unwrap();
                println!("New runtime: {:?}", result_rt);
            }
        }
    }
    Ok(())
}
// snippet-end:[lambda.rust.change-java-runtime]
/// Sets a Lambda function's Java runtime to Corretto.
/// # Arguments
///
/// * `-a ARN` - The ARN of the Lambda function.
/// * `[-r -REGION]` - The Region in which the client is created.
/// If not supplied, uses the value of the **AWS_REGION** environment variable.
/// If the environment variable is not set, defaults to **us-west-2**.
/// * `[-v]` - Whether to display additional information.
#[tokio::main]
async fn main() -> Result<(), Error> {
    tracing_subscriber::fmt::init();
    let Opt {
        arn,
        region,
        verbose,
    } = Opt::from_args();
    // Region resolution: CLI flag, then default provider chain, then us-west-2.
    let region_provider = RegionProviderChain::first_try(region.map(Region::new))
        .or_default_provider()
        .or_else(Region::new("us-west-2"));
    println!();
    if verbose {
        println!("Lambda client version: {}", PKG_VERSION);
        println!(
            "Region: {}",
            region_provider.region().await.unwrap().as_ref()
        );
        println!("Lambda function ARN: {}", &arn);
        println!();
    }
    let shared_config = aws_config::from_env().region(region_provider).load().await;
    let client = Client::new(&shared_config);
    set_runtimes(&client, &arn).await
}
|
use crate::{IntoSlice, IntoSliceFrom, IntoSliceTo, OwningSlice, OwningSliceFrom, OwningSliceTo};
/// Exhaustive sanity check for a 4-element `[0, 1, 2, 3]` buffer: every
/// valid (start, len) combination of `into_slice`, plus all valid
/// `into_slice_from` starts and `into_slice_to` ends.
macro_rules! sanity {
    ($buf:expr) => {{
        assert_eq!(*$buf.into_slice(0u8, 0), []);
        assert_eq!(*$buf.into_slice(0u8, 1), [0]);
        assert_eq!(*$buf.into_slice(0u8, 2), [0, 1]);
        assert_eq!(*$buf.into_slice(0u8, 3), [0, 1, 2]);
        assert_eq!(*$buf.into_slice(0u8, 4), [0, 1, 2, 3]);
        assert_eq!(*$buf.into_slice(1u8, 0), []);
        assert_eq!(*$buf.into_slice(1u8, 1), [1]);
        assert_eq!(*$buf.into_slice(1u8, 2), [1, 2]);
        assert_eq!(*$buf.into_slice(1u8, 3), [1, 2, 3]);
        assert_eq!(*$buf.into_slice(2u8, 0), []);
        assert_eq!(*$buf.into_slice(2u8, 1), [2]);
        assert_eq!(*$buf.into_slice(2u8, 2), [2, 3]);
        assert_eq!(*$buf.into_slice(3u8, 0), []);
        assert_eq!(*$buf.into_slice(3u8, 1), [3]);
        assert_eq!(*$buf.into_slice(4u8, 0), []);
        assert_eq!(*$buf.into_slice_from(0u8), [0, 1, 2, 3]);
        assert_eq!(*$buf.into_slice_from(1u8), [1, 2, 3]);
        assert_eq!(*$buf.into_slice_from(2u8), [2, 3]);
        assert_eq!(*$buf.into_slice_from(3u8), [3]);
        assert_eq!(*$buf.into_slice_from(4u8), []);
        assert_eq!(*$buf.into_slice_to(0u8), []);
        assert_eq!(*$buf.into_slice_to(1u8), [0]);
        assert_eq!(*$buf.into_slice_to(2u8), [0, 1]);
        assert_eq!(*$buf.into_slice_to(3u8), [0, 1, 2]);
        assert_eq!(*$buf.into_slice_to(4u8), [0, 1, 2, 3]);
    }};
}
#[test]
fn slice() {
    // The IntoSlice* traits are implemented directly for shared slices.
    let slice: &[_] = &[0, 1, 2, 3];
    sanity!(slice);
}
#[test]
fn slice_mut() {
    // ...and for mutable slices.
    let slice: &mut [_] = &mut [0, 1, 2, 3];
    sanity!(slice);
}
#[test]
fn owning_slice() {
    // OwningSlice(buf, start, len) must window out the 0xff sentinels.
    let array = &[0xff, 0, 1, 2, 3, 0xff];
    let slice = OwningSlice(array, 1u8, 4);
    sanity!(slice);
}
#[test]
fn owning_slice_from() {
    // Leading sentinel only; slice starts at index 1.
    let array = &[0xff, 0, 1, 2, 3];
    let slice_from = OwningSliceFrom(array, 1u8);
    sanity!(slice_from);
}
#[test]
fn owning_slice_to() {
    // Trailing sentinel only; slice ends before index 4.
    let array = &[0, 1, 2, 3, 0xff];
    let slice_to = OwningSliceTo(array, 4u8);
    sanity!(slice_to);
}
// Out-of-bounds construction must panic: each case requests a window that
// does not fit inside the 4-element buffer.
#[test]
#[should_panic]
fn oob_slice_1() {
    // Start index past the end of the buffer.
    let buf = &[0, 1, 2, 3];
    OwningSlice(buf, 5u8, 2);
}
#[test]
#[should_panic]
fn oob_slice_2() {
    // Valid start, but start + len overruns the buffer.
    let buf = &[0, 1, 2, 3];
    OwningSlice(buf, 1u8, 4);
}
#[test]
#[should_panic]
fn oob_slice_from() {
    let buf = &[0, 1, 2, 3];
    OwningSliceFrom(buf, 5u8);
}
#[test]
#[should_panic]
fn oob_slice_to() {
    let buf = &[0, 1, 2, 3];
    OwningSliceTo(buf, 5u8);
}
|
use crate::atomicmin::AtomicMin;
use crate::Deadline;
use crate::PngError;
use crate::PngResult;
pub use cloudflare_zlib::is_supported;
use cloudflare_zlib::*;
impl From<ZError> for PngError {
    /// Maps cloudflare-zlib errors into this crate's error type,
    /// preserving the size detail of over-long deflate output.
    fn from(err: ZError) -> Self {
        match err {
            ZError::DeflatedDataTooLarge(n) => PngError::DeflatedDataTooLong(n),
            other => PngError::Other(other.to_string().into()),
        }
    }
}
/// Deflates `data` with cloudflare-zlib, aborting early when the output
/// exceeds `max_size` or the `deadline` passes.
///
/// # Errors
/// `PngError::DeflatedDataTooLong` when the limit is hit,
/// `PngError::TimedOut` when the deadline passes, or a wrapped zlib error.
pub(crate) fn cfzlib_deflate(
    data: &[u8],
    level: u8,
    strategy: u8,
    window_bits: u8,
    max_size: &AtomicMin,
    deadline: &Deadline,
) -> PngResult<Vec<u8>> {
    let mut stream = Deflate::new(level.into(), strategy.into(), window_bits.into())?;
    // Pre-size the output: the known cap if any, else half the input.
    stream.reserve(max_size.get().unwrap_or(data.len() / 2));
    let max_size = max_size.as_atomic_usize();
    // max size is generally checked after each split,
    // so splitting the buffer into pieces gives more checks
    // = better chance of hitting it sooner.
    let chunk_size = (data.len() / 4).max(1 << 15).min(1 << 18); // 32-256KB
    for chunk in data.chunks(chunk_size) {
        stream.compress_with_limit(chunk, max_size)?;
        if deadline.passed() {
            return Err(PngError::TimedOut);
        }
    }
    Ok(stream.finish()?)
}
/// Round-trip smoke test: deflate a small buffer at max compression and
/// check that inflating restores the original bytes.
#[test]
fn compress_test() {
    let vec = cfzlib_deflate(
        b"azxcvbnm",
        Z_BEST_COMPRESSION as u8,
        Z_DEFAULT_STRATEGY as u8,
        15,
        &AtomicMin::new(None),
        &Deadline::new(None),
    )
    .unwrap();
    let res = crate::deflate::inflate(&vec).unwrap();
    assert_eq!(&res, b"azxcvbnm");
}
|
use crate::dinfo;
use pollster::block_on;
use winit::event::{Event, VirtualKeyCode};
use winit::window::Window;
use winit_input_helper::WinitInputHelper;
use crate::config::GlobalConfig;
use crate::NOW;
/// Global constant resources
/// Different worlds will share these
pub struct Resources {
    //
    // GLOBAL
    pub window: Window,
    pub global_config: GlobalConfig,
    //
    // INPUT
    pub input: WinitInputHelper,
    //
    // FLAGS
    /// Set when Escape is pressed or the window requests close.
    pub quit: bool,
    /// Toggled by Space; freezes `elapsed_time` while set.
    pub pause: bool,
    /// Modified externally
    pub fullscreen: bool,
    //
    // INSTANCE
    pub surface: wgpu::Surface,
    pub device: wgpu::Device,
    pub queue: wgpu::Queue,
    pub sc_desc: wgpu::SwapChainDescriptor,
    pub swap_chain: wgpu::SwapChain,
    /// MSAA sample count from config; <= 1 disables multisampling.
    pub sample_count: u32,
    //
    // TIME
    pub last_tick: std::time::Instant,
    /// Seconds elapsed since the previous tick.
    pub delta: f32,
    pub elapsed_time: std::time::Duration,
    //
    // BUILD AFTER SELF
    /// Multisampled color buffer; `None` when MSAA is off.
    pub msaa_fbuffer: Option<wgpu::TextureView>,
    //
    // WINDOW
    pub size: winit::dpi::PhysicalSize<u32>,
    pub frame_num: usize,
    /// First frame number allowed to render; `None` disables rendering
    /// entirely (see `resize`).
    pub active: Option<usize>,
}
impl Resources {
    /// Builds the whole GPU context for `window`: Vulkan instance, surface,
    /// adapter, device/queue, swap chain and (optionally) the MSAA buffer.
    ///
    /// Panics if no suitable GPU adapter or logical device can be acquired.
    pub fn new(window: Window, global_config: GlobalConfig) -> Self {
        // CONFIG
        let sample_count = global_config.window.msaa;
        // INPUT
        let input = WinitInputHelper::new();
        // FLAGS
        let fullscreen = window.fullscreen().is_some();
        // INSTANCE
        dinfo!("Instance ({} ms)", NOW.elapsed().as_millis());
        let size = window.inner_size();
        let instance = wgpu::Instance::new(wgpu::BackendBit::VULKAN);
        let surface = unsafe { instance.create_surface(&window) };
        let adapter = block_on(async {
            instance
                .request_adapter(&wgpu::RequestAdapterOptions {
                    power_preference: wgpu::PowerPreference::HighPerformance,
                    compatible_surface: Some(&surface),
                })
                .await
        })
        .expect("COULD NOT FIND GPU");
        let (device, queue) = block_on(async {
            adapter
                .request_device(
                    &wgpu::DeviceDescriptor {
                        label: None,
                        features: wgpu::Features::default()
                            | wgpu::Features::MAPPABLE_PRIMARY_BUFFERS,
                        limits: wgpu::Limits {
                            max_storage_buffers_per_shader_stage: 10,
                            ..Default::default()
                        },
                    },
                    None,
                )
                .await
        })
        .expect("COULD NOT BUILD LOGICAL DEVICE/QUEUE");
        let sc_desc = wgpu::SwapChainDescriptor {
            usage: wgpu::TextureUsage::RENDER_ATTACHMENT,
            format: wgpu::TextureFormat::Bgra8Unorm,
            width: size.width,
            height: size.height,
            present_mode: wgpu::PresentMode::Mailbox,
        };
        let swap_chain = device.create_swap_chain(&surface, &sc_desc);
        let mut s = Self {
            window,
            global_config,
            input,
            quit: false,
            pause: false,
            fullscreen,
            surface,
            device,
            queue,
            sc_desc,
            swap_chain,
            sample_count,
            last_tick: std::time::Instant::now(),
            delta: 0.0,
            elapsed_time: std::time::Duration::new(0, 0),
            msaa_fbuffer: None,
            size,
            frame_num: 0,
            active: Some(0),
        };
        dinfo!(
            "Multisampled Framebuffer ({} ms)",
            NOW.elapsed().as_millis()
        );
        // Must run after `Self` exists: it reads `sc_desc` and `sample_count`.
        s.create_multisampled_framebuffer();
        s
    }
    /// Feeds a winit event into the input helper; once an event batch
    /// completes, updates the quit/pause flags from keyboard state.
    pub fn update_events(&mut self, event: &Event<()>) {
        if self.input.update(event) {
            if self.input.key_pressed(VirtualKeyCode::Escape) || self.input.quit() {
                self.quit = true;
                return;
            }
            if self.input.key_pressed(VirtualKeyCode::Space) {
                self.pause = !self.pause;
            }
        }
    }
    /// Toggle windows fullscreen setting when called
    pub fn toggle_fullscreen(&mut self) {
        dinfo!("Fullscreen Toggled");
        if self.fullscreen {
            self.window.set_fullscreen(None);
            self.fullscreen = false;
        } else {
            self.window
                .set_fullscreen(Some(winit::window::Fullscreen::Borderless(None)));
            self.fullscreen = true;
        }
    }
    /// (Re)creates the multisampled color buffer matching the current
    /// swap-chain size/format, or clears it when MSAA is disabled.
    fn create_multisampled_framebuffer(&mut self) {
        if self.sample_count > 1 {
            let multisampled_texture_extent = wgpu::Extent3d {
                width: self.sc_desc.width,
                height: self.sc_desc.height,
                depth: 1,
            };
            let multisampled_frame_descriptor = &wgpu::TextureDescriptor {
                size: multisampled_texture_extent,
                mip_level_count: 1,
                sample_count: self.sample_count,
                dimension: wgpu::TextureDimension::D2,
                format: self.sc_desc.format,
                usage: wgpu::TextureUsage::RENDER_ATTACHMENT,
                label: None,
            };
            self.msaa_fbuffer = Some(
                self.device
                    .create_texture(multisampled_frame_descriptor)
                    .create_view(&wgpu::TextureViewDescriptor::default()),
            );
        } else {
            self.msaa_fbuffer = None;
        }
    }
    /// Rebuilds size-dependent objects (swap chain, MSAA buffer) after a
    /// window resize; disables rendering on a zero-sized window.
    pub fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {
        self.size = new_size;
        self.sc_desc.width = new_size.width;
        self.sc_desc.height = new_size.height;
        // Taken from the wgpu-rs "Water" example
        if self.sc_desc.width == 0 && self.sc_desc.height == 0 {
            // Stop rendering altogether.
            self.active = None;
            return;
        } else {
            // The next frame queued is the wrong size: (0, 0),
            // so we skip a frame to avoid crashes where our
            // textures are the correct (window) size, and the
            // frame is still (0, 0).
            self.active = Some(self.frame_num + 1);
        }
        self.swap_chain = self.device.create_swap_chain(&self.surface, &self.sc_desc);
        self.create_multisampled_framebuffer();
    }
    /// Per-frame bookkeeping: advances the frame counter and timers.
    pub fn render(&mut self) {
        self.frame_num += 1;
        self.update_time_info();
    }
    /// Updates `delta` from the time since the last tick; `elapsed_time`
    /// only advances while not paused.
    fn update_time_info(&mut self) {
        self.delta = self.last_tick.elapsed().as_secs_f32();
        if !self.pause {
            self.elapsed_time += self.last_tick.elapsed();
        }
        self.last_tick = std::time::Instant::now();
    }
}
|
//! Request specific implementations
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Deserialize, Serialize)]
/// The new page request
pub struct CreateStepPage {
    /// Total number of steps required to complete the page.
    pub steps: u64,
}
#[derive(Debug, PartialEq, Deserialize, Serialize)]
/// Update steps
pub struct UpdateStepPage {
    /// Number of steps completed so far.
    pub step_completed: u64,
    /// Secret authorizing the update.
    pub admin_secret: String,
}
#[derive(Debug, PartialEq, Deserialize, Serialize)]
/// Delete steps page
pub struct DeleteStepPage {
    /// Secret authorizing the deletion.
    pub admin_secret: String,
}
|
pub mod editors;
pub mod paths;
use std::io;
use crate::unity;
use thiserror::Error;
//
/// Errors raised while reading or writing Unity Hub configuration.
#[derive(Error, Debug)]
pub enum UvmHubError {
    #[error("unity hub config: '{0}' is missing")]
    ConfigNotFound(String),
    #[error("Unity Hub config directory missing")]
    ConfigDirectoryNotFound,
    #[error("failed to read Unity Hub config {config}")]
    ReadConfigError {
        config: String,
        // Underlying cause, erased to anyhow for flexibility.
        source: anyhow::Error,
    },
    #[error("can't write Unity Hub config: '{config}'")]
    WriteConfigError {
        config: String,
        source: anyhow::Error,
    },
    #[error("failed to create config directory")]
    FailedToCreateConfigDirectory {
        source: std::io::Error,
    },
    #[error("failed to create config file for config {config}")]
    FailedToCreateConfig {
        config: String,
        source: io::Error
    },
    #[error("Unity Hub editor install path not found")]
    InstallPathNotFound,
}
type Result<T> = std::result::Result<T, UvmHubError>;
/// Lists Unity installations known to the Unity Hub: editors discovered
/// under the Hub's install path, chained with entries from the Hub's
/// editors config. Falls back to the config entries alone when the
/// install directory cannot be scanned.
///
/// # Errors
/// Fails when the install path is unknown (`InstallPathNotFound`) or the
/// editors config cannot be loaded.
pub fn list_installations() -> Result<unity::Installations> {
    // `ok_or` instead of `ok_or_else`: the error is a unit variant, so
    // there is nothing to evaluate lazily (clippy::unnecessary_lazy_evaluations).
    let install_path = paths::install_path()
        .ok_or(UvmHubError::InstallPathNotFound)?;
    debug!("unity hub install path: {}", install_path.display());
    let editors = editors::Editors::load()?;
    debug!("raw editors map: {:?}", editors);
    let editors = unity::Installations::from(editors);
    if let Ok(installations) = unity::list_installations_in_dir(&install_path) {
        let iter = installations.chain(editors);
        return Ok(unity::Installations(Box::new(iter)));
    }
    Ok(editors)
}
|
use std::collections::HashMap;
use serde::ser::{SerializeStruct, Error};
use serde::Serialize;
use crate::{Element, Uuid, Number};
use crate::Value;
use crate::error::TychoError;
use crate::serde::ser::TychoSerializer;
use crate::serde::ser::seq::{SeqSerializer, SeqSerializerType};
use crate::types::ident::ValueIdent;
use crate::serde::ser::map::MapSerializer;
/// Accumulates a struct's fields as named `Element`s while serde walks it;
/// `name` is the struct's type name (used to detect internal transfer types).
#[allow(dead_code)]
pub struct StructSerializer {
    content: HashMap<String, Element>,
    name: String
}
impl StructSerializer {
pub fn new(name: &str) -> Self {
Self {
content: HashMap::new(),
name: name.to_string()
}
}
}
impl SerializeStruct for StructSerializer {
    type Ok = Element;
    type Error = TychoError;
    /// Serializes one struct field.
    ///
    /// With `serde_types` enabled, structs named `__tycho__/array` or
    /// `__tycho__/map` are internal transfer wrappers: their `"inner"`
    /// field is serialized with a typed sequence/map serializer whose
    /// element ident was previously stored under the `"ident"` key.
    #[cfg(feature="serde_types")]
    fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error> where
        T: Serialize {
        if key == "inner" {
            match self.name.as_str() {
                "__tycho__/array" => {
                    // The `"ident"` field (a u8 type prefix) must already have
                    // been recorded; it selects the array's element type.
                    return if let Some(Element::Value(Value::Number(Number::Unsigned8(internal)))) = self.content.get("ident") {
                        if let Some(ident) = ValueIdent::from_internal_prefix(&internal) {
                            self.content.insert(
                                "inner".to_string(),
                                value.serialize(
                                    SeqSerializer::new(SeqSerializerType::Array(ident))
                                )?);
                            Ok(())
                        } else {
                            Err(Self::Error::custom("Invalid serde transfer type for Array."))
                        }
                    } else {
                        Err(Self::Error::custom("Invalid serde transfer type for Array."))
                    }
                },
                "__tycho__/map" => {
                    return if let Some(Element::Value(Value::Number(Number::Unsigned8(internal)))) = self.content.get("ident") {
                        if let Some(ident) = ValueIdent::from_internal_prefix(&internal) {
                            self.content.insert(
                                "inner".to_string(),
                                value.serialize(MapSerializer::typed(ident))?
                            );
                            Ok(())
                        } else {
                            // NOTE(review): the message says "Array" on the
                            // map path too — looks copy-pasted; confirm.
                            Err(Self::Error::custom("Invalid serde transfer type for Array."))
                        }
                    } else {
                        Err(Self::Error::custom("Invalid serde transfer type for Array."))
                    }
                }
                _ => ()
            }
        }
        // Ordinary field: serialize with the general Tycho serializer.
        self.content.insert(key.to_string(), value.serialize(TychoSerializer)?);
        Ok(())
    }
    #[cfg(not(feature="serde_types"))]
    fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error> where
        T: Serialize {
        self.content.insert(key.to_string(), value.serialize(TychoSerializer)?);
        Ok(())
    }
    /// Finishes the struct, unwrapping internal transfer wrappers
    /// (uuid/array/map) into their underlying element.
    #[cfg(feature="serde_types")]
    fn end(mut self) -> Result<Self::Ok, Self::Error> {
        match self.name.as_str() {
            // NOTE(review): triple underscores here vs. the double
            // underscores used for array/map above — confirm intended.
            "___tycho___/uuid" => if let Some(Element::Value(Value::Bytes(x))) = self.content.get("inner") {
                Ok(Element::Value(Value::UUID(Uuid::from_bytes(&x))))
            } else {
                Err(Self::Error::custom("Invalid serde transfer type for Uuid."))
            },
            "__tycho__/array" => if let Some(x) = self.content.remove("inner") {
                Ok(x)
            } else {
                Err(Self::Error::custom("Invalid serde transfer type for Array."))
            },
            "__tycho__/map" => if let Some(x) = self.content.remove("inner") {
                Ok(x)
            } else {
                Err(Self::Error::custom("Invalid serde transfer type for Map."))
            },
            _ => Ok(Element::Struct(self.content))
        }
    }
    #[cfg(not(feature="serde_types"))]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        Ok(Element::Struct(self.content))
    }
}
#![allow(dead_code)]
#![feature(rustc_private)] // decl_storage extra genesis bug
#![cfg_attr(not(feature = "std"), no_std)]
use frame_support::{decl_error, decl_event, decl_module, decl_storage, dispatch, Parameter};
use rstd::prelude::*;
use sp_arithmetic::traits::{CheckedAdd, One, SimpleArithmetic};
use sp_runtime::traits::{MaybeSerializeDeserialize, Member};
use system::ensure_signed;
use crate::oracle::OracleError as InternalError;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
mod external_value;
mod oracle;
mod period_handler;
use crate::period_handler::PeriodHandler;
type AccountId<T> = <T as system::Trait>::AccountId;
/// Module types and dependencies from other pallets
pub trait Trait:
    system::Trait + timestamp::Trait + tablescore::Trait<TargetType = AccountId<Self>>
{
    /// Overarching event type.
    type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;
    /// Identifier type for oracles; allocated sequentially
    /// (see `OracleIdSequence` / `get_next_oracle_id`).
    type OracleId: Default
        + Parameter
        + Member
        + Copy
        + SimpleArithmetic
        + MaybeSerializeDeserialize;
    /// Numeric type of the external values pushed into oracles.
    type ValueType: Default + Parameter + Member + Copy + SimpleArithmetic;
}
type Moment<T> = <T as timestamp::Trait>::Moment;
type AssetId<T> = <T as assets::Trait>::AssetId;
type Oracle<T> = crate::oracle::Oracle<
<T as tablescore::Trait>::TableId,
<T as Trait>::ValueType,
Moment<T>,
AccountId<T>,
>;
decl_storage! {
    trait Store for Module<T: Trait> as OracleModule
    {
        /// All oracles, keyed by their id.
        pub Oracles get(fn oracles): map hasher(blake2_256) T::OracleId => Oracle<T>;
        /// Next free oracle id (advanced by `get_next_oracle_id`).
        OracleIdSequence get(fn next_oracle_id): T::OracleId;
    }
}
decl_event!(
    pub enum Event<T>
    where
        AccountId = <T as system::Trait>::AccountId,
        OracleId = <T as Trait>::OracleId,
        ValueType = <T as Trait>::ValueType,
        ValueId = u8,
    {
        /// A new oracle was created: (oracle id, creator account).
        OracleCreated(OracleId, AccountId),
        /// An oracle value was recalculated: (oracle id, value id, new value).
        OracleUpdated(OracleId, ValueId, ValueType),
    }
);
decl_error! {
    pub enum Error for Module<T: Trait> {
        /// Generic/fallback error (also mapped from calculation failures).
        NoneValue,
        /// `OracleIdSequence` would overflow.
        OracleIdOverflow,
        /// Invalid period configuration (rejected by `PeriodHandler::new`).
        WrongPeriods,
        WrongValuesCount,
        WrongValueId,
        /// Push attempted outside the aggregation part of the period.
        NotAggregationTime,
        /// Calculation attempted outside the calculate part of the period.
        NotCalculateTime,
        NotEnoughSources,
        NotEnoughValues,
        NotCalculatedValue,
        AccountPermissionDenied,
    }
}
impl<T: Trait> From<InternalError> for Error<T> {
    /// Maps internal oracle errors onto the module's dispatch errors,
    /// discarding the expected/actual detail payloads.
    fn from(error: InternalError) -> Self {
        match error {
            InternalError::FewSources(_exp, _act) => Error::<T>::NotEnoughSources,
            InternalError::FewPushedValue(_exp, _act) => Error::<T>::NotEnoughValues,
            InternalError::EmptyPushedValueInPeriod => Error::<T>::NotEnoughValues,
            InternalError::WrongValuesCount(_exp, _act) => Error::<T>::WrongValuesCount,
            InternalError::WrongValueId(_asset) => Error::<T>::WrongValueId,
            InternalError::UncalculatedValue(_asset) => Error::<T>::NotCalculatedValue,
            InternalError::SourcePermissionDenied => Error::<T>::AccountPermissionDenied,
            InternalError::CalculationError => Error::<T>::NoneValue,
        }
    }
}
decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        type Error = Error<T>;
        fn deposit_event() = default;
        /// Create oracle in runtime
        ///
        /// * `name` - A raw string for identify oracle
        /// * `source_limit` - Lower limit of the number of sources
        /// * `period` - Defines oracle work cycle. Period have aggregate and calculate part.
        /// * `aggregate_period` - Part of period when sources can push values. The rest part of
        /// period - `calculate_part` when we can calculate from pushed values.
        /// * `asset_id` - Asset with the help of which voting is carried out in tablescore
        /// * `values_names` - Names of all external values for oracle
        ///
        pub fn create_oracle(origin,
            name: Vec<u8>,
            source_limit: u8,
            period: Moment<T>,
            aggregate_period: Moment<T>,
            asset_id: AssetId<T>,
            values_names: Vec<Vec<u8>>,
        ) -> dispatch::DispatchResult
        {
            let who = ensure_signed(origin)?;
            let now = timestamp::Module::<T>::get();
            // Validate the period split before touching storage.
            let period = PeriodHandler::new(now, period, aggregate_period)
                .map_err(|_| Error::<T>::WrongPeriods)?;
            let table = tablescore::Module::<T>::create(who.clone(), asset_id, source_limit, Some(name.clone()))?;
            let id = Self::get_next_oracle_id()?;
            Oracles::<T>::insert(id, Oracle::<T>::new(name, table, period, source_limit, values_names));
            Self::deposit_event(RawEvent::OracleCreated(id, who));
            Ok(())
        }
        /// Push values to oracle
        ///
        /// In order to push, you need some conditions:
        /// - You must be the winner from tablescore
        /// - `values` must be the right size
        /// - There must be an aggregation period
        pub fn push(origin,
            oracle_id: T::OracleId,
            values: Vec<T::ValueType>) -> dispatch::DispatchResult
        {
            let who = ensure_signed(origin)?;
            let now = timestamp::Module::<T>::get();
            let oracle = Oracles::<T>::get(oracle_id);
            // Refresh the source set from tablescore when empty or stale.
            if oracle.is_sources_empty()
                || oracle.period_handler.is_sources_update_needed(now)
            {
                Self::update_accounts(oracle_id)
                    .map_err(Error::<T>::from)?;
            }
            if !oracle.period_handler.is_allow_aggregate(now)
            {
                return Err(Error::<T>::NotAggregationTime.into());
            }
            Oracles::<T>::mutate(oracle_id, |oracle| {
                oracle.push_values(
                    &who,
                    now,
                    values.into_iter(),
                )
            })
            .map_err(Error::<T>::from)?;
            Ok(())
        }
        /// Calculate value in oracle
        ///
        /// In order to calculate, you need some conditions:
        /// - There must be a calculate period part or in the previous
        /// calculate period part the value was not calculated
        /// - There are enough pushed values in oracle
        pub fn calculate(origin,
            oracle_id: T::OracleId,
            value_id: u8) -> dispatch::DispatchResult
        {
            // Any signed account may trigger a calculation.
            ensure_signed(origin)?;
            let now = timestamp::Module::<T>::get();
            let oracle = Oracles::<T>::get(oracle_id);
            if oracle.period_handler.is_sources_update_needed(now)
            {
                Self::update_accounts(oracle_id).map_err(Error::<T>::from)?;
            }
            if !oracle.is_allow_calculate(value_id as usize, now).map_err(Error::<T>::from)?
            {
                return Err(Error::<T>::NotCalculateTime.into());
            }
            let new_value = Oracles::<T>::mutate(oracle_id, |oracle| {
                oracle.calculate_value(value_id as usize, now)
            }).map_err(Error::<T>::from)?;
            Self::deposit_event(RawEvent::OracleUpdated(oracle_id, value_id, new_value));
            Ok(())
        }
    }
}
impl<T: Trait> Module<T> {
/// Returns the current oracle id and post-increments the stored sequence.
/// Fails with `OracleIdOverflow` when `checked_add` has no room left.
fn get_next_oracle_id() -> Result<T::OracleId, Error<T>> {
OracleIdSequence::<T>::mutate(|id| match id.checked_add(&One::one()) {
Some(res) => {
let result = *id;
*id = res;
Ok(result)
}
None => Err(Error::<T>::OracleIdOverflow),
})
}
/// Replaces the oracle's source accounts with the current head of its
/// tablescore table, returning the new set of accounts.
fn update_accounts(oracle_id: T::OracleId) -> Result<Vec<AccountId<T>>, InternalError> {
Oracles::<T>::mutate(oracle_id, |oracle| {
let table = tablescore::Module::<T>::tables(oracle.get_table());
let accounts = oracle.update_sources(table.get_head().into_iter().cloned())?;
Ok(accounts.into_iter().cloned().collect())
})
}
/// Getter for calculate value in oracle
///
/// Errors with `WrongValueId` for an out-of-range index and with
/// `NotCalculatedValue` when the value has never been calculated.
fn get_external_value(
oracle_id: T::OracleId,
value_id: usize,
) -> Result<(T::ValueType, Moment<T>), Error<T>> {
Oracles::<T>::get(oracle_id)
.values
.get(value_id)
.ok_or(Error::<T>::WrongValueId)?
.get()
.ok_or(Error::<T>::NotCalculatedValue)
}
/// Like `get_external_value`, but triggers `calculate` on demand when the
/// value has not been produced yet.
fn get_or_calculate_external_value(
origin: T::Origin,
oracle_id: T::OracleId,
value_id: usize,
) -> Result<(T::ValueType, Moment<T>), dispatch::DispatchError> {
match Oracles::<T>::get(oracle_id)
.values
.get(value_id)
.ok_or(Error::<T>::WrongValueId)?
.get()
{
Some((value, moment)) => Ok((value, moment)),
None => {
// Value is missing: run the aggregation and re-read it.
Self::calculate(origin, oracle_id, value_id as u8)?;
Ok(Self::get_external_value(oracle_id, value_id)?)
}
}
}
}
|
extern crate rand;
extern crate num_bigint;
use num_bigint::{BigInt, RandBigInt};
/// Computes `b^e mod m`.
///
/// Thin wrapper over `BigInt::modpow`; the arguments are already references,
/// so they are forwarded directly (the previous `&e`/`&m` created needless
/// `&&BigInt` values that only compiled via deref coercion).
fn modpow(b: &BigInt, e: &BigInt, m: &BigInt) -> BigInt {
    b.modpow(e, m)
}
pub fn signature(h: &BigInt) -> bool {
let p = BigInt::parse_bytes(b"255211775190703847597530955573826158579", 10).unwrap();
let q = BigInt::parse_bytes(b"252991020016994668398330411224101", 10).unwrap();
let mut rng = rand::thread_rng();
let gamma = rng.gen_bigint_range(&BigInt::from(2), &(&p - 1));
let g = modpow(&gamma, &((&p - 1) / &q), &p);
let k = rng.gen_bigint_range(&BigInt::from(1), &q);
let r = modpow(&g, &k, &p);
let rho = &r % &q;
let mut x = BigInt::from(2);
while (&k - &rho * h) % &x != BigInt::from(0) {
x += 1;
}
let y = modpow(&g, &x, &p);
let mut s: BigInt;
if &k > &(&rho * h) {
s = &(&(&k - &rho * h) / &x) % &q;
} else {
s = &(&(&(&k - &rho * h) / &x) + &q * &(&(&rho * h - &k) / &x)) % &q;
}
println!("gamma\n = {}", gamma);
println!("g\n = {}", g);
println!("k\n = {}", k);
println!("r\n = {}", r);
println!("rho\n = {}", rho);
println!("x\n = {}", x);
println!("y\n = {}", y);
println!("s\n = {}", s);
// Checking
let x1 = &r % &p;
let x2 = &(
modpow(&g, &(&rho * h), &p) * modpow(&y, &s, &p)
) % &p;
println!("x1 = {}", x1);
println!("x2 = {}", x2);
x1 == x2
} |
/// Tiny crate to verify message signature and format
use anyhow::Result as Fallible;
use anyhow::{format_err, Context};
use bytes::buf::BufExt;
use bytes::Bytes;
use serde::Deserialize;
use serde_json;
use std::fs::{read_dir, File};
use pgp::composed::message::Message;
use pgp::composed::signed_key::SignedPublicKey;
use pgp::Deserializable;
// Location of public keys
static PUBKEYS_DIR: &str = "/usr/local/share/public-keys/";
// Signature format
// Mirrors the "atomic container signature" JSON layout:
// { "critical": { "image": { "docker-manifest-digest": "..." } } }
#[derive(Deserialize)]
struct SignatureImage {
#[serde(rename = "docker-manifest-digest")]
digest: String,
}
#[derive(Deserialize)]
struct SignatureCritical {
image: SignatureImage,
}
#[derive(Deserialize)]
struct Signature {
critical: SignatureCritical,
}
/// Keyring is a collection of public keys
pub type Keyring = Vec<SignedPublicKey>;
/// Create a Keyring from a dir of public keys
/// Create a Keyring from a dir of public keys
///
/// Reads every armored key file under `PUBKEYS_DIR`, verifies each key's
/// self-signature, and returns the collected keyring. Entries whose path is
/// not valid UTF-8 are skipped; any read/parse/verify failure aborts.
pub fn load_public_keys() -> Fallible<Keyring> {
    let mut result: Keyring = vec![];
    for entry in read_dir(PUBKEYS_DIR).context("Reading public keys dir")? {
        let path = &entry?.path();
        let path_str = match path.to_str() {
            None => continue,
            Some(p) => p,
        };
        // `with_context` builds the message lazily, only on the error path
        // (the previous `.context(format!(...))` allocated on every file).
        let file = File::open(path).with_context(|| format!("Reading {}", path_str))?;
        let (pubkey, _) = SignedPublicKey::from_armor_single(file)
            .with_context(|| format!("Parsing {}", path_str))?;
        // Propagate verification failures with `?` instead of the verbose
        // match-and-return; behavior is unchanged.
        pubkey.verify().map_err(|err| format_err!("{:?}", err))?;
        result.push(pubkey);
    }
    Ok(result)
}
/// Verify that signature is valid and contains expected digest
///
/// Checks the PGP signature of `body` against every key in `public_keys`,
/// then compares the embedded `docker-manifest-digest` with `expected_digest`.
pub async fn verify_signature(
    public_keys: &Keyring,
    body: Bytes,
    expected_digest: &str,
) -> Fallible<()> {
    let msg = Message::from_bytes(body.reader()).context("Parsing message")?;
    // Verify signature using provided public keys.
    // `|k|` instead of `|ref k|`: the iterator item is already a reference,
    // so the `ref` pattern only added a needless extra indirection (clippy).
    if !public_keys.iter().any(|k| msg.verify(k).is_ok()) {
        return Err(format_err!("No matching key found to decrypt {:#?}", msg));
    }
    // Deserialize the message
    let contents = match msg.get_content().context("Reading contents")? {
        None => return Err(format_err!("Empty message received")),
        Some(m) => m,
    };
    let signature: Signature = serde_json::from_slice(&contents).context("Deserializing message")?;
    let message_digest = signature.critical.image.digest;
    if message_digest == expected_digest {
        Ok(())
    } else {
        Err(format_err!(
            "Valid signature, but digest mismatches: {}",
            message_digest
        ))
    }
}
|
//! Shortest Job First
use std::cmp::Reverse;
use keyed_priority_queue::KeyedPriorityQueue;
use crate::scheduling::{Os, PId, Scheduler};
/// Process which have the shortest burst time are scheduled first.
/// If two processes have the same burst time then FCFS is used to break the tie.
/// It is a non-preemptive scheduling algorithm.
#[derive(Default, Clone)]
pub struct ShortestJobFirstScheduler {
// Ready processes keyed by pid; `Reverse` turns the max-priority queue
// into a min-queue so the smallest burst time is popped first.
ready_queue: KeyedPriorityQueue<PId, Reverse<u64>>,
}
impl ShortestJobFirstScheduler {
/// Creates an empty scheduler (same as `Default::default()`).
pub fn new() -> Self {
Self::default()
}
}
impl Scheduler for ShortestJobFirstScheduler {
/// Queues a ready process keyed by its burst time.
fn on_process_ready(&mut self, os: &mut Os, pid: usize) {
if let Some(process) = os.get_process(pid) {
let burst_time = process.burst_time();
self.ready_queue.push(pid, Reverse(burst_time));
}
}
/// Dispatches the queued process with the shortest burst time; an empty
/// queue yields `None`, letting the OS idle.
fn switch_process(&mut self, os: &mut Os) {
os.switch_process(self.ready_queue.pop().map(|(pid, _)| pid));
}
fn desc(&self) -> &'static str {
"Shortest Job First; Non-Preemptive; for Job"
}
}
|
/// Exercises the free functions below, including calling one through an
/// explicitly typed function pointer.
fn main() {
    print_number(34);
    print_sum(32, 35);
    add_one(45); // return value intentionally discarded
    // Function pointer with an explicit (non-inferred) type.
    let adder: fn(i32) -> i32 = plus_one;
    let result = adder(5);
    print_number(result);
}
/// Writes the given value to stdout.
fn print_number(x: i32) {
    println!("x is: {}", x);
}
/// Writes the sum of the two arguments to stdout.
fn print_sum(x: i32, y: i32) {
    let total = x + y;
    println!("sum is: {}", total);
}
/// Returns the successor of `x`.
fn add_one(x: i32) -> i32 {
    1 + x
}
/// Returns the successor of `i`.
fn plus_one(i: i32) -> i32 {
    1 + i
}
|
use wasm_bindgen;
use wasm_bindgen::prelude::*;
use console_error_panic_hook;
use virtual_dom_rs::prelude::*;
use web_sys;
use web_sys::Element;
use isomorphic_app;
use isomorphic_app::App;
use isomorphic_app::VirtualNode;
/// Browser-side entry point: owns the application state and the DOM patcher.
#[wasm_bindgen]
pub struct Client {
// Application state + render logic shared with the server side.
app: App,
// Tracks the mounted virtual DOM so `render` can diff and patch.
dom_updater: DomUpdater,
}
// Expose globals from JS for things such as request animation frame
// that web sys doesn't seem to have yet
//
// TODO: Remove this and use RAF from Rust
// https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.Window.html#method.request_animation_frame
#[wasm_bindgen]
extern "C" {
pub type GlobalJS;
pub static global_js: GlobalJS;
// Calls the JS-side `update` — presumably triggers a re-render via RAF;
// confirm against the JS glue code.
#[wasm_bindgen(method)]
pub fn update(this: &GlobalJS);
}
#[wasm_bindgen]
impl Client {
/// Builds the client from the server-serialized state, subscribes a
/// re-render callback to the store, and mounts onto the SSR root element.
#[wasm_bindgen(constructor)]
pub fn new(initial_state: &str) -> Client {
console_error_panic_hook::set_once();
let app = App::from_state_json(initial_state);
// TODO: Use request animation frame from web_sys
// https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.Window.html#method.request_animation_frame
// On every store change, log and notify the JS global to schedule an update.
app.store.borrow_mut().subscribe(Box::new(|| {
web_sys::console::log_1(&JsValue::from("Updating state"));
global_js.update();
}));
let window = web_sys::window().unwrap();
let document = window.document().unwrap();
// NOTE(review): unwrap panics if the mount element is missing — assumed
// present in the server-rendered HTML.
let root_node = document
.get_element_by_id("isomorphic-rust-web-app")
.unwrap();
let dom_updater = DomUpdater::new_replace_mount(app.render(), root_node);
Client { app, dom_updater }
}
/// Renders the current app state and patches the real DOM with the diff.
pub fn render(&mut self) {
let vdom = self.app.render();
self.dom_updater.update(vdom);
}
}
|
// svd2rust-generated read-only views for the INTR_CAUSE_MED register fields;
// each `*_R` type wraps a single interrupt-cause bit.
#[doc = "Reader of register INTR_CAUSE_MED"]
pub type R = crate::R<u32, super::INTR_CAUSE_MED>;
#[doc = "Reader of field `SOF_INTR`"]
pub type SOF_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `BUS_RESET_INTR`"]
pub type BUS_RESET_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `EP0_INTR`"]
pub type EP0_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `LPM_INTR`"]
pub type LPM_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `RESUME_INTR`"]
pub type RESUME_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `ARB_EP_INTR`"]
pub type ARB_EP_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `EP1_INTR`"]
pub type EP1_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `EP2_INTR`"]
pub type EP2_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `EP3_INTR`"]
pub type EP3_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `EP4_INTR`"]
pub type EP4_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `EP5_INTR`"]
pub type EP5_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `EP6_INTR`"]
pub type EP6_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `EP7_INTR`"]
pub type EP7_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `EP8_INTR`"]
pub type EP8_INTR_R = crate::R<bool, bool>;
impl R {
    // Each accessor tests one cause bit via an explicit `(1 << n)` mask,
    // equivalent to the shift-then-AND form.
    #[doc = "Bit 0 - USB SOF Interrupt"]
    #[inline(always)]
    pub fn sof_intr(&self) -> SOF_INTR_R {
        SOF_INTR_R::new(self.bits & (1 << 0) != 0)
    }
    #[doc = "Bit 1 - BUS RESET Interrupt"]
    #[inline(always)]
    pub fn bus_reset_intr(&self) -> BUS_RESET_INTR_R {
        BUS_RESET_INTR_R::new(self.bits & (1 << 1) != 0)
    }
    #[doc = "Bit 2 - EP0 Interrupt"]
    #[inline(always)]
    pub fn ep0_intr(&self) -> EP0_INTR_R {
        EP0_INTR_R::new(self.bits & (1 << 2) != 0)
    }
    #[doc = "Bit 3 - LPM Interrupt"]
    #[inline(always)]
    pub fn lpm_intr(&self) -> LPM_INTR_R {
        LPM_INTR_R::new(self.bits & (1 << 3) != 0)
    }
    #[doc = "Bit 4 - Resume Interrupt"]
    #[inline(always)]
    pub fn resume_intr(&self) -> RESUME_INTR_R {
        RESUME_INTR_R::new(self.bits & (1 << 4) != 0)
    }
    #[doc = "Bit 7 - Arbiter Endpoint Interrupt"]
    #[inline(always)]
    pub fn arb_ep_intr(&self) -> ARB_EP_INTR_R {
        ARB_EP_INTR_R::new(self.bits & (1 << 7) != 0)
    }
    #[doc = "Bit 8 - EP1 Interrupt"]
    #[inline(always)]
    pub fn ep1_intr(&self) -> EP1_INTR_R {
        EP1_INTR_R::new(self.bits & (1 << 8) != 0)
    }
    #[doc = "Bit 9 - EP2 Interrupt"]
    #[inline(always)]
    pub fn ep2_intr(&self) -> EP2_INTR_R {
        EP2_INTR_R::new(self.bits & (1 << 9) != 0)
    }
    #[doc = "Bit 10 - EP3 Interrupt"]
    #[inline(always)]
    pub fn ep3_intr(&self) -> EP3_INTR_R {
        EP3_INTR_R::new(self.bits & (1 << 10) != 0)
    }
    #[doc = "Bit 11 - EP4 Interrupt"]
    #[inline(always)]
    pub fn ep4_intr(&self) -> EP4_INTR_R {
        EP4_INTR_R::new(self.bits & (1 << 11) != 0)
    }
    #[doc = "Bit 12 - EP5 Interrupt"]
    #[inline(always)]
    pub fn ep5_intr(&self) -> EP5_INTR_R {
        EP5_INTR_R::new(self.bits & (1 << 12) != 0)
    }
    #[doc = "Bit 13 - EP6 Interrupt"]
    #[inline(always)]
    pub fn ep6_intr(&self) -> EP6_INTR_R {
        EP6_INTR_R::new(self.bits & (1 << 13) != 0)
    }
    #[doc = "Bit 14 - EP7 Interrupt"]
    #[inline(always)]
    pub fn ep7_intr(&self) -> EP7_INTR_R {
        EP7_INTR_R::new(self.bits & (1 << 14) != 0)
    }
    #[doc = "Bit 15 - EP8 Interrupt"]
    #[inline(always)]
    pub fn ep8_intr(&self) -> EP8_INTR_R {
        EP8_INTR_R::new(self.bits & (1 << 15) != 0)
    }
}
|
mod list;
mod read;
pub use self::list::run as list;
pub use self::read::run as read;
|
/// Prints a greeting to stdout.
fn print_h() {
    println!("hello");
}
/// Adds two constants, prints the sum, then prints a greeting.
fn main() {
    // `y` and `z` are never mutated, so `mut` was only producing
    // unused_mut warnings.
    let y = 0;
    let z = 0;
    let x = y + z;
    println!("{}", x);
    // `print_h` returns `()`; binding it and re-stating the binding (`z;`)
    // was a no-op, so just call it.
    print_h();
}
|
use std::iter::FromIterator;
/// Interprets every character of the trimmed input as one byte.
/// Panics if a character's code point does not fit in 0..256.
fn parse_input2(inp: &str) -> Vec<u8> {
    let mut bytes = Vec::new();
    for c in inp.trim().chars() {
        let code = c as u32;
        if code < 256 {
            bytes.push(code as u8);
        } else {
            panic!("Only expecting ascii symbols");
        }
    }
    bytes
}
/// Parses a comma-separated list of numbers, each in the range 0..=255.
/// Panics (via `expect`) on malformed or out-of-range values.
fn parse_input1(inp: &str) -> Vec<u8> {
    inp.trim()
        .split(',')
        .map(|field| {
            field.trim().parse::<u8>().expect(
                "The input was not formed correctly and had values that are not in the range 0..255",
            )
        })
        .collect()
}
/// Reverses the subrope delimited by `start` (inclusive) and `end`
/// (exclusive). Indices are taken modulo the rope length, so the span may
/// wrap around the end of the slice; `end` must be >= `start` and the span
/// must not exceed the rope length (asserted).
fn reverse_subrope(start: usize, end: usize, rope: &mut [u8]) {
    assert!(end >= start);
    assert!((end - start) <= rope.len());
    // A zero-length span is a no-op; without this guard `end - 1` would
    // underflow (and panic) when called with start == end == 0.
    if start == end {
        return;
    }
    let mut start = start;
    let mut end = end - 1;
    while start < end {
        let len = rope.len();
        rope.swap(start % len, end % len);
        start += 1;
        end -= 1;
    }
}
#[allow(dead_code)]
const TEST_INPUT: &'static str = "3,4,1,5";
// Highest element value; the rope itself holds ROPE_LENGTH + 1 = 256 entries.
const ROPE_LENGTH: u8 = 255;
const ROUNDS: usize = 64;
// Standard knot-hash suffix lengths (name keeps the original "LENGHTS" typo
// so sibling code keeps compiling).
const APPEND_LENGHTS: [u8; 5] = [17, 31, 73, 47, 23];
fn main() {
//Part1
let input = parse_input1(include_str!("../input"));
let mut rope = Vec::from_iter((0..ROPE_LENGTH));
//stupid but range doesn't accept 256 because out of range of u8!
rope.push(ROPE_LENGTH);
let mut position = 0;
let mut skip = 0;
for inp in input {
reverse_subrope(position, position + inp as usize, &mut rope[..]);
position = (position + inp as usize + skip) % rope.len();
skip += 1;
}
println!(
"The requested number is: {}",
rope[0] as u16 * rope[1] as u16
);
//Part2
let mut input = parse_input2(include_str!("../input"));
input.extend_from_slice(&APPEND_LENGHTS);
let mut rope = Vec::from_iter((0..ROPE_LENGTH));
//stupid but range doesn't accept 256 because out of range of u8!
rope.push(ROPE_LENGTH);
//Do the same stuff for 64 rounds
let mut position = 0;
let mut skip = 0;
for _ in 0..ROUNDS {
for inp in input.iter() {
reverse_subrope(position, position + *inp as usize, &mut rope);
position = (position + *inp as usize + skip) % rope.len();
skip += 1;
}
}
let dense_hash = create_dense(&rope);
let hex_string = dense_hash
.iter()
.map(|elem| format!("{:02x}", *elem))
.collect::<String>();
println!("Knot hash is: {}", hex_string);
}
/// Collapses the sparse hash into the dense hash: one XOR-folded byte per
/// run of 16 input bytes (a trailing short chunk is folded the same way).
fn create_dense(sparse: &[u8]) -> Vec<u8> {
    sparse
        .chunks(16)
        .map(|chunk| chunk.iter().copied().fold(0, |acc, byte| acc ^ byte))
        .collect()
}
|
// svd2rust-generated field reader/writer types for the GTPR register
// (8-bit PSC prescaler in bits 0:7, 8-bit GT guard time in bits 8:15).
#[doc = "Register `GTPR` reader"]
pub type R = crate::R<GTPR_SPEC>;
#[doc = "Register `GTPR` writer"]
pub type W = crate::W<GTPR_SPEC>;
#[doc = "Field `PSC` reader - Prescaler value"]
pub type PSC_R = crate::FieldReader;
#[doc = "Field `PSC` writer - Prescaler value"]
pub type PSC_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 8, O>;
#[doc = "Field `GT` reader - Guard time value"]
pub type GT_R = crate::FieldReader;
#[doc = "Field `GT` writer - Guard time value"]
pub type GT_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 8, O>;
impl R {
    #[doc = "Bits 0:7 - Prescaler value"]
    #[inline(always)]
    pub fn psc(&self) -> PSC_R {
        // `as u8` truncates to the low byte, same as `& 0xff` + cast.
        PSC_R::new(self.bits as u8)
    }
    #[doc = "Bits 8:15 - Guard time value"]
    #[inline(always)]
    pub fn gt(&self) -> GT_R {
        GT_R::new((self.bits >> 8) as u8)
    }
}
impl W {
#[doc = "Bits 0:7 - Prescaler value"]
#[inline(always)]
#[must_use]
pub fn psc(&mut self) -> PSC_W<GTPR_SPEC, 0> {
PSC_W::new(self)
}
#[doc = "Bits 8:15 - Guard time value"]
#[inline(always)]
#[must_use]
pub fn gt(&mut self) -> GT_W<GTPR_SPEC, 8> {
GT_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
// unsafe per svd2rust convention: the caller is responsible for writing a
// bit pattern that is valid for this register.
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "Guard time and prescaler register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gtpr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`gtpr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type tying the register to its 32-bit raw representation.
pub struct GTPR_SPEC;
impl crate::RegisterSpec for GTPR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`gtpr::R`](R) reader structure"]
impl crate::Readable for GTPR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`gtpr::W`](W) writer structure"]
impl crate::Writable for GTPR_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets GTPR to value 0"]
impl crate::Resettable for GTPR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use super::context::Context;
use super::graph::HandoffData;
/// Represents a compiled subgraph. Used internally by [Dataflow] to erase the input/output [Handoff] types.
pub(crate) trait Subgraph {
// TODO: pass in some scheduling info?
/// Runs the subgraph once over the shared context and handoff buffers.
fn run(&mut self, context: &mut Context, handoffs: &mut Vec<HandoffData>);
}
/// Any `FnMut` closure over the context and handoff buffers is itself a
/// [`Subgraph`]; `run` simply invokes the closure.
impl<F> Subgraph for F
where
    F: FnMut(&mut Context, &mut Vec<HandoffData>),
{
    fn run(&mut self, context: &mut Context, handoffs: &mut Vec<HandoffData>) {
        self(context, handoffs)
    }
}
|
use crate::bus::Bus;
use crate::devnode;
use crate::devtree::DeviceIdent;
use crate::driver::Driver;
use std::thread::{spawn, JoinHandle};
use twz::device::{BusType, Device, DEVICE_ID_SERIAL};
/// One KEC pty instance: the two pump threads plus its device nodes.
pub struct Instance {
// JoinHandle of the pty -> kernel-console pump (set in `new`).
writethread: Option<JoinHandle<()>>,
// JoinHandle of the kernel-console -> pty pump (set in `new`).
readthread: Option<JoinHandle<()>>,
// [0] = "kec_ptyc" (client), [1] = "kec_ptys" (server) — see `new`.
nodes: Vec<devnode::DeviceNode>,
}
use std::io::Write;
use twz::device::DeviceEvent;
use twz::obj::{ProtFlags, Twzobj};
use twzobj::pty::{PtyClientHdr, PtyServerHdr};
/// Pump loop: reads bytes from the pty server object and forwards them to
/// the kernel console (KEC). Never returns.
fn kec_write_thread(instance: std::sync::Arc<std::sync::Mutex<Instance>>) {
let server = {
let instance = instance.lock().unwrap();
// nodes[1] is the "kec_ptys" (server) node allocated in `Instance::new`.
Twzobj::<PtyServerHdr>::init_guid(instance.nodes[1].id, ProtFlags::READ | ProtFlags::WRITE)
};
let mut buffer = [0; 1024];
loop {
let read_result = twzobj::io::read(&server, &mut buffer, twzobj::io::ReadFlags::none()).unwrap();
if let twzobj::io::ReadOutput::Done(len) = read_result {
twz::sys::kec_write(&buffer[0..len], twz::sys::KECWriteFlags::none());
}
}
}
/// Pump loop: reads bytes from the kernel console (KEC) and writes them into
/// the pty server object. Never returns.
fn kec_read_thread(instance: std::sync::Arc<std::sync::Mutex<Instance>>) {
let server = {
let instance = instance.lock().unwrap();
// nodes[1] is the "kec_ptys" (server) node allocated in `Instance::new`.
Twzobj::<PtyServerHdr>::init_guid(instance.nodes[1].id, ProtFlags::READ | ProtFlags::WRITE)
};
let mut buffer = [0; 1024];
loop {
let result = twz::sys::kec_read(&mut buffer, twz::sys::KECReadFlags::none());
if let Ok(buf) = result {
// NOTE(review): the write result is ignored, so a failed or short write
// is silently dropped — confirm this best-effort behavior is intended.
twzobj::io::write(&server, buf, twzobj::io::WriteFlags::none());
}
}
}
use twzobj::pty;
impl Instance {
    /// Creates the KEC pty pair, allocates its device nodes, spawns the
    /// read/write pump threads, publishes the client node, and returns the
    /// shared instance handle.
    pub fn new() -> std::sync::Arc<std::sync::Mutex<Instance>> {
        let spec = twz::obj::CreateSpec::new(
            twz::obj::LifetimeType::Volatile,
            twz::obj::BackingType::Normal,
            twz::obj::CreateFlags::DFL_READ | twz::obj::CreateFlags::DFL_WRITE,
        );
        let (client, server) = pty::create_pty_pair(&spec, &spec).unwrap();
        let nodes = devnode::allocate(&[("kec_ptyc", client.id()), ("kec_ptys", server.id())]);
        let inst = std::sync::Arc::new(std::sync::Mutex::new(Instance {
            readthread: None,
            writethread: None,
            // field-init shorthand (fixes clippy::redundant_field_names)
            nodes,
        }));
        // Each pump thread takes its own Arc clone; the lock is held only
        // long enough to stash the JoinHandle.
        let inst2 = inst.clone();
        {
            let mut inst = inst.lock().unwrap();
            inst.writethread = Some(spawn(|| kec_write_thread(inst2)));
        }
        let inst2 = inst.clone();
        {
            let mut inst = inst.lock().unwrap();
            inst.readthread = Some(spawn(|| kec_read_thread(inst2)));
        }
        // Publish the client node ("kec_ptyc", nodes[0]).
        devnode::publish(&inst.lock().unwrap().nodes[0]);
        inst
    }
}
// Holds every live KEC `Instance` managed by this driver.
#[derive(Default)]
struct KECDriver {
instances: Vec<std::sync::Arc<std::sync::Mutex<Instance>>>,
}
|
use tokio::stream::StreamExt;
use tokio_util::codec::{Framed, BytesCodec};
use futures::sink::SinkExt;
use tokio::net::TcpListener;
/// TCP echo server on 127.0.0.1:6142: frames each connection with
/// `BytesCodec` and echoes every non-empty buffer back to the sender.
#[tokio::main]
async fn main() -> std::io::Result<()> {
    let mut listener = TcpListener::bind("127.0.0.1:6142").await?;
    println!("Listening on port 6142 ..");
    // NOTE(review): connections are served one at a time; a `tokio::spawn`
    // per accepted socket would be needed to handle clients concurrently.
    loop {
        let (socket, _) = listener.accept().await?;
        let mut framed = Framed::new(socket, BytesCodec::new());
        loop {
            match framed.next().await {
                Some(Ok(rcvd)) => {
                    // NOTE(review): non-UTF-8 payloads still panic here, as
                    // in the original; consider `from_utf8_lossy`.
                    println!(
                        "Socket buffer -> {:?}",
                        String::from_utf8(rcvd.to_vec()).unwrap()
                    );
                    if rcvd.is_empty() {
                        println!("zero bytes {:?}", rcvd.len());
                        break;
                    }
                    match framed.send(rcvd.freeze()).await {
                        Ok(()) => {
                            // Flush failures are deliberately ignored.
                            let _ignore = framed.flush().await;
                        }
                        _rest => break,
                    }
                }
                // A codec/read error previously panicked via `unwrap`,
                // killing the whole server; drop just this connection.
                Some(Err(_)) => break,
                None => {
                    println!("That's unfortunate !");
                    break;
                }
            }
        }
    }
}
|
pub mod chain;
pub mod block; |
use crate::block_status::BlockStatus;
use crate::relayer::compact_block_verifier::CompactBlockVerifier;
use crate::relayer::error::{Error, Ignored, Internal, Misbehavior};
use crate::relayer::{ReconstructionError, Relayer};
use ckb_logger::{self, debug_target, warn};
use ckb_network::{CKBProtocolContext, PeerIndex};
use ckb_shared::Snapshot;
use ckb_store::ChainStore;
use ckb_traits::BlockMedianTimeContext;
use ckb_types::{
core::{self, BlockNumber},
packed,
prelude::*,
};
use ckb_verification::{HeaderVerifier, Verifier};
use failure::{err_msg, Error as FailureError};
use std::collections::HashMap;
use std::sync::Arc;
// Keeping in mind that short_ids are expected to occasionally collide.
// On receiving a compact-block message,
// if the reconstructed block has a different transactions_root:
// 1. if all the transactions are prefilled,
// the node should ban the peer but must not mark the block invalid,
// because the block hash itself may be wrong.
// 2. otherwise, there may be a short_id collision in the transaction pool,
// so the node falls back to requesting all the short_ids from the peer.
/// Processes one relayed compact block from `peer`: verifies it and either
/// accepts the block or requests the missing transactions/uncles.
pub struct CompactBlockProcess<'a> {
message: packed::CompactBlockReader<'a>,
relayer: &'a Relayer,
nc: Arc<dyn CKBProtocolContext>,
peer: PeerIndex,
}
/// Outcome of processing a relayed compact block.
#[derive(Debug, Eq, PartialEq)]
pub enum Status {
/// Accept block
AcceptBlock,
/// Send get_headers
UnknownParent,
/// Send missing_indexes by get_block_transactions
SendMissingIndexes,
/// Collision and Send missing_indexes by get_block_transactions
CollisionAndSendMissingIndexes,
}
impl<'a> CompactBlockProcess<'a> {
/// Bundles the message with the relayer state needed to process it.
pub fn new(
message: packed::CompactBlockReader<'a>,
relayer: &'a Relayer,
nc: Arc<dyn CKBProtocolContext>,
peer: PeerIndex,
) -> Self {
CompactBlockProcess {
message,
nc,
relayer,
peer,
}
}
/// Validates the compact block and tries to reconstruct the full block.
///
/// Returns a [`Status`] on success; errors classify the failure as
/// `Misbehavior` (peer fault), `Ignored` (stale/duplicate) or `Internal`.
pub fn execute(self) -> Result<Status, FailureError> {
let snapshot = self.relayer.shared.snapshot();
// Cheap consensus-limit sanity checks on the reader, before paying for
// the entity conversion below.
{
let compact_block = self.message;
if compact_block.uncles().len() > snapshot.consensus().max_uncles_num() {
warn!("Peer {} sends us an invalid message, CompactBlock uncles size ({}) is greater than consensus max_uncles_num ({})",
self.peer, compact_block.uncles().len(), snapshot.consensus().max_uncles_num());
return Err(err_msg(
"CompactBlock uncles size is greater than consensus max_uncles_num".to_owned(),
));
}
if (compact_block.proposals().len() as u64)
> snapshot.consensus().max_block_proposals_limit()
{
warn!("Peer {} sends us an invalid message, CompactBlock proposals size ({}) is greater than consensus max_block_proposals_limit ({})",
self.peer, compact_block.proposals().len(), snapshot.consensus().max_block_proposals_limit());
return Err(err_msg(
"CompactBlock proposals size is greater than consensus max_block_proposals_limit"
.to_owned(),
));
}
}
let compact_block = self.message.to_entity();
let header = compact_block.header().into_view();
let block_hash = header.hash();
// Only accept blocks with a height greater than tip - N
// where N is the current epoch length
let tip = snapshot.tip_header();
let epoch_length = snapshot.epoch_ext().length();
let lowest_number = tip.number().saturating_sub(epoch_length);
if lowest_number > header.number() {
return Err(Error::Ignored(Ignored::TooOldBlock).into());
}
let status = snapshot.get_block_status(&block_hash);
if status.contains(BlockStatus::BLOCK_STORED) {
return Err(Error::Ignored(Ignored::AlreadyStored).into());
} else if status.contains(BlockStatus::BLOCK_INVALID) {
debug_target!(
crate::LOG_TARGET_RELAY,
"receive a compact block with invalid status, {}, peer: {}",
block_hash,
self.peer
);
return Err(Error::Misbehavior(Misbehavior::BlockInvalid).into());
}
// Without the parent header we cannot verify this block: ask the peer
// for headers instead.
let parent = snapshot.get_header_view(&header.data().raw().parent_hash());
if parent.is_none() {
debug_target!(
crate::LOG_TARGET_RELAY,
"UnknownParent: {}, send_getheaders_to_peer({})",
block_hash,
self.peer
);
snapshot.send_getheaders_to_peer(self.nc.as_ref(), self.peer, &tip);
return Ok(Status::UnknownParent);
}
let parent = parent.unwrap();
if let Some(flight) = snapshot
.state()
.read_inflight_blocks()
.inflight_state_by_block(&block_hash)
{
if flight.peers.contains(&self.peer) {
debug_target!(
crate::LOG_TARGET_RELAY,
"discard already in-flight compact block {}",
block_hash,
);
return Err(Error::Ignored(Ignored::AlreadyInFlight).into());
}
}
// The new arrived has greater difficulty than local best known chain
let missing_transactions: Vec<u32>;
let missing_uncles: Vec<u32>;
let mut collision = false;
{
// Verify compact block
let mut pending_compact_blocks = snapshot.state().pending_compact_blocks();
if pending_compact_blocks
.get(&block_hash)
.map(|(_, peers_map)| peers_map.contains_key(&self.peer))
.unwrap_or(false)
{
debug_target!(
crate::LOG_TARGET_RELAY,
"discard already pending compact block {}",
block_hash
);
return Err(Error::Ignored(Ignored::AlreadyPending).into());
} else {
// Header lookup for median-time: prefer pending compact-block
// headers over headers already in the store.
let fn_get_pending_header = {
|block_hash| {
pending_compact_blocks
.get(&block_hash)
.map(|(compact_block, _)| compact_block.header().into_view())
.or_else(|| {
snapshot
.get_header_view(&block_hash)
.map(|header_view| header_view.into_inner())
})
}
};
let resolver = snapshot.new_header_resolver(&header, parent.into_inner());
let median_time_context = CompactBlockMedianTimeView {
fn_get_pending_header: Box::new(fn_get_pending_header),
snapshot: snapshot.store(),
};
let header_verifier =
HeaderVerifier::new(&median_time_context, &snapshot.consensus());
if let Err(err) = header_verifier.verify(&resolver) {
debug_target!(crate::LOG_TARGET_RELAY, "invalid header: {}", err);
snapshot
.state()
.insert_block_status(block_hash, BlockStatus::BLOCK_INVALID);
return Err(Error::Misbehavior(Misbehavior::HeaderInvalid).into());
}
CompactBlockVerifier::verify(&compact_block)?;
// Header has been verified ok, update state
snapshot.insert_valid_header(self.peer, &header);
}
// Request proposal
let proposals: Vec<_> = compact_block.proposals().into_iter().collect();
if let Err(err) = self.relayer.request_proposal_txs(
self.nc.as_ref(),
self.peer,
block_hash.clone(),
proposals,
) {
debug_target!(
crate::LOG_TARGET_RELAY,
"[CompactBlockProcess] request_proposal_txs: {}",
err
);
}
// Reconstruct block
let ret = self
.relayer
.reconstruct_block(&snapshot, &compact_block, vec![], &[], &[]);
// Accept block
// `relayer.accept_block` will make sure the validity of block before persisting
// into database
match ret {
Ok(block) => {
pending_compact_blocks.remove(&block_hash);
self.relayer
.accept_block(&snapshot, self.nc.as_ref(), self.peer, block);
return Ok(Status::AcceptBlock);
}
Err(ReconstructionError::InvalidTransactionRoot) => {
return Err(Error::Misbehavior(Misbehavior::InvalidTransactionRoot).into());
}
Err(ReconstructionError::InvalidUncle) => {
return Err(Error::Misbehavior(Misbehavior::InvalidUncle).into());
}
// Some indexes could not be resolved locally: fall through and
// request them from the peer.
Err(ReconstructionError::MissingIndexes(transactions, uncles)) => {
missing_transactions = transactions.into_iter().map(|i| i as u32).collect();
missing_uncles = uncles.into_iter().map(|i| i as u32).collect();
}
// short_id collision in the tx pool: re-request every short_id.
Err(ReconstructionError::Collision) => {
missing_transactions = compact_block
.short_id_indexes()
.into_iter()
.map(|i| i as u32)
.collect();
collision = true;
missing_uncles = vec![];
}
Err(ReconstructionError::Internal(e)) => {
ckb_logger::error!("reconstruct_block internal error: {}", e);
return Err(Error::Internal(Internal::TxPoolInternalError).into());
}
}
// Remember what we asked this peer for, keyed by block hash.
pending_compact_blocks
.entry(block_hash.clone())
.or_insert_with(|| (compact_block, HashMap::default()))
.1
.insert(
self.peer,
(missing_transactions.clone(), missing_uncles.clone()),
);
}
if !snapshot
.state()
.write_inflight_blocks()
.insert(self.peer, block_hash.clone())
{
debug_target!(
crate::LOG_TARGET_RELAY,
"BlockInFlight reach limit or had requested, peer: {}, block: {}",
self.peer,
block_hash,
);
return Err(Error::Internal(Internal::InflightBlocksReachLimit).into());
}
// Ask the peer for the missing transactions/uncles.
let content = packed::GetBlockTransactions::new_builder()
.block_hash(block_hash)
.indexes(missing_transactions.pack())
.uncle_indexes(missing_uncles.pack())
.build();
let message = packed::RelayMessage::new_builder().set(content).build();
let data = message.as_slice().into();
if let Err(err) = self.nc.send_message_to(self.peer, data) {
ckb_logger::debug!("relayer send get_block_transactions error: {:?}", err);
}
if collision {
Ok(Status::CollisionAndSendMissingIndexes)
} else {
Ok(Status::SendMissingIndexes)
}
}
}
/// Median-time header source that first consults pending (not yet stored)
/// compact-block headers, then falls back to the store snapshot.
struct CompactBlockMedianTimeView<'a> {
fn_get_pending_header: Box<dyn Fn(packed::Byte32) -> Option<core::HeaderView> + 'a>,
snapshot: &'a Snapshot,
}
impl<'a> CompactBlockMedianTimeView<'a> {
// Pending headers take precedence over persisted ones.
fn get_header(&self, hash: &packed::Byte32) -> Option<core::HeaderView> {
(self.fn_get_pending_header)(hash.to_owned())
.or_else(|| self.snapshot.get_block_header(hash))
}
}
impl<'a> BlockMedianTimeContext for CompactBlockMedianTimeView<'a> {
fn median_block_count(&self) -> u64 {
self.snapshot.consensus().median_time_block_count() as u64
}
/// Returns `(timestamp, number, parent_hash)` for `block_hash`.
///
/// # Panics
/// Panics (via `expect`) if the header is neither pending nor stored;
/// callers are expected to only pass hashes of known headers.
fn timestamp_and_parent(
&self,
block_hash: &packed::Byte32,
) -> (u64, BlockNumber, packed::Byte32) {
let header = self
.get_header(&block_hash)
.expect("[CompactBlockMedianTimeView] blocks used for median time exist");
(
header.timestamp(),
header.number(),
header.data().raw().parent_hash(),
)
}
}
|
#![cfg_attr(not(feature = "std"), no_std)]
#![forbid(unsafe_code)]
//! An efficient and customizable parser for the
//! [`.cnf` (Conjunctive Normal Form)][cnf-format]
//! file format used by [SAT solvers][sat-solving].
//!
//! [sat-solving]: https://en.wikipedia.org/wiki/Boolean_satisfiability_problem
//! [cnf-format]: https://www.cs.utexas.edu/users/moore/acl2/manuals/current/manual/index-seo.php/SATLINK____DIMACS
//!
//! # Usage
//!
//! ```
//! # use cnf_parser::{Literal, Output};
//!
//! #[derive(Default)]
//! pub struct MyOutput {
//! head_clause: Vec<Literal>,
//! clauses: Vec<Vec<Literal>>,
//! }
//!
//! impl Output for MyOutput {
//! type Error = &'static str;
//!
//! fn problem(&mut self, num_variables: u32, num_clauses: u32) -> Result<(), Self::Error> {
//! Ok(())
//! }
//!
//! fn literal(&mut self, literal: Literal) -> Result<(), Self::Error> {
//! self.head_clause.push(literal); Ok(())
//! }
//!
//! fn finalize_clause(&mut self) -> Result<(), Self::Error> {
//! if self.head_clause.is_empty() {
//! return Err("encountered empty clause")
//! }
//! self.clauses.push(
//! core::mem::replace(&mut self.head_clause, Vec::new())
//! );
//! Ok(())
//! }
//!
//! fn finish(&mut self) -> Result<(), Self::Error> {
//! if !self.head_clause.is_empty() {
//! self.finalize_clause()?
//! }
//! Ok(())
//! }
//! }
//!
//! let my_input: &[u8] = br"
//! c This is my input .cnf file with 3 variables and 2 clauses.
//! p cnf 3 2
//! 1 -2 3 0
//! 1 -3 0
//! ";
//! let mut my_output = MyOutput::default();
//! cnf_parser::parse_cnf(&mut my_input.as_ref(), &mut my_output)
//! .expect("encountered invalid .cnf input");
//! ```
mod lexer;
mod token;
#[cfg(test)]
mod tests;
pub use self::{
lexer::Error,
token::{
Literal,
Problem,
},
};
use self::{
lexer::Lexer,
token::Token,
};
/// Types that can be used as input for the CNF parser.
pub trait Input {
/// Reads a byte from the input if any is remaining.
///
/// Returns `None` once the input is exhausted.
fn read_byte(&mut self) -> Option<u8>;
}
impl<'a> Input for &'a [u8] {
    /// Pops the first byte off the slice, advancing the slice by one.
    fn read_byte(&mut self) -> Option<u8> {
        // Copy the slice handle so the split result outlives the reassignment.
        let bytes = *self;
        let (&first, rest) = bytes.split_first()?;
        *self = rest;
        Some(first)
    }
}
/// Input wrapper for [`T: Read`](https://doc.rust-lang.org/std/io/trait.Read.html)
/// types.
///
/// # Note
///
/// This type is only available if the crate has been compiled with the `std`
/// crate feature.
// Newtype wrapper so `Input` can be implemented for arbitrary readers.
#[cfg(feature = "std")]
pub struct IoReader<R>(pub R)
where
R: std::io::Read;
#[cfg(feature = "std")]
impl<R> Input for IoReader<R>
where
    R: std::io::Read,
{
    /// Reads exactly one byte from the wrapped reader; `None` on EOF or
    /// any I/O error.
    fn read_byte(&mut self) -> Option<u8> {
        let mut byte = [0u8; 1];
        match self.0.read_exact(&mut byte) {
            Ok(()) => Some(byte[0]),
            Err(_) => None,
        }
    }
}
/// The output where the CNF information is piped to.
///
/// Usually implemented by a dependency of this crate.
pub trait Output {
/// An error that can occur with the parser output.
type Error;
/// The optional problem line with the number of total variables and clauses.
///
/// # Note
///
/// This will only be processed once per CNF input stream.
fn problem(
&mut self,
num_variables: u32,
num_clauses: u32,
) -> Result<(), Self::Error>;
/// A literal has been read.
fn literal(&mut self, literal: Literal) -> Result<(), Self::Error>;
/// The end of the current clause has been read.
fn finalize_clause(&mut self) -> Result<(), Self::Error>;
/// Called at the end of CNF parsing.
///
/// Outputs can expect to receive no more messages from the parser after
/// being called with `finish`.
fn finish(&mut self) -> Result<(), Self::Error>;
}
/// Parses a CNF formatted input stream into the given output.
///
/// # Errors
///
/// - If the CNF input is malformed.
/// - If the output triggers a custom error.
pub fn parse_cnf<I, O>(
input: &mut I,
output: &mut O,
) -> Result<(), Error<<O as Output>::Error>>
where
I: Input,
O: Output,
{
let mut lexer = <Lexer<I, O>>::new(input);
loop {
match lexer.next_token()? {
Some(Token::Problem(problem)) => {
output
.problem(problem.num_variables, problem.num_clauses)
.map_err(Error::from_output)?
}
Some(Token::Literal(literal)) => {
output.literal(literal).map_err(Error::from_output)?
}
Some(Token::ClauseEnd) => {
output.finalize_clause().map_err(Error::from_output)?
}
None => break,
}
}
output.finish().map_err(Error::from_output)?;
Ok(())
}
|
mod compare;
mod store;
use core::fmt;
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::ptr;
use core::sync::atomic::Ordering;
use conquer_pointer::{AtomicMarkedPtr, MarkedNonNull, MarkedPtr, Null};
use crate::traits::{Protect, Reclaim};
use crate::{Maybe, NotEqual, Owned, Protected, Unlinked, Unprotected};
pub use self::compare::Comparable;
pub use self::store::Storable;
use self::compare::Unlink;
// *************************************************************************************************
// Atomic
// *************************************************************************************************
/// An atomic marked pointer type to a heap allocated value similar to
/// [`AtomicPtr`](core::sync::atomic::AtomicPtr).
///
/// Note, that the type does not implement the [`Drop`][core::ops::Drop] trait,
/// meaning it does not automatically take care of memory de-allocation when it
/// goes out of scope.
/// Use the (unsafe) [`take`][Atomic::take] method to extract an (optional)
/// [`Owned`] value, which *does* correctly deallocate memory when it goes out
/// of scope.
pub struct Atomic<T, R, const N: usize> {
    // The raw marked pointer: address bits plus an N-bit tag.
    inner: AtomicMarkedPtr<T, N>,
    // Logically owns a `T`; `R` ties the pointer to its reclamation scheme.
    _marker: PhantomData<(T, R)>,
}
/********** impl inherent (const) *****************************************************************/
impl<T, R, const N: usize> Atomic<T, R, N> {
    /// Creates a new `null` pointer.
    ///
    /// The result holds no record and a zero tag.
    #[inline]
    pub const fn null() -> Self {
        Self { inner: AtomicMarkedPtr::null(), _marker: PhantomData }
    }
    /// Returns a reference to the underlying (raw) [`AtomicMarkedPtr`].
    ///
    /// # Safety
    ///
    /// The returned reference to the raw pointer must not be used to store
    /// invalid values into the [`Atomic`].
    #[inline]
    pub const unsafe fn as_raw(&self) -> &AtomicMarkedPtr<T, N> {
        &self.inner
    }
}
/********** impl inherent *************************************************************************/
impl<T, R: Reclaim<T>, const N: usize> Atomic<T, R, N> {
    /// Creates a new [`Atomic`] for the given `owned` record.
    #[inline]
    pub fn new(owned: Owned<T, R, N>) -> Self {
        Self::from(owned)
    }
    /// Creates a new [`Atomic`] from the given raw `ptr`.
    ///
    /// # Safety
    ///
    /// The given `ptr` must point at a live memory record that had been
    /// allocated as a record for the same [`ReclaimBase`][crate::ReclaimBase].
    ///
    /// Note, that creating more than one [`Atomic`] from the same record has
    /// implications for other methods such as [`take`][Atomic::take].
    #[inline]
    pub unsafe fn take(&mut self) -> Option<Owned<T, R, N>> {
        match MarkedNonNull::new(self.inner.swap(MarkedPtr::null(), Ordering::Relaxed)) {
            Ok(inner) => Some(Owned { inner, _marker: PhantomData }),
            Err(_) => None,
        }
    }
    /// Loads a raw marked pointer from the [`Atomic`].
    ///
    /// `load_raw` takes an [`Ordering`] argument, which describes the
    /// memory ordering of this operation.
    ///
    /// # Panics
    ///
    /// Panics if `order` is [`Release`][Ordering::Release] or
    /// [`AcqRel`][Ordering::AcqRel].
    ///
    /// # Example
    ///
    /// Commonly, this is likely going to be used in conjunction with
    /// [`load_if_equal`][Atomic::load_if_equal] or
    /// [`acquire_if_equal`][Protect::protect_if_equal].
    ///
    /// ```
    /// use std::sync::atomic::Ordering::Relaxed;
    ///
    /// use reclaim::leak::Guard;
    ///
    /// type Atomic<T> = reclaim::leak::Atomic<T, 0>;
    ///
    /// let atomic = Atomic::new("string");
    /// let guard = &Guard::new();
    ///
    /// let ptr = atomic.load_raw(Relaxed);
    /// let res = atomic.load_if_equal(ptr, Relaxed, guard);
    ///
    /// assert!(res.is_ok());
    /// # assert_eq!(&"string", &*res.unwrap().unwrap());
    /// ```
    #[inline]
    pub fn load_raw(&self, order: Ordering) -> MarkedPtr<T, N> {
        self.inner.load(order)
    }
    /// Loads an [`Unprotected`] pointer from the [`Atomic`].
    ///
    /// The returned pointer is explicitly **not** protected from reclamation,
    /// meaning another thread could free the pointed to memory at any time.
    ///
    /// `load_unprotected` takes an [`Ordering`] argument, which
    /// describes the memory ordering of this operation.
    ///
    /// # Panics
    ///
    /// Panics if `order` is [`Release`][rel] or [`AcqRel`][acq_rel].
    ///
    /// [acq_rel]: Ordering::AcqRel
    /// [rel]: Ordering::Release
    #[inline]
    pub fn load_unprotected(&self, order: Ordering) -> Unprotected<T, R, N> {
        Unprotected { inner: self.load_raw(order), _marker: PhantomData }
    }
    /// Stores either `null` or a valid pointer to an owned heap allocated value
    /// into the pointer.
    ///
    /// Note, that overwriting a non-null value through `store` will very likely
    /// lead to memory leaks, since instances of [`Atomic`] will most commonly
    /// be associated with some kind of uniqueness invariants in order to be sound.
    ///
    /// `store` takes an [`Ordering`][ordering] argument, which
    /// describes the memory ordering of this operation.
    ///
    /// # Panics
    ///
    /// Panics if `order` is [`Acquire`][acquire] or [`AcqRel`][acq_rel]
    ///
    /// [ordering]: Ordering
    /// [acquire]: Ordering::Acquire
    /// [acq_rel]: Ordering::AcqRel
    #[inline]
    pub fn store(&self, new: impl Into<Storable<T, R, N>>, order: Ordering) {
        self.inner.store(new.into().into_marked_ptr(), order);
    }
    /// Stores either `null` or a valid pointer to an owned heap allocated value
    /// into the pointer, returning the previous (now [`Unlinked`]) value
    /// wrapped in an [`Option`].
    ///
    /// The returned value can be safely reclaimed as long as the *uniqueness*
    /// invariant is maintained.
    ///
    /// `swap` takes an [`Ordering`][ordering] argument which describes the memory
    /// ordering of this operation. All ordering modes are possible. Note that using
    /// [`Acquire`][acquire] makes the store part of this operation [`Relaxed`][relaxed],
    /// and using [`Release`][release] makes the load part [`Relaxed`][relaxed].
    ///
    /// [ordering]: Ordering
    /// [relaxed]: Ordering::Relaxed
    /// [acquire]: Ordering::Acquire
    /// [release]: Ordering::Release
    #[inline]
    pub fn swap(
        &self,
        new: impl Into<Storable<T, R, N>>,
        order: Ordering,
    ) -> Maybe<Unlinked<T, R, N>> {
        match MarkedNonNull::new(self.inner.swap(new.into().into_marked_ptr(), order)) {
            Ok(inner) => Maybe::Some(Unlinked { inner, _marker: PhantomData }),
            Err(Null(tag)) => Maybe::Null(tag),
        }
    }
    /// Performs a *compare-and-swap* on the [`Atomic`].
    ///
    /// If the currently stored marked pointer equals `current`, stores `new`
    /// and returns `current`'s unlinked form. On failure, a
    /// [`CompareExchangeErr`] is returned which contains both the actually
    /// `loaded` value and the untouched `input`, so an owning `new` value is
    /// neither dropped nor leaked.
    ///
    /// The `(success, failure)` pair gives the memory orderings used for the
    /// succeeding and the failing case, respectively.
    #[inline]
    pub fn compare_exchange<C, S>(
        &self,
        current: C,
        new: S,
        (success, failure): (Ordering, Ordering),
    ) -> Result<C::Unlinked, CompareExchangeErr<S, T, R, N>>
    where
        C: Into<Comparable<T, R, N>> + Unlink + Copy,
        S: Into<Storable<T, R, N>>,
    {
        let new = ManuallyDrop::new(new);
        unsafe {
            let compare = current.into().into_marked_ptr();
            // Bitwise-duplicate `new`; exactly one copy "escapes": the stored
            // pointer on success, or `into_inner` on failure.
            let store = ptr::read(&*new).into().into_marked_ptr();
            self.inner
                .compare_exchange(compare, store, (success, failure))
                .map(|_| current.into_unlinked())
                .map_err(|inner| CompareExchangeErr {
                    loaded: Unprotected { inner, _marker: PhantomData },
                    input: ManuallyDrop::into_inner(new),
                })
        }
    }
    /// The *weak* variant of [`compare_exchange`][Atomic::compare_exchange].
    ///
    /// Forwards to the underlying pointer's `compare_exchange_weak`, which —
    /// like [`core::sync::atomic`]'s weak CAS — is presumably allowed to fail
    /// spuriously even when the comparison succeeds, so it is usually called
    /// in a loop.
    #[inline]
    pub fn compare_exchange_weak<C, S>(
        &self,
        current: C,
        new: S,
        (success, failure): (Ordering, Ordering),
    ) -> Result<C::Unlinked, CompareExchangeErr<S, T, R, N>>
    where
        C: Into<Comparable<T, R, N>> + Unlink + Copy,
        S: Into<Storable<T, R, N>>,
    {
        let new = ManuallyDrop::new(new);
        unsafe {
            let compare = current.into().into_marked_ptr();
            // Same `ManuallyDrop`/`ptr::read` dance as in `compare_exchange`.
            let store = ptr::read(&*new).into().into_marked_ptr();
            self.inner
                .compare_exchange_weak(compare, store, (success, failure))
                .map(|_| current.into_unlinked())
                .map_err(|inner| CompareExchangeErr {
                    loaded: Unprotected { inner, _marker: PhantomData },
                    input: ManuallyDrop::into_inner(new),
                })
        }
    }
    /// Loads the raw marked pointer and returns it only if it equals
    /// `expected`; otherwise fails with [`NotEqual`].
    #[inline]
    pub(crate) fn load_raw_if_equal(
        &self,
        expected: MarkedPtr<T, N>,
        order: Ordering,
    ) -> Result<MarkedPtr<T, N>, NotEqual> {
        match self.load_raw(order) {
            ptr if ptr == expected => Ok(ptr),
            _ => Err(NotEqual),
        }
    }
    /// Loads a value from the pointer using `guard` to protect it from
    /// reclamation.
    ///
    /// If the loaded value is non-null, the value is guaranteed to be protected
    /// from reclamation during the lifetime of `guard`.
    ///
    /// `load` takes an [`Ordering`] argument, which describes the memory
    /// ordering of this operation.
    ///
    /// # Panics
    ///
    /// *May* panic if `order` is [`Release`][release] or [`AcqRel`][acq_rel].
    ///
    /// [release]: Ordering::Release
    /// [acq_rel]: Ordering::AcqRel
    #[inline]
    pub fn load<'g>(
        &self,
        guard: &'g mut impl Protect<T, Reclaim = R>,
        order: Ordering,
    ) -> Protected<'g, T, R, N> {
        guard.protect(self, order)
    }
    /// Like [`load`][Atomic::load], but only protects and returns the value
    /// if the loaded pointer equals `expected`; otherwise fails with
    /// [`NotEqual`].
    #[inline]
    pub fn load_if_equal<'g>(
        &self,
        expected: MarkedPtr<T, N>,
        guard: &'g mut impl Protect<T, Reclaim = R>,
        order: Ordering,
    ) -> Result<Protected<'g, T, R, N>, NotEqual> {
        guard.protect_if_equal(self, expected, order)
    }
}
/********** impl Default **************************************************************************/
impl<T, R: Reclaim<T>, const N: usize> Default for Atomic<T, R, N> {
    // `default_null!` is defined elsewhere in this crate; presumably it emits
    // a `fn default()` returning `Self::null()` — TODO confirm.
    default_null!();
}
/********** impl Debug ****************************************************************************/
impl<T, R, const N: usize> fmt::Debug for Atomic<T, R, N> {
    /// Formats the current pointer and tag (loaded with `SeqCst`).
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let current = self.inner.load(Ordering::SeqCst);
        let (ptr, tag) = current.decompose();
        let mut builder = f.debug_struct("Atomic");
        builder.field("ptr", &ptr);
        builder.field("tag", &tag);
        builder.finish()
    }
}
/********** impl From (T) *************************************************************************/
impl<T, R: Reclaim<T>, const N: usize> From<T> for Atomic<T, R, N>
where
    R::Header: Default,
{
    /// Allocates a new record for `val` and stores it in a fresh [`Atomic`].
    #[inline]
    fn from(val: T) -> Self {
        let owned = Owned::new(val);
        Self::new(owned)
    }
}
/********** impl From (Owned<T>) ******************************************************************/
impl<T, R: Reclaim<T>, const N: usize> From<Owned<T, R, N>> for Atomic<T, R, N> {
    /// Converts the owned record into its raw marked pointer and wraps it.
    #[inline]
    fn from(owned: Owned<T, R, N>) -> Self {
        let raw = Owned::into_marked_ptr(owned);
        Self { inner: AtomicMarkedPtr::from(raw), _marker: PhantomData }
    }
}
/********** impl From (Storable) ******************************************************************/
impl<T, R: Reclaim<T>, const N: usize> From<Storable<T, R, N>> for Atomic<T, R, N> {
    /// Wraps the storable value's raw marked pointer in a new [`Atomic`].
    #[inline]
    fn from(storable: Storable<T, R, N>) -> Self {
        let raw = storable.into_marked_ptr();
        Self { inner: AtomicMarkedPtr::new(raw), _marker: PhantomData }
    }
}
/********** impl Pointer **************************************************************************/
impl<T, R: Reclaim<T>, const N: usize> fmt::Pointer for Atomic<T, R, N> {
    /// Formats the current pointer value (loaded with `SeqCst`).
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let current = self.inner.load(Ordering::SeqCst);
        fmt::Pointer::fmt(&current, f)
    }
}
// *************************************************************************************************
// CompareExchangeErr
// *************************************************************************************************
/// An error type indicating a failed *compare-and-swap* operation.
///
/// In case the *swap* part of the operation (the `new` argument) is an owning
/// type like [`Owned`] it must be retrieved from the `input`, otherwise it will
/// be dropped and de-allocated when the `CompareExchangeErr` goes out of scope.
#[derive(Copy, Clone, Eq, Ord, PartialEq, PartialOrd)]
pub struct CompareExchangeErr<S, T, R, const N: usize> {
    /// The actually present value, as loaded by the CAS operation.
    pub loaded: Unprotected<T, R, N>,
    /// The value used as input for the CAS operation (`new` parameter).
    pub input: S,
}
|
#[macro_use]
extern crate log;
extern crate env_logger;
extern crate riirc;
/// Entry point: initializes logging, handles the ad-hoc CLI flags, loads
/// and validates `riirc.toml`, then starts the GUI.
fn main() {
    env_logger::Builder::from_default_env()
        .default_format_timestamp(false)
        .init();
    // TODO use structopt or something like that for this
    let mut args = ::std::env::args();
    if let Some(next) = args.nth(1) {
        match next.as_str() {
            "-c" | "--config" => {
                info!("generating a default toml config");
                let mut stdout = ::std::io::stdout();
                riirc::Config::default().dump(&mut stdout);
                return;
            }
            "-a" | "--attach" => {
                // this'll attach to the daemon
                return;
            }
            // Covers `-h`/`--help` and any unknown argument. The previous
            // pattern `"-h" | "--help" | _` made the named alternatives
            // redundant, since the trailing `_` already matches everything.
            _ => {
                let help = &[
                    "-c, --config: writes a default config to stdout",
                    "-a, --attach: TODO",
                ];
                let help =
                    help.iter()
                        .map(|s| format!("\n\t{}", s))
                        .fold(String::new(), |mut a, c| {
                            a.push_str(&c);
                            a
                        });
                info!("{}", help);
                return;
            }
        }
    }
    // Exit code 2: configuration file missing or unreadable.
    let config = riirc::Config::load("riirc.toml")
        .map_err(|e| {
            error!("{}", e);
            ::std::process::exit(2);
        }).unwrap();
    // Exit code 1: a required config field is empty.
    macro_rules! check {
        ($e:expr) => {
            if $e.is_empty() {
                error!(
                    "'{}' field is missing from the config",
                    stringify!($e).split('.').last().unwrap()
                );
                ::std::process::exit(1);
            }
        };
    }
    check!(config.server);
    check!(config.nick);
    check!(config.user);
    check!(config.real);
    riirc::Gui::new(config).run();
}
|
use lazy_static::lazy_static;
use scan_fmt::scan_fmt;
use std::str::FromStr;
type Registers = [usize; 6];
/// The 16 opcodes of the register VM (this appears to be the Advent of
/// Code 2018 "wristwatch device" instruction set).
///
/// Suffix convention: `r` = operand is a register index, `i` = operand is
/// an immediate value.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
enum Opcode {
    Addr,
    Addi,
    Mulr,
    Muli,
    Banr,
    Bani,
    Borr,
    Bori,
    Setr,
    Seti,
    Gtir,
    Gtri,
    Gtrr,
    Eqir,
    Eqri,
    Eqrr,
}
impl FromStr for Opcode {
    type Err = ();
    /// Parses an opcode mnemonic; any unknown name yields `Err(())`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use self::Opcode::*;
        // Mnemonic -> variant lookup table.
        const TABLE: [(&'static str, Opcode); 16] = [
            ("addr", Addr),
            ("addi", Addi),
            ("mulr", Mulr),
            ("muli", Muli),
            ("banr", Banr),
            ("bani", Bani),
            ("borr", Borr),
            ("bori", Bori),
            ("setr", Setr),
            ("seti", Seti),
            ("gtir", Gtir),
            ("gtri", Gtri),
            ("gtrr", Gtrr),
            ("eqir", Eqir),
            ("eqri", Eqri),
            ("eqrr", Eqrr),
        ];
        TABLE
            .iter()
            .find(|&&(name, _)| name == s)
            .map(|&(_, op)| op)
            .ok_or(())
    }
}
/// A single decoded instruction: an opcode plus its three operands.
struct Inst {
    opcode: Opcode,
    // First input operand (register index or immediate, depending on opcode).
    a: usize,
    // Second input operand (register index or immediate, depending on opcode).
    b: usize,
    // Index of the register that receives the result.
    c: usize,
}
impl Inst {
    /// Bundles an opcode with its three operands.
    fn new(opcode: Opcode, a: usize, b: usize, c: usize) -> Self {
        Inst { opcode, a, b, c }
    }
    /// Executes this instruction against `registers`, writing the result
    /// into register `c`.
    fn exec(&self, registers: &mut Registers) {
        use self::Opcode::*;
        // All fields are `Copy`, so destructuring just copies them out.
        let Inst { opcode, a, b, c } = *self;
        let reg = |i: usize| registers[i];
        let value = match opcode {
            Addr => reg(a) + reg(b),
            Addi => reg(a) + b,
            Mulr => reg(a) * reg(b),
            Muli => reg(a) * b,
            Banr => reg(a) & reg(b),
            Bani => reg(a) & b,
            Borr => reg(a) | reg(b),
            Bori => reg(a) | b,
            Setr => reg(a),
            Seti => a,
            // Comparison opcodes store 1 for true and 0 for false.
            Gtir => (a > reg(b)) as usize,
            Gtri => (reg(a) > b) as usize,
            Gtrr => (reg(a) > reg(b)) as usize,
            Eqir => (a == reg(b)) as usize,
            Eqri => (reg(a) == b) as usize,
            Eqrr => (reg(a) == reg(b)) as usize,
        };
        registers[c] = value;
    }
}
/// A loaded program: the register index bound to the instruction pointer
/// plus the instruction list.
struct Program {
    ip: usize,
    insts: Vec<Inst>,
}
impl Program {
    /// Runs the program until the instruction pointer leaves the valid
    /// instruction range, mutating `reg` in place.
    fn run(&self, reg: &mut Registers) {
        loop {
            let ip = reg[self.ip];
            // `get` returns `None` exactly when `ip >= self.insts.len()`.
            match self.insts.get(ip) {
                Some(inst) => inst.exec(reg),
                None => break,
            }
            reg[self.ip] += 1;
        }
    }
}
lazy_static! {
    /// The program parsed from `input.txt`: the first line (`#ip N`) binds
    /// register `N` to the instruction pointer, the remaining lines are
    /// instructions of the form `<opcode> <a> <b> <c>`.
    static ref PROGRAM: Program = {
        let mut lines = include_str!("input.txt").lines();
        // Parse everything after the "#ip " prefix instead of a single
        // character, so a multi-digit register number would also parse.
        let ip = lines.next().unwrap()[4..].trim().parse().unwrap();
        let insts = lines
            .map(|line| {
                let (opcode, a, b, c) = scan_fmt!(line, "{} {} {} {}", Opcode, usize, usize, usize);
                Inst::new(opcode.unwrap(), a.unwrap(), b.unwrap(), c.unwrap())
            })
            .collect();
        Program { ip, insts }
    };
}
/// Part 1: run the program with all registers zeroed and print register 0.
fn part1() {
    let mut reg = [0; 6];
    PROGRAM.run(&mut reg);
    println!("{}", reg[0]);
}
/// Part 2: run the program with register 0 set to 1 and print register 0.
///
/// NOTE(review): this executes the VM directly; for the usual day-19-style
/// input that brute-force run can take a very long time — confirm it
/// terminates in acceptable time for this input.
fn part2() {
    let mut reg = [1, 0, 0, 0, 0, 0];
    PROGRAM.run(&mut reg);
    println!("{}", reg[0]);
}
/// Prints the answers for both puzzle parts, one per line.
fn main() {
    part1();
    part2();
}
|
// NOTE(review): this file appears to be generated by svd2rust; prefer
// regenerating from the SVD file over hand-editing.
#[doc = "Register `RGSR` reader"]
pub type R = crate::R<RGSR_SPEC>;
#[doc = "Field `OF` reader - Trigger event overrun flag The flag is set when a trigger event occurs on DMA request generator channel x, while the DMA request generator counter value is lower than GNBREQ. The flag is cleared by writing 1 to the corresponding COFx bit in DMAMUX_RGCFR register."]
pub type OF_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:3 - Trigger event overrun flag The flag is set when a trigger event occurs on DMA request generator channel x, while the DMA request generator counter value is lower than GNBREQ. The flag is cleared by writing 1 to the corresponding COFx bit in DMAMUX_RGCFR register."]
    #[inline(always)]
    pub fn of(&self) -> OF_R {
        // One overrun flag per request-generator channel (bits 0..=3).
        OF_R::new((self.bits & 0x0f) as u8)
    }
}
#[doc = "DMAMux - DMA request generator status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rgsr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RGSR_SPEC;
impl crate::RegisterSpec for RGSR_SPEC {
    // RGSR is a 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`rgsr::R`](R) reader structure"]
impl crate::Readable for RGSR_SPEC {}
#[doc = "`reset()` method sets RGSR to value 0"]
impl crate::Resettable for RGSR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2016 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT.
// FFI declarations for the GASPI group-management API (GPI-2).
// Signatures mirror the C prototypes; out-parameters are raw pointers the
// callee writes into. See the GASPI specification for each call's semantics.
extern "C"
{
    pub fn gaspi_group_add(group: gaspi_group_t, rank: gaspi_rank_t) -> gaspi_return_t;
    pub fn gaspi_group_commit(group: gaspi_group_t, timeout: gaspi_timeout_t) -> gaspi_return_t;
    pub fn gaspi_group_create(group: *mut gaspi_group_t) -> gaspi_return_t;
    pub fn gaspi_group_delete(group: gaspi_group_t) -> gaspi_return_t;
    pub fn gaspi_group_max(group_max: *mut gaspi_number_t) -> gaspi_return_t;
    pub fn gaspi_group_num(group_num: *mut gaspi_number_t) -> gaspi_return_t;
    pub fn gaspi_group_ranks(group: gaspi_group_t, group_ranks: *mut gaspi_rank_t) -> gaspi_return_t;
    pub fn gaspi_group_size(group: gaspi_group_t, group_size: *mut gaspi_number_t) -> gaspi_return_t;
}
|
//! Deals with configuring the I/O APIC.
use super::super::memory::map_page_at;
use super::IRQ_INTERRUPT_NUMS;
use core::fmt;
use core::ptr;
use memory::{PhysicalAddress, VirtualAddress, NO_CACHE, READABLE, WRITABLE};
use x86_64::instructions::port::outb;
/// The physical base address of the memory mapped I/O APIC.
const IO_APIC_BASE: PhysicalAddress = PhysicalAddress::from_const(0xfec00000);
/// Initializes the I/O APIC.
/// Initializes the I/O APIC.
///
/// Maps the I/O APIC MMIO page (uncached), masks the legacy 8259 PIC,
/// installs redirection entries for the 16 legacy IRQs, masks IRQ 2 and
/// finally switches interrupt routing over to the I/O APIC.
pub fn init() {
    assert_has_not_been_called!("The I/O APIC should only be initialized once.");
    // MMIO must be mapped without caching.
    map_page_at(
        get_ioapic_base(),
        IO_APIC_BASE,
        READABLE | WRITABLE | NO_CACHE
    );
    // Disable the 8259 PIC.
    unsafe {
        outb(0x21, 0xff);
        outb(0xa1, 0xff);
    }
    // Route each of the 16 legacy IRQs to its configured interrupt vector.
    for i in 0..16 {
        let mut irq = IORedirectionEntry::new();
        irq.set_vector(IRQ_INTERRUPT_NUMS[i]);
        set_irq(i as u8, irq);
    }
    // Deactivate irq2.
    // NOTE(review): IRQ 2 is presumably the PIC cascade line — confirm.
    let mut irq2 = IORedirectionEntry::new();
    irq2.set_inactive();
    set_irq(2, irq2);
    // Reroute interrupts to the IOAPIC.
    // (Writes to ports 0x22/0x23 — presumably the IMCR sequence; confirm.)
    unsafe {
        outb(0x22, 0x70);
        outb(0x23, 0x01);
    }
}
/// Writes an I/O APIC register.
fn set_register(reg: u8, value: u32) {
unsafe {
*get_ioapic_base().as_mut_ptr() = reg as u32;
*(get_ioapic_base() + 0x10).as_mut_ptr() = value;
}
}
/// Sets the given IRQ number to the specified value.
fn set_irq(number: u8, value: IORedirectionEntry) {
assert!(number < 24);
let reg = 0x10 + number * 2;
// Disable the entry, before setting the destination.
set_register(reg, MASK.bits() as u32);
set_register(reg + 1, (value.0 >> 32) as u32);
set_register(reg, value.0 as u32);
}
/// Returns the base address for the I/O APIC.
fn get_ioapic_base() -> VirtualAddress {
IO_APIC_BASE.to_virtual()
}
/// Represents an entry in the I/O APIC redirection table.
#[repr(C)]
struct IORedirectionEntry(u64);
// NOTE(review): this uses the old bitflags 0.x `flags` syntax; keep the
// crate version pinned or migrate the macro body when upgrading.
bitflags! {
    flags IORedirectionEntryFlags: u64 {
        /// Corresponds to the interrupt vector in the IVT.
        const VECTOR = 0xff,
        /// The delivery mode of the interrupt.
        const DELIVERY_MODE = 0b111 << 8,
        /// Delivers the interrupt to the specified vector.
        const FIXED_DELIVERY_MODE = 0b000 << 8,
        /// Delivers the interrupt to the processor with the lowest priority.
        const LOWEST_PRIORITY_DELIVERY_MODE = 0b001 << 8,
        /// Delivers an SMI interrupt.
        const SMI_DELIVERY_MODE = 0b010 << 8,
        /// Delivers an NMI interrupt.
        const NMI_DELIVERY_MODE = 0b100 << 8,
        /// For external interrupts.
        const EXTINT_DELIVERY_MODE = 0b111 << 8,
        /// Delivers an INIT request.
        const INIT_DELIVERY_MODE = 0b101 << 8,
        /// Specifies how the destination field is to be interpreted.
        const DESTINATION_MODE = 1 << 11,
        /// The specified destination references a physical processor ID.
        const PHYSICAL_DESTINATION_MODE = 0 << 11,
        /// The specified destination references a logical processor ID.
        const LOGICAL_DESTINATION_MODE = 1 << 11,
        /// The delivery status of the interrupt.
        ///
        /// Read only.
        const DELIVERY_STATUS = 1 << 12,
        /// Specifies when the pin is active.
        const PIN_POLARITY = 1 << 13,
        /// The pin is active when high.
        const HIGH_ACTIVE_PIN_POLARITY = 0 << 13,
        /// The pin is active when low.
        const LOW_ACTIVE_PIN_POLARITY = 1 << 13,
        /// Indicates if the interrupt is being serviced.
        ///
        /// Read only.
        ///
        /// NOTE(review): name has a typo ("REMOTRE" → "REMOTE"); renaming
        /// would touch all users, so it is only flagged here.
        const REMOTRE_IRR = 1 << 14,
        /// Specifies the trigger mode for the interrupt.
        const TRIGGER_MODE = 1 << 15,
        /// For edge sensitive interrupts.
        const EDGE_SENSITIVE = 0 << 15,
        /// For level sensitive interrupts.
        const LEVEL_SENSITIVE = 1 << 15,
        /// Masks the interrupt.
        const MASK = 1 << 16,
        /// The destination processor for this interrupt.
        const DESTINATION = 0xff << 56
    }
}
impl IORedirectionEntry {
    /// Creates a new redirection entry with default settings: active,
    /// fixed delivery, edge-triggered, active-high, physical destination
    /// set to the current CPU.
    fn new() -> IORedirectionEntry {
        let mut register = IORedirectionEntry(0);
        register.set_active();
        register.set_delivery_mode(FIXED_DELIVERY_MODE);
        register.set_trigger_mode(EDGE_SENSITIVE);
        register.set_polarity(HIGH_ACTIVE_PIN_POLARITY);
        // 0xff sends the interrupt to all processors.
        // TODO: Don't use this ID here.
        register.set_destination(
            PHYSICAL_DESTINATION_MODE,
            ::multitasking::get_cpu_id() as u8
        );
        register
    }
    /// Sets the vector of this interrupt.
    fn set_vector(&mut self, num: u8) {
        // Clear the old vector bits, then set the new value.
        self.0 &= !VECTOR.bits();
        self.0 |= num as u64;
    }
    /// Sets the delivery mode for this interrupt.
    fn set_delivery_mode(&mut self, mode: IORedirectionEntryFlags) {
        self.0 &= !DELIVERY_MODE.bits();
        self.0 |= mode.bits();
    }
    /// Sets the trigger mode for this interrupt.
    fn set_trigger_mode(&mut self, mode: IORedirectionEntryFlags) {
        self.0 &= !TRIGGER_MODE.bits();
        self.0 |= mode.bits();
    }
    /// Sets the polarity for this interrupt.
    fn set_polarity(&mut self, polarity: IORedirectionEntryFlags) {
        self.0 &= !PIN_POLARITY.bits();
        self.0 |= polarity.bits();
    }
    /// Deactivates this interrupt.
    fn set_inactive(&mut self) {
        self.0 |= MASK.bits();
    }
    /// Activates this interrupt.
    fn set_active(&mut self) {
        self.0 &= !MASK.bits();
    }
    /// Sets the destination for this interrupt.
    fn set_destination(&mut self, mode: IORedirectionEntryFlags, dest: u8) {
        // Set the destination mode.
        self.0 &= !DESTINATION_MODE.bits();
        self.0 |= mode.bits();
        // Set the actual destination.
        self.0 &= !DESTINATION.bits();
        self.0 |= (dest as u64) << 56;
    }
}
impl fmt::Debug for IORedirectionEntry {
    /// Prints the interrupt vector (hex) and whether the entry is unmasked.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let vector = self.0 & VECTOR.bits();
        let active = self.0 & MASK.bits() == 0;
        write!(f, "Vector: {:x}, Active: {}", vector, active)
    }
}
|
use serde::Deserialize;
use serde::Serialize;
/// A point in time in several wire representations.
#[derive(Clone, Debug, Default, PartialEq, Deserialize, Serialize)]
pub struct Timestamp {
    // Seconds since the Unix epoch — presumably duplicates `unix`; confirm
    // against the producing API.
    pub epoch: u32,
    // Seconds since the Unix epoch (note: u32 overflows in 2106).
    pub unix: u32,
    // Human-readable rendering of the same instant.
    pub human: String,
}
|
// NOTE(review): this file appears to be generated by svd2rust; prefer
// regenerating from the SVD file over hand-editing.
#[doc = "Register `EXTI_HWCFGR4` reader"]
pub type R = crate::R<EXTI_HWCFGR4_SPEC>;
#[doc = "Field `EVENT_TRG` reader - EVENT_TRG"]
pub type EVENT_TRG_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - EVENT_TRG"]
    #[inline(always)]
    pub fn event_trg(&self) -> EVENT_TRG_R {
        // The field spans the entire 32-bit register.
        EVENT_TRG_R::new(self.bits)
    }
}
#[doc = "EXTI hardware configuration register 4\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`exti_hwcfgr4::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct EXTI_HWCFGR4_SPEC;
impl crate::RegisterSpec for EXTI_HWCFGR4_SPEC {
    // EXTI_HWCFGR4 is a 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`exti_hwcfgr4::R`](R) reader structure"]
impl crate::Readable for EXTI_HWCFGR4_SPEC {}
#[doc = "`reset()` method sets EXTI_HWCFGR4 to value 0x0001_ffff"]
impl crate::Resettable for EXTI_HWCFGR4_SPEC {
    const RESET_VALUE: Self::Ux = 0x0001_ffff;
}
|
use tokio::signal;
/// Tries to raise the process's file-descriptor soft limit up to the hard
/// limit via `fdlimit`, logging the outcome instead of failing.
///
/// `fdlimit` signals failure by panicking, so the call is wrapped in
/// `catch_unwind` and the panic payload is turned into a warning.
pub(crate) fn raise_fd_limit() {
    match std::panic::catch_unwind(fdlimit::raise_fd_limit) {
        Ok(Some(limit)) => {
            tracing::debug!("Increase file limit from soft to hard (limit is {limit})")
        }
        Ok(None) => tracing::debug!("Failed to increase file limit"),
        Err(err) => {
            // Panic payloads from panic!() are `&str` or `String`; extract
            // whichever this one is.
            let err = if let Some(err) = err.downcast_ref::<&str>() {
                *err
            } else if let Some(err) = err.downcast_ref::<String>() {
                err
            } else {
                unreachable!("Should be unreachable as `fdlimit` uses panic macro, which should return either `&str` or `String`.")
            };
            tracing::warn!("Failed to increase file limit: {err}")
        }
    }
}
/// Resolves once the process receives SIGINT or SIGTERM — whichever
/// arrives first — logging which signal triggered the shutdown.
#[cfg(unix)]
pub(crate) async fn shutdown_signal() {
    use futures::FutureExt;
    // Race the two signal streams; the first to fire completes the select.
    futures::future::select(
        Box::pin(
            signal::unix::signal(signal::unix::SignalKind::interrupt())
                .expect("Setting signal handlers must never fail")
                .recv()
                .map(|_| {
                    tracing::info!("Received SIGINT, shutting down farmer...");
                }),
        ),
        Box::pin(
            signal::unix::signal(signal::unix::SignalKind::terminate())
                .expect("Setting signal handlers must never fail")
                .recv()
                .map(|_| {
                    tracing::info!("Received SIGTERM, shutting down farmer...");
                }),
        ),
    )
    .await;
}
/// Non-Unix fallback: resolves once Ctrl+C is received.
#[cfg(not(unix))]
pub(crate) async fn shutdown_signal() {
    signal::ctrl_c()
        .await
        .expect("Setting signal handlers must never fail");
    tracing::info!("Received Ctrl+C, shutting down farmer...");
}
|
use glium::texture::Texture2d;
use glium::Display;
use image::{ImageBuffer, Rgb};
use support;
use noise::{Perlin, Seedable};
use noise::utils::*;
use nalgebra::clamp;
use scarlet::colors::hslcolor::HSLColor;
use scarlet::color::{RGBColor, Color};
use scarlet::illuminants::Illuminant;
// Really bad code
// Generates a planet surface texture from two layers of seeded Perlin
// noise mapped onto a sphere: a coarse base layer plus a 10x-frequency
// detail layer (weighted at 1/4).
//
// Pixels whose brightness is at most 80 are tinted with `surf_color`,
// the rest with `oc_color`.
pub fn gen_planet_texture(seed: &[usize], disp: &Display, surf_color: (f32, f32, f32), oc_color: (f32, f32, f32)) -> Texture2d{
    let (s_x, s_y) = (1024, 512);
    let perlin = Perlin::new();
    let perlin = perlin.set_seed((seed[0] + seed[1] + seed[2]) as u32);
    // Ugh.... Thanks noise-rs creator! This fn is awesome!
    let surface_noise = SphereMapBuilder::new(&perlin)
        .set_size(s_x, s_y)
        .set_bounds(-90.0, 90.0, -180.0, 180.0)
        .build();
    // Detail layer: same noise sampled at 10x the angular range.
    let surface_noise_details = SphereMapBuilder::new(&perlin)
        .set_size(s_x, s_y)
        .set_bounds(-90.0 * 10.0, 90.0 * 10.0, -180.0 * 10.0, 180.0 * 10.0)
        .build();
    let mut planet_tex: ImageBuffer<Rgb<u8>, Vec<u8>> = ImageBuffer::new(s_x as u32, s_y as u32);
    for x in 0..s_x{
        for y in 0..s_y{
            let surf_px = surface_noise.get_value(x, y);
            let surf_px_dt = surface_noise_details.get_value(x, y) / 4.0;
            // Map the [-1, 1] noise value into a 0..=255 brightness.
            let surf_px = (clamp((surf_px + surf_px_dt) * 0.5 + 0.5, 0.0, 1.0) * 255.0) as f32;
            // The two original branches only differed in the color tuple;
            // pick the tint once and write a single pixel expression.
            let (cr, cg, cb) = if surf_px <= 80.0 { surf_color } else { oc_color };
            let pix = Rgb([
                (surf_px * cr) as u8,
                (surf_px * cg) as u8,
                (surf_px * cb) as u8,
            ]);
            planet_tex.put_pixel(x as u32, y as u32, pix);
        }
    }
    support::texture_loader::into_texture(&planet_tex, disp)
}
// Generates the space background texture: seeded Perlin noise drives the
// hue of a fixed-saturation, fixed-lightness HSL color, converted to RGB
// through the D50 illuminant.
pub fn gen_background_texture(seed: &[usize], disp: &Display) -> Texture2d{
    let (s_x, s_y) = (2048, 2048);
    let perlin = Perlin::new();
    let perlin = perlin.set_seed((seed[0] + seed[1] + seed[2]) as u32);
    /*let value = Value::new();
    let value = value.set_seed((seed[0] + seed[1] + seed[2]) as u32);*/
    let cloud_noise = SphereMapBuilder::new(&perlin)
        .set_size(s_x, s_y)
        .set_bounds(-90.0, 90.0, -180.0, 180.0)
        .build();
    /*let perlin = perlin.set_seed((seed[0] + seed[1] + seed[2]) as u32 * 3);
    let dark_noise = SphereMapBuilder::new(&perlin)
        .set_size(s_x, s_y)
        .set_bounds(-90.0, 90.0, -180.0, 180.0)
        .build();*/
    let mut background_tex: ImageBuffer<Rgb<u8>, Vec<u8>> = ImageBuffer::new(s_x as u32, s_y as u32);
    for x in 0..s_x{
        for y in 0..s_y{
            let cloud_noise = cloud_noise.get_value(x, y);
            //let dark_noise = dark_noise.get_value(x, y);
            // Map the [-1, 1] noise value into a 0..=360 hue angle.
            let bg_px = (clamp((cloud_noise) * 0.5 + 0.5, 0.0, 1.0) * 360.0) as f64;
            // HSL -> XYZ (D50) -> RGB round trip via scarlet.
            let hsl_px = HSLColor{h: bg_px, s: 0.2, l: 0.2}.to_xyz(Illuminant::D50);
            let hsl_px = RGBColor::from_xyz( hsl_px );
            let pix = Rgb([(hsl_px.r * 255.0) as u8, (hsl_px.g * 255.0) as u8, (hsl_px.b * 255.0) as u8]);
            background_tex.put_pixel(x as u32, y as u32, pix);
        }
    }
    support::texture_loader::into_texture(&background_tex, disp)
}
|
use crate::{command::Command, define_node_command, get_set_swap, scene::commands::SceneContext};
use rg3d::{
core::pool::Handle,
scene::{base::LevelOfDetail, base::LodGroup, graph::Graph, node::Node},
};
/// Reversible editor command that appends a [`LevelOfDetail`] to a node's
/// LOD group.
#[derive(Debug)]
pub struct AddLodGroupLevelCommand {
    handle: Handle<Node>,
    level: LevelOfDetail,
}
impl AddLodGroupLevelCommand {
    /// Creates the command; nothing is applied until `execute` runs.
    pub fn new(handle: Handle<Node>, level: LevelOfDetail) -> Self {
        Self { handle, level }
    }
}
impl<'a> Command<'a> for AddLodGroupLevelCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        "Add Lod Group Level".to_owned()
    }
    fn execute(&mut self, context: &mut Self::Context) {
        // Panics if the node has no LOD group; the editor is expected to
        // only issue this command for nodes that have one.
        context.scene.graph[self.handle]
            .lod_group_mut()
            .unwrap()
            .levels
            .push(self.level.clone());
    }
    fn revert(&mut self, context: &mut Self::Context) {
        // Undo: drop the level appended by `execute`.
        context.scene.graph[self.handle]
            .lod_group_mut()
            .unwrap()
            .levels
            .pop();
    }
}
/// Reversible editor command that removes the LOD level at `index` from a
/// node's LOD group, keeping the removed level for undo.
#[derive(Debug)]
pub struct RemoveLodGroupLevelCommand {
    handle: Handle<Node>,
    // Filled in by `execute` so `revert` can restore the removed level.
    level: Option<LevelOfDetail>,
    index: usize,
}
impl RemoveLodGroupLevelCommand {
    /// Creates the command; the level itself is captured during `execute`.
    pub fn new(handle: Handle<Node>, index: usize) -> Self {
        Self {
            handle,
            level: None,
            index,
        }
    }
}
impl<'a> Command<'a> for RemoveLodGroupLevelCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        "Remove Lod Group Level".to_owned()
    }
    fn execute(&mut self, context: &mut Self::Context) {
        // Stash the removed level for the later `revert`.
        self.level = Some(
            context.scene.graph[self.handle]
                .lod_group_mut()
                .unwrap()
                .levels
                .remove(self.index),
        );
    }
    fn revert(&mut self, context: &mut Self::Context) {
        let group = context.scene.graph[self.handle].lod_group_mut().unwrap();
        let level = self.level.take().unwrap();
        // Push when the list is empty (insert at a stale index would
        // panic); otherwise re-insert at the original position.
        if group.levels.is_empty() {
            group.levels.push(level);
        } else {
            group.levels.insert(self.index, level)
        }
    }
}
/// Reversible editor command that appends an object handle to the object
/// list of one LOD level.
#[derive(Debug)]
pub struct AddLodObjectCommand {
    handle: Handle<Node>,
    lod_index: usize,
    object: Handle<Node>,
    // Position the object was appended at; recorded by `execute` for undo.
    object_index: usize,
}
impl AddLodObjectCommand {
    /// Creates the command; nothing is applied until `execute` runs.
    pub fn new(handle: Handle<Node>, lod_index: usize, object: Handle<Node>) -> Self {
        Self {
            handle,
            lod_index,
            object,
            object_index: 0,
        }
    }
}
impl<'a> Command<'a> for AddLodObjectCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        "Add Lod Object".to_owned()
    }
    fn execute(&mut self, context: &mut Self::Context) {
        let objects = &mut context.scene.graph[self.handle]
            .lod_group_mut()
            .unwrap()
            .levels[self.lod_index]
            .objects;
        // Remember where the object lands so `revert` can remove it.
        self.object_index = objects.len();
        objects.push(self.object);
    }
    fn revert(&mut self, context: &mut Self::Context) {
        context.scene.graph[self.handle]
            .lod_group_mut()
            .unwrap()
            .levels[self.lod_index]
            .objects
            .remove(self.object_index);
    }
}
#[derive(Debug)]
pub struct RemoveLodObjectCommand {
handle: Handle<Node>,
lod_index: usize,
object: Handle<Node>,
object_index: usize,
}
impl RemoveLodObjectCommand {
    /// Creates a command removing the object at `object_index` from level
    /// `lod_index`; the removed handle is captured on execute for undo.
    pub fn new(handle: Handle<Node>, lod_index: usize, object_index: usize) -> Self {
        Self {
            object_index,
            object: Default::default(),
            lod_index,
            handle,
        }
    }
}
impl<'a> Command<'a> for RemoveLodObjectCommand {
    type Context = SceneContext<'a>;

    fn name(&mut self, _context: &Self::Context) -> String {
        String::from("Remove Lod Object")
    }

    /// Removes the object and stashes its handle for `revert`.
    fn execute(&mut self, context: &mut Self::Context) {
        let level = &mut context.scene.graph[self.handle]
            .lod_group_mut()
            .unwrap()
            .levels[self.lod_index];
        self.object = level.objects.remove(self.object_index);
    }

    /// Restores the object at its original index; `push` is used when the list
    /// is empty, since `insert` would panic on an out-of-range index.
    fn revert(&mut self, context: &mut Self::Context) {
        let level = &mut context.scene.graph[self.handle]
            .lod_group_mut()
            .unwrap()
            .levels[self.lod_index];
        if level.objects.is_empty() {
            level.objects.push(self.object);
        } else {
            level.objects.insert(self.object_index, self.object);
        }
    }
}
/// Edits the `begin` distance of one LOD level. `swap` exchanges the stored
/// and current values, so the command is its own inverse.
#[derive(Debug)]
pub struct ChangeLodRangeBeginCommand {
    handle: Handle<Node>,
    lod_index: usize,
    new_value: f32,
}
impl ChangeLodRangeBeginCommand {
    pub fn new(handle: Handle<Node>, lod_index: usize, new_value: f32) -> Self {
        Self {
            new_value,
            lod_index,
            handle,
        }
    }
    /// Writes `new_value` into the level's begin distance while capturing the
    /// previous value back into `new_value`.
    fn swap(&mut self, context: &mut SceneContext) {
        let level = &mut context.scene.graph[self.handle]
            .lod_group_mut()
            .unwrap()
            .levels[self.lod_index];
        let previous = level.begin();
        level.set_begin(self.new_value);
        self.new_value = previous;
    }
}
impl<'a> Command<'a> for ChangeLodRangeBeginCommand {
    type Context = SceneContext<'a>;

    fn name(&mut self, _context: &Self::Context) -> String {
        String::from("Change Lod Range Begin")
    }

    // `swap` is an involution, so do and undo are the same operation.
    fn execute(&mut self, context: &mut Self::Context) {
        self.swap(context)
    }

    fn revert(&mut self, context: &mut Self::Context) {
        self.swap(context)
    }
}
/// Edits the `end` distance of one LOD level. `swap` exchanges the stored
/// and current values, so the command is its own inverse.
#[derive(Debug)]
pub struct ChangeLodRangeEndCommand {
    handle: Handle<Node>,
    lod_index: usize,
    new_value: f32,
}
impl ChangeLodRangeEndCommand {
    pub fn new(handle: Handle<Node>, lod_index: usize, new_value: f32) -> Self {
        Self {
            new_value,
            lod_index,
            handle,
        }
    }
    /// Writes `new_value` into the level's end distance while capturing the
    /// previous value back into `new_value`.
    fn swap(&mut self, context: &mut SceneContext) {
        let level = &mut context.scene.graph[self.handle]
            .lod_group_mut()
            .unwrap()
            .levels[self.lod_index];
        let previous = level.end();
        level.set_end(self.new_value);
        self.new_value = previous;
    }
}
impl<'a> Command<'a> for ChangeLodRangeEndCommand {
    type Context = SceneContext<'a>;

    fn name(&mut self, _context: &Self::Context) -> String {
        String::from("Change Lod Range End")
    }

    // `swap` is an involution, so do and undo are the same operation.
    fn execute(&mut self, context: &mut Self::Context) {
        self.swap(context)
    }

    fn revert(&mut self, context: &mut Self::Context) {
        self.swap(context)
    }
}
// Macro-generated command: swaps the node's entire LOD group in and out
// (take/set), which yields execute/revert for free.
define_node_command!(SetLodGroupCommand("Set Lod Group", Option<LodGroup>) where fn swap(self, node) {
    get_set_swap!(self, node, take_lod_group, set_lod_group);
});
|
use crate::{
cmd::multisig::Artifact,
cmd::*,
keypair::{Network, PublicKey},
result::Result,
traits::{ToJson, TxnEnvelope},
};
use helium_api::vars;
use std::{convert::TryInto, str::FromStr};
#[derive(Debug, StructOpt)]
/// Commands for chain variables
pub enum Cmd {
    // Read-only: list the chain variables currently on a network.
    Current(Current),
    // Build (and optionally print for signing) a chain-variable transaction.
    Create(Create),
}
// NOTE: `///` comments on structopt items double as the CLI help text —
// edit them only when the help output should change.
#[derive(Debug, StructOpt)]
/// Lists current chain variables
pub struct Current {
    /// The network to get the variables for (mainnet/testnet). Defaults to the
    /// network associated with the active wallet.
    #[structopt(long)]
    network: Option<Network>,
}
// NOTE: `///` comments on structopt items double as the CLI help text.
#[derive(Debug, StructOpt)]
/// Create a chain variable transaction
pub struct Create {
    /// Set of Variables to set
    #[structopt(long, name = "set_name=value")]
    set: Vec<VarSet>,
    /// Variable to unset
    #[structopt(long, name = "unset_name")]
    unset: Vec<String>,
    /// Variable to cancel
    #[structopt(long, name = "cancel_name")]
    cancel: Vec<String>,
    /// Signing keys to set
    #[structopt(long, name = "key")]
    key: Vec<PublicKey>,
    /// The nonce to use
    // When absent, the nonce is derived from the chain's current "nonce"
    // variable in `Create::run`.
    #[structopt(long, number_of_values(1))]
    nonce: Option<u32>,
    /// Return the encoded transaction for signing
    #[structopt(long)]
    txn: bool,
    /// The network to create the variables for (mainnet/testnet). Defaults to
    /// the network associated with the active wallet.
    #[structopt(long)]
    network: Option<Network>,
}
impl Cmd {
    /// Dispatches to the chosen subcommand.
    pub async fn run(&self, opts: Opts) -> Result {
        match self {
            Self::Current(cmd) => cmd.run(opts).await,
            Self::Create(cmd) => cmd.run(opts).await,
        }
    }
}
impl Current {
    /// Fetches the chain variables of the selected network and prints them as JSON.
    pub async fn run(&self, opts: Opts) -> Result {
        let wallet = load_wallet(opts.files)?;
        // Default to the active wallet's network when none was given.
        let network = self.network.unwrap_or(wallet.public_key.network);
        let client = new_client(api_url(network));
        print_json(&vars::get(&client).await?)
    }
}
impl Create {
    /// Builds a `BlockchainTxnVarsV1` from the set/unset/cancel/key arguments
    /// and either prints the signable artifact (`--txn`) or the transaction JSON.
    pub async fn run(&self, opts: Opts) -> Result {
        let wallet = load_wallet(opts.files)?;
        // Default to the active wallet's network when none was given.
        let network = self.network.unwrap_or(wallet.public_key.network);
        let client = new_client(api_url(network));
        let vars = vars::get(&client).await?;
        let mut txn = BlockchainTxnVarsV1 {
            version_predicate: 0,
            master_key: vec![],
            proof: vec![],
            key_proof: vec![],
            vars: self.set.iter().map(|v| v.0.clone()).collect(),
            // Default nonce: one past the chain's current "nonce" var (0 when
            // the var is absent or not a number).
            // NOTE(review): `try_into().unwrap()` panics if the on-chain nonce
            // exceeds u32::MAX — confirm whether an error would be preferable.
            nonce: self.nonce.unwrap_or_else(|| {
                vars.get("nonce").map_or(0, |v| {
                    let result: u32 = v.as_u64().unwrap_or(0).try_into().unwrap();
                    result + 1
                })
            }),
            unsets: self.unset.iter().map(|v| v.as_bytes().to_vec()).collect(),
            cancels: self.cancel.iter().map(|v| v.as_bytes().to_vec()).collect(),
            multi_key_proofs: vec![],
            multi_proofs: vec![],
            multi_keys: self.key.iter().map(|v| v.to_vec()).collect(),
        };
        // `dedup_by(|a, b| a == b)` is exactly `dedup()`.
        // NOTE(review): dedup only removes *consecutive* duplicates; repeated
        // keys given in non-adjacent positions survive. Sort first (or use a
        // set) if full de-duplication is intended — confirm, since key order
        // may be significant in the transaction.
        txn.multi_keys.dedup();
        if self.txn {
            print_json(&Artifact::from_txn(&txn.in_envelope())?)
        } else {
            print_json(&txn.to_json()?)
        }
    }
}
// A `name=value` CLI argument parsed into a chain variable (see `FromStr` below).
#[derive(Debug)]
struct VarSet(BlockchainVarV1);
impl FromStr for VarSet {
    type Err = Box<dyn std::error::Error>;
    /// Parses `name=value`, where `value` must be a JSON literal, and maps the
    /// JSON type onto the chain-variable type tag ("int"/"float"/"string"/"atom").
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        let pos = s
            .find('=')
            .ok_or_else(|| format!("invalid KEY=value: missing `=` in `{}`", s))?;
        let name = s[..pos].to_string();
        // The value part must itself parse as JSON (number, quoted string, ...).
        let value: serde_json::Value = s[pos + 1..].parse()?;
        let var = match value {
            serde_json::Value::Number(n) if n.is_i64() => BlockchainVarV1 {
                name,
                r#type: "int".to_string(),
                value: n.to_string().as_bytes().to_vec(),
            },
            serde_json::Value::Number(n) if n.is_f64() => BlockchainVarV1 {
                name,
                r#type: "float".to_string(),
                value: s.as_bytes().to_vec(),
            },
            serde_json::Value::String(s) => BlockchainVarV1 {
                name,
                r#type: "string".to_string(),
                value: s.as_bytes().to_vec(),
            },
            // NOTE(review): this arm catches bools/null/arrays and also u64
            // values above i64::MAX (neither is_i64 nor is_f64). Here `s` is
            // the *whole* "name=value" input (only the String arm shadows it),
            // so an atom's value includes the name and the `=` — this looks
            // unintended; verify against the expected chain-variable encoding.
            _ => BlockchainVarV1 {
                name,
                r#type: "atom".to_string(),
                value: s.as_bytes().to_vec(),
            },
        };
        Ok(VarSet(var))
    }
}
|
use log::{debug, error, info, trace, warn};
use edocore::math::vector::{UVector3};
use crate::utils::rle_array::RLEArray;
pub struct VoxelGrid {
    //pub voxels: [[[u8; 512]; 512]; 512]
    // Run-length encoded voxel storage, replacing the dense array sketched above.
    pub voxels: RLEArray<u8>,
    // Grid dimensions in voxels along x/y/z.
    pub size: UVector3,
}
impl VoxelGrid {
    /// Creates a grid holding `size.x * size.y * size.z` voxels.
    // NOTE(review): `with_capacity(0, len)` — the first argument is presumably
    // the fill value (all voxels start as 0); confirm against the RLEArray API.
    pub fn new(size: UVector3) -> VoxelGrid {
        let voxels = RLEArray::with_capacity(0, (size.x as usize)*(size.y as usize)*(size.z as usize));
        VoxelGrid {
            voxels,
            size,
        }
    }
    /// Stub: intended to look up the voxel at (x, y, z); currently does nothing
    /// and returns `()`.
    // TODO(review): implement the flat-index lookup and give this a real
    // return type (e.g. `u8`).
    pub fn get(&self, x: usize, y: usize, z: usize) {
    }
}
|
use std::convert::TryFrom;
use crate::{Element, Number, Value};
// Generates fallible conversions from `Number`, `Value` and `Element` into a
// concrete primitive type; only the exactly-matching variant succeeds.
macro_rules! number_to {
    ($id: ident, $type: ty) => {
        impl TryFrom<Number> for $type {
            type Error = ();
            fn try_from(value: Number) -> Result<Self, Self::Error> {
                match value {
                    Number::$id(x) => Ok(x),
                    _ => Err(()),
                }
            }
        }
        impl TryFrom<Value> for $type {
            type Error = ();
            fn try_from(value: Value) -> Result<Self, Self::Error> {
                match value {
                    Value::Number(Number::$id(x)) => Ok(x),
                    _ => Err(()),
                }
            }
        }
        impl TryFrom<Element> for $type {
            type Error = ();
            fn try_from(value: Element) -> Result<Self, Self::Error> {
                match value {
                    Element::Value(Value::Number(Number::$id(x))) => Ok(x),
                    _ => Err(()),
                }
            }
        }
    };
}
// One conversion set per `Number` variant / primitive pair.
number_to!(Unsigned8, u8);
number_to!(Signed8, i8);
number_to!(Unsigned16, u16);
number_to!(Signed16, i16);
number_to!(Unsigned32, u32);
number_to!(Signed32, i32);
number_to!(Unsigned64, u64);
number_to!(Signed64, i64);
number_to!(Unsigned128, u128);
number_to!(Signed128, i128);
number_to!(Float32, f32);
number_to!(Float64, f64);
// Generates fallible conversions from `Value` and `Element` into the payload
// type of one `Value` variant; only the exactly-matching variant succeeds.
macro_rules! value_to {
    ($id: ident, $type: ty) => {
        impl TryFrom<Value> for $type {
            type Error = ();
            fn try_from(value: Value) -> Result<Self, Self::Error> {
                match value {
                    Value::$id(x) => Ok(x),
                    _ => Err(()),
                }
            }
        }
        impl TryFrom<Element> for $type {
            type Error = ();
            fn try_from(value: Element) -> Result<Self, Self::Error> {
                match value {
                    Element::Value(Value::$id(x)) => Ok(x),
                    _ => Err(()),
                }
            }
        }
    };
}
// Unfinished draft of a mapping variant of `value_to!`; kept for reference.
/*
macro_rules! value_to_proc {
    ($id: ident, $type: ty, $func: expr) => {
        impl TryFrom<Value> for $type {
            type Error = ();
            fn try_from(value: $type) -> Result<Self, Self::Error> {
                if let Value::$id(x) = value { return Ok($func(x)) } else { Err(()) }
            }
        }
        impl TryFrom<Element> for $type {
            type Error = ();
            fn try_from(value: $type) -> Result<Self, Self::Error> {
                if let Element::Value(Value::$id(x)) = value { return Ok($func(x)) } else { Err(()) }
            }
        }
    };
}*/
// One conversion set per non-numeric `Value` variant.
value_to!(Boolean, bool);
value_to!(Char, char);
value_to!(String, String);
value_to!(Bytes, Vec<u8>);
value_to!(UUID, crate::Uuid);
|
/// Project Euler #1: if we list all the natural numbers below 10 that are
/// multiples of 3 or 5, we get 3, 5, 6 and 9, whose sum is 23. Find the sum
/// of all the multiples of 3 or 5 below 1000.
///
/// `sum_of_multiples` generalises the problem to any upper bound.
fn sum_of_multiples(limit: i32) -> i32 {
    (0..limit).filter(|v| v % 3 == 0 || v % 5 == 0).sum()
}

/// Prints the answer for the published limit of 1000.
pub fn execute() {
    println!("sum = {}", sum_of_multiples(1000));
}
|
use super::Filter;
/// Internal trait: combines `self` with an inner `filter` to produce a new,
/// wrapped filter. Sealed so downstream crates can use `Wrap` but not
/// implement it.
pub trait WrapSealed<F: Filter> {
    type Wrapped: Filter;
    fn wrap(&self, filter: F) -> Self::Wrapped;
}
// Blanket impl: a reference wraps exactly like the value it points to.
impl<'a, T, F> WrapSealed<F> for &'a T
where
    T: WrapSealed<F>,
    F: Filter,
{
    type Wrapped = T::Wrapped;
    fn wrap(&self, filter: F) -> Self::Wrapped {
        // `*self` has type `&T`, so method resolution picks `T::wrap` here
        // (not this impl again) — no infinite recursion.
        (*self).wrap(filter)
    }
}
/// Public face of `WrapSealed`: blanket-implemented below, never implemented
/// by users directly.
pub trait Wrap<F: Filter>: WrapSealed<F> {}
impl<T, F> Wrap<F> for T
where
    T: WrapSealed<F>,
    F: Filter,
{
}
|
use std::collections::HashMap;
use regex::Regex;
pub mod input_files
{
/// Compiles `s` into a `Regex`.
///
/// # Panics
/// Panics on an invalid pattern; all patterns in this module are compile-time
/// literals, so a panic indicates a programming error.
fn build_regex(s : &str) -> regex::Regex
{
    regex::Regex::new(s).unwrap()
}
/// Like [`build_regex`], but case-insensitive (prepends the `(?i)` flag).
fn build_regex_ci(s : &str) -> regex::Regex
{
    // `&String` deref-coerces to `&str`; the previous explicit `[..]` slice
    // was redundant.
    regex::Regex::new(&format!("(?i){s}")).unwrap()
}
/// Lookup tables built from the bundled "LVOs.i" and "custom.i" include files.
pub struct Lvo
{
    // library/resource name -> "library" or "resource"
    entity_type : std::collections::HashMap<String,String>,
    // library name -> synthesised base-pointer symbol ("<Name>Base")
    libname2handle : std::collections::HashMap<String,String>,
    // library name -> synthesised name-string symbol ("<Name>Name")
    libname2libstr : std::collections::HashMap<String,String>,
    // (library name, negative LVO offset) -> function name
    offset_name : std::collections::HashMap<(String,i32),String>,
    // custom-chip register offset -> register name
    custom_name : std::collections::HashMap<i32,String>,
}
impl Lvo
{
    /// Uppercases the first character of `s`; the rest is left untouched.
    fn capitalize(s: &str) -> String {
        let mut c = s.chars();
        match c.next() {
            None => String::new(),
            Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
        }
    }
    /// Builds all lookup tables by parsing the bundled "LVOs.i" and "custom.i"
    /// assembler include files (compiled into the binary via `include_bytes!`).
    pub fn new() -> Lvo
    {
        let lvo_lines = std::str::from_utf8(include_bytes!("LVOs.i")).unwrap().lines();
        let custom_lines = std::str::from_utf8(include_bytes!("custom.i")).unwrap().lines();
        // Section header: "** LVOs for <name>.<library|resource>"
        let lvo_re = build_regex(r"\*+\sLVOs for (.*)\.(library|resource)");
        // Offset line: "<func> equ <negative decimal offset>"
        let eq_off_re = build_regex(r"(\w+)\s+equ\s+(-\d+)");
        // Custom-register line: "<name> equ $<hex offset>"
        let eq_cust_re = build_regex(r"(?i)(\w+)\s+equ\s+\$([a-f\d]+)");
        let mut rval = Lvo{
            entity_type : std::collections::HashMap::new(),
            offset_name : std::collections::HashMap::new(),
            libname2handle : std::collections::HashMap::new(),
            libname2libstr : std::collections::HashMap::new(),
            custom_name : std::collections::HashMap::new(),
        };
        // Offset lines are attributed to the library of the most recent header.
        let mut libname = "";
        let mut libtype = "";
        // populate the entity types
        for line in lvo_lines
        {
            if let Some(caps) = lvo_re.captures(line) {
                libname = caps.get(1).map_or("", |m| m.as_str());
                libtype = caps.get(2).map_or("", |m| m.as_str());
                rval.entity_type.insert(libname.to_string(),libtype.to_string());
            } else if let Some(caps) = eq_off_re.captures(line) {
                let funcname = caps.get(1).map_or("", |m| m.as_str()).to_string();
                let funcoffset = caps.get(2).map_or("", |m| m.as_str()).parse::<i32>().unwrap();
                rval.offset_name.insert((libname.to_string(),funcoffset),funcname);
            }
        }
        // populate the custom types
        for line in custom_lines
        {
            if let Some(caps) = eq_cust_re.captures(line) {
                let regname = caps.get(1).map_or("", |m| m.as_str()).to_string();
                let regoffset = i32::from_str_radix(caps.get(2).map_or("", |m| m.as_str()), 16).unwrap();
                rval.custom_name.insert(regoffset,regname);
            }
        }
        // in the end create some aux mappings
        // ("<Lib>Base" / "<Lib>Name" symbols, one pair per library seen above).
        for ((key,_), _) in &rval.offset_name {
            let prefix = Self::capitalize(&key);
            // would be better to use &key as key lifetime is the same as the Lvo object
            // that would save some memory but lifetimes are beyond me ATM
            rval.libname2handle.insert(key.to_string(),format!("{}Base",prefix));
            rval.libname2libstr.insert(key.to_string(),format!("{}Name",prefix));
        }
        rval
    }
}
/// Pre-compiled regular expressions used to analyse a disassembled 68k
/// listing (LAB_xxxx labels, A6-relative library calls, hardware base
/// addresses, etc. — see the patterns in `load`).
pub struct AsmFile
{
    // Copies of ABSEXECBASE into a label.
    execcopy_re : regex::Regex,
    lab_re : regex::Regex,
    labeldecl_re : regex::Regex,
    leahardbase_re : regex::Regex,
    movehardbase_re : regex::Regex,
    set_ax_re : regex::Regex,
    // JMP/JSR with a negative A6 displacement (library calls), three syntaxes.
    syscall_re : regex::Regex,
    syscall_re2 : regex::Regex,
    syscall_re3 : regex::Regex,
    valid_base : regex::Regex,
    address_reg_re : regex::Regex,
    return_re : regex::Regex,
    ax_di_re : regex::Regex,
    ax_di_re_2 : regex::Regex,
    hexdata_re : regex::Regex,
}
impl AsmFile
{
    /// Compiles the regex set used to analyse a disassembled listing.
    // NOTE(review): `input_file` is accepted but never used — presumably the
    // listing is meant to be read here (or by a later change); confirm intent.
    pub fn load(input_file : &String) -> AsmFile
    {
        // Construct and return directly: the previous `let mut rval = ...; rval`
        // (needless mut + let-and-return) added nothing.
        AsmFile{
            execcopy_re : build_regex_ci(r"MOVE.*ABSEXECBASE.*,(LAB_....)\s"),
            lab_re : build_regex_ci(r"(LAB_....|ABSEXECBASE)"),
            labeldecl_re : build_regex_ci(r"(LAB_....):"),
            leahardbase_re : build_regex_ci(r"LEA\s+HARDBASE,A([0-6])"),
            movehardbase_re : build_regex_ci(r"MOVEA?.L\s+#\$0*DFF000,A([0-6])"),
            set_ax_re : build_regex_ci(r"MOVEA?\.L\s+([\S]+),A([0-6])\s"),
            syscall_re : build_regex_ci(r"(JMP|JSR)\s+(-\d+)\(A6\)"),
            syscall_re2 : build_regex_ci(r"(JMP|JSR)\s+\((-\d+),A6\)"),
            syscall_re3 : build_regex_ci(r"(JMP|JSR)\s+(-\$[\dA-F]+)\(A6\)"),
            valid_base : build_regex_ci(r"([\-\w]{3,}(\(A\d\))?)"),
            address_reg_re : build_regex_ci(r"A([0-6])"),
            return_re : build_regex_ci(r"\b(RT[SED])\b"),
            ax_di_re : build_regex_ci(r"([\s,])(\$[0-9A-F]+|\d+)\(A([0-6])\)"),
            ax_di_re_2 : build_regex_ci(r"([\s,])\((\$[0-9A-F]+|\d+),A([0-6])\)"),
            hexdata_re : build_regex_ci(r"(?:.*;.*:\s+|DC.[WLB]\s+\$)([A-F\d]+)"),
        }
    }
}
}
|
extern crate cfg_if;
extern crate wasm_bindgen;
extern crate chip8;
mod utils;
use cfg_if::cfg_if;
use wasm_bindgen::prelude::*;
use chip8::cpu::Cpu;
cfg_if! {
    // When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
    // allocator.
    // (wee_alloc trades allocation speed for a much smaller wasm binary.)
    if #[cfg(feature = "wee_alloc")] {
        extern crate wee_alloc;
        #[global_allocator]
        static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
    }
}
/// wasm-bindgen wrapper exposing the CHIP-8 CPU to JavaScript.
#[wasm_bindgen]
pub struct Emulator {
    cpu: Cpu
}
#[wasm_bindgen]
impl Emulator {
    /// Creates an emulator around a freshly constructed CPU.
    pub fn new() -> Emulator {
        Emulator {
            cpu: Cpu::new()
        }
    }
    /// Runs a single CPU cycle.
    pub fn tick(&mut self) {
        self.cpu.execute_cycle();
    }
    /// Steps the CPU's timers down by one.
    // NOTE(review): the JS caller is responsible for invoking this at the
    // timer rate (conventionally 60 Hz for CHIP-8) — confirm in the driver.
    pub fn decrement_timers(&mut self) {
        self.cpu.decrement_timers();
    }
    /// Loads a ROM image into the CPU.
    pub fn load(&mut self, data: &[u8]) {
        self.cpu.load(data);
    }
    /// Display height in pixels.
    pub fn height(&self) -> usize {
        chip8::display::HEIGHT
    }
    /// Display width in pixels.
    pub fn width(&self) -> usize {
        chip8::display::WIDTH
    }
    /// Raw pointer to the display buffer, so JS can view the pixels directly
    /// in wasm linear memory without copying.
    pub fn pixels(&self) -> *const bool {
        self.cpu.display.memory.as_ptr()
    }
    /// Current value of the CPU's `dt` register.
    // NOTE(review): named `display_timer` but reads `dt`, presumably the
    // CHIP-8 *delay* timer — confirm the intended naming.
    pub fn display_timer(&self) -> u8 {
        self.cpu.dt
    }
    /// Current value of the sound timer (`st`).
    pub fn sound_timer(&self) -> u8 {
        self.cpu.st
    }
    /// Marks `key` as pressed.
    pub fn key_down(&mut self, key: u8) {
        self.cpu.keypad.key_down(key);
    }
    /// Marks `key` as released.
    pub fn key_up(&mut self, key: u8) {
        self.cpu.keypad.key_up(key);
    }
}

/// `Default` mirrors `new()` (clippy: `new_without_default`).
impl Default for Emulator {
    fn default() -> Self {
        Self::new()
    }
}
|
#![cfg_attr(not(feature = "std"), no_std)]
// no_std builds: alias `core` as `std` so the rest of the file can use
// `std::` paths unconditionally.
#[cfg(not(feature = "std"))]
mod std {
    pub use core::*;
}
use std::any::{Any as StdAny, TypeId, type_name};
use std::fmt::{self, Debug, Display};
#[cfg(feature = "std")]
use std::{error::Error, rc::Rc, sync::Arc};
// ++++++++++++++++++++ Any ++++++++++++++++++++
/// Object-safe extension of `std::any::Any`: adds upcasts to `dyn StdAny`
/// (so the std downcast helpers can be reused) plus a runtime type name.
pub trait Any: StdAny {
    #[doc(hidden)]
    fn as_any(&self) -> &dyn StdAny;
    #[doc(hidden)]
    fn as_any_mut(&mut self) -> &mut dyn StdAny;
    #[doc(hidden)]
    #[cfg(feature = "std")]
    fn into_any(self: Box<Self>) -> Box<dyn StdAny>;
    #[doc(hidden)]
    #[cfg(feature = "std")]
    fn into_any_rc(self: Rc<Self>) -> Rc<dyn StdAny>;
    /// `std::any::type_name` of the concrete type behind the trait object.
    fn type_name(&self) -> &'static str;
}
// Blanket impl: every `'static` type is usable as `Any`; the upcasts are
// identity conversions.
impl<T> Any for T where T: StdAny {
    #[doc(hidden)]
    fn as_any(&self) -> &dyn StdAny { self }
    #[doc(hidden)]
    fn as_any_mut(&mut self) -> &mut dyn StdAny { self }
    #[cfg(feature = "std")]
    fn into_any(self: Box<Self>) -> Box<dyn StdAny> { self }
    #[cfg(feature = "std")]
    fn into_any_rc(self: Rc<Self>) -> Rc<dyn StdAny> { self }
    fn type_name(&self) -> &'static str { type_name::<Self>() }
}
/// `Any` for thread-safe objects; additionally upcasts `Arc`s.
#[cfg(feature = "std")]
pub trait AnySync: Any + Send + Sync {
    fn into_any_arc(self: Arc<Self>) -> Arc<dyn StdAny + Send + Sync>;
}
#[cfg(feature = "std")]
impl<T> AnySync for T where T: Any + Send + Sync {
    fn into_any_arc(self: Arc<Self>) -> Arc<dyn StdAny + Send + Sync> { self }
}
// ++++++++++++++++++++ TypeMismatch ++++++++++++++++++++
/// Records the expected vs. actual concrete type names of a failed downcast.
#[derive(Debug, Clone, Copy)]
pub struct TypeMismatch {
    pub expected: &'static str,
    pub found: &'static str,
}
impl TypeMismatch {
    /// Builds a mismatch for "expected `T`, found the type of `found_obj`".
    pub fn new<T, O>(found_obj: &O) -> Self
    where T: Any + ?Sized, O: Any + ?Sized
    {
        TypeMismatch {
            expected: type_name::<T>(),
            found: found_obj.type_name(),
        }
    }
}
impl Display for TypeMismatch {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "Type mismatch: Expected '{}', found '{}'!", self.expected, self.found)
    }
}
#[cfg(feature = "std")]
impl Error for TypeMismatch {}
// ++++++++++++++++++++ DowncastError ++++++++++++++++++++
/// A failed consuming downcast: carries the mismatch info plus the original
/// object, so the caller can recover it.
pub struct DowncastError<O> {
    mismatch: TypeMismatch,
    object: O,
}
impl<O> DowncastError<O> {
    pub fn new(mismatch: TypeMismatch, object: O) -> Self {
        Self{ mismatch, object }
    }
    /// The expected/found type names of the failure.
    pub fn type_mismatch(&self) -> TypeMismatch { self.mismatch }
    /// Recovers the object whose downcast failed.
    pub fn into_object(self) -> O { self.object }
}
impl<O> Debug for DowncastError<O> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // `object` is intentionally omitted: `O` is not required to be `Debug`.
        fmt.debug_struct("DowncastError")
            .field("mismatch", &self.mismatch)
            .finish()
    }
}
impl<O> Display for DowncastError<O> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.mismatch, fmt)
    }
}
#[cfg(feature = "std")]
impl<O> Error for DowncastError<O> {}
// ++++++++++++++++++++ Downcast ++++++++++++++++++++
/// Checked conversions from an erased `Self` into concrete type `T`.
pub trait Downcast<T>: Any
where T: Any
{
    /// True if the erased value's concrete type is exactly `T`.
    fn is_type(&self) -> bool { self.type_id() == TypeId::of::<T>() }
    /// Borrow as `&T`, or report the expected/found type names.
    fn downcast_ref(&self) -> Result<&T, TypeMismatch> {
        if self.is_type() {
            // Unwrap cannot fail: the type id was checked just above.
            Ok(self.as_any().downcast_ref().unwrap())
        } else {
            Err(TypeMismatch::new::<T, Self>(self))
        }
    }
    /// Borrow as `&mut T`, or report the expected/found type names.
    fn downcast_mut(&mut self) -> Result<&mut T, TypeMismatch> {
        if self.is_type() {
            Ok(self.as_any_mut().downcast_mut().unwrap())
        } else {
            Err(TypeMismatch::new::<T, Self>(self))
        }
    }
    /// Consume a box; on failure the box is handed back inside the error.
    #[cfg(feature = "std")]
    fn downcast(self: Box<Self>) -> Result<Box<T>, DowncastError<Box<Self>>> {
        if self.is_type() {
            Ok(self.into_any().downcast().unwrap())
        } else {
            let mismatch = TypeMismatch::new::<T, Self>(&*self);
            Err(DowncastError::new(mismatch, self))
        }
    }
    /// Consume an `Rc`; on failure the `Rc` is handed back inside the error.
    #[cfg(feature = "std")]
    fn downcast_rc(self: Rc<Self>) -> Result<Rc<T>, DowncastError<Rc<Self>>> {
        if self.is_type() {
            Ok(self.into_any_rc().downcast().unwrap())
        } else {
            let mismatch = TypeMismatch::new::<T, Self>(&*self);
            Err(DowncastError::new(mismatch, self))
        }
    }
}
/// `Downcast` plus `Arc` support, for thread-safe trait objects.
#[cfg(feature = "std")]
pub trait DowncastSync<T>: Downcast<T> + AnySync
where T: AnySync
{
    /// Consume an `Arc`; on failure the `Arc` is handed back inside the error.
    fn downcast_arc(self: Arc<Self>) -> Result<Arc<T>, DowncastError<Arc<Self>>> {
        if self.is_type() {
            Ok(self.into_any_arc().downcast().unwrap())
        } else {
            let mismatch = TypeMismatch::new::<T, Self>(&*self);
            Err(DowncastError::new(mismatch, self))
        }
    }
}
// ++++++++++++++++++++ macros ++++++++++++++++++++
#[doc(hidden)]
pub mod _std {
    // Re-exports std (or core on no_std) so the generated code below can
    // reference library paths via `$crate::_std::...` in any environment.
    #[cfg(feature = "std")]
    pub use std::*;
    #[cfg(not(feature = "std"))]
    pub use core::*;
}
/// Implements [`Downcast`](trait.Downcast.html) for your trait-object-type.
///
/// ```ignore
/// impl_downcast!(Foo);
/// impl_downcast!(<B> Foo<B> where B: Bar);
/// impl_downcast!(<B> Foo<Bar = B>);
/// ```
///
/// expands to
///
/// ```ignore
/// impl<T> Downcast<T> for Foo
/// where T: Any
/// {}
///
/// impl<T, B> Downcast<T> for Foo<B>
/// where T: Any, B: Bar
/// {}
///
/// impl<T, B> Downcast<T> for Foo<Bar = B>
/// where T: Any
/// {}
/// ```
#[macro_export]
macro_rules! impl_downcast {
    // Generic form: leading `<...>` type parameters plus optional bounds.
    (<$($params:ident),+ $(,)*> $base:ty $(where $($bounds:tt)+)*) => {
        impl<_T, $($params),+> $crate::Downcast<_T> for $base
        where _T: $crate::Any, $($params: 'static,)* $($($bounds)+)*
        {}
    };
    // Simple, non-generic form.
    ($base:ty) => {
        impl<_T> $crate::Downcast<_T> for $base
        where _T: $crate::Any
        {}
    };
}
/// Implements [`Downcast`](trait.Downcast.html) and [`DowncastSync`](trait.DowncastSync.html) for your trait-object-type.
#[cfg(feature = "std")]
#[macro_export]
macro_rules! impl_downcast_sync {
    (<$($params:ident),+ $(,)*> $base:ty $(where $($bounds:tt)+)*) => {
        impl<_T, $($params),+> $crate::Downcast<_T> for $base
        where _T: $crate::Any, $($params: 'static,)* $($($bounds)+)*
        {}
        impl<_T, $($params),+> $crate::DowncastSync<_T> for $base
        where _T: $crate::AnySync, $($params: 'static,)* $($($bounds)+)*
        {}
    };
    ($base:ty) => {
        impl<_T> $crate::Downcast<_T> for $base
        where _T: $crate::Any
        {}
        impl<_T> $crate::DowncastSync<_T> for $base
        where _T: $crate::AnySync
        {}
    };
}
// Generates inherent convenience methods (`is`, `downcast_ref`, `downcast_mut`)
// on a trait-object type; works in both std and no_std builds.
#[doc(hidden)]
#[macro_export]
macro_rules! downcast_methods_core {
    // `@items` emits the method bodies shared by all entry forms below.
    (@items) => {
        #[allow(unused, missing_docs)]
        pub fn is<_T>(&self) -> bool
        where _T: $crate::Any, Self: $crate::Downcast<_T>
        {
            $crate::Downcast::<_T>::is_type(self)
        }
        #[allow(unused, missing_docs)]
        pub fn downcast_ref<_T>(&self) -> $crate::_std::result::Result<&_T, $crate::TypeMismatch>
        where _T: $crate::Any, Self: $crate::Downcast<_T>
        {
            $crate::Downcast::<_T>::downcast_ref(self)
        }
        #[allow(unused, missing_docs)]
        pub fn downcast_mut<_T>(&mut self) -> $crate::_std::result::Result<&mut _T, $crate::TypeMismatch>
        where _T: $crate::Any, Self: $crate::Downcast<_T>
        {
            $crate::Downcast::<_T>::downcast_mut(self)
        }
    };
    (<$($params:ident),+ $(,)*> $base:ty $(where $($bounds:tt)+)*) => {
        impl<$($params),+> $base
        where $($params: 'static,)* $($($bounds)+)*
        {
            $crate::downcast_methods_core!(@items);
        }
    };
    ($base:ty) => {
        impl $base {
            $crate::downcast_methods_core!(@items);
        }
    };
}
// Core methods plus the owning `Box`/`Rc` variants (std only).
#[doc(hidden)]
#[macro_export]
macro_rules! downcast_methods_std {
    (@items) => {
        $crate::downcast_methods_core!(@items);
        #[allow(unused, missing_docs)]
        pub fn downcast<_T>(self: $crate::_std::boxed::Box<Self>) -> $crate::_std::result::Result<$crate::_std::boxed::Box<_T>, $crate::DowncastError<$crate::_std::boxed::Box<Self>>>
        where _T: $crate::Any, Self: $crate::Downcast<_T>
        {
            $crate::Downcast::<_T>::downcast(self)
        }
        #[allow(unused, missing_docs)]
        pub fn downcast_rc<_T>(self: $crate::_std::rc::Rc<Self>) -> $crate::_std::result::Result<$crate::_std::rc::Rc<_T>, $crate::DowncastError<$crate::_std::rc::Rc<Self>>>
        where _T: $crate::Any, Self: $crate::Downcast<_T>
        {
            $crate::Downcast::<_T>::downcast_rc(self)
        }
    };
    (<$($params:ident),+ $(,)*> $base:ty $(where $($bounds:tt)+)*) => {
        impl<$($params),+> $base
        $(where $($bounds)+)*
        {
            $crate::downcast_methods_std!(@items);
        }
    };
    ($base:ty) => {
        impl $base {
            $crate::downcast_methods_std!(@items);
        }
    };
}
// std methods plus the `Arc` variant, for `DowncastSync` types.
#[doc(hidden)]
#[cfg(feature = "std")]
#[macro_export]
macro_rules! downcast_sync_methods {
    (@items) => {
        $crate::downcast_methods_std!(@items);
        #[allow(unused, missing_docs)]
        pub fn downcast_arc<_T>(self: $crate::_std::sync::Arc<Self>) -> $crate::_std::result::Result<$crate::_std::sync::Arc<_T>, $crate::DowncastError<$crate::_std::sync::Arc<Self>>>
        where _T: $crate::AnySync, Self: $crate::DowncastSync<_T>
        {
            $crate::DowncastSync::<_T>::downcast_arc(self)
        }
    };
    (<$($params:ident),+ $(,)*> $base:ty $(where $($bounds:tt)+)*) => {
        impl<$($params),+> $base
        $(where $($bounds)+)*
        {
            $crate::downcast_sync_methods!(@items);
        }
    };
    ($base:ty) => {
        impl $base {
            $crate::downcast_sync_methods!(@items);
        }
    };
}
/// Generate `downcast`-methods for your trait-object-type.
///
/// ```ignore
/// downcast_methods!(Foo);
/// downcast_methods!(<B> Foo<B> where B: Bar);
/// downcast_methods!(<B> Foo<Bar = B>);
/// ```
///
/// ```ignore
/// impl dyn Foo {
///     /* impl<B> dyn Foo<B> where B: Bar { */
///     /* impl<B> dyn Foo<Bar = B> { */
///
///     pub fn is<T>(&self) -> bool
///     where T: Any, Self: Downcast<T>
///     { ... }
///
///     pub fn downcast_ref<T>(&self) -> Result<&T, TypeMismatch>
///     where T: Any, Self: Downcast<T>
///     { ... }
///
///     pub fn downcast_mut<T>(&mut self) -> Result<&mut T, TypeMismatch>
///     where T: Any, Self: Downcast<T>
///     { ... }
/// }
/// ```
// no_std variant: only the borrowing methods are available.
#[cfg(not(feature = "std"))]
#[macro_export]
macro_rules! downcast_methods {
    ($($tt:tt)+) => { $crate::downcast_methods_core!($($tt)+); }
}
/// Generate `downcast`-methods for your trait-object-type.
///
/// ```ignore
/// downcast_methods!(Foo);
/// downcast_methods!(<B> Foo<B> where B: Bar);
/// downcast_methods!(<B> Foo<Bar = B>);
/// ```
///
/// ```ignore
/// impl dyn Foo {
///     /* impl<B> dyn Foo<B> where B: Bar { */
///     /* impl<B> dyn Foo<Bar = B> { */
///
///     pub fn is<T>(&self) -> bool
///     where T: Any, Self: Downcast<T>
///     { ... }
///
///     pub fn downcast_ref<T>(&self) -> Result<&T, TypeMismatch>
///     where T: Any, Self: Downcast<T>
///     { ... }
///
///     pub fn downcast_mut<T>(&mut self) -> Result<&mut T, TypeMismatch>
///     where T: Any, Self: Downcast<T>
///     { ... }
///
///     pub fn downcast<T>(self: Box<Self>) -> Result<Box<T>, DowncastError<Box<T>>>
///     where T: Any, Self: Downcast<T>
///     { ... }
/// }
/// ```
#[cfg(feature = "std")]
#[macro_export]
macro_rules! downcast_methods {
    ($($tt:tt)+) => { $crate::downcast_methods_std!($($tt)+); }
}
/// Implements [`Downcast`](trait.Downcast.html) and generates
/// `downcast`-methods for your trait-object-type.
///
/// See [`impl_downcast`](macro.impl_downcast.html),
/// [`downcast_methods`](macro.downcast_methods.html).
#[macro_export]
macro_rules! downcast {
    ($($tt:tt)+) => {
        $crate::impl_downcast!($($tt)+);
        $crate::downcast_methods!($($tt)+);
    }
}
/// Implements [`DowncastSync`](trait.DowncastSync.html) and generates
/// `downcast`-methods for your trait-object-type.
///
/// See [`impl_downcast_sync`](macro.impl_downcast.html),
/// [`downcast_sync_methods`](macro.downcast_methods.html).
#[cfg(feature = "std")]
#[macro_export]
macro_rules! downcast_sync {
    ($($tt:tt)+) => {
        $crate::impl_downcast_sync!($($tt)+);
        $crate::downcast_sync_methods!($($tt)+);
    }
}
// NOTE: We only implement the trait, because implementing the methods won't
// be possible when we replace downcast::Any by std::any::Any.
downcast!(dyn Any);
downcast!((dyn Any + Send));
downcast!((dyn Any + Sync));
#[cfg(feature = "std")]
downcast_sync!(dyn AnySync);
|
extern crate reqwest;
mod token;
mod track;
mod track_types;
use reqwest::{
Client,
};
/// Fetches a Spotify access token, then prints metadata for a fixed demo track.
fn main() {
    let client = Client::new();
    let token = token::retrieve_access_token(&client)
        .expect("Error in access token")
        .access_token;
    // Hard-coded Spotify track id used for the demo lookup.
    let track_info = track::get_track(&client, &token[..], "3JIxjvbbDrA9ztYlNcp3yL")
        .expect("Error in getting track");
    println!("{:?}", track_info);
}
|
use diesel::prelude::*;
use super::db_connection::*;
use crate::db::models::AuthInfoEntity;
use crate::db::models::AuthInfo;
use crate::schema::auth_infos::dsl::*;
/// Returns the first auth-info row for `uid`, or `None` when the user has none.
///
/// Panics with "ErrorLoadingAuthInfo" if the query itself fails.
pub fn fetch_auth_info_by_user_id(database_url: &String, uid: i32) -> Option<AuthInfoEntity> {
    use crate::schema::auth_infos::dsl::*;
    let connection = db_connection(&database_url);
    let auth_info_by_uid: Vec<AuthInfoEntity> = auth_infos
        .filter(user_id.eq(uid))
        .load::<AuthInfoEntity>(&connection)
        .expect("ErrorLoadingAuthInfo");
    // Take the first matching row if any; replaces the manual
    // `len() == 0` check + `remove(0)` (and drops the needless `mut`).
    auth_info_by_uid.into_iter().next()
}
/// Inserts `auth` and returns the stored row (with generated columns filled in).
///
/// Panics with "ErrorSavingAuthInfo" if the insert fails.
pub fn insert_auth_info(database_url: &String, auth: AuthInfo) -> AuthInfoEntity {
    let connection = db_connection(&database_url);
    diesel::insert_into(auth_infos)
        .values(auth)
        .get_result(&connection)
        .expect("ErrorSavingAuthInfo")
}
|
pub(crate) mod dir;
pub(crate) mod fadvise;
pub(crate) mod file;
use crate::dir::SeekLoc;
use std::io::Result;
impl SeekLoc {
    /// Wraps a raw directory seek location in a `SeekLoc`.
    ///
    /// # Safety
    /// `loc` must be a value previously obtained from the OS for the same
    /// directory stream; no validation is performed here.
    // NOTE(review): currently infallible — the `Result` return presumably
    // anticipates platform-specific validation; confirm before relying on Err.
    pub unsafe fn from_raw(loc: i64) -> Result<Self> {
        let loc = loc.into();
        Ok(Self(loc))
    }
}
|
use srt::SrtSocketBuilder;
use futures::try_join;
#[tokio::test]
async fn rendezvous() {
    // Rendezvous connect: each side binds the port the other side targets
    // (5001 -> 5000 and 5000 -> 5001), so both futures must be driven
    // concurrently for the handshake to complete.
    let a = SrtSocketBuilder::new_rendezvous("127.0.0.1:5000")
        .local_port(5001)
        .connect();
    let b = SrtSocketBuilder::new_rendezvous("127.0.0.1:5001")
        .local_port(5000)
        .connect();
    // try_join! polls both at once; unwrap fails the test on either error.
    let _ = try_join!(a, b).unwrap();
}
|
use std::sync::Arc;
use std::path::Path;
use super::TaskResult;
use worker::state::State;
use worker::graph::TaskRef;
use worker::data::{Data, DataBuilder};
use futures::{future, Future};
/// Task that merges all input blobs into a single output blob (concatenation).
pub fn task_concat(_state: &mut State, task_ref: TaskRef) -> TaskResult {
    let inputs = {
        let task = task_ref.get();
        task.inputs_data()
    };
    // Validate eagerly so the error names the offending input index.
    for (i, input) in inputs.iter().enumerate() {
        if !input.is_blob() {
            bail!("Input {} object is not blob", i);
        }
    }
    // The actual copying runs lazily, when the returned future is polled.
    Ok(Box::new(future::lazy(move || {
        // Pre-size the builder with the total of all input sizes.
        let result_size: usize = inputs.iter().map(|d| d.size()).sum();
        let mut builder = DataBuilder::new();
        builder.set_size(result_size);
        for input in inputs {
            builder.write_blob(&input).unwrap();
        }
        let result = builder.build();
        let output = task_ref.get().output(0);
        output.get_mut().set_data(Arc::new(result));
        Ok(())
    })))
}
/// Task that returns the input argument after a given number of milliseconds
/// (the delay is read from the task's "config" attribute).
pub fn task_sleep(state: &mut State, task_ref: TaskRef) -> TaskResult {
    let sleep_ms: u64 = {
        let task = task_ref.get();
        task.check_number_of_args(1)?;
        task.attributes.get("config")?
    };
    debug!("Starting sleep task for {} ms", sleep_ms);
    let duration = ::std::time::Duration::from_millis(sleep_ms);
    Ok(Box::new(
        state
            .timer()
            .sleep(duration)
            .map_err(|e| e.into())
            .map(move |()| {
                // Once the timer fires, forward input 0 unchanged to output 0.
                {
                    let task = task_ref.get();
                    let output = task.output(0);
                    output.get_mut().set_data(task.input_data(0));
                }
                ()
            }),
    ))
}
#[derive(Deserialize)]
struct OpenConfig {
    // Absolute path of the external file to import.
    path: String,
}
/// Open an external file (path from the "config" attribute) and expose it as
/// this task's only output.
pub fn task_open(state: &mut State, task_ref: TaskRef) -> TaskResult {
    {
        let task = task_ref.get();
        task.check_number_of_args(0)?;
    }
    let state_ref = state.self_ref();
    Ok(Box::new(future::lazy(move || {
        {
            let task = task_ref.get();
            let config: OpenConfig = task.attributes.get("config")?;
            let path = Path::new(&config.path);
            // Relative paths are rejected: the worker's cwd is not meaningful here.
            if !path.is_absolute() {
                bail!("Path {:?} is not absolute", path);
            }
            let target_path = state_ref.get().work_dir().new_path_for_dataobject();
            // Presumably copies the file into the work dir — confirm whether
            // `new_by_fs_copy` copies or links.
            let data = Data::new_by_fs_copy(&path, target_path)?;
            let output = task_ref.get().output(0);
            output.get_mut().set_data(Arc::new(data));
        }
        Ok(())
    })))
}
#[derive(Deserialize)]
struct ExportConfig {
    // Absolute destination path on the external file system.
    path: String,
}
/// Export this task's single input to an external file system path
/// (path from the "config" attribute).
pub fn task_export(_: &mut State, task_ref: TaskRef) -> TaskResult {
    {
        let task = task_ref.get();
        task.check_number_of_args(1)?;
    }
    Ok(Box::new(future::lazy(move || {
        let task = task_ref.get();
        let config: ExportConfig = task.attributes.get("config")?;
        let path = Path::new(&config.path);
        // Relative paths are rejected: the worker's cwd is not meaningful here.
        if !path.is_absolute() {
            bail!("Path {:?} is not absolute", path);
        }
        let input = task.input_data(0);
        input.export_to_path(path)
    })))
}
|
use ifstructs::ifreq;
use mio::{unix::SourceFd, Events, Interest, Poll, Token};
use nix::libc;
use pnet::packet::ipv4::Ipv4Packet;
use std::{
fs::{File, OpenOptions},
io::{self, prelude::*},
net::{Ipv4Addr, SocketAddr, UdpSocket},
os::unix::io::{AsRawFd, RawFd},
};
// TUNSETIFF ioctl request number (Linux) used to attach/configure the tun
// interface on the opened /dev/net/tun fd.
const TUNSETIFF: libc::c_ulong = 0x400454CA;
/// A Linux TUN device: the open `/dev/net/tun` handle plus the interface name
/// the kernel actually assigned.
#[derive(Debug)]
pub struct TunDevice {
    name: String,
    fd: File,
}
impl TunDevice {
    /// Opens `/dev/net/tun` and attaches a TUN interface named `name`
    /// (IFF_TUN | IFF_NO_PI); the name the kernel actually assigned is read
    /// back out of the ioctl'd ifreq.
    pub fn new(name: &str) -> io::Result<Self> {
        let mut ifr = ifreq::from_name(name)?;
        // IFF_NO_PI: reads/writes carry raw packets without the 4-byte
        // packet-information prefix.
        ifr.ifr_ifru.ifr_flags = (libc::IFF_TUN | libc::IFF_NO_PI) as _;
        let fd = OpenOptions::new()
            .read(true)
            .write(true)
            .open("/dev/net/tun")?;
        match unsafe {
            libc::ioctl(
                fd.as_raw_fd(),
                TUNSETIFF,
                &mut ifr as *mut ifreq as *mut libc::c_void,
            )
        } {
            -1 => Err(std::io::Error::last_os_error()),
            _ => Ok(()),
        }?;
        let tun = Self {
            name: ifr.get_name()?,
            fd,
        };
        Ok(tun)
    }
    /// Sets IFF_UP on the interface (read-modify-write of the flag word).
    pub fn up(&self) -> io::Result<()> {
        let mut ifr = ifreq::from_name(&self.name)?;
        if_ioctl(libc::SIOCGIFFLAGS, &mut ifr)?;
        ifr.set_flags(ifr.get_flags() | libc::IFF_UP as libc::c_short);
        if_ioctl(libc::SIOCSIFFLAGS, &mut ifr)
    }
    /// Clears IFF_UP on the interface.
    pub fn down(&self) -> io::Result<()> {
        let mut ifr = ifreq::from_name(&self.name)?;
        if_ioctl(libc::SIOCGIFFLAGS, &mut ifr)?;
        ifr.set_flags(ifr.get_flags() & !(libc::IFF_UP as libc::c_short));
        if_ioctl(libc::SIOCSIFFLAGS, &mut ifr)
    }
    /// Reads the interface's IPv4 address (SIOCGIFADDR).
    pub fn get_addr(&self) -> io::Result<Ipv4Addr> {
        let mut ifr = ifreq::from_name(&self.name)?;
        if_ioctl(libc::SIOCGIFADDR, &mut ifr)?;
        unsafe {
            // SAFETY(review): reinterprets the sockaddr union as sockaddr_in and
            // in_addr as Ipv4Addr. Assumes AF_INET and layout compatibility of
            // Ipv4Addr with in_addr — confirm that guarantee for the toolchain.
            let addr: &libc::sockaddr_in = std::mem::transmute(&ifr.ifr_ifru.ifr_addr);
            Ok(std::mem::transmute(addr.sin_addr))
        }
    }
    /// Sets the interface's IPv4 address (SIOCSIFADDR).
    pub fn set_addr(&self, addr: Ipv4Addr) -> io::Result<&Self> {
        let mut ifr = ifreq::from_name(&self.name)?;
        unsafe {
            // SAFETY(review): see get_addr — same layout assumptions apply.
            let mut saddr_in: &mut libc::sockaddr_in =
                std::mem::transmute(&mut ifr.ifr_ifru.ifr_addr);
            saddr_in.sin_addr = std::mem::transmute(addr);
            saddr_in.sin_family = libc::AF_INET as _;
        }
        if_ioctl(libc::SIOCSIFADDR, &mut ifr)?;
        Ok(self)
    }
    /// Reads the point-to-point destination address (SIOCGIFDSTADDR).
    pub fn get_dstaddr(&self) -> io::Result<Ipv4Addr> {
        let mut ifr = ifreq::from_name(&self.name)?;
        if_ioctl(libc::SIOCGIFDSTADDR, &mut ifr)?;
        unsafe {
            // SAFETY(review): see get_addr — same layout assumptions apply.
            let addr: &libc::sockaddr_in = std::mem::transmute(&ifr.ifr_ifru.ifr_dstaddr);
            Ok(std::mem::transmute(addr.sin_addr))
        }
    }
    /// Sets the point-to-point destination address (SIOCSIFDSTADDR).
    pub fn set_dstaddr(&self, dstaddr: Ipv4Addr) -> io::Result<&Self> {
        let mut ifr = ifreq::from_name(&self.name)?;
        unsafe {
            // SAFETY(review): see get_addr — same layout assumptions apply.
            let mut saddr_in: &mut libc::sockaddr_in =
                std::mem::transmute(&mut ifr.ifr_ifru.ifr_dstaddr);
            saddr_in.sin_addr = std::mem::transmute(dstaddr);
            saddr_in.sin_family = libc::AF_INET as _;
        }
        if_ioctl(libc::SIOCSIFDSTADDR, &mut ifr)?;
        Ok(self)
    }
    /// Reads the interface's netmask (SIOCGIFNETMASK).
    pub fn get_netmask(&self) -> io::Result<Ipv4Addr> {
        let mut ifr = ifreq::from_name(&self.name)?;
        if_ioctl(libc::SIOCGIFNETMASK, &mut ifr)?;
        unsafe {
            // SAFETY(review): see get_addr — same layout assumptions apply.
            let addr: &libc::sockaddr_in = std::mem::transmute(&ifr.ifr_ifru.ifr_netmask);
            Ok(std::mem::transmute(addr.sin_addr))
        }
    }
    /// Sets the interface's netmask (SIOCSIFNETMASK).
    pub fn set_netmask(&self, netmask: Ipv4Addr) -> io::Result<&Self> {
        let mut ifr = ifreq::from_name(&self.name)?;
        unsafe {
            // SAFETY(review): see get_addr — same layout assumptions apply.
            let mut saddr_in: &mut libc::sockaddr_in =
                std::mem::transmute(&mut ifr.ifr_ifru.ifr_netmask);
            saddr_in.sin_addr = std::mem::transmute(netmask);
            saddr_in.sin_family = libc::AF_INET as _;
        }
        if_ioctl(libc::SIOCSIFNETMASK, &mut ifr)?;
        Ok(self)
    }
    /// Raw fd of the tun device (e.g. for registering with a poller).
    pub fn get_rawfd(&self) -> RawFd {
        self.fd.as_raw_fd()
    }
    /// Reads one packet from the device into `buf`.
    pub fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.fd.read(buf)
    }
    /// Writes one packet to the device.
    // NOTE(review): `buf` is only read here; `&[u8]` would be a better fit.
    pub fn write(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.fd.write(buf)
    }
}
/// Runs a network-interface ioctl `request` against a throwaway UDP socket
/// (any AF_INET socket fd works as the ioctl target).
///
/// Fix: bind to port 0 so the OS picks a free ephemeral port. The previous
/// hard-coded port 55555 failed with `EADDRINUSE` when the port was taken
/// or when two configuration calls raced.
///
/// # Errors
/// Returns the OS error from `bind` or from the ioctl itself.
fn if_ioctl(request: libc::c_ulong, ifr: &mut ifreq) -> io::Result<()> {
    let sock = UdpSocket::bind("0.0.0.0:0")?;
    // SAFETY: `sock` is a valid descriptor for the duration of the call and
    // `ifr` is a caller-owned, properly initialised ifreq.
    unsafe {
        match libc::ioctl(
            sock.as_raw_fd(),
            request,
            ifr as *mut ifreq as *mut libc::c_void,
        ) {
            -1 => Err(io::Error::last_os_error()),
            _ => Ok(()),
        }
    }
}
/// A UDP-transported tunnel between two local TUN devices and a remote peer.
#[derive(Debug)]
pub struct Tunnel {
    // The two local TUN endpoints (tokens 0 and 1 in the event loop).
    tuns: [TunDevice; 2],
    // Local address the UDP transport socket binds to.
    addr: SocketAddr,
    // Remote peer all captured packets are forwarded to.
    dst: SocketAddr,
}
impl Tunnel {
    /// Bundles two TUN devices with the local bind address and the remote
    /// peer address of the UDP transport.
    pub fn new(tuns: [TunDevice; 2], addr: SocketAddr, dst: SocketAddr) -> Self {
        Self { tuns, addr, dst }
    }
    /// Event loop: forwards IPv4 packets read from either TUN device to the
    /// remote peer over UDP, and writes packets received from the peer back
    /// into a TUN device. Runs until an I/O error occurs.
    pub fn tunnel(&mut self) -> io::Result<()> {
        let socket = UdpSocket::bind(self.addr)?;
        socket.set_nonblocking(true)?;
        let mut poll = Poll::new()?;
        let mut events = Events::with_capacity(3);
        // Tokens 0 and 1 are the two TUN fds; token 2 is the UDP socket.
        for i in 0..2usize {
            poll.registry().register(
                &mut SourceFd(&self.tuns[i].get_rawfd()),
                Token(i),
                Interest::READABLE,
            )?;
        }
        poll.registry().register(
            &mut SourceFd(&socket.as_raw_fd()),
            Token(2),
            Interest::READABLE,
        )?;
        // 1500 bytes matches the default Ethernet MTU.
        let mut buf = [0u8; 1500];
        loop {
            poll.poll(&mut events, None)?;
            for event in events.iter() {
                match event.token() {
                    Token(idx @ 0..=1) => {
                        let len = self.tuns[idx].read(&mut buf)?;
                        eprint!("read {} bytes from {}. ", len, self.tuns[idx].name);
                        // Forward only frames that parse as IPv4; everything
                        // else (e.g. IPv6 autoconf traffic) is dropped.
                        if probably_ipv4(&buf) {
                            let send_len = socket.send_to(&mut buf[..len], &self.dst)?;
                            eprintln!("{} bytes send.", send_len);
                        } else {
                            eprintln!("drop");
                        }
                    }
                    Token(2) => {
                        // NOTE(review): inbound packets always go to tuns[0];
                        // confirm tuns[1] is intentionally send-only.
                        let (read_len, _) = socket.recv_from(&mut buf)?;
                        let write_len = self.tuns[0].write(&mut buf[..read_len])?;
                        eprintln!(
                            "read {} bytes from socket. {} bytes wrote.",
                            read_len, write_len
                        );
                    }
                    _ => unreachable!(),
                };
                // NOTE(review): only the first event of each wake-up is
                // handled before re-polling; with edge-triggered readiness
                // this could delay remaining events — verify intentional.
                break;
            }
        }
    }
}
/// Heuristic check that `data` holds an IPv4 packet: it must parse as an
/// `Ipv4Packet` and carry version number 4 in its header.
fn probably_ipv4(data: &[u8]) -> bool {
    Ipv4Packet::new(data).map_or(false, |pkt| pkt.get_version() == 4u8)
}
|
// Copyright (c) 2018 The rust-bitcoin developers
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! Constants for various bitcoin-like cryptocurrency networks.
//!
//! The data provided for each currency includes:
//! * the human readable part as authoritatively maintained in [SLIP-0173](https://github.com/satoshilabs/slips/blob/master/slip-0173.md)
//! * the network's magic bytes as defined in their respective git repository
//!
//! The data provided for bitcoin only for now includes (other currencies may panic):
//! * chain parameters
//!
//! Please check if all constants you want to use are actually implemented by this library to avoid
//! panics.
//!
//! PRs adding new networks for the existing currencies (e.g. regtest) and constants not yet
//! included are very welcome. Please provide credible sources for magic bytes etc. in comments
//! to make review easier.
#![deny(missing_docs)]
#![deny(non_upper_case_globals)]
#![deny(non_camel_case_types)]
#![deny(non_snake_case)]
#![deny(unused_mut)]
extern crate bitcoin_hashes;
use bitcoin_hashes::sha256d;
use std::{fmt, ops};
pub mod networks;
/// Represents a bitcoin-like network for which it can provide encoding, network and consensus
/// constants.
///
/// Thin newtype around a boxed `NetworkConstants` trait object; all constant
/// accessors are reachable through `Deref`.
pub struct Network(Box<NetworkConstants>);
impl Network {
    /// Creates a new `Network` object from a trait object that provides network constants
    pub fn from_box(trait_obj: Box<NetworkConstants>) -> Network {
        Network(trait_obj)
    }
    /// Creates a `Network` object representing the bitcoin mainnet
    pub fn bitcoin() -> Network {
        Self::from_box(networks::Bitcoin::new())
    }
    /// Creates a `Network` object representing the bitcoin testnet
    pub fn bitcoin_testnet() -> Network {
        Self::from_box(networks::BitcoinTestnet::new())
    }
    /// Creates a `Network` object representing the bitcoin regtest
    pub fn bitcoin_regtest() -> Network {
        Self::from_box(networks::BitcoinRegtest::new())
    }
}
impl Clone for Network {
    /// Clones by delegating to `NetworkConstants::clone_boxed`, since a
    /// boxed trait object cannot derive `Clone`.
    fn clone(&self) -> Self {
        Self::from_box(self.0.clone_boxed())
    }
}
impl ops::Deref for Network {
    type Target = Box<NetworkConstants>;
    /// Lets callers invoke `NetworkConstants` methods directly on a `Network`.
    fn deref(&self) -> &Box<NetworkConstants> {
        &self.0
    }
}
impl fmt::Debug for Network {
    /// Debug form shows only the network's name; the remaining constants
    /// are elided as `...`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Network{{name: '{}', ...}}", self.name())
    }
}
/// Provides network constants for a bitcoin-like crypto currency
///
/// Implementations live in the `networks` module; `Network` wraps one of
/// them as a boxed trait object.
pub trait NetworkConstants {
    /// Returns the Human-readable part for the given network
    fn hrp(&self) -> &'static str;
    /// Returns the prefix byte for legacy p2pk addresses
    fn p2pk_prefix(&self) -> u8;
    /// Returns the prefix byte for legacy p2pkh addresses
    fn p2pkh_prefix(&self) -> u8;
    /// Returns the prefix byte for legacy p2sh addresses
    fn p2sh_prefix(&self) -> u8;
    /// Returns the prefix bytes for encoding xpub keys
    fn xpub_prefix(&self) -> &'static [u8; 4];
    /// Returns the prefix bytes for encoding xpriv keys
    fn xpriv_prefix(&self) -> &'static [u8; 4];
    /// Returns the prefix byte for encoding private keys as WIF
    fn wif_prefix(&self) -> u8;
    /// Returns the network's magic bytes
    fn magic(&self) -> u32;
    /// Returns a string representation of the networks identity (a.k.a. name)
    fn name(&self) -> &'static str;
    /// Describes the nature of the network (production/testing)
    fn network_type(&self) -> NetworkType;
    /// Returns parameters for the chain's consensus
    fn chain_params(&self) -> ChainParams;
    /// Returns the hash of the genesis block
    fn genesis_block(&self) -> sha256d::Hash;
    /// Creates a boxed copy of `self`
    ///
    /// Needed because `Clone` is not object-safe; `Network::clone`
    /// delegates to this.
    fn clone_boxed(&self) -> Box<NetworkConstants>;
}
/// Describes the nature of the network
///
/// Returned by `NetworkConstants::network_type`.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum NetworkType {
    /// Public production network with real economic activity
    Mainnet,
    /// Public network without real economic activity, for testing purposes only
    Testnet,
    /// Private testnet, typically created and controlled by a single actor
    Regtest,
}
/// Parameters that influence chain consensus.
#[derive(Debug, Clone)]
pub struct ChainParams {
    /// Time when BIP16 becomes active.
    pub bip16_time: u32,
    /// Block height at which BIP34 becomes active.
    pub bip34_height: u32,
    /// Block height at which BIP65 becomes active.
    pub bip65_height: u32,
    /// Block height at which BIP66 becomes active.
    pub bip66_height: u32,
    /// Minimum blocks including miner confirmation of the total of 2016 blocks in a retargeting period,
    /// (nPowTargetTimespan / nPowTargetSpacing) which is also used for BIP9 deployments.
    /// Examples: 1916 for 95%, 1512 for testchains.
    pub rule_change_activation_threshold: u32,
    /// Number of blocks with the same set of rules.
    pub miner_confirmation_window: u32,
    /// Proof of work limit value. It contains the lowest possible difficulty.
    pub pow_limit: [u64; 4],
    /// Expected amount of time to mine one block.
    pub pow_target_spacing: u64,
    /// Difficulty recalculation interval.
    pub pow_target_timespan: u64,
    /// Determines whether minimal difficulty may be used for blocks or not.
    pub allow_min_difficulty_blocks: bool,
    /// Determines whether retargeting is disabled for this network or not.
    pub no_pow_retargeting: bool,
}
#[cfg(test)]
mod tests {
    use ::{Network};
    /// All networks constructible through the `Network` helper constructors.
    fn all_networks() -> Vec<Network> {
        vec![Network::bitcoin(), Network::bitcoin_testnet(), Network::bitcoin_regtest()]
    }
    // The Debug representation must mention the network's name.
    #[test]
    fn debug() {
        for n in all_networks() {
            assert!(format!("{:?}", n).contains(n.name()));
        }
    }
    // Every accessor must be implemented (not panic) for the networks
    // exposed above — see the crate docs' warning about partial coverage.
    #[test]
    fn dont_panic() {
        for n in all_networks() {
            let _ = n.hrp();
            let _ = n.p2pk_prefix();
            let _ = n.p2pkh_prefix();
            let _ = n.p2sh_prefix();
            let _ = n.xpub_prefix();
            let _ = n.xpriv_prefix();
            let _ = n.wif_prefix();
            let _ = n.magic();
            let _ = n.name();
            let _ = n.network_type();
            let _ = n.chain_params();
            let _ = n.genesis_block();
            let _ = n.clone_boxed();
        }
    }
}
|
use libc::{c_char, c_int};
use std::ffi::CStr;
/// C entry point: evaluates the NUL-terminated expression in `line` and
/// stores the result through `solution`.
///
/// Returns 0 on success, 1 on any failure (null pointer, invalid UTF-8, or
/// evaluation error). Error details are printed to stderr.
#[no_mangle]
pub extern "C" fn solve(line: *const c_char, solution: *mut c_int) -> c_int {
    if line.is_null() || solution.is_null() {
        return 1;
    }
    // SAFETY: `line` is non-null and the caller supplies a NUL-terminated
    // C string that stays valid for the duration of this call.
    let raw = unsafe { CStr::from_ptr(line) };
    let text = match raw.to_str() {
        Ok(s) => s,
        Err(e) => {
            eprintln!("UTF-8 Error: {}", e);
            return 1;
        }
    };
    match evaluate(text) {
        Ok(value) => {
            // SAFETY: `solution` was checked non-null above and points to a
            // caller-owned c_int.
            unsafe { *solution = value as c_int };
            0
        }
        Err(e) => {
            eprintln!("Error: {}", e);
            1
        }
    }
}
/// Placeholder evaluator; always yields 1 until real parsing is implemented.
fn evaluate(_problem: &str) -> Result<i32, &'static str> {
    Ok(1)
}
|
//! This example is a quick and dirty example of
//! what someone might want to do with a JS token stream.
//! Essentially this is reading in the file and writing it out
//! with no comments. It successfully stripped all of the comments
//! out of a webpack output file though it cannot handle object literals
//! very well. It does a pretty good job of showing how you might use the Scanner.
extern crate docopt;
extern crate ress;
extern crate serde;
#[macro_use]
extern crate serde_derive;
use std::{
fs::{read_to_string, File},
io::{BufWriter, Write},
path::PathBuf,
string::ToString,
};
use docopt::Docopt;
use ress::prelude::*;
/// Scanner token whose string payload borrows from the input JS source.
type RefToken<'a> = Token<&'a str>;
// docopt usage string: requires an input path and an output path.
const USAGE: &str = "
clear-comments
Usage:
clear-comments <in-path> <out-path>
";
/// Reads the input JS file, re-emits it token by token with all comments
/// dropped, and writes the result to the output path. Indentation and line
/// breaks are re-synthesised from a small amount of token-level state
/// (brace depth, `for`-loop headers, `case` labels, unbraced `if` bodies).
fn main() {
    let opts: Opts = Docopt::new(USAGE)
        .and_then(|d| d.deserialize())
        .unwrap_or_else(|e| {
            println!("error: {:?}", e);
            e.exit()
        });
    let js = if let Ok(s) = read_to_string(opts.arg_in_path) {
        s
    } else {
        eprintln!("Unable to read in-path");
        ::std::process::exit(1);
    };
    let s = Scanner::new(&js);
    // Current indent depth (one level = four spaces on output).
    let mut indent = 0;
    let f = File::create(&opts.arg_out_path).expect("Error opening outfile");
    let mut out = BufWriter::new(f);
    // State carried between tokens; EoF acts as a neutral "no previous token".
    let mut last_token = Token::EoF;
    let mut new_line = false;
    let mut in_loop = false;
    let mut in_case = false;
    let mut in_if = false;
    let mut if_parens = 0;
    let mut unbraced_if = false;
    for item in s {
        let item = item.unwrap();
        println!("{:?}", item);
        let token = item.token;
        // Track `if (...)` so an unbraced body can be detected below.
        if token.matches_keyword(Keyword::If(())) {
            in_if = true;
        }
        if in_if && token.matches_punct(Punct::OpenParen) {
            if_parens += 1;
        }
        if in_if && token.matches_punct(Punct::CloseParen) {
            if_parens -= 1;
        }
        // Inside a `for (...)` header semicolons must NOT break the line.
        if last_token.matches_keyword(Keyword::For(())) {
            in_loop = true;
        }
        if last_token.matches_keyword(Keyword::Case(()))
            || last_token.matches_keyword(Keyword::Default(()))
        {
            in_case = true;
        }
        if last_token.matches_punct(Punct::Colon) && in_case {
            new_line = true;
        }
        if in_loop && last_token.matches_punct(Punct::CloseParen) {
            in_loop = false;
        }
        // The whole point: comments are dropped entirely.
        if token.is_comment() {
            continue;
        }
        if last_token.matches_punct(Punct::OpenBrace) {
            indent += 1;
            new_line = true;
        }
        // `if (...) stmt` without braces: indent one extra level for the
        // single statement, undone once that line is flushed.
        if in_if
            && if_parens == 0
            && last_token.matches_punct(Punct::CloseParen)
            && !token.is_punct()
        {
            unbraced_if = true;
            new_line = true;
            indent += 1;
        }
        if last_token.matches_punct(Punct::CloseParen) && !token.is_punct() {
            new_line = true;
        }
        if last_token.matches_punct(Punct::SemiColon) && !in_loop {
            new_line = true;
        }
        if last_token.matches_punct(Punct::CloseBrace) && !token.is_punct() {
            new_line = true;
        }
        if token.matches_punct(Punct::CloseBrace) {
            indent -= 1;
            new_line = !last_token.matches_punct(Punct::OpenBrace);
        }
        if last_token.is_comment() {
            new_line = true;
        }
        if new_line {
            out.write_all(format!("\n{}", "    ".repeat(indent)).as_bytes())
                .expect("error writing indent");
            new_line = false;
            in_if = false;
            if_parens = 0;
            if unbraced_if {
                indent -= 1;
                unbraced_if = false;
            }
        }
        if space_before(&last_token, &token) {
            out.write_all(b" ").expect("error writing space");
        }
        out.write_all(token_to_string(&token).as_bytes())
            .expect("Error writing token");
        last_token = token;
    }
}
/// Decides whether a space must be emitted between `last_token` and `token`.
///
/// The rules form an ordered first-match-wins chain — earlier, more specific
/// rules deliberately shadow the generic fallbacks at the bottom, so the
/// ORDER of these checks is load-bearing; do not re-sort them.
fn space_before(last_token: &RefToken, token: &RefToken) -> bool {
    // Assignment / comparison operators always get surrounding space.
    if last_token.matches_punct(Punct::Equal) || token.matches_punct(Punct::DoubleEqual) {
        return true;
    }
    // Member access (`a.b`, `this.b`) is written tight.
    if last_token.matches_punct(Punct::Period)
        && (token.is_ident() || token.matches_keyword(Keyword::This(())))
    {
        return false;
    }
    if (last_token.is_ident() || last_token.matches_keyword(Keyword::This(())))
        && token.matches_punct(Punct::Period)
    {
        return false;
    }
    // Keyword-specific rules.
    if token.matches_keyword(Keyword::If(())) {
        return false;
    }
    if last_token.matches_keyword(Keyword::If(())) {
        return true;
    }
    if last_token.matches_keyword(Keyword::Return(())) && !token.is_punct() {
        return true;
    }
    if last_token.matches_keyword(Keyword::For(())) {
        return true;
    }
    if last_token.matches_keyword(Keyword::Switch(())) {
        return true;
    }
    if last_token.matches_punct(Punct::Colon) {
        return true;
    }
    if token.matches_keyword(Keyword::This(())) {
        return false;
    }
    // Brackets, parens and braces are written tight on the inside.
    if token.matches_punct(Punct::OpenParen) {
        return false;
    }
    if token.matches_punct(Punct::CloseParen) {
        return false;
    }
    if token.matches_punct(Punct::CloseBracket) {
        return false;
    }
    if token.matches_punct(Punct::OpenBracket) {
        return false;
    }
    if token.matches_punct(Punct::CloseBrace) {
        return false;
    }
    if last_token.matches_punct(Punct::OpenBrace) {
        return false;
    }
    if last_token.matches_punct(Punct::CloseBrace) {
        return false;
    }
    // NOTE(review): unreachable — the `last CloseBrace` rule above already
    // returned false for any token; confirm whether `) {` spacing was meant
    // to be handled earlier in the chain.
    if last_token.matches_punct(Punct::CloseParen) && token.matches_punct(Punct::OpenBrace) {
        return true;
    }
    if last_token.matches_punct(Punct::OpenBracket) {
        return false;
    }
    if last_token.matches_punct(Punct::OpenParen) {
        return false;
    }
    // Separators hug the preceding token but push the following one.
    if token.matches_punct(Punct::SemiColon) {
        return false;
    }
    if token.matches_punct(Punct::Period) {
        return false;
    }
    if last_token.matches_punct(Punct::Period) {
        return false;
    }
    if token.matches_punct(Punct::Comma) {
        return false;
    }
    if token.matches_punct(Punct::Colon) {
        return false;
    }
    if last_token.matches_punct(Punct::Bang) {
        return false;
    }
    if last_token.matches_punct(Punct::Comma) {
        return true;
    }
    if token.matches_punct(Punct::Bang) {
        return false;
    }
    if last_token.matches_keyword(Keyword::Function(())) && token.matches_punct(Punct::OpenBrace) {
        return false;
    }
    // `in` / `of` / `for` read as words, so keep space around them.
    if last_token.matches_keyword(Keyword::In(()))
        || last_token.matches_ident_str("of")
        || last_token.matches_keyword(Keyword::For(()))
    {
        return true;
    }
    if token.matches_keyword(Keyword::In(())) || token.matches_ident_str("of") {
        return true;
    }
    if last_token.is_keyword() {
        return true;
    }
    if last_token.matches_punct(Punct::SemiColon) {
        return false;
    }
    // Generic fallback: any remaining punct boundary gets a space.
    if token.is_punct() || last_token.is_punct() {
        return true;
    }
    false
}
/// Renders a single scanner token back to its JavaScript source text.
///
/// Comment tokens are re-emitted here for completeness even though `main`
/// filters them out before writing; unknown kinds render as empty text.
fn token_to_string(t: &RefToken) -> String {
    match t {
        Token::Boolean(ref b) => {
            let lit = if *b == Boolean::True { "true" } else { "false" };
            lit.to_string()
        }
        Token::Comment(ref c) => {
            if c.is_multi_line() {
                format!("/*\n{}\n*/", c.content)
            } else {
                format!("//{}", c.content)
            }
        }
        Token::Ident(ref name) => name.to_string(),
        Token::Keyword(ref key) => key.to_string(),
        Token::Null => String::from("null"),
        Token::Number(ref num) => num.to_string(),
        Token::Punct(ref p) => p.to_string(),
        Token::RegEx(ref re) => {
            if let Some(ref flags) = re.flags {
                format!("/{}/{}", re.body, flags)
            } else {
                format!("/{}/", re.body)
            }
        }
        Token::String(ref s) => s.to_string(),
        _ => String::new(),
    }
}
/// Command-line arguments deserialized by docopt from `USAGE`.
#[derive(Deserialize)]
struct Opts {
    /// JavaScript file to read.
    arg_in_path: PathBuf,
    /// Destination for the comment-stripped output.
    arg_out_path: PathBuf,
}
|
// use rayon::prelude::*;
/// Samples the four elevation corners used for cell `i` of a `w` x `h`
/// heightmap, scaled down by 16. Cells on the outer border always yield
/// zero so neighbour lookups below never index out of bounds.
fn elevation_at(i: usize, w: usize, h: usize, elv: &[u8]) -> [f32; 4] {
    let (row, col) = (i / w, i % w);
    let on_border = row == 0 || col == 0 || row + 1 == h || col + 1 == w;
    if on_border {
        return [0.0; 4];
    }
    // Odd rows are shifted one sample to the right.
    let parity = row % 2;
    let scaled = |idx: usize| f32::from(elv[idx]) / 16.0;
    [
        scaled(i),
        scaled(i + w + parity),
        scaled(i + w + parity - 1),
        scaled(i + 1),
    ]
}
/// Fills `map` with two triangles (12 floats: six x,y vertex pairs) per
/// heightmap cell. Cell grid coordinates come from the chunk index; the
/// y coordinates are displaced downward by the sampled elevation corners.
pub fn triangulate_map(map: &mut Vec<f32>, width: usize, height: usize, elevation: &[u8]) {
    map.chunks_mut(12).enumerate().for_each(|(i, quad)| {
        let col = i % width;
        let row = i / width;
        // Odd rows are offset half a cell (one unit) to the right.
        let row_offset = (row % 2) as f32;
        let corner = elevation_at(i, width, height, elevation);
        let base_x = 2.0 * col as f32;
        let base_y = 2.0 * row as f32;
        // Upward-pointing triangle.
        quad[0] = base_x + row_offset;
        quad[1] = base_y - corner[0];
        quad[2] = base_x + 1.0 + row_offset;
        quad[3] = base_y + 2.0 - corner[1];
        quad[4] = base_x - 1.0 + row_offset;
        quad[5] = base_y + 2.0 - corner[2];
        // Downward-pointing triangle sharing two vertices.
        quad[6] = base_x + row_offset;
        quad[7] = base_y - corner[0];
        quad[8] = base_x + 2.0 + row_offset;
        quad[9] = base_y - corner[3];
        quad[10] = base_x + 1.0 + row_offset;
        quad[11] = base_y + 2.0 - corner[1];
    })
}
|
use std::fmt::{self,Debug};
use std::io::{Write, Read};
use std::collections::{HashSet, HashMap};
use std::fs::File;
use std::cmp::Ordering::{Less, Equal, Greater};
extern crate select;
#[macro_use] extern crate serde_json;
use select::predicate::{Predicate, Attr, Class as HTMLClass, Name, And};
// Non-breaking space: how the results table renders an empty cell.
const NBSP:char = '\u{a0}';
// Column indices of the <td> cells in each datadisplaytable row.
// (The "IDNEX" misspellings are preserved here because other code
// references these names.)
const CRN_INDEX:usize = 1_usize;
const DEPT_INDEX:usize = 2_usize;
const COURSE_IDNEX:usize = 3_usize;
const SEC_IDNEX:usize = 4_usize;
const CRED_INDEX:usize = 6_usize;
const TITLE_INDEX:usize = 7_usize;
const DAYS_INDEX:usize = 8_usize;
const TIME_INDEX:usize = 9_usize;
const INSTRUCTOR_INDEX:usize = 19_usize;
const LOC_INDEX:usize = 21_usize;
/// One weekly meeting: a day of the week plus a start time and a length.
struct TimeDuration {
    /// Day index 0..=4 for Mon..Fri; see `day_char_to_u8`.
    day: u8, // TODO: precompute all section collisions instead? How many are there?
    /// Start hour in 24-hour time.
    hour: u32,
    /// Start minute within the hour.
    minutes: u32,
    /// Meeting length in minutes.
    length_in_minutes: u32,
}
impl Debug for TimeDuration {
    /// Compact form like "M at 13 for 50 mins".
    /// Note: the start minutes are omitted from this representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} at {} for {} mins", u8_to_day_char(self.day), self.hour, self.length_in_minutes)
    }
}
impl TimeDuration {
    /// Start time expressed as minutes since midnight.
    fn minute_begin(&self) -> u32 {
        let minutes_per_day = 24 * 60;
        let mb = self.hour * 60 + self.minutes;
        assert!(mb < minutes_per_day);
        return mb;
    }
    /// End time as minutes since midnight (may pass midnight in theory;
    /// no wrap-around handling).
    fn minute_end(&self) -> u32 {
        self.minute_begin() + self.length_in_minutes
    }
    /// Closed-interval overlap test: meetings that merely touch at an
    /// endpoint (one ends exactly when the other begins) DO count as
    /// intersecting — the `time_durations` unit test relies on this.
    fn intersects_with(&self, other: &Self) -> bool {
        if self.day != other.day {
            return false;
        }
        let this_begin = self.minute_begin();
        let this_end = self.minute_end();
        let other_begin = other.minute_begin();
        let other_end = other.minute_end();
        // If one ends before the other begins they don't overlap
        if this_end < other_begin || other_end < this_begin {
            return false;
        }
        // One does not end before the other begins, which implies overlap
        return true
    }
    /// JSON form for the web viewer. Days are shifted to 1-based; the
    /// viewer only supports on-the-hour starts, hence the assert.
    fn to_json(&self) -> serde_json::Value {
        assert!(self.minutes == 0);
        json!({
            "day" : self.day+1,
            "hour" : self.hour,
            "length" : self.length_in_minutes
        })
    }
}
/// A single registrable section of a course, with all its weekly meetings.
#[derive(Debug)]
struct Section<'a> {
    /// Course title as scraped from the results table.
    name: &'a str,
    /// Department code, e.g. "CSCI".
    dept: &'a str,
    course_num: u32,
    section_num: u8,
    credits: u8,
    /// All weekly meetings of this section.
    times: Vec<TimeDuration>
}
impl<'a> Section<'a> {
    /// True if any meeting of `self` overlaps any meeting of `other`
    /// (pairwise check over both time lists).
    fn intersects_with(&self, other: &Self) -> bool {
        //Todo: this can be done faster with sorting
        for time1 in self.times.iter() {
            for time2 in other.times.iter() {
                if time1.intersects_with(time2) {
                    return true;
                }
            }
        }
        return false;
    }
    /// JSON form for the web viewer: a display name plus all meeting times.
    fn to_json(&self) -> serde_json::Value {
        json!({
            "name" : format!("{}-{}-{}", self.dept, self.name, self.section_num),
            "times" : self.times.iter().map(TimeDuration::to_json).collect::<Vec<_>>()
        })
    }
}
/// NOTE(review): only referenced by the commented-out helper below; this
/// type looks superseded by `Course` and could likely be removed.
struct Class<'a> {
    name: &'a str,
    sections: Vec<Section<'a>>
}
/// A course (keyed by department + course number in the HashMap built by
/// `load_sections_from_doc`) together with all of its sections.
#[derive(Debug)]
struct Course<'a> {
    name: &'a str,
    department: &'a str,
    sections: Vec<Section<'a>>
}
//fn make_single_section_hour<'a>(n:&'a str, course_num:u32, start_hour: u32) -> Class<'a> {
//
// let only_time = TimeDuration {day: 0, hour: start_hour, minutes: 0, length_in_minutes: 50};
// let section = Section {course_num, section_num: 1, credits: 4, times: vec![only_time], name: n};
//
// Class {name: n, sections: vec![section]}
//
//}
/// Prints a schedule as "NAME-SEC -> NAME-SEC -> ..." (no trailing arrow,
/// no newline).
fn print_stack(stack: &[&Section]) {
    let last = stack.len().saturating_sub(1);
    for (i, sec) in stack.iter().enumerate() {
        print!("{}-{}", sec.name, sec.section_num);
        if i < last {
            print!(" -> ");
        }
    }
}
/// True if `sec_to_check` time-conflicts with any section already chosen.
fn intersects_any_in_stack(sec_to_check: &Section, stack: &[&Section]) -> bool {
    stack.iter().any(|sec| sec_to_check.intersects_with(sec))
}
/// Total credit hours across all sections of a schedule.
///
/// Takes a slice instead of `&Vec<_>` (idiomatic and more general); the
/// existing `&Vec<&Section>` call sites still work via deref coercion.
fn schedule_credit_hours(sched: &[&Section]) -> u32 {
    sched.iter().map(|s| s.credits as u32).sum()
}
/// Earliest class start hour (24h) across the whole schedule.
///
/// Takes a slice instead of `&Vec<_>`; `&Vec<&Section>` callers coerce.
///
/// # Panics
/// Panics if the schedule is empty or a section has no meeting times —
/// both indicate a bug upstream in schedule enumeration.
fn schedule_starting_time(sched: &[&Section]) -> u32 {
    sched.iter().map(|section| {
        section.times.iter().min_by_key(|t| t.hour).expect("Found a section with no times.... wtf?").hour
    }).min().unwrap()
}
/// Summary of the gaps between classes produced by `schedule_bs_time`.
#[derive(Debug)]
struct TimeAnalysis {
    /// Number of gaps long enough (>= 2h) to go back to the dorm.
    back_to_dorm_count: u32,
    /// Total minutes spent in those long gaps.
    back_to_dorm_minutes: u32,
    /// Total minutes of awkward short gaps (< 2h, excluding the standard
    /// 10-minute passing period).
    bs_time: u32
}
/// Analyzes the per-day gaps between consecutive classes of a schedule.
///
/// Gaps under two hours (other than the standard 10-minute passing period)
/// count as wasted "bs" time; longer gaps count as trips back to the dorm.
///
/// Fixes over the original:
/// * days are sorted by full start minute (`minute_begin`), not just the
///   hour — hour-only sorting could order two same-hour classes backwards
///   and underflow the `second_begin - first_end` subtraction below;
/// * the old `day >= 0` half of the assert was a tautology on `u8`;
/// * takes `&[&Section]` instead of `&Vec<&Section>` (callers coerce).
fn schedule_bs_time(sched: &[&Section]) -> TimeAnalysis {
    // This can be optimized by preallocating a 7 x (hours) matrix and
    // avoiding any sorting, if it ever shows up in a profile.
    let mut day_partition: Vec<_> = (0..7).map(|_| Vec::<&TimeDuration>::new()).collect();
    for &sec in sched.iter() {
        for time_dur in sec.times.iter() {
            let day = time_dur.day;
            assert!(day < 7);
            day_partition[day as usize].push(time_dur);
        }
    }
    let mut bs_time = 0_u32;
    let mut back_to_dorm_count = 0_u32;
    let mut total_btd_break_minutes = 0_u32;
    for dp in day_partition.iter_mut() {
        // Chronological order by absolute start minute.
        dp.sort_by_key(|td| td.minute_begin());
        for win in dp.as_slice().windows(2) {
            let first_end = win[0].minute_end();
            let second_begin = win[1].minute_begin();
            let dm = second_begin - first_end;
            let two_hours = 2 * 60;
            if dm < two_hours {
                // A 10-minute gap is just the normal passing period.
                if dm != 10 {
                    bs_time += dm;
                }
            } else {
                total_btd_break_minutes += dm;
                back_to_dorm_count += 1
            }
            // Sections in a schedule never overlap, so gaps are positive.
            assert!(dm > 0);
        }
    }
    TimeAnalysis {
        back_to_dorm_count,
        back_to_dorm_minutes: total_btd_break_minutes,
        bs_time
    }
}
/// A candidate schedule together with its computed statistics and score.
#[derive(Debug, Clone)]
struct EvaluatedSchedule<'a> {
    schedule: Vec<&'a Section<'a>>,
    /// Earliest start hour across the schedule.
    s_time: u32,
    /// Distance of the earliest start hour from noon (|s_time - 12|).
    start_time_dt: u32,
    /// Wasted short-gap minutes (see `TimeAnalysis`).
    bs_time: u32,
    back_to_dorm_count: u32,
    back_to_dorm_minutes: u32,
    /// Overall score; higher is better, -inf for invalid credit loads.
    score: f64
}
impl<'a> EvaluatedSchedule<'a> {
    /// JSON form for the web viewer: score, stats, and all sections.
    fn to_json(&self) -> serde_json::Value {
        json!({
            "score" : self.score,
            "stats" : {
                "s_time" : self.s_time,
                "start_time_dt" : self.start_time_dt,
                "bs_time" : self.bs_time,
                "back_dorm_count" : self.back_to_dorm_count,
                "back_to_dorm_minutes" : self.back_to_dorm_minutes,
            },
            "classes" : self.schedule.iter().map(|s| s.to_json()).collect::<Vec<_>>()
        })
    }
    /// Pareto-dominance test over the four "smaller is better" metrics:
    /// true iff `self` is no worse than `other` on every metric AND
    /// strictly better on at least one.
    fn is_strictly_better_than(&self, other:&Self) -> bool {
        let cmps = vec![
            self.start_time_dt.cmp(&other.start_time_dt),
            self.bs_time.cmp(&other.bs_time),
            self.back_to_dorm_count.cmp(&other.back_to_dorm_count),
            self.back_to_dorm_minutes.cmp(&other.back_to_dorm_minutes),
        ];
        // All four metrics are minimized, so `Less` is the desired outcome
        // for each comparison.
        let better = vec![
            Less,
            Less,
            Less,
            Less
        ];
        assert_eq!(better.len(), cmps.len());
        // `opposite` panics on Equal, which is fine: it is only applied to
        // the `better` entries above, all of which are Less.
        let opposite = |o: &std::cmp::Ordering| {
            match o {
                &Less => Greater,
                &Greater => Less,
                _=> panic!()
            }
        };
        // Make sure it isn't any worse
        for (cmp, desired_cmp) in cmps.iter().zip(better.iter()) {
            let opp = opposite(desired_cmp);
            if *cmp == opp {
                return false;
            }
        }
        // Make sure atleast one is better
        for (cmp, desired_cmp) in cmps.iter().zip(better.iter()) {
            if cmp == desired_cmp {
                return true;
            }
        }
        return false; // They are equal
    }
}
/// Scores a candidate schedule. Higher is better; a credit load outside
/// 16..=20 forces the score to negative infinity (invalid schedule).
/// The score penalizes distance of the first class from noon, short wasted
/// gaps, and trips back to the dorm (count heavier than minutes).
fn evaluate_schedule<'a>(sched: Vec<&'a Section>) -> EvaluatedSchedule<'a> {
    let credit_hours = schedule_credit_hours(&sched);
    // Todo I don't really care about absolute earliest, maybe try average or drop the single worst
    let s_time = schedule_starting_time(&sched);
    let time_analysis = schedule_bs_time(&sched);
    //print_stack(sched);
    // Distance of the earliest start from noon; noon starts are "ideal".
    let dt:u32 = (s_time as i32 - 12).abs() as u32;
    let bs_time_penalty = time_analysis.bs_time as f64;
    let back_to_dorm_count_penalty = time_analysis.back_to_dorm_count as f64 * 20.0;
    let back_to_dorm_minutes_penalty = time_analysis.back_to_dorm_minutes as f64 / 100.0;
    let mut total_score = 0.0 - (dt as f64) - bs_time_penalty - back_to_dorm_count_penalty - back_to_dorm_minutes_penalty;
    //TODO
    if credit_hours < 16 || credit_hours > 20 {
        total_score = -std::f64::INFINITY;
    }
    //print!(" credit_hours = {:?}, s_time = {:?}, bs_time={:?}, -- SCORE= {:?}", credit_hours, s_time, time_analysis, total_score);
    //print!("\n");
    EvaluatedSchedule {
        schedule: sched,
        score: total_score,
        start_time_dt: dt,
        bs_time: time_analysis.bs_time,
        back_to_dorm_count: time_analysis.back_to_dorm_count,
        back_to_dorm_minutes: time_analysis.back_to_dorm_minutes,
        s_time: s_time
    }
}
/// Recursively enumerates all conflict-free schedules over `courses`,
/// branching on "skip this course" vs "take one of its sections". Complete
/// candidates are pushed onto `results`; `stack` is the in-progress choice.
/// Note: only schedules containing exactly 4 sections are kept (hard-coded
/// to match the 4 courses selected in `main`).
fn enumerate_schedules<'b, 'c, 'a : 'b+'c, >(courses: &'a[Course<'a>], stack: &'b mut Vec<&'a Section<'a>>, results: &'c mut Vec<Vec<&'a Section<'a>>>) {
    if courses.len() == 0 {
        if stack.len() != 0 && stack.len() == 4{
            results.push(stack.clone());
        }
        return
    }
    let this_course = &courses[0];
    // You can either not take this course at all
    enumerate_schedules(&courses[1..], stack, results);
    // Or you can take this course at any of the below times
    for section in this_course.sections.iter() {
        if !intersects_any_in_stack(section, stack) {
            stack.push(section);
            enumerate_schedules(&courses[1..], stack, results);
            stack.pop();
        }
    }
}
/// Returns the first direct text child of node `n`, or `None` when there is
/// no text child or the cell is effectively empty.
fn get_immediate_inner_text<'a>(n: select::node::Node<'a>) -> Option<&'a str> {
    n.children().next().and_then(|c| {
        c.as_text().and_then(|t| {
            // A lone NBSP is how the results table renders an empty cell;
            // len() == 2 because U+00A0 occupies two bytes in UTF-8.
            if t.len() == 2 && t.chars().nth(0).unwrap() == NBSP {
                None
            }
            else {
                Some(t)
            }
        })
    })
}
/// Parses a clock string like "06:00 pm" into `(hour_24, minutes)`.
///
/// "12:xx am" maps to hour 0 and "12:xx pm" stays 12; any other "pm" hour
/// gets 12 added. Panics on malformed input.
fn parse_mins_hours(s:&str) -> (u32, u32) {
    let mut words = s.split(' ');
    let clock = words.next().unwrap();
    let meridiem = words.next().unwrap();
    let mut fields = clock.split(':');
    let hours_12: u32 = fields.next().unwrap().parse().unwrap();
    let mins: u32 = fields.next().unwrap().parse().unwrap();
    let hours = match (meridiem, hours_12) {
        ("pm", h) if h != 12 => h + 12,
        ("am", 12) => 0,
        (_, h) => h,
    };
    (hours, mins)
}
/// Maps a weekday letter (M T W R F) to its 0-based index.
/// Panics on any other character.
fn day_char_to_u8(day:char) -> u8 {
    "MTWRF".find(day).unwrap() as u8
}
/// Maps a 0-based weekday index back to its letter (M T W R F).
/// Panics for indices above 4.
fn u8_to_day_char(day:u8) -> char {
    b"MTWRF"[day as usize] as char
}
/// Parses a day list (e.g. "MWF") plus a time range (e.g.
/// "10:10 am-12:00 pm") into one `TimeDuration` per listed day.
///
/// Fix: the duration is computed from absolute minutes-since-midnight.
/// The previous formula (`(e_h - s_h)*60 + (60 - s_m) % 60 + e_m`) counted
/// a full extra hour whenever the start minute was non-zero (10:10-12:00
/// came out as 170 minutes instead of 110).
///
/// # Panics
/// Panics on malformed input, or when the range ends before it starts
/// (overnight spans are not supported).
fn parse_time(days:&str, s:&str) -> Vec<TimeDuration> {
    // The range is identical for every listed day, so parse it once.
    let mut pieces = s.split('-');
    let start = pieces.next().unwrap();
    let end = pieces.next().unwrap();
    let (s_h, s_m) = parse_mins_hours(start);
    let (e_h, e_m) = parse_mins_hours(end);
    let begin_abs = s_h * 60 + s_m;
    let end_abs = e_h * 60 + e_m;
    assert!(end_abs >= begin_abs, "time range ends before it starts: {}", s);
    let total_dm = end_abs - begin_abs;
    let mut durs = Vec::new();
    for ch in days.chars() {
        durs.push(TimeDuration{
            day: day_char_to_u8(ch),
            hour: s_h,
            minutes: s_m,
            length_in_minutes: total_dm
        });
    }
    durs
}
/// Extracts the meeting times from table row `row_i`, or `None` when the
/// row has no cells, lists its time as "TBA", or is missing days/time text.
fn get_time_in_row<'a, 'b>(rows:&'b Vec<select::node::Node<'a>>, row_i:usize) -> Option<Vec<TimeDuration>> {
    let next_row = rows[row_i];
    let next_row_tds = next_row.find(Name("td")).collect::<Vec<_>>();
    // Rows without <td> cells are header rows.
    if next_row_tds.len() == 0 {
        return None;
    }
    let next_time = get_immediate_inner_text(next_row_tds[TIME_INDEX]);
    let next_days = get_immediate_inner_text(next_row_tds[DAYS_INDEX]);
    if next_time.is_some() && next_time.unwrap() != "TBA" && next_days.is_some() {
        Some(parse_time(next_days.unwrap(), next_time.unwrap()))
    } else {
        None
    }
}
/// Like `get_time_in_row`, but only for *continuation* rows: rows that add
/// extra meeting times to the section declared in a previous row. A row
/// with its own title is a new section, not a continuation, so `None`.
fn get_time_continuation<'a, 'b>(rows:&'b Vec<select::node::Node<'a>>, row_i:usize) -> Option<Vec<TimeDuration>> {
    if row_i >= rows.len() {
        return None
    }
    get_time_in_row(rows, row_i).and_then(|v:Vec<TimeDuration>| {
        let next_row = rows[row_i];
        let next_row_tds = next_row.find(Name("td")).collect::<Vec<_>>();
        if next_row_tds.len() == 0 {
            return None;
        }
        // A populated title cell means this row starts a new section.
        if get_immediate_inner_text(next_row_tds[TITLE_INDEX]).is_some() {
            return None
        }
        Some(v)
    })
}
/// Scrapes the course-search HTML into a map keyed by (department, course
/// number). Walks the rows of the first `table.datadisplaytable`, parsing
/// one section per titled row and folding continuation rows (extra meeting
/// times) into the preceding section.
fn load_sections_from_doc<'a>(doc:&'a select::document::Document) -> HashMap<(&'a str, u32), Course<'a>> {
    let table = doc.find(And(Name("table"), HTMLClass("datadisplaytable"))).next().expect("Couldn't find <table class=\"datadisplaytable\">");
    let rows = table.find(Name("tr")).collect::<Vec<_>>();
    println!("Found rows");
    // Todo my layout of times can be optimized, it appears every section only has 1 time but multiple days
    let mut courses = HashMap::<(&'a str, u32), Course<'a>>::new();
    // Manual index loop (not a `for`) because continuation rows advance `i`
    // by more than one.
    let mut i = 0_usize;
    while i < rows.len() {
        let row = &rows[i];
        let tds = row.find(Name("td")).collect::<Vec<_>>();
        if tds.len() == 0 {
            println!("Found header row {}", row.inner_html());
            i += 1;
            continue; // This is a header row
        }
        let title_txt = get_immediate_inner_text(tds[TITLE_INDEX]);
        println!("title_text={:?}", title_txt);
        let course_num_s = get_immediate_inner_text(tds[COURSE_IDNEX]);
        let course_num_i:Result<u32, _> = course_num_s.unwrap().parse();
        let section_s = get_immediate_inner_text(tds[SEC_IDNEX]);
        // Strip 'T'/'G' suffixes (e.g. honors/grad markers) before parsing.
        let section_num_i:Result<u8, _> = section_s.unwrap().chars().filter(|c| *c != 'T' && *c != 'G').collect::<String>().parse();
        let credits_s = get_immediate_inner_text(tds[CRED_INDEX]);
        let dept_s = get_immediate_inner_text(tds[DEPT_INDEX]);
        // Todo double check this and make sure there aren't any non_integer credits
        let credit_num_i:Result<u8, _> = credits_s.unwrap().chars().take_while(|&c| c!= '.').collect::<String>().parse();
        if course_num_i.is_err() {
            panic!("Couldn't parse {:?}", course_num_s);
        }
        let td_opt = get_time_in_row(&rows, i);
        if td_opt.is_none() {
            println!("SKIPPING ROW {:?} at i={} BECAUSE FOUND NO TIME", title_txt, i);
            i+= 1;
            continue;
        }
        let mut td_vec = td_opt.unwrap();
        // Absorb any continuation rows that add more meeting times.
        while let Some(new_td) = get_time_continuation(&rows, i+1) {
            println!("GOTTT A CONTINUATION!!!");
            td_vec.extend(new_td);
            i += 1
        }
        let course_num = course_num_i.unwrap();
        let course_name = title_txt.unwrap();
        let dept_str = dept_s.unwrap();
        let sec = Section {
            name : course_name,
            course_num: course_num,
            section_num: section_num_i.unwrap(),
            credits: credit_num_i.unwrap(),
            times: td_vec,
            dept: dept_str,
        };
        // Insert into the (dept, number) bucket, creating the course on
        // first sight.
        let existing_course:&mut Course<'a> = courses.entry((dept_str, course_num)).or_insert(Course { department: dept_str, name:course_name, sections: Vec::new() });
        existing_course.sections.push(sec);
        i += 1;
    }
    //println!("{:#?}", courses);
    return courses;
}
/// Removes Pareto-dominated schedules: any schedule strictly worse than
/// another (per `is_strictly_better_than`) is dropped. O(n^2) pairwise
/// comparison; dominated indices are collected in a HashSet so each loser
/// is skipped in later comparisons.
fn prune<'a>(v:&mut Vec<EvaluatedSchedule<'a>>) -> Vec<EvaluatedSchedule<'a>> {
    let mut strictly_worse_i = HashSet::with_capacity(5000); //todo size this from v.len()
    for i in 0..v.len() {
        for j in 0..v.len() {
            if i==j || strictly_worse_i.contains(&i) || strictly_worse_i.contains(&j) {
                continue;
            }
            if v[i].is_strictly_better_than(&v[j]) {
                strictly_worse_i.insert(j);
            } else if v[j].is_strictly_better_than(&v[i]) {
                strictly_worse_i.insert(i);
            }
        }
    }
    let mut ret = Vec::new();
    for i in 0..v.len() {
        if strictly_worse_i.contains(&i) { continue; }
        ret.push(v[i].clone()); // todo should i clone here?
    }
    println!("Successfuly pruned {} schedules or {}%!!", strictly_worse_i.len(), strictly_worse_i.len() as f64 / v.len() as f64 * 100.);
    return ret;
}
/// Loads the scraped search-results page, enumerates every conflict-free
/// 4-course schedule, scores them, optionally prunes Pareto-dominated
/// candidates, and writes the survivors to `data2.js` for the web viewer.
///
/// Fixes: the final `write_all` result is no longer silently discarded,
/// and the `SHOULD_PRUNE` local now follows snake_case.
fn main() {
    let mut file = std::fs::File::open("./Search Results.html").expect("Couldn't open file");
    let mut contents = String::new();
    file.read_to_string(&mut contents).expect("Couldn't read file");
    let doc = select::document::Document::from(contents.as_ref());
    let mut courses = load_sections_from_doc(&doc);
    // The four courses to schedule; unwrap panics if one is missing.
    let c1 = courses.remove(&("MATH", 1010));
    let c2 = courses.remove(&("PHYS", 1100));
    let c3 = courses.remove(&("CSCI", 1200));
    let c4 = courses.remove(&("PSYC", 1200));
    let courses = [c1.unwrap(), c2.unwrap(), c3.unwrap(), c4.unwrap()];
    let mut stack = Vec::new();
    let mut results = Vec::new();
    enumerate_schedules(&courses, &mut stack, &mut results);
    println!("Generated {:?} schedules", results.len());
    let mut evaluations = results.into_iter().map(|s| evaluate_schedule(s)).collect::<Vec<_>>();
    // Best score first. Scores are finite or -inf (never NaN), so
    // partial_cmp cannot return None here.
    evaluations.sort_by(|a, b| b.score.partial_cmp(&a.score).unwrap());
    let should_prune = true;
    let evals_for_json = if should_prune {
        prune(&mut evaluations)
    } else {
        evaluations
    };
    let schedules = json!({
        "schedules" : evals_for_json.iter().map(EvaluatedSchedule::to_json).collect::<Vec<_>>()
    });
    let full_data = format!("let schedules = JSON.parse('{}')", schedules.to_string());
    let mut file = File::create("data2.js").unwrap();
    // Fix: previously the Result was dropped, hiding write failures.
    file.write_all(full_data.as_bytes()).expect("Couldn't write data2.js");
}
#[test]
fn time_durations() {
    // Two identical hour-long blocks (1:30-2:30) must overlap symmetrically.
    let a = TimeDuration { day: 0, hour: 1, minutes: 30, length_in_minutes: 60 };
    let b = TimeDuration { day: 0, hour: 1, minutes: 30, length_in_minutes: 60 };
    assert!(a.intersects_with(&b));
    assert!(b.intersects_with(&a));
    // 1:30-2:30 vs 2:29-3:29 overlap by a minute; 2:30-3:30 shares the boundary.
    let first = TimeDuration { day: 0, hour: 1, minutes: 30, length_in_minutes: 60 };
    let second = TimeDuration { day: 0, hour: 2, minutes: 29, length_in_minutes: 60 };
    let third = TimeDuration { day: 0, hour: 2, minutes: 30, length_in_minutes: 60 };
    assert!(first.intersects_with(&second));
    assert!(second.intersects_with(&first));
    assert!(third.intersects_with(&first));
}
#[test]
fn time_parsing() {
    // (input, expected (24h hour, minutes)) pairs covering am/pm and noon/midnight.
    let cases = [
        ("6:00 pm", (6 + 12, 0)),
        ("12:00 am", (0, 0)),
        ("12:00 pm", (12, 0)),
        ("7:15 am", (7, 15)),
        ("8:35 pm", (8 + 12, 35)),
    ];
    for &(input, expected) in cases.iter() {
        assert_eq!(parse_mins_hours(input), expected);
    }
}
#[test]
// This test body was never written; the bare `unimplemented!()` made the whole
// suite fail unconditionally. Keep the stub but skip it until it is implemented.
#[ignore]
fn parse_time_test() {
    unimplemented!();
}
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::MyWorld;
use cucumber::{Steps, StepsBuilder};
use starcoin_config::{ChainNetwork, NodeConfig, StarcoinOpt};
use std::path::PathBuf;
use std::sync::Arc;
/// Registers the cucumber steps for node config setup, node start/stop, and
/// basic RPC sanity checks.
///
/// Fix: `world.field.as_ref().take()` called `Option::take` on the *temporary*
/// returned by `as_ref()`, which is a no-op (it never clears the field) and is
/// misleading; plain `as_ref()` expresses the real intent.
pub fn steps() -> Steps<MyWorld> {
    let mut builder: StepsBuilder<MyWorld> = Default::default();
    builder
        .given("a dev node config", |world: &mut MyWorld, _step| {
            let mut opt = StarcoinOpt::default();
            opt.net = Some(ChainNetwork::Dev);
            opt.disable_metrics = true;
            opt.data_dir = Some(PathBuf::from(starcoin_config::temp_path().as_ref()));
            let mut config = NodeConfig::load_with_opt(&opt).unwrap();
            // Dev nodes must not try to reach seed peers.
            config.network.disable_seed = true;
            world.node_config = Some(config)
        })
        .given("halley node config", |world: &mut MyWorld, _step| {
            let mut opt = StarcoinOpt::default();
            opt.net = Some(ChainNetwork::Halley);
            opt.disable_metrics = true;
            opt.data_dir = Some(PathBuf::from(starcoin_config::temp_path().as_ref()));
            let config = NodeConfig::load_with_opt(&opt).unwrap();
            world.node_config = Some(config)
        })
        .given("node dev handle", |world: &mut MyWorld, _step| {
            let node_config = world.node_config.as_ref().unwrap();
            let handle = starcoin_node::run_dev_node(Arc::new(node_config.clone()));
            world.node_handle = Some(handle)
        })
        .given("node handle", |world: &mut MyWorld, _step| {
            let node_config = world.node_config.as_ref().unwrap();
            let handle = starcoin_node::run_normal_node(Arc::new(node_config.clone()));
            world.node_handle = Some(handle)
        })
        .then("node handle stop", |world: &mut MyWorld, _step| {
            // Here `take()` is on the field itself, so consuming the handle is correct.
            let node_handle = world.node_handle.take().unwrap();
            let result = node_handle.stop();
            assert!(result.is_ok());
        })
        .then("get node info", |world: &mut MyWorld, _step| {
            let client = world.rpc_client.as_ref().unwrap();
            let node_info = client.clone().node_info();
            assert!(node_info.is_ok());
        })
        .then("get node status", |world: &mut MyWorld, _step| {
            let client = world.rpc_client.as_ref().unwrap();
            let status = client.clone().node_status();
            assert!(status.is_ok());
            assert_eq!(status.unwrap(), true);
        })
        .then("get node peers", |world: &mut MyWorld, _step| {
            let client = world.rpc_client.as_ref().unwrap();
            let peers = client.clone().node_peers();
            assert!(peers.is_ok());
        });
    builder.build()
}
|
/// Renders `grid` as a grayscale PNG named `<iteration>.png`.
///
/// The image dimensions are now derived from the grid instead of being
/// hard-coded to 129x129, which made `put_pixel` panic for larger grids.
/// For a 129x129 grid (the original use) the output is unchanged.
pub fn image(grid: &[Vec<u8>], iteration: u8) {
    let height = grid.len() as u32;
    // Assumes all rows share the first row's width — TODO confirm with callers.
    let width = grid.first().map_or(0, |row| row.len()) as u32;
    let mut imgbuf = image::RgbImage::new(width, height);
    for (i, line) in grid.iter().enumerate() {
        for (j, val) in line.iter().enumerate() {
            // Equal R/G/B channels: gray level taken directly from the cell value.
            imgbuf.put_pixel(j as u32, i as u32, image::Rgb([*val, *val, *val]));
        }
    }
    imgbuf.save(&format!("{}.png", iteration)).unwrap();
}
|
use super::Mesh;
/// Per-frame update payload: a field left `None` means "unchanged".
///
/// The hand-written `Default` impl was byte-for-byte what `#[derive(Default)]`
/// produces (`Option` defaults to `None`), so it is derived instead.
#[derive(Default)]
pub struct Update<'a> {
    /// New mesh to upload, if the geometry changed.
    pub mesh: Option<Mesh>,
    /// New swap-chain descriptor, if the surface was reconfigured.
    pub swap_desc: Option<&'a wgpu::SwapChainDescriptor>,
}
|
extern crate reversi;
use reversi::game::*;
use reversi::cpu;
fn main() {
let cpu_setting = cpu::Setting::new(5, 5, 50, 20, 1, 7, 50);
let setting = Setting {
black : PlayerType::Human,
white : PlayerType::Computer(cpu_setting),
boardsize : (8, 8),
};
start(&setting, true);
} |
#![feature(proc_macro, wasm_custom_section, wasm_import_module)]
extern crate wasm_bindgen;
use wasm_bindgen::prelude::*;
// JS functions imported into wasm: Math.random, console logging/timing, and
// performance.now for high-resolution timestamps.
#[wasm_bindgen]
extern "C" {
    #[wasm_bindgen(js_namespace = Math)]
    fn random() -> f64;
    #[wasm_bindgen(js_namespace = console)]
    fn log(msg: &str);
    #[wasm_bindgen(js_namespace = console)]
    fn time(name: &str);
    #[wasm_bindgen(js_namespace = console)]
    fn timeEnd(name: &str);
    #[wasm_bindgen(js_namespace = performance)]
    fn now() -> f64;
}
// `println!`-style logging that forwards to the browser console.
macro_rules! log {
    ($($t:tt)*) => (log(&format!($($t)*)))
}
// Cell grid stored as a packed bitset: one bit per cell, 8 cells per byte.
type Cells = Vec<u8>;
// Minimal bit accessors, implemented both for a single byte (u8 bit index)
// and for the whole packed grid (usize cell index).
trait BitOper<T> {
    fn get_bit(&self, idx: T) -> bool;
    fn set_bit(&mut self, idx: T, val: bool);
    fn toggle(&mut self, idx: T);
}
// Bit operations on a single byte; `idx` is the bit position (0 = LSB).
impl BitOper<u8> for u8 {
    fn set_bit(&mut self, idx: u8, val: bool) {
        let mask = 1 << idx;
        // Clear the bit, then OR the requested value back in.
        *self = (*self & !mask) | if val { mask } else { 0 };
    }
    fn get_bit(&self, idx: u8) -> bool {
        (self >> idx) & 1 != 0
    }
    fn toggle(&mut self, idx: u8) {
        *self ^= 1 << idx;
    }
}
// Bit operations over the packed grid: byte = idx / 8, bit-within-byte = idx % 8.
impl BitOper<usize> for Cells {
    fn get_bit(&self, idx: usize) -> bool {
        let (byte, bit) = (idx / 8, (idx % 8) as u8);
        self[byte].get_bit(bit)
    }
    fn set_bit(&mut self, idx: usize, val: bool) {
        let (byte, bit) = (idx / 8, (idx % 8) as u8);
        self[byte].set_bit(bit, val)
    }
    fn toggle(&mut self, idx: usize) {
        let (byte, bit) = (idx / 8, (idx % 8) as u8);
        self[byte].toggle(bit)
    }
}
// Conway's Game of Life universe, exported to JavaScript.
#[wasm_bindgen]
pub struct Universe {
    width: u32,
    height: u32,
    // Current generation, one bit per cell.
    cells: Cells,
    // Snapshot of the previous generation, read while computing a tick.
    pres: Cells,
}
impl Universe {
    // Flattens (row, column) into a linear cell index.
    fn get_index(&self, row: u32, column: u32) -> usize {
        (row * self.width + column) as usize
    }
    // Counts live neighbors of (row, column) in the snapshot (`pres`),
    // wrapping around the edges (toroidal board).
    fn live_neighbor_count(&self, row: u32, column: u32) -> u8 {
        let snapshot = &self.pres;
        let mut alive = 0u8;
        // Adding (size - 1) then taking the modulus is "minus one with wrap".
        for dr in [self.height - 1, 0, 1].iter().cloned() {
            for dc in [self.width - 1, 0, 1].iter().cloned() {
                if dr == 0 && dc == 0 {
                    continue; // the cell itself is not a neighbor
                }
                let r = (row + dr) % self.height;
                let c = (column + dc) % self.width;
                alive += snapshot.get_bit(self.get_index(r, c)) as u8;
            }
        }
        alive
    }
    // ...
}
/// Public methods, exported to JavaScript.
#[wasm_bindgen]
impl Universe {
    /// Creates a `width` x `height` universe with a deterministic starting
    /// pattern: cells whose index is divisible by 2 or 7 begin alive.
    pub fn new(width: u32, height: u32) -> Universe {
        let len = (width * height) as usize;
        // Round the byte count up so every cell has a bit.
        let vlen = if len % 8 == 0 { len / 8 } else { len / 8 + 1 };
        let mut cells = vec![0u8; vlen];
        for i in 0..len {
            // Direct boolean instead of the previous if/else set_bit pair.
            cells.set_bit(i, i % 2 == 0 || i % 7 == 0);
        }
        //log!("new cells:{:?}",cells);
        Universe {
            width,
            height,
            cells,
            pres: vec![0u8; vlen],
        }
    }
    /// Advances the universe by one generation.
    pub fn tick(&mut self) {
        let _timer = Timer::new("Universe::tick");
        // Snapshot the current generation. Copying whole bytes is ~8x cheaper
        // than the previous bit-by-bit loop and copies the same data:
        // `cells` and `pres` are allocated with identical lengths in `new`.
        self.pres.copy_from_slice(&self.cells);
        //let _timer = Timer::new("new generation");
        for row in 0..self.height {
            for col in 0..self.width {
                let idx = self.get_index(row, col);
                let cell = self.pres.get_bit(idx);
                let live_neighbors = self.live_neighbor_count(row, col);
                let next_cell = match (cell, live_neighbors) {
                    // Rule 1: Any live cell with fewer than two live neighbours
                    // dies, as if caused by underpopulation.
                    (true, x) if x < 2 => false,
                    // Rule 2: Any live cell with two or three live neighbours
                    // lives on to the next generation.
                    (true, 2) | (true, 3) => true,
                    // Rule 3: Any live cell with more than three live
                    // neighbours dies, as if by overpopulation.
                    (true, x) if x > 3 => false,
                    // Rule 4: Any dead cell with exactly three live neighbours
                    // becomes a live cell, as if by reproduction.
                    (false, 3) => true,
                    // All other cells remain in the same state.
                    (otherwise, _) => otherwise,
                };
                self.cells.set_bit(idx, next_cell);
            }
        }
    }
    /// Randomizes the board; each cell is alive with probability ~0.5.
    pub fn rand_gen(&mut self) {
        for i in 0..((self.width * self.height) as usize) {
            self.cells.set_bit(i, random() > 0.4995);
        }
    }
    /// Flips a single cell between alive and dead.
    pub fn toggle_cell(&mut self, row: u32, column: u32) {
        let idx = self.get_index(row, column);
        self.cells.toggle(idx);
    }
    /// Kills every cell.
    pub fn clear(&mut self) {
        self.cells.iter_mut().for_each(|x| *x = 0);
    }
    pub fn width(&self) -> u32 {
        self.width
    }
    pub fn height(&self) -> u32 {
        self.height
    }
    /// Number of bytes backing the bitset (for JS-side memory views).
    pub fn bytes(&self) -> u32 {
        self.cells.len() as u32
    }
    /// Raw pointer to the packed cells; JS reads them via wasm linear memory.
    pub fn cells(&self) -> *const u8 {
        //log!("cells:{:?}",self.cells);
        //log!("ptr:{:?}",self.cells.as_ptr());
        self.cells.as_ptr()
    }
    /// Text rendering of the board (see the `Display` impl).
    pub fn render(&self) -> String {
        self.to_string()
    }
    // ...
}
use std::fmt;
impl fmt::Display for Universe {
    /// Renders the universe as rows of '0'/'1' characters, one row per line.
    ///
    /// Fix: removed the stray debug `println!("{:?}", self.cells)` — a
    /// `Display` impl must not have side effects on stdout (and `println!`
    /// is not meaningful in the wasm target anyway).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for i in 0..self.width * self.height {
            let symbol = if self.cells.get_bit(i as usize) {
                "1"
            } else {
                "0"
            };
            write!(f, "{}", symbol)?;
            if (i + 1) % self.width == 0 {
                // End of a row.
                write!(f, "\n")?;
            }
        }
        Ok(())
    }
}
// RAII guard around the browser's console.time/console.timeEnd:
// constructing it starts the named timer, dropping it stops the timer.
pub struct Timer<'a> {
    name: &'a str,
}
impl<'a> Timer<'a> {
    pub fn new(name: &'a str) -> Timer<'a> {
        time(name);
        Timer { name }
    }
}
impl<'a> Drop for Timer<'a> {
    fn drop(&mut self) {
        timeEnd(self.name);
    }
}
|
//vim: tw=80
use std::pin::Pin;
use std::task::Poll;
use futures::{Future, FutureExt, TryFutureExt, StreamExt, future, stream};
use futures::channel::oneshot;
#[cfg(feature = "tokio")]
use std::rc::Rc;
use tokio;
use tokio::runtime::current_thread;
use futures_locks::*;
// When an exclusively owned but not yet polled RwLock future is dropped, it
// should relinquish ownership. If not, deadlocks may result.
#[test]
fn drop_exclusive_before_poll() {
    let rwlock = RwLock::<u32>::new(42);
    let mut rt = current_thread::Runtime::new().unwrap();
    rt.block_on(future::poll_fn(|cx| {
        let mut fut1 = rwlock.read();
        let guard1 = Pin::new(&mut fut1).poll(cx); // fut1 immediately gets ownership
        assert!(guard1.is_ready());
        let mut fut2 = rwlock.write();
        assert!(Pin::new(&mut fut2).poll(cx).is_pending());
        drop(guard1); // ownership transfers to fut2
        //drop(fut1);
        drop(fut2); // relinquish ownership
        // fut2 never held the lock, so a new reader must acquire it at once.
        let mut fut3 = rwlock.read();
        let guard3 = Pin::new(&mut fut3).poll(cx); // fut3 immediately gets ownership
        assert!(guard3.is_ready());
        Poll::Ready(())
    }));
}
// When an nonexclusively owned but not yet polled RwLock future is dropped, it
// should relinquish ownership. If not, deadlocks may result.
#[test]
fn drop_shared_before_poll() {
    let rwlock = RwLock::<u32>::new(42);
    let mut rt = current_thread::Runtime::new().unwrap();
    rt.block_on(future::poll_fn(|cx| {
        let mut fut1 = rwlock.write();
        let guard1 = Pin::new(&mut fut1).poll(cx); // fut1 immediately gets ownership
        assert!(guard1.is_ready());
        let mut fut2 = rwlock.read();
        assert!(Pin::new(&mut fut2).poll(cx).is_pending());
        drop(guard1); // ownership transfers to fut2
        //drop(fut1);
        drop(fut2); // relinquish ownership
        // The dropped, never-polled reader must not block a later writer.
        let mut fut3 = rwlock.write();
        let guard3 = Pin::new(&mut fut3).poll(cx); // fut3 immediately gets ownership
        assert!(guard3.is_ready());
        Poll::Ready(())
    }));
}
// Mutably dereference a uniquely owned RwLock
#[test]
fn get_mut() {
    let mut lock = RwLock::<u32>::new(42);
    *lock.get_mut().unwrap() += 1;
    assert_eq!(43, *lock.get_mut().unwrap());
}
// Cloned RwLocks cannot be deferenced
#[test]
fn get_mut_cloned() {
    let mut lock = RwLock::<u32>::new(42);
    // A second handle exists, so direct mutable access must be refused.
    let _other_handle = lock.clone();
    assert!(lock.get_mut().is_none());
}
// Acquire an RwLock nonexclusively by two different tasks simultaneously .
#[test]
fn read_shared() {
    let rwlock = RwLock::<u32>::new(42);
    let mut rt = current_thread::Runtime::new().unwrap();
    let result = rt.block_on(async {
        let (tx0, rx0) = oneshot::channel::<()>();
        let (tx1, rx1) = oneshot::channel::<()>();
        // Each task holds its read guard until the other signals that it has
        // acquired the lock too, proving the two reads overlap in time.
        let task0 = rwlock.read()
            .then(move |guard| {
                tx1.send(()).unwrap();
                rx0.map(move |_| *guard)
            });
        let task1 = rwlock.read()
            .then(move |guard| {
                tx0.send(()).unwrap();
                rx1.map(move |_| *guard)
            });
        future::join(task0, task1).await
    });
    assert_eq!(result, (42, 42));
}
// Acquire an RwLock nonexclusively by a single task
#[test]
fn read_uncontested() {
    let rwlock = RwLock::<u32>::new(42);
    let mut rt = current_thread::Runtime::new().unwrap();
    let result = rt.block_on(async {
        rwlock.read().map(|guard| {
            *guard
        }).await
    });
    assert_eq!(result, 42);
}
// Attempt to acquire an RwLock for reading that already has a writer
#[test]
fn read_contested() {
    let rwlock = RwLock::<u32>::new(0);
    let mut rt = current_thread::Runtime::new().unwrap();
    let result = rt.block_on(async {
        let (tx0, rx0) = oneshot::channel::<()>();
        let (tx1, rx1) = oneshot::channel::<()>();
        // task0 takes the write lock first and holds it until task3 fires tx0,
        // so both readers below must observe the already-incremented value.
        let task0 = rwlock.write()
            .then(move |mut guard| {
                *guard += 5;
                rx0.map_err(|_| {drop(guard);})
            });
        let task1 = rwlock.read().map(|guard| *guard);
        let task2 = rwlock.read().map(|guard| *guard);
        // Readying task3 before task1 and task2 causes Tokio to poll the latter
        // even though they're not ready
        let task3 = rx1.map_err(|_| ()).map(|_| tx0.send(()).unwrap());
        let task4 = async { tx1.send(()) };
        future::join5(task0, task1, task2, task3, task4).await
    });
    assert_eq!(result, (Ok(()), 5, 5, (), Ok(())));
}
// Attempt to acquire an rwlock exclusively when it already has a reader.
// 1) task0 will run first, reading the rwlock's original value and blocking on
// rx.
// 2) task1 will run next, but block on acquiring rwlock.
// 3) task2 will run next, reading the rwlock's value and returning immediately.
// 4) task3 will run next, waking up task0 with the oneshot
// 5) finally task1 will acquire the rwlock and increment it.
//
// If RwLock::write is allowed to acquire an RwLock with readers, then task1
// would erroneously run before task2, and task2 would return the wrong value.
#[test]
fn read_write_contested() {
    let rwlock = RwLock::<u32>::new(42);
    let mut rt = current_thread::Runtime::new().unwrap();
    let result = rt.block_on(async {
        let (tx0, rx0) = oneshot::channel::<()>();
        let (tx1, rx1) = oneshot::channel::<()>();
        let task0 = rwlock.read()
            .then(move |guard| {
                rx0.map(move |_| { *guard })
            });
        let task1 = rwlock.write().map(|mut guard| *guard += 1);
        let task2 = rwlock.read().map(|guard| *guard);
        // Readying task3 before task1 and task2 causes Tokio to poll the latter
        // even though they're not ready
        let task3 = rx1.map_err(|_| ()).map(|_| tx0.send(()).unwrap());
        let task4 = async move {
            tx1.send(()).unwrap();
        };
        future::join5(task0, task1, task2, task3, task4).await
    });
    // Both reads see 42; the write lands last, leaving 43 behind.
    assert_eq!(result, (42, (), 42, (), ()));
    assert_eq!(rwlock.try_unwrap().expect("try_unwrap"), 43);
}
#[test]
fn try_read_uncontested() {
    let lock = RwLock::<u32>::new(42);
    assert_eq!(*lock.try_read().unwrap(), 42);
}
#[test]
fn try_read_contested() {
    let lock = RwLock::<u32>::new(42);
    // An outstanding writer must make try_read fail.
    let _writer = lock.try_write();
    assert!(lock.try_read().is_err());
}
#[test]
fn try_unwrap_multiply_referenced() {
    let lock = RwLock::<u32>::new(0);
    // A second handle exists, so unwrapping the value must fail.
    let _second_handle = lock.clone();
    assert!(lock.try_unwrap().is_err());
}
#[test]
fn try_write_uncontested() {
    let lock = RwLock::<u32>::new(0);
    *lock.try_write().unwrap() += 5;
    assert_eq!(lock.try_unwrap().unwrap(), 5);
}
#[test]
fn try_write_contested() {
    let lock = RwLock::<u32>::new(42);
    // An outstanding reader must make try_write fail.
    let _reader = lock.try_read();
    assert!(lock.try_write().is_err());
}
// Acquire an uncontested RwLock in exclusive mode. poll immediately returns
// Async::Ready
#[test]
fn write_uncontested() {
    let lock = RwLock::<u32>::new(0);
    let mut runtime = current_thread::Runtime::new().unwrap();
    runtime.block_on(async {
        lock.write().map(|mut guard| *guard += 5).await
    });
    assert_eq!(lock.try_unwrap().expect("try_unwrap"), 5);
}
// Pend on an RwLock held exclusively by another task in the same tokio Reactor.
// poll returns Async::NotReady. Later, it gets woken up without involving the
// OS.
#[test]
fn write_contested() {
    let rwlock = RwLock::<u32>::new(0);
    let mut rt = current_thread::Runtime::new().unwrap();
    let result = rt.block_on(async {
        let (tx0, rx0) = oneshot::channel::<()>();
        let (tx1, rx1) = oneshot::channel::<()>();
        // task0 writes 5 and keeps the lock until task2 fires tx0.
        let task0 = rwlock.write()
            .then(move |mut guard| {
                *guard += 5;
                rx0.map_err(|_| {drop(guard);})
            });
        let task1 = rwlock.write().map(|guard| *guard);
        // Readying task2 before task1 causes Tokio to poll the latter
        // even though it's not ready
        let task2 = rx1.map(|_| tx0.send(()).unwrap());
        let task3 = async move {
            tx1.send(()).unwrap();
        };
        future::join4(task0, task1, task2, task3).await
    });
    assert_eq!(result, (Ok(()), 5, (), ()));
}
// RwLocks should be acquired in the order that their Futures are waited upon.
#[test]
fn write_order() {
    let rwlock = RwLock::<Vec<u32>>::new(vec![]);
    // fut2 is *created* first but awaited second; acquisition must follow the
    // await order, not the creation order.
    let fut2 = rwlock.write().map(|mut guard| guard.push(2));
    let fut1 = rwlock.write().map(|mut guard| guard.push(1));
    let mut rt = current_thread::Runtime::new().unwrap();
    rt.block_on(async {
        fut1.then(|_| fut2).await
    });
    assert_eq!(rwlock.try_unwrap().unwrap(), vec![1, 2]);
}
// A single RwLock is contested by tasks in multiple threads
#[tokio::test]
async fn multithreaded() {
    let rwlock = RwLock::<u32>::new(0);
    let rwlock_clone0 = rwlock.clone();
    let rwlock_clone1 = rwlock.clone();
    let rwlock_clone2 = rwlock.clone();
    let rwlock_clone3 = rwlock.clone();
    // Four tasks each do 1000 write-then-read cycles, adding 2, 3, 5 and 7
    // respectively; the final value must be 1000 * (2 + 3 + 5 + 7) = 17_000.
    let fut1 = stream::iter(0..1000).for_each(move |_| {
        let rwlock_clone4 = rwlock_clone0.clone();
        rwlock_clone0.write().map(|mut guard| { *guard += 2 })
            .then(move |_| rwlock_clone4.read().map(|_| ()))
    });
    let fut2 = stream::iter(0..1000).for_each(move |_| {
        let rwlock_clone5 = rwlock_clone1.clone();
        rwlock_clone1.write().map(|mut guard| { *guard += 3 })
            .then(move |_| rwlock_clone5.read().map(|_| ()))
    });
    let fut3 = stream::iter(0..1000).for_each(move |_| {
        let rwlock_clone6 = rwlock_clone2.clone();
        rwlock_clone2.write().map(|mut guard| { *guard += 5 })
            .then(move |_| rwlock_clone6.read().map(|_| ()))
    });
    let fut4 = stream::iter(0..1000).for_each(move |_| {
        let rwlock_clone7 = rwlock_clone3.clone();
        rwlock_clone3.write().map(|mut guard| { *guard += 7 })
            .then(move |_| rwlock_clone7.read().map(|_| ()))
    });
    future::join4(fut1, fut2, fut3, fut4).await;
    assert_eq!(rwlock.try_unwrap().expect("try_unwrap"), 17_000);
}
#[cfg(feature = "tokio")]
#[test]
fn with_read_err() {
let mtx = RwLock::<i32>::new(-5);
let mut rt = current_thread::Runtime::new().unwrap();
let r = rt.block_on(async {
mtx.with_read(|guard| {
if *guard > 0 {
future::ok(*guard)
} else {
future::err("Whoops!")
}
}).unwrap().await
});
assert_eq!(r, Err("Whoops!"));
}
#[cfg(feature = "tokio")]
#[test]
fn with_read_ok() {
let mtx = RwLock::<i32>::new(5);
let mut rt = current_thread::Runtime::new().unwrap();
let r = rt.block_on(async {
mtx.with_read(|guard| {
futures::future::ok::<i32, ()>(*guard)
}).unwrap().await
});
assert_eq!(r, Ok(5));
}
// RwLock::with_read should work with multithreaded Runtimes as well as
// single-threaded Runtimes.
// https://github.com/asomers/futures-locks/issues/5
#[cfg(feature = "tokio")]
#[tokio::test]
async fn with_read_threadpool() {
    let mtx = RwLock::<i32>::new(5);
    let r = mtx.with_read(|guard| {
        futures::future::ok::<i32, ()>(*guard)
    }).unwrap().await;
    assert_eq!(r, Ok(5));
}
#[cfg(feature = "tokio")]
#[test]
fn with_read_local_ok() {
    // Note: Rc is not Send
    // `with_read_local` accepts !Send data, unlike `with_read`.
    let rwlock = RwLock::<Rc<i32>>::new(Rc::new(5));
    let mut rt = current_thread::Runtime::new().unwrap();
    let r = rt.block_on(async move {
        rwlock.with_read_local(|guard| {
            futures::future::ok::<i32, ()>(**guard)
        }).await
    });
    assert_eq!(r, Ok(5));
}
#[cfg(feature = "tokio")]
#[test]
fn with_write_err() {
let mtx = RwLock::<i32>::new(-5);
let mut rt = current_thread::Runtime::new().unwrap();
let r = rt.block_on(async move {
mtx.with_write(|mut guard| {
if *guard > 0 {
*guard -= 1;
future::ok(())
} else {
future::err("Whoops!")
}
}).unwrap().await
});
assert_eq!(r, Err("Whoops!"));
}
#[cfg(feature = "tokio")]
#[test]
fn with_write_ok() {
let mtx = RwLock::<i32>::new(5);
let mut rt = current_thread::Runtime::new().unwrap();
rt.block_on(async {
mtx.with_write(|mut guard| {
*guard += 1;
future::ok::<(), ()>(())
}).unwrap().await
}).unwrap();
assert_eq!(mtx.try_unwrap().unwrap(), 6);
}
// RwLock::with_write should work with multithreaded Runtimes as well as
// single-threaded Runtimes.
// https://github.com/asomers/futures-locks/issues/5
#[cfg(feature = "tokio")]
#[tokio::test]
async fn with_write_threadpool() {
    let mtx = RwLock::<i32>::new(5);
    let test_mtx = mtx.clone();
    let r = async move {
        mtx.with_write(|mut guard| {
            *guard += 1;
            future::ok::<(), ()>(())
        }).unwrap().await
    }.await;
    assert!(r.is_ok());
    assert_eq!(test_mtx.try_unwrap().unwrap(), 6);
}
#[cfg(feature = "tokio")]
#[test]
fn with_write_local_ok() {
    // Note: Rc is not Send
    // `with_write_local` accepts !Send data, unlike `with_write`.
    let rwlock = RwLock::<Rc<i32>>::new(Rc::new(5));
    let mut rt = current_thread::Runtime::new().unwrap();
    rt.block_on(async {
        rwlock.with_write_local(|mut guard| {
            *Rc::get_mut(&mut *guard).unwrap() += 1;
            future::ok::<(), ()>(())
        }).await.unwrap()
    });
    assert_eq!(*rwlock.try_unwrap().unwrap(), 6);
}
|
// str: an immutable, fixed-length string slice somewhere in memory.
// String: a growable, heap-allocated buffer — use it to own or modify text.
pub fn run() {
    // Borrowed slice; its contents and length never change.
    let fixed = "I am going to School in the morning ";
    println!("{}", fixed);
    // Owned, growable string.
    let mut owned = String::from("Can Change length of the string ");
    owned.push('W'); // append a single char
    owned.push_str("and making money"); // append a &str
    // Allocated capacity in bytes (may exceed the current length).
    println!("The Capacity {}", owned.capacity());
    println!("Is empty {}", fixed.is_empty());
    println!("contains 'going '{}", fixed.contains("going"));
    println!("Replace: {}", owned.replace("going", "Went"));
    // Iterate over the whitespace-separated words.
    fixed.split_whitespace().for_each(|word| println!("{}", word));
    // Pre-allocated string: length and capacity differ until it fills up.
    let mut prealloc = String::with_capacity(10);
    prealloc.push('a');
    prealloc.push_str("Going to");
    println!("{}", prealloc);
    assert_eq!(prealloc.len(), 9);
    assert_eq!(prealloc.capacity(), 10);
}
use std::{any::{Any, TypeId}, collections::HashMap, fmt::Debug, rc::Rc};
use serde::*;
use uuid::Uuid;
use std::fmt;
use crate::element::*;
use crate::entity::*;
use crate::deserialize_context::*;
// Errors produced while (de)serializing a scene.
#[derive(Debug)]
pub enum SceneSerdeError {
    // Reparenting would have created a cycle in the entity hierarchy.
    CycleError(String),
    // No creator was registered for the named element type.
    MissingElementError(String),
    // Underlying JSON (de)serialization failure.
    SerdeError(serde_json::Error)
}
impl fmt::Display for SceneSerdeError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Every variant renders as its payload's own message text.
        match self {
            SceneSerdeError::CycleError(info)
            | SceneSerdeError::MissingElementError(info) => write!(f, "{}", info),
            SceneSerdeError::SerdeError(err) => write!(f, "{}", err),
        }
    }
}
// A registered element factory: given an entity, attaches a default-constructed
// element of one concrete type and returns its type-erased address.
#[derive(Clone)]
pub struct CreatorEntry {
    // Factory closure; Rc-shared so entries clone cheaply.
    pub creator: Rc<Box<dyn Fn(EntAddr) -> EleAddrErased>>,
    // Element type name as written in serialized scene files.
    pub name: String,
    // TypeId of the concrete element type.
    pub id: TypeId
}
// Outcome of deserializing a scene: the created entities plus any
// per-element errors collected along the way.
pub struct SceneDeserResult {
    pub ents: Vec<EntAddr>,
    pub errors: Vec<SceneSerdeError>
}
// Scene (de)serializer; maps element TypeIds to their registered creators.
pub struct SceneSerde {
    creator_map: HashMap<TypeId, CreatorEntry>
}
impl SceneSerde {
/// Creates a serializer with no registered element creators.
pub fn new() -> Self {
    let creator_map = HashMap::new();
    Self { creator_map }
}
/// Registers a factory for element type `T` under the given scene-file `name`.
/// The stored closure attaches a clone of `default` to whatever entity is
/// passed in at deserialization time.
pub fn register_element_creator<T: Element + Any + Clone>(&mut self, default: T, name: &str) {
    let id = TypeId::of::<T>();
    self.creator_map.insert(id, CreatorEntry {
        creator: Rc::new(Box::new(move |ent| {
            match ent.clone().get_ref_mut() {
                Some(mut e) => {
                    // Attach a clone of the prototype; a failed add yields an
                    // invalid (null) erased address.
                    e
                        .add_element(default.clone())
                        .map_or(EleAddrErased::new(), |a| a.into())
                },
                None => {
                    // Entity is gone; return an invalid address.
                    EleAddrErased::new()
                }
            }
        })),
        name: name.into(),
        id: std::any::TypeId::of::<T>()
    });
}
/// Creates a default-constructed element named `name` on `ent`.
/// Fails with `MissingElementError` when no creator is registered for `name`;
/// asserts that the entity does not already carry an element of that type.
pub fn deserialize_empty_into(&self, ent: EntAddr, name: String) -> Result<EleAddrErased, SceneSerdeError> {
    // find creator
    let entry =
        self.find_exact_creator(name.as_str())
            .ok_or(SceneSerdeError::MissingElementError(name))?;
    {
        // The element must not already exist on the entity.
        let cloned = ent.clone();
        let mut ent_ref = cloned.get_ref_mut().unwrap();
        assert!(!ent_ref.query_element_addr_by_id(&entry.id).valid());
    }
    let erased = (entry.creator)(ent);
    assert!(erased.valid());
    Ok(erased)
}
// Returns a Some(value) if ele is valid, otherwise returns None
pub fn serialize_element(&mut self, ele: &EleAddrErased) -> Option<serde_json::Value> {
    // Serialized form: { "name": <creator name>, "payload": <element json> }.
    #[derive(Serialize)]
    struct ElementObj {
        name: String,
        payload: serde_json::Value
    }
    if !ele.valid() {
        return None;
    }
    // Look up the registered creator to recover the element's scene-file name.
    let creator = self.find_exact_creator_by_id(ele.get_element_type_id().unwrap()).unwrap();
    let payload = ele.get_ref().unwrap().ecs_serialize();
    Some(serde_json::to_value(ElementObj {
        name: creator.name,
        payload
    }).unwrap())
}
/// Rebuilds a scene from JSON: creates all entities first, restores
/// parent/child links (rolling everything back if any link would form a
/// cycle), then creates and deserializes each entity's elements.
/// Element-level failures are collected in the result, not fatal.
pub fn deserialize_scene(&mut self, man: &mut Manager, content: serde_json::Value) -> Result<SceneDeserResult, SceneSerdeError> {
    #[derive(Deserialize, Clone)]
    struct EleObj {
        name: String,
        payload: serde_json::Value
    }
    #[derive(Deserialize, Clone)]
    struct EntObj {
        name: String,
        parent_payload: serde_json::Value, // This is a serialized form of EntAddr
        id: i64,
        eles: Vec<EleObj>
    }
    struct EntDeserializeState {
        payload: EntObj,
        addr: EntAddr
    }
    // Opens the id -> entity mapping context used by EntAddr deserializers.
    begin_deserialize();
    // Deserialize all entity data, create the actual entities, and associate the original data with the entities
    let ent_states: Vec<EntDeserializeState> =
        serde_json::from_value::<Vec<EntObj>>(content)
            .map_err(|er| SceneSerdeError::SerdeError(er))?
            .into_iter()
            .map(|payload| {
                let addr = set_mapping(Uuid::from_u128(payload.id as u128), payload.name.clone(), man);
                EntDeserializeState { payload, addr }
            })
            .collect();
    let mut reparent_failures = Vec::<String>::new();
    // All entities have been created; we can now assign parent/child relations
    ent_states.iter().for_each(|state| {
        let parent_addr = serde_json::from_value::<EntAddr>(state.payload.parent_payload.clone()).unwrap();
        let child_addr = map_id(Uuid::from_u128(state.payload.id as u128));
        if let Err(_er) = man.reparent(child_addr.clone(), parent_addr.clone()) {
            let child_ent = child_addr.get_ref().unwrap();
            let parent_ent = parent_addr.get_ref().unwrap();
            reparent_failures.push(format!("Making Child -> Parent relationship \"{}\" -> \"{}\" would have created a cycle", child_ent.name, parent_ent.name));
        }
    });
    if reparent_failures.len() > 0 {
        // Clear out created entities
        ent_states.into_iter().for_each(|state| {
            man.destroy_entity(state.addr);
        });
        man.resolve();
        end_deserialize();
        return Err(SceneSerdeError::CycleError(reparent_failures.join("\n")));
    }
    // First create empty elements in their respective entities so no EleAddr deserialize
    // fails due to the element not yet being added
    struct EleAddrDeserializeState {
        ele: EleAddrErased,
        payload: serde_json::Value
    }
    let deser_attempts =
        ent_states.iter().map(|pair| {
            pair.payload.eles.iter().map(|ele_obj| {
                self
                    .deserialize_empty_into(pair.addr.clone(), ele_obj.name.clone())
                    .map(|ele| EleAddrDeserializeState {
                        ele,
                        payload: ele_obj.payload.clone()
                    })
            })
            .collect::<Vec<Result<EleAddrDeserializeState, SceneSerdeError>>>()
        })
        .flatten()
        .collect::<Vec<Result<EleAddrDeserializeState, SceneSerdeError>>>();
    // Second pass: feed each successfully created element its saved payload.
    let ecs_deser_errors: Vec<SceneSerdeError> =
        deser_attempts
            .iter()
            .filter(|attempt| attempt.is_ok())
            .map(|attempt| attempt.as_ref().ok().unwrap())
            .map(|state| {
                state.ele.clone().get_ref_mut().unwrap().ecs_deserialize(state.payload.clone())
            })
            .filter(|state_attempt| state_attempt.is_err())
            .map(|state_attempt| SceneSerdeError::SerdeError(state_attempt.err().unwrap()))
            .collect();
    end_deserialize();
    // Merge element-creation failures with payload-deserialization failures.
    let errors =
        deser_attempts
            .into_iter()
            .filter(|state| state.is_err())
            .map(|state| state.err().unwrap())
            .chain(ecs_deser_errors.into_iter())
            .collect();
    Ok(SceneDeserResult {
        ents: ent_states.into_iter().map(|pair| pair.addr).collect(),
        errors
    })
}
/// Serializes the given entities (with their elements) to a JSON array.
/// Elements whose type has no registered creator are skipped
/// (`serialize_element` returns None for them).
pub fn serialize_scene(&mut self, _man: &mut Manager, content: Vec<EntAddr>) -> serde_json::Value {
    #[derive(Serialize)]
    struct EntObj {
        name: String,
        parent_payload: serde_json::Value,
        id: i64,
        eles: Vec<serde_json::Value>
    }
    let ent_objs: Vec<EntObj>
        =content
        .iter()
        .map(|ea| {
            let erased_eles=
                ea.get_ref_mut().unwrap()
                    .erased_elements();
            let eles: Vec<serde_json::Value>=
                erased_eles
                    .iter().filter_map(|ele| {
                        self.serialize_element(&ele)
                    })
                    .collect();
            EntObj {
                name: ea.get_ref().unwrap().name.clone(),
                parent_payload: serde_json::to_value(ea.get_ref().unwrap().get_parent()).unwrap(),
                // A dangling entity address serializes with id 0.
                id: match ea.get_ref() {
                    None => 0i64,
                    Some(e) => e.get_id().as_u128() as i64
                },
                eles
            }
        })
        .collect();
    serde_json::to_value(ent_objs).unwrap()
}
// Utility functions
/// Returns every registered creator whose name contains `name` (substring match).
pub fn find_creators(&self, name: &str) -> Vec<CreatorEntry> {
    self.creator_map
        .values()
        .filter(|entry| entry.name.contains(name))
        .cloned()
        .collect()
}
/// Returns the creator whose name matches `name` exactly, if any.
pub fn find_exact_creator(&self, name: &str) -> Option<CreatorEntry> {
    self.creator_map
        .values()
        .find(|entry| entry.name == name)
        .cloned()
}
/// Returns the creator registered for the given element `TypeId`, if any.
///
/// `creator_map` is keyed by exactly this `TypeId` (see
/// `register_element_creator`, which inserts each entry under its own `id`),
/// so a direct O(1) map lookup replaces the previous linear scan.
pub fn find_exact_creator_by_id(&self, id: TypeId) -> Option<CreatorEntry> {
    self.creator_map.get(&id).cloned()
}
} |
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::collections::HashMap;
/// Fills `object_dependency_len[k]` with the chain depth (distance from a root,
/// i.e. a node whose dependency is -1) for every node on the path from `idx`
/// down to its root. Roots keep length 0.
///
/// Parameters are slices (`&mut [i32]` / `&[i32]`) instead of `&mut Vec<i32>` /
/// `&Vec<i32>`; existing call sites coerce automatically.
fn calculate_len(object_dependency_len: &mut [i32], object_dependency: &[i32], idx: usize) {
    // Walk toward the root, remembering the path taken.
    let mut path = Vec::new();
    let mut node = idx as i32;
    while object_dependency[node as usize] >= 0 {
        path.push(node as usize);
        node = object_dependency[node as usize];
    }
    // Assign depths from the root outward: the node nearest the root gets 1.
    for (depth, &n) in path.iter().rev().enumerate() {
        object_dependency_len[n] = depth as i32 + 1;
    }
}
fn main() {
    let filename = "src/input";
    // Open the file in read-only mode (panic on failure).
    let file = File::open(filename).unwrap();
    let reader = BufReader::new(file);
    const MAX_SIZE: usize = 2000;
    // object_dependency[i] = index of the object that object i orbits, -1 for a root.
    let mut object_dependency: Vec<i32> = vec![-1; MAX_SIZE];
    // object_dependency_len[i] = number of direct + indirect orbits of object i.
    let mut object_dependency_len: Vec<i32> = vec![0; MAX_SIZE];
    // Name -> index interning table.
    let mut object_index = HashMap::new();
    let mut idx: i32 = 0;
    // Each line is "A)B", meaning B orbits A. (The unused `enumerate` index
    // from the original loop is gone.)
    for line in reader.lines() {
        let line = line.unwrap(); // Ignore errors.
        let oo_pair: Vec<&str> = line.split(')').collect();
        let origin = String::from(oo_pair[0]);
        let orbit = String::from(oo_pair[1]);
        // Intern both names with the entry API (one lookup instead of
        // contains_key + get), allocating a fresh index on first sight.
        let origin_idx = *object_index.entry(origin).or_insert_with(|| {
            let new_idx = idx;
            idx += 1;
            new_idx
        });
        let orbit_idx = *object_index.entry(orbit).or_insert_with(|| {
            let new_idx = idx;
            idx += 1;
            new_idx
        });
        object_dependency[orbit_idx as usize] = origin_idx;
    }
    // Fill in chain lengths for every object not yet visited.
    for i in 0..object_dependency_len.len() {
        if object_dependency_len[i] == 0 {
            calculate_len(&mut object_dependency_len, &object_dependency, i);
        }
    }
    let dependencies: i32 = object_dependency_len.iter().sum();
    println!("Solution: {}", dependencies);
}
|
use super::Result;
use crate::model::Wishlist;
/// Read-side service for wishlists; implementations must be shareable
/// across threads (`Send + Sync`).
pub trait WishlistService: Send + Sync {
    /// Returns the most recent wishlist.
    fn get_last_wishlist(&self) -> Result<Wishlist>;
}
|
use Renderable;
use context::Context;
use filters;
use error::Result;
/// A parsed template: an ordered list of renderable elements.
pub struct Template {
    pub elements: Vec<Box<Renderable>>,
}
impl Renderable for Template {
    /// Renders every child element in order, concatenating their output.
    fn render(&self, context: &mut Context) -> Result<Option<String>> {
        maybe_add_default_filters(context);
        maybe_add_extra_filters(context);
        let mut buf = String::new();
        for el in &self.elements {
            if let Some(ref x) = try!(el.render(context)) {
                // Append in place instead of `buf = buf + x`, which moved and
                // rebuilt the accumulator on every element.
                buf.push_str(x);
            }
            // Did the last element we processed set an interrupt? If so, we
            // need to abandon the rest of our child elements and just
            // return what we've got. This is usually in response to a
            // `break` or `continue` tag being rendered.
            if context.interrupted() {
                break;
            }
        }
        Ok(Some(buf))
    }
}
impl Template {
    /// Wrap a sequence of parsed elements into a `Template`.
    pub fn new(elements: Vec<Box<Renderable>>) -> Template {
        Template { elements }
    }
}
/// Register the standard filter set on the context. `maybe_add_filter`
/// presumably skips names already registered, so user-supplied filters
/// take precedence — confirm against `Context`.
fn maybe_add_default_filters(context: &mut Context) {
    context.maybe_add_filter("abs", Box::new(filters::abs));
    context.maybe_add_filter("append", Box::new(filters::append));
    context.maybe_add_filter("capitalize", Box::new(filters::capitalize));
    context.maybe_add_filter("ceil", Box::new(filters::ceil));
    context.maybe_add_filter("compact", Box::new(filters::compact));
    context.maybe_add_filter("concat", Box::new(filters::concat));
    context.maybe_add_filter("date", Box::new(filters::date));
    context.maybe_add_filter("default", Box::new(filters::default));
    context.maybe_add_filter("divided_by", Box::new(filters::divided_by));
    context.maybe_add_filter("downcase", Box::new(filters::downcase));
    context.maybe_add_filter("escape", Box::new(filters::escape));
    context.maybe_add_filter("escape_once", Box::new(filters::escape_once));
    context.maybe_add_filter("first", Box::new(filters::first));
    context.maybe_add_filter("floor", Box::new(filters::floor));
    context.maybe_add_filter("join", Box::new(filters::join));
    context.maybe_add_filter("last", Box::new(filters::last));
    context.maybe_add_filter("lstrip", Box::new(filters::lstrip));
    context.maybe_add_filter("map", Box::new(filters::map));
    context.maybe_add_filter("minus", Box::new(filters::minus));
    context.maybe_add_filter("modulo", Box::new(filters::modulo));
    context.maybe_add_filter("newline_to_br", Box::new(filters::newline_to_br));
    context.maybe_add_filter("plus", Box::new(filters::plus));
    context.maybe_add_filter("prepend", Box::new(filters::prepend));
    context.maybe_add_filter("remove", Box::new(filters::remove));
    context.maybe_add_filter("remove_first", Box::new(filters::remove_first));
    context.maybe_add_filter("replace", Box::new(filters::replace));
    context.maybe_add_filter("replace_first", Box::new(filters::replace_first));
    context.maybe_add_filter("reverse", Box::new(filters::reverse));
    context.maybe_add_filter("round", Box::new(filters::round));
    context.maybe_add_filter("rstrip", Box::new(filters::rstrip));
    context.maybe_add_filter("size", Box::new(filters::size));
    context.maybe_add_filter("slice", Box::new(filters::slice));
    context.maybe_add_filter("sort", Box::new(filters::sort));
    context.maybe_add_filter("sort_natural", Box::new(filters::sort_natural));
    context.maybe_add_filter("split", Box::new(filters::split));
    context.maybe_add_filter("strip", Box::new(filters::strip));
    context.maybe_add_filter("strip_html", Box::new(filters::strip_html));
    context.maybe_add_filter("strip_newlines", Box::new(filters::strip_newlines));
    context.maybe_add_filter("times", Box::new(filters::times));
    context.maybe_add_filter("truncate", Box::new(filters::truncate));
    context.maybe_add_filter("truncatewords", Box::new(filters::truncatewords));
    context.maybe_add_filter("uniq", Box::new(filters::uniq));
    context.maybe_add_filter("upcase", Box::new(filters::upcase));
    context.maybe_add_filter("url_decode", Box::new(filters::url_decode));
    context.maybe_add_filter("url_encode", Box::new(filters::url_encode));
}
/// No-op when the "extra-filters" feature is disabled; the parameter is
/// underscore-prefixed to silence the unused-variable warning.
#[cfg(not(feature = "extra-filters"))]
fn maybe_add_extra_filters(_context: &mut Context) {}
/// Register the optional filters compiled in behind the "extra-filters"
/// feature flag.
#[cfg(feature = "extra-filters")]
fn maybe_add_extra_filters(context: &mut Context) {
    context.maybe_add_filter("pluralize", Box::new(filters::pluralize));
    context.maybe_add_filter("date_in_tz", Box::new(filters::date_in_tz));
}
|
// ===============================================================================
// Authors: AFRL/RQQA
// Organization: Air Force Research Laboratory, Aerospace Systems Directorate, Power and Control Division
//
// Copyright (c) 2017 Government of the United State of America, as represented by
// the Secretary of the Air Force. No copyright is claimed in the United States under
// Title 17, U.S. Code. All Other Rights Reserved.
// ===============================================================================
// This file was auto-created by LmcpGen. Modifications will be overwritten.
pub mod abstract_geometry;
pub mod key_value_pair;
pub mod location3d;
pub mod payload_action;
pub mod payload_configuration;
pub mod payload_state;
pub mod vehicle_action;
pub mod task;
pub mod search_task;
pub mod abstract_zone;
pub mod entity_configuration;
pub mod flight_profile;
pub mod air_vehicle_configuration;
pub mod entity_state;
pub mod air_vehicle_state;
pub mod wedge;
pub mod area_search_task;
pub mod camera_action;
pub mod camera_configuration;
pub mod gimballed_payload_state;
pub mod camera_state;
pub mod circle;
pub mod gimbal_angle_action;
pub mod gimbal_configuration;
pub mod gimbal_scan_action;
pub mod gimbal_stare_action;
pub mod gimbal_state;
pub mod go_to_waypoint_action;
pub mod keep_in_zone;
pub mod keep_out_zone;
pub mod line_search_task;
pub mod navigation_action;
pub mod loiter_action;
pub mod loiter_task;
pub mod waypoint;
pub mod mission_command;
pub mod must_fly_task;
pub mod operator_signal;
pub mod operating_region;
pub mod automation_request;
pub mod point_search_task;
pub mod polygon;
pub mod rectangle;
pub mod remove_tasks;
pub mod service_status;
pub mod session_status;
pub mod vehicle_action_command;
pub mod video_stream_action;
pub mod video_stream_configuration;
pub mod video_stream_state;
pub mod automation_response;
pub mod remove_zones;
pub mod remove_entities;
pub mod flight_director_action;
pub mod weather_report;
pub mod follow_path_command;
pub mod path_waypoint;
pub mod stop_movement_action;
pub mod waypoint_transfer;
pub mod payload_stow_action;
pub mod wavelength_band;
pub mod navigation_mode;
pub mod fovoperation_mode;
pub mod gimbal_pointing_mode;
pub mod zone_avoidance_type;
pub mod loiter_type;
pub mod loiter_direction;
pub mod service_status_type;
pub mod simulation_status_type;
pub mod speed_type;
pub mod turn_type;
pub mod command_status_type;
pub mod altitude_type;
pub mod travel_mode;
pub mod waypoint_transfer_mode;
pub mod perceive;
|
use super::{Object, TaggedValue};
impl std::fmt::Debug for Object {
    /// Debug output mirrors `Display`; delegate directly rather than
    /// re-formatting through `write!`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}
impl std::fmt::Display for Object {
    /// Render the object in Scheme-style external representation:
    /// `'()` for nil, `(a b c)` or `(a . tail)` for pairs, debug-quoted
    /// strings, and plain `Display` for numbers and symbols.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        use TaggedValue::*;
        match &self.content {
            Nil => write!(f, "'()"),
            Undef => write!(f, "<undefined>"),
            Integer(x) => write!(f, "{}", x),
            Float(x) => write!(f, "{}", x),
            Symbol(s) => write!(f, "{}", s),
            String(s) => write!(f, "{:?}", s),
            Function(_) => write!(f, "<function>"),
            Pair(car, cdr) => {
                // Walk the cdr chain iteratively so proper lists print as
                // `(a b c)` rather than nested dotted pairs.
                let mut cdr = &**cdr;
                write!(f, "(")?;
                write!(f, "{}", car)?;
                while let Pair(a, d) = &cdr.content {
                    write!(f, " {}", a)?;
                    cdr = &**d;
                }
                // A non-nil tail marks an improper list: print ` . tail`.
                if !cdr.is_null() {
                    write!(f, " . {}", cdr)?;
                }
                write!(f, ")")
            }
        }
    }
}
|
use std::fmt;
use crate::translations::translations::both_translation;
use crate::Language;
/// Enum representing the possible observed values of IP protocol version.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum IpVersion {
    /// Internet Protocol version 4
    IPv4,
    /// Internet Protocol version 6
    IPv6,
    /// Not identified
    Other,
}
impl fmt::Display for IpVersion {
    /// The display form is the variant name, i.e. the derived `Debug` form.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
impl IpVersion {
    /// All variants, in the order they are presented in the UI.
    pub(crate) const ALL: [IpVersion; 3] = [IpVersion::IPv4, IpVersion::IPv6, IpVersion::Other];
    /// Label for this variant's radio button; `Other` is rendered as a
    /// localized "both" string for the given language.
    pub fn get_radio_label(&self, language: Language) -> &str {
        match self {
            IpVersion::IPv4 => "IPv4",
            IpVersion::IPv6 => "IPv6",
            IpVersion::Other => both_translation(language),
        }
    }
}
|
#![ allow (unused_parens) ]
use cairo;
use gio;
use glib;
use gtk;
use gio::prelude::*;
use gtk::prelude::*;
use gtk::traits::SettingsExt;
use std::cell::RefCell;
use std::env;
use std::io;
use std::rc::Rc;
use std::time::Duration;
use nono::*;
// Layout constants, all in (unscaled) cairo user-space units.
const BORDER_SIZE: f64 = 20.0; // outer margin around the whole drawing
const CELL_SIZE: f64 = 20.0; // width/height of one grid or clue cell
const THICK_LINE_SIZE: f64 = 3.0; // outer and every-5th grid line
const THIN_LINE_SIZE: f64 = 1.0; // interior grid lines
const CLUE_FONT_SIZE: f64 = 14.0; // font size for clue numbers
const CLUE_GAP: f64 = 2.0; // gap between the clue areas and the grid
fn main () {
    // Create the GTK application; HANDLES_OPEN routes files given on the
    // command line to the `open` signal handler below.
    let app = gtk::Application::new (
        Some ("com.jamespharaoh.nono"),
        gio::ApplicationFlags::HANDLES_OPEN);
    app.connect_open (|application, files, hint| handle_open (application, files, hint));
    let args: Vec <String> = env::args ().collect ();
    app.run_with_args (& args);
}
/// Open a solver window for every file the application was asked to open.
fn handle_open (
    application: & gtk::Application,
    files: & [gio::File],
    _hint: & str,
) {
    files.iter ().for_each (|file| handle_open_one (application, file));
}
/// Load clues from a single file and, if they are consistent, open a
/// solver window for them. Read or parse failures panic.
fn handle_open_one (
    application: & gtk::Application,
    file: & gio::File,
) {
    // load clues
    let file_input_stream =
        file.read (gio::Cancellable::NONE).unwrap ();
    let mut reader = InputStreamReader {
        input_stream: file_input_stream.upcast (),
    };
    let clues = Clues::load (& mut reader).unwrap ();
    // A solvable puzzle needs its row totals and column totals to agree.
    // ("coll" was a typo for "col" in the original message.)
    if ! clues.is_consistent () {
        println! (
            "Inconsistent clues: row sum = {}, col sum = {}",
            clues.rows_sum (),
            clues.cols_sum (),
        );
        return;
    }
    SolverWindow::new (
        application,
        clues,
    );
}
/// Cheaply clonable handle to a solver window; clones share one
/// `SolverWindowState` through `Rc<RefCell<...>>`.
#[ derive (Clone) ]
struct SolverWindow {
    state: Rc <RefCell <SolverWindowState>>,
}
/// Shared mutable state behind the `SolverWindow` handle.
struct SolverWindowState {
    solver: GridSolver, // incremental solver, advanced one step per tick
    palette: Palette, // colours used when painting
    dimensions: SolverWindowDimensions, // cached layout geometry
    window: Option <gtk::ApplicationWindow>, // set once the UI is built
    timeout_source: Option <glib::source::SourceId>, // periodic tick; cleared when solving stops
}
/// Cached layout rectangles, all in unscaled content coordinates.
#[ derive (Default) ]
struct SolverWindowDimensions {
    size: Size, // total content size
    grid: Rectangle, // puzzle grid area
    row_clues: Rectangle, // clue area to the left of the grid
    col_clues: Rectangle, // clue area above the grid
}
impl SolverWindow {
pub fn new (
application: & gtk::Application,
clues: Clues,
) -> SolverWindow {
let solver = GridSolver::new (
Grid::new (clues.num_rows (), clues.num_cols ()),
clues,
);
let solver_window = SolverWindow {
state: Rc::new (RefCell::new (SolverWindowState {
solver: solver,
palette: Palette::new (),
dimensions: Default::default (),
window: None,
timeout_source: None,
})),
};
solver_window.build_ui (application);
solver_window
}
fn build_ui (
& self,
application: & gtk::Application,
) {
let mut state = self.state.borrow_mut ();
let window = gtk::ApplicationWindow::new (application);
state.window = Some (window.clone ());
window.set_title ("Nono solver");
window.set_default_size (500, 500);
let drawing_area = Box::new (gtk::DrawingArea::new) ();
let self_clone = self.clone ();
drawing_area.connect_draw (move |drawing_area, context|
self_clone.draw_fn (drawing_area, context),
);
window.add (& drawing_area);
window.show_all ();
state.dimensions =
Self::calculate_dimensions (
& state.solver.clues (),
& state.solver.grid ());
let self_clone = self.clone ();
state.timeout_source =
Some (glib::timeout_add_local (
Duration::from_millis (10),
move || self_clone.tick ()));
let self_clone = self.clone ();
window.connect_destroy (move |_window|
self_clone.destroy (),
);
}
fn calculate_dimensions (
clues: & Clues,
grid: & Grid,
) -> SolverWindowDimensions {
// sizes
let max_row_clues = clues.rows ().map (Vec::len).max ().unwrap_or (0);
let max_col_clues = clues.cols ().map (Vec::len).max ().unwrap_or (0);
let grid_size = Size {
width: CELL_SIZE * grid.num_cols () as f64 + THICK_LINE_SIZE,
height: CELL_SIZE * grid.num_rows () as f64 + THICK_LINE_SIZE,
};
let row_clues_size = Size {
width: CELL_SIZE * max_row_clues as f64,
height: CELL_SIZE * grid.num_rows () as f64,
};
let col_clues_size = Size {
width: CELL_SIZE * grid.num_cols () as f64,
height: CELL_SIZE * max_col_clues as f64,
};
let content_size = Size {
width: (0.0
+ BORDER_SIZE
+ row_clues_size.width
+ CLUE_GAP
+ grid_size.width
+ BORDER_SIZE
),
height: (0.0
+ BORDER_SIZE
+ grid_size.height
+ CLUE_GAP
+ col_clues_size.height
+ BORDER_SIZE
),
};
let row_clues_position = Position {
horizontal: BORDER_SIZE,
vertical: BORDER_SIZE + col_clues_size.height + CLUE_GAP + THICK_LINE_SIZE / 2.0,
};
let col_clues_position = Position {
horizontal: BORDER_SIZE + row_clues_size.width + CLUE_GAP + THICK_LINE_SIZE / 2.0,
vertical: BORDER_SIZE,
};
let grid_position = Position {
horizontal: content_size.width - BORDER_SIZE - grid_size.width,
vertical: content_size.height - BORDER_SIZE - grid_size.height,
};
SolverWindowDimensions {
size: content_size,
grid: Rectangle::from ( (
grid_position,
grid_size,
) ),
col_clues: Rectangle::from ( (
col_clues_position,
col_clues_size,
) ),
row_clues: Rectangle::from ( (
row_clues_position,
row_clues_size,
) ),
}
}
fn tick (& self) -> Continue {
if self.solve_one_cell () {
let state = self.state.borrow_mut ();
let window = state.window.clone ().unwrap ();
window.queue_draw ();
Continue (true)
} else {
let mut state = self.state.borrow_mut ();
state.timeout_source = None;
Continue (false)
}
}
fn solve_one_cell (& self) -> bool {
let mut state = self.state.borrow_mut ();
let _event = match state.solver.next () {
Some (val) => val,
None => return false,
};
true
}
fn draw_fn (
& self,
drawing_area: & gtk::DrawingArea,
context: & cairo::Context,
) -> gtk::Inhibit {
let state = self.state.borrow ();
// background
context.set_source (& state.palette.background);
context.paint ();
// content
let content_width = state.dimensions.size.width;
let content_height = state.dimensions.size.height;
let content_ratio = content_width / content_height;
let native_width = drawing_area.allocated_width () as f64;
let native_height = drawing_area.allocated_height () as f64;
let native_ratio = native_width / native_height;
let scale = if native_ratio > content_ratio {
native_height / content_height
} else {
native_width / content_width
};
context.translate (
(native_width - content_width * scale) / 2.0,
(native_height - content_height * scale) / 2.0,
);
context.scale (scale, scale);
Self::draw_row_clues (& state, & context);
Self::draw_col_clues (& state, & context);
Self::draw_grid (& state, & context);
// return
gtk::Inhibit (false)
}
fn draw_row_clues (
state: & SolverWindowState,
context: & cairo::Context,
) {
let clues = state.solver.clues ();
let palette = & state.palette;
context.save ();
context.translate (
state.dimensions.row_clues.right,
state.dimensions.row_clues.top,
);
let gtk_settings = gtk::Settings::default ().unwrap ();
let gtk_font_name = gtk_settings.gtk_font_name ().unwrap ();
let font_name = & gtk_font_name [
0 .. gtk_font_name.chars ().rev ()
.skip_while (|& ch| ch.is_ascii_digit ())
.skip_while (|& ch| ch.is_whitespace ())
.count ()
];
context.select_font_face (
& font_name,
cairo::FontSlant::Normal,
cairo::FontWeight::Normal,
);
context.set_font_size (CLUE_FONT_SIZE);
for (row_index, row_clues) in clues.rows ().enumerate () {
for (clue_index, clue) in row_clues.iter ().rev ().enumerate () {
let text = format! ("{}", clue);
let text_extents = context.text_extents (& text).unwrap ();
let clue_position = Position {
horizontal: - CELL_SIZE * clue_index as f64,
vertical: CELL_SIZE * row_index as f64,
};
context.rectangle (
clue_position.horizontal - CELL_SIZE,
clue_position.vertical,
CELL_SIZE,
CELL_SIZE,
);
context.set_source (& state.palette.clue_box);
context.fill ();
context.move_to (
clue_position.horizontal,
clue_position.vertical,
);
context.rel_move_to (
- (CELL_SIZE + text_extents.x_advance ()) / 2.0,
(CELL_SIZE + text_extents.height ()) / 2.0,
);
context.set_source (& palette.clue_text);
context.show_text (& text);
}
}
context.restore ();
}
fn draw_col_clues (
state: & SolverWindowState,
context: & cairo::Context,
) {
let clues = state.solver.clues ();
let palette = & state.palette;
context.save ();
context.translate (
state.dimensions.col_clues.left,
state.dimensions.col_clues.bottom,
);
let gtk_settings = gtk::Settings::default ().unwrap ();
let gtk_font_name = gtk_settings.gtk_font_name ().unwrap ();
let font_name = & gtk_font_name [
0 .. gtk_font_name.chars ().rev ()
.skip_while (|& ch| ch.is_ascii_digit ())
.skip_while (|& ch| ch.is_whitespace ())
.count ()
];
context.select_font_face (
& font_name,
cairo::FontSlant::Normal,
cairo::FontWeight::Normal,
);
context.set_font_size (CLUE_FONT_SIZE);
for (col_index, col_clues) in clues.cols ().enumerate () {
for (clue_index, clue) in col_clues.iter ().rev ().enumerate () {
let text = format! ("{}", clue);
let text_extents = context.text_extents (& text).unwrap ();
let clue_position = Position {
horizontal: CELL_SIZE * col_index as f64,
vertical: - CELL_SIZE * clue_index as f64,
};
context.rectangle (
clue_position.horizontal,
clue_position.vertical - CELL_SIZE,
CELL_SIZE,
CELL_SIZE,
);
context.set_source (& state.palette.clue_box);
context.fill ();
context.move_to (
clue_position.horizontal,
clue_position.vertical,
);
context.rel_move_to (
(CELL_SIZE - text_extents.x_advance ()) / 2.0,
- (CELL_SIZE - text_extents.height ()) / 2.0,
);
context.set_source (& palette.clue_text);
context.show_text (& text);
}
}
context.restore ();
}
fn draw_grid (
state: & SolverWindowState,
context: & cairo::Context,
) {
let grid = state.solver.grid ();
let palette = & state.palette;
let grid_size = state.dimensions.grid.size ();
let grid_size_internal = Size {
width: grid_size.width - THICK_LINE_SIZE,
height: grid_size.height - THICK_LINE_SIZE,
};
context.save ();
context.translate (
state.dimensions.grid.left + THICK_LINE_SIZE / 2.0,
state.dimensions.grid.top + THICK_LINE_SIZE / 2.0,
);
// grid cells
context.set_antialias (cairo::Antialias::None);
for row_index in 0 .. grid.num_rows () {
for col_index in 0 .. grid.num_cols () {
context.set_source (
match grid [(row_index, col_index)] {
Cell::UNKNOWN => & palette.unknown,
Cell::EMPTY => & palette.empty,
Cell::FILLED => & palette.filled,
_ => & palette.error,
},
);
context.rectangle (
CELL_SIZE * col_index as f64,
CELL_SIZE * row_index as f64,
CELL_SIZE + THIN_LINE_SIZE,
CELL_SIZE + THIN_LINE_SIZE,
);
context.fill ();
}
}
// grid lines
context.set_antialias (cairo::Antialias::Default);
context.set_source (& palette.lines);
context.set_line_cap (cairo::LineCap::Square);
for row_index in 0 ..= grid.num_rows () {
context.set_line_width (
if row_index % 5 == 0 || row_index == grid.num_rows () {
THICK_LINE_SIZE
} else {
THIN_LINE_SIZE
},
);
context.move_to (0.0, CELL_SIZE * row_index as f64);
context.rel_line_to (grid_size_internal.width, 0.0);
context.stroke ();
}
for col_index in 0 ..= grid.num_cols () {
context.set_line_width (
if col_index % 5 == 0 || col_index == grid.num_cols () {
THICK_LINE_SIZE
} else {
THIN_LINE_SIZE
},
);
context.move_to (CELL_SIZE * col_index as f64, 0.0);
context.rel_line_to (0.0, grid_size_internal.height);
context.stroke ();
}
context.restore ();
}
fn destroy (& self) {
let mut state = self.state.borrow_mut ();
if let Some (timeout_source) = state.timeout_source.take () {
timeout_source.remove ();
}
}
}
/// A point in content coordinates.
#[ derive (Clone, Copy, Debug, Default) ]
struct Position {
    horizontal: f64,
    vertical: f64,
}
impl From <(f64, f64)> for Position {
    /// Interpret a `(horizontal, vertical)` pair as a position.
    fn from ((horizontal, vertical): (f64, f64)) -> Position {
        Position { horizontal, vertical }
    }
}
/// A width/height pair in content coordinates.
#[ derive (Clone, Copy, Debug, Default) ]
struct Size {
    width: f64,
    height: f64,
}
impl From <(f64, f64)> for Size {
    /// Interpret a `(width, height)` pair as a size.
    fn from ((width, height): (f64, f64)) -> Size {
        Size { width, height }
    }
}
/// Axis-aligned rectangle stored as its four edge coordinates.
#[ derive (Clone, Copy, Debug, Default) ]
struct Rectangle {
    left: f64,
    top: f64,
    right: f64,
    bottom: f64,
}
impl From <(Position, Size)> for Rectangle {
    /// Build a rectangle from its top-left corner and a size.
    fn from ((position, size): (Position, Size)) -> Rectangle {
        Rectangle {
            left: position.horizontal,
            top: position.vertical,
            right: position.horizontal + size.width,
            bottom: position.vertical + size.height,
        }
    }
}
impl Rectangle {
    /// Horizontal extent.
    fn width (& self) -> f64 { self.right - self.left }
    /// Vertical extent.
    fn height (& self) -> f64 { self.bottom - self.top }
    /// Both extents as a `Size`.
    fn size (& self) -> Size {
        Size { width: self.width (), height: self.height () }
    }
}
/// Adapter exposing a `gio::InputStream` through `std::io::Read`.
struct InputStreamReader {
    input_stream: gio::InputStream,
}
impl io::Read for InputStreamReader {
    /// Read up to `buffer.len()` bytes from the underlying gio stream.
    ///
    /// Short reads are handled: only the first `bytes.len()` bytes of
    /// `buffer` are written and that count is returned. gio errors are
    /// wrapped as `io::ErrorKind::Other`.
    fn read (
        & mut self,
        buffer: & mut [u8],
    ) -> Result <usize, io::Error> {
        let bytes =
            self.input_stream.read_bytes (
                buffer.len (),
                gio::Cancellable::NONE)
            .map_err (|gio_err|
                io::Error::new (
                    io::ErrorKind::Other,
                    gio_err)) ?;
        buffer [0 .. bytes.len ()].copy_from_slice (& bytes);
        Ok (bytes.len ())
    }
}
/// Solid-colour cairo patterns for every element the window paints.
struct Palette {
    background: cairo::SolidPattern, // window background
    lines: cairo::SolidPattern, // grid lines
    clue_text: cairo::SolidPattern, // clue numbers
    clue_box: cairo::SolidPattern, // boxes behind clue numbers
    unknown: cairo::SolidPattern, // undecided cells
    filled: cairo::SolidPattern, // cells known to be filled
    empty: cairo::SolidPattern, // cells known to be empty
    error: cairo::SolidPattern, // any other cell state
}
impl Palette {
fn new () -> Palette {
Palette {
background: Self::from_rgb (0.85, 0.85, 0.85),
lines: Self::from_rgb (0.00, 0.00, 0.00),
clue_text: Self::from_rgb (0.00, 0.00, 0.00),
clue_box: Self::from_rgb (0.85, 0.85, 0.85),
unknown: Self::from_rgb (0.70, 0.70, 0.70),
filled: Self::from_rgb (0.10, 0.10, 0.10),
empty: Self::from_rgb (1.00, 1.00, 1.00),
error: Self::from_rgb (0.80, 0.20, 0.20),
}
}
fn from_rgb (red: f64, green: f64, blue: f64) -> cairo::SolidPattern {
cairo::SolidPattern::from_rgb (red, green, blue)
}
}
|
#[cfg(all(unix, not(target_os = "macos")))]
mod common;
#[cfg(all(unix, not(target_os = "macos")))]
mod tests {
    // Label passed to the shared helpers so failures identify the platform.
    const TEST_PLATFORM: &str = "unix";
    use super::common::*;
    use serial_test::serial;
    use webbrowser::Browser;
    // Opening a URL in the default browser should succeed.
    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    #[serial]
    async fn test_open_default() {
        check_browser(Browser::Default, TEST_PLATFORM).await;
    }
    // A default browser should be detectable on this platform.
    #[test]
    fn test_existence_default() {
        assert!(Browser::is_available(), "should have found a browser");
    }
    // Safari is macOS-only, so it must not be detected here.
    #[test]
    fn test_non_existence_safari() {
        assert!(!Browser::Safari.exists(), "should not have found Safari");
    }
    // Local files may be opened by absolute path (non-hardened builds only).
    #[cfg(not(feature = "hardened"))]
    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    #[serial]
    async fn test_local_file_abs_path() {
        check_local_file(Browser::Default, None, |pb| {
            pb.as_os_str().to_string_lossy().into()
        })
        .await;
    }
    // Same as above, but with a path relative to the current directory.
    #[cfg(not(feature = "hardened"))]
    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    #[serial]
    async fn test_local_file_rel_path() {
        let cwd = std::env::current_dir().expect("unable to get current dir");
        check_local_file(Browser::Default, None, |pb| {
            pb.strip_prefix(cwd)
                .expect("strip prefix failed")
                .as_os_str()
                .to_string_lossy()
                .into()
        })
        .await;
    }
    // Same as above, but with a file:// URI.
    #[cfg(not(feature = "hardened"))]
    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    #[serial]
    async fn test_local_file_uri() {
        check_local_file(Browser::Default, None, |pb| {
            url::Url::from_file_path(pb)
                .expect("failed to convert path to url")
                .to_string()
        })
        .await;
    }
    // In hardened mode, non-http(s) URLs must be rejected with InvalidInput.
    #[cfg(feature = "hardened")]
    #[test]
    fn test_hardened_mode() {
        let err = webbrowser::open("file:///etc/passwd")
            .expect_err("expected non-http url to fail in hardened mode");
        assert_eq!(err.kind(), std::io::ErrorKind::InvalidInput);
    }
}
|
//! Transaction fees.
use std::collections::HashMap;
use exonum::crypto::PublicKey;
use exonum::storage::{Fork, Snapshot};
use currency::assets;
use currency::assets::{AssetBundle, MetaAsset, TradeAsset};
use currency::configuration::Configuration;
use currency::error::Error;
use currency::wallet;
use currency::wallet::Wallet;
use currency::Service;
/// For exchange transactions, determines who shall pay the fees.
#[repr(u8)]
#[derive(PartialEq, Eq)]
pub enum FeeStrategy {
    /// Recipient pays.
    Recipient = 1,
    /// Sender pays.
    Sender = 2,
    /// Recipient and sender share paying the fee.
    RecipientAndSender = 3,
    /// Intermediary pays.
    Intermediary = 4,
}
impl FeeStrategy {
    /// Try converting from an u8. To be replaced when the `TryFrom` trait
    /// is stabilised.
    ///
    /// Returns `None` for any value outside `1..=4`.
    pub fn try_from(value: u8) -> Option<Self> {
        let strategy = match value {
            1 => FeeStrategy::Recipient,
            2 => FeeStrategy::Sender,
            3 => FeeStrategy::RecipientAndSender,
            4 => FeeStrategy::Intermediary,
            _ => return None,
        };
        Some(strategy)
    }
}
/// Transaction fees.
///
/// Maps each third-party wallet's public key to the total fee it is owed.
pub struct ThirdPartyFees(pub HashMap<PublicKey, u64>);
impl ThirdPartyFees {
    /// Create `ThirdPartyFees` for an `add_assets` transaction.
    ///
    /// Charges `amount * add_assets_per_entry` per asset, all payable to
    /// the genesis wallet.
    pub fn new_add_assets<S, I>(view: S, assets: I) -> Result<ThirdPartyFees, Error>
    where
        S: AsRef<Snapshot>,
        I: IntoIterator<Item = MetaAsset>,
    {
        let fees_config = Configuration::extract(view.as_ref()).fees();
        let per_asset = fees_config.add_assets_per_entry();
        let assets_fee = assets
            .into_iter()
            .map(|meta| meta.amount() * per_asset)
            .sum();
        // Single-entry map: the whole fee goes to the genesis wallet.
        let to_third_party = Some((Service::genesis_wallet(view), assets_fee))
            .into_iter()
            .collect();
        let fees = ThirdPartyFees(to_third_party);
        Ok(fees)
    }
    /// Create `ThirdPartyFees` for an `delete_assets` transaction.
    ///
    /// Deleting assets incurs no third-party fees: the map is empty.
    pub fn new_delete_assets<S, I>(_view: S, _assets: I) -> Result<ThirdPartyFees, Error>
    where
        S: AsRef<Snapshot>,
        I: IntoIterator<Item = AssetBundle>,
    {
        let to_third_party = HashMap::new();
        let fees = ThirdPartyFees(to_third_party);
        Ok(fees)
    }
    /// Create `ThirdPartyFees` for `trade` transactions.
    ///
    /// Each asset's creator is owed `trade_fee(price) * amount`; fees for
    /// the same creator are accumulated into one entry.
    pub fn new_trade<'a, S, I>(view: S, assets: I) -> Result<ThirdPartyFees, Error>
    where
        S: AsRef<Snapshot>,
        I: IntoIterator<Item = &'a TradeAsset>,
        <I as IntoIterator>::IntoIter: Clone,
    {
        let view = view.as_ref();
        let assets = assets.into_iter();
        let mut to_third_party = HashMap::new();
        for asset in assets {
            // Unknown assets abort the whole fee computation.
            let info = assets::Schema(view)
                .fetch(&asset.id())
                .ok_or_else(|| Error::AssetNotFound)?;
            let fee = info.fees().trade().for_price(asset.price()) * asset.amount();
            to_third_party
                .entry(*info.creator())
                .and_modify(|prev_fee| {
                    *prev_fee += fee;
                })
                .or_insert(fee);
        }
        let fees = ThirdPartyFees(to_third_party);
        Ok(fees)
    }
    /// Create `ThirdPartyFees` for `exchange` transactions.
    ///
    /// Each asset's creator is owed its fixed exchange fee times the amount.
    pub fn new_exchange<S, I>(view: S, assets: I) -> Result<Self, Error>
    where
        S: AsRef<Snapshot>,
        I: IntoIterator<Item = AssetBundle>,
    {
        let view = view.as_ref();
        let mut to_third_party = HashMap::new();
        for asset in assets {
            let info = assets::Schema(view)
                .fetch(&asset.id())
                .ok_or_else(|| Error::AssetNotFound)?;
            let fee = info.fees().exchange().fixed() * asset.amount();
            to_third_party
                .entry(*info.creator())
                .and_modify(|prev_fee| {
                    *prev_fee += fee;
                })
                .or_insert(fee);
        }
        let fees = ThirdPartyFees(to_third_party);
        Ok(fees)
    }
    /// Create `ThirdPartyFees` for `transfer` transactions.
    ///
    /// Each asset's creator is owed its fixed transfer fee times the amount.
    pub fn new_transfer<S, I>(view: S, assets: I) -> Result<Self, Error>
    where
        S: AsRef<Snapshot>,
        I: IntoIterator<Item = AssetBundle>,
    {
        let view = view.as_ref();
        let mut to_third_party = HashMap::new();
        for asset in assets {
            let info = assets::Schema(view)
                .fetch(&asset.id())
                .ok_or_else(|| Error::AssetNotFound)?;
            let fee = info.fees().transfer().fixed() * asset.amount();
            to_third_party
                .entry(*info.creator())
                .and_modify(|prev_fee| {
                    *prev_fee += fee;
                })
                .or_insert(fee);
        }
        let fees = ThirdPartyFees(to_third_party);
        Ok(fees)
    }
    /// Total amount that needs to be paid to third party wallets.
    pub fn total(&self) -> u64 {
        self.0.values().sum()
    }
    /// Sum of the fees owed to every wallet *except* `pub_key`.
    ///
    /// NOTE(review): despite the name, the given wallet is excluded —
    /// presumably because a payer never owes a fee to itself. Confirm.
    pub fn total_for_wallet(&self, pub_key: &PublicKey) -> u64 {
        self.0
            .iter()
            .filter_map(|(key, fee)| if key != pub_key { Some(fee) } else { None })
            .sum()
    }
    /// Add a new fee to the list of third party payments.
    pub fn add_fee(&mut self, key: &PublicKey, fee: u64) {
        self.0
            .entry(*key)
            .and_modify(|prev_fee| {
                *prev_fee += fee;
            })
            .or_insert(fee);
    }
    /// Collect fees to third party wallets.
    ///
    /// Returns a list of wallets modified by fee withdrawal.
    /// This list must usually not be committed or discarded before
    /// the transaction has otherwise successfully executed.
    ///
    /// # Errors
    /// Returns `InsufficientFunds` if the payer is unable to pay the fees.
    pub fn collect(
        &self,
        view: &Fork,
        payer_key: &PublicKey,
    ) -> Result<HashMap<PublicKey, Wallet>, Error> {
        let mut payer = wallet::Schema(&*view).fetch(&payer_key);
        // Any fee addressed to the payer itself is skipped (no self-payment).
        let mut updated_wallets = self
            .0
            .iter()
            .filter(|&(key, _)| key != payer_key)
            .map(|(key, fee)| {
                let mut wallet = wallet::Schema(&*view).fetch(key);
                wallet::move_coins(&mut payer, &mut wallet, *fee)?;
                Ok((*key, wallet))
            })
            .collect::<Result<HashMap<_, _>, _>>()?;
        // Include the payer so its debited balance is also reported.
        updated_wallets.entry(*payer_key).or_insert(payer);
        Ok(updated_wallets)
    }
    /// Split fees to third party wallets between two payers.
    pub fn collect2(
        &self,
        view: &mut Fork,
        payer_key_1: &PublicKey,
        payer_key_2: &PublicKey,
    ) -> Result<HashMap<PublicKey, Wallet>, Error> {
        let mut payer_1 = wallet::Schema(&*view).fetch(&payer_key_1);
        let mut payer_2 = wallet::Schema(&*view).fetch(&payer_key_2);
        let mut to_third_party = self.0.clone();
        // If a payer is itself owed a fee, the other payer covers half of
        // it directly and the entry is removed from the shared map.
        if let Some(fee) = to_third_party.remove(payer_key_1) {
            wallet::move_coins(&mut payer_2, &mut payer_1, fee / 2)?;
        }
        if let Some(fee) = to_third_party.remove(payer_key_2) {
            wallet::move_coins(&mut payer_1, &mut payer_2, fee / 2)?;
        }
        // NOTE(review): `fee / 2` truncates, so an odd fee pays one unit
        // less in total than `fee` — confirm this rounding is intended.
        let mut updated_wallets = to_third_party
            .iter()
            .map(|(key, fee)| {
                let mut wallet = wallet::Schema(&*view).fetch(&key);
                wallet::move_coins(&mut payer_1, &mut wallet, fee / 2)?;
                wallet::move_coins(&mut payer_2, &mut wallet, fee / 2)?;
                Ok((*key, wallet))
            })
            .collect::<Result<HashMap<_, _>, _>>()?;
        updated_wallets.insert(*payer_key_1, payer_1);
        updated_wallets.insert(*payer_key_2, payer_2);
        Ok(updated_wallets)
    }
}
/// Implemented by transactions that can report their per-wallet fee map.
pub trait FeesCalculator {
    /// Compute the fees this transaction would owe, keyed by recipient wallet.
    fn calculate_fees(&self, view: &mut Fork) -> Result<HashMap<PublicKey, u64>, Error>;
}
|
#![feature(inclusive_range_syntax)]
/// Score one byte for English-likeness. Returns `None` for bytes that
/// disqualify a candidate plaintext entirely: non-ASCII bytes and control
/// characters other than newline.
fn score_byte(c: u8) -> Option<u64> {
    // Reject disqualifying bytes up front (lowercasing below never moves a
    // byte into or out of these ranges, so checking the raw byte is safe).
    if c >= 128 || (c < b' ' && c != b'\n') {
        return None;
    }
    // Fold uppercase ASCII letters onto lowercase so both score the same.
    let lowered = match c {
        b'A'..=b'Z' => c - b'A' + b'a',
        other => other,
    };
    // Rough English letter-frequency weights; anything else scores zero.
    match lowered {
        b'e' => Some(5),
        b'a' | b't' => Some(4),
        b'h' | b'i' | b'n' | b'o' | b'r' | b's' | b' ' => Some(3),
        b'b' | b'c' | b'd' | b'f' | b'g' | b'k' | b'l' | b'm' | b'p' | b'u'
        | b'v' | b'w' | b'y' => Some(2),
        b'j' | b'q' | b'x' | b'z' => Some(1),
        _ => Some(0),
    }
}
/// Every uppercase ASCII letter must score the same as its lowercase form.
/// (`..=` replaces the old unstable `...` inclusive-range syntax, which no
/// longer compiles on modern rustc.)
#[test]
fn test_score_byte_uppercase() {
    for c in b'A'..=b'Z' {
        assert_eq!(score_byte(c), score_byte(c - b'A' + b'a'));
    }
}
/// Score a candidate plaintext: the sum of its per-byte scores, or 0 if
/// any byte disqualifies the text (see `score_byte`).
pub fn score(bytes: &[u8]) -> u64 {
    let mut total = 0;
    for &b in bytes {
        match score_byte(b) {
            Some(s) => total += s,
            // One disqualifying byte zeroes the whole candidate.
            None => return 0,
        }
    }
    total
}
pub fn find_key(bytes: &[u8]) -> u8 {
(0...255).max_by_key(|key| {
let tmp: Vec<u8> = bytes.iter().map(|x| {x ^ key}).collect();
score(&tmp)
}).unwrap()
}
#[cfg(test)]
mod tests {
    use super::*;
    // "ETAOIN SHRDLU" scores 40 under the letter weights in score_byte.
    #[test]
    fn test_score() {
        let bytes = b"ETAOIN SHRDLU";
        assert_eq!(40, score(bytes));
    }
    // XOR-encrypt a known plaintext and check the key is recovered.
    #[test]
    fn find_key_test() {
        let bytes = b"YELLOW SUBMARINE!!";
        let key = 42;
        let tmp: Vec<u8> = bytes.iter().map(|x| x ^ key).collect();
        assert_eq!(find_key(&tmp), key);
    }
}
|
use std::env;
use std::fs;
/// Look up the key at column `x`, row `y` of the diamond-shaped keypad;
/// '0' marks positions outside the pad. Panics if either coordinate is
/// outside 0..=4 — callers guard the bounds before moving.
fn elem(x : i32, y : i32) -> char
{
    // `const` so the table is not rebuilt on every call.
    const MAP: [[char; 5]; 5] = [['0', '0', '1', '0', '0'],
                                 ['0', '2', '3', '4', '0'],
                                 ['5', '6', '7', '8', '9'],
                                 ['0', 'A', 'B', 'C', '0'],
                                 ['0', '0', 'D', '0', '0']];
    // Row-major lookup: y selects the row, x the column.
    MAP[y as usize][x as usize]
}
fn main()
{
    let args : Vec<String> = env::args().collect();
    // Require exactly one argument (the input file). The old check
    // (`args.len() > 2`) let a missing argument fall through and panic on
    // `args[1]` below.
    if args.len() != 2 {
        return;
    }
    // Start on the '5' key: left-most cell of the middle row.
    let mut x : i32 = 0;
    let mut y : i32 = 2;
    for line in fs::read_to_string(&args[1]).unwrap().split('\n') {
        if line == "" {
            continue;
        }
        // Apply each move, refusing to step onto '0' (off-pad) cells or
        // past the 5x5 bounds.
        for instruction in line.trim().chars() {
            match instruction {
                'U' => if y > 0 && elem(x, y-1) != '0' { y -= 1 },
                'R' => if x < 4 && elem(x+1, y) != '0' { x += 1 },
                'D' => if y < 4 && elem(x, y+1) != '0' { y += 1 },
                'L' => if x > 0 && elem(x-1, y) != '0' { x -= 1 },
                _ => {}
            }
        }
        // One keypress per input line.
        print!("{}", elem(x, y));
    }
    println!("");
}
|
// TODO: empty placeholder — this test asserts nothing yet.
#[test]
fn test_parse_pull_request_response() {}
|
//! Error facilities
use std::fmt;
/// Core error type for all errors possible from tame-gcs
#[derive(thiserror::Error, Debug, PartialEq)]
pub enum Error {
    #[error("Expected {min}-{max} characters, found {len}")]
    InvalidCharacterCount { len: usize, min: usize, max: usize },
    #[error("Expected {min}-{max} bytes, found {len}")]
    InvalidLength { len: usize, min: usize, max: usize },
    #[error("Character '{1}' @ {0} is not allowed")]
    InvalidCharacter(usize, char),
    #[error("Prefix {0} is not allowed")]
    InvalidPrefix(&'static str),
    #[error("Sequence {0} is not allowed")]
    InvalidSequence(&'static str),
    #[error("Failed to parse URI")]
    // NOTE(review): unlike the other wrapped errors, this payload is not
    // marked #[source]; confirm whether error-chaining was intended here.
    InvalidUri(UriError),
    #[error("Header value is invalid")]
    InvalidHeaderValue,
    #[error("HTTP error")]
    Http(#[source] HttpError),
    #[error("HTTP status")]
    HttpStatus(#[source] HttpStatusError),
    #[error("An HTTP response didn't have a valid {0}")]
    UnknownHeader(http::header::HeaderName),
    #[error("GCS API error")]
    Api(#[source] ApiError),
    #[error("JSON error")]
    Json(#[source] JsonError),
    #[error("Response body doesn't contain enough data")]
    InsufficientData,
    #[error("Key rejected: {0}")]
    KeyRejected(String),
    #[error("An error occurred during signing")]
    Signing,
    #[error("An expiration duration was too long: requested = {requested}, max = {max}")]
    TooLongExpiration { requested: u64, max: u64 },
    #[error("Failed to parse url")]
    UrlParse(#[source] url::ParseError),
    #[error("Unable to stringize or parse header value '{0:?}'")]
    OpaqueHeaderValue(http::header::HeaderValue),
    #[error("I/O error occurred")]
    Io(#[source] IoError),
    #[error("Unable to decode base64")]
    Base64Decode(#[source] base64::DecodeError),
    #[error("Unable to encode url")]
    UrlEncode(#[source] serde_urlencoded::ser::Error),
}
/// Wrapper for `http::Error`, which does not implement `PartialEq` itself.
#[derive(Debug, thiserror::Error)]
pub struct HttpError(#[source] pub http::Error);

impl fmt::Display for HttpError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl PartialEq for HttpError {
    fn eq(&self, other: &Self) -> bool {
        // `http::Error` exposes no fields and no `PartialEq`; compare the
        // rendered messages — the same workaround `UriError` uses. Previously
        // this returned `true` unconditionally, so any two HTTP errors
        // (with completely different causes) compared equal.
        self.0.to_string() == other.0.to_string()
    }
}

impl From<http::Error> for Error {
    fn from(e: http::Error) -> Self {
        Self::Http(HttpError(e))
    }
}
/// Wrapper for a plain `http::StatusCode` so a non-success status can be
/// surfaced as an error value.
#[derive(Debug, thiserror::Error)]
pub struct HttpStatusError(pub http::StatusCode);

impl fmt::Display for HttpStatusError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl PartialEq for HttpStatusError {
    // Two wrappers are equal exactly when the status codes match.
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}

impl From<http::StatusCode> for Error {
    fn from(code: http::StatusCode) -> Self {
        Self::HttpStatus(HttpStatusError(code))
    }
}
impl From<http::header::InvalidHeaderValue> for Error {
    // The source error is discarded: the target variant carries no payload.
    fn from(_: http::header::InvalidHeaderValue) -> Self {
        Self::InvalidHeaderValue
    }
}
/// Wrapper for `std::io::Error` that supplies the `PartialEq` the inner
/// type lacks (comparison is by `ErrorKind`).
#[derive(Debug, thiserror::Error)]
pub struct IoError(#[source] pub std::io::Error);

impl fmt::Display for IoError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl PartialEq for IoError {
    // Two I/O errors are considered equal when their kinds match; the
    // message/payload is deliberately ignored.
    fn eq(&self, other: &Self) -> bool {
        self.0.kind() == other.0.kind()
    }
}

impl From<std::io::Error> for Error {
    fn from(err: std::io::Error) -> Self {
        Self::Io(IoError(err))
    }
}
#[derive(Debug, thiserror::Error)]
pub struct JsonError(#[source] pub serde_json::Error);
impl fmt::Display for JsonError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl PartialEq for JsonError {
fn eq(&self, other: &Self) -> bool {
self.0.classify() == other.0.classify()
}
}
impl From<serde_json::Error> for Error {
fn from(e: serde_json::Error) -> Self {
Self::Json(JsonError(e))
}
}
impl From<serde_urlencoded::ser::Error> for Error {
fn from(e: serde_urlencoded::ser::Error) -> Self {
Self::UrlEncode(e)
}
}
/// Wrapper for `http::uri::InvalidUri` that supplies the `PartialEq` the
/// inner type lacks.
#[derive(Debug, thiserror::Error)]
pub struct UriError(#[source] http::uri::InvalidUri);

impl fmt::Display for UriError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl PartialEq for UriError {
    // `InvalidUri` keeps all of its details private and implements no
    // `PartialEq`, so the rendered messages are compared instead.
    fn eq(&self, other: &Self) -> bool {
        self.0.to_string() == other.0.to_string()
    }
}

impl From<http::uri::InvalidUri> for Error {
    fn from(err: http::uri::InvalidUri) -> Self {
        Self::InvalidUri(UriError(err))
    }
}
#[derive(Debug, PartialEq, Deserialize)]
pub struct ApiErrorInner {
pub domain: Option<String>,
pub reason: Option<String>,
pub message: Option<String>,
}
#[derive(Debug, thiserror::Error, PartialEq, Deserialize)]
pub struct ApiError {
pub code: u16,
pub message: String,
pub errors: Vec<ApiErrorInner>,
}
impl fmt::Display for ApiError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:#?}", self)
}
}
// Conversions from `ring` errors; compiled only with the `signing` feature.
#[cfg(feature = "signing")]
impl From<ring::error::KeyRejected> for Error {
    fn from(re: ring::error::KeyRejected) -> Self {
        // Keep only the rendered message so `Error` can remain `PartialEq`.
        Self::KeyRejected(format!("{}", re))
    }
}
#[cfg(feature = "signing")]
impl From<ring::error::Unspecified> for Error {
    fn from(_re: ring::error::Unspecified) -> Self {
        // No payload is carried over; map to the unit `Signing` variant.
        Self::Signing
    }
}
|
// #![feature(rustc_private)]
#[macro_use]
extern crate actix_web;
mod badge_routes;
mod utils;
use actix_files::Files;
use actix_web::{
http::{header, StatusCode},
middleware, web, App, HttpResponse, HttpServer, Responder,
};
use badgeland::Badge;
use dotenv::dotenv;
use env_logger::Env;
use listenfd::ListenFd;
use std::{env, io};
/// `GET /` — 307-redirects the site root to the badgeland GitHub repository.
#[get("/")]
async fn index() -> impl Responder {
    HttpResponse::build(StatusCode::TEMPORARY_REDIRECT)
        .header(header::LOCATION, "https://github.com/msuntharesan/badgeland")
        .finish()
}
/// Fallback handler for unmatched routes: responds 404 with a red
/// "Error / 404" badge rendered as SVG.
async fn default_404() -> impl Responder {
    let mut not_found = Badge::new();
    not_found.subject("Error").color("red".parse().unwrap());
    let svg = not_found.text("404").to_string();
    HttpResponse::NotFound()
        .content_type("image/svg+xml")
        .body(svg)
}
/// Boots the badge server: loads `.env`, configures logging from `RUST_LOG`,
/// then serves either on an inherited listen fd (dev reload flow) or on
/// `0.0.0.0:$PORT` (default 3000).
#[actix_web::main]
async fn main() -> io::Result<()> {
    // `.ok()`: a missing .env file is fine (e.g. in production).
    dotenv().ok();
    let env = Env::new().filter("RUST_LOG");
    env_logger::init_from_env(env);
    let mut server = HttpServer::new(move || {
        App::new()
            .wrap(middleware::Logger::new("%a %r %s %Dms %b %{Referer}i %{User-Agent}i"))
            .wrap(middleware::NormalizePath::default())
            .default_service(web::route().to(default_404))
            .service(index)
            .configure(badge_routes::config)
            .service(
                web::scope("/")
                    .wrap(
                        middleware::DefaultHeaders::new()
                            // NOTE(review): 60 * 24 * 100 = 144_000 seconds (~1.7 days).
                            // If "100 days" was intended this is missing a factor of 60;
                            // confirm before changing the cached value.
                            .header("Cache-Control", format!("public, max-age={}", 60 * 24 * 100)),
                    )
                    .service(Files::new("/", format!("{}/static/", env!("CARGO_MANIFEST_DIR"))).prefer_utf8(true)),
            )
    });
    // Prefer a socket passed in via LISTEN_FDS (systemfd/listenfd dev workflow);
    // otherwise bind a fresh one.
    server = if let Some(l) = ListenFd::from_env().take_tcp_listener(0)? {
        server.listen(l)?
    } else {
        // `unwrap_or_else`: don't allocate the default string unless PORT is unset.
        let port = env::var("PORT").unwrap_or_else(|_| "3000".into());
        let addr = format!("0.0.0.0:{}", port);
        server.bind(addr)?
    };
    server.run().await
}
|
// Memory layout of the SysTick peripheral's register file (offsets 0x00-0x0c).
// NOTE(review): the doc links below indicate this is svd2rust-generated
// output — prefer regenerating from the SVD over hand-editing.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - SysTick control and status register"]
    pub csr: CSR,
    #[doc = "0x04 - SysTick reload value register"]
    pub rvr: RVR,
    #[doc = "0x08 - SysTick current value register"]
    pub cvr: CVR,
    #[doc = "0x0c - SysTick calibration value register"]
    pub calib: CALIB,
}
// Accessor type aliases and per-register field modules for each SysTick
// register declared in `RegisterBlock` above (generated code).
#[doc = "CSR (rw) register accessor: SysTick control and status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`csr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`csr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`csr`]
module"]
pub type CSR = crate::Reg<csr::CSR_SPEC>;
#[doc = "SysTick control and status register"]
pub mod csr;
#[doc = "RVR (rw) register accessor: SysTick reload value register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rvr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rvr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rvr`]
module"]
pub type RVR = crate::Reg<rvr::RVR_SPEC>;
#[doc = "SysTick reload value register"]
pub mod rvr;
#[doc = "CVR (rw) register accessor: SysTick current value register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cvr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cvr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cvr`]
module"]
pub type CVR = crate::Reg<cvr::CVR_SPEC>;
#[doc = "SysTick current value register"]
pub mod cvr;
#[doc = "CALIB (rw) register accessor: SysTick calibration value register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`calib::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`calib::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`calib`]
module"]
pub type CALIB = crate::Reg<calib::CALIB_SPEC>;
#[doc = "SysTick calibration value register"]
pub mod calib;
|
#![feature(test)]
extern crate scalable_bloom_filter;
extern crate test;
use scalable_bloom_filter::ScalableBloomFilter;
use test::Bencher;
#[bench]
fn insert_n1000_p01(b: &mut Bencher) {
let mut filter = ScalableBloomFilter::new(1000, 0.1);
let mut i = 0;
b.iter(|| {
filter.insert(&i);
i += 1;
});
}
#[bench]
fn insert_n1000_p001(b: &mut Bencher) {
let mut filter = ScalableBloomFilter::new(1000, 0.01);
let mut i = 0;
b.iter(|| {
filter.insert(&i);
i += 1;
});
}
#[bench]
fn insert_n1000_p0001(b: &mut Bencher) {
let mut filter = ScalableBloomFilter::new(1000, 0.001);
let mut i = 0;
b.iter(|| {
filter.insert(&i);
i += 1;
});
}
#[bench]
fn insert_n10000_p01(b: &mut Bencher) {
let mut filter = ScalableBloomFilter::new(10_000, 0.1);
let mut i = 0;
b.iter(|| {
filter.insert(&i);
i += 1;
});
}
#[bench]
fn insert_n10000_p001(b: &mut Bencher) {
let mut filter = ScalableBloomFilter::new(10_000, 0.01);
let mut i = 0;
b.iter(|| {
filter.insert(&i);
i += 1;
});
}
#[bench]
fn insert_n10000_p0001(b: &mut Bencher) {
let mut filter = ScalableBloomFilter::new(10_000, 0.001);
let mut i = 0;
b.iter(|| {
filter.insert(&i);
i += 1;
});
}
#[bench]
fn insert_n100000_p01(b: &mut Bencher) {
let mut filter = ScalableBloomFilter::new(100_000, 0.1);
let mut i = 0;
b.iter(|| {
filter.insert(&i);
i += 1;
});
}
#[bench]
fn insert_n100000_p001(b: &mut Bencher) {
let mut filter = ScalableBloomFilter::new(100_000, 0.01);
let mut i = 0;
b.iter(|| {
filter.insert(&i);
i += 1;
});
}
#[bench]
fn insert_n100000_p0001(b: &mut Bencher) {
let mut filter = ScalableBloomFilter::new(100_000, 0.001);
let mut i = 0;
b.iter(|| {
filter.insert(&i);
i += 1;
});
}
|
extern crate native;
extern crate termkey;
// Entry shim for the (pre-1.0) `native` runtime: forwards argc/argv to `main`.
// NOTE(review): `#[start]`, `int` and the `native` crate are pre-1.0 Rust —
// this file only builds on an era-appropriate compiler.
#[start]
fn start(argc: int, argv: *const *const u8) -> int {
    native::start(argc, argv, main)
}
// Interactive termkey demo (pre-1.0 Rust dialect: `0i` literals, old enum
// matching): prints every key event until EOF or Ctrl-C is received.
fn main()
{
    let mouse = 0i; // TODO parse arg -m, default 1000
    let mouse_proto = 0i; // TODO parse arg -p (no default)
    let format = termkey::c::TERMKEY_FORMAT_VIM;
    // fd 0 = stdin; report space as a symbol and deliver Ctrl-C as a key event.
    let mut tk = termkey::TermKey::new(0, termkey::c::TERMKEY_FLAG_SPACESYMBOL|termkey::c::TERMKEY_FLAG_CTRLC);
    if !(tk.get_flags() & termkey::c::TERMKEY_FLAG_UTF8).is_empty()
    {
        println!("Termkey in UTF-8 mode")
    }
    if !(tk.get_flags() & termkey::c::TERMKEY_FLAG_RAW).is_empty()
    {
        println!("Termkey in RAW mode")
    }
    // Escape sequences toggle terminal mouse reporting; dead code while
    // `mouse` is hard-coded to 0 (see TODO above).
    if mouse != 0
    {
        println!("\x1b[?{}hMouse mode active", mouse);
        if mouse_proto != 0
        {
            println!("\x1b[?{}h", mouse_proto);
        }
    }
    loop
    {
        match tk.waitkey()
        {
            termkey::Eof => break,
            termkey::Key(key) =>
            {
                let s = tk.strfkey(key, format);
                println!("Key {}", s);
                // First pass over the event: describe the special event kinds.
                match key
                {
                    termkey::MouseEvent{mods: _, ev: _, button: _, line, col} =>
                    {
                        println!("Mouse (printing unimplemented, sorry) at line={}, col={}\n", line, col)
                    }
                    termkey::PositionEvent{line, col} =>
                    {
                        println!("Cursor position report at line={}, col={}\n", line, col)
                    }
                    termkey::ModeReportEvent{initial, mode, value} =>
                    {
                        let initial_str = if initial != 0 { "DEC" } else { "ANSI" };
                        println!("Mode report {} mode {} = {}\n", initial_str, mode, value)
                    }
                    termkey::UnknownCsiEvent =>
                    {
                        println!("Unrecognised CSI (printing unimplemented, sorry)\n")
                    }
                    _ => {}
                }
                // Second pass: react to plain character input (quit / probe).
                match key
                {
                    termkey::UnicodeEvent{mods, codepoint, utf8: _} =>
                    {
                        // Ctrl-C (either case) exits the event loop.
                        if !(mods & termkey::c::TERMKEY_KEYMOD_CTRL).is_empty() && (codepoint == 'C' || codepoint == 'c')
                        {
                            break;
                        }
                        // A bare '?' asks the terminal to report a mode.
                        if mods.is_empty() && codepoint == '?'
                        {
                            // println!("\x1b[?6n"); // DECDSR 6 == request cursor position
                            println!("\x1b[?1$p"); // DECRQM == request mode, DEC origin mode
                        }
                    }
                    _ => {}
                }
            }
            termkey::Error{errno: _} =>
            {
                println!("Error of some sort")
            }
            _ => { panic!() }
        }
    }
    // Undo the mouse-reporting escape sequence emitted on entry, if any.
    if mouse != 0
    {
        println!("\x1b[?{}lMouse mode deactivated", mouse)
    }
}
|
std_prelude!();
use crate::prelude::*;
#[test]
fn true_accept() {
    use crate::preds::True;
    // `True` must accept the unit value both by value and by reference.
    let owned = True::pet(()).expect("Must be Some(..)");
    assert_eq!(owned.value(), ());
    let borrowed = True::petref(&()).expect("Must be Some(..)");
    assert_eq!(borrowed.value(), &());
}
#[test]
fn false_reject() {
    use crate::preds::False;
    // `False` must reject every value, by value and by reference alike.
    assert!(False::pet(()).is_none());
    assert!(False::petref(&()).is_none());
}
#[cfg(feature = "num-integer")]
#[test]
fn even_accept() {
    use crate::preds::Even;
    // Every even number in 0..=20 must pass, by value and by reference.
    for x in (0..=20usize).step_by(2) {
        assert_eq!(Even::pet(x).expect("Must be Some(..)").value(), x);
        assert_eq!(Even::petref(&x).expect("Must be Some(..)").value(), &x);
    }
}
#[cfg(feature = "num-integer")]
#[test]
fn even_reject() {
    use crate::preds::Even;
    // Every odd number in 1..=21 must be rejected in both calling forms.
    for x in (1..=21usize).step_by(2) {
        assert_eq!(Even::pet(x), None);
        assert_eq!(Even::petref(&x), None);
    }
}
#[cfg(feature = "num-integer")]
#[test]
fn odd_accept() {
    use crate::preds::Odd;
    // Every odd number in 1..=21 must pass, by value and by reference.
    for x in (1..=21usize).step_by(2) {
        assert_eq!(Odd::pet(x).expect("Must be Some(..)").value(), x);
        assert_eq!(Odd::petref(&x).expect("Must be Some(..)").value(), &x);
    }
}
#[cfg(feature = "num-integer")]
#[test]
fn odd_reject() {
    use crate::preds::Odd;
    // Every even number in 0..=20 must be rejected in both calling forms.
    for x in (0..=20usize).step_by(2) {
        assert_eq!(Odd::pet(x), None);
        assert_eq!(Odd::petref(&x), None);
    }
}
|
mod graphql;
mod ops;
use crate::infrastructure::{config, graphql as gql, repositories};
use actix_web::{dev::Server, http, middleware, web, App, HttpServer};
/// Assembles the actix-web application (CORS, compression, logging, health
/// check, GraphQL routes) and starts listening on `listener`. Returns the
/// `Server` handle without awaiting it, so the caller decides when to drive it.
pub fn run(
    listener: std::net::TcpListener,
    config: config::Settings,
    db_pool: repositories::PostgresPool,
) -> std::result::Result<Server, std::io::Error> {
    // Shared state is wrapped in `web::Data` so the factory closure below can
    // hand each worker its own clone.
    let config = web::Data::new(config);
    let db_pool = web::Data::new(db_pool);
    let schema = web::Data::new(gql::create_schema());
    let server = HttpServer::new(move || {
        App::new()
            .app_data(db_pool.clone())
            .app_data(config.clone())
            .app_data(schema.clone())
            .wrap(middleware::Compress::default())
            .wrap(middleware::Logger::default())
            .wrap(
                // NOTE(review): combining `allow_any_origin` with
                // `supports_credentials` is rejected by some actix-cors
                // versions (wildcard origin + credentials) — verify at runtime.
                actix_cors::Cors::default()
                    .allowed_methods(vec!["GET", "POST"])
                    .allowed_headers(vec![
                        http::header::AUTHORIZATION,
                        http::header::ACCEPT,
                        http::header::CONTENT_TYPE,
                        http::header::ORIGIN,
                    ])
                    .max_age(3600)
                    .supports_credentials()
                    .allow_any_origin(),
            )
            .wrap(middleware::DefaultHeaders::default())
            .route("/health_check", web::get().to(ops::health_check))
            .service(
                web::resource("/graphql")
                    .route(web::post().to(graphql::handler))
                    // FIXME: This route shouldn't be exposed on production
                    .route(web::get().to(graphql::graphiql)),
            )
    })
    .listen(listener)?
    .run();
    Ok(server)
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.