text stringlengths 8 4.13M |
|---|
extern crate byteorder;
extern crate colored;
extern crate clap;
extern crate ctrlc;
extern crate rand;
extern crate tiny_keccak;
#[macro_use]
mod log;
mod chain;
mod message;
mod network;
mod node;
mod params;
mod parse;
mod prefix;
mod random;
mod section;
mod stats;
use clap::{App, Arg, ArgMatches};
use colored::Colorize;
use network::Network;
use params::Params;
use random::Seed;
use std::cmp;
use std::collections;
use std::collections::hash_map::DefaultHasher;
use std::hash::BuildHasherDefault;
use std::panic;
use std::str::FromStr;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
/// Age of a node in the simulated network (see the `init-age` / `adult-age` options).
type Age = u8;
/// Entry point: parses parameters, runs the simulation loop for the
/// configured number of iterations (or until Ctrl+C), then prints a
/// summary of the network's final state.
fn main() {
    let params = get_params();
    // Colored output is force-disabled when requested or when on Windows.
    if params.disable_colors || cfg!(windows) {
        colored::control::set_override(false);
    }
    let seed = params.seed;
    random::reseed(seed);
    // Print seed on panic.
    let default_hook = panic::take_hook();
    panic::set_hook(Box::new(move |info| {
        default_hook(info);
        println!("{:?}", seed);
    }));
    log::set_verbosity(params.verbosity);
    // Set SIGINT (Ctrl+C) handler.
    let running = Arc::new(AtomicBool::new(true));
    {
        let running = Arc::clone(&running);
        let _ = ctrlc::set_handler(move || { running.store(false, Ordering::Relaxed); });
    }
    let mut network = Network::new(params.clone());
    // Largest (max - min) prefix-length spread seen across all stat dumps.
    let mut max_prefix_len_diff = 0;
    for i in 0..params.num_iterations {
        info!(
            "{}",
            format!("Iteration: {}", format!("{}", i).bold()).green()
        );
        network.tick(i);
        // Periodically dump statistics (every `stats_frequency` iterations).
        if params.stats_frequency > 0 && i % params.stats_frequency == 0 {
            print_tick_stats(&network, &mut max_prefix_len_diff);
        }
        // Stop early once the SIGINT handler has fired.
        if !running.load(Ordering::Relaxed) {
            break;
        }
    }
    println!("\n===== Summary =====");
    println!("\n{:?}\n", params);
    println!("{}", network.stats().summary());
    println!("Age distribution:");
    let age = network.age_distribution();
    println!("{}\n{}", age, age.summary());
    println!("Section size distribution:");
    println!("{}", network.section_size_aggregator());
    println!("Prefix length distribution:");
    println!("{}", network.prefix_len_aggregator());
    // Optionally dump the network structure to the file given via --file.
    if let Some(path) = params.file {
        network.stats().write_to_file(path);
    }
}
/// Builds the clap CLI definition, parses the process arguments and
/// converts them into a `Params` value.
///
/// Panics (via `expect` / `get_number`) when a supplied value cannot be
/// parsed; every numeric option except SEED has a default.
fn get_params() -> Params {
    let matches = App::new("SAFE network simulation")
        .about("Simulates evolution of SAFE network")
        .arg(
            Arg::with_name("SEED")
                .short("S")
                .long("seed")
                .help("Random seed")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("ITERATIONS")
                .short("n")
                .long("iterations")
                .help("Number of simulation iterations")
                .takes_value(true)
                .default_value("100000"),
        )
        .arg(
            Arg::with_name("GROUP_SIZE")
                .short("g")
                .long("group-size")
                .help("Group size")
                .takes_value(true)
                .default_value("8"),
        )
        .arg(
            Arg::with_name("INIT_AGE")
                .short("i")
                .long("init-age")
                .help("Initial age of newly joining nodes")
                .takes_value(true)
                .default_value("4"),
        )
        .arg(
            Arg::with_name("ADULT_AGE")
                .short("a")
                .long("adult-age")
                .help("Age at which a node becomes adult")
                .takes_value(true)
                .default_value("5"),
        )
        .arg(
            Arg::with_name("MAX_SECTION_SIZE")
                .short("s")
                .long("max-section-size")
                .help(
                    "Maximum section size (number of nodes) before the simulation fails",
                )
                .takes_value(true)
                .default_value("60"),
        )
        .arg(
            Arg::with_name("MAX_RELOCATION_ATTEMPTS")
                .short("r")
                .long("max-relocation-attempts")
                .help("Maximum number of relocation attempts after a Live event")
                .takes_value(true)
                .default_value("25"),
        )
        .arg(
            Arg::with_name("MAX_INFANTS_PER_SECTION")
                .short("I")
                .long("max-infants-per-section")
                .help("Maximum number of infants per section")
                .takes_value(true)
                .default_value("1"),
        )
        .arg(
            Arg::with_name("STATS_FREQUENCY")
                .short("F")
                .long("stats-frequency")
                .help(
                    "how often (every which iteration) to output network statistics",
                )
                .takes_value(true)
                .default_value("10"),
        )
        .arg(
            Arg::with_name("FILE")
                .long("file")
                .short("f")
                .help("Output file for network structure data")
                .takes_value(true),
        )
        .arg(Arg::with_name("VERBOSITY").short("v").multiple(true).help(
            "Log verbosity",
        ))
        .arg(
            Arg::with_name("DISABLE_COLORS")
                .short("C")
                .long("disable-colors")
                .help("Disable colored output"),
        )
        .get_matches();
    // SEED has no default value: fall back to a randomly generated seed.
    let seed = match matches.value_of("SEED") {
        Some(seed) => seed.parse().expect("SEED must be in form `[1, 2, 3, 4]`"),
        None => Seed::random(),
    };
    Params {
        seed,
        num_iterations: get_number(&matches, "ITERATIONS"),
        group_size: get_number(&matches, "GROUP_SIZE"),
        init_age: get_number(&matches, "INIT_AGE"),
        adult_age: get_number(&matches, "ADULT_AGE"),
        max_section_size: get_number(&matches, "MAX_SECTION_SIZE"),
        max_relocation_attempts: get_number(&matches, "MAX_RELOCATION_ATTEMPTS"),
        max_infants_per_section: get_number(&matches, "MAX_INFANTS_PER_SECTION"),
        stats_frequency: get_number(&matches, "STATS_FREQUENCY"),
        file: matches.value_of("FILE").map(String::from),
        // Base verbosity is 1; each `-v` occurrence raises it by one.
        verbosity: matches.occurrences_of("VERBOSITY") as usize + 1,
        disable_colors: matches.is_present("DISABLE_COLORS"),
    }
}
fn print_tick_stats(network: &Network, max_prefix_len_diff: &mut u64) {
let prefix_len_agg = network.prefix_len_aggregator();
*max_prefix_len_diff = cmp::max(
*max_prefix_len_diff,
prefix_len_agg.max - prefix_len_agg.min,
);
println!(
"Header {:?}, AgeDist {:?}, SectionSizeDist {:?}, PrefixLenDist {:?}, MaxPrefixLenDiff: {}",
network.stats().summary(),
network.age_aggregator(),
network.section_size_aggregator(),
prefix_len_agg,
max_prefix_len_diff,
)
}
/// Parses the value of CLI option `name` into a numeric type.
///
/// Every option routed through here carries a clap default value, so
/// `value_of` is always `Some`; a non-numeric value aborts with a
/// descriptive panic.
fn get_number<T: Number>(matches: &ArgMatches, name: &str) -> T {
    matches
        .value_of(name)
        .unwrap()
        .parse()
        .unwrap_or_else(|_| panic!("{} must be a number.", name))
}
/// Marker trait bounding the numeric types that `get_number` may parse
/// CLI options into.
trait Number: FromStr {}
impl Number for u8 {}
impl Number for u64 {}
impl Number for usize {}
// Use these type aliases instead of the default collections to make sure
// we use consistent hashing across runs, to enable deterministic results.
type HashMap<K, V> = collections::HashMap<K, V, BuildHasherDefault<DefaultHasher>>;
type HashSet<T> = collections::HashSet<T, BuildHasherDefault<DefaultHasher>>;
|
use crate::{
grid::config::Entity,
grid::records::{ExactRecords, PeekableRecords, Records, RecordsMut},
settings::{CellOption, TableOption},
};
/// A lambda which formats cell content.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct FormatContent<F> {
    // Formatting callback applied to a cell's text.
    f: F,
    // When `true`, the callback is applied to each line of a cell
    // separately instead of to the whole (possibly multiline) text.
    multiline: bool,
}
impl<F> FormatContent<F> {
    /// Creates a formatter that applies `f` to the whole cell text at
    /// once; see [`FormatContent::multiline`] for per-line application.
    pub(crate) fn new(f: F) -> Self {
        Self {
            f,
            multiline: false,
        }
    }
}
impl<F> FormatContent<F> {
    /// Marks this formatter as multiline.
    ///
    /// When enabled, the formatting callback is applied to every line of a
    /// cell individually rather than to the cell's text as a whole.
    ///
    /// ```rust,no_run
    /// use tabled::{Table, settings::{Format, object::Segment, Modify}};
    ///
    /// let data: Vec<&'static str> = Vec::new();
    /// let table = Table::new(&data)
    ///    .with(Modify::new(Segment::all()).with(Format::content(|s| s.to_string()).multiline()))
    ///    .to_string();
    /// ```
    pub fn multiline(mut self) -> Self {
        self.multiline = true;
        self
    }
}
// Applying the formatter as a table-wide option delegates to the
// `CellOption` implementation with `Entity::Global` (i.e. every cell).
impl<F, R, D, C> TableOption<R, D, C> for FormatContent<F>
where
    F: FnMut(&str) -> String + Clone,
    R: Records + ExactRecords + PeekableRecords + RecordsMut<String>,
{
    fn change(self, records: &mut R, cfg: &mut C, _: &mut D) {
        CellOption::change(self, records, cfg, Entity::Global);
    }
}
impl<F, R, C> CellOption<R, C> for FormatContent<F>
where
    F: FnMut(&str) -> String + Clone,
    R: Records + ExactRecords + PeekableRecords + RecordsMut<String>,
{
    /// Applies the formatting callback to every cell selected by `entity`.
    fn change(mut self, records: &mut R, _: &mut C, entity: Entity) {
        let count_rows = records.count_rows();
        let count_cols = records.count_columns();
        // Build the per-line wrapper ONCE. The previous code cloned the
        // callback for every single cell, which was both wasteful and
        // inconsistent: the multiline path reset callback state per cell
        // while the non-multiline path carried it across cells.
        let mut line_formatter = if self.multiline {
            Some(multiline(self.f.clone()))
        } else {
            None
        };
        for pos in entity.iter(count_rows, count_cols) {
            // `Entity::iter` may yield positions outside the table; skip them.
            let is_valid_pos = pos.0 < count_rows && pos.1 < count_cols;
            if !is_valid_pos {
                continue;
            }
            let content = records.get_text(pos);
            let content = match line_formatter.as_mut() {
                Some(format_lines) => format_lines(content),
                None => (self.f)(content),
            };
            records.set(pos, content);
        }
    }
}
/// Wraps `f` so that it is applied to each line of its input
/// independently, re-joining the per-line results with `\n`.
fn multiline<F: FnMut(&str) -> String>(mut f: F) -> impl FnMut(&str) -> String {
    move |s: &str| {
        s.lines()
            .map(|line| f(line))
            .collect::<Vec<_>>()
            .join("\n")
    }
}
|
use futures::{Stream, StreamExt, TryStreamExt};
use kube::{
api::{Api, DynamicObject, GroupVersionKind, Resource, ResourceExt},
runtime::{metadata_watcher, watcher, watcher::Event, WatchStreamExt},
};
use serde::de::DeserializeOwned;
use tracing::*;
use std::{env, fmt::Debug};
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    tracing_subscriber::fmt::init();
    let client = kube::Client::try_default().await?;

    // When WATCH_METADATA=1 only object metadata is watched.
    let watch_metadata = matches!(env::var("WATCH_METADATA").as_deref(), Ok("1"));

    // Dynamic resource identifiers, overridable through the environment
    // (defaults: core group, "v1", "Pod").
    let group = env::var("GROUP").unwrap_or_default();
    let version = env::var("VERSION").unwrap_or_else(|_| String::from("v1"));
    let kind = env::var("KIND").unwrap_or_else(|_| String::from("Pod"));

    // Resolve the GVK through API discovery to obtain the full ApiResource
    // (including its plural), then build a dynamically-typed Api from it.
    let gvk = GroupVersionKind::gvk(&group, &version, &kind);
    let (ar, _caps) = kube::discovery::pinned_kind(&client, &gvk).await?;
    let api = Api::<DynamicObject>::all_with(client, &ar);
    let wc = watcher::Config::default();

    // Start a metadata or a full resource watch
    if watch_metadata {
        handle_events(metadata_watcher(api, wc)).await
    } else {
        handle_events(watcher(api, wc)).await
    }
}
/// Drains watch events from `stream`, logging each applied object's name
/// (plus its namespace when it has one) at info level and the full object
/// at trace level. Returns when the stream ends or yields an error.
async fn handle_events<K: Resource + Clone + Debug + Send + DeserializeOwned + 'static>(
    stream: impl Stream<Item = watcher::Result<Event<K>>> + Send + 'static,
) -> anyhow::Result<()> {
    // Flatten the event stream into a stream of applied objects.
    let mut items = stream.applied_objects().boxed();
    while let Some(p) = items.try_next().await? {
        match p.namespace() {
            Some(ns) => info!("saw {} in {ns}", p.name_any()),
            None => info!("saw {}", p.name_any()),
        }
        trace!("full obj: {p:?}");
    }
    Ok(())
}
|
/// A single handheld-console instruction (AoC 2020 day 8).
#[derive(Clone)]
enum Instruction {
    /// Add the operand to the accumulator, then advance to the next instruction.
    Acc(isize),
    /// Jump relative to the current instruction pointer.
    Jmp(isize),
    /// Do nothing; the operand is kept so `Nop` can be swapped with `Jmp`.
    Nop(isize),
}
/// Runs `instructions` until the program either terminates normally (the
/// instruction pointer moves past the last instruction) or is about to
/// execute an instruction it has already executed (an infinite loop).
///
/// Returns `(halted_normally, accumulator_value)`.
fn program_halts(instructions: Vec<Instruction>) -> (bool, isize) {
    use std::collections::HashSet;
    let mut accumulator: isize = 0;
    let mut ip: isize = 0;
    let mut instructions_that_ran: HashSet<isize> = HashSet::with_capacity(instructions.len());
    loop {
        // A jump before the start of the program cannot terminate normally.
        // (The previous `ip as usize` cast wrapped a negative ip to a huge
        // value and misreported this case as a normal halt.)
        if ip < 0 {
            return (false, accumulator);
        }
        // Normal termination: ip advanced past the last instruction.
        if (ip as usize) >= instructions.len() {
            return (true, accumulator);
        }
        // Revisiting an instruction means we are stuck in an infinite loop.
        if !instructions_that_ran.insert(ip) {
            return (false, accumulator);
        }
        match instructions[ip as usize] {
            Instruction::Acc(d) => {
                accumulator += d;
                ip += 1;
            }
            Instruction::Jmp(d) => {
                ip += d;
            }
            Instruction::Nop(_) => {
                ip += 1;
            }
        }
    }
}
/// Entry point: parses the puzzle input, then tries flipping each single
/// `Jmp` <-> `Nop` instruction until one modified program halts, printing
/// that program's final accumulator value.
fn main() {
    let input = std::fs::read_to_string("../input.txt").unwrap();
    let instructions: Vec<Instruction> = input
        // `lines()` also handles trailing newlines / CRLF input.
        .lines()
        .filter(|l| !l.is_empty())
        .map(|line| {
            // Each line is "<mnemonic> <signed operand>".
            let mut parts = line.split(' ');
            let inst = parts.next().unwrap();
            let arg: isize = parts.next().unwrap().parse().unwrap();
            match inst {
                "acc" => Instruction::Acc(arg),
                "jmp" => Instruction::Jmp(arg),
                "nop" => Instruction::Nop(arg),
                _ => panic!("unhandled instruction"),
            }
        })
        .collect();
    for (i, instruction) in instructions.iter().enumerate() {
        // acc instructions are never modified; only jmp<->nop swaps are tried.
        let swapped = match instruction {
            Instruction::Acc(_) => continue,
            Instruction::Jmp(d) => Instruction::Nop(*d),
            Instruction::Nop(d) => Instruction::Jmp(*d),
        };
        let mut candidate = instructions.clone();
        candidate[i] = swapped;
        let (halts, acc) = program_halts(candidate);
        if halts {
            println!("{}", acc);
            return;
        }
    }
    println!("no working modifications found?");
}
|
use crate::nes::{Nes, NesIo};
use crate::video::{Color, Point, Video};
use bitflags::bitflags;
use std::cell::Cell;
use std::ops::{Generator, GeneratorState};
use std::pin::Pin;
use std::u8;
#[derive(Clone)]
pub struct Ppu {
    // PPUCTRL register (NMI enable, pattern-table selects, VRAM increment).
    pub ctrl: Cell<PpuCtrlFlags>,
    // PPUMASK register (rendering enable / emphasis bits).
    pub mask: Cell<PpuMaskFlags>,
    // PPUSTATUS register (vblank / sprite flags).
    pub status: Cell<PpuStatusFlags>,
    // OAMADDR: current index into OAM for OAMDATA accesses.
    pub oam_addr: Cell<u8>,
    // Both PPUSCROLL writes packed into one u16 (lo byte = first write).
    pub scroll: Cell<u16>,
    // Current VRAM address as set via PPUADDR.
    pub addr: Cell<u16>,
    // Latch used for writing to PPUSCROLL and PPUADDR (toggles after a write
    // to each, used to determine if the high bit or low bit is being written).
    pub scroll_addr_latch: Cell<bool>,
    // 2 KiB of PPU (nametable) RAM.
    pub ppu_ram: Cell<[u8; 0x0800]>,
    // 256 bytes of sprite attribute memory (64 sprites x 4 bytes).
    pub oam: Cell<[u8; 0x0100]>,
    // 32 bytes of palette RAM.
    pub palette_ram: Cell<[u8; 0x20]>,
    // Internal "cache" indexed by an X coordinate, returning a bitfield
    // that represents the sprites that should be rendered for that
    // X coordinate. For example, a value of 0b_0100_0001 at index 5 means that
    // the sprites at index 0 and 6 should be rendered at pixel 5 of
    // the scanline (since bits 0 and 6 are set)
    scanline_sprite_indices: Cell<[u64; 256]>,
}
impl Ppu {
    /// Creates a powered-on PPU with all registers cleared and all
    /// memories zeroed.
    pub fn new() -> Self {
        Ppu {
            ctrl: Cell::new(PpuCtrlFlags::from_bits_truncate(0x00)),
            mask: Cell::new(PpuMaskFlags::from_bits_truncate(0x00)),
            status: Cell::new(PpuStatusFlags::from_bits_truncate(0x00)),
            oam_addr: Cell::new(0x00),
            scroll: Cell::new(0x0000),
            addr: Cell::new(0x0000),
            scroll_addr_latch: Cell::new(false),
            ppu_ram: Cell::new([0; 0x0800]),
            oam: Cell::new([0; 0x0100]),
            palette_ram: Cell::new([0; 0x20]),
            scanline_sprite_indices: Cell::new([0; 256]),
        }
    }
    /// Views PPU RAM as a slice of per-byte `Cell`s so that individual
    /// bytes can be mutated through a shared `&self` reference.
    pub fn ppu_ram(&self) -> &[Cell<u8>] {
        let ppu_ram: &Cell<[u8]> = &self.ppu_ram;
        ppu_ram.as_slice_of_cells()
    }
    /// Views OAM as a slice of per-byte `Cell`s.
    fn oam(&self) -> &[Cell<u8>] {
        let oam: &Cell<[u8]> = &self.oam;
        oam.as_slice_of_cells()
    }
    /// Views palette RAM as a slice of per-byte `Cell`s.
    pub fn palette_ram(&self) -> &[Cell<u8>] {
        let palette_ram: &Cell<[u8]> = &self.palette_ram;
        palette_ram.as_slice_of_cells()
    }
    /// CPU write to PPUCTRL.
    pub fn set_ppuctrl(&self, value: u8) {
        self.ctrl.set(PpuCtrlFlags::from_bits_truncate(value));
    }
    /// CPU write to PPUMASK.
    pub fn set_ppumask(&self, value: u8) {
        self.mask.set(PpuMaskFlags::from_bits_truncate(value));
    }
    /// CPU write to OAMADDR: sets the OAM index used by OAMDATA accesses.
    pub fn write_oamaddr(&self, value: u8) {
        self.oam_addr.set(value);
    }
    /// CPU write to OAMDATA: stores `value` at the current OAM address,
    /// then advances the address, wrapping around the 256-byte OAM.
    pub fn write_oamdata(&self, value: u8) {
        let oam_addr = self.oam_addr.get();
        let oam = self.oam();
        oam[oam_addr as usize].set(value);
        // `wrapping_add` on a u8 already wraps at 256 == oam.len();
        // the modulo is a belt-and-braces guard.
        let next_oam_addr = oam_addr.wrapping_add(1) as usize % oam.len();
        self.oam_addr.set(next_oam_addr as u8);
    }
    /// CPU write to PPUSCROLL. The shared write latch decides which byte
    /// of `scroll` this write fills: low byte on the first write, high
    /// byte on the second.
    pub fn write_ppuscroll(&self, value: u8) {
        let latch = self.scroll_addr_latch.get();
        if latch {
            // Second write: replace the high byte, keep the low byte.
            let scroll_lo = self.scroll.get() & 0x00FF;
            let scroll_hi = (value as u16) << 8;
            self.scroll.set(scroll_lo | scroll_hi);
        } else {
            // First write: replace the low byte, keep the high byte.
            let scroll_lo = value as u16;
            let scroll_hi = self.scroll.get() & 0xFF00;
            self.scroll.set(scroll_lo | scroll_hi);
        }
        // Flip the latch so the next write targets the other byte.
        self.scroll_addr_latch.set(!latch);
    }
    /// CPU write to PPUADDR. Byte order is the opposite of PPUSCROLL
    /// here: the first write (latch clear) sets the HIGH byte of the VRAM
    /// address, the second write sets the low byte.
    pub fn write_ppuaddr(&self, value: u8) {
        let latch = self.scroll_addr_latch.get();
        if latch {
            // Second write: low byte.
            let addr_lo = value as u16;
            let addr_hi = self.addr.get() & 0xFF00;
            self.addr.set(addr_lo | addr_hi);
        } else {
            // First write: high byte.
            let addr_lo = self.addr.get() & 0x00FF;
            let addr_hi = (value as u16) << 8;
            self.addr.set(addr_lo | addr_hi);
        }
        // Shared latch with PPUSCROLL; flips after every write.
        self.scroll_addr_latch.set(!latch);
    }
pub fn read_ppudata(&self, nes: &Nes<impl NesIo>) -> u8 {
let addr = self.addr.get();
let ctrl = self.ctrl.get();
let stride =
// Add 1 to the PPU address if the I flag is clear, add 32 if
// it is set
match ctrl.contains(PpuCtrlFlags::VRAM_ADDR_INCREMENT) {
false => 1,
true => 32
};
let value = nes.read_ppu_u8(addr);
self.addr.update(|addr| addr.wrapping_add(stride));
value
}
pub fn write_ppudata(&self, nes: &Nes<impl NesIo>, value: u8) {
let addr = self.addr.get();
let ctrl = self.ctrl.get();
let stride =
// Add 1 to the PPU address if the I flag is clear, add 32 if
// it is set
match ctrl.contains(PpuCtrlFlags::VRAM_ADDR_INCREMENT) {
false => 1,
true => 32
};
nes.write_ppu_u8(addr, value);
self.addr.update(|addr| addr.wrapping_add(stride));
}
    /// Returns PPUSTATUS as a raw byte.
    ///
    /// NOTE(review): reading PPUSTATUS on hardware also clears the vblank
    /// flag and the scroll/addr write latch; this getter has no such side
    /// effects — confirm callers handle that elsewhere.
    pub fn ppustatus(&self) -> u8 {
        self.status.get().bits()
    }
    /// Top-level PPU generator: interleaves the sprite-evaluation and
    /// renderer generators so both advance exactly once per PPU cycle.
    ///
    /// Each sub-generator yields `PpuStep::Cycle` to mark the end of its
    /// work for the current cycle; those markers are consumed here and a
    /// single combined `PpuStep::Cycle` is yielded instead. Any other step
    /// (e.g. `Vblank`) is forwarded to the caller unchanged.
    pub fn run<'a>(nes: &'a Nes<impl NesIo>) -> impl Generator<Yield = PpuStep, Return = !> + 'a {
        let mut run_sprite_evaluation = Ppu::run_sprite_evaluation(nes);
        let mut run_renderer = Ppu::run_renderer(nes);
        move || loop {
            // Advance sprite evaluation to the end of this cycle.
            loop {
                match Pin::new(&mut run_sprite_evaluation).resume(()) {
                    GeneratorState::Yielded(PpuStep::Cycle) => {
                        break;
                    }
                    GeneratorState::Yielded(step) => {
                        yield step;
                    }
                }
            }
            // Advance the renderer to the end of this cycle.
            loop {
                match Pin::new(&mut run_renderer).resume(()) {
                    GeneratorState::Yielded(PpuStep::Cycle) => {
                        break;
                    }
                    GeneratorState::Yielded(step) => {
                        yield step;
                    }
                }
            }
            yield PpuStep::Cycle;
        }
    }
    /// Generator performing (simplified) per-scanline sprite evaluation:
    /// for each visible scanline it scans all 64 OAM entries and records,
    /// per X pixel, a 64-bit mask of sprites overlapping that pixel into
    /// `scanline_sprite_indices` for the renderer to consume.
    fn run_sprite_evaluation<'a>(
        nes: &'a Nes<impl NesIo>,
    ) -> impl Generator<Yield = PpuStep, Return = !> + 'a {
        move || loop {
            for frame in 0_u64.. {
                let frame_is_odd = frame % 2 != 0;
                // 262 scanlines per frame: 0-239 visible, rest vblank/pre-render.
                for scanline in 0_u16..=261 {
                    let y = scanline;
                    let should_skip_first_cycle = frame_is_odd && scanline == 0;
                    if !should_skip_first_cycle {
                        // The first cycle of each scanline is idle (except
                        // for the first cycle of the pre-render scanline
                        // for odd frames, which is skipped)
                        yield PpuStep::Cycle;
                    }
                    // TODO: Implement sprite evaluation with secondary OAM!
                    let oam = nes.ppu.oam();
                    for _ in 0_u16..340 {
                        // Here, secondary OAM would be filled with sprite data
                        yield PpuStep::Cycle;
                    }
                    if scanline < 240 {
                        let mut new_sprite_indices = [0; 256];
                        // 64 sprites, 4 OAM bytes each (byte 0 = Y, byte 3 = X).
                        for sprite_index in 0_u8..64 {
                            let oam_index = sprite_index as usize * 4;
                            let sprite_y = oam[oam_index].get() as u16;
                            // Sprite overlaps this scanline (8-pixel-tall sprites).
                            if sprite_y <= y && y < sprite_y + 8 {
                                let sprite_x = oam[oam_index + 3].get() as u16;
                                for sprite_x_offset in 0_u16..8 {
                                    let x = (sprite_x + sprite_x_offset) as usize;
                                    if x < 256 {
                                        let sprite_bitmask = 1 << sprite_index;
                                        new_sprite_indices[x] |= sprite_bitmask;
                                    }
                                }
                            }
                        }
                        nes.ppu.scanline_sprite_indices.set(new_sprite_indices);
                    } else {
                        // Non-visible scanlines render no sprites.
                        let new_sprite_indices = [0; 256];
                        nes.ppu.scanline_sprite_indices.set(new_sprite_indices);
                    }
                }
            }
        }
    }
    /// Generator that renders background and sprite layers pixel by pixel,
    /// yielding one `PpuStep::Cycle` per PPU cycle and a `PpuStep::Vblank`
    /// when the visible frame ends (scanline 240).
    fn run_renderer<'a>(
        nes: &'a Nes<impl NesIo>,
    ) -> impl Generator<Yield = PpuStep, Return = !> + 'a {
        move || loop {
            for frame in 0_u64.. {
                let frame_is_odd = frame % 2 != 0;
                for scanline in 0_u16..=261 {
                    let tile_y = scanline / 8;
                    let tile_y_pixel = scanline % 8;
                    let y = scanline;
                    // Per-pixel sprite masks prepared by run_sprite_evaluation.
                    let sprite_indices = nes.ppu.scanline_sprite_indices.get();
                    let oam = nes.ppu.oam();
                    let should_skip_first_cycle = frame_is_odd && scanline == 0;
                    if !should_skip_first_cycle {
                        // The first cycle of each scanline is idle (except
                        // for the first cycle of the pre-render scanline
                        // for odd frames, which is skipped)
                        yield PpuStep::Cycle;
                    }
                    if scanline == 240 {
                        // Entering vblank: set the status flag, raise an NMI
                        // if enabled, and present the finished frame.
                        let _ = nes.ppu.status.update(|mut status| {
                            status.set(PpuStatusFlags::VBLANK_STARTED, true);
                            status
                        });
                        let ctrl = nes.ppu.ctrl.get();
                        if ctrl.contains(PpuCtrlFlags::VBLANK_INTERRUPT) {
                            // TODO: Generate NMI immediately if
                            // VBLANK_INTERRUPT is toggled during vblank
                            nes.cpu.nmi.set(true);
                        }
                        nes.io.video().present();
                        yield PpuStep::Vblank;
                    } else if scanline == 0 {
                        // Start of a new frame: leave vblank and clear the canvas.
                        let _ = nes.ppu.status.update(|mut status| {
                            status.set(PpuStatusFlags::VBLANK_STARTED, false);
                            status
                        });
                        nes.io.video().clear();
                    }
                    for tile_x in 0_u16..42 {
                        if tile_x >= 32 || scanline >= 240 {
                            // TODO: Implement sprite tile fetching here
                            for _cycle in 0..8 {
                                yield PpuStep::Cycle;
                            }
                            continue;
                        }
                        // Horizontal scroll split into a whole-tile offset
                        // plus a sub-tile pixel offset.
                        let scroll = nes.ppu.scroll.get();
                        let scroll_x = scroll & 0x00FF;
                        let tile_offset = scroll_x / 8;
                        let tile_x_pixel_offset = scroll_x % 8;
                        let scroll_tile_x = tile_x + tile_offset;
                        yield PpuStep::Cycle;
                        yield PpuStep::Cycle;
                        // Fetch the tile number from the nametable.
                        let nametable_index = tile_y * 32 + scroll_tile_x;
                        let nametable_byte = nes.read_ppu_u8(0x2000 + nametable_index);
                        yield PpuStep::Cycle;
                        yield PpuStep::Cycle;
                        // Each attribute byte covers a 4x4-tile area split into
                        // four 2x2-tile quadrants of 2 bits each.
                        let attr_x = scroll_tile_x / 4;
                        let attr_y = tile_y / 4;
                        let attr_is_left = ((scroll_tile_x / 2) % 2) == 0;
                        let attr_is_top = ((tile_y / 2) % 2) == 0;
                        let attr_index = attr_y * 8 + attr_x;
                        let attr = nes.read_ppu_u8(0x23C0 + attr_index);
                        let background_palette_index = match (attr_is_top, attr_is_left) {
                            (true, true) => attr & 0b_0000_0011,
                            (true, false) => (attr & 0b_0000_1100) >> 2,
                            (false, true) => (attr & 0b_0011_0000) >> 4,
                            (false, false) => (attr & 0b_1100_0000) >> 6,
                        };
                        let pattern_table_offset = if nes
                            .ppu
                            .ctrl
                            .get()
                            .contains(PpuCtrlFlags::BACKGROUND_PATTERN_TABLE_ADDR)
                        {
                            0x1000
                        } else {
                            0x0000
                        };
                        // Each tile is 16 bytes in the pattern table: 8 rows of
                        // the low bitplane followed by 8 rows of the high one.
                        let bitmap_offset = pattern_table_offset + nametable_byte as u16 * 16;
                        let bitmap_lo_byte = nes.read_ppu_u8(bitmap_offset + tile_y_pixel);
                        yield PpuStep::Cycle;
                        yield PpuStep::Cycle;
                        let bitmap_hi_byte = nes.read_ppu_u8(bitmap_offset + tile_y_pixel + 8);
                        yield PpuStep::Cycle;
                        yield PpuStep::Cycle;
                        for tile_x_pixel in 0..8 {
                            let tile_x_pixel_scroll = tile_x_pixel + tile_x_pixel_offset;
                            let x = (tile_x * 8) + tile_x_pixel;
                            // Combine the two bitplanes into a 2-bit color index.
                            let background_color_index = {
                                let bitmap_bitmask = 0b_1000_0000 >> (tile_x_pixel_scroll % 8);
                                let bitmap_lo_bit = (bitmap_lo_byte & bitmap_bitmask) != 0;
                                let bitmap_hi_bit = (bitmap_hi_byte & bitmap_bitmask) != 0;
                                let color_index = match (bitmap_hi_bit, bitmap_lo_bit) {
                                    (false, false) => 0,
                                    (false, true) => 1,
                                    (true, false) => 2,
                                    (true, true) => 3,
                                };
                                color_index
                            };
                            // Find the first non-transparent sprite pixel at
                            // this X (lowest OAM index is checked first).
                            let sprite_palette_and_color_index = {
                                let sprite_index_bitmask = sprite_indices[x as usize];
                                let included_sprites = (0_u64..64).filter(|sprite_index| {
                                    (sprite_index_bitmask & (1_u64 << sprite_index)) != 0
                                });
                                let mut palette_and_color_indices =
                                    included_sprites.map(|sprite_index| {
                                        let oam_index = (sprite_index * 4) as usize;
                                        let sprite_y = oam[oam_index].get() as u16;
                                        let tile_index = oam[oam_index + 1].get();
                                        let attrs = oam[oam_index + 2].get();
                                        let sprite_x = oam[oam_index + 3].get() as u16;
                                        let flip_horizontal = (attrs & 0b_0100_0000) != 0;
                                        let flip_vertical = (attrs & 0b_1000_0000) != 0;
                                        let sprite_x_pixel = x.wrapping_sub(sprite_x) % 256;
                                        let sprite_y_pixel =
                                            y.wrapping_sub(1).wrapping_sub(sprite_y) % 256;
                                        let sprite_x_pixel = if flip_horizontal {
                                            7 - sprite_x_pixel
                                        } else {
                                            sprite_x_pixel
                                        };
                                        let sprite_y_pixel = if flip_vertical {
                                            7 - sprite_y_pixel
                                        } else {
                                            sprite_y_pixel
                                        };
                                        let pattern_bitmask = 0b_1000_0000 >> sprite_x_pixel;
                                        let pattern_table_offset = if nes
                                            .ppu
                                            .ctrl
                                            .get()
                                            .contains(PpuCtrlFlags::SPRITE_PATTERN_TABLE_ADDR)
                                        {
                                            0x1000
                                        } else {
                                            0x0000
                                        };
                                        let pattern_offset =
                                            pattern_table_offset as u16 + tile_index as u16 * 16;
                                        let pattern_lo_byte =
                                            nes.read_ppu_u8(pattern_offset + sprite_y_pixel);
                                        let pattern_hi_byte =
                                            nes.read_ppu_u8(pattern_offset + sprite_y_pixel + 8);
                                        let pattern_lo_bit =
                                            (pattern_lo_byte & pattern_bitmask) != 0;
                                        let pattern_hi_bit =
                                            (pattern_hi_byte & pattern_bitmask) != 0;
                                        let palette_lo_bit = (attrs & 0b_0000_0001) != 0;
                                        let palette_hi_bit = (attrs & 0b_0000_0010) != 0;
                                        // Sprite palettes occupy palette slots 4-7.
                                        let palette_index = match (palette_hi_bit, palette_lo_bit) {
                                            (false, false) => 4,
                                            (false, true) => 5,
                                            (true, false) => 6,
                                            (true, true) => 7,
                                        };
                                        let color_index = match (pattern_hi_bit, pattern_lo_bit) {
                                            (false, false) => 0,
                                            (false, true) => 1,
                                            (true, false) => 2,
                                            (true, true) => 3,
                                        };
                                        (palette_index, color_index)
                                    });
                                let palette_and_color_index = palette_and_color_indices
                                    .find(|&(_, color_index)| color_index != 0);
                                palette_and_color_index
                            };
                            // A sprite pixel wins unless transparent (color 0),
                            // in which case the background pixel is used.
                            let color_code = match sprite_palette_and_color_index {
                                None | Some((_, 0)) => nes.ppu.palette_index_to_nes_color_code(
                                    background_palette_index,
                                    background_color_index,
                                ),
                                Some((sprite_palette_index, sprite_color_index)) => {
                                    nes.ppu.palette_index_to_nes_color_code(
                                        sprite_palette_index,
                                        sprite_color_index,
                                    )
                                }
                            };
                            let color = nes_color_code_to_rgb(color_code);
                            let point = Point { x, y };
                            nes.io.video().draw_point(point, color);
                        }
                    }
                    for _ in 0..4 {
                        // TODO: Implement PPU garbage reads
                        yield PpuStep::Cycle;
                    }
                }
            }
        }
    }
fn palette_index_to_nes_color_code(&self, palette_index: u8, color_index: u8) -> u8 {
let palette_ram = self.palette_ram();
let palette_ram_indices = [
[0x00, 0x01, 0x02, 0x03],
[0x00, 0x05, 0x06, 0x07],
[0x00, 0x09, 0x0A, 0x0B],
[0x00, 0x0D, 0x0E, 0x0F],
[0x00, 0x11, 0x12, 0x13],
[0x00, 0x15, 0x16, 0x17],
[0x00, 0x19, 0x1A, 0x1B],
[0x00, 0x1D, 0x1E, 0x1F],
];
let palettes = [
[
palette_ram[palette_ram_indices[0][0]].get(),
palette_ram[palette_ram_indices[0][1]].get(),
palette_ram[palette_ram_indices[0][2]].get(),
palette_ram[palette_ram_indices[0][3]].get(),
],
[
palette_ram[palette_ram_indices[1][0]].get(),
palette_ram[palette_ram_indices[1][1]].get(),
palette_ram[palette_ram_indices[1][2]].get(),
palette_ram[palette_ram_indices[1][3]].get(),
],
[
palette_ram[palette_ram_indices[2][0]].get(),
palette_ram[palette_ram_indices[2][1]].get(),
palette_ram[palette_ram_indices[2][2]].get(),
palette_ram[palette_ram_indices[2][3]].get(),
],
[
palette_ram[palette_ram_indices[3][0]].get(),
palette_ram[palette_ram_indices[3][1]].get(),
palette_ram[palette_ram_indices[3][2]].get(),
palette_ram[palette_ram_indices[3][3]].get(),
],
[
palette_ram[palette_ram_indices[4][0]].get(),
palette_ram[palette_ram_indices[4][1]].get(),
palette_ram[palette_ram_indices[4][2]].get(),
palette_ram[palette_ram_indices[4][3]].get(),
],
[
palette_ram[palette_ram_indices[5][0]].get(),
palette_ram[palette_ram_indices[5][1]].get(),
palette_ram[palette_ram_indices[5][2]].get(),
palette_ram[palette_ram_indices[5][3]].get(),
],
[
palette_ram[palette_ram_indices[6][0]].get(),
palette_ram[palette_ram_indices[6][1]].get(),
palette_ram[palette_ram_indices[6][2]].get(),
palette_ram[palette_ram_indices[6][3]].get(),
],
[
palette_ram[palette_ram_indices[7][0]].get(),
palette_ram[palette_ram_indices[7][1]].get(),
palette_ram[palette_ram_indices[7][2]].get(),
palette_ram[palette_ram_indices[7][3]].get(),
],
];
let palette = palettes[palette_index as usize];
palette[color_index as usize]
}
}
/// A unit of progress yielded by the PPU generators.
pub enum PpuStep {
    /// One PPU clock cycle elapsed.
    Cycle,
    /// Vertical blanking began (end of the visible frame).
    Vblank,
}
fn nes_color_code_to_rgb(color_code: u8) -> Color {
// Based on the palette provided on the NesDev wiki:
// - https://wiki.nesdev.com/w/index.php/PPU_palettes
// - https://wiki.nesdev.com/w/index.php/File:Savtool-swatches.png
match color_code & 0x3F {
0x00 => Color {
r: 0x54,
g: 0x54,
b: 0x54,
},
0x01 => Color {
r: 0x00,
g: 0x1E,
b: 0x74,
},
0x02 => Color {
r: 0x08,
g: 0x10,
b: 0x90,
},
0x03 => Color {
r: 0x30,
g: 0x00,
b: 0x88,
},
0x04 => Color {
r: 0x44,
g: 0x00,
b: 0x64,
},
0x05 => Color {
r: 0x5C,
g: 0x00,
b: 0x30,
},
0x06 => Color {
r: 0x54,
g: 0x04,
b: 0x00,
},
0x07 => Color {
r: 0x3C,
g: 0x18,
b: 0x00,
},
0x08 => Color {
r: 0x20,
g: 0x2A,
b: 0x00,
},
0x09 => Color {
r: 0x08,
g: 0x3A,
b: 0x00,
},
0x0A => Color {
r: 0x00,
g: 0x40,
b: 0x00,
},
0x0B => Color {
r: 0x00,
g: 0x3C,
b: 0x00,
},
0x0C => Color {
r: 0x00,
g: 0x32,
b: 0x3C,
},
0x0D => Color {
r: 0x00,
g: 0x00,
b: 0x00,
},
0x0E => Color {
r: 0x00,
g: 0x00,
b: 0x00,
},
0x0F => Color {
r: 0x00,
g: 0x00,
b: 0x00,
},
0x10 => Color {
r: 0x98,
g: 0x96,
b: 0x98,
},
0x11 => Color {
r: 0x08,
g: 0x4C,
b: 0xC4,
},
0x12 => Color {
r: 0x30,
g: 0x32,
b: 0xEC,
},
0x13 => Color {
r: 0x5C,
g: 0x1E,
b: 0xE4,
},
0x14 => Color {
r: 0x88,
g: 0x14,
b: 0xB0,
},
0x15 => Color {
r: 0xA0,
g: 0x14,
b: 0x64,
},
0x16 => Color {
r: 0x98,
g: 0x22,
b: 0x20,
},
0x17 => Color {
r: 0x78,
g: 0x3C,
b: 0x00,
},
0x18 => Color {
r: 0x54,
g: 0x5A,
b: 0x00,
},
0x19 => Color {
r: 0x28,
g: 0x72,
b: 0x00,
},
0x1A => Color {
r: 0x08,
g: 0x7C,
b: 0x00,
},
0x1B => Color {
r: 0x00,
g: 0x76,
b: 0x28,
},
0x1C => Color {
r: 0x00,
g: 0x66,
b: 0x78,
},
0x1D => Color {
r: 0x00,
g: 0x00,
b: 0x00,
},
0x1E => Color {
r: 0x00,
g: 0x00,
b: 0x00,
},
0x1F => Color {
r: 0x00,
g: 0x00,
b: 0x00,
},
0x20 => Color {
r: 0xEC,
g: 0xEE,
b: 0xEC,
},
0x21 => Color {
r: 0x4C,
g: 0x9A,
b: 0xEC,
},
0x22 => Color {
r: 0x78,
g: 0x7C,
b: 0xEC,
},
0x23 => Color {
r: 0xB0,
g: 0x62,
b: 0xEC,
},
0x24 => Color {
r: 0xE4,
g: 0x54,
b: 0xEC,
},
0x25 => Color {
r: 0xEC,
g: 0x58,
b: 0xB4,
},
0x26 => Color {
r: 0xEC,
g: 0x6A,
b: 0x64,
},
0x27 => Color {
r: 0xD4,
g: 0x88,
b: 0x20,
},
0x28 => Color {
r: 0xA0,
g: 0xAA,
b: 0x00,
},
0x29 => Color {
r: 0x74,
g: 0xC4,
b: 0x00,
},
0x2A => Color {
r: 0x4C,
g: 0xD0,
b: 0x20,
},
0x2B => Color {
r: 0x38,
g: 0xCC,
b: 0x6C,
},
0x2C => Color {
r: 0x38,
g: 0xB4,
b: 0xCC,
},
0x2D => Color {
r: 0x3C,
g: 0x3C,
b: 0x3C,
},
0x2E => Color {
r: 0x00,
g: 0x00,
b: 0x00,
},
0x2F => Color {
r: 0x00,
g: 0x00,
b: 0x00,
},
0x30 => Color {
r: 0xEC,
g: 0xEE,
b: 0xEC,
},
0x31 => Color {
r: 0xA8,
g: 0xCC,
b: 0xEC,
},
0x32 => Color {
r: 0xBC,
g: 0xBC,
b: 0xEC,
},
0x33 => Color {
r: 0xD4,
g: 0xB2,
b: 0xEC,
},
0x34 => Color {
r: 0xEC,
g: 0xAE,
b: 0xEC,
},
0x35 => Color {
r: 0xEC,
g: 0xAE,
b: 0xD4,
},
0x36 => Color {
r: 0xEC,
g: 0xB4,
b: 0xB0,
},
0x37 => Color {
r: 0xE4,
g: 0xC4,
b: 0x90,
},
0x38 => Color {
r: 0xCC,
g: 0xD2,
b: 0x78,
},
0x39 => Color {
r: 0xB4,
g: 0xDE,
b: 0x78,
},
0x3A => Color {
r: 0xA8,
g: 0xE2,
b: 0x90,
},
0x3B => Color {
r: 0x98,
g: 0xE2,
b: 0xB4,
},
0x3C => Color {
r: 0xA0,
g: 0xD6,
b: 0xE4,
},
0x3D => Color {
r: 0xA0,
g: 0xA2,
b: 0xA0,
},
0x3E => Color {
r: 0x00,
g: 0x00,
b: 0x00,
},
0x3F => Color {
r: 0x00,
g: 0x00,
b: 0x00,
},
_ => {
unreachable!();
}
}
}
bitflags! {
    /// Bits of the PPUCTRL register as written by the CPU.
    pub struct PpuCtrlFlags: u8 {
        const NAMETABLE_LO = 1 << 0;
        const NAMETABLE_HI = 1 << 1;
        const VRAM_ADDR_INCREMENT = 1 << 2;
        const SPRITE_PATTERN_TABLE_ADDR = 1 << 3;
        const BACKGROUND_PATTERN_TABLE_ADDR = 1 << 4;
        const SPRITE_SIZE = 1 << 5;
        const PPU_MASTER_SLAVE_SELECT = 1 << 6;
        const VBLANK_INTERRUPT = 1 << 7;
    }
}
bitflags! {
    /// Bits of the PPUMASK register as written by the CPU.
    pub struct PpuMaskFlags: u8 {
        const GREYSCALE = 1 << 0;
        const SHOW_BACKGROUND_IN_LEFT_MARGIN = 1 << 1;
        const SHOW_SPRITES_IN_LEFT_MARGIN = 1 << 2;
        const SHOW_BACKGROUND = 1 << 3;
        const SHOW_SPRITES = 1 << 4;
        const EMPHASIZE_RED = 1 << 5;
        const EMPHASIZE_GREEN = 1 << 6;
        const EMPHASIZE_BLUE = 1 << 7;
    }
}
bitflags! {
    /// Bits of the PPUSTATUS register as read by the CPU.
    pub struct PpuStatusFlags: u8 {
        // NOTE: Bits 0-4 are unused (but result in bits read from
        // the PPU's latch)
        const SPRITE_OVERFLOW = 1 << 5;
        const SPRITE_ZERO_HIT = 1 << 6;
        const VBLANK_STARTED = 1 << 7;
    }
}
|
use diesel::prelude::*;
use rocket::response::Failure;
use rocket::http::Status;
use rocket_contrib::Json;
use models::{HJob, JobStatus, User};
use fields::{Authentication, PrivilegeLevel};
use schema::horus_jobs::dsl::*;
use schema::horus_users::dsl::*;
use DbConn;
/// Minimal serializable projection of a job row used by the listing routes.
#[derive(Serialize)]
pub struct ListJob
{
    // Primary key of the job row.
    id: i32,
    job_name: String,
    // Raw numeric `JobStatus` value.
    job_status: i32,
    priority: i32,
}
// NONE OF THESE ARE IMPLEMENTED
#[get("/active/<uid>")]
pub fn list_active_jobs(
uid: i32,
auth: Authentication,
conn: DbConn,
) -> Result<Json<Vec<ListJob>>, Failure>
{
if auth.get_userid() != uid && auth.get_privilege_level() == PrivilegeLevel::User {
return Err(Failure(Status::Unauthorized));
}
let user = horus_users.find(&uid).get_result::<User>(&*conn);
if let Err(_) = user {
return Err(Failure(Status::NotFound));
}
let user = user.unwrap();
let result = HJob::belonging_to(&user)
.filter(job_status.ne(JobStatus::Failed as i32))
.filter(job_status.ne(JobStatus::Complete as i32))
.select((
::schema::horus_jobs::dsl::id,
job_name,
job_status,
priority,
))
.get_results::<(i32, String, i32, i32)>(&*conn);
match result {
Ok(values) => {
let values = values
.iter()
.map(|&(id, ref name, status, _priority)| ListJob {
id: id,
job_name: name.clone(),
job_status: status,
priority: _priority,
})
.collect();
Ok(Json(values))
}
Err(_) => Err(Failure(Status::InternalServerError)),
}
}
#[get("/all/<uid>/<page>")]
pub fn list_all_jobs(
uid: i32,
page: u32,
auth: Authentication,
conn: DbConn,
) -> Result<Json<Vec<ListJob>>, Failure>
{
if auth.get_userid() != uid && auth.get_privilege_level() == PrivilegeLevel::User {
return Err(Failure(Status::Unauthorized));
}
let user = horus_users.find(&uid).get_result::<User>(&*conn);
if user.is_err() {
return Err(Failure(Status::NotFound));
}
let user = user.unwrap();
let result = HJob::belonging_to(&user)
.select((
::schema::horus_jobs::dsl::id,
job_name,
job_status,
priority,
))
.offset((page * 24) as i64)
.limit(24)
.get_results::<(i32, String, i32, i32)>(&*conn);
match result {
Ok(values) => {
let values = values
.iter()
.map(|&(id, ref name, status, _priority)| ListJob {
id: id,
job_name: name.clone(),
job_status: status,
priority: _priority,
})
.collect();
Ok(Json(values))
}
Err(_) => Err(Failure(Status::InternalServerError)),
}
}
#[get("/poll/<job_id>")]
pub fn retrieve_job_status(
job_id: i32,
auth: Authentication,
conn: DbConn,
) -> Result<Json<i32>, Failure>
{
let status = poll_job(job_id, auth.get_userid(), conn);
match status {
None => Err(Failure(Status::NotFound)),
Some(v) => Ok(Json(v)),
}
}
/// Poll a job's status. Returns `None` if the owner cannot be loaded or
/// the job lookup fails (including when the job belongs to someone else).
fn poll_job(job_id: i32, owner_id: i32, conn: DbConn) -> Option<i32>
{
    let user = horus_users.find(owner_id).first::<User>(&*conn).ok()?;
    HJob::belonging_to(&user)
        .find(job_id)
        .select(job_status)
        .first::<i32>(&*conn)
        .ok()
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::cli_state::CliState;
use crate::StarcoinOpt;
use anyhow::{bail, Result};
use scmd::{CommandAction, ExecContext};
use starcoin_move_compiler::command_line::parse_address;
use starcoin_move_compiler::shared::Address;
use std::fs::File;
use std::io::Write;
use std::path::{Path, PathBuf};
use structopt::StructOpt;
/// Options for the `compile` subcommand (see each field's `help` text).
#[derive(Debug, StructOpt)]
#[structopt(name = "compile")]
pub struct CompileOpt {
    // Sender address substituted into source templates; when omitted the
    // wallet's default account address is used (see `CompileCommand::run`).
    #[structopt(short = "s", long = "sender", name = "sender address", help = "hex encoded string, like 0x0, 0x1", parse(try_from_str = parse_address))]
    sender: Option<Address>,
    // Extra dependency paths appended to the stdlib dependency set.
    #[structopt(
        short = "d",
        name = "dependency_path",
        long = "dep",
        help = "path of dependency used to build, support multi deps"
    )]
    deps: Vec<String>,
    // Output directory for the compiled artifact; defaults to a temp dir.
    #[structopt(short = "o", name = "out_dir", help = "out dir", parse(from_os_str))]
    out_dir: Option<PathBuf>,
    // Path to the single `*.move` source file to compile.
    #[structopt(name = "source", help = "source file path")]
    source_file: String,
}
/// Marker type for the `compile` subcommand; behaviour lives in the
/// `CommandAction` implementation below.
pub struct CompileCommand;
impl CommandAction for CompileCommand {
    type State = CliState;
    type GlobalOpt = StarcoinOpt;
    type Opt = CompileOpt;
    type ReturnItem = PathBuf;
    /// Compile a single `*.move` source file and return the path of the
    /// produced staged-bytecode file.
    fn run(
        &self,
        ctx: &ExecContext<Self::State, Self::GlobalOpt, Self::Opt>,
    ) -> Result<Self::ReturnItem> {
        // Use the explicit --sender when given, otherwise the default account.
        let sender = if let Some(sender) = ctx.opt().sender {
            sender
        } else {
            Address::new(ctx.state().default_account()?.address.into())
        };
        let source_file = ctx.opt().source_file.as_str();
        let source_file_path = Path::new(source_file);
        // Extensionless files compare as "" and are rejected below.
        let ext = source_file_path
            .extension()
            .map(|os_str| os_str.to_str().expect("file extension should is utf str"))
            .unwrap_or_else(|| "");
        //TODO support compile dir.
        if ext != starcoin_move_compiler::MOVE_EXTENSION {
            bail!("Only support compile *.move file.")
        }
        // Expand the source template (substituting the sender address)
        // into the CLI's temp directory.
        let temp_dir = ctx.state().temp_dir();
        let source_file_path =
            starcoin_move_compiler::process_source_tpl_file(temp_dir, source_file_path, sender)?;
        // Dependencies: the stdlib plus any --dep paths from the command line.
        let mut deps = stdlib::stdlib_files();
        // add extra deps
        deps.append(&mut ctx.opt().deps.clone());
        let targets = vec![source_file_path
            .to_str()
            .expect("path to str should success.")
            .to_owned()];
        let (file_texts, compile_units) =
            starcoin_move_compiler::move_compile_no_report(&targets, &deps, Some(sender))?;
        // Compiler diagnostics are rendered into a colored buffer and
        // surfaced as a single error string.
        let mut compile_units = match compile_units {
            Err(e) => {
                let err =
                    starcoin_move_compiler::errors::report_errors_to_color_buffer(file_texts, e);
                bail!(String::from_utf8(err).unwrap())
            }
            Ok(r) => r,
        };
        // Single-file compile: take the last (only) compiled unit.
        let compile_result = compile_units.pop().unwrap();
        // Output path: --out-dir when given, otherwise the temp dir, using
        // the source file name with the staged-bytecode extension.
        let mut txn_path = ctx
            .opt()
            .out_dir
            .clone()
            .unwrap_or_else(|| temp_dir.to_path_buf());
        txn_path.push(source_file_path.file_name().unwrap());
        txn_path.set_extension(stdlib::STAGED_EXTENSION);
        File::create(txn_path.clone())?.write_all(&compile_result.serialize())?;
        Ok(txn_path)
    }
}
|
use std::fmt;
use std::result::Result;
use std::error::Error;
use std::panic::catch_unwind;
use std::os::raw::c_void;
use super::*;
#[test]
fn test_set_get() {
    // Round-trip two string values through the globals table.
    let state = Lua::new();
    let globals = state.globals().unwrap();
    globals.set("foo", "bar").unwrap();
    globals.set("baz", "baf").unwrap();
    let foo: String = globals.get("foo").unwrap();
    let baz: String = globals.get("baz").unwrap();
    assert_eq!(foo, "bar");
    assert_eq!(baz, "baf");
}
#[test]
fn test_load() {
    // Loading a chunk executes it; a chunk may also return a value
    // (here a module table) which `load` hands back to Rust.
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    lua.load::<()>(
        r#"
res = 'foo'..'bar'
"#,
        None,
    ).unwrap();
    assert_eq!(globals.get::<_, String>("res").unwrap(), "foobar");
    let module: LuaTable = lua.load(
        r#"
local module = {}
function module.func()
return "hello"
end
return module
"#,
        None,
    ).unwrap();
    assert!(module.has("func").unwrap());
    // Functions stored in the returned table are directly callable.
    assert_eq!(module
        .get::<_, LuaFunction>("func")
        .unwrap()
        .call::<_, String>(())
        .unwrap(),
        "hello");
}
#[test]
fn test_eval() {
    // `eval` accepts both bare expressions and explicit `return` statements.
    let lua = Lua::new();
    assert_eq!(lua.eval::<i32>("1 + 1").unwrap(), 2);
    assert!(lua.eval::<bool>("false == false").unwrap());
    assert_eq!(lua.eval::<i32>("return 1 + 2").unwrap(), 3);
    // A truncated statement must surface as IncompleteStatement.
    match lua.eval::<()>("if true then") {
        Err(LuaError(LuaErrorKind::IncompleteStatement(_), _)) => {}
        other => panic!("expected IncompleteStatement, got {:?}", other),
    }
}
#[test]
fn test_table() {
    // Two handles to the same global table alias the same underlying table,
    // and length/pairs/array_values agree with Lua semantics (including a
    // nil "hole" in table3).
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    globals
        .set("table", lua.create_empty_table().unwrap())
        .unwrap();
    let table1: LuaTable = globals.get("table").unwrap();
    let table2: LuaTable = globals.get("table").unwrap();
    // Writes through one handle are visible through the other.
    table1.set("foo", "bar").unwrap();
    table2.set("baz", "baf").unwrap();
    assert_eq!(table2.get::<_, String>("foo").unwrap(), "bar");
    assert_eq!(table1.get::<_, String>("baz").unwrap(), "baf");
    lua.load::<()>(
        r#"
table1 = {1, 2, 3, 4, 5}
table2 = {}
table3 = {1, 2, nil, 4, 5}
"#,
        None,
    ).unwrap();
    let table1 = globals.get::<_, LuaTable>("table1").unwrap();
    let table2 = globals.get::<_, LuaTable>("table2").unwrap();
    let table3 = globals.get::<_, LuaTable>("table3").unwrap();
    assert_eq!(table1.length().unwrap(), 5);
    assert_eq!(table1.pairs::<i64, i64>().unwrap(),
        vec![(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)]);
    assert_eq!(table2.length().unwrap(), 0);
    assert_eq!(table2.pairs::<i64, i64>().unwrap(), vec![]);
    assert_eq!(table2.array_values::<i64>().unwrap(), vec![]);
    // The nil element reads back as None but the length is still 5 here.
    assert_eq!(table3.length().unwrap(), 5);
    assert_eq!(table3.array_values::<Option<i64>>().unwrap(),
        vec![Some(1), Some(2), None, Some(4), Some(5)]);
    // Tables can also be created pre-filled from a Rust vector.
    globals
        .set("table4",
            lua.create_array_table(vec![1, 2, 3, 4, 5]).unwrap())
        .unwrap();
    let table4 = globals.get::<_, LuaTable>("table4").unwrap();
    assert_eq!(table4.pairs::<i64, i64>().unwrap(),
        vec![(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)]);
}
#[test]
fn test_function() {
    // Define a two-argument Lua function and call it from Rust via an hlist.
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    lua.load::<()>(
        r#"
function concat(arg1, arg2)
return arg1 .. arg2
end
"#,
        None,
    ).unwrap();
    let concat = globals.get::<_, LuaFunction>("concat").unwrap();
    let joined = concat.call::<_, String>(hlist!["foo", "bar"]).unwrap();
    assert_eq!(joined, "foobar");
}
#[test]
fn test_bind() {
    // `bind` curries arguments: each call prepends more leading arguments,
    // and the final `call` appends the trailing ones.
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    lua.load::<()>(
        r#"
function concat(...)
local res = ""
for _, s in pairs({...}) do
res = res..s
end
return res
end
"#,
        None,
    ).unwrap();
    let mut concat = globals.get::<_, LuaFunction>("concat").unwrap();
    concat = concat.bind("foo").unwrap();
    concat = concat.bind("bar").unwrap();
    // Binding an hlist binds several arguments at once.
    concat = concat.bind(hlist!["baz", "baf"]).unwrap();
    assert_eq!(concat.call::<_, String>(hlist!["hi", "wut"]).unwrap(),
        "foobarbazbafhiwut");
}
#[test]
fn test_rust_function() {
    // A Rust closure registered as a global must be callable from Lua code.
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    lua.load::<()>(
        r#"
function lua_function()
return rust_function()
end
-- Test to make sure chunk return is ignored
return 1
"#,
        None,
    ).unwrap();
    let callback = lua.create_function(|lua, _| lua.pack("hello")).unwrap();
    globals.set("rust_function", callback).unwrap();
    let lua_function = globals.get::<_, LuaFunction>("lua_function").unwrap();
    assert_eq!(lua_function.call::<_, String>(()).unwrap(), "hello");
}
#[test]
fn test_user_data() {
    // Userdata values keep their concrete Rust type across the Lua boundary.
    struct UserData1(i64);
    struct UserData2(Box<i64>);
    impl LuaUserDataType for UserData1 {};
    impl LuaUserDataType for UserData2 {};
    let lua = Lua::new();
    let first = lua.create_userdata(UserData1(1)).unwrap();
    let second = lua.create_userdata(UserData2(Box::new(2))).unwrap();
    // Type checks are exact: each wrapper matches only its own type.
    assert!(first.is::<UserData1>());
    assert!(!first.is::<UserData2>());
    assert!(second.is::<UserData2>());
    assert!(!second.is::<UserData1>());
    // Borrowing recovers the stored payloads.
    assert_eq!(first.borrow::<UserData1>().unwrap().0, 1);
    assert_eq!(*second.borrow::<UserData2>().unwrap().0, 2);
}
#[test]
fn test_methods() {
    // Userdata methods registered via add_methods are callable with Lua's
    // `:` syntax, and mutations on either side stay in sync.
    struct UserData(i64);
    impl LuaUserDataType for UserData {
        fn add_methods(methods: &mut LuaUserDataMethods<Self>) {
            methods.add_method("get_value", |lua, data, _| lua.pack(data.0));
            methods.add_method_mut("set_value", |lua, data, args| {
                data.0 = lua.unpack(args)?;
                lua.pack(())
            });
        }
    }
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    let userdata = lua.create_userdata(UserData(42)).unwrap();
    globals.set("userdata", userdata.clone()).unwrap();
    lua.load::<()>(
        r#"
function get_it()
return userdata:get_value()
end
function set_it(i)
return userdata:set_value(i)
end
"#,
        None,
    ).unwrap();
    let get = globals.get::<_, LuaFunction>("get_it").unwrap();
    let set = globals.get::<_, LuaFunction>("set_it").unwrap();
    assert_eq!(get.call::<_, i64>(()).unwrap(), 42);
    // Mutation from Rust is observable from Lua...
    userdata.borrow_mut::<UserData>().unwrap().0 = 64;
    assert_eq!(get.call::<_, i64>(()).unwrap(), 64);
    // ...and vice versa.
    set.call::<_, ()>(100).unwrap();
    assert_eq!(get.call::<_, i64>(()).unwrap(), 100);
}
#[test]
fn test_metamethods() {
    // Arithmetic metamethods (+, -) and a custom __index are dispatched to
    // the registered Rust callbacks.
    #[derive(Copy, Clone)]
    struct UserData(i64);
    impl LuaUserDataType for UserData {
        fn add_methods(methods: &mut LuaUserDataMethods<Self>) {
            methods.add_method("get", |lua, data, _| lua.pack(data.0));
            methods.add_meta_function(LuaMetaMethod::Add, |lua, args| {
                let hlist_pat![lhs, rhs] = lua.unpack::<HList![UserData, UserData]>(args)?;
                lua.pack(UserData(lhs.0 + rhs.0))
            });
            methods.add_meta_function(LuaMetaMethod::Sub, |lua, args| {
                let hlist_pat![lhs, rhs] = lua.unpack::<HList![UserData, UserData]>(args)?;
                lua.pack(UserData(lhs.0 - rhs.0))
            });
            // __index only answers for the "inner" key; everything else errors.
            methods.add_meta_method(LuaMetaMethod::Index, |lua, data, args| {
                let index = lua.unpack::<LuaString>(args)?;
                if index.get()? == "inner" {
                    lua.pack(data.0)
                } else {
                    Err("no such custom index".into())
                }
            });
        }
    }
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    globals.set("userdata1", UserData(7)).unwrap();
    globals.set("userdata2", UserData(3)).unwrap();
    assert_eq!(lua.eval::<UserData>("userdata1 + userdata2").unwrap().0, 10);
    assert_eq!(lua.eval::<UserData>("userdata1 - userdata2").unwrap().0, 4);
    assert_eq!(lua.eval::<i64>("userdata1:get()").unwrap(), 7);
    assert_eq!(lua.eval::<i64>("userdata2.inner").unwrap(), 3);
    assert!(lua.eval::<()>("userdata2.nonexist_field").is_err());
}
#[test]
fn test_scope() {
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    lua.load::<()>(
        r#"
touter = {
tin = {1, 2, 3}
}
"#,
        None,
    ).unwrap();
    // Make sure that table gets do not borrow the table, but instead just borrow lua.
    let tin;
    {
        let touter = globals.get::<_, LuaTable>("touter").unwrap();
        tin = touter.get::<_, LuaTable>("tin").unwrap();
    }
    // `tin` must stay usable after `touter` has gone out of scope.
    assert_eq!(tin.get::<_, i64>(1).unwrap(), 1);
    assert_eq!(tin.get::<_, i64>(2).unwrap(), 2);
    assert_eq!(tin.get::<_, i64>(3).unwrap(), 3);
    // Should not compile, don't know how to test that
    // struct UserData;
    // impl LuaUserDataType for UserData {};
    // let userdata_ref;
    // {
    // let touter = globals.get::<_, LuaTable>("touter").unwrap();
    // touter.set("userdata", lua.create_userdata(UserData).unwrap()).unwrap();
    // let userdata = touter.get::<_, LuaUserData>("userdata").unwrap();
    // userdata_ref = userdata.borrow::<UserData>();
    // }
}
#[test]
fn test_lua_multi() {
    // Multiple return values unpack into an HList; a trailing LuaVariadic
    // collects any excess values.
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    lua.load::<()>(
        r#"
function concat(arg1, arg2)
return arg1 .. arg2
end
function mreturn()
return 1, 2, 3, 4, 5, 6
end
"#,
        None,
    ).unwrap();
    let concat = globals.get::<_, LuaFunction>("concat").unwrap();
    let mreturn = globals.get::<_, LuaFunction>("mreturn").unwrap();
    assert_eq!(concat.call::<_, String>(hlist!["foo", "bar"]).unwrap(),
        "foobar");
    // Return values beyond the requested HList are silently dropped.
    let hlist_pat![a, b] = mreturn.call::<_, HList![u64, u64]>(hlist![]).unwrap();
    assert_eq!((a, b), (1, 2));
    let hlist_pat![a, b, LuaVariadic(v)] = mreturn.call::<_, HList![u64, u64, LuaVariadic<u64>]>(hlist![]).unwrap();
    assert_eq!((a, b), (1, 2));
    assert_eq!(v, vec![3, 4, 5, 6]);
}
#[test]
fn test_coercion() {
    // Lua's implicit number/string coercion applies when reading globals.
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    lua.load::<()>(
        r#"
int = 123
str = "123"
num = 123.0
"#,
        None,
    ).unwrap();
    let as_string: String = globals.get("int").unwrap();
    let from_str: i32 = globals.get("str").unwrap();
    let from_num: i32 = globals.get("num").unwrap();
    assert_eq!(as_string, "123");
    assert_eq!(from_str, 123);
    assert_eq!(from_num, 123);
}
#[test]
fn test_error() {
    // Exercises the error surface: Lua runtime errors, Rust callback
    // errors, pcall/xpcall interaction, unbounded recursion, and Rust
    // panics crossing the Lua boundary (which must NOT be swallowed by
    // pcall/xpcall).
    #[derive(Debug)]
    pub struct TestError;
    impl fmt::Display for TestError {
        fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
            write!(fmt, "test error")
        }
    }
    impl Error for TestError {
        fn description(&self) -> &str {
            "test error"
        }
        fn cause(&self) -> Option<&Error> {
            None
        }
    }
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    lua.load::<()>(
        r#"
function no_error()
end
function lua_error()
error("this is a lua error")
end
function rust_error()
rust_error_function()
end
function test_pcall()
local testvar = 0
pcall(function(arg)
testvar = testvar + arg
error("should be ignored")
end, 3)
local function handler(err)
testvar = testvar + err
return "should be ignored"
end
xpcall(function()
error(5)
end, handler)
if testvar ~= 8 then
error("testvar had the wrong value, pcall / xpcall misbehaving "..testvar)
end
end
function understand_recursion()
understand_recursion()
end
"#,
        None,
    ).unwrap();
    // A Rust callback that fails with an external (non-Lua) error.
    let rust_error_function =
        lua.create_function(|_, _| Err(LuaExternalError(Box::new(TestError)).into()))
            .unwrap();
    globals
        .set("rust_error_function", rust_error_function)
        .unwrap();
    let no_error = globals.get::<_, LuaFunction>("no_error").unwrap();
    let lua_error = globals.get::<_, LuaFunction>("lua_error").unwrap();
    let rust_error = globals.get::<_, LuaFunction>("rust_error").unwrap();
    let test_pcall = globals.get::<_, LuaFunction>("test_pcall").unwrap();
    let understand_recursion = globals
        .get::<_, LuaFunction>("understand_recursion")
        .unwrap();
    assert!(no_error.call::<_, ()>(()).is_ok());
    // `error()` raised inside Lua surfaces as ScriptError...
    match lua_error.call::<_, ()>(()) {
        Err(LuaError(LuaErrorKind::ScriptError(_), _)) => {}
        Err(_) => panic!("error is not ScriptError kind"),
        _ => panic!("error not thrown"),
    }
    // ...while a failing Rust callback surfaces as CallbackError.
    match rust_error.call::<_, ()>(()) {
        Err(LuaError(LuaErrorKind::CallbackError(_), _)) => {}
        Err(_) => panic!("error is not CallbackError kind"),
        _ => panic!("error not thrown"),
    }
    test_pcall.call::<_, ()>(()).unwrap();
    // Unbounded recursion must produce an error, not crash the process.
    assert!(understand_recursion.call::<_, ()>(()).is_err());
    // A Rust panic inside a callback must unwind through Lua's pcall.
    // NOTE(review): `globals` here belongs to the OUTER Lua state while a
    // fresh inner `lua` loads the chunk — confirm this mix is intentional.
    match catch_unwind(|| -> LuaResult<()> {
        let lua = Lua::new();
        lua.load::<()>(
            r#"
function rust_panic()
pcall(function () rust_panic_function() end)
end
"#,
            None,
        )?;
        let rust_panic_function = lua.create_function(|_, _| {
            panic!("expected panic, this panic should be caught in rust")
        })?;
        globals.set("rust_panic_function", rust_panic_function)?;
        let rust_panic = globals.get::<_, LuaFunction>("rust_panic")?;
        rust_panic.call::<_, ()>(())
    }) {
        Ok(Ok(_)) => panic!("no panic was detected, pcall caught it!"),
        Ok(Err(e)) => panic!("error during panic test {:?}", e),
        Err(_) => {}
    };
    // Same again, but the panic must also escape xpcall's handler.
    match catch_unwind(|| -> LuaResult<()> {
        let lua = Lua::new();
        lua.load::<()>(
            r#"
function rust_panic()
xpcall(function() rust_panic_function() end, function() end)
end
"#,
            None,
        )?;
        let rust_panic_function = lua.create_function(|_, _| {
            panic!("expected panic, this panic should be caught in rust")
        })?;
        globals.set("rust_panic_function", rust_panic_function)?;
        let rust_panic = globals.get::<_, LuaFunction>("rust_panic")?;
        rust_panic.call::<_, ()>(())
    }) {
        Ok(Ok(_)) => panic!("no panic was detected, xpcall caught it!"),
        Ok(Err(e)) => panic!("error during panic test {:?}", e),
        Err(_) => {}
    };
}
#[test]
fn test_thread() {
    // A coroutine built from a function yields through resume(); its status
    // flips to Dead once the function returns.
    let lua = Lua::new();
    let thread = lua.create_thread(
        lua.eval::<LuaFunction>(
            r#"function (s)
local sum = s
for i = 1,4 do
sum = sum + coroutine.yield(sum)
end
return sum
end"#,
        ).unwrap(),
    ).unwrap();
    assert_eq!(thread.status().unwrap(), LuaThreadStatus::Active);
    assert_eq!(thread.resume::<_, i64>(0).unwrap(), Some(0));
    assert_eq!(thread.status().unwrap(), LuaThreadStatus::Active);
    assert_eq!(thread.resume::<_, i64>(1).unwrap(), Some(1));
    assert_eq!(thread.status().unwrap(), LuaThreadStatus::Active);
    assert_eq!(thread.resume::<_, i64>(2).unwrap(), Some(3));
    assert_eq!(thread.status().unwrap(), LuaThreadStatus::Active);
    assert_eq!(thread.resume::<_, i64>(3).unwrap(), Some(6));
    assert_eq!(thread.status().unwrap(), LuaThreadStatus::Active);
    assert_eq!(thread.resume::<_, i64>(4).unwrap(), Some(10));
    assert_eq!(thread.status().unwrap(), LuaThreadStatus::Dead);
    // An error raised inside the coroutine moves it to the Error status.
    let accumulate = lua.create_thread(
        lua.eval::<LuaFunction>(
            r#"function (sum)
while true do
sum = sum + coroutine.yield(sum)
end
end"#,
        ).unwrap(),
    ).unwrap();
    for i in 0..4 {
        accumulate.resume::<_, ()>(i).unwrap();
    }
    assert_eq!(accumulate.resume::<_, i64>(4).unwrap(), Some(10));
    assert_eq!(accumulate.status().unwrap(), LuaThreadStatus::Active);
    assert!(accumulate.resume::<_, ()>("error").is_err());
    assert_eq!(accumulate.status().unwrap(), LuaThreadStatus::Error);
    // Threads created by Lua's own coroutine.create behave the same way.
    let thread = lua.eval::<LuaThread>(
        r#"coroutine.create(function ()
while true do
coroutine.yield(42)
end
end)"#,
    ).unwrap();
    assert_eq!(thread.status().unwrap(), LuaThreadStatus::Active);
    assert_eq!(thread.resume::<_, i64>(()).unwrap(), Some(42));
}
#[test]
fn test_lightuserdata() {
    // A light userdata pointer must survive a round trip through Lua intact.
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    lua.load::<()>(
        r#"function id(a)
return a
end"#,
        None,
    ).unwrap();
    let identity = globals.get::<_, LuaFunction>("id").unwrap();
    let round_tripped = identity
        .call::<_, LightUserData>(LightUserData(42 as *mut c_void))
        .unwrap();
    assert_eq!(round_tripped, LightUserData(42 as *mut c_void));
}
#[test]
fn test_table_error() {
    // Metamethods that raise propagate as Rust errors through set/get/length,
    // while the raw_* variants bypass the metatable entirely.
    let lua = Lua::new();
    let globals = lua.globals().unwrap();
    lua.load::<()>(
        r#"
table = {}
setmetatable(table, {
__index = function()
error("lua error")
end,
__newindex = function()
error("lua error")
end,
__len = function()
error("lua error")
end
})
"#,
        None,
    ).unwrap();
    let bad_table: LuaTable = globals.get("table").unwrap();
    assert!(bad_table.set(1, 1).is_err());
    assert!(bad_table.get::<_, i32>(1).is_err());
    assert!(bad_table.length().is_err());
    // Raw access ignores the exploding metatable.
    assert!(bad_table.raw_set(1, 1).is_ok());
    assert!(bad_table.raw_get::<_, i32>(1).is_ok());
    assert_eq!(bad_table.raw_length().unwrap(), 1);
    assert!(bad_table.pairs::<i64, i64>().is_ok());
    assert!(bad_table.array_values::<i64>().is_ok());
}
|
use image;
use image::{DynamicImage, GenericImageView, Rgba};
use image::codecs::png::{PngDecoder};
use image::codecs::gif::{GifDecoder};
use image::{AnimationDecoder};
use std::fs::File;
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use enum_iterator::Sequence;
/// Errors produced while converting an image into icon statements.
/// The leading `String` in every variant is the offending file path.
#[derive(Debug)]
pub enum IconError {
    // (path, actual size, expected size) — wrong pixel dimensions.
    InvalidIconSize(String, usize, usize),
    // (path, actual palette size, maximum allowed).
    InvalidPaletteSize(String, usize, usize),
    FileLoadFailure(String, std::io::Error),
    ImageParseError(String, image::ImageError)
}
/// Errors from loading/decoding an image file; the `String` is the path.
#[derive(Debug)]
pub enum ImageLoadError {
    FileLoadFailure(String, std::io::Error),
    ImageParseError(String, image::ImageError)
}
impl std::convert::From<ImageLoadError> for IconError {
    /// Lift a generic image-loading failure into the icon error space,
    /// preserving the path and underlying error.
    fn from(error: ImageLoadError) -> IconError {
        match error {
            ImageLoadError::FileLoadFailure(path, err) => IconError::FileLoadFailure(path, err),
            ImageLoadError::ImageParseError(path, err) => IconError::ImageParseError(path, err),
        }
    }
}
/// Supported output formats; `name`/`from` below map these to/from their
/// command-line names.
#[derive(Debug,Clone,Copy,Hash,Eq,PartialEq,Deserialize,Serialize,Sequence)]
pub enum ImageFormat {
    Json,
    JsonPretty,
    Asm1Bit,
    Asm1BitMasked
}
impl ImageFormat {
    /// Canonical textual name of this format.
    pub fn name(&self) -> &str {
        match self {
            ImageFormat::Json => "json",
            ImageFormat::JsonPretty => "json-pretty",
            ImageFormat::Asm1Bit => "1bit-asm",
            ImageFormat::Asm1BitMasked => "1bit-asm-masked",
        }
    }
    /// Inverse of `name`: parse a format name, `None` when unrecognized.
    pub fn from(name: &str) -> Option<ImageFormat> {
        use ImageFormat::*;
        match name {
            "json" => Some(Json),
            "json-pretty" => Some(JsonPretty),
            "1bit-asm" => Some(Asm1Bit),
            "1bit-asm-masked" => Some(Asm1BitMasked),
            _ => None,
        }
    }
}
/// An 8-bit-per-channel RGBA color.
#[derive(Debug,Clone,Copy,Hash,Eq,PartialEq,Deserialize,Serialize)]
pub struct Color {
    pub red: u8,
    pub green: u8,
    pub blue: u8,
    pub alpha: u8
}
impl std::default::Default for Color {
    /// Fully transparent black — every channel zeroed.
    fn default() -> Self {
        Color { red: 0, green: 0, blue: 0, alpha: 0 }
    }
}
impl Color {
    /// Rescale every channel from the 0-255 range down to 0-`max`,
    /// rounding to the nearest integer.
    pub fn scale_channels(&self, max: u8) -> Color {
        let maxf = max as f64;
        let scale = |channel: u8| ((maxf * (channel as f64)) / 255.0f64).round() as u8;
        Color {
            red: scale(self.red),
            green: scale(self.green),
            blue: scale(self.blue),
            alpha: scale(self.alpha),
        }
    }
    /// Pack the low nibble of each channel into a 16-bit word,
    /// ordered alpha-red-green-blue from high bits to low.
    pub fn to_palette_color(&self) -> i32 {
        let nibble = |channel: u8| (channel as i32) & 0xF;
        (nibble(self.alpha) << 12)
            | (nibble(self.red) << 8)
            | (nibble(self.green) << 4)
            | nibble(self.blue)
    }
    /// Luma-weighted grayscale conversion (0.3 R + 0.59 G + 0.11 B);
    /// alpha is preserved.
    pub fn grayscale(&self) -> Color {
        let gray = ((0.3f64 * (self.red as f64))
            + (0.59f64 * (self.green as f64))
            + (0.11f64 * (self.blue as f64))).round() as u8;
        Color { red: gray, green: gray, blue: gray, alpha: self.alpha }
    }
    // If transparent, return 1, else 0.
    pub fn transparency(&self) -> u8 {
        if self.alpha <= 127 { 1u8 } else { 0u8 }
    }
}
/// One animation frame: a delay as a (numerator, denominator) pair in
/// milliseconds (see `to_frame`) and a row-major grid of pixels.
#[derive(Debug,Clone,Deserialize,Serialize)]
pub struct Frame {
    pub delay: (u32, u32),
    pub rows: Vec<Vec<Color>>
}
impl Frame {
    /// (width, height); width comes from the first row — all rows are
    /// expected to be equally long (see `is_size`).
    pub fn size(&self) -> (usize, usize) {
        let height = self.rows.len();
        let width = if height > 0 {
            self.rows[0].len()
        } else {
            0usize
        };
        (width, height)
    }
    /// Number of pixel rows.
    pub fn height(&self) -> usize {
        self.rows.len()
    }
    /// Length of the first row, or 0 for an empty frame.
    pub fn width(&self) -> usize {
        if self.rows.len() > 0 {
            self.rows[0].len()
        } else {
            0usize
        }
    }
    /// Pixel accessor; panics on out-of-bounds coordinates.
    pub fn get(&self, x: usize, y: usize) -> &Color {
        let width = self.width();
        let height = self.height();
        if x >= width || y >= height {
            panic!("Frame coordinate is out of bounds: get({}, {}). Actual dimensions: {}x{}", x, y, width, height);
        }
        &self.rows[y][x]
    }
    /// Emit the frame as 1-bit-per-pixel assembly statements: a byte pair
    /// (width, height), then optionally a transparency mask, then the pixel
    /// bitmap. Bits are packed MSB-first, 8 pixels per byte.
    pub fn to_1bit_asm(&self, masked: bool) -> crate::ast::Statements {
        use crate::expression::{Expr};
        use crate::ast::{Statement, Statements, ByteValue};
        let mut stmts = vec![];
        let (width, height) = self.size();
        stmts.push(Statement::comment(&format!("Width: {}, Height: {}", width, height)));
        stmts.push(Statement::byte(vec![
            ByteValue::Expr(Expr::decimal(width as i32)),
            ByteValue::Expr(Expr::decimal(height as i32))]));
        if masked {
            // Mask pass: bit = 1 marks a transparent pixel. Each byte starts
            // as 0b0111_1111 with the first pixel's bit in the MSB, so any
            // trailing padding bits in the last byte of a row remain set
            // (transparent). NOTE(review): presumably intentional padding
            // behavior — confirm against the consumer of the mask.
            for row in &self.rows {
                let mut bytes: Vec<u8> = vec![];
                let mut idx: usize = 0;
                for color in row {
                    let bit = color.transparency();
                    if idx % 8 == 0 {
                        bytes.push(0b01111111 | bit << 7 as u8);
                    } else {
                        let last_idx = bytes.len() - 1;
                        let byte = bytes[last_idx];
                        let bit_pos = 7 - (idx % 8);
                        // Clear the slot, then write this pixel's bit.
                        bytes[last_idx] = (byte & !(1 << bit_pos)) | (bit << bit_pos);
                    }
                    idx = idx + 1;
                }
                let byte_exprs: Vec<ByteValue> = bytes.iter()
                    .map(|b| ByteValue::Expr(Expr::binary(*b as i32)))
                    .collect();
                stmts.push(Statement::byte(byte_exprs));
            }
        }
        // Pixel pass: a pixel is "on" (1) when its grayscale value is dark
        // (red channel of the grayscale color <= 127). Padding bits are 0.
        for row in &self.rows {
            let mut bytes: Vec<u8> = vec![];
            let mut idx: usize = 0;
            for color in row {
                let gray = color.grayscale();
                let bit = if gray.red <= 127 { 1 } else { 0 };
                if idx % 8 == 0 {
                    bytes.push(bit << 7 as u8);
                } else {
                    let last_idx = bytes.len() - 1;
                    let byte = bytes[last_idx];
                    bytes[last_idx] = byte | (bit << (7 - (idx % 8)));
                }
                idx = idx + 1;
            }
            let byte_exprs: Vec<ByteValue> = bytes.iter()
                .map(|b| ByteValue::Expr(Expr::binary(*b as i32)))
                .collect();
            stmts.push(Statement::byte(byte_exprs));
        }
        Statements::new(HashMap::new(), stmts)
    }
    /// True when there are exactly `height` rows and every row is exactly
    /// `width` pixels long.
    pub fn is_size(&self, width: usize, height: usize) -> bool {
        if height != self.rows.len() {
            return false;
        }
        for row in &self.rows {
            if row.len() != width {
                return false;
            }
        }
        return true;
    }
}
/// A (possibly animated) image: one or more frames. Several accessors below
/// assume at least one frame is present.
#[derive(Debug,Clone,Deserialize,Serialize)]
pub struct Image {
    pub frames: Vec<Frame>
}
impl Image {
    /// Width in pixels of the first frame (panics when there are no frames).
    pub fn width(&self) -> usize {
        self.frames[0].width()
    }
    /// Height in pixels of the first frame (panics when there are no frames).
    pub fn height(&self) -> usize {
        self.frames[0].height()
    }
    /// Pixel accessor on the first frame.
    pub fn get(&self, x: usize, y: usize) -> &Color {
        self.frames[0].get(x, y)
    }
    /// A copy of this image reduced to its first frame only.
    pub fn as_still(&self) -> Image {
        Image {
            frames: vec![self.frames[0].clone()]
        }
    }
    /// Rescale every channel of every pixel to 0..=`max`
    /// (see `Color::scale_channels`).
    pub fn scale_channels(&self, max: u8) -> Image {
        let mut frames = vec![];
        for frame in &self.frames {
            let mut rows = vec![];
            for row in &frame.rows {
                let mut new_row: Vec<Color> = vec![];
                for color in row {
                    new_row.push(color.scale_channels(max));
                }
                rows.push(new_row);
            }
            frames.push(Frame { delay: frame.delay, rows: rows })
        }
        Image { frames }
    }
    /// Map each distinct color to a palette index, in first-seen order
    /// across all frames.
    pub fn get_palette(&self) -> HashMap<Color,usize> {
        let mut index: usize = 0;
        let mut palette = HashMap::new();
        for frame in &self.frames {
            for row in &frame.rows {
                for color in row {
                    if !palette.contains_key(color) {
                        palette.insert(*color, index);
                        index = index + 1;
                    }
                }
            }
        }
        palette
    }
    /// Emit a comment plus a word pair (frame count, animation speed).
    pub fn to_frame_count_and_speed(&self, name: &str, speed: u16) -> crate::ast::Statements {
        use crate::expression::{Expr};
        use crate::ast::{Statements, Statement};
        let mut stmts = vec![];
        let frames = Expr::num(self.frames.len() as i32);
        let anim_speed = Expr::num(speed as i32);
        stmts.push(Statement::comment(&format!("\n\"{}\" Frames: {}, Speed: {}", name, self.frames.len(), speed)));
        stmts.push(Statement::word(vec![frames, anim_speed]));
        Statements::new(HashMap::new(), stmts)
    }
    /// Concatenate the 1-bit assembly output of every frame
    /// (see `Frame::to_1bit_asm`).
    pub fn to_1bit_asm(&self, masked: bool) -> crate::ast::Statements {
        use crate::ast::{Statements};
        let mut stmts = vec![];
        for frame in &self.frames {
            for stmt in frame.to_1bit_asm(masked).iter() {
                stmts.push(stmt.clone());
            }
        }
        Statements::new(HashMap::new(), stmts)
    }
    /// Emit icon data: a 16- or 256-entry palette (padded with default
    /// colors) followed by paletted pixel bytes, or raw 16-bit packed color
    /// words when the palette is too large to index.
    pub fn to_icon(&self, palette_map: &HashMap<Color,usize>, name: &str) -> crate::ast::Statements {
        use crate::expression::{Expr};
        use crate::ast::{Statements, Statement, ByteValue};
        let mut stmts = vec![];
        // Pick the smallest palette size that fits; an empty vec means
        // "no palette" and triggers the true-color fallback below.
        let mut palette = if palette_map.len() <= 16 {
            vec![Color::default(); 16]
        } else if palette_map.len() <= 256 {
            vec![Color::default(); 256]
        } else {
            vec![]
        };
        if palette.len() > 0 {
            // Build palette
            for key in palette_map.keys() {
                let idx = palette_map[key];
                palette[idx] = key.clone();
            }
            // Push palette, 8 packed color words per statement line.
            stmts.push(Statement::comment(&format!("\nPalette for \"{}\"", name)));
            for y in 0..(palette.len() / 8) {
                let mut words = vec![];
                for x in 0..8 {
                    let idx = (y * 8) + x;
                    let color = palette[idx];
                    let palette_color = Expr::num(color.to_palette_color());
                    words.push(palette_color);
                }
                stmts.push(Statement::word(words));
            }
            // Push pixel data
            stmts.push(Statement::comment(&format!("\nPixel data for \"{}\"", name)));
            if palette.len() == 16 {
                // 16-color mode: two 4-bit indices per byte.
                // NOTE(review): assumes every row length is even — an odd row
                // would index out of bounds at `color_idx + 1`; confirm.
                let mut frame_idx = 1;
                for frame in &self.frames {
                    if self.frames.len() > 1 {
                        if frame_idx == 1 {
                            stmts.push(Statement::comment(&format!("Frame {}", frame_idx)));
                        } else {
                            stmts.push(Statement::comment(&format!("\nFrame {}", frame_idx)));
                        }
                    }
                    for row in &frame.rows {
                        let mut bytes = vec![];
                        for color_idx in (0..row.len()).step_by(2) {
                            let color1 = &row[color_idx];
                            let color2 = &row[color_idx + 1];
                            let index1 = palette_map[color1];
                            let index2 = palette_map[color2];
                            let value = ((index1 & 0xF) << 4) | (index2 & 0xF);
                            bytes.push(ByteValue::Expr(Expr::num(value as i32)));
                        }
                        stmts.push(Statement::byte(bytes));
                    }
                    frame_idx = frame_idx + 1;
                }
            } else {
                // 256-color mode: one 8-bit index per pixel.
                for frame in &self.frames {
                    for row in &frame.rows {
                        let mut bytes = vec![];
                        for color in row {
                            let index = palette_map[color] & 0xFF;
                            bytes.push(ByteValue::Expr(Expr::num(index as i32)));
                        }
                        stmts.push(Statement::byte(bytes));
                    }
                }
            }
        } else {
            // True-color fallback: one 16-bit packed color word per pixel.
            for frame in &self.frames {
                for row in &frame.rows {
                    let mut words = vec![];
                    for color in row {
                        let value = color.to_palette_color();
                        words.push(Expr::num((value & 0xFFFF) as i32));
                    }
                    stmts.push(Statement::word(words));
                }
            }
        }
        Statements::new(HashMap::new(), stmts)
    }
    /// True when every frame is exactly `width` x `height`.
    pub fn is_size(&self, width: usize, height: usize) -> bool {
        for frame in &self.frames {
            if !frame.is_size(width, height) {
                return false;
            }
        }
        return true;
    }
}
/// Load an image from `path`, dispatching on the (case-insensitive) file
/// extension: PNG and GIF get animation-aware decoders, anything else is
/// decoded as a single still frame.
pub fn load_image(path: &str) -> Result<Image, ImageLoadError> {
    let lower_path = path.to_lowercase();
    if lower_path.ends_with(".png") {
        return load_png(path);
    }
    if lower_path.ends_with(".gif") {
        return load_gif(path);
    }
    let image = image::open(path)
        .map_err(|e| ImageLoadError::ImageParseError(path.to_string(), e))?;
    Ok(to_image(&image))
}
/// Decode every frame of a GIF, preserving each frame's delay.
fn load_gif(path: &str) -> Result<Image, ImageLoadError> {
    let file_in = File::open(path)
        .map_err(|e| ImageLoadError::FileLoadFailure(path.to_string(), e))?;
    let decoder = GifDecoder::new(file_in)
        .map_err(|e| ImageLoadError::ImageParseError(path.to_string(), e))?;
    let decoded = decoder.into_frames().collect_frames()
        .map_err(|e| ImageLoadError::ImageParseError(path.to_string(), e))?;
    let frames = decoded
        .into_iter()
        .map(|f| to_frame(f.delay().numer_denom_ms(), &f.into_buffer()))
        .collect();
    Ok(Image { frames })
}
/// Decode a PNG; animated PNGs (APNG) yield one frame per animation frame,
/// plain PNGs a single still frame.
fn load_png(path: &str) -> Result<Image, ImageLoadError> {
    let file_in = File::open(path)
        .map_err(|e| ImageLoadError::FileLoadFailure(path.to_string(), e))?;
    let decoder = PngDecoder::new(file_in)
        .map_err(|e| ImageLoadError::ImageParseError(path.to_string(), e))?;
    if !decoder.is_apng() {
        let image = DynamicImage::from_decoder(decoder)
            .map_err(|e| ImageLoadError::ImageParseError(path.to_string(), e))?;
        return Ok(to_image(&image));
    }
    let decoded = decoder.apng().into_frames().collect_frames()
        .map_err(|e| ImageLoadError::ImageParseError(path.to_string(), e))?;
    let frames = decoded
        .into_iter()
        .map(|f| to_frame(f.delay().numer_denom_ms(), &f.into_buffer()))
        .collect();
    Ok(Image { frames })
}
/// Copy a pixel buffer into our own row-major `Frame` representation,
/// attaching the given (numerator, denominator) delay.
fn to_frame<T: GenericImageView<Pixel = Rgba<u8>>>(
    delay: (u32, u32),
    frame: &T
) -> Frame {
    let (width, height) = frame.dimensions();
    let rows = (0..height)
        .map(|y| {
            (0..width)
                .map(|x| {
                    let Rgba(ref parts) = frame.get_pixel(x, y);
                    Color {
                        red: parts[0],
                        green: parts[1],
                        blue: parts[2],
                        alpha: parts[3],
                    }
                })
                .collect()
        })
        .collect();
    Frame { delay, rows }
}
/// Wrap a still image as a one-frame `Image` with a zero delay.
fn to_image<T: GenericImageView<Pixel = Rgba<u8>>>(image: &T) -> Image {
    Image { frames: vec![to_frame((0, 0), image)] }
}
/// Map an optional eyecatch palette to a type code:
/// 0 = no eyecatch, 1 = true color, 2 = 256-color palette,
/// 3 = 16-color palette (see the comments emitted in `to_icon`).
fn eyecatch_type(palette: &Option<HashMap<Color, usize>>) -> u16 {
    match palette.as_ref() {
        None => 0,
        Some(p) if p.len() <= 16 => 3,
        Some(p) if p.len() <= 256 => 2,
        Some(_) => 1,
    }
}
/// Build icon statements from `icon_path` (must be 32x32 with at most 16
/// colors), optionally followed by a 72x56 eyecatch image.
pub fn to_icon(icon_path: &str, speed: Option<u16>, eyecatch_file: Option<&str>) -> Result<crate::ast::Statements, IconError> {
    use crate::expression::{Expr};
    use crate::ast::{Statement, ByteValue};
    // Channels are scaled to 4 bits (0-15) to match the packed palette format.
    let image = load_image(icon_path)?.scale_channels(15);
    let palette = image.get_palette();
    if palette.len() > 16 {
        return Err(IconError::InvalidPaletteSize(icon_path.to_string(), palette.len(), 16));
    }
    if !image.is_size(32, 32) {
        return Err(IconError::InvalidIconSize(icon_path.to_string(), 32, 32));
    }
    // The eyecatch, if any, is always reduced to a single still frame.
    let eyecatch = {
        if let Some(eyecatch_file) = eyecatch_file {
            Some(load_image(eyecatch_file)?
                .as_still()
                .scale_channels(15))
        } else {
            None
        }
    };
    let eyecatch_palette = eyecatch.as_ref().map(|ref img| img.get_palette());
    let eyecatch_type = eyecatch_type(&eyecatch_palette);
    // Animation speed defaults to 10 when not specified.
    let mut stmts = image.to_frame_count_and_speed(&icon_path, speed.unwrap_or(10));
    let eyecatch_comment = match eyecatch_type {
        0 => "Eyecatch type is 0: there is no eyecatch image.",
        1 => "Eyecatch type is 1: the eyecatch is stored as a 16-bit true color image.",
        2 => "Eyecatch type is 2: the eyecatch image has a 256-color palette.",
        3 => "Eyecatch type is 3: the eyecatch image has a 16-color palette.",
        _ => panic!("Unexpected eyecatch type: {}", eyecatch_type)
    };
    stmts.push(Statement::comment(&format!("\n{}", eyecatch_comment)));
    stmts.push(Statement::word(vec![Expr::num(eyecatch_type as i32)]));
    // CRC and data size are emitted as zero placeholders here —
    // NOTE(review): presumably filled in by a later post-processing step;
    // confirm with the consumer of these statements.
    stmts.push(Statement::comment("\nPlaceholder for CRC checksum."));
    stmts.push(Statement::word(vec![Expr::num(0 as i32)]));
    stmts.push(Statement::comment("\nPlaceholder for file data size."));
    stmts.push(Statement::word(vec![Expr::num(0), Expr::num(0)]));
    stmts.push(Statement::comment("\nReserved bytes."));
    stmts.push(Statement::byte(vec![ByteValue::Expr(Expr::num(0)); 10]));
    stmts.push(Statement::byte(vec![ByteValue::Expr(Expr::num(0)); 10]));
    stmts.append(&image.to_icon(&palette, &icon_path).as_slice());
    if eyecatch.is_some() && eyecatch_palette.is_some() {
        let eyecatch_path = eyecatch_file.unwrap();
        // Eyecatch images must be exactly 72x56.
        if !eyecatch.as_ref().unwrap().is_size(72, 56) {
            return Err(IconError::InvalidIconSize(eyecatch_path.to_string(), 72, 56));
        }
        stmts.append(&eyecatch.unwrap().to_icon(&eyecatch_palette.unwrap(), eyecatch_path).as_slice());
    }
    Ok(stmts)
}
|
use crate::{
command::Command, define_node_command, get_set_swap, physics::Physics,
scene::commands::SceneContext,
};
use rg3d::{
animation::Animation,
core::{
algebra::{UnitQuaternion, Vector3},
pool::{Handle, Ticket},
},
engine::resource_manager::MaterialSearchOptions,
scene::{
base::PhysicsBinding,
graph::{Graph, SubGraph},
node::Node,
},
};
use std::path::PathBuf;
/// Undoable editor command that moves a scene node between two positions.
#[derive(Debug)]
pub struct MoveNodeCommand {
    node: Handle<Node>,
    // Position before the move (restored on revert).
    old_position: Vector3<f32>,
    // Position after the move (applied on execute).
    new_position: Vector3<f32>,
}
impl MoveNodeCommand {
    /// Build a command that can toggle `node` between the two positions.
    pub fn new(node: Handle<Node>, old_position: Vector3<f32>, new_position: Vector3<f32>) -> Self {
        Self {
            node,
            old_position,
            new_position,
        }
    }
    /// Exchange old/new and return the position that should be applied next.
    fn swap(&mut self) -> Vector3<f32> {
        std::mem::swap(&mut self.new_position, &mut self.old_position);
        // After the swap, `old_position` holds the position to apply.
        self.old_position
    }
    /// Write `position` to the scene node and, when the node is bound to a
    /// physics body, keep that body's position in sync.
    fn set_position(&self, graph: &mut Graph, physics: &mut Physics, position: Vector3<f32>) {
        graph[self.node].local_transform_mut().set_position(position);
        if let Some(&body) = physics.binder.value_of(&self.node) {
            physics.bodies[body].position = position;
        }
    }
}
impl<'a> Command<'a> for MoveNodeCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        "Move Node".to_owned()
    }
    // execute and revert are symmetric: each swap() flips old/new and
    // returns the position to apply, so alternating calls toggle the node
    // between the two stored positions.
    fn execute(&mut self, context: &mut Self::Context) {
        let position = self.swap();
        self.set_position(
            &mut context.scene.graph,
            &mut context.editor_scene.physics,
            position,
        );
    }
    fn revert(&mut self, context: &mut Self::Context) {
        let position = self.swap();
        self.set_position(
            &mut context.scene.graph,
            &mut context.editor_scene.physics,
            position,
        );
    }
}
/// Rescales a scene node; undo restores the previous scale.
#[derive(Debug)]
pub struct ScaleNodeCommand {
    node: Handle<Node>,
    old_scale: Vector3<f32>,
    new_scale: Vector3<f32>,
}
impl ScaleNodeCommand {
    /// Creates a command that scales `node` from `old_scale` to `new_scale`.
    pub fn new(node: Handle<Node>, old_scale: Vector3<f32>, new_scale: Vector3<f32>) -> Self {
        Self {
            node,
            old_scale,
            new_scale,
        }
    }
    /// Exchanges the stored scales and returns the one to apply next.
    fn swap(&mut self) -> Vector3<f32> {
        let target = std::mem::replace(&mut self.new_scale, self.old_scale);
        self.old_scale = target;
        target
    }
    /// Applies `scale` to the node's local transform.
    fn set_scale(&self, graph: &mut Graph, scale: Vector3<f32>) {
        graph[self.node].local_transform_mut().set_scale(scale);
    }
}
impl<'a> Command<'a> for ScaleNodeCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        String::from("Scale Node")
    }
    fn execute(&mut self, context: &mut Self::Context) {
        let target = self.swap();
        self.set_scale(&mut context.scene.graph, target);
    }
    fn revert(&mut self, context: &mut Self::Context) {
        let target = self.swap();
        self.set_scale(&mut context.scene.graph, target);
    }
}
/// Rotates a scene node; undo restores the previous orientation.
#[derive(Debug)]
pub struct RotateNodeCommand {
    node: Handle<Node>,
    old_rotation: UnitQuaternion<f32>,
    new_rotation: UnitQuaternion<f32>,
}
impl RotateNodeCommand {
    /// Creates a command that rotates `node` from `old_rotation` to `new_rotation`.
    pub fn new(
        node: Handle<Node>,
        old_rotation: UnitQuaternion<f32>,
        new_rotation: UnitQuaternion<f32>,
    ) -> Self {
        Self {
            node,
            old_rotation,
            new_rotation,
        }
    }
    /// Exchanges the stored rotations and returns the one to apply next.
    fn swap(&mut self) -> UnitQuaternion<f32> {
        let target = std::mem::replace(&mut self.new_rotation, self.old_rotation);
        self.old_rotation = target;
        target
    }
    /// Writes `rotation` into the graph node and any bound physics body.
    fn set_rotation(
        &self,
        graph: &mut Graph,
        physics: &mut Physics,
        rotation: UnitQuaternion<f32>,
    ) {
        graph[self.node]
            .local_transform_mut()
            .set_rotation(rotation);
        // Keep the rigid body (if one is bound) in sync with the visual node.
        if let Some(&body) = physics.binder.value_of(&self.node) {
            physics.bodies[body].rotation = rotation;
        }
    }
}
impl<'a> Command<'a> for RotateNodeCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        String::from("Rotate Node")
    }
    fn execute(&mut self, context: &mut Self::Context) {
        let target = self.swap();
        self.set_rotation(
            &mut context.scene.graph,
            &mut context.editor_scene.physics,
            target,
        );
    }
    fn revert(&mut self, context: &mut Self::Context) {
        let target = self.swap();
        self.set_rotation(
            &mut context.scene.graph,
            &mut context.editor_scene.physics,
            target,
        );
    }
}
/// Re-parents `child` under `parent`. `link` records the former parent each
/// time it runs, so executing it twice is a no-op pair (execute/revert).
#[derive(Debug)]
pub struct LinkNodesCommand {
    child: Handle<Node>,
    parent: Handle<Node>,
}
impl LinkNodesCommand {
    pub fn new(child: Handle<Node>, parent: Handle<Node>) -> Self {
        Self { child, parent }
    }
    /// Links `child` to the stored parent, remembering the previous parent
    /// so the next invocation restores it.
    fn link(&mut self, graph: &mut Graph) {
        let previous = graph[self.child].parent();
        graph.link_nodes(self.child, self.parent);
        self.parent = previous;
    }
}
impl<'a> Command<'a> for LinkNodesCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        String::from("Link Nodes")
    }
    fn execute(&mut self, context: &mut Self::Context) {
        self.link(&mut context.scene.graph);
    }
    fn revert(&mut self, context: &mut Self::Context) {
        self.link(&mut context.scene.graph);
    }
}
/// Removes a node from the graph, keeping a pool ticket so undo can restore
/// it into the very same slot (preserving its handle).
#[derive(Debug)]
pub struct DeleteNodeCommand {
    handle: Handle<Node>,
    ticket: Option<Ticket<Node>>,
    node: Option<Node>,
    parent: Handle<Node>,
}
impl DeleteNodeCommand {
    pub fn new(handle: Handle<Node>) -> Self {
        Self {
            handle,
            ticket: None,
            node: None,
            parent: Default::default(),
        }
    }
}
impl<'a> Command<'a> for DeleteNodeCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        String::from("Delete Node")
    }
    fn execute(&mut self, context: &mut Self::Context) {
        let graph = &mut context.scene.graph;
        // Remember the parent so undo can re-link the node where it was.
        self.parent = graph[self.handle].parent();
        let (ticket, node) = graph.take_reserve(self.handle);
        self.ticket = Some(ticket);
        self.node = Some(node);
    }
    fn revert(&mut self, context: &mut Self::Context) {
        let ticket = self.ticket.take().unwrap();
        let node = self.node.take().unwrap();
        self.handle = context.scene.graph.put_back(ticket, node);
        context.scene.graph.link_nodes(self.handle, self.parent);
    }
    fn finalize(&mut self, context: &mut Self::Context) {
        // If the command is retired while the node is detached, release the
        // reserved pool slot for good.
        if let Some(ticket) = self.ticket.take() {
            context.scene.graph.forget_ticket(ticket)
        }
    }
}
/// Instantiates a model resource into the scene. Undo detaches the whole
/// instantiated sub-graph (and its animations) via tickets; a later redo
/// puts them back so existing handles stay valid.
#[derive(Debug)]
pub struct LoadModelCommand {
    path: PathBuf,
    // Root of the instantiated model; default (none) until first execute.
    model: Handle<Node>,
    animations: Vec<Handle<Animation>>,
    // Holds the detached sub-graph between revert and redo/finalize.
    sub_graph: Option<SubGraph>,
    // Holds detached animations (with their pool tickets) between revert and redo.
    animations_container: Vec<(Ticket<Animation>, Animation)>,
    materials_search_options: MaterialSearchOptions,
}
impl LoadModelCommand {
    /// Creates the command; nothing is loaded until the first `execute`.
    pub fn new(path: PathBuf, materials_search_options: MaterialSearchOptions) -> Self {
        Self {
            path,
            model: Default::default(),
            animations: Default::default(),
            sub_graph: None,
            animations_container: Default::default(),
            materials_search_options,
        }
    }
}
impl<'a> Command<'a> for LoadModelCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        "Load Model".to_owned()
    }
    fn execute(&mut self, context: &mut Self::Context) {
        if self.model.is_none() {
            // No model was loaded yet, do it.
            // NOTE(review): a failed load is silently ignored — the command
            // then does nothing. Confirm this is the intended UX.
            if let Ok(model) = rg3d::core::futures::executor::block_on(
                context
                    .resource_manager
                    .request_model(&self.path, self.materials_search_options.clone()),
            ) {
                let instance = model.instantiate(context.scene);
                self.model = instance.root;
                self.animations = instance.animations;
                // Enable instantiated animations.
                for &animation in self.animations.iter() {
                    context.scene.animations[animation].set_enabled(true);
                }
            }
        } else {
            // A model was loaded, but change was reverted and here we must put all nodes
            // back to graph.
            self.model = context
                .scene
                .graph
                .put_sub_graph_back(self.sub_graph.take().unwrap());
            for (ticket, animation) in self.animations_container.drain(..) {
                context.scene.animations.put_back(ticket, animation);
            }
        }
    }
    fn revert(&mut self, context: &mut Self::Context) {
        // Detach the instantiated sub-graph and its animations, keeping
        // tickets so `execute` can restore them into the same slots.
        self.sub_graph = Some(context.scene.graph.take_reserve_sub_graph(self.model));
        self.animations_container = self
            .animations
            .iter()
            .map(|&anim| context.scene.animations.take_reserve(anim))
            .collect();
    }
    fn finalize(&mut self, context: &mut Self::Context) {
        // Permanently drop anything still detached when the command is retired.
        if let Some(sub_graph) = self.sub_graph.take() {
            context.scene.graph.forget_sub_graph(sub_graph)
        }
        for (ticket, _) in self.animations_container.drain(..) {
            context.scene.animations.forget_ticket(ticket);
        }
    }
}
/// Detaches an entire sub-graph; undo re-attaches it under its old parent.
#[derive(Debug)]
pub struct DeleteSubGraphCommand {
    sub_graph_root: Handle<Node>,
    sub_graph: Option<SubGraph>,
    parent: Handle<Node>,
}
impl DeleteSubGraphCommand {
    pub fn new(sub_graph_root: Handle<Node>) -> Self {
        Self {
            sub_graph_root,
            sub_graph: None,
            parent: Handle::NONE,
        }
    }
}
impl<'a> Command<'a> for DeleteSubGraphCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        String::from("Delete Sub Graph")
    }
    fn execute(&mut self, context: &mut Self::Context) {
        let graph = &mut context.scene.graph;
        // Remember where the sub-graph hung so undo can restore the link.
        self.parent = graph[self.sub_graph_root].parent();
        self.sub_graph = Some(graph.take_reserve_sub_graph(self.sub_graph_root));
    }
    fn revert(&mut self, context: &mut Self::Context) {
        let graph = &mut context.scene.graph;
        graph.put_sub_graph_back(self.sub_graph.take().unwrap());
        graph.link_nodes(self.sub_graph_root, self.parent);
    }
    fn finalize(&mut self, context: &mut Self::Context) {
        // Drop the detached sub-graph for good once the command is retired.
        if let Some(sub_graph) = self.sub_graph.take() {
            context.scene.graph.forget_sub_graph(sub_graph)
        }
    }
}
/// Adds a node to the graph; undo detaches it while keeping it alive so a
/// redo can put it back into the very same slot.
#[derive(Debug)]
pub struct AddNodeCommand {
    ticket: Option<Ticket<Node>>,
    handle: Handle<Node>,
    node: Option<Node>,
    // Name is cached at construction because `node` is moved out on execute.
    cached_name: String,
}
impl AddNodeCommand {
    pub fn new(node: Node) -> Self {
        Self {
            ticket: None,
            handle: Default::default(),
            cached_name: format!("Add Node {}", node.name()),
            node: Some(node),
        }
    }
}
impl<'a> Command<'a> for AddNodeCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        self.cached_name.clone()
    }
    fn execute(&mut self, context: &mut Self::Context) {
        if let Some(ticket) = self.ticket.take() {
            // Redo path: restore into the reserved slot and sanity-check it.
            let handle = context
                .scene
                .graph
                .put_back(ticket, self.node.take().unwrap());
            assert_eq!(handle, self.handle);
        } else {
            // First execution: plain insert.
            self.handle = context.scene.graph.add_node(self.node.take().unwrap());
        }
    }
    fn revert(&mut self, context: &mut Self::Context) {
        let (ticket, node) = context.scene.graph.take_reserve(self.handle);
        self.ticket = Some(ticket);
        self.node = Some(node);
    }
    fn finalize(&mut self, context: &mut Self::Context) {
        if let Some(ticket) = self.ticket.take() {
            context.scene.graph.forget_ticket(ticket)
        }
    }
}
// Simple property-setter commands generated by `define_node_command!`; each
// one swaps its stored value with the node's current value via
// `get_set_swap!`, which makes execute and revert the same operation.
define_node_command!(SetNameCommand("Set Name", String) where fn swap(self, node) {
    get_set_swap!(self, node, name_owned, set_name);
});
define_node_command!(SetPhysicsBindingCommand("Set Physics Binding", PhysicsBinding) where fn swap(self, node) {
    get_set_swap!(self, node, physics_binding, set_physics_binding);
});
define_node_command!(SetTagCommand("Set Tag", String) where fn swap(self, node) {
    get_set_swap!(self, node, tag_owned, set_tag);
});
define_node_command!(SetVisibleCommand("Set Visible", bool) where fn swap(self, node) {
    get_set_swap!(self, node, visibility, set_visibility)
});
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use js::jsapi::CallArgs;
use js::jsapi::CompartmentOptions;
use js::jsapi::JSAutoCompartment;
use js::jsapi::JSClass;
use js::jsapi::JSContext;
use js::jsapi::JS_FireOnNewGlobalObject;
use js::jsapi::JS_GlobalObjectTraceHook;
use js::jsapi::JS_InitStandardClasses;
use js::jsapi::JSNativeWrapper;
use js::jsapi::JS_NewGlobalObject;
use js::jsapi::JSObject;
use js::jsapi::JSPropertySpec;
use js::jsapi::JS_SetPrototype;
use js::jsapi::JSTraceOp;
use js::jsapi::JSVersion;
use js::jsapi::MutableHandleObject;
use js::jsapi::OnNewGlobalHookOption;
use js::jsapi::RootedObject;
use js::jsapi::Value;
use js::JSCLASS_GLOBAL_SLOT_COUNT;
use js::JSCLASS_IS_GLOBAL;
use js::JSCLASS_RESERVED_SLOTS_MASK;
use js::JSCLASS_RESERVED_SLOTS_SHIFT;
use js::JSPROP_ENUMERATE;
use js::JSPROP_SHARED;
use js::jsval::ObjectValue;
use libc::c_char;
use script::console;
use script::reflect::{Reflectable, PrototypeID, finalize, initialize_global};
use std::ptr;
use std::env;
use std::process;
use js::jsapi::JSFunctionSpec;
use js::conversions::FromJSValConvertible;
/// Minimal DOM global object; the `usize` payload is currently unused.
pub struct Global(usize);
impl Global {
    /// Spawns the browser binary named by the `SERVO_PATH` environment
    /// variable with `url` as its only argument, without waiting for it.
    ///
    /// Panics (with context) when `SERVO_PATH` is unset or the spawn fails.
    fn launch_browser(&self, url: String) {
        let path_to_self = env::var_os("SERVO_PATH").expect("Please set SERVO_PATH");
        let mut child_process = process::Command::new(path_to_self);
        child_process.arg(url);
        // Fire-and-forget, but fail loudly with a useful message instead of
        // the previous bare `.unwrap()`.
        let _ = child_process.spawn().expect("failed to spawn browser process");
    }
}
// JSClass for the global object itself. Flags mark it as a global and
// reserve JSCLASS_GLOBAL_SLOT_COUNT + 1 slots (the extra one presumably
// stores the boxed Rust `Global` — confirm in script::reflect).
static CLASS: JSClass = JSClass {
    name: b"Global\0" as *const u8 as *const c_char,
    flags: JSCLASS_IS_GLOBAL |
    (((JSCLASS_GLOBAL_SLOT_COUNT + 1) & JSCLASS_RESERVED_SLOTS_MASK) <<
    JSCLASS_RESERVED_SLOTS_SHIFT),
    addProperty: None,
    delProperty: None,
    getProperty: None,
    setProperty: None,
    enumerate: None,
    resolve: None,
    convert: None,
    // finalize::<Global> runs when the JS object is garbage-collected.
    finalize: Some(finalize::<Global>),
    call: None,
    hasInstance: None,
    construct: None,
    // Globals need the stock trace hook so the GC traces their slots.
    trace: Some(JS_GlobalObjectTraceHook),
    reserved: [0 as *mut _; 25],
};
// Plain class for the prototype object: no slots, no finalizer, no tracing.
static PROTOTYPE_CLASS: JSClass = JSClass {
    name: b"GlobalPrototype\0" as *const u8 as *const c_char,
    flags: 0,
    addProperty: None,
    delProperty: None,
    getProperty: None,
    setProperty: None,
    enumerate: None,
    resolve: None,
    convert: None,
    finalize: None,
    call: None,
    hasInstance: None,
    construct: None,
    trace: None,
    reserved: [0 as *mut _; 25],
};
// Property table for the global: a single enumerable, shared `console`
// getter. The zeroed trailing entry is the JSAPI end-of-list sentinel.
const ATTRIBUTES: &'static [JSPropertySpec] = &[
    JSPropertySpec {
        name: b"console\0" as *const u8 as *const c_char,
        flags: ((JSPROP_SHARED | JSPROP_ENUMERATE) & 0xFF) as u8,
        getter: JSNativeWrapper { op: Some(get_console_native), info: 0 as *const _ },
        setter: JSNativeWrapper { op: None, info: 0 as *const _ }
    },
    // Sentinel entry terminating the list.
    JSPropertySpec {
        name: 0 as *const c_char,
        flags: 0,
        getter: JSNativeWrapper { op: None, info: 0 as *const _ },
        setter: JSNativeWrapper { op: None, info: 0 as *const _ }
    }
];
// Method table for the global: `launchBrowser(url)`, plus the sentinel.
const METHODS: &'static [JSFunctionSpec] = &[
    JSFunctionSpec {
        name: b"launchBrowser\0" as *const u8 as *const c_char,
        call: JSNativeWrapper {op: Some(launch_browser_native), info: 0 as *const _},
        nargs: 1,
        flags: JSPROP_ENUMERATE as u16,
        selfHostedName: 0 as *const c_char
    },
    // Sentinel entry terminating the list.
    JSFunctionSpec {
        name: 0 as *const c_char,
        call: JSNativeWrapper { op: None, info: 0 as *const _ },
        nargs: 0,
        flags: 0,
        selfHostedName: 0 as *const c_char
    }
];
// Wires the Global type into the generic reflection machinery in
// script::reflect (classes, property/method tables, prototype id).
impl Reflectable for Global {
    /// JSClass used for instances.
    fn class() -> &'static JSClass {
        &CLASS
    }
    /// JSClass used for the prototype object.
    fn prototype_class() -> &'static JSClass {
        &PROTOTYPE_CLASS
    }
    /// Property table (the `console` getter).
    fn attributes() -> Option<&'static [JSPropertySpec]> {
        Some(ATTRIBUTES)
    }
    /// Method table (`launchBrowser`).
    fn methods() -> Option<&'static [JSFunctionSpec]> {
        Some(METHODS)
    }
    /// Identifier of this interface's prototype (see script::reflect).
    fn prototype_index() -> PrototypeID {
        PrototypeID::Global
    }
}
/// Getter body for the `console` property: creates a stdout-backed console
/// object in the caller's scope and returns it through `args.rval()`.
/// (Pre-2018 codebase; error propagation uses the old `try!` macro.)
unsafe fn get_console(cx: *mut JSContext, args: &CallArgs) -> Result<(), ()> {
    let thisv = args.thisv();
    // Root `this` and the out-object so the GC can't move/collect them.
    let scope = RootedObject::new(cx, thisv.to_object());
    let mut rval = RootedObject::new(cx, ptr::null_mut());
    try!(console::create_console(cx,
                                 scope.handle(),
                                 Box::new(console::StdoutHandler),
                                 rval.handle_mut()));
    args.rval().set(ObjectValue(&*rval.ptr));
    Ok(())
}
/// C-ABI trampoline for the `console` getter; maps Ok/Err to the JS bool
/// success convention.
unsafe extern "C" fn get_console_native(cx: *mut JSContext, argc: u32, vp: *mut Value) -> bool {
    let args = CallArgs::from_vp(vp, argc);
    get_console(cx, &args).is_ok()
}
/// JS entry point for `launchBrowser`: recovers the Rust `Global` from
/// `this`, converts argument 0 to a String, and delegates to
/// `Global::launch_browser`.
unsafe fn launch_browser(cx: *mut JSContext, args: &CallArgs) -> Result<(), ()> {
    let global = try!(Global::from_value(cx, args.thisv()));
    let url = try!(String::from_jsval(cx, args.get(0), ()));
    (*global).launch_browser(url);
    Ok(())
}
/// C-ABI trampoline for `launchBrowser`.
unsafe extern "C" fn launch_browser_native(cx: *mut JSContext, argc: u32, vp: *mut Value) -> bool {
    let args = CallArgs::from_vp(vp, argc);
    launch_browser(cx, &args).is_ok()
}
/// Create a DOM global object with the given class.
///
/// Builds a new ES5 compartment with the supplied trace hook, installs the
/// standard classes, runs crate-level global initialization, and fires the
/// debugger's new-global hook. Returns the raw (unrooted) object pointer.
pub fn create_dom_global(cx: *mut JSContext,
                         class: &'static JSClass,
                         global: Box<Global>,
                         trace: JSTraceOp)
                         -> *mut JSObject {
    unsafe {
        let mut options = CompartmentOptions::default();
        options.version_ = JSVersion::JSVERSION_ECMA_5;
        options.traceGlobal_ = trace;
        let obj =
            RootedObject::new(cx,
                              JS_NewGlobalObject(cx,
                                                 class,
                                                 ptr::null_mut(),
                                                 OnNewGlobalHookOption::DontFireOnNewGlobalHook,
                                                 &options));
        assert!(!obj.ptr.is_null());
        // Everything below must run inside the new global's compartment.
        let _ac = JSAutoCompartment::new(cx, obj.ptr);
        // Hands the boxed Global to the JS object — presumably stored in a
        // reserved slot; confirm against Reflectable::init in script::reflect.
        global.init(obj.ptr);
        JS_InitStandardClasses(cx, obj.handle());
        initialize_global(obj.ptr);
        JS_FireOnNewGlobalObject(cx, obj.handle());
        obj.ptr
    }
}
/// Creates the `Global` JS object, wires up its prototype chain, and
/// returns it through `rval`.
pub unsafe fn create(cx: *mut JSContext, rval: MutableHandleObject) {
    rval.set(create_dom_global(cx, &CLASS, Box::new(Global(0)), None));
    // Prototype setup must happen inside the new global's compartment.
    let _ac = JSAutoCompartment::new(cx, rval.handle().get());
    let mut proto = RootedObject::new(cx, ptr::null_mut());
    Global::get_prototype_object(cx, rval.handle(), proto.handle_mut());
    assert!(JS_SetPrototype(cx, rval.handle(), proto.handle()));
}
|
use libc::{c_char, c_void, size_t, ssize_t};
use H5Ipublic::hid_t;
use H5Opublic::H5O_type_t;
use H5public::herr_t;
/// Reference types understood by the HDF5 H5R (references) API.
#[derive(Clone, Copy, Debug)]
#[repr(C)]
pub enum H5R_type_t {
    /// Invalid/uninitialized reference type.
    H5R_BADTYPE = -1,
    /// Reference to a whole object (dataset, group, named datatype).
    H5R_OBJECT,
    /// Reference to a region within a dataset.
    H5R_DATASET_REGION,
    H5R_MAXTYPE,
}
pub use self::H5R_type_t::*;
// Raw FFI bindings to the HDF5 H5R reference routines; see the HDF5 H5R
// reference manual for full semantics.
extern "C" {
    /// Creates a reference to the object/region `name` relative to `loc_id`.
    pub fn H5Rcreate(reference: *mut c_void, loc_id: hid_t, name: *const c_char,
                     ref_type: H5R_type_t, space_id: hid_t) -> herr_t;
    /// Opens the referenced object and returns its identifier.
    pub fn H5Rdereference(obj_id: hid_t, ref_type: H5R_type_t, reference: *const c_void) -> hid_t;
    /// Retrieves the type of the referenced object into `obj_type`.
    pub fn H5Rget_obj_type2(loc_id: hid_t, ref_type: H5R_type_t, reference: *const c_void,
                            obj_type: *mut H5O_type_t) -> herr_t;
    /// Retrieves the dataspace of a dataset-region reference.
    pub fn H5Rget_region(loc_id: hid_t, ref_type: H5R_type_t, reference: *const c_void) -> hid_t;
    /// Copies the referenced object's name into `name`; returns its length.
    pub fn H5Rget_name(loc_id: hid_t, ref_type: H5R_type_t, reference: *const c_void,
                       name: *mut c_char, size: size_t) -> ssize_t;
}
|
/// Prints a greeting ("Hi, world!") in Chinese.
fn main() {
    println!("嗨,世界!");
}
|
/// Simulates function overloading: each variant is one "signature".
enum ArgType {
    Int(i32),
    IntAndStr(i32, &'static str)
}
/// Dispatches on the argument variant and prints its payload.
fn overload_fn(arg: ArgType) {
    match arg {
        ArgType::IntAndStr(i, s) => println!("{},{}", i, s),
        ArgType::Int(i) => println!("{}", i),
    }
}
/// Calls the "overloaded" function with both argument shapes.
fn main() {
    overload_fn(ArgType::Int(3));
    overload_fn(ArgType::IntAndStr(4, "hello"));
}
// Audit record types in the "event message" range (1300-1399), mirroring
// the AUDIT_* constants from the Linux kernel's <linux/audit.h>. Gaps in
// the numbering correspond to types not defined here.
/// Lowest record type in the event-message range.
pub const AUDIT_EVENT_MESSAGE_MIN: u16 = 1300;
/// Highest record type in the event-message range.
pub const AUDIT_EVENT_MESSAGE_MAX: u16 = 1399;
/// Syscall event
pub const AUDIT_SYSCALL: u16 = 1300;
/// Filename path information
pub const AUDIT_PATH: u16 = 1302;
/// IPC record
pub const AUDIT_IPC: u16 = 1303;
/// sys_socketcall arguments
pub const AUDIT_SOCKETCALL: u16 = 1304;
/// Audit system configuration change
pub const AUDIT_CONFIG_CHANGE: u16 = 1305;
/// sockaddr copied as syscall arg
pub const AUDIT_SOCKADDR: u16 = 1306;
/// Current working directory
pub const AUDIT_CWD: u16 = 1307;
/// execve arguments
pub const AUDIT_EXECVE: u16 = 1309;
/// IPC new permissions record type
pub const AUDIT_IPC_SET_PERM: u16 = 1311;
/// POSIX MQ open record type
pub const AUDIT_MQ_OPEN: u16 = 1312;
/// POSIX MQ send/receive record type
pub const AUDIT_MQ_SENDRECV: u16 = 1313;
/// POSIX MQ notify record type
pub const AUDIT_MQ_NOTIFY: u16 = 1314;
/// POSIX MQ get/set attribute record type
pub const AUDIT_MQ_GETSETATTR: u16 = 1315;
/// For use by 3rd party modules
pub const AUDIT_KERNEL_OTHER: u16 = 1316;
/// audit record for pipe/socketpair
pub const AUDIT_FD_PAIR: u16 = 1317;
/// ptrace target
pub const AUDIT_OBJ_PID: u16 = 1318;
/// Input on an administrative TTY
pub const AUDIT_TTY: u16 = 1319;
/// End of multi-record event
pub const AUDIT_EOE: u16 = 1320;
/// Information about fcaps increasing perms
pub const AUDIT_BPRM_FCAPS: u16 = 1321;
/// Record showing argument to sys_capset
pub const AUDIT_CAPSET: u16 = 1322;
/// Record showing descriptor and flags in mmap
pub const AUDIT_MMAP: u16 = 1323;
/// Packets traversing netfilter chains
pub const AUDIT_NETFILTER_PKT: u16 = 1324;
/// Netfilter chain modifications
pub const AUDIT_NETFILTER_CFG: u16 = 1325;
/// Secure Computing event
pub const AUDIT_SECCOMP: u16 = 1326;
/// Proctitle emit event
pub const AUDIT_PROCTITLE: u16 = 1327;
/// audit log listing feature changes
pub const AUDIT_FEATURE_CHANGE: u16 = 1328;
/// Replace auditd if this packet unanswered
pub const AUDIT_REPLACE: u16 = 1329;
/// Kernel Module events
pub const AUDIT_KERN_MODULE: u16 = 1330;
/// Fanotify access decision
pub const AUDIT_FANOTIFY: u16 = 1331;
|
/// A homogeneous pair: both fields share the same type parameter.
struct Pair<T> {
    a: T,
    b: T,
}
/// An integer tagged with an odd/even flag.
struct Number {
    odd: bool,
    value: i32,
}
impl Number {
    /// Returns true when `value` is strictly greater than zero.
    ///
    /// Takes `&self`: the previous version consumed the `Number` by value,
    /// needlessly moving it out of the caller for a read-only check.
    fn is_positive(&self) -> bool {
        self.value > 0
    }
}
/// Prints the number's value, spelling out one and two.
fn print_number(n: Number) {
    match n.value {
        1 => println!("One"),
        2 => println!("Two"),
        other => println!("{}", other),
    }
}
/// Prints a fixed greeting.
fn greet() {
    println!("Hi there!");
}
/// Returns a constant placeholder "dice roll" of 4.
fn fair_dice_roll() -> i32 {
    4
}
/// Entry point: exercises greetings, tuples, generic structs, and UTF-8
/// decoding.
fn main() {
    greet();
    println!("{}", fair_dice_roll());
    // Leading underscores silence unused-variable warnings for demo values.
    let _a = (10, 20);
    let nick = "vcatafesta";
    println!("{}", nick.len());
    let _p1 = Pair { a: 3, b: 9 };
    let _p2 = Pair { a: true, b: false };
    // Fully qualified path: bare `str::from_utf8` requires either a
    // `use std::str;` (not present here) or the recently stabilized inherent
    // form, so this spelling works on every toolchain/edition.
    let s1 = std::str::from_utf8(&[240, 159, 141, 137]);
    println!("{:?}", s1);
}
|
use rusb::{Device, DeviceDescriptor, DeviceHandle, GlobalContext};
use std::time::Duration;
/// An open handle to a specific USB mouse (VID 0x258a, PID 0x1007).
pub struct MouseDevice {
    device: Device<GlobalContext>,
    device_desc: DeviceDescriptor,
    handle: DeviceHandle<GlobalContext>,
}
impl MouseDevice {
pub fn new() -> Result<Self, rusb::Error> {
let vid = 0x258a;
let pid = 0x1007;
let devices = rusb::DeviceList::new()?;
let device = devices.iter().find(|d| match d.device_descriptor() {
Ok(d) => d.vendor_id() == vid && d.product_id() == pid,
Err(_) => false,
});
let device = if let Some(device) = device {
device
} else {
return Err(rusb::Error::NotFound);
};
let handle = device.open()?;
Ok(Self {
device_desc: device.device_descriptor()?,
device,
handle,
})
}
pub fn kernel_detach(&mut self) -> Result<(), rusb::Error> {
let nc = self.device_desc.num_configurations();
for n in 0..nc {
let cd = self.device.config_descriptor(n)?;
for i in cd.interfaces() {
if self.handle.kernel_driver_active(i.number()).is_ok() {
self.handle.detach_kernel_driver(i.number()).ok();
}
}
}
Ok(())
}
pub fn kernel_attach(&mut self) -> Result<(), rusb::Error> {
self.handle.attach_kernel_driver(0)?;
Ok(())
}
pub fn read(&mut self) -> Result<crate::protocol::ConfigData, rusb::Error> {
let mut out: [u8; 154] = [0; 154];
self.handle
.read_control(0xa1, 0x01, 0x304, 1, &mut out, Duration::from_secs(1))?;
let data: crate::protocol::ConfigData = unsafe { std::mem::transmute(out) };
Ok(data)
}
pub fn send(&mut self, config_data: &crate::protocol::ConfigData) -> Result<(), rusb::Error> {
let data: &[u8; 154] = unsafe { std::mem::transmute(config_data) };
self.handle
.write_control(0x21, 0x09, 0x0304, 1, data, Duration::from_secs(1))?;
Ok(())
}
}
|
use std::{env, fs};
use yaml_rust::YamlLoader;
/// Compares installed emacs packages (directories under argv[1]) against a
/// known-package list (YAML file at argv[2]) and prints both the unknown
/// installed packages and the known-but-removed ones.
fn main() {
    let pkg_path = env::args()
        .nth(1)
        .expect("The path of emacs package directory must be given.");
    // Directories under the package path that are not packages.
    let excludes = vec![String::from("archives")];
    let mut packages = Vec::new();
    for entry in fs::read_dir(pkg_path).unwrap() {
        let entry = entry.unwrap();
        // Idiom fix: test the boolean directly instead of `== true`.
        if entry.file_type().unwrap().is_dir() {
            let dir_name = entry.file_name().into_string().unwrap();
            if excludes.contains(&dir_name) {
                continue;
            }
            // "name-1.2.3" -> "name": split off the trailing version component.
            packages.push(dir_name.rsplitn(2, '-').last().unwrap().to_owned());
        }
    }
    println!("packages: {:#?}", packages);
    let pkg_file = env::args()
        .nth(2)
        .expect("The path of emacs package describe file must be set.");
    let yaml_string = fs::read_to_string(pkg_file).unwrap();
    let docs = YamlLoader::load_from_str(&yaml_string).unwrap();
    let known_packages = docs[0].as_vec().unwrap()
        .iter()
        .map(|x| x.as_str().unwrap().to_owned())
        .collect::<Vec<String>>();
    // Idiom fix: `!contains(...)` instead of `contains(...) == false`.
    println!("unknown packages: {:?}", packages.iter().filter(|x| !known_packages.contains(x)).collect::<Vec<_>>());
    println!("Removed known packages: {:?}", known_packages.iter().filter(|x| !packages.contains(x)).collect::<Vec<_>>());
}
|
use crate::utils;
use crate::utils::Claims;
use actix_web::{error, web, HttpRequest, HttpResponse};
use serde::Deserialize;
//#region Web
/// Request body for `mark`: which terminal state to move the order into.
/// At least one flag must be set (enforced in `mark`).
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct WebModel {
    processed: bool,
    rejected: bool,
}
pub fn mark(
req: HttpRequest,
id: web::Path<String>,
flags: web::Json<WebModel>,
) -> Result<HttpResponse, actix_web::Error> {
let order_id = id.to_string();
dbg!(&order_id);
if !flags.processed && !flags.rejected {
Err(error::ErrorBadRequest("One of the flags should be set."))?;
}
let auth_header = req
.headers()
.get("Authorization")
.ok_or(error::ErrorUnauthorized("Auth required."))?
.to_str()
.map_err(|e| error::ErrorBadRequest(e))?;
let token = auth_header.replace(&"Bearer", &"");
let token = token.as_str().trim();
let jwt_key = crate::SECRETS
.get("jwt_key")
.ok_or(error::ErrorInternalServerError("Failed to get jwt_key"))?;
let token = jwt::decode::<Claims>(token, jwt_key.as_ref(), &jwt::Validation::default())
.map_err(|e| error::ErrorBadRequest(e))?;
let user_id = token.claims.id;
// Checks before saving
let order_command_conn =
utils::get_order_command_db_connection().map_err(|e| error::ErrorInternalServerError(e))?;
let order_rows = order_command_conn
.query(r#"SELECT owner_id FROM "order""#, &[])
.map_err(|e| error::ErrorInternalServerError(e))?;
let stored_order = order_rows
.into_iter()
.next()
.ok_or(error::ErrorBadRequest("Order not found."))?;
if stored_order.get::<usize, String>(0) != user_id {
Err(error::ErrorUnauthorized(
"You need to be the owner of the frame in order to mark its orders.",
))?;
}
let event_store_conn =
utils::get_event_store_db_connection().map_err(|e| error::ErrorInternalServerError(e))?;
let event_type = if flags.processed {
"OrderProcessed".to_string()
} else {
"OrderRejected".to_string()
};
event_store_conn
.execute(
r#"INSERT INTO "order" (entity_id, type, body) VALUES ($1, $2, $3)"#,
&[&order_id, &event_type, &"{}".to_string()],
)
.map_err(|e| error::ErrorInternalServerError(e))?;
// Return successfully.
Ok(HttpResponse::Ok().body("order marked successfully"))
}
//#endregion
|
/// Entry point: exercises `another_function` and prints its result.
fn main() {
    println!("Hello, world!");
    let data = another_function(10, 20);
    println!("data value:{}", data);
}
/// Prints its inputs plus a derived value `z = 30 + y`, then returns
/// `x + y`. (The original illustrated shadowing; the local constant here
/// produces identical behavior and output.)
fn another_function(x: i32, y: i32) -> i32 {
    // The block-local value feeds only `z`; the parameter `x` is untouched.
    let inner = 30;
    let z = inner + y;
    println!("The value of x is:{}", x);
    println!("The value of y is:{}", y);
    println!("The value of z is:{}", z);
    x + y
}
use std::sync::Arc;
use super::*;
/// Tolerance below which two speed values are considered equal.
const FLOAT_ERROR: f32 = 0.05;
/// A motor controller that only logs speed changes (for tests/simulation).
pub struct PrintMotor {
    name: String,
    state: Arc<GlobalMotorState>,
    // Tracks whether STOP was already logged, to avoid repeated STOP lines.
    is_stopped: bool,
}
impl MotorController for PrintMotor {
    /// Logs and applies `new_speed`, ignoring changes smaller than
    /// FLOAT_ERROR so near-identical repeated updates stay quiet.
    fn set_speed(&mut self, new_speed: f32) {
        // BUG FIX: the previous check `(a - b < E) || (b - a < E)` was a
        // tautology (one of the two differences is always <= 0 < E), so the
        // tolerance never took effect. Compare the absolute difference.
        if (self.get_motor_state().get_speed() - new_speed).abs() >= FLOAT_ERROR {
            info!("{}: -> {}", self.name, new_speed);
            self.get_motor_state().set_speed(new_speed);
        }
        // Any speed command (even a filtered one) counts as a restart.
        self.is_stopped = false;
    }
    /// Logs STOP once and zeroes the speed; repeated calls are no-ops.
    fn stop(&mut self) {
        if !self.is_stopped {
            info!("{}: STOP", self.name);
            self.is_stopped = true;
            self.get_motor_state().set_speed(0.0);
        }
    }
    /// Shared state read by both this controller and external observers.
    fn get_motor_state(&self) -> &GlobalMotorState {
        &self.state
    }
}
impl PrintMotor {
    /// Creates a motor with the given display name and shared state.
    pub fn new(name: &str, state: Arc<GlobalMotorState>) -> PrintMotor {
        PrintMotor {
            is_stopped: false,
            name: name.to_owned(),
            state,
        }
    }
}
#[cfg(test)]
mod tests {
    use std::sync::Arc;
    use super::*;
    // End-to-end check of set_speed/stop against the shared state.
    #[test]
    fn test_print_motor() {
        let state = Arc::new(GlobalMotorState::new());
        let mut motor = PrintMotor::new("t", state.clone());
        // A fresh motor starts at zero speed.
        assert_eq!(0.0, motor.get_motor_state().get_speed());
        motor.set_speed(1.0);
        assert_eq!(1.0, motor.get_motor_state().get_speed());
        motor.set_speed(-1.0);
        assert_eq!(-1.0, motor.get_motor_state().get_speed());
        // Stopping forces the speed back to zero.
        motor.stop();
        assert_eq!(0.0, motor.get_motor_state().get_speed());
        motor.set_speed(1.0);
        assert_eq!(1.0, motor.get_motor_state().get_speed());
    }
}
use std::cmp::Ordering;
use std::io;
use crate::disk::bam::BamFormat;
use crate::disk::block::BLOCK_SIZE;
use crate::disk::directory::ENTRY_SIZE;
use crate::disk::error::DiskError;
use crate::disk::header::HeaderFormat;
use crate::disk::{Bam, BamEntry, Location};
// The "next track" routines reflect the information in Peter Schepers'
// DISK.TXT document found at:
// http://ist.uwaterloo.ca/~schepers/formats/DISK.TXT
// (With a few notable exceptions, as commented below.)
/// Per-track geometry: sector count plus cumulative offsets (in sectors and
/// bytes) from the start of the image.
pub struct Track {
    pub sectors: u8,
    pub sector_offset: u16,
    pub byte_offset: u32,
}
/// Static geometry and layout description for one CBM disk image format
/// (1541/1571/1581, etc.), possibly specialized per-image (interleave, GEOS).
#[derive(Clone)]
pub struct DiskFormat {
    /// The directory track used for this format.
    pub directory_track: u8,
    /// This should be pointed to from the header sector, but the various image
    /// format documents say not to trust it.
    pub first_directory_sector: u8,
    /// The 1571 has a "second directory track" on track 53 which contains a
    /// second BAM block on sector 0, and all other sectors are wasted. We
    /// want to treat this entire track as reserved. This field will be 53
    /// for 1571 images, and 0 for all other images to indicate no reserved
    /// track. (0 is not otherwise a valid track number.)
    pub reserved_track: u8,
    /// The first track will normally be 1, but may be different in the case of
    /// 1581 partitions.
    pub first_track: u8,
    /// The last track in normal use. (I.e., inclusive -- not the last track
    /// plus one.)
    pub last_track: u8,
    /// The 1541 drive mechanism (at least) can technically access up to 40
    /// tracks, although CBM DOS only provides access to 35. This field
    /// contains the last track that can possibly be accessed using
    /// non-standard methods.
    pub last_nonstandard_track: u8,
    /// The default interleave. This may be changed on a per-image basis to
    /// support other layout variants such as GEOS-formatted disks.
    pub interleave: u8,
    /// Drives may use a special interleave for directory tracks, since
    /// scanning directories usually doesn't involve I/O between the host
    /// and peripheral.
    pub directory_interleave: u8,
    /// Is this disk GEOS-formatted? All per-drive objects will have this set
    /// to false, but it may be set to true on a per-image basis as needed.
    pub geos: bool,
    /// Per-track parameters for this format (e.g. sectors in each track, byte
    /// offsets, etc.)
    pub tracks: &'static [Track],
    /// A description of the header format for this disk format.
    pub header: &'static HeaderFormat,
    /// A description of the BAM format for this disk format.
    pub bam: &'static BamFormat,
}
impl DiskFormat {
// Number of sectors on `track`.
// NOTE(review): indexed by raw track number — assumes `tracks` carries a
// placeholder entry at index 0, since track numbering starts at 1. Confirm
// against the per-format track tables.
#[inline]
pub fn sectors_in_track(&self, track: u8) -> u8 {
    self.tracks[track as usize].sectors
}
// True when `track` never holds file data (directory track, or the 1571's
// second directory track).
#[inline]
pub fn is_reserved_track(&self, track: u8) -> bool {
    track == self.directory_track || track == self.reserved_track
}
// First directory sector, taken from the format description rather than
// the (untrusted) on-disk header.
#[inline]
pub fn first_directory_location(&self) -> Location {
    Location(self.directory_track, self.first_directory_sector)
}
// Iterator over every location of this format.
#[inline]
pub fn location_iter(&self) -> LocationIterator {
    LocationIterator::new(self)
}
/// Return the list of locations which are reserved by CBM DOS and marked
/// as allocated when a disk image is newly formatted.
pub fn system_locations(&self) -> Vec<Location> {
    let mut locations = Vec::new();
    // Header sector
    locations.push(self.header.location);
    // BAM bitmap sectors
    locations.extend(self.bam.sections.iter().map(|s| s.bitmap_location));
    // The first directory sector
    locations.push(self.first_directory_location());
    // Every sector of the reserved track, when this format has one
    // (0 means "no reserved track").
    if self.reserved_track != 0 {
        let sectors = self.tracks[self.reserved_track as usize].sectors;
        locations.extend((0..sectors).map(|s| Location(self.reserved_track, s)));
    }
    // Remove duplicates
    // (e.g., on the 1541 the BAM sector and header sector are the same.)
    locations.sort();
    locations.dedup();
    locations
}
/// Return the maximum number of directory entries that are possible for
/// this format.
pub fn max_directory_entries(&self) -> usize {
    // Total sectors on directory track
    let total_sectors = self.tracks[self.directory_track as usize].sectors as usize;
    // Sectors on directory track used for non-directory purposes
    let used_sectors = self
        .system_locations()
        .iter()
        .filter(|Location(t, _)| *t == self.directory_track)
        .count()
        - 1; // -1 because system_locations includes the first directory sector.
    // Sectors available for directory entries
    let sectors = total_sectors - used_sectors;
    // The total number of possible directory entries
    // (BLOCK_SIZE bytes per sector, ENTRY_SIZE bytes per entry).
    sectors * BLOCK_SIZE / ENTRY_SIZE
}
/// Return the total number of data blocks available for files on a freshly
/// formatted disk. This is the equivalent of the listed "blocks free"
/// on a blank disk.
pub fn total_data_blocks(&self) -> usize {
    let first = self.first_track as usize;
    let last = self.last_track as usize;
    let directory = self.directory_track as usize;
    let reserved = self.reserved_track as usize;
    let mut blocks = 0usize;
    // Count sectors on every in-range track that can hold file data.
    for (index, track) in self.tracks.iter().enumerate() {
        if index >= first && index <= last && index != directory && index != reserved {
            blocks += track.sectors as usize;
        }
    }
    blocks
}
// Find the free track closest to the directory track, searching outward in
// both directions (CBM DOS grows files away from the directory track).
fn first_free_track<'a>(&self, bam: &'a Bam) -> io::Result<(u8, &'a BamEntry)> {
    let max_distance = ::std::cmp::max(
        self.directory_track - self.first_track,
        self.last_track + 1 - self.directory_track,
    );
    for distance in 1..=max_distance {
        // Check bottom half
        if distance <= self.directory_track {
            let track = self.directory_track - distance;
            if track >= self.first_track {
                let entry = bam.entry(track)?;
                if entry.has_availability() {
                    return Ok((track, entry));
                }
            }
        }
        // Check top half
        let track = self.directory_track + distance;
        if track <= self.last_track {
            let entry = bam.entry(track)?;
            if entry.has_availability() {
                return Ok((track, entry));
            }
        }
    }
    Err(DiskError::DiskFull.into())
}
// Find the first free block: nearest free track, then the lowest-numbered
// free sector within it (bit 0 of the sector map = sector 0). GEOS images
// instead allocate sequentially from the first track.
fn first_free_block(&self, bam: &Bam) -> io::Result<Location> {
    if self.geos {
        return self.next_free_block_from_previous(bam, Location(self.first_track, 0));
    }
    // Find available track
    let (track, entry) = self.first_free_track(bam)?;
    // Find available sector
    let mut map = entry.sector_map();
    for sector in 0..self.sectors_in_track(track) {
        if map & 1 == 1 {
            // Sector is available.
            return Ok(Location(track, sector));
        }
        map >>= 1;
    }
    // Unless the BAM is corrupt (free_sectors is not consistent with the bitmap),
    // this should never happen.
    Err(DiskError::DiskFull.into())
}
// If a free track is successfully found, return the following tuple:
// (track: u8, entry: &BAMEntry, reset_sector: bool)
// `reset_sector` becomes true once the scan wraps to the first track,
// telling the caller its previous sector position is no longer meaningful.
fn next_free_track_geos<'a>(
    &self,
    bam: &'a Bam,
    previous_track: u8,
) -> io::Result<(u8, &'a BamEntry, bool)> {
    let mut track = previous_track;
    // If we get to the end (and we didn't start with track 1), make another pass
    // starting with track 1 to find any availability missed on the first
    // pass. Note that this is slightly different from the algorithm in
    // DISK.TXT [1] which only scans from the current track to the
    // end of the disk. It's not clear to me how that handles sequential append
    // cases where availability exists prior to the current track, but none
    // on the current track or after.
    const NUM_PASSES: usize = 2;
    let mut passes = if previous_track == self.first_track {
        1
    } else {
        NUM_PASSES
    };
    let mut reset_sector = false;
    // GEOS: Advance the track sequentially until we find availability
    while passes > 0 {
        // Does the current track have availability?
        let entry = bam.entry(track)?;
        if entry.has_availability() {
            return Ok((track, entry, reset_sector));
        }
        // Don't leave the directory track.
        if track == self.directory_track {
            // We're writing directory sectors, but there are no more directory tracks.
            // (The 1571 track 53 "second directory track" doesn't actually hold chained
            // directory sectors, and is mostly wasted except for a second
            // BAM sector.)
            return Err(DiskError::DiskFull.into());
        }
        // Advance to the next track, skipping reserved tracks.
        track += 1;
        if track > self.last_track {
            track = self.first_track;
            passes -= 1;
            reset_sector = true;
        }
        while self.is_reserved_track(track) {
            track += 1;
            // This shouldn't happen with the formats I know about, but just in case there's
            // some oddball format with a reserved track at the end of the disk...
            if track > self.last_track {
                track = self.first_track;
                passes -= 1;
                reset_sector = true;
            }
        }
    }
    Err(DiskError::DiskFull.into())
}
// If a free track is successfully found, return the following tuple:
// (track: u8, entry: &BAMEntry, reset_sector: bool)
// `reset_sector` is true when the scan jumped to the other disk half, so the
// caller should restart its sector scan at 0.
fn next_free_track_cbm<'a>(
    &self,
    bam: &'a Bam,
    previous_track: u8,
) -> io::Result<(u8, &'a BamEntry, bool)> {
    // The CBM algorithm is to grow files away from the central directory track.
    // If the file's previous sector is on the bottom half, the next sector
    // will be on that track or below, if possible. Likewise, if the
    // file's previous sector is on the top half, the next sector will be
    // on that track or above, if possible.
    // As best as I can tell from DISK.TXT [1], the three-pass scheme mimics the
    // approach used by the CBM DOS. Three passes are necessary to fully
    // scan the disk for available sectors: The first pass scans the disk
    // half containing the current track, from the current track outward
    // (away from the directory track), and misses any potential inward
    // availability on that half. The second pass scans the other half in its
    // entirety, and the third pass scans the original half in its
    // entirety, catching any inward availability that was missed on the
    // first pass.
    const NUM_PASSES: usize = 3;
    let mut passes = NUM_PASSES;
    let mut reset_sector = false;
    let mut track = previous_track;
    // Iterate over every track from the current track outward (pass 1), the
    // entirety of the other half (pass 2), and then the entirety of the
    // original half (pass 3).
    while passes > 0 {
        // Does the current track have availability?
        let entry = bam.entry(track)?;
        if entry.has_availability() {
            return Ok((track, entry, reset_sector));
        }
        // The next candidate track is determined differently depending on whether the
        // previous track was on the directory track, below the directory
        // track (bottom half), or above the directory track (top half).
        match track.cmp(&self.directory_track) {
            Ordering::Less => {
                // Bottom half: Scan downwards.
                track -= 1;
                // `track > 0` also guards against u8 underflow on track 0.
                while track > 0 && self.is_reserved_track(track) {
                    track -= 1;
                }
                if track < self.first_track {
                    // No more availability downwards. Jump to the top half.
                    track = self.directory_track + 1;
                    passes -= 1;
                    reset_sector = true;
                }
            }
            Ordering::Equal => {
                // We're writing directory sectors, but there are no more directory tracks.
                // (The 1571 track 53 "second directory track" doesn't actually hold chained
                // directory sectors, and is mostly wasted except for a second
                // BAM sector.)
                return Err(DiskError::DiskFull.into());
            }
            Ordering::Greater => {
                // Top half: Scan upwards.
                track += 1;
                while self.is_reserved_track(track) {
                    track += 1;
                }
                if track > self.last_track {
                    // No more availability upwards. Jump to the bottom half.
                    track = self.directory_track - 1;
                    passes -= 1;
                    reset_sector = true;
                }
            }
        }
    }
    Err(DiskError::DiskFull.into())
}
// If a free track is successfully found, return the following tuple:
// (track: u8, entry: &BAMEntry, reset_sector: bool)
/// Dispatch to the format-appropriate track-selection strategy.
fn next_free_track<'a>(
    &self,
    bam: &'a Bam,
    previous_track: u8,
) -> io::Result<(u8, &'a BamEntry, bool)> {
    if !self.geos {
        // Plain CBM disks grow files away from the directory track.
        self.next_free_track_cbm(bam, previous_track)
    } else {
        // GEOS disks scan sequentially from the previous track.
        self.next_free_track_geos(bam, previous_track)
    }
}
/// Given the previous block of a chain, pick the next free block: choose a
/// track, apply the track's interleave (or GEOS skew) to the sector, wrap
/// it onto the track, then scan forward for the first available sector.
fn next_free_block_from_previous(&self, bam: &Bam, previous: Location) -> io::Result<Location> {
    let mut sector = previous.1;
    // Find the next track (which will be the current track, if it has
    // availability).
    let (track, entry, reset_sector) = self.next_free_track(bam, previous.0)?;
    let num_sectors = self.sectors_in_track(track);
    // Determine the interleave for this track.
    let interleave = if track == self.directory_track {
        self.directory_interleave
    } else {
        self.interleave
    };
    // Advance the sector by the interleave before scanning.
    if reset_sector {
        // The track search wrapped; start the sector scan from 0.
        sector = 0;
    } else if !self.geos || track == previous.0 {
        // The CBM case and the GEOS same-track case: Apply normal interleave.
        sector += interleave;
        // From DISK.TXT:
        // "Empirical GEOS optimization, get one sector backwards if over track 25"
        if self.geos && (track >= 25) {
            sector -= 1;
        }
    } else {
        // The GEOS different-track case: Apply GEOS's oddball interleave.
        // Note that "track - previous.0" is overflow-safe, because:
        // 1. The reset_sector case would be used if the track wrapped around to be
        // lower than the previous. 2. The highest (track-previous.0) is 79
        // (1581 case), and the highest possible interleave is 10 (1541
        // case), so the worst case result is 172, which still fits in a u8.
        // From DISK.TXT:
        // "For a different track of a GEOS-formatted disk, use sector skew"
        sector = ((track - previous.0) << 1) + 4 + interleave;
    }
    // Wrap sectors as needed to fit on the track.
    while sector >= num_sectors {
        sector -= num_sectors;
        // From DISK.TXT:
        // "Empirical optimization, get one sector backwards if beyond sector zero"
        if (sector > 0) && !self.geos {
            sector -= 1;
        }
    }
    // Scan for the next free sector
    let start_sector: u8 = sector;
    let map = entry.sector_map();
    loop {
        // Is this sector available?
        if map >> sector & 1 == 1 {
            return Ok(Location(track, sector));
        }
        // Advance to the next sector.
        sector += 1;
        if sector >= num_sectors {
            sector = 0;
        }
        if sector == start_sector {
            // The BAM entry's free sector count indicated free sectors,
            // but there were no free sectors in the bitmap.
            return Err(DiskError::InvalidBAM.into());
        }
    }
}
/// Allocate-scan entry point: continue an existing chain from `previous`,
/// or start a fresh chain when no previous block is given.
pub fn next_free_block(&self, bam: &Bam, previous: Option<Location>) -> io::Result<Location> {
    if let Some(previous) = previous {
        self.next_free_block_from_previous(bam, previous)
    } else {
        self.first_free_block(bam)
    }
}
}
/// Iterator over every `Location` (track, sector) of a disk format, in
/// ascending track/sector order.
pub struct LocationIterator<'a> {
    // Format describing track count and sectors per track.
    format: &'a DiskFormat,
    // Next location to yield; `None` once the disk is exhausted.
    next: Option<Location>,
}
impl<'a> LocationIterator<'a> {
    /// Create an iterator positioned at sector 0 of the format's first track.
    fn new(format: &'a DiskFormat) -> LocationIterator {
        let start = Location::new(format.first_track, 0);
        LocationIterator {
            format,
            next: Some(start),
        }
    }
}
impl<'a> Iterator for LocationIterator<'a> {
    type Item = Location;
    /// Yield the current location and advance: next sector on the same track,
    /// rolling over to sector 0 of the following track, ending past the last
    /// track.
    fn next(&mut self) -> Option<Location> {
        let current = self.next;
        if let Some(Location(track, sector)) = current {
            let mut next_track = track;
            let mut next_sector = sector + 1;
            if next_sector == self.format.tracks[next_track as usize].sectors {
                // Exhausted this track; move to the start of the next one.
                next_track += 1;
                next_sector = 0;
            }
            self.next = if next_track > self.format.last_track {
                None
            } else {
                Some(Location(next_track, next_sector))
            };
        }
        current
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::disk::{Disk, D64, D71, D81};
    // Data blocks available on a freshly formatted D64 (matches
    // `total_data_blocks()` below).
    const TOTAL_ALLOCABLE_BLOCKS: usize = 664;
    // Free blocks left on the directory track after formatting — TODO confirm
    // which directory sectors formatting consumes.
    const REMAINING_DIRECTORY_BLOCKS: usize = 17;
    // Safety valve for `allocate_chain` to catch runaway loops.
    const TEST_ALLOCATION_LIMIT: usize = TOTAL_ALLOCABLE_BLOCKS * 4;
    // Build an in-memory, freshly formatted D64 plus a copy of its format.
    fn get_fresh_d64() -> (DiskFormat, D64) {
        let mut d64 = D64::open_memory(D64::geometry(false)).unwrap();
        d64.write_format(&"test".into(), &"t1".into()).unwrap();
        let format = d64.disk_format().unwrap().clone();
        (format, d64)
    }
    // Same as `get_fresh_d64`, but with the GEOS allocation flag enabled.
    fn get_fresh_d64_geos() -> (DiskFormat, D64) {
        let mut d64 = D64::open_memory(D64::geometry(false)).unwrap();
        d64.write_format(&"test".into(), &"t1".into()).unwrap();
        d64.disk_format_mut().unwrap().geos = true;
        let format = d64.disk_format().unwrap().clone();
        (format, d64)
    }
    // Repeatedly allocate a block chain starting at `start` (or a fresh chain
    // when None), stopping at `limit` blocks or when the disk reports full.
    // Returns (blocks allocated, whether DiskFull was hit).
    fn allocate_chain(
        d64: &mut D64,
        limit: Option<usize>,
        start: Option<Location>,
    ) -> (usize, bool) {
        let mut blocks_allocated = 0usize;
        let mut disk_full = false;
        let mut location = start;
        let format = d64.disk_format().unwrap().clone();
        let bam = d64.bam().unwrap();
        let mut bam = bam.borrow_mut();
        loop {
            let next = match format.next_free_block(&bam, location) {
                Ok(l) => l,
                Err(ref e) => match DiskError::from_io_error(e) {
                    Some(ref e) if *e == DiskError::DiskFull => {
                        disk_full = true;
                        break;
                    }
                    Some(_) => break,
                    None => break,
                },
            };
            bam.allocate(next).unwrap();
            blocks_allocated += 1;
            if let Some(limit) = limit {
                if blocks_allocated == limit {
                    break;
                }
            }
            if blocks_allocated > TEST_ALLOCATION_LIMIT {
                // Prevent infinite loop in case of failure
                panic!("runaway chain allocation.");
            }
            println!("next({:?}) -> {}", location, next);
            location = Some(next);
        }
        (blocks_allocated, disk_full)
    }
    // Without allocating, `next_free_block` must succeed from every sector of
    // a fresh disk.
    #[test]
    fn test_next_for_all_sectors() {
        let (format, d64) = get_fresh_d64();
        for track in format.first_track..=format.last_track {
            for sector in 0..format.tracks[track as usize].sectors {
                let location = Location(track, sector);
                let bam = d64.bam().unwrap();
                let bam = bam.borrow();
                let next = format.next_free_block(&bam, Some(location)).unwrap();
                println!("next({}) -> {}", location, next);
            }
        }
    }
    // Allocating each returned block, the walk must cover exactly the
    // allocable block count before reporting full.
    #[test]
    fn test_next_for_all_sectors_with_allocation() {
        let (format, d64) = get_fresh_d64();
        let mut blocks_allocated = 0usize;
        'outer: for track in format.first_track..=format.last_track {
            if format.is_reserved_track(track) {
                continue;
            }
            for sector in 0..format.tracks[track as usize].sectors {
                let location = Location(track, sector);
                let bam = d64.bam().unwrap();
                let mut bam = bam.borrow_mut();
                let next = match format.next_free_block(&bam, Some(location)) {
                    Ok(l) => l,
                    Err(_) => break 'outer,
                };
                bam.allocate(next).unwrap();
                blocks_allocated += 1;
                println!("next({}) -> {}", location, next);
            }
        }
        println!("blocks allocated: {}", blocks_allocated);
        println!("BAM: {:?}", d64.bam());
        assert_eq!(blocks_allocated, TOTAL_ALLOCABLE_BLOCKS);
    }
    // A single CBM-style chain must fill every allocable block, then DiskFull.
    #[test]
    fn test_full_chain_allocation() {
        let (_format, mut d64) = get_fresh_d64();
        let (blocks_allocated, disk_full) = allocate_chain(&mut d64, None, None);
        println!(
            "blocks allocated: {} disk_full={:?}",
            blocks_allocated, disk_full
        );
        println!("BAM: {:?}", d64.bam());
        assert!(disk_full);
        assert_eq!(blocks_allocated, TOTAL_ALLOCABLE_BLOCKS);
    }
    // Same as above, but with the GEOS sequential allocator.
    #[test]
    fn test_full_chain_allocation_geos() {
        let (_format, mut d64) = get_fresh_d64_geos();
        let (blocks_allocated, disk_full) = allocate_chain(&mut d64, None, None);
        println!(
            "blocks allocated: {} disk_full={:?}",
            blocks_allocated, disk_full
        );
        println!("BAM: {:?}", d64.bam());
        assert!(disk_full);
        assert_eq!(blocks_allocated, TOTAL_ALLOCABLE_BLOCKS);
    }
    // A chain started on the directory track must stay there and exhaust only
    // the remaining directory blocks.
    #[test]
    fn test_directory_chain_allocation() {
        let (format, mut d64) = get_fresh_d64();
        let start = Location(format.directory_track, 1);
        let (blocks_allocated, disk_full) = allocate_chain(&mut d64, None, Some(start));
        println!(
            "blocks allocated: {} disk_full={:?}",
            blocks_allocated, disk_full
        );
        println!("BAM: {:?}", d64.bam());
        assert!(disk_full);
        assert_eq!(blocks_allocated, REMAINING_DIRECTORY_BLOCKS);
    }
    // GEOS variant of the directory-track chain test.
    #[test]
    fn test_directory_chain_allocation_geos() {
        let (format, mut d64) = get_fresh_d64_geos();
        let start = Location(format.directory_track, 1);
        let (blocks_allocated, disk_full) = allocate_chain(&mut d64, None, Some(start));
        println!(
            "blocks allocated: {} disk_full={:?}",
            blocks_allocated, disk_full
        );
        println!("BAM: {:?}", d64.bam());
        assert!(disk_full);
        assert_eq!(blocks_allocated, REMAINING_DIRECTORY_BLOCKS);
    }
    // Directory capacity per format: D64/D71 = 144 entries, D81 = 296.
    #[test]
    fn test_max_directory_entries() {
        let mut d64 = D64::open_memory(D64::geometry(false)).unwrap();
        d64.write_format(&"test".into(), &"t1".into()).unwrap();
        assert_eq!(d64.disk_format().unwrap().max_directory_entries(), 144);
        let mut d71 = D71::open_memory(D71::geometry(false)).unwrap();
        d71.write_format(&"test".into(), &"t1".into()).unwrap();
        assert_eq!(d71.disk_format().unwrap().max_directory_entries(), 144);
        let mut d81 = D81::open_memory(D81::geometry(false)).unwrap();
        d81.write_format(&"test".into(), &"t1".into()).unwrap();
        assert_eq!(d81.disk_format().unwrap().max_directory_entries(), 296);
    }
    // Data-block capacity per format: D64 = 664, D71 = 1328, D81 = 3160.
    #[test]
    fn test_total_data_blocks() {
        let mut d64 = D64::open_memory(D64::geometry(false)).unwrap();
        d64.write_format(&"test".into(), &"t1".into()).unwrap();
        assert_eq!(d64.disk_format().unwrap().total_data_blocks(), 664);
        let mut d71 = D71::open_memory(D71::geometry(false)).unwrap();
        d71.write_format(&"test".into(), &"t1".into()).unwrap();
        assert_eq!(d71.disk_format().unwrap().total_data_blocks(), 1328);
        let mut d81 = D81::open_memory(D81::geometry(false)).unwrap();
        d81.write_format(&"test".into(), &"t1".into()).unwrap();
        assert_eq!(d81.disk_format().unwrap().total_data_blocks(), 3160);
    }
}
|
mod control_command;
mod monitor;
pub(crate) use self::control_command::{ControlCommand, Reply};
pub use self::{control_command::WatchdogQuery, monitor::WatchdogMonitor};
use crate::{
runtime::Runtimes,
service::{ServiceError, ServiceIdentifier, StatusReport},
};
use async_trait::async_trait;
use std::{any::Any, fmt};
use thiserror::Error;
use tokio::sync::{mpsc, oneshot};
/// Trait implemented by the application's set of core services.
///
/// The watchdog drives every service through this interface: construction,
/// start/stop, status queries and intercom (message-channel) retrieval.
#[async_trait]
pub trait Organix: Send + Sync {
    /// Construct the service set, registering what it needs on the runtimes.
    fn new(_: &mut Runtimes) -> Self;
    /// Stop the identified service.
    fn stop(&mut self, service_identifier: ServiceIdentifier) -> Result<(), WatchdogError>;
    /// Report the current status of the identified service.
    async fn status(
        &mut self,
        service_identifier: ServiceIdentifier,
    ) -> Result<StatusReport, WatchdogError>;
    /// Start the identified service, handing it a query handle back to the
    /// watchdog.
    fn start(
        &mut self,
        service_identifier: ServiceIdentifier,
        watchdog_query: WatchdogQuery,
    ) -> Result<(), WatchdogError>;
    /// Fetch the identified service's intercom endpoint as a type-erased box.
    fn intercoms(
        &mut self,
        service_identifier: ServiceIdentifier,
    ) -> Result<Box<dyn Any + Send + 'static>, WatchdogError>;
}
/// The watchdog owns the service set and runs the control loop.
pub struct Watchdog<T: Organix> {
    // The application's services, driven via the `Organix` trait.
    services: T,
    // Closed when the control loop exits, notifying the monitor.
    on_drop_send: oneshot::Sender<()>,
}
/// Builder that wires runtimes, services and channels into a running
/// watchdog; `T` selects the service set to construct.
pub struct WatchdogBuilder<T>
where
    T: Organix,
{
    // Carries `T` without storing a value of it.
    _marker: std::marker::PhantomData<T>,
}
/// Errors surfaced by the watchdog while managing services.
#[derive(Debug, Error, PartialEq, Eq)]
pub enum WatchdogError {
    /// The requested identifier is not among the registered services.
    #[error("Unknown service {service_identifier}, available services are {possible_values:?}")]
    UnknownService {
        service_identifier: ServiceIdentifier,
        possible_values: &'static [ServiceIdentifier],
    },
    /// The service exists but failed to start.
    #[error("Cannot start service {service_identifier}: {source}")]
    CannotStartService {
        service_identifier: ServiceIdentifier,
        source: ServiceError,
    },
    /// The service's intercom could not be reached (possibly shut down).
    #[error("Cannot connect to service {service_identifier}, service might be shutdown")]
    CannotConnectToService {
        service_identifier: ServiceIdentifier,
        // Whether a reconnection was already attempted before giving up.
        retry_attempted: bool,
    },
    /// The watchdog dropped the reply channel before answering a query.
    #[error("The watchdog didn't reply to the {context}: {reason}")]
    NoReply {
        reason: oneshot::error::RecvError,
        context: &'static str,
    },
}
impl<T> WatchdogBuilder<T>
where
    T: Organix,
{
    /// Create a builder for a watchdog over the service set `T`.
    #[allow(clippy::new_without_default)]
    pub fn new() -> Self {
        Self {
            _marker: std::marker::PhantomData,
        }
    }
    /// Build the runtimes and service set, spawn the watchdog control loop,
    /// and return the monitor handle used to drive it.
    pub fn build(self) -> WatchdogMonitor
    where
        T: Organix + 'static,
    {
        // FIX: `expect` with context instead of a bare `unwrap`, so a runtime
        // initialization failure panics with an actionable message.
        let mut runtimes = Runtimes::new().expect("failed to initialize watchdog runtimes");
        let services = T::new(&mut runtimes);
        // Control-command channel between monitor/queries and the watchdog loop.
        let (sender, receiver) = mpsc::channel(10);
        // Closed (dropped) when the watchdog loop exits, notifying the monitor.
        let (on_drop_send, on_drop_receive) = oneshot::channel();
        let watchdog = Watchdog {
            on_drop_send,
            services,
        };
        let watchdog_query_handle = runtimes.watchdog().handle().clone();
        let query = WatchdogQuery::new(watchdog_query_handle, sender.clone());
        runtimes
            .watchdog()
            .handle()
            .spawn(async move { watchdog.watchdog(receiver, query).await });
        WatchdogMonitor::new(runtimes, sender, on_drop_receive)
    }
}
impl<T> Watchdog<T>
where
    T: Organix,
{
    /// Control loop: consume commands from the monitor until a shutdown/kill
    /// request arrives or the channel closes, then signal `on_drop_send`.
    #[tracing::instrument(skip(self, cc, watchdog_query), target = "watchdog", level = "info")]
    async fn watchdog(
        mut self,
        mut cc: mpsc::Receiver<ControlCommand>,
        watchdog_query: WatchdogQuery,
    ) {
        while let Some(command) = cc.recv().await {
            match command {
                ControlCommand::Shutdown | ControlCommand::Kill => {
                    // TODO: for now we assume shutdown and kill are the same
                    // but on the long run it will need to send a Shutdown
                    // signal to every services so they can save state and
                    // release resources properly
                    tracing::warn!(%command, "stopping watchdog");
                    break;
                }
                ControlCommand::Status {
                    service_identifier,
                    reply,
                } => {
                    // Query the service, log the report on success, and relay
                    // the full result (including errors) to the caller.
                    let status_report = self.services.status(service_identifier).await;
                    if let Ok(status_report) = &status_report {
                        tracing::info!(
                            %status_report.identifier,
                            status_report.number_restart = status_report.started,
                            %status_report.status,
                            %status_report.intercom.number_sent,
                            %status_report.intercom.number_received,
                            %status_report.intercom.number_connections,
                            %status_report.intercom.processing_speed_mean,
                            %status_report.intercom.processing_speed_variance,
                            %status_report.intercom.processing_speed_standard_derivation,
                        );
                    }
                    reply.reply(status_report);
                }
                ControlCommand::Start {
                    service_identifier,
                    reply,
                } => {
                    tracing::info!(%service_identifier, "start");
                    reply.reply(
                        self.services
                            .start(service_identifier, watchdog_query.clone()),
                    );
                }
                ControlCommand::Stop {
                    service_identifier,
                    reply,
                } => {
                    tracing::info!(%service_identifier, "stop");
                    reply.reply(self.services.stop(service_identifier));
                }
                ControlCommand::Intercom {
                    service_identifier,
                    reply,
                } => {
                    tracing::trace!(%service_identifier, "query intercom");
                    // TODO: surround the operation with a timeout and
                    // result to success
                    reply.reply(self.services.intercoms(service_identifier));
                }
            }
        }
        // FIX: `let _ =` makes the intentional ignore explicit instead of an
        // empty `if ... is_err()` block; the monitor may already be dropped.
        let _ = self.on_drop_send.send(());
    }
}
impl<T: Organix> fmt::Debug for Watchdog<T> {
    /// Opaque rendering: the service set is not required to be `Debug`,
    /// so only the type name is printed.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut builder = f.debug_struct("Watchdog");
        builder.finish()
    }
}
|
use amethyst::{
assets::PrefabData,
derive::PrefabData,
ecs::{prelude::Entity, Component, DenseVecStorage, NullStorage, WriteStorage},
error::Error,
};
use dsf_core::components::{Direction2D, Pos};
use serde::{Deserialize, Serialize};
/// Zero-sized tag component, attachable via prefab. Presumably marks the
/// current selection entity — confirm against the systems that read it.
#[derive(Clone, Copy, Debug, Default, Deserialize, Serialize, PrefabData)]
#[prefab(Component)]
#[serde(deny_unknown_fields)]
pub struct SelectionTag;
impl Component for SelectionTag {
    // NullStorage: the component carries no data, only presence.
    type Storage = NullStorage<Self>;
}
/// Zero-sized tag component, attachable via prefab. Presumably marks the
/// parent entity of the cursor preview — confirm with usage.
#[derive(Clone, Copy, Debug, Default, Deserialize, Serialize, PrefabData)]
#[prefab(Component)]
#[serde(deny_unknown_fields)]
pub struct CursorPreviewParentTag;
impl Component for CursorPreviewParentTag {
    // NullStorage: presence-only component.
    type Storage = NullStorage<Self>;
}
/// Zero-sized tag component, attachable via prefab. Presumably marks cursor
/// preview entities — confirm with usage.
#[derive(Clone, Copy, Debug, Default, Deserialize, Serialize, PrefabData)]
#[prefab(Component)]
#[serde(deny_unknown_fields)]
pub struct CursorPreviewTag;
impl Component for CursorPreviewTag {
    // NullStorage: presence-only component.
    type Storage = NullStorage<Self>;
}
/// Editor cursor state (storage derived via the `Component` derive).
#[derive(Clone, Copy, Component, Debug, Default, Deserialize, Serialize, PrefabData)]
#[prefab(Component)]
#[serde(deny_unknown_fields)]
pub struct Cursor {
    // Direction of the most recent cursor movement — assumed; confirm with
    // the movement system.
    pub last_direction: Direction2D,
    // Remaining cooldown in seconds before the next move — assumed; confirm.
    pub cooldown: f32,
}
/// Component recording the grid position a tile was painted at.
#[derive(Clone, Copy, Debug, Default, Component, Deserialize, Serialize, PrefabData)]
#[prefab(Component)]
#[serde(deny_unknown_fields)]
pub struct PaintedTile {
    // Grid position of the painted tile.
    pub pos: Pos,
}
impl PaintedTile {
    /// Construct a painted-tile component at the given position.
    pub fn new(pos: Pos) -> Self {
        PaintedTile { pos }
    }
}
|
//! Channel related types.
mod name;
mod wildcard_spec;
pub use name::Name;
pub use wildcard_spec::WildcardSpec;
|
use actix_web::{web, Responder, HttpResponse};
/// Register the `/session` routes: POST = login, DELETE = logout.
pub fn init(cfg: &mut web::ServiceConfig) {
    let session_scope = web::scope("/session")
        .route("", web::post().to(login))
        .route("", web::delete().to(logout));
    cfg.service(session_scope);
}
/// POST /session — stub login endpoint.
/// NOTE(review): the response is a JSON *string* containing hand-built,
/// non-JSON text with placeholder values "a"/"b" — presumably a stub;
/// confirm before clients depend on the shape.
async fn login() -> impl Responder {
    HttpResponse::Ok().json(format!("{{ session id: {}, user: {} }}", "a","b"))
}
/// DELETE /session — stub logout endpoint; replies with the JSON string "{}".
async fn logout() -> impl Responder {
    HttpResponse::Ok().json("{}")
}
|
use crate::component::Component;
use crate::AsTable;
use clap::{App, AppSettings, ArgMatches};
use digitalocean::prelude::*;
use failure::Error;
use serde_derive::{Deserialize, Serialize};
mod get;
pub use self::get::Get;
mod apply;
pub use self::apply::Apply;
/// CLI component for the top-level `infrastructure` subcommand.
pub struct Root;
impl Component for Root {
    /// Build the `infrastructure` clap app with its `get`/`apply` subcommands.
    fn app() -> App<'static, 'static> {
        let app = App::new("infrastructure")
            .about("Interact with the entire infrastructure")
            .setting(AppSettings::SubcommandRequired);
        app.subcommand(Get::app()).subcommand(Apply::app())
    }
    /// Dispatch the parsed subcommand to its handler.
    fn handle(client: DigitalOcean, arg_matches: &ArgMatches) -> Result<(), Error> {
        match arg_matches.subcommand() {
            ("get", Some(sub_matches)) => Get::handle(client, sub_matches),
            ("apply", Some(sub_matches)) => Apply::handle(client, sub_matches),
            // Unreachable in practice: SubcommandRequired is set above.
            _ => panic!("Unknown subcommand provided"),
        }
    }
}
/// Serializable snapshot of the whole infrastructure.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct Infrastructure {
    // All droplets in the account.
    droplets: Vec<Droplet>,
    // All DNS domains in the account.
    domains: Vec<Domain>,
}
impl AsTable for Infrastructure {
    /// Print droplets and domains as two labelled tables on stdout.
    fn as_table(&self) {
        println!("Droplets:");
        self.droplets.as_table();
        println!("Domains:");
        self.domains.as_table();
    }
}
|
use bit_field::BitField;
use core::fmt;
/// An x86_64 page-table entry: a raw 64-bit value whose bits encode the
/// mapped frame address plus attribute flags (Intel SDM Vol. 3, 4-level
/// paging entry layout).
#[repr(transparent)]
pub struct PageTableEntry {
    entry: u64
}
impl PageTableEntry {
    /// Wrap a raw 64-bit entry value.
    pub fn from(entry: u64) -> PageTableEntry {
        PageTableEntry { entry }
    }
    /// Return the raw 64-bit entry value.
    pub fn as_u64(&self) -> u64 {
        // FIX: removed the redundant `as u64` cast — `entry` already is u64.
        self.entry
    }
    /// True when bit `n` of the raw entry is set.
    fn bit(&self, n: u32) -> bool {
        (self.entry >> n) & 1 == 1
    }
    /// Bit 0: the mapping is present.
    pub fn is_present(&self) -> bool {
        self.bit(0)
    }
    /// Bit 1: the mapping is writeable.
    pub fn is_writeable(&self) -> bool {
        self.bit(1)
    }
    /// Bit 2: accessible from ring 3.
    pub fn is_user_accessible(&self) -> bool {
        self.bit(2)
    }
    /// Bit 3: write-through caching enabled.
    pub fn is_write_through_caching(&self) -> bool {
        self.bit(3)
    }
    /// Bit 4: caching disabled.
    pub fn is_disable_cache(&self) -> bool {
        self.bit(4)
    }
    /// Bit 5: the CPU has accessed this mapping.
    pub fn is_accessed(&self) -> bool {
        self.bit(5)
    }
    /// Bit 6: the CPU has written through this mapping.
    pub fn is_dirty(&self) -> bool {
        self.bit(6)
    }
    /// Bit 7: this entry maps a huge page instead of a lower-level table.
    pub fn is_huge_page(&self) -> bool {
        self.bit(7)
    }
    /// Bit 8: the mapping is global (not flushed on CR3 switch).
    pub fn is_global(&self) -> bool {
        self.bit(8)
    }
    /// Bit 63: execution from this mapping is forbidden (NX).
    pub fn is_not_executable(&self) -> bool {
        self.bit(63)
    }
    /// Physical byte address held in bits 12..=51 (4 KiB aligned).
    pub fn physical_address(&self) -> u64 {
        // Equivalent to extracting bits 12..=51 and shifting left by 12.
        self.entry & 0x000F_FFFF_FFFF_F000
    }
}
impl fmt::Debug for PageTableEntry {
    /// One-line rendering; non-present entries get a short placeholder.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if !self.is_present() {
            return write!(f, "PageTableEntry( not present )");
        }
        write!(
            f,
            "PageTableEntry(PhysAddr=0x{:x}, present={}, writeable={}, user_accessible={}, dirty={}, accessed={}, nx={}, huge_page={})",
            self.physical_address(),
            self.is_present(),
            self.is_writeable(),
            self.is_user_accessible(),
            self.is_dirty(),
            self.is_accessed(),
            self.is_not_executable(),
            self.is_huge_page()
        )
    }
}
/// Recursively dump a page-table hierarchy over the serial port.
///
/// # Safety
/// `physical_address` must refer to a valid page table of the given `level`,
/// and all physical memory must be mapped at `memory_mapping_offset`; the
/// walk dereferences raw pointers derived from these values.
pub unsafe fn describe_page_table(virtual_address: u64, physical_address: u64, level: u64, memory_mapping_offset: u64) {
    describe_page_table_inner(virtual_address, physical_address, level, memory_mapping_offset);
}
// Walk one 512-entry table, printing each entry (runs of "continuation"
// entries are collapsed by the per-entry helper) and recursing downward.
fn describe_page_table_inner(virtual_address: u64, physical_address: u64, level: u64, memory_mapping_offset: u64) {
    // Virtual span covered by one entry at this level:
    // 4 KiB at level 1, 2 MiB at level 2, 1 GiB at level 3, 512 GiB at level 4.
    let virtual_address_offset = 1 << (12 + 9 * (level-1));
    let ptr = (physical_address + memory_mapping_offset) as *const [u64; 512];
    // NOTE(review): assumes complete physical memory is mapped at
    // `memory_mapping_offset` — confirm with the kernel's mapping setup.
    let page_table = unsafe { &(*ptr) };
    let mut previous_entry = 0;
    let mut nb_continuation = 0;
    for ix in 0..512 {
        let page_table_entry = page_table[ix as usize];
        // The helper returns the updated count of suppressed continuations.
        nb_continuation = describe_page_table_entry_inner(
            virtual_address + ix * virtual_address_offset,
            physical_address + ix * 8,
            page_table_entry,
            ix,
            level,
            memory_mapping_offset,
            previous_entry,
            nb_continuation);
        previous_entry = page_table_entry;
    }
}
impl PageTableEntry {
    /// True when this entry "continues" `previous`: all flags match and,
    /// for present entries, the physical address follows contiguously
    /// (2 MiB step for huge pages, 4 KiB otherwise).
    fn is_continuation_of(&self, previous: &PageTableEntry) -> bool {
        let flags_match = self.is_present() == previous.is_present()
            && self.is_writeable() == previous.is_writeable()
            && self.is_user_accessible() == previous.is_user_accessible()
            && self.is_write_through_caching() == previous.is_write_through_caching()
            && self.is_disable_cache() == previous.is_disable_cache()
            && self.is_accessed() == previous.is_accessed()
            && self.is_dirty() == previous.is_dirty()
            && self.is_huge_page() == previous.is_huge_page()
            && self.is_global() == previous.is_global()
            && self.is_not_executable() == previous.is_not_executable();
        if !flags_match {
            return false;
        }
        // Two non-present entries with identical flags always match.
        if !self.is_present() {
            return true;
        }
        let step = if self.is_huge_page() { 0x200000 } else { 0x1000 };
        self.physical_address() == previous.physical_address() + step
    }
}
// Print one entry of a page table, collapsing runs of continuation entries
// into a single "... + N ..." line, and recurse into present lower-level
// tables. Returns the updated count of suppressed continuations.
fn describe_page_table_entry_inner(virtual_address: u64, physical_address: u64,
                                   page_table_entry_u64: u64, index: u64,
                                   level: u64, memory_mapping_offset: u64,
                                   previous_entry: u64, nb_continuation: u32 ) -> u32 {
    use crate::serial_println;
    // Two spaces of indent per level below the root.
    let indent = match level {
        4 => "",
        3 => "  ",
        2 => "    ",
        1 => "      ",
        _ => panic!("Error! level should be 1..=4")
    };
    let page_table_entry = PageTableEntry::from(page_table_entry_u64);
    //if !page_table_entry.is_present() { return 0; }
    // First and last entries are always printed so table boundaries stay
    // visible; in between, suppress entries that merely continue the run.
    if index > 0 && index < 511 {
        let previous_page_table_entry = PageTableEntry::from(previous_entry);
        // Only leaf-like entries (level 1, absent, or huge) can be collapsed;
        // interior entries must be printed so their children get walked.
        if level == 1 || !page_table_entry.is_present() || page_table_entry.is_huge_page() {
            if page_table_entry.is_continuation_of(&previous_page_table_entry) {
                return nb_continuation + 1;
            }
        }
    }
    // we are not a continuation, or the last entry in the table, so we display the page table and the number of continuation in between
    if nb_continuation > 0 {
        serial_println!("     ... + {} ... ", nb_continuation);
    }
    serial_println!("{} {} {:03} 0x{:x}... {:?} (stored in physical address 0x{:x})",
                    level, indent, index, virtual_address, page_table_entry, physical_address);
    // Recurse into the next-lower table for present, non-huge interior entries.
    if !page_table_entry.is_huge_page() && page_table_entry.is_present() && level > 1 {
        describe_page_table_inner(virtual_address, page_table_entry.physical_address(), level - 1, memory_mapping_offset);
    }
    return 0;
}
use x86_64::VirtAddr;
use x86_64::structures::paging::{OffsetPageTable, PageTable};
/// Return a mutable reference to the level-4 page table currently installed
/// in CR3. Relies on all physical memory being mapped at
/// `physical_memory_offset`.
fn active_level_4_table(physical_memory_offset: VirtAddr) -> &'static mut PageTable {
    let (level_4_frame, _flags) = x86_64::registers::control::Cr3::read();
    let table_virt = physical_memory_offset + level_4_frame.start_address().as_u64();
    let table_ptr: *mut PageTable = table_virt.as_mut_ptr();
    // NOTE(review): creating `&'static mut` here means calling this twice
    // aliases the table — callers must ensure single use; confirm.
    unsafe { &mut *table_ptr }
}
/// Build an `OffsetPageTable` around the active level-4 table.
///
/// # Safety
/// The caller must guarantee that complete physical memory is mapped at
/// `physical_memory_offset`, and call this only once to avoid aliasing
/// `&mut` references to the level-4 table.
pub unsafe fn init_mapper(physical_memory_offset: VirtAddr) -> OffsetPageTable<'static> {
    let level_4_table = active_level_4_table(physical_memory_offset);
    OffsetPageTable::new(level_4_table, physical_memory_offset)
}
use x86_64::structures::paging::{FrameAllocator, Size4KiB, UnusedPhysFrame};
use x86_64::structures::paging::{Page, PhysFrame, PageTableFlags, Mapper};
use x86_64::PhysAddr;
/// Frame allocator that never yields a frame; usable only for mappings whose
/// page tables already exist.
pub struct EmptyFrameAllocator;
unsafe impl FrameAllocator<Size4KiB> for EmptyFrameAllocator {
    // Always fails: there is no backing pool of frames.
    fn allocate_frame(&mut self) -> Option<UnusedPhysFrame> {
        None
    }
}
/// Map `page` to the VGA text-buffer frame at physical 0xb8000, writable.
/// Intended as a demonstration mapping only.
pub fn create_example_mapping(page: Page, mapper: &mut OffsetPageTable, frame_allocator: &mut impl FrameAllocator<Size4KiB>) {
    let frame = PhysFrame::containing_address(PhysAddr::new(0xb8000));
    // NOTE(review): `UnusedPhysFrame::new` asserts the frame is unused, but
    // 0xb8000 is the shared VGA buffer — acceptable only for this demo.
    let unused_frame = unsafe { UnusedPhysFrame::new(frame) };
    let flags = PageTableFlags::PRESENT | PageTableFlags::WRITABLE;
    let map_to_result = mapper.map_to(page, unused_frame, flags, frame_allocator);
    // Panic on mapping failure, then flush the TLB entry for the new page.
    map_to_result.expect("map_to failed").flush();
}
use bootloader::bootinfo::{MemoryMap, MemoryRegion, MemoryRegionType};
/// Physical-frame allocator backed by the bootloader's memory map.
pub struct BootInfoFrameAllocator {
    // Memory regions reported by the bootloader.
    memory_map: &'static MemoryMap,
    // Index of the next frame to hand out from `usable_frames()`.
    next: usize
}
impl BootInfoFrameAllocator {
    /// Create an allocator over the bootloader-provided memory map, starting
    /// at the first usable frame.
    pub fn init(memory_map: &'static MemoryMap) -> Self {
        BootInfoFrameAllocator {
            memory_map,
            next: 0
        }
    }
    /// Iterate over every usable 4 KiB frame described by the memory map.
    fn usable_frames(&self) -> impl Iterator<Item = UnusedPhysFrame> {
        // FIX: dropped the unused function-local `use crate::serial_println;`
        // and the commented-out debug print that motivated it.
        let usable_regions = self
            .memory_map
            .iter()
            .filter(|r| r.region_type == MemoryRegionType::Usable);
        // Expand each usable region into 4096-byte-aligned frame addresses.
        let frame_addresses = usable_regions
            .map(|r| r.range.start_addr()..r.range.end_addr())
            .flat_map(|r| r.step_by(4096));
        frame_addresses.map(|addr| {
            let frame = PhysFrame::containing_address(PhysAddr::new(addr));
            // Regions marked Usable are not referenced elsewhere, so the
            // frame can be declared unused.
            unsafe { UnusedPhysFrame::new(frame) }
        })
    }
}
unsafe impl FrameAllocator<Size4KiB> for BootInfoFrameAllocator {
    /// Hand out the `next`-th usable frame and advance the cursor.
    fn allocate_frame(&mut self) -> Option<UnusedPhysFrame> {
        // FIXME : this is in O(self.next) so allocation of N pages is in O(N^2)
        // (the usable-frames iterator is rebuilt and re-walked on every call).
        let frame = self.usable_frames().nth(self.next);
        self.next += 1;
        frame
    }
}
/// Exercise the print helpers below with fixed sample arguments.
fn main() {
    // NOTE(review): "zerd" here vs "zed" below — possibly an intentional
    // typo carried over from the source exercise; confirm before changing.
    print_two("zerd", "Shaw");
    print_two_again("zed", "Shaw");
    print_one("First!");
    print_none()
}
/// Print both arguments separated by ", ".
fn print_two(a: &str, b: &str){
    // FIX: `&str` implements Display directly; the former `to_owned()` calls
    // allocated owned Strings for no benefit.
    println!("{}, {}", a, b);
}
/// Print both arguments with "a:"/"b:" labels.
fn print_two_again(a: &str, b: &str){
    // FIX: removed needless `to_owned()` allocations; `&str` prints directly.
    println!("a: {}, b: {}", a, b);
}
/// Print the single argument with an "a:" label.
fn print_one(a: &str){
    // FIX: removed needless `to_owned()` allocation; `&str` prints directly.
    println!("a: {}", a);
}
/// Print a fixed message; takes no arguments.
fn print_none(){
    println!("I got nothing.");
}
/// NO. 100: Same Tree
/// Zero-sized namespace for the solution's associated functions.
pub struct Solution;
pub use leetcode::{TreeNode, vec_to_tree, tree};
// ----- submission codes start here -----
use std::rc::Rc;
use std::cell::RefCell;
/// Shared-ownership binary-tree handle used by the LeetCode scaffolding.
pub type Tree = Option<Rc<RefCell<TreeNode>>>;
impl Solution {
    /// Iterative comparison using an explicit stack of node pairs.
    ///
    /// FIX: nodes are inspected through `borrow()` instead of
    /// `Rc::try_unwrap(..).unwrap()`, so the function no longer panics when
    /// the caller (or the tree itself) still holds extra `Rc` references.
    fn stack_method(p: Tree, q: Tree) -> bool {
        let mut stack = vec![(p, q)];
        while let Some(pair) = stack.pop() {
            match pair {
                // Both subtrees absent: this position matches.
                (None, None) => {}
                (Some(a), Some(b)) => {
                    let (a, b) = (a.borrow(), b.borrow());
                    if a.val != b.val {
                        return false;
                    }
                    // Defer both child pairs for later comparison.
                    stack.push((a.left.clone(), b.left.clone()));
                    stack.push((a.right.clone(), b.right.clone()));
                }
                // Exactly one side present: shapes differ.
                _ => return false,
            }
        }
        true
    }
    /// Recursive structural-and-value comparison of both trees.
    fn recursive_method(p: Tree, q: Tree) -> bool {
        fn recursive(p: &Tree, q: &Tree) -> bool {
            match (p, q) {
                (Some(p), Some(q)) => {
                    let (p, q) = (p.borrow(), q.borrow());
                    p.val == q.val
                        && recursive(&p.left, &q.left)
                        && recursive(&p.right, &q.right)
                }
                (None, None) => true,
                _ => false,
            }
        }
        recursive(&p, &q)
    }
    /// Entry point: true when both trees have identical shape and values.
    pub fn is_same_tree(p: Tree, q: Tree) -> bool {
        // Solution::recursive_method(p, q)
        Solution::stack_method(p, q)
    }
}
// ----- submission codes end here -----
#[cfg(test)]
mod tests {
    use super::*;
    // Equal trees, differing shapes, and equal shapes with swapped values.
    #[test]
    fn test() {
        assert_eq!(Solution::is_same_tree(
            tree![1, 2, 3],
            tree![1, 2, 3]
        ), true);
        assert_eq!(Solution::is_same_tree(
            tree![1, 2, 3],
            tree![1, null, 2]
        ), false);
        assert_eq!(Solution::is_same_tree(
            tree![1, 2, 1],
            tree![1, 1, 2]
        ), false);
    }
}
|
use super::{AuthTokenExtractor, Response};
use crate::server::Server;
use crate::Config;
use axum::extract::Extension;
use axum::{extract, Json};
use hyper::StatusCode;
use log::{error, info, warn};
use serde::Deserialize;
use std::convert::TryFrom;
use std::net::Ipv4Addr;
use std::sync::Arc;
use std::time::Duration;
use tokio::time::Instant;
use tokio_postgres::NoTls;
/// Request body for the connectivity-check endpoint.
#[derive(Debug, Deserialize)]
pub struct CheckBody {
    // IPv4 address of the PostgreSQL host to probe.
    pub address: Ipv4Addr,
    // TCP port; defaults to PostgreSQL's standard 5432 when omitted.
    #[serde(default = "default_port")]
    pub port: u16,
}
/// HTTP handler: authenticate the caller, reject addresses outside the
/// allow-list (so credentials never go to arbitrary hosts), then attempt a
/// PostgreSQL connection and report the result.
pub async fn check_handler(
    auth_token: AuthTokenExtractor,
    body: extract::Json<CheckBody>,
    server: Extension<Arc<Server>>,
) -> (StatusCode, Json<Response>) {
    // Shared-key authentication against the configured auth key.
    if auth_token.0 != server.config.server.auth_key {
        return (
            StatusCode::UNAUTHORIZED,
            Json(Response::error("Invalid auth key")),
        );
    }
    // Don't send our credentials to random hosts!
    if !server
        .config
        .database
        .allowed_hosts
        .iter()
        .any(|range| range.contains(&body.address))
    {
        warn!("Authorized request provided disallowed IP {}", body.address);
        return (
            StatusCode::FORBIDDEN,
            Json(Response::error("Address not allowed")),
        );
    }
    info!("Performing check on {}:{}", body.address, body.port);
    // Connection failures are reported in the body; HTTP status stays 200.
    let res = try_connect(&server.config, &body.0).await;
    (StatusCode::OK, Json(res))
}
async fn try_connect(config: &Config, body: &CheckBody) -> Response {
let mut pg_config = tokio_postgres::Config::new();
pg_config.user(config.database.username.as_str());
pg_config.password(config.database.password.as_str());
pg_config.dbname(config.database.database.as_str());
pg_config.host(body.address.to_string().as_str());
pg_config.port(body.port);
pg_config.connect_timeout(Duration::from_secs(5));
let start = Instant::now();
match pg_config.connect(NoTls).await {
Ok(_) => {
let elapsed = start.elapsed();
let millis = match usize::try_from(elapsed.as_millis()) {
Ok(v) => v,
Err(_) => {
error!("elapsed time exceeded u64");
usize::MAX
}
};
info!("Check returned online in {}ms", millis);
Response::check_response(true, Some(millis))
}
Err(e) => {
info!("Check returned offline: {}", e);
Response::check_response(false, None)
}
}
}
/// Default PostgreSQL server port, used when the request omits `port`.
const fn default_port() -> u16 {
    5432
}
|
use super::{Token, TokenFilter, TokenStream};
/// Token filter that lowercases terms.
/// Note: only ASCII letters are lowered (see `make_ascii_lowercase` in the
/// stream below); non-ASCII text passes through unchanged.
#[derive(Clone)]
pub struct LowerCaser;
impl<TailTokenStream> TokenFilter<TailTokenStream> for LowerCaser
where
    TailTokenStream: TokenStream,
{
    type ResultTokenStream = LowerCaserTokenStream<TailTokenStream>;
    /// Wrap the upstream token stream in the lowercasing adapter.
    fn transform(&self, token_stream: TailTokenStream) -> Self::ResultTokenStream {
        LowerCaserTokenStream::wrap(token_stream)
    }
}
/// Stream adapter produced by `LowerCaser`: delegates to `tail` and
/// lowercases each token's text as the stream advances.
pub struct LowerCaserTokenStream<TailTokenStream>
where
    TailTokenStream: TokenStream,
{
    tail: TailTokenStream,
}
impl<TailTokenStream> TokenStream for LowerCaserTokenStream<TailTokenStream>
where
    TailTokenStream: TokenStream,
{
    fn token(&self) -> &Token {
        self.tail.token()
    }
    fn token_mut(&mut self) -> &mut Token {
        self.tail.token_mut()
    }
    /// Advance the underlying stream, then lowercase the new token in place.
    ///
    /// NOTE(review): `make_ascii_lowercase` folds only ASCII letters;
    /// non-ASCII text passes through unchanged — confirm that is intended.
    fn advance(&mut self) -> bool {
        if self.tail.advance() {
            self.tail.token_mut().text.make_ascii_lowercase();
            true
        } else {
            false
        }
    }
}
impl<TailTokenStream> LowerCaserTokenStream<TailTokenStream>
where
    TailTokenStream: TokenStream,
{
    /// Construct the adapter around an existing token stream.
    fn wrap(tail: TailTokenStream) -> LowerCaserTokenStream<TailTokenStream> {
        LowerCaserTokenStream { tail }
    }
}
|
use std::path::PathBuf;
use structopt::clap::AppSettings;
use structopt::StructOpt;
use wascc_host::{host, HostManifest};
#[macro_use]
extern crate log;
/// Top-level command line definition for the waSCC host binary.
#[derive(Debug, StructOpt, Clone)]
#[structopt(
    global_settings(&[AppSettings::ColoredHelp, AppSettings::VersionlessSubcommands]),
    name = "wascc-host",
    about = "A general-purpose waSCC runtime host")]
struct Cli {
    #[structopt(flatten)]
    command: CliCommand,
}
/// Arguments for a host invocation.
#[derive(Debug, Clone, StructOpt)]
struct CliCommand {
    /// Path to the host manifest
    #[structopt(short = "m", long = "manifest", parse(from_os_str))]
    manifest_path: PathBuf,
}
/// Parse the CLI, load the YAML host manifest, apply it, and then park the
/// main thread forever so the host keeps running in the background.
fn main() -> std::result::Result<(), Box<dyn std::error::Error>> {
    let cli = Cli::from_args();
    env_logger::init();
    let manifest = HostManifest::from_yaml(cli.command.manifest_path)?;
    info!(
        "waSCC Host Manifest loaded, CWD: {:?}",
        std::env::current_dir()?
    );
    host::apply_manifest(manifest)?;
    // Block this thread indefinitely; the host's work happens elsewhere.
    std::thread::park();
    Ok(())
}
|
use super::{GBranchData, GLeafData, GNodeData, GreenTree, GreenTreeFactory};
use crate::green::GRangeUnit;
use lru_cache::LruCache;
const LEAF_CACHE_SIZE: usize = 64;
/// Default green-tree builder: caches recently built leaves in an LRU and
/// keeps an explicit stack of open branches while the tree is assembled.
#[derive(Debug)]
pub struct DefaultTreeFactory<G: GreenTree> {
    // Recently built leaf nodes, keyed by their (node, leaf) data.
    leaf_cache: LruCache<(GNodeData<G>, GLeafData<G>), G::Node>,
    // Stack of branches under construction: branch data plus the index in
    // `children` where that branch's children begin.
    parents: Vec<(GNodeData<G>, GBranchData<G>, usize)>,
    // Finished nodes not yet attached to an enclosing branch.
    children: Vec<G::Node>,
}
impl<G: GreenTree> Default for DefaultTreeFactory<G> {
    /// An empty factory whose leaf cache holds `LEAF_CACHE_SIZE` entries.
    fn default() -> Self {
        Self {
            leaf_cache: LruCache::new(LEAF_CACHE_SIZE),
            parents: Vec::new(),
            children: Vec::new(),
        }
    }
}
impl<G: GreenTree> GreenTreeFactory<G> for DefaultTreeFactory<G> {
    /// Push a leaf node, reusing a cached node when the same
    /// (node_data, leaf_data) pair was built recently.
    fn leaf(
        &mut self,
        node_data: GNodeData<G>,
        leaf_data: GLeafData<G>,
        width: GRangeUnit<G>,
    ) -> &mut Self {
        let key = (node_data, leaf_data);
        let child = if let Some(child) = self.leaf_cache.get_mut(&key) {
            child.clone()
        } else {
            // Cache miss: clone the key because `insert` consumes it.
            let (node_data, leaf_data) = key.clone();
            let child = G::new_leaf(node_data, leaf_data, width);
            self.leaf_cache.insert(key, child.clone());
            child
        };
        self.children.push(child);
        self
    }
    /// Open a branch: remember where its children start in `children`.
    fn start_branch(&mut self, node_data: GNodeData<G>, branch_data: GBranchData<G>) -> &mut Self {
        let len = self.children.len();
        self.parents.push((node_data, branch_data, len));
        self
    }
    /// Close the innermost open branch, packaging everything pushed since
    /// the matching `start_branch` into a new branch node.
    /// Panics if no branch is open.
    fn finish_branch(&mut self) -> &mut Self {
        let (node_data, branch_data, first_child) = self.parents.pop().unwrap();
        let children: Vec<_> = self.children.drain(first_child..).collect();
        self.children.push(G::new_branch(
            node_data,
            branch_data,
            children.into_boxed_slice(),
        ));
        self
    }
    /// Finish building; exactly one root node must remain.
    fn finish(&mut self) -> G::Node {
        assert_eq!(self.children.len(), 1);
        self.children.pop().unwrap()
    }
}
|
use checklists::ChecklistModel;
/// A program record together with (optionally) its checklists.
#[derive(Debug)]
pub struct ProgramModel {
    pub id: i32,
    pub name: String,
    pub description: Option<String>,
    pub org_id: i32,
    // NOTE(review): presumably `None` means "checklists not loaded" rather
    // than "no checklists" — confirm against the query layer.
    pub checklists: Option<Vec<ChecklistModel>>,
}
|
use super::sort;
use super::sort_by;
use std::io;
use std::io::Seek;
use std::io::Write;
use std::str;
#[test]
// Buffer (1024 bytes) holds the whole input: exercises the in-memory path.
// Expected output is the input lines ordered by Unicode code point.
fn test_sort_in_buf() {
    let mut fin = tempfile::tempfile().unwrap();
    write!(
        fin,
        "寿限無、寿限無、
五劫の擦り切れ、
海砂利水魚の、
水行末・雲来末・風来末、
喰う寝る処に住む処、
藪ら柑子の藪柑子、
パイポ・パイポ・パイポのシューリンガン、
シューリンガンのグーリンダイ、
グーリンダイのポンポコピーのポンポコナの、
長久命の長助
"
    )
    .unwrap();
    // Rewind so sort() reads from the start of the temp file.
    fin.seek(io::SeekFrom::Start(0)).unwrap();
    let mut buf = Vec::new();
    let fout = Box::new(&mut buf);
    sort(fin, fout, 1024).unwrap();
    assert_eq!(
        "グーリンダイのポンポコピーのポンポコナの、
シューリンガンのグーリンダイ、
パイポ・パイポ・パイポのシューリンガン、
五劫の擦り切れ、
喰う寝る処に住む処、
寿限無、寿限無、
水行末・雲来末・風来末、
海砂利水魚の、
藪ら柑子の藪柑子、
長久命の長助
",
        str::from_utf8(&buf).unwrap()
    );
}
#[test]
// Tiny buffer (50 bytes) forces the external (temp-file backed) merge path;
// the result must match the in-memory path.
fn test_sort_using_file() {
    let mut fin = tempfile::tempfile().unwrap();
    write!(
        fin,
        "寿限無、寿限無、
五劫の擦り切れ、
海砂利水魚の、
水行末・雲来末・風来末、
喰う寝る処に住む処、
藪ら柑子の藪柑子、
パイポ・パイポ・パイポのシューリンガン、
シューリンガンのグーリンダイ、
グーリンダイのポンポコピーのポンポコナの、
長久命の長助
"
    )
    .unwrap();
    // Rewind so sort() reads from the start of the temp file.
    fin.seek(io::SeekFrom::Start(0)).unwrap();
    let mut buf = Vec::new();
    let fout = Box::new(&mut buf);
    sort(fin, fout, 50).unwrap();
    assert_eq!(
        "グーリンダイのポンポコピーのポンポコナの、
シューリンガンのグーリンダイ、
パイポ・パイポ・パイポのシューリンガン、
五劫の擦り切れ、
喰う寝る処に住む処、
寿限無、寿限無、
水行末・雲来末・風来末、
海砂利水魚の、
藪ら柑子の藪柑子、
長久命の長助
",
        str::from_utf8(&buf).unwrap()
    );
}
#[test]
// Sorting an empty input must produce empty output.
fn test_sort_empty() {
    let input = tempfile::tempfile().unwrap();
    let mut out = Vec::new();
    sort(input, Box::new(&mut out), 50).unwrap();
    assert_eq!("", str::from_utf8(&out).unwrap());
}
#[test]
// A single line comes back unchanged.
fn test_sort_one_line() {
    let mut input = tempfile::tempfile().unwrap();
    write!(input, "寿限無、寿限無、\n").unwrap();
    input.seek(io::SeekFrom::Start(0)).unwrap();
    let mut out = Vec::new();
    sort(input, Box::new(&mut out), 50).unwrap();
    assert_eq!("寿限無、寿限無、\n", str::from_utf8(&out).unwrap());
}
#[test]
// Two out-of-order lines are swapped into code-point order.
fn test_sort_two_lines() {
    let mut fin = tempfile::tempfile().unwrap();
    write!(fin, "寿限無、寿限無、\n五劫の擦り切れ、\n").unwrap();
    fin.seek(io::SeekFrom::Start(0)).unwrap();
    let mut buf = Vec::new();
    let fout = Box::new(&mut buf);
    sort(fin, fout, 50).unwrap();
    assert_eq!(
        "五劫の擦り切れ、\n寿限無、寿限無、\n",
        str::from_utf8(&buf).unwrap()
    );
}
#[test]
// Three lines: the middle element must end up first (code-point order).
fn test_sort_three_lines() {
    let mut fin = tempfile::tempfile().unwrap();
    write!(fin, "寿限無、寿限無、\n五劫の擦り切れ、\n海砂利水魚の、\n").unwrap();
    fin.seek(io::SeekFrom::Start(0)).unwrap();
    let mut buf = Vec::new();
    let fout = Box::new(&mut buf);
    sort(fin, fout, 50).unwrap();
    assert_eq!(
        "五劫の擦り切れ、\n寿限無、寿限無、\n海砂利水魚の、\n",
        str::from_utf8(&buf).unwrap()
    );
}
#[test]
// Custom comparator through sort_by: descending order, ignoring trailing
// line terminators when comparing.
fn test_sort_desc() {
    let mut fin = tempfile::tempfile().unwrap();
    write!(
        fin,
        "寿限無、寿限無、
五劫の擦り切れ、
海砂利水魚の、
水行末・雲来末・風来末、
喰う寝る処に住む処、
藪ら柑子の藪柑子、
パイポ・パイポ・パイポのシューリンガン、
シューリンガンのグーリンダイ、
グーリンダイのポンポコピーのポンポコナの、
長久命の長助
"
    )
    .unwrap();
    fin.seek(io::SeekFrom::Start(0)).unwrap();
    let mut buf = Vec::new();
    let fout = Box::new(&mut buf);
    // Compare with CR/LF stripped, reversed (b vs a) for descending order.
    let cmp = |a: &String, b: &String| {
        let a = a.trim_end_matches(|c| c == '\r' || c == '\n');
        let b = b.trim_end_matches(|c| c == '\r' || c == '\n');
        b.partial_cmp(a).unwrap()
    };
    sort_by(fin, fout, 50, cmp).unwrap();
    assert_eq!(
        "長久命の長助
藪ら柑子の藪柑子、
海砂利水魚の、
水行末・雲来末・風来末、
寿限無、寿限無、
喰う寝る処に住む処、
五劫の擦り切れ、
パイポ・パイポ・パイポのシューリンガン、
シューリンガンのグーリンダイ、
グーリンダイのポンポコピーのポンポコナの、
",
        str::from_utf8(&buf).unwrap()
    );
}
|
static PI_STR: &str = "31415926535897";
const DIGITS_LEN: usize = 10;
/// Print the first `DIGITS_LEN`-digit window of PI_STR that parses to a
/// prime. Windows with a leading zero are skipped so the parsed number
/// really has `DIGITS_LEN` digits.
fn main() {
    let pi_str_len = PI_STR.len();
    for i in 0..=(pi_str_len - DIGITS_LEN) {
        let digits = &PI_STR[i..(i + DIGITS_LEN)];
        // Idiomatic leading-zero check (was `chars().nth(0).unwrap()`).
        if digits.starts_with('0') {
            continue;
        }
        let digits = digits.parse::<i64>().unwrap();
        if is_prime(digits) {
            println!("{}", digits);
            break;
        }
    }
}
/// Trial-division primality test.
///
/// Returns `false` for every value below 2, then tries divisors up to and
/// including floor(sqrt(x)).
///
/// BUG FIX: the previous exclusive range `2..ceil(sqrt(x))` never tested
/// the square root itself, so perfect squares of primes (9, 25, 49, ...)
/// were reported prime; it also accepted 0, 1 and negatives.
fn is_prime(x: i64) -> bool {
    if x < 2 {
        return false;
    }
    // `i * i <= x` avoids float rounding issues entirely.
    let mut i = 2;
    while i * i <= x {
        if x % i == 0 {
            return false;
        }
        i += 1;
    }
    true
}
|
//! Built-in geometries, shaders and effects.
pub use builtin::object_material::{OBJECT_VERTEX_SRC, OBJECT_FRAGMENT_SRC, ObjectMatrixerial};
pub use builtin::normals_material::{NORMAL_VERTEX_SRC, NORMAL_FRAGMENT_SRC, NormalsMatrixerial};
pub use builtin::uvs_material::{UVS_VERTEX_SRC, UVS_FRAGMENT_SRC, UvsMatrixerial};
mod object_material;
mod normals_material;
mod uvs_material;
|
use std::fs::read_to_string;
use crate::error;
/// In-memory FASTA alignment: parallel lists of record titles and their
/// sequences, plus the per-site (column) strings built on demand.
pub struct Fasta {
    pub title_list : Vec<String>,
    pub seq_list : Vec<String>,
    pub site_list : Vec<String>,
}
impl Fasta {
    /// Create an empty FASTA container.
    pub fn new() -> Fasta
    {
        Fasta {
            title_list : Vec::new(),
            seq_list : Vec::new(),
            site_list : Vec::new(),
        }
    }
    /// Read the FASTA file at path `arg_i`, filling `title_list` and
    /// `seq_list`. Sequence lines belonging to one record are joined.
    ///
    /// Panics if the file cannot be read.
    pub fn read_fasta_info( &mut self, arg_i : &String )
    {
        let fin = read_to_string( ( *arg_i ).as_str() ).expect( "FAILED to open input file" );
        /* Temporary buffer joining the sequence lines of the current record. */
        let mut segment : Vec<String> = Vec::new();
        for line in fin.lines() {
            if line.starts_with( ">" ) {
                // New record: flush the previous record's sequence first
                // (there is none before the very first title).
                if !segment.is_empty() {
                    ( self.seq_list ).push( segment.concat() );
                    segment.clear();
                }
                ( self.title_list ).push( line.to_string() );
            } else {
                segment.push( line.to_string() );
            }
        }
        // Flush the final record.
        ( self.seq_list ).push( segment.concat() );
        ( self.title_list ).shrink_to_fit();
        ( self.seq_list ).shrink_to_fit();
        ( self.site_list ).shrink_to_fit();
    }
    /// Validate the parsed records.
    ///
    /// * `arg_t == "yes"`: non-standard symbols are converted into gaps.
    /// * `arg_t == "no"` : non-standard symbols abort via `error_bomb`.
    ///
    /// Also aborts when the number of titles differs from the number of
    /// sequences, or when sequence lengths differ.
    pub fn check_fasta_info( &mut self, arg_t : &String )
    {
        let num_title : usize = ( self.title_list ).len();
        let num_seq : usize = ( self.seq_list ).len();
        for i in 0 .. num_seq {
            let sequence : &String = &( self.seq_list[ i ] );
            if *arg_t == "yes" { self.seq_list[ i ] = convert_to_gap( sequence, i + 1 ); }
            else if *arg_t == "no" { check_symbol( sequence, i + 1 ); }
        }
        if num_seq != num_title { error::error_bomb( "seq_title_not_same" ); }
        for i in 1 .. num_seq {
            if ( self.seq_list[ 0 ] ).len() != ( self.seq_list[ i ] ).len() {
                error::error_bomb( "seq_len_not_same" );
            }
        }
    }
    /// Build the per-site (column) strings from the row sequences.
    ///
    /// PERF FIX: the old version re-collected every sequence into a
    /// `Vec<char>` once per site (O(sites * seqs * len)); the char tables
    /// are now built once. (Also dropped a needless `.to_string()` before
    /// `.len()`.)
    pub fn get_site_list( &mut self )
    {
        let num_seq : usize = ( self.seq_list ).len();
        let num_site : usize = ( self.seq_list[ 0 ] ).len();
        println!( "Number of the sequences : {}", num_seq );
        println!( "Number of the sites : {}", num_site );
        /* Hoisted: one char table per sequence, reused for every site. */
        let seq_chars : Vec<Vec<char>> =
            ( self.seq_list ).iter().map( |s| s.chars().collect() ).collect();
        for i in 0 .. num_site {
            let mut site = String::with_capacity( num_seq );
            for chars in &seq_chars {
                site.push( chars[ i ] );
            }
            ( self.site_list ).push( site );
        }
    }
}
/// Sanitize one sequence: the twenty standard residues and '-' pass through
/// unchanged; every other symbol is replaced with a gap ('-') and a notice
/// is printed.
fn convert_to_gap( sequence : &String, seq_order : usize ) -> String
{
    let sanitize = | aa : char | -> char {
        match aa {
            'A'|'R'|'N'|'D'|'C'|'Q'|'E'|'G'|'H'|'I'|'L'|'K'|'M'|'F'|'P'|'S'|'T'|'W'|'Y'|'V'|'-' => aa,
            'B'|'Z'|'X'|'U'|'O' => {
                println!( "\nNOTE :");
                println!( "Non-standard residue was observed in sequence {} : '{}'", seq_order, aa );
                println!( "'{}' was converted into gap.", aa );
                println!( "" );
                '-'
            },
            _ => {
                println!( "\nNOTE :" );
                println!( "Unexpected symbol was observed in sequence {} : '{}'", seq_order, aa );
                println!( "'{}' was converted into gap.", aa );
                println!( "" );
                '-'
            },
        }
    };
    /* map + collect replaces the old index loop over a Vec<char>. */
    sequence.chars().map( sanitize ).collect()
}
/* Strict validation counterpart of convert_to_gap: any symbol outside the
   twenty standard residues and '-' aborts the program via error_bomb. */
fn check_symbol( sequence : &String, seq_order : usize )
{
    let aa_list : Vec<char> = ( *sequence ).chars().collect();
    for i in 0 .. aa_list.len() {
        let aa : char = aa_list[ i ];
        match aa {
            'A'|'R'|'N'|'D'|'C'|'Q'|'E'|'G'|'H'|'I'|'L'|'K'|'M'|'F'|'P'|'S'|'T'|'W'|'Y'|'V'|'-' => (),
            'B'|'Z'|'X'|'U'|'O' => {
                println!( "\nFATAL :" );
                println!( "Non-standard residue was observed in sequence {} : '{}'", seq_order, aa );
                println!( "" );
                error::error_bomb( "non_standard_residue" );
            },
            _ => {
                println!( "\nFATAL :" );
                println!( "Unexpected symbol was observed in sequence {} : '{}'", seq_order, aa );
                println!( "" );
                error::error_bomb( "unexpected_symbol" );
            },
        }
    }
}
|
extern crate rand;
use rand::Rng;
use rand::distributions::{Range, IndependentSample};
/// Heaviside Step Function: maps a value to 1 (non-negative) or 0
/// (negative).
trait Heaviside {
    fn heaviside(&self) -> i8;
}
/// Heaviside step for f64: 1 for values >= 0.0, otherwise 0.
impl Heaviside for f64 {
    fn heaviside(&self) -> i8 {
        if *self >= 0.0 { 1 } else { 0 }
    }
}
/// Dot product of a 3-vector of i8 inputs with a 3-vector of f64 weights.
fn dot(input: (i8, i8, i8), weights: (f64, f64, f64)) -> f64 {
    let (x0, x1, x2) = input;
    let (w0, w1, w2) = weights;
    f64::from(x0) * w0 + f64::from(x1) * w1 + f64::from(x2) * w2
}
/// One labelled sample: a 3-component input (the third component is a
/// constant bias term, always 1 in the data below) and the expected output.
struct TrainingDatum {
    input: (i8, i8, i8),
    expected: i8,
}
// Train a single perceptron on the logical-OR truth table, then print its
// outputs. Uses the pre-1.0 `rand` 0.3 API (Range / IndependentSample /
// Rng::choose).
fn main() {
    println!("Hello, perceptron!");
    let mut rng = rand::thread_rng();
    // Provide some training data: (a, b, bias) -> a OR b.
    let training_data = [
        TrainingDatum { input: (0, 0, 1), expected: 0 },
        TrainingDatum { input: (0, 1, 1), expected: 1 },
        TrainingDatum { input: (1, 0, 1), expected: 1 },
        TrainingDatum { input: (1, 1, 1), expected: 1 },
    ];
    // Initialize weight vector with random data between 0 and 1
    let range = Range::new(0.0, 1.0);
    let mut w = (
        range.ind_sample(&mut rng),
        range.ind_sample(&mut rng),
        range.ind_sample(&mut rng),
    );
    // Learning rate
    let eta = 0.2;
    // Number of iterations
    let n = 100;
    // Training
    println!("Starting training phase with {} iterations...", n);
    for _ in 0..n {
        // Choose a random training sample
        let &TrainingDatum { input: x, expected } = rng.choose(&training_data).unwrap();
        // Calculate the dot product
        let result = dot(x, w);
        // Calculate the error (-1, 0 or 1 after thresholding)
        let error = expected - result.heaviside();
        // Update the weights: standard perceptron rule w += eta * err * x.
        w.0 += eta * error as f64 * x.0 as f64;
        w.1 += eta * error as f64 * x.1 as f64;
        w.2 += eta * error as f64 * x.2 as f64;
    }
    // Show result
    for &TrainingDatum { input, .. } in &training_data {
        let result = dot(input, w);
        println!("{} OR {}: {:.*} -> {}", input.0, input.1, 8, result, result.heaviside());
    }
}
#[cfg(test)]
mod test {
    use super::Heaviside;
    // Positive values map to 1.
    #[test]
    fn heaviside_positive() {
        assert_eq!((0.5).heaviside(), 1i8);
    }
    // Zero is treated as non-negative, hence 1.
    #[test]
    fn heaviside_zero() {
        assert_eq!((0.0).heaviside(), 1i8);
    }
    // Negative values map to 0.
    #[test]
    fn heaviside_negative() {
        assert_eq!((-0.5).heaviside(), 0i8);
    }
}
|
use std::time::{Instant, Duration};
use std::ops::{Index,IndexMut};
use std::process;
use std::env;
use std::vec;
// The supported calculation Algorithms
// Gauss Seidel working on the same matrix
// Jacobi using in and out matrices
#[derive(Debug, PartialEq)]
enum CalculationMethod
{
    MethGaussSeidel,
    MethJacobi,
}
// For parsing command line arguments
// Accepts either the variant name or its 1-based ordinal ("1"/"2").
impl std::str::FromStr for CalculationMethod
{
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err>
    {
        match s
        {
            "MethGaussSeidel" | "1" => Ok(CalculationMethod::MethGaussSeidel),
            "MethJacobi" | "2" => Ok(CalculationMethod::MethJacobi),
            _ => Err(format!("'{}' is not a valid value for CalculationMethod", s)),
        }
    }
}
// The supported inference functions used during calculation
// F0: f(x,y) = 0
// FPiSin: f(x,y) = 2pi^2*sin(pi*x)sin(pi*y)
#[derive(Debug, PartialEq)]
enum InferenceFunction
{
    FuncF0,
    FuncFPiSin,
}
// For parsing command line arguments
// Accepts either the variant name or its 1-based ordinal ("1"/"2").
impl std::str::FromStr for InferenceFunction
{
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err>
    {
        match s
        {
            "FuncF0" | "1" => Ok(InferenceFunction::FuncF0),
            "FuncFPiSin" | "2" => Ok(InferenceFunction::FuncFPiSin),
            _ => Err(format!("'{}' is not a valid value for InferenceFunction", s)),
        }
    }
}
// The supported termination conditions
// TermPrec: terminate after set precision is reached
// TermIter: terminate after set amount of iterations
#[derive(Debug, PartialEq)]
enum TerminationCondition
{
    TermPrec,
    TermIter,
}
// For parsing command line arguments
// Accepts either the variant name or its 1-based ordinal ("1"/"2").
impl std::str::FromStr for TerminationCondition
{
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err>
    {
        match s
        {
            "TermPrec" | "1" => Ok(TerminationCondition::TermPrec),
            "TermIter" | "2" => Ok(TerminationCondition::TermIter),
            _ => Err(format!("'{}' is not a valid value for TerminationCondition", s)),
        }
    }
}
// Data structure for storing the given parameters for the calculation
#[derive(Debug)]
struct CalculationOptions
{
    number: u64, // number of threads
    method: CalculationMethod, // Gauss Seidel or Jacobi method of iteration
    interlines: usize, // matrix size = interline*8+9
    inf_func: InferenceFunction, // inference function
    termination: TerminationCondition, // termination condition
    term_iteration: u64, // terminate if iteration number reached
    term_precision: f64, // terminate if precision reached
}
impl CalculationOptions
{
    // Plain constructor; argument validation happens in ask_params.
    fn new(number: u64, method: CalculationMethod, interlines: usize, inf_func: InferenceFunction,
        termination: TerminationCondition, term_iteration: u64, term_precision: f64)
        -> CalculationOptions
    {
        CalculationOptions{number, method, interlines, inf_func, termination, term_iteration, term_precision}
    }
}
// Holds the data needed during the calculation itself.
#[derive(Debug)]
struct CalculationArguments
{
    n: usize, // Number of spaces between lines (lines=n+1)
    num_matrices: usize, // number of matrices
    h: f64, // length of a space between two lines
    matrices: Vec<PartdiffMatrix>, // The matrices for calculation
}
impl CalculationArguments
{
    // Allocates `num_matrices` zero-initialized (n+1)x(n+1) matrices.
    fn new(n: usize, num_matrices: usize, h: f64) -> CalculationArguments
    {
        let matrices = (0..num_matrices)
            .map(|_| PartdiffMatrix::new(n + 1))
            .collect();
        CalculationArguments{n, num_matrices, h, matrices}
    }
}
// Data structure for storing result data of the calculation
#[derive(Debug)]
struct CalculationResults
{
    m: usize, // Index of matrix that holds the final state
    stat_iteration: u64, // number of current iteration
    stat_precision: f64, // actual precision of all slaces in iteration
}
impl CalculationResults
{
    // Plain constructor; fields are updated in-place by calculate().
    fn new(m: usize, stat_iteration: u64, stat_precision: f64) -> CalculationResults
    {
        CalculationResults{m, stat_iteration, stat_precision}
    }
}
// Simple data structure for a 2D matrix
// Has an efficient continuous 1D memory layout
#[derive(Debug)]
struct PartdiffMatrix
{
    n: usize,         // rows == columns == row stride
    matrix: Vec<f64>, // row-major storage, n * n elements
}
impl PartdiffMatrix
{
    // Create an n x n matrix initialized to zero.
    //
    // BUG FIX: the Index/IndexMut impls address elements as
    // `row * n + col` with row, col < n, so exactly n*n elements are
    // needed. The previous `(n+1)*(n+1)` allocation wasted ~2n+1 cells per
    // matrix and disagreed with the memory figure reported by
    // display_statistics.
    fn new(n: usize) -> PartdiffMatrix
    {
        let matrix = vec![0.0; n * n];
        PartdiffMatrix{n, matrix}
    }
}
// Implementation of Index and IndexMut traits for the matrix
// 2d-array-indexing allows access to matrix elements with following syntax:
// matrix[[x,y]]
//
// This version is used if the crate is build with: --features "2d-array-indexing"
//
// Also supports switching between indexing with or without bounds checking
// This can be set by building the crate with or without: --features "unsafe-indexing"
#[cfg(feature = "2d-array-indexing")]
impl Index<[usize; 2]> for PartdiffMatrix
{
    type Output = f64;
    // Row-major element access: matrix[[row, col]] -> row * n + col.
    fn index(&self, idx: [usize; 2]) -> &Self::Output
    {
        #[cfg(not(feature = "unsafe-indexing"))]
        {
            &self.matrix[idx[0] * self.n + idx[1]]
        }
        // SAFETY: relies on the caller keeping idx[0], idx[1] < self.n;
        // no check is performed in this configuration.
        #[cfg(feature = "unsafe-indexing")]
        unsafe
        {
            &self.matrix.get_unchecked(idx[0] * self.n + idx[1])
        }
    }
}
#[cfg(feature = "2d-array-indexing")]
impl IndexMut<[usize; 2]> for PartdiffMatrix
{
fn index_mut(&mut self, idx: [usize; 2]) -> &mut Self::Output
{
#[cfg(not(feature = "unsafe-indexing"))]
{
&mut self.matrix[idx[0] * self.n + idx[1]]
}
#[cfg(feature = "unsafe-indexing")]
unsafe
{
self.matrix.get_unchecked_mut(idx[0] * self.n + idx[1])
}
}
}
// Implementation of Index and IndexMut traits for the matrix
// C-style-indexing allows access to matrix elements with following syntax:
// matrix[x][y]
//
// This version is used if the crate is build with: --features "C-style-indexing"
//
// Also supports switching between indexing with or without bounds checking
// This can be set by building the crate with or without: --features "unsafe-indexing"
#[cfg(feature = "C-style-indexing")]
impl Index<usize> for PartdiffMatrix
{
    type Output = [f64];
    // Returns row `idx` as a slice, enabling matrix[x][y] chaining.
    fn index(&self, idx: usize) -> &Self::Output
    {
        #[cfg(not(feature = "unsafe-indexing"))]
        {
            &self.matrix[idx*self.n .. (idx+1)*self.n]
        }
        // SAFETY: relies on the caller keeping idx < self.n.
        #[cfg(feature = "unsafe-indexing")]
        unsafe
        {
            &self.matrix.get_unchecked(idx*self.n .. (idx+1)*self.n)
        }
    }
}
#[cfg(feature = "C-style-indexing")]
impl IndexMut<usize> for PartdiffMatrix
{
fn index_mut(&mut self, idx: usize) -> &mut Self::Output
{
#[cfg(not(feature = "unsafe-indexing"))]
{
&mut self.matrix[idx*self.n .. (idx+1)*self.n]
}
#[cfg(feature = "unsafe-indexing")]
unsafe
{
self.matrix.get_unchecked_mut(idx*self.n .. (idx+1)*self.n)
}
}
}
// Display help message to show the required command line arguments to run the binary
// Printed on any argument error before the process exits.
fn usage()
{
    println!("Usage: ./rust_partdiff [number] [method] [interlines] [func] [termination] [prec/iter]\n");
    println!(" -number: number of threads (1 .. n)");
    println!(" -method: calculation method (MethGaussSeidel/MethJacobi OR 1/2)");
    println!(" -interlines: number of interlines (1 .. n)");
    println!("             matrixsize = (interlines * 8) + 9");
    println!(" -func: inference function (FuncF0/FuncFPiSin OR 1/2)");
    println!(" -termination: termination condition (TermPrec/TermIter OR 1/2)");
    println!("              TermPrec: sufficient precision");
    println!("              TermIter: number of iterations");
    println!(" -prec/iter: depending on termination:");
    println!("            precision: 1e-4 .. 1e-20");
    println!("            iterations: 1 .. n");
}
// Helper function to parse one command line argument into any FromStr type.
// Prints an error plus the usage text and terminates the process when the
// argument is missing or malformed.
fn parse_arg<U>(arg: Option<String>) -> U
    where U: std::str::FromStr,
    <U as std::str::FromStr>::Err: std::fmt::Display
{
    if let Some(text) = arg
    {
        text.parse().unwrap_or_else(|error|
        {
            eprintln!("Error: {}", error);
            usage();
            process::exit(1);
        })
    }
    else
    {
        eprintln!("Error: incomplete arguments.");
        usage();
        process::exit(1);
    }
}
// Parsing of command line arguments
// Order: number method interlines func termination prec|iter.
// Exits with usage() on any invalid or missing value.
fn ask_params(mut args: std::env::Args) -> CalculationOptions
{
    // TODO keep authors of original c version?
    println!("============================================================");
    println!("Program for calculation of partial differential equations. ");
    println!("============================================================");
    // println!("(c) Dr. Thomas Ludwig, TU München.");
    // println!("    Thomas A. Zochler, TU München.");
    // println!("    Andreas C. Schmidt, TU München.");
    // println!("============================================================");
    // TODO interactive arguments
    // Skip argv[0] (the binary name).
    args.next();
    let number: u64 = parse_arg(args.next());
    if number < 1
    {
        eprintln!("Error number argument must be a positive integer");
        usage();
        process::exit(1);
    }
    let method: CalculationMethod = parse_arg(args.next());
    let interlines: usize = parse_arg(args.next());
    let inf_func: InferenceFunction = parse_arg(args.next());
    let termination: TerminationCondition = parse_arg(args.next());
    // Check for the meaning of the last argument
    match termination
    {
        TerminationCondition::TermPrec =>
        {
            let prec: f64 = parse_arg(args.next());
            if (prec < 1e-20) | (prec > 1e-4)
            {
                eprintln!("Error: termination precision must be between 1e-20 and 1e-4");
                usage();
                process::exit(1);
            }
            // Iteration cap is effectively unlimited in precision mode.
            return CalculationOptions::new(number, method, interlines, inf_func, termination, std::u64::MAX, prec);
        },
        TerminationCondition::TermIter =>
        {
            let iterations = parse_arg(args.next());
            if iterations < 1
            {
                eprintln!("Error: termination iterations must be > 0");
                usage();
                process::exit(1);
            }
            // Precision target of 0.0 is never reached, so only the
            // iteration count terminates the run.
            return CalculationOptions::new(number, method, interlines, inf_func, termination, iterations, 0.0);
        },
    }
}
// Determine calculation arguments and initialize calculation results
//
// The grid has (interlines * 8) + 9 lines, hence n = lines - 1 interior
// spacings. Jacobi needs a second matrix for the previous iteration state;
// Gauss-Seidel updates in place and needs only one.
fn init_variables(options: &CalculationOptions) -> (CalculationArguments, CalculationResults)
{
    let n: usize = (options.interlines * 8) + 9 - 1;
    let num_matrices: usize = match options.method
    {
        CalculationMethod::MethGaussSeidel => 1,
        CalculationMethod::MethJacobi => 2,
    };
    // Grid spacing (dropped the redundant `1.0 as f64` cast).
    let h: f64 = 1.0 / n as f64;
    let arguments = CalculationArguments::new(n, num_matrices, h);
    let results = CalculationResults::new(0, 0, 0.0);
    (arguments, results)
}
// Initialize the matrix borders according to the used inference function
//
// For FuncF0 the borders run linearly between 0 and 1; for FuncFPiSin the
// borders stay zero, which the zero-initialized allocation already
// provides, so nothing is done here.
fn init_matrices(arguments: &mut CalculationArguments, options: &CalculationOptions)
{
    if options.inf_func == InferenceFunction::FuncF0
    {
        let matrix = &mut arguments.matrices;
        let n = arguments.n;
        let h = arguments.h;
        // Apply the same border values to every matrix in use.
        for g in 0 .. arguments.num_matrices as usize
        {
            for i in 0..(n+1)
            {
                #[cfg(feature = "2d-array-indexing")]
                {
                    matrix[g][[i,0]] = 1.0 - (h * i as f64);
                    matrix[g][[i,n]] = h * i as f64;
                    matrix[g][[0,i]] = 1.0 - (h * i as f64);
                    matrix[g][[n,i]] = h * i as f64;
                }
                #[cfg(feature = "C-style-indexing")]
                {
                    matrix[g][i][0] = 1.0 - (h * i as f64);
                    matrix[g][i][n] = h * i as f64;
                    matrix[g][0][i] = 1.0 - (h * i as f64);
                    matrix[g][n][i] = h * i as f64;
                }
            }
        }
    }
}
// Main calculation
//
// Iterates the five-point stencil until the termination condition is met.
// With Jacobi, m1 indexes the matrix being read and m2 the one being
// written (swapped every iteration); with Gauss-Seidel both stay 0 so the
// update happens in place.
fn calculate(arguments: &mut CalculationArguments, results: &mut CalculationResults, options: &CalculationOptions)
{
    const PI: f64 = 3.141592653589793;
    const TWO_PI_SQUARE: f64 = 2.0 * PI * PI;
    let n = arguments.n;
    let h = arguments.h;
    let mut star: f64;        // newly computed value of the current cell
    let mut residuum: f64;    // per-cell change in this iteration
    let mut maxresiduum: f64; // largest change seen in this iteration
    let mut pih: f64 = 0.0;
    let mut fpisin: f64 = 0.0;
    let mut term_iteration = options.term_iteration;
    // for distinguishing between old and new state of the matrix if two matrices are used
    let mut m1: usize = 0;
    let mut m2: usize = 0;
    if options.method == CalculationMethod::MethJacobi
    {
        m1 = 0;
        m2 = 1;
    }
    if options.inf_func == InferenceFunction::FuncFPiSin
    {
        // Precompute the constant factors of 2pi^2*sin(pi*x)sin(pi*y)*h^2/4.
        pih = PI * h;
        fpisin = 0.25 * TWO_PI_SQUARE * h * h;
    }
    while term_iteration > 0
    {
        let matrix = &mut arguments.matrices;
        maxresiduum = 0.0;
        // Interior cells only; borders are fixed by init_matrices.
        for i in 1..n
        {
            let mut fpisin_i = 0.0;
            if options.inf_func == InferenceFunction::FuncFPiSin
            {
                // Row-constant part of the inference term.
                fpisin_i = fpisin * (pih * i as f64).sin();
            }
            for j in 1..n
            {
                // Average of the four neighbours (five-point stencil).
                #[cfg(feature = "2d-array-indexing")]
                {
                    star = 0.25 * (matrix[m1][[i-1,j]] + matrix[m1][[i+1,j]] +
                        matrix[m1][[i,j-1]] + matrix[m1][[i,j+1]]);
                }
                #[cfg(feature = "C-style-indexing")]
                {
                    star = 0.25 * (matrix[m1][i-1][j] + matrix[m1][i+1][j] +
                        matrix[m1][i][j-1] + matrix[m1][i][j+1]);
                }
                if options.inf_func == InferenceFunction::FuncFPiSin
                {
                    star += fpisin_i * (pih * j as f64).sin();
                }
                // Track the residuum only when it can influence termination
                // (precision mode, or the final iteration for reporting).
                if (options.termination == TerminationCondition::TermPrec) | (term_iteration == 1)
                {
                    #[cfg(feature = "2d-array-indexing")]
                    {
                        residuum = (matrix[m1][[i,j]] - star).abs();
                    }
                    #[cfg(feature = "C-style-indexing")]
                    {
                        residuum = (matrix[m1][i][j] - star).abs();
                    }
                    maxresiduum = match residuum
                    {
                        r if r < maxresiduum => maxresiduum,
                        _ => residuum,
                    };
                }
                #[cfg(feature = "2d-array-indexing")]
                {
                    matrix[m2][[i,j]] = star;
                }
                #[cfg(feature = "C-style-indexing")]
                {
                    matrix[m2][i][j] = star;
                }
            }
        }
        results.stat_iteration += 1;
        results.stat_precision = maxresiduum;
        // Swap read/write roles (no-op for Gauss-Seidel where m1 == m2).
        let tmp = m1;
        m1 = m2;
        m2 = tmp;
        match options.termination
        {
            TerminationCondition::TermPrec =>
            {
                if maxresiduum < options.term_precision
                {
                    term_iteration = 0;
                }
            },
            TerminationCondition::TermIter => term_iteration -= 1,
        }
    }
    // After the final swap, m2 indexes the most recently written matrix.
    results.m = m2;
}
// Display important information about the calculation
// (output labels are in German, matching the original C program).
fn display_statistics(arguments: &CalculationArguments, results: &CalculationResults, options: &CalculationOptions, duration: Duration)
{
    let n = arguments.n;
    println!("Berechnungszeit:    {:.6}", duration.as_secs_f64());
    // Memory: (n+1)^2 doubles per matrix, reported in MiB.
    println!("Speicherbedarf:     {:.4} MiB", ((n+1)*(n+1)*std::mem::size_of::<f64>()*arguments.num_matrices) as f64 / 1024.0 / 1024.0);
    println!("Berechnungsmethode: {:?}", options.method);
    println!("Interlines:         {}", options.interlines);
    print!("Stoerfunktion:      ");
    match options.inf_func
    {
        InferenceFunction::FuncF0 => print!("f(x,y) = 0\n"),
        InferenceFunction::FuncFPiSin => print!("f(x,y) = 2pi^2*sin(pi*x)sin(pi*y)\n"),
    }
    print!("Terminierung:       ");
    match options.termination
    {
        TerminationCondition::TermPrec => print!("Hinreichende Genauigkeit\n"),
        TerminationCondition::TermIter => print!("Anzahl der Iterationen\n"),
    }
    println!("Anzahl Iterationen: {}", results.stat_iteration);
    println!("Norm des Fehlers:   {:.6e}", results.stat_precision);
}
// Description of display_matrix:
//
// display_matrix prints the matrix to standard output in a readable form.
//
// Readability is achieved by printing only part of the matrix: the border
// rows/columns plus seven evenly spaced interior rows/columns.
fn display_matrix(arguments: &mut CalculationArguments, results: &CalculationResults, options: &CalculationOptions)
{
    // The matrix holding the final state after calculate().
    let matrix = &mut arguments.matrices[results.m as usize];
    let interlines = options.interlines;
    println!("Matrix:");
    // Sample indices 0, interlines+1, 2*(interlines+1), ... — 9 in total.
    for y in 0..9 as usize
    {
        for x in 0..9 as usize
        {
            #[cfg(feature = "2d-array-indexing")]
            {
                print!(" {:.4}", matrix[[y * (interlines+1),x * (interlines+1)]]);
            }
            #[cfg(feature = "C-style-indexing")]
            {
                print!(" {:.4}", matrix[y * (interlines+1)][x * (interlines+1)]);
            }
        }
        print!("\n");
    }
}
fn main()
{
    // Parse CLI options, allocate and initialize the matrices, time the
    // solver, then report statistics and a sample of the result matrix.
    let options = ask_params(env::args());
    let (mut arguments, mut results) = init_variables(&options);
    init_matrices(&mut arguments, &options);
    let now = Instant::now();
    calculate(&mut arguments, &mut results, &options);
    let duration = now.elapsed();
    display_statistics(&arguments, &results, &options, duration);
    display_matrix(&mut arguments, &results, &options);
}
|
/// Public wrapper around `calculate_distance`: maps the length-mismatch
/// case (`None`) onto `Err(0)` to preserve the existing `Result` interface.
pub fn hamming_distance(left: &str, right: &str) -> Result<usize, usize> {
    calculate_distance(left, right).ok_or(0)
}
/// Hamming distance between two strings, or `None` when their lengths
/// differ.
///
/// BUG FIX: lengths are now compared in *characters*, matching the
/// per-character comparison below. The old byte-length check let strings
/// with equal byte length but different char counts (possible with
/// multi-byte UTF-8) slip through, silently truncating the zip.
fn calculate_distance(left: &str, right: &str) -> Option<usize> {
    if left.chars().count() != right.chars().count() {
        None
    } else {
        let iter = left.chars().zip(right.chars());
        let length = iter.filter(|&(a, b)| a != b).count();
        Some(length)
    }
}
/*
https://projecteuler.net
If p is the perimeter of a right angle triangle with integral length
sides, {a,b,c}, there are exactly three solutions for p = 120.
{20,48,52}, {24,45,51}, {30,40,50}
For which value of p ≤ 1000, is the number of solutions maximised?
NOTES:
*/
// Count right triangles with integer sides a <= b < c and perimeter p.
fn count_solutions(p : u64) -> usize {
    let mut rv = 0_usize;
    // a is the smallest side, so a < p/3.
    for a in 1..p/3 {
        let a2 = a*a;
        // b < c forces b <= (p - a) / 2.
        // BUG FIX: the old bound was `p - a/2` (precedence slip for
        // `(p - a) / 2`); the result was still correct only because the
        // break below always fired first.
        for b in a..=(p - a) / 2 {
            let b2 = b*b;
            let c = p - a - b;
            let c2 = c*c;
            let a2b2 = a2 + b2;
            if a2b2 == c2 {
                rv += 1;
            } else if a2b2 > c2 {
                // c shrinks as b grows, so no larger b can satisfy it.
                break;
            }
        }
    }
    rv
}
#[test]
fn test_count() {
    // assert_eq! reports both values on failure, unlike the old bare
    // `assert!(x == y)`.
    assert_eq!(count_solutions(120), 3);
}
// Brute-force scan: the perimeter <= 1000 with the most solutions (the
// smallest such perimeter on ties, because the comparison is strict).
#[allow(dead_code)]
fn solve1() -> u64 {
    let mut best_count = 0_usize;
    let mut best_p = 0_u64;
    for p in 1..=1000 {
        let count = count_solutions(p);
        if count > best_count {
            best_count = count;
            best_p = p;
        }
    }
    best_p
}
// Tally-based variant: for each (a, b) leg pair, derive the integer
// hypotenuse candidate and count the perimeter when it is exact. Returns
// the perimeter <= 1000 with the most solutions (first on ties).
fn solve2() -> u64 {
    let mut counts = [0_u64; 1001];
    for a in 1..333 {
        let a_sq = a * a;
        for b in a..666 {
            let sum_sq = a_sq + b * b;
            // Hypotenuse candidate via float sqrt, truncated toward zero.
            let c = (sum_sq as f64).sqrt() as u64;
            let p = a + b + c;
            if p > 1000 || c < b {
                break;
            }
            if sum_sq == c * c {
                counts[p as usize] += 1;
            }
        }
    }
    let mut best_p = 0_usize;
    let mut best_count = 0_u64;
    for (p, &count) in counts.iter().enumerate() {
        // Strict `>` keeps the first (smallest) perimeter on ties.
        if count > best_count {
            best_count = count;
            best_p = p;
        }
    }
    best_p as u64
}
/// Run the solver, print the answer, and print the elapsed time in
/// microseconds with thousands separators (e.g. "1,234,567").
fn main() {
    let start_time = std::time::Instant::now();
    let sol = solve2();
    let elapsed = start_time.elapsed().as_micros();
    println!("\nSolution: {}", sol);
    // Build the comma-grouped string from the least significant group
    // outward; a zero duration prints as "0".
    let mut remain = elapsed;
    let mut s = String::new();
    if remain == 0 {
        s.insert(0, '0');
    }
    while remain > 0 {
        let temp = remain % 1000;
        remain /= 1000;
        if remain > 0 {
            // Inner groups are zero-padded to three digits.
            s = format!(",{:03}", temp) + &s;
        } else {
            s = format!("{}", temp) + &s;
        }
    }
    // BUG FIX: the label previously read "Elasped".
    println!("Elapsed time: {} us", s);
}
|
pub extern crate disrusm;
pub mod ir;
pub mod ir_vm;
pub mod mnem2ir;
pub mod operand;
|
#![feature(rand)]
#![feature(core)]
extern crate piston;
extern crate graphics;
extern crate sdl2_window;
extern crate opengl_graphics;
use graphics::vecmath::Matrix2d;
use piston::window::WindowSettings;
use std::thread;
use std::rand;
use std::cell::RefCell;
use std::rand::Rng;
use std::num::Float;
use std::sync::atomic::{AtomicUsize, Ordering, AtomicIsize};
use std::sync::{Arc, Mutex};
use piston::input::Button;
use piston::input::keyboard::Key;
use piston::event::{
events,
PressEvent,
ReleaseEvent,
RenderEvent,
};
use sdl2_window::Sdl2Window as Window;
use opengl_graphics::{ Gl, OpenGL, Texture };
const COLOR_UP:[f32; 4] = [0.8, 0.2, 0.2, 1.0];
const COLOR_DOWN:[f32; 4] = [0.2, 0.2, 0.8, 1.0];
static mut temperature: f64 = 2.0;
const WINDOWSIZE: u32 = 800;
const SIZE: usize = 200;
const BLOCKSIZE: f64 = (WINDOWSIZE as f64 / SIZE as f64);
// Uniform random f64 in [0, 1) from the thread-local RNG (pre-1.0
// `std::rand` API).
fn get_rand() -> f64 {
    rand::thread_rng().gen_range(0.0, 1.0)
}
// Local Ising energy at (i, j): -s[i][j] times the sum of its four
// neighbours, with periodic (wrap-around) boundaries. Result is in -4..=4.
// NOTE(review): `s` is passed by value, copying the entire SIZE x SIZE
// grid (40 KB at SIZE=200) on every call — a reference would avoid that.
fn calc_energy(s: [[i8; SIZE]; SIZE], i: usize, j: usize) -> i8 {
    // Each match wraps to the opposite edge at the border.
    let top = match i {
        0 => s[SIZE - 1][j],
        _ => s[i-1][j]
    };
    let bottom = match i + 1 {
        SIZE => s[0][j],
        _ => s[i+1][j]
    };
    let left = match j {
        0 => s[i][SIZE-1],
        _ => s[i][j-1]
    };
    let right = match j + 1 {
        SIZE => s[i][0],
        _ => s[i][j+1]
    };
    -s[i][j] * (top + bottom + left + right)
}
// Energy change if spin (i, j) were flipped: flipping negates the local
// energy, so the delta is -2 times the current value.
fn delta_u(s: [[i8; SIZE]; SIZE], i: usize, j: usize) -> i8 {
    -2 * calc_energy(s, i, j)
}
// One Metropolis step on spin (i, j): always flip when it lowers the
// energy, otherwise flip with probability exp(-dE / T). Returns the energy
// change actually applied (0 when the flip was rejected).
// Reads the global `temperature` (static mut) without synchronization.
fn do_iter(s: &mut [[i8; SIZE]; SIZE], i: usize, j: usize) -> i8 {
    let mut newenergy = 0;
    let ediff = delta_u(*s, i, j);
    if ediff <= 0 {
        s[i][j] = -s[i][j];
        newenergy = ediff;
    } else {
        // Boltzmann acceptance for energy-raising flips.
        if get_rand() < (-ediff as f64 / unsafe { temperature }).exp() {
            s[i][j] = -s[i][j];
            newenergy = ediff;
        }
    }
    return newenergy;
}
static mut state: [[i8; SIZE]; SIZE] = [[0i8; SIZE]; SIZE];
/// Sets up the lattice and SDL window, spawns two Metropolis worker threads
/// that mutate the shared `state` forever, then runs the render/input loop.
///
/// NOTE(review): this targets a pre-1.0 toolchain (`range`, `std::rand`,
/// feature gates); workers and the render loop access the `static mut`
/// lattice and temperature without synchronization.
fn main() {
    // Create an SDL window.
    let window = Window::new(
        OpenGL::_3_2,
        WindowSettings {
            title: "Ising".to_string(),
            // 100 extra pixels of vertical space below the lattice area.
            size: [WINDOWSIZE, WINDOWSIZE + 100],
            ..WindowSettings::default()
        });
    let window = RefCell::new(window);
    // Create a new game and run it.
    let mut gl = Gl::new(OpenGL::_3_2);
    // Random initial configuration: each cell +1 or -1 with equal probability.
    for i in range(0, SIZE) {
        for j in range(0, SIZE) {
            unsafe {
                state[i][j] = match get_rand() < 0.5 {
                    true => 1,
                    false => -1
                }
            }
        }
    }
    // Total energy of the initial configuration (sum of per-site energies).
    // NOTE(review): the accumulator starts at 1, not 0 — confirm whether the
    // offset is intentional.
    let mut initial_energy: isize = 1;
    for i in range(0, SIZE) {
        for j in range(0, SIZE) {
            print!("calculating initial energy state: {} / {} ({}%)\r",
                i*SIZE + j + 1, SIZE * SIZE, ((i * SIZE + j + 1) * 100) / (SIZE * SIZE));
            initial_energy += calc_energy(unsafe { * &mut state }, i, j) as isize;
        }
    }
    println!("\ninitial energy: {}", initial_energy);
    // Shared counters: global iteration count, running total energy, and an
    // accumulated energy sum (ubar) used for the reported average.
    let iters = Arc::new(AtomicUsize::new(0));
    let ubar_value: isize = initial_energy;
    let ubar = Arc::new(AtomicIsize::new(ubar_value));
    let energy = Arc::new(AtomicIsize::new(initial_energy));
    // Two workers flip random cells forever; they never join.
    for threadnum in range(0, 2) {
        let iters = iters.clone();
        let ubar = ubar.clone();
        let energy = energy.clone();
        thread::spawn(move || {
            loop {
                let iter = iters.fetch_add(1, Ordering::Relaxed) + 1;
                let i = (get_rand() * SIZE as f64).floor() as usize;
                let j = (get_rand() * SIZE as f64).floor() as usize;
                // fetch_add returns the value *before* this step's delta.
                let energy = energy.fetch_add(
                    do_iter(unsafe {&mut state}, i, j) as isize, Ordering::Relaxed);
                let u = ubar.fetch_add(energy, Ordering::Relaxed);
                // Only thread 0 prints, every 100 global iterations.
                if threadnum == 0 && iter % 100 == 0 {
                    print!("iteration {} ({} per cell) : average energy per cell {}\r", iter, iter / (SIZE * SIZE),
                        u as f64 / ((iter * SIZE * SIZE + 1) as f64));
                }
            }
        });
    }
    // Event loop: arrow keys adjust the temperature, render events redraw.
    for e in events(&window) {
        if let Some(Button::Keyboard(key)) = e.press_args() {
            unsafe {
                // Up/Down: coarse ±0.1 steps; Right/Left: fine ±0.01 steps.
                temperature += match(key) {
                    Key::Up => 0.1,
                    Key::Down => -0.1,
                    Key::Right => 0.01,
                    Key::Left => -0.01,
                    _ => 0.0
                };
                // Clamp at absolute zero.
                if temperature < 0.0 {
                    temperature = 0.0;
                }
                println!("\nNew temperature: {}", unsafe { temperature });
            }
        };
        if let Some(r) = e.render_args() {
            // Draw below a 100 px top strip; one filled square per cell.
            gl.draw([0, 100, r.width as i32, (r.height - 100) as u32 as i32], |_, gl| {
                graphics::clear([0.0; 4], gl);
                for i in range(0, SIZE) {
                    for j in range(0, SIZE) {
                        let col = match unsafe { state[i][j] } {
                            1 => COLOR_UP,
                            _ => COLOR_DOWN
                        };
                        let square = graphics::rectangle::square(i as f64 * BLOCKSIZE,
                            j as f64 * BLOCKSIZE, BLOCKSIZE);
                        let context = &graphics::Context::abs(WINDOWSIZE as f64,
                            WINDOWSIZE as f64);
                        graphics::rectangle(col, square, context.transform, gl);
                    }
                }
            });
        }
    }
}
|
// svd2rust-generated accessors for the AC DLL control register.
#[doc = "Reader of register DDRPHYC_ACDLLCR"]
pub type R = crate::R<u32, super::DDRPHYC_ACDLLCR>;
#[doc = "Writer for register DDRPHYC_ACDLLCR"]
pub type W = crate::W<u32, super::DDRPHYC_ACDLLCR>;
#[doc = "Register DDRPHYC_ACDLLCR `reset()`'s with value 0x4000_0000"]
impl crate::ResetValue for super::DDRPHYC_ACDLLCR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Reset state: only DLLSRST (bit 30) is set.
        0x4000_0000
    }
}
#[doc = "Reader of field `MFBDLY`"]
pub type MFBDLY_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `MFBDLY`"]
pub struct MFBDLY_W<'a> {
    w: &'a mut W,
}
impl<'a> MFBDLY_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // MFBDLY is the 3-bit field occupying bits 8:6.
        const OFFSET: u32 = 6;
        const MASK: u32 = 0x07;
        let cleared = self.w.bits & !(MASK << OFFSET);
        self.w.bits = cleared | ((value as u32 & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `MFWDLY`"]
pub type MFWDLY_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `MFWDLY`"]
pub struct MFWDLY_W<'a> {
    w: &'a mut W,
}
impl<'a> MFWDLY_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // MFWDLY is the 3-bit field occupying bits 11:9.
        const OFFSET: u32 = 9;
        const MASK: u32 = 0x07;
        let cleared = self.w.bits & !(MASK << OFFSET);
        self.w.bits = cleared | ((value as u32 & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `ATESTEN`"]
pub type ATESTEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ATESTEN`"]
pub struct ATESTEN_W<'a> {
    w: &'a mut W,
}
impl<'a> ATESTEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // ATESTEN is the single bit at position 18.
        const OFFSET: u32 = 18;
        let cleared = self.w.bits & !(1 << OFFSET);
        self.w.bits = cleared | ((value as u32) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `DLLSRST`"]
pub type DLLSRST_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DLLSRST`"]
pub struct DLLSRST_W<'a> {
    w: &'a mut W,
}
impl<'a> DLLSRST_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // DLLSRST is the single bit at position 30.
        const OFFSET: u32 = 30;
        let cleared = self.w.bits & !(1 << OFFSET);
        self.w.bits = cleared | ((value as u32) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `DLLDIS`"]
pub type DLLDIS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DLLDIS`"]
pub struct DLLDIS_W<'a> {
    w: &'a mut W,
}
impl<'a> DLLDIS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // DLLDIS is the single bit at position 31 (the register MSB).
        const OFFSET: u32 = 31;
        let cleared = self.w.bits & !(1 << OFFSET);
        self.w.bits = cleared | ((value as u32) << OFFSET);
        self.w
    }
}
// Read-side accessors: each extracts its field from the cached register value.
impl R {
    #[doc = "Bits 6:8 - MFBDLY"]
    #[inline(always)]
    pub fn mfbdly(&self) -> MFBDLY_R {
        MFBDLY_R::new(((self.bits >> 6) & 0x07) as u8)
    }
    #[doc = "Bits 9:11 - MFWDLY"]
    #[inline(always)]
    pub fn mfwdly(&self) -> MFWDLY_R {
        MFWDLY_R::new(((self.bits >> 9) & 0x07) as u8)
    }
    #[doc = "Bit 18 - ATESTEN"]
    #[inline(always)]
    pub fn atesten(&self) -> ATESTEN_R {
        ATESTEN_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 30 - DLLSRST"]
    #[inline(always)]
    pub fn dllsrst(&self) -> DLLSRST_R {
        DLLSRST_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 31 - DLLDIS"]
    #[inline(always)]
    pub fn dlldis(&self) -> DLLDIS_R {
        DLLDIS_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
// Write-side accessors: each returns a proxy borrowing the writer, so calls
// can be chained inside a `write`/`modify` closure.
impl W {
    #[doc = "Bits 6:8 - MFBDLY"]
    #[inline(always)]
    pub fn mfbdly(&mut self) -> MFBDLY_W {
        MFBDLY_W { w: self }
    }
    #[doc = "Bits 9:11 - MFWDLY"]
    #[inline(always)]
    pub fn mfwdly(&mut self) -> MFWDLY_W {
        MFWDLY_W { w: self }
    }
    #[doc = "Bit 18 - ATESTEN"]
    #[inline(always)]
    pub fn atesten(&mut self) -> ATESTEN_W {
        ATESTEN_W { w: self }
    }
    #[doc = "Bit 30 - DLLSRST"]
    #[inline(always)]
    pub fn dllsrst(&mut self) -> DLLSRST_W {
        DLLSRST_W { w: self }
    }
    #[doc = "Bit 31 - DLLDIS"]
    #[inline(always)]
    pub fn dlldis(&mut self) -> DLLDIS_W {
        DLLDIS_W { w: self }
    }
}
|
mod app;
mod route;
mod layout;
use app::App;
/// Entry point: mounts the root `App` component onto the document body.
fn main() {
    yew::start_app::<App>();
}
|
use actix_web::*;
use futures::future::result;
use futures::Future;
use std::collections::HashMap;
use super::{errors, AppState};
use crate::engine::ingestor::IngestEvents;
use crate::opentracing::Span;
/// Response body for `ingest`: echoes the generated ingest id and how many
/// spans were accepted into the batch.
#[derive(Debug, Serialize, Deserialize)]
pub struct IngestResponse {
    ingest_id: crate::engine::IngestId,
    pub nb_events: usize, // number of spans received in the request body
}
/// POST handler: deserializes the request body as a `Vec<Span>` and forwards
/// it to the `Ingestor` actor as a fire-and-forget `IngestEvents` message.
/// Responds immediately with the ingest id and the span count; ingestion
/// itself completes asynchronously.
pub fn ingest(
    req: &HttpRequest<AppState>,
) -> Box<dyn Future<Item = HttpResponse, Error = errors::IkError>> {
    // The actor handle comes from the actix system registry.
    let ingestor = actix::System::current()
        .registry()
        .get::<crate::engine::ingestor::Ingestor>();
    req.json()
        .from_err()
        .and_then(move |val: Vec<Span>| {
            let nb_spans = val.len();
            let ingest = IngestEvents::new(val);
            let ingest_id = ingest.ingest_id.clone();
            debug!("ingesting {} event(s) as {}", nb_spans, ingest_id,);
            // `do_send` does not wait for the actor to process the batch.
            ingestor.do_send(ingest);
            Ok(HttpResponse::Ok().json(IngestResponse {
                ingest_id,
                nb_events: nb_spans,
            }))
        })
        .responder()
}
/// Returns the de-duplicated (and now sorted) list of known service names.
pub fn get_services(
    _req: &HttpRequest<AppState>,
) -> Box<dyn Future<Item = HttpResponse, Error = errors::IkError>> {
    crate::DB_READ_EXECUTOR_POOL
        .send(crate::db::read::span::GetServices)
        .from_err()
        .and_then(|mut services| {
            // Fix: `Vec::dedup` only removes *consecutive* duplicates, so the
            // list must be sorted first — same pattern as
            // `get_spans_by_service`. Also gives clients a stable ordering.
            services.sort_unstable();
            services.dedup();
            Ok(HttpResponse::Ok().json(services))
        })
        .responder()
}
/// Returns the sorted, de-duplicated span names for the service given in the
/// `serviceName` query parameter; 400 BadRequest when the parameter is absent.
pub fn get_spans_by_service(
    req: &HttpRequest<AppState>,
) -> Box<dyn Future<Item = HttpResponse, Error = errors::IkError>> {
    match req.query().get("serviceName") {
        // The parameter value itself is unused here: `SpanQuery::from_req`
        // re-reads it from the request.
        Some(_) => crate::DB_READ_EXECUTOR_POOL
            .send(crate::db::read::span::GetSpans(
                crate::db::read::span::SpanQuery::from_req(&req),
            ))
            .from_err()
            .and_then(|res| {
                let mut span_names = res
                    .iter()
                    .map(|span| span.name.clone().unwrap_or_else(|| "n/a".to_string()))
                    .collect::<Vec<String>>();
                // Sort before dedup: dedup only removes consecutive duplicates.
                span_names.sort_unstable();
                span_names.dedup();
                Ok(HttpResponse::Ok().json(span_names))
            })
            .responder(),
        _ => result(Err(super::errors::IkError::BadRequest(
            "missing serviceName query parameter".to_string(),
        )))
        .responder(),
    }
}
/// Returns all spans belonging to the trace identified by the `traceId` path
/// parameter; 400 BadRequest when the path parameter is absent.
pub fn get_spans_by_trace_id(
    req: &HttpRequest<AppState>,
) -> Box<dyn Future<Item = HttpResponse, Error = errors::IkError>> {
    match req.match_info().get("traceId") {
        Some(trace_id) => crate::DB_READ_EXECUTOR_POOL
            .send(crate::db::read::span::GetSpans(
                crate::db::read::span::SpanQuery::from_req(&req)
                    .with_trace_id(trace_id.to_string()),
            ))
            .from_err()
            .and_then(|res| Ok(HttpResponse::Ok().json(res)))
            .responder(),
        _ => result(Err(super::errors::IkError::BadRequest(
            "missing traceId path parameter".to_string(),
        )))
        .responder(),
    }
}
/// Fetches the spans matching the request's query and returns them grouped
/// into traces (one inner list per trace id; grouping order is unspecified).
pub fn get_traces(
    req: &HttpRequest<AppState>,
) -> Box<dyn Future<Item = HttpResponse, Error = errors::IkError>> {
    crate::DB_READ_EXECUTOR_POOL
        .send(crate::db::read::span::GetSpans(
            crate::db::read::span::SpanQuery::from_req(&req),
        ))
        .from_err()
        .and_then(|res| {
            // Bucket spans by trace id, then discard the keys.
            let mut grouped: HashMap<_, Vec<_>> = HashMap::new();
            for span in res {
                grouped
                    .entry(span.trace_id.clone())
                    .or_insert_with(Vec::new)
                    .push(span);
            }
            let traces: Vec<_> = grouped.into_iter().map(|(_, spans)| spans).collect();
            Ok(HttpResponse::Ok().json(traces))
        })
        .responder()
}
/// One directed edge of the service dependency graph.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct Dependency {
    parent: String, // calling (local) service
    child: String,  // called (remote) service
    call_count: u32,
    // NOTE(review): nothing in this file ever increments error_count —
    // it is always serialized as 0.
    error_count: u32,
}
impl Dependency {
    /// Returns a copy of this edge with `call_count` bumped by one.
    fn add_call(&self) -> Self {
        let mut bumped = Dependency {
            parent: self.parent.clone(),
            child: self.child.clone(),
            call_count: self.call_count,
            error_count: self.error_count,
        };
        bumped.call_count += 1;
        bumped
    }
}
/// Builds the service dependency graph: fetches up to 100k endpoint-only
/// spans and counts calls per (local service, remote service) pair.
pub fn get_dependencies(
    req: &HttpRequest<AppState>,
) -> Box<dyn Future<Item = HttpResponse, Error = errors::IkError>> {
    crate::DB_READ_EXECUTOR_POOL
        .send(crate::db::read::span::GetSpans(
            crate::db::read::span::SpanQuery::from_req(&req)
                .with_limit(100_000)
                .only_endpoint(),
        ))
        .from_err()
        .and_then(|res| {
            Ok(HttpResponse::Ok().json({
                let by_services = res.into_iter().fold(HashMap::new(), |mut map, elt| {
                    // Missing endpoints or service names degrade to "n/a".
                    let local_service = elt
                        .local_endpoint
                        .and_then(|ep| ep.service_name)
                        .unwrap_or_else(|| "n/a".to_string());
                    let remote_service = elt
                        .remote_endpoint
                        .and_then(|ep| ep.service_name)
                        .unwrap_or_else(|| "n/a".to_string());
                    {
                        // `add_call` returns a bumped *copy*, which is then
                        // re-inserted over the old entry (entry-or-default,
                        // then increment). error_count stays 0 throughout.
                        let dep = {
                            map.entry(format!("{}-{}", local_service, remote_service))
                                .or_insert(Dependency {
                                    parent: local_service.clone(),
                                    child: remote_service.clone(),
                                    call_count: 0,
                                    error_count: 0,
                                })
                                .add_call()
                        };
                        map.insert(format!("{}-{}", local_service, remote_service), dep);
                    }
                    map
                });
                // Drop the "parent-child" keys; only the edges are returned.
                let mut by_trace = Vec::new();
                for (_, spans) in by_services {
                    by_trace.push(spans);
                }
                by_trace
            }))
        })
        .responder()
}
|
extern crate walkdir;
extern crate serde_json;
extern crate semver;
use std::env;
use std::process::Command;
use walkdir::WalkDir;
use std::path::{PathBuf, Path};
use std::fs::File;
use std::io::Read;
use serde_json::Value;
use semver::Version;
/// Runs `make COMPILER=<compiler>` inside `path`; panics on spawn failure or
/// a non-zero exit status.
fn make(path: &Path, compiler: &str) {
    let compiler_arg = format!("COMPILER={}", compiler);
    let status = Command::new("make")
        .current_dir(path)
        .arg(compiler_arg)
        .status()
        .expect("Failed to build");
    assert!(status.success());
}
/// Executes the freshly built test binary inside `path` (`a.exe` on Windows,
/// `./a.out` elsewhere); panics on spawn failure or non-zero exit.
fn run(path: &Path) {
    let program = if cfg!(windows) {
        path.join("a.exe")
    } else {
        // Relative path is fine: the process runs with `path` as its cwd.
        PathBuf::from("./a.out")
    };
    let status = Command::new(program)
        .current_dir(path)
        .status()
        .expect("Failed to run");
    assert!(status.success());
}
/// Reads the expected-output fixture for a test directory.
///
/// Prefers the compiler-version-specific file
/// (`expected_<compiler>_<ver>.<format>`) and falls back to the generic
/// `expected_<compiler>.<format>`. Panics if neither can be read.
fn read_expected(path: &Path, compiler: &str, compiler_ver: &str, format: &str) -> String {
    let name_with_ver = format!("expected_{}_{}.{}", compiler, compiler_ver, format);
    let name = if path.join(&name_with_ver).exists() {
        name_with_ver
    } else {
        format!("expected_{}.{}", compiler, format)
    };
    // `unwrap_or_else` instead of `expect(format!(..))`: the message is only
    // built on failure (clippy::expect_fun_call), and open+read collapse
    // into one call.
    std::fs::read_to_string(path.join(&name))
        .unwrap_or_else(|e| panic!("failed to read {}: {}", name, e))
}
/// Invokes grcov via `cargo run -- <flags>` on `path` and returns its stdout.
/// `llvm` adds `--llvm`; coveralls output gets dummy token/commit metadata.
fn run_grcov(path: &Path, llvm: bool, output_format: &str) -> String {
    let mut args = vec!["--"];
    if llvm {
        args.push("--llvm");
    }
    args.extend(&["-t", output_format]);
    if output_format == "coveralls" {
        args.extend(&[
            "--token",
            "TOKEN",
            "--commit-sha",
            "COMMIT",
            "-s",
            path.to_str().unwrap(),
            "--branch",
        ]);
    }
    let output = Command::new("cargo")
        .arg("run")
        .arg(path)
        .args(args)
        .output()
        .expect("Failed to run grcov");
    // Forward grcov's stderr so failures are diagnosable from the test log.
    eprintln!("{}", String::from_utf8(output.stderr).unwrap());
    String::from_utf8(output.stdout).unwrap()
}
/// Runs `make clean` inside `path`; panics on spawn failure or non-zero exit.
fn make_clean(path: &Path) {
    let status = Command::new("make")
        .current_dir(path)
        .arg("clean")
        .status()
        .expect("Failed to clean");
    assert!(status.success());
}
/// Field-by-field equality of two ADE JSON records. `skip_methods` ignores
/// the method *name* (mangling differs across machines) but still compares
/// the method's coverage counters.
fn check_equal_inner(a: &Value, b: &Value, skip_methods: bool) -> bool {
    let counters = [
        "covered",
        "uncovered",
        "percentage_covered",
        "total_covered",
        "total_uncovered",
    ];
    let methods_equal = (skip_methods || a["method"]["name"] == b["method"]["name"])
        && counters.iter().all(|&k| a["method"][k] == b["method"][k]);
    let files_equal = a["file"]["name"] == b["file"]["name"]
        && counters.iter().all(|&k| a["file"][k] == b["file"][k]);
    a["is_file"] == b["is_file"] && a["language"] == b["language"] && methods_equal && files_equal
}
/// Asserts that line-delimited-JSON ADE `output` matches `expected_output` as
/// an unordered set of records, ignoring records under `/usr/` and — on CI —
/// method names.
fn check_equal_ade(expected_output: &str, output: &str) {
    let mut expected: Vec<Value> = Vec::new();
    for line in expected_output.lines() {
        expected.push(serde_json::from_str(line).unwrap());
    }
    let mut actual: Vec<Value> = Vec::new();
    for line in output.lines() {
        actual.push(serde_json::from_str(line).unwrap());
    }
    // On CI, don't check methods, as on different machines names are slightly differently mangled.
    let skip_methods = env::var("CONTINUOUS_INTEGRATION").is_ok();
    let mut actual_len = 0;
    for out in &actual {
        // System headers are machine-specific; exclude them from the count.
        if out["file"]["name"].as_str().unwrap().starts_with("/usr/") {
            continue;
        }
        actual_len += 1;
        let exp = expected.iter().find(|x| check_equal_inner(x, out, skip_methods));
        assert!(exp.is_some(), "Got unexpected {} - Expected output: {:?}", out, expected_output);
    }
    // Every expected record must appear somewhere in the actual output.
    for exp in &expected {
        let out = actual.iter().find(|x| check_equal_inner(x, exp, skip_methods));
        assert!(out.is_some(), "Missing {} - Full output: {:?}", exp, output);
    }
    assert_eq!(expected.len(), actual_len, "Got same number of expected records.");
}
/// Asserts that a coveralls JSON payload matches the expected fixture:
/// metadata fields exactly, and per-source-file coverage either exactly or —
/// on CI — only by covered/uncovered classification per line.
fn check_equal_coveralls(expected_output: &str, output: &str, skip_branches: bool) {
    let expected: Value = serde_json::from_str(expected_output).unwrap();
    let actual: Value = serde_json::from_str(output).unwrap();
    println!("{}", serde_json::to_string_pretty(&actual).unwrap());
    assert_eq!(expected["git"]["branch"], actual["git"]["branch"]);
    assert_eq!(expected["git"]["head"]["id"], actual["git"]["head"]["id"]);
    assert_eq!(expected["repo_token"], actual["repo_token"]);
    assert_eq!(expected["service_job_number"], actual["service_job_number"]);
    assert_eq!(expected["service_name"], actual["service_name"]);
    assert_eq!(expected["service_number"], actual["service_number"]);
    // On CI, don't check line counts, as on different compiler versions they are slightly different.
    let skip_line_counts = env::var("CONTINUOUS_INTEGRATION").is_ok();
    let actual_source_files = actual["source_files"].as_array().unwrap();
    let expected_source_files = expected["source_files"].as_array().unwrap();
    for exp in expected_source_files {
        // Match files by name; both directions are checked (missing below,
        // unexpected in the loop after this one).
        let out = actual_source_files.iter().find(|x| x["name"] == exp["name"]);
        assert!(out.is_some(), "Missing {} - Full output: {:?}", exp, output);
        let out = out.unwrap();
        assert_eq!(exp["name"], out["name"]);
        assert_eq!(exp["source_digest"], out["source_digest"], "Got correct digest for {}", exp["name"]);
        if !skip_line_counts {
            assert_eq!(exp["coverage"], out["coverage"], "Got correct coverage for {}", exp["name"]);
        } else {
            // CI: compare only null-ness and covered(>0)/uncovered per line,
            // not exact hit counts.
            let expected_coverage = exp["coverage"].as_array().unwrap();
            let actual_coverage = out["coverage"].as_array().unwrap();
            assert_eq!(expected_coverage.len(), actual_coverage.len(), "Got same number of lines.");
            for i in 0..expected_coverage.len() {
                if expected_coverage[i].is_null() {
                    assert!(actual_coverage[i].is_null(), "Got correct coverage at line {} for {}", i, exp["name"]);
                } else {
                    assert_eq!(expected_coverage[i].as_i64().unwrap() > 0, actual_coverage[i].as_i64().unwrap() > 0, "Got correct coverage at line {} for {}", i, exp["name"]);
                }
            }
        }
        if !skip_line_counts || !skip_branches {
            assert_eq!(exp["branches"], out["branches"], "Got correct branch coverage for {}", exp["name"]);
        }
    }
    for out in actual_source_files {
        let exp = expected_source_files.iter().find(|x| x["name"] == out["name"]);
        assert!(exp.is_some(), "Got unexpected {} - Expected output: {:?}", out, expected_output);
    }
    assert_eq!(expected_source_files.len(), actual_source_files.len(), "Got same number of source files.");
}
/// Returns gcc's major version via `gcc -dumpversion`, or the full trimmed
/// version string when the output does not parse as semver (newer gcc prints
/// just the major number, which semver rejects).
fn get_gcc_version() -> String {
    let output = Command::new("gcc")
        .arg("-dumpversion")
        .output()
        .expect("Failed to execute `gcc`.");
    assert!(output.status.success(), "`gcc` failed to execute.");
    let version = String::from_utf8(output.stdout).unwrap();
    if let Ok(parsed) = Version::parse(&version) {
        parsed.major.to_string()
    } else {
        version.trim().to_string()
    }
}
#[test]
fn test_integration() {
    // Walks every test directory under tests/, builds each fixture with GCC
    // and clang, runs it, and compares grcov's ade + coveralls output against
    // the checked-in expectations.
    if cfg!(windows) {
        println!("Integration tests still not supported under Windows.");
        return;
    }
    // COMPILER_VER overrides version detection (used to pick the fixture file).
    let compiler_ver = match env::var("COMPILER_VER") {
        Ok(v) => v,
        Err(_e) => get_gcc_version(),
    };
    for entry in WalkDir::new("tests").min_depth(1) {
        let entry = entry.unwrap();
        let path = entry.path();
        if path.is_dir() {
            println!("\n\n{}", path.display());
            // These fixtures have compiler-dependent branch data.
            let skip_branches = path == Path::new("tests/template") || path == Path::new("tests/include") ||
                path == Path::new("tests/include2") || path == Path::new("tests/class");
            make_clean(path);
            if !cfg!(windows) {
                println!("GCC");
                make(path, "g++");
                run(path);
                check_equal_ade(&read_expected(path, "gcc", &compiler_ver, "ade"), &run_grcov(path, false, "ade"));
                check_equal_coveralls(&read_expected(path, "gcc", &compiler_ver, "coveralls"), &run_grcov(path, false, "coveralls"), skip_branches);
                make_clean(path);
            }
            // The LLVM pass is skipped on macOS.
            if !cfg!(target_os="macos") {
                println!("\nLLVM");
                make(path, "clang++");
                run(path);
                check_equal_ade(&read_expected(path, "llvm", &compiler_ver, "ade"), &run_grcov(path, true, "ade"));
                check_equal_coveralls(&read_expected(path, "llvm", &compiler_ver, "coveralls"), &run_grcov(path, true, "coveralls"), skip_branches);
                make_clean(path);
            }
        }
    }
}
|
/// Opens `path` and lazily yields each line parsed as a `u32`.
/// Panics if the file cannot be opened, a line cannot be read, or a line is
/// not a valid unsigned integer.
pub fn read_numbers<P: AsRef<Path>>(path: P) -> impl Iterator<Item=u32> {
    let reader = BufReader::new(File::open(path).unwrap());
    reader
        .lines()
        .map(|line| line.unwrap().parse::<u32>().unwrap())
}
mod get_all;
pub use get_all::*;
|
use blst_rust::types::g1::FsG1;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use kzg::G1;
use std::num::NonZeroUsize;
use subspace_core_primitives::crypto::kzg::Commitment;
use subspace_core_primitives::ArchivedHistorySegment;
use subspace_erasure_coding::ErasureCoding;
/// Benchmarks `ErasureCoding::extend_commitments` over a source set of
/// `NUM_PIECES / 2` random G1 commitments.
fn criterion_benchmark(c: &mut Criterion) {
    let num_shards = ArchivedHistorySegment::NUM_PIECES;
    // scale = log2(num_shards); NonZeroUsize::new only fails if ilog2() is 0,
    // i.e. fewer than two shards.
    let scale = NonZeroUsize::new(num_shards.ilog2() as usize)
        .expect("Recorded history segment contains at very least one record; qed");
    let ec = ErasureCoding::new(scale).unwrap();
    let source_commitments = (0..num_shards / 2)
        .map(|_| Commitment::from(FsG1::rand()))
        .collect::<Vec<_>>();
    c.bench_function("extend", |b| {
        b.iter(|| {
            // black_box keeps the optimizer from const-folding the input.
            ec.extend_commitments(black_box(&source_commitments))
                .unwrap()
        })
    });
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
|
// BLAKE2: simpler, smaller, fast as MD5
// https://www.blake2.net/blake2.pdf
//
// The BLAKE2 Cryptographic Hash and Message Authentication Code (MAC)
// https://datatracker.ietf.org/doc/html/rfc7693
//
// BLAKE2 comes in two basic flavors:
//
// o BLAKE2b (or just BLAKE2) is optimized for 64-bit platforms and
// produces digests of any size between 1 and 64 bytes.
//
// o BLAKE2s is optimized for 8- to 32-bit platforms and produces
// digests of any size between 1 and 32 bytes.
//
// Both BLAKE2b and BLAKE2s are believed to be highly secure and perform
// well on any platform, software, or hardware. BLAKE2 does not require
// a special "HMAC" (Hashed Message Authentication Code) construction
// for keyed message authentication as it has a built-in keying mechanism.
//
//
// 2.1. Parameters
// https://datatracker.ietf.org/doc/html/rfc7693#section-2.1
//
// The following table summarizes various parameters and their ranges:
//
// | BLAKE2b | BLAKE2s |
// --------------+------------------+------------------+
// Bits in word | w = 64 | w = 32 |
// Rounds in F | r = 12 | r = 10 |
// Block bytes | bb = 128 | bb = 64 |
// Hash bytes | 1 <= nn <= 64 | 1 <= nn <= 32 |
// Key bytes | 0 <= kk <= 64 | 0 <= kk <= 32 |
// Input bytes | 0 <= ll < 2**128 | 0 <= ll < 2**64 |
// --------------+------------------+------------------+
// G Rotation | (R1, R2, R3, R4) | (R1, R2, R3, R4) |
// constants = | (32, 24, 16, 63) | (16, 12, 8, 7) |
// --------------+------------------+------------------+
// BLAKE2b initialization vector (RFC 7693, Section 2.6) — the same eight
// 64-bit words as SHA-512's IV.
const BLAKE2B_IV: [u64; 8] = [
    0x6a09e667f3bcc908,
    0xbb67ae8584caa73b,
    0x3c6ef372fe94f82b,
    0xa54ff53a5f1d36f1,
    0x510e527fade682d1,
    0x9b05688c2b3e6c1f,
    0x1f83d9abfb41bd6b,
    0x5be0cd19137e2179,
];
// The fixed-length variants below pre-mix the parameter block into word 0:
// h[0] = IV[0] ^ 0x0101_00nn (key length 0, fanout 1, depth 1, digest length
// nn). Words 1..7 are unchanged.
// nn = 28: 0x...f3bcc908 ^ 0x0101001c = 0x...f2bdc914.
const BLAKE2B_224_IV: [u64; 8] = [
    0x6a09e667f2bdc914,
    0xbb67ae8584caa73b,
    0x3c6ef372fe94f82b,
    0xa54ff53a5f1d36f1,
    0x510e527fade682d1,
    0x9b05688c2b3e6c1f,
    0x1f83d9abfb41bd6b,
    0x5be0cd19137e2179,
];
// nn = 32 (0x20).
const BLAKE2B_256_IV: [u64; 8] = [
    0x6a09e667f2bdc928,
    0xbb67ae8584caa73b,
    0x3c6ef372fe94f82b,
    0xa54ff53a5f1d36f1,
    0x510e527fade682d1,
    0x9b05688c2b3e6c1f,
    0x1f83d9abfb41bd6b,
    0x5be0cd19137e2179,
];
// nn = 48 (0x30).
const BLAKE2B_384_IV: [u64; 8] = [
    0x6a09e667f2bdc938,
    0xbb67ae8584caa73b,
    0x3c6ef372fe94f82b,
    0xa54ff53a5f1d36f1,
    0x510e527fade682d1,
    0x9b05688c2b3e6c1f,
    0x1f83d9abfb41bd6b,
    0x5be0cd19137e2179,
];
// nn = 64 (0x40).
const BLAKE2B_512_IV: [u64; 8] = [
    0x6a09e667f2bdc948,
    0xbb67ae8584caa73b,
    0x3c6ef372fe94f82b,
    0xa54ff53a5f1d36f1,
    0x510e527fade682d1,
    0x9b05688c2b3e6c1f,
    0x1f83d9abfb41bd6b,
    0x5be0cd19137e2179,
];
#[cfg(all(
any(target_arch = "x86", target_arch = "x86_64"),
target_feature = "avx2",
not(feature = "force-soft")
))]
#[path = "./x86/mod.rs"]
mod platform;
// #[cfg(all(target_arch = "aarch64", target_feature = "crypto"))]
// #[path = "./aarch64.rs"]
// mod platform;
#[cfg(any(
not(any(
all(
any(target_arch = "x86", target_arch = "x86_64"),
target_feature = "avx2",
),
all(target_arch = "aarch64", target_feature = "crypto")
)),
feature = "force-soft"
))]
#[path = "./generic.rs"]
mod platform;
// // #[path = "./generic.rs"]
// #[path = "./x86/mod.rs"]
// mod platform;
pub use self::platform::*;
/// BLAKE2b-224: one-shot hash of `data`, returning a 28-byte digest.
pub fn blake2b_224<T: AsRef<[u8]>>(data: T) -> [u8; Blake2b224::DIGEST_LEN] {
    Blake2b224::oneshot(data)
}
/// BLAKE2b-256: one-shot hash of `data`, returning a 32-byte digest.
pub fn blake2b_256<T: AsRef<[u8]>>(data: T) -> [u8; Blake2b256::DIGEST_LEN] {
    Blake2b256::oneshot(data)
}
/// BLAKE2b-384: one-shot hash of `data`, returning a 48-byte digest.
pub fn blake2b_384<T: AsRef<[u8]>>(data: T) -> [u8; Blake2b384::DIGEST_LEN] {
    Blake2b384::oneshot(data)
}
/// BLAKE2b-512: one-shot hash of `data`, returning a 64-byte digest.
pub fn blake2b_512<T: AsRef<[u8]>>(data: T) -> [u8; Blake2b512::DIGEST_LEN] {
    Blake2b512::oneshot(data)
}
/// BLAKE2b-224
#[derive(Clone)]
pub struct Blake2b224 {
    inner: Blake2b,
}
impl Blake2b224 {
    pub const BLOCK_LEN: usize = Blake2b::BLOCK_LEN;
    pub const DIGEST_LEN: usize = 28;
    /// Creates an unkeyed hasher using the pre-mixed BLAKE2b-224 IV.
    #[inline]
    pub fn new() -> Self {
        Self { inner: Blake2b::new(BLAKE2B_224_IV, b"") }
    }
    /// Absorbs `data` into the hash state; may be called repeatedly.
    #[inline]
    pub fn update(&mut self, data: &[u8]) {
        self.inner.update(data)
    }
    /// Consumes the hasher and returns the first 28 bytes of the full digest.
    #[inline]
    pub fn finalize(self) -> [u8; Self::DIGEST_LEN] {
        let full = self.inner.finalize();
        let mut digest = [0u8; Self::DIGEST_LEN];
        digest.copy_from_slice(&full[..Self::DIGEST_LEN]);
        digest
    }
    /// One-shot convenience: hashes `data` in a single call.
    #[inline]
    pub fn oneshot<T: AsRef<[u8]>>(data: T) -> [u8; Self::DIGEST_LEN] {
        let full = Blake2b::oneshot_hash(BLAKE2B_224_IV, data);
        let mut digest = [0u8; Self::DIGEST_LEN];
        digest.copy_from_slice(&full[..Self::DIGEST_LEN]);
        digest
    }
}
/// BLAKE2b-256
#[derive(Clone)]
pub struct Blake2b256 {
    inner: Blake2b,
}
impl Blake2b256 {
    pub const BLOCK_LEN: usize = Blake2b::BLOCK_LEN;
    pub const DIGEST_LEN: usize = 32;
    /// Creates an unkeyed hasher using the pre-mixed BLAKE2b-256 IV.
    #[inline]
    pub fn new() -> Self {
        Self { inner: Blake2b::new(BLAKE2B_256_IV, b"") }
    }
    /// Absorbs `data` into the hash state; may be called repeatedly.
    #[inline]
    pub fn update(&mut self, data: &[u8]) {
        self.inner.update(data)
    }
    /// Consumes the hasher and returns the first 32 bytes of the full digest.
    #[inline]
    pub fn finalize(self) -> [u8; Self::DIGEST_LEN] {
        let full = self.inner.finalize();
        let mut digest = [0u8; Self::DIGEST_LEN];
        digest.copy_from_slice(&full[..Self::DIGEST_LEN]);
        digest
    }
    /// One-shot convenience: hashes `data` in a single call.
    #[inline]
    pub fn oneshot<T: AsRef<[u8]>>(data: T) -> [u8; Self::DIGEST_LEN] {
        let full = Blake2b::oneshot_hash(BLAKE2B_256_IV, data);
        let mut digest = [0u8; Self::DIGEST_LEN];
        digest.copy_from_slice(&full[..Self::DIGEST_LEN]);
        digest
    }
}
/// BLAKE2b-384
#[derive(Clone)]
pub struct Blake2b384 {
    inner: Blake2b,
}
impl Blake2b384 {
    pub const BLOCK_LEN: usize = Blake2b::BLOCK_LEN;
    pub const DIGEST_LEN: usize = 48;
    /// Creates an unkeyed hasher using the pre-mixed BLAKE2b-384 IV.
    #[inline]
    pub fn new() -> Self {
        Self { inner: Blake2b::new(BLAKE2B_384_IV, b"") }
    }
    /// Absorbs `data` into the hash state; may be called repeatedly.
    #[inline]
    pub fn update(&mut self, data: &[u8]) {
        self.inner.update(data)
    }
    /// Consumes the hasher and returns the first 48 bytes of the full digest.
    #[inline]
    pub fn finalize(self) -> [u8; Self::DIGEST_LEN] {
        let full = self.inner.finalize();
        let mut digest = [0u8; Self::DIGEST_LEN];
        digest.copy_from_slice(&full[..Self::DIGEST_LEN]);
        digest
    }
    /// One-shot convenience: hashes `data` in a single call.
    #[inline]
    pub fn oneshot<T: AsRef<[u8]>>(data: T) -> [u8; Self::DIGEST_LEN] {
        let full = Blake2b::oneshot_hash(BLAKE2B_384_IV, data);
        let mut digest = [0u8; Self::DIGEST_LEN];
        digest.copy_from_slice(&full[..Self::DIGEST_LEN]);
        digest
    }
}
/// BLAKE2b-512
#[derive(Clone)]
pub struct Blake2b512 {
    inner: Blake2b,
}
impl Blake2b512 {
    pub const BLOCK_LEN: usize = Blake2b::BLOCK_LEN;
    pub const DIGEST_LEN: usize = 64;
    /// Creates an unkeyed hasher using the pre-mixed BLAKE2b-512 IV.
    #[inline]
    pub fn new() -> Self {
        Self { inner: Blake2b::new(BLAKE2B_512_IV, b"") }
    }
    /// Absorbs `data` into the hash state; may be called repeatedly.
    #[inline]
    pub fn update(&mut self, data: &[u8]) {
        self.inner.update(data)
    }
    /// Consumes the hasher and returns the full 64-byte digest (no truncation
    /// needed at this length).
    #[inline]
    pub fn finalize(self) -> [u8; Self::DIGEST_LEN] {
        self.inner.finalize()
    }
    /// One-shot convenience: hashes `data` in a single call.
    #[inline]
    pub fn oneshot<T: AsRef<[u8]>>(data: T) -> [u8; Self::DIGEST_LEN] {
        Blake2b::oneshot_hash(BLAKE2B_512_IV, data)
    }
}
#[test]
fn test_blake2b() {
    use crate::encoding::hex;
    // Appendix A. Example of BLAKE2b Computation
    // https://datatracker.ietf.org/doc/html/rfc7693#appendix-A
    assert_eq!(
        &blake2b_512(b"abc"),
        &[
            0xBA, 0x80, 0xA5, 0x3F, 0x98, 0x1C, 0x4D, 0x0D, 0x6A, 0x27, 0x97, 0xB6, 0x9F, 0x12,
            0xF6, 0xE9, 0x4C, 0x21, 0x2F, 0x14, 0x68, 0x5A, 0xC4, 0xB7, 0x4B, 0x12, 0xBB, 0x6F,
            0xDB, 0xFF, 0xA2, 0xD1, 0x7D, 0x87, 0xC5, 0x39, 0x2A, 0xAB, 0x79, 0x2D, 0xC2, 0x52,
            0xD5, 0xDE, 0x45, 0x33, 0xCC, 0x95, 0x18, 0xD3, 0x8A, 0xA8, 0xDB, 0xF1, 0x92, 0x5A,
            0xB9, 0x23, 0x86, 0xED, 0xD4, 0x00, 0x99, 0x23,
        ]
    );
    // Example digests
    // https://en.wikipedia.org/wiki/BLAKE_(hash_function)#Example_digests
    // Empty-input digests for the 384- and 512-bit variants.
    assert_eq!(
        &blake2b_384(b""),
        &hex::decode(
            "b32811423377f52d7862286ee1a72ee5\
            40524380fda1724a6f25d7978c6fd324\
            4a6caf0498812673c5e05ef583825100"
        )
        .unwrap()[..]
    );
    assert_eq!(
        &blake2b_512(b""),
        &hex::decode(
            "786a02f742015903c6c6fd852552d272\
            912f4740e15847618a86e217f71f5419\
            d25e1031afee585313896444934eb04b\
            903a685b1448b755d56f701afe9be2ce"
        )
        .unwrap()[..]
    );
    assert_eq!(
        &blake2b_512(b"The quick brown fox jumps over the lazy dog"),
        &hex::decode(
            "a8add4bdddfd93e4877d2746e62817b1\
            16364a1fa7bc148d95090bc7333b3673\
            f82401cf7aa2e4cb1ecd90296e3f14cb\
            5413f8ed77be73045b13914cdcd6a918"
        )
        .unwrap()[..]
    );
    // Avalanche check: a one-letter change ("dog" -> "dof") produces a
    // completely different digest.
    assert_eq!(
        &blake2b_512(b"The quick brown fox jumps over the lazy dof"),
        &hex::decode(
            "ab6b007747d8068c02e25a6008db8a77\
            c218d94f3b40d2291a7dc8a62090a744\
            c082ea27af01521a102e42f480a31e98\
            44053f456b4b41e8aa78bbe5c12957bb"
        )
        .unwrap()[..]
    );
}
|
#[doc = "Register `TCR` reader"]
pub type R = crate::R<TCR_SPEC>;
#[doc = "Register `TCR` writer"]
pub type W = crate::W<TCR_SPEC>;
#[doc = "Field `DCYC` reader - Number of dummy cycles"]
pub type DCYC_R = crate::FieldReader;
#[doc = "Field `DCYC` writer - Number of dummy cycles"]
// FieldWriterSafe: every value of the 5-bit field is valid, so writing it
// needs no `unsafe`.
pub type DCYC_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 5, O>;
#[doc = "Field `DHQC` reader - Delay hold quarter cycle"]
pub type DHQC_R = crate::BitReader<DHQC_A>;
#[doc = "Delay hold quarter cycle\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DHQC_A {
    #[doc = "0: No delay hold"]
    NoDelay = 0,
    #[doc = "1: 1/4 cycle hold"]
    QuarterCycleHold = 1,
}
impl From<DHQC_A> for bool {
    #[inline(always)]
    fn from(variant: DHQC_A) -> Self {
        // Discriminants are 0/1: NoDelay -> false, QuarterCycleHold -> true.
        variant as u8 != 0
    }
}
impl DHQC_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DHQC_A {
        // The raw bit maps directly onto the two enum variants.
        if self.bits {
            DHQC_A::QuarterCycleHold
        } else {
            DHQC_A::NoDelay
        }
    }
    #[doc = "No delay hold"]
    #[inline(always)]
    pub fn is_no_delay(&self) -> bool {
        matches!(self.variant(), DHQC_A::NoDelay)
    }
    #[doc = "1/4 cycle hold"]
    #[inline(always)]
    pub fn is_quarter_cycle_hold(&self) -> bool {
        matches!(self.variant(), DHQC_A::QuarterCycleHold)
    }
}
#[doc = "Field `DHQC` writer - Delay hold quarter cycle"]
pub type DHQC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DHQC_A>;
impl<'a, REG, const O: u8> DHQC_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    // Named setters delegating to `variant`, for readable write closures.
    #[doc = "No delay hold"]
    #[inline(always)]
    pub fn no_delay(self) -> &'a mut crate::W<REG> {
        self.variant(DHQC_A::NoDelay)
    }
    #[doc = "1/4 cycle hold"]
    #[inline(always)]
    pub fn quarter_cycle_hold(self) -> &'a mut crate::W<REG> {
        self.variant(DHQC_A::QuarterCycleHold)
    }
}
#[doc = "Field `SSHIFT` reader - Sample shift"]
pub type SSHIFT_R = crate::BitReader<SSHIFT_A>;
#[doc = "Sample shift\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SSHIFT_A {
    #[doc = "0: No shift"]
    NoShift = 0,
    #[doc = "1: 1/2 cycle shift"]
    HalfCycleShift = 1,
}
impl From<SSHIFT_A> for bool {
    #[inline(always)]
    fn from(variant: SSHIFT_A) -> Self {
        // Discriminants are 0/1: NoShift -> false, HalfCycleShift -> true.
        variant as u8 != 0
    }
}
impl SSHIFT_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SSHIFT_A {
        // The raw bit maps directly onto the two enum variants.
        if self.bits {
            SSHIFT_A::HalfCycleShift
        } else {
            SSHIFT_A::NoShift
        }
    }
    #[doc = "No shift"]
    #[inline(always)]
    pub fn is_no_shift(&self) -> bool {
        matches!(self.variant(), SSHIFT_A::NoShift)
    }
    #[doc = "1/2 cycle shift"]
    #[inline(always)]
    pub fn is_half_cycle_shift(&self) -> bool {
        matches!(self.variant(), SSHIFT_A::HalfCycleShift)
    }
}
#[doc = "Field `SSHIFT` writer - Sample shift"]
pub type SSHIFT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SSHIFT_A>;
impl<'a, REG, const O: u8> SSHIFT_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    // Named setters delegating to `variant`, for readable write closures.
    #[doc = "No shift"]
    #[inline(always)]
    pub fn no_shift(self) -> &'a mut crate::W<REG> {
        self.variant(SSHIFT_A::NoShift)
    }
    #[doc = "1/2 cycle shift"]
    #[inline(always)]
    pub fn half_cycle_shift(self) -> &'a mut crate::W<REG> {
        self.variant(SSHIFT_A::HalfCycleShift)
    }
}
// Read-side accessors: extract each field from the cached register value.
impl R {
    #[doc = "Bits 0:4 - Number of dummy cycles"]
    #[inline(always)]
    pub fn dcyc(&self) -> DCYC_R {
        DCYC_R::new((self.bits & 0x1f) as u8)
    }
    #[doc = "Bit 28 - Delay hold quarter cycle"]
    #[inline(always)]
    pub fn dhqc(&self) -> DHQC_R {
        DHQC_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 30 - Sample shift"]
    #[inline(always)]
    pub fn sshift(&self) -> SSHIFT_R {
        SSHIFT_R::new(((self.bits >> 30) & 1) != 0)
    }
}
// Write-side accessors: each returns a typed proxy positioned at the field's
// bit offset (the const generic parameter).
impl W {
    #[doc = "Bits 0:4 - Number of dummy cycles"]
    #[inline(always)]
    #[must_use]
    pub fn dcyc(&mut self) -> DCYC_W<TCR_SPEC, 0> {
        DCYC_W::new(self)
    }
    #[doc = "Bit 28 - Delay hold quarter cycle"]
    #[inline(always)]
    #[must_use]
    pub fn dhqc(&mut self) -> DHQC_W<TCR_SPEC, 28> {
        DHQC_W::new(self)
    }
    #[doc = "Bit 30 - Sample shift"]
    #[inline(always)]
    #[must_use]
    pub fn sshift(&mut self) -> SSHIFT_W<TCR_SPEC, 30> {
        SSHIFT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "timing configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct TCR_SPEC;
impl crate::RegisterSpec for TCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`tcr::R`](R) reader structure"]
impl crate::Readable for TCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`tcr::W`](W) writer structure"]
impl crate::Writable for TCR_SPEC {
    // No write-1-to-clear / write-0-to-modify fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets TCR to value 0"]
impl crate::Resettable for TCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use core::{fmt::Write, panic};
// use cortex_m::{itm::write_fmt, peripheral::mpu};
// use nb::block;
use stm32f1xx_hal::prelude::*;
use stm32f1xx_hal::serial::*;
use stm32f1xx_hal::rcc;
use stm32f1xx_hal::pac::USART2;
use stm32f1xx_hal::afio::MAPR;
use stm32f1xx_hal::dma::dma1::Channels;
use stm32f1xx_hal::dma;
use stm32f1xx_hal::gpio::gpioa::{PA2, PA3};
use stm32f1xx_hal::gpio::{Alternate, PushPull, Input, Floating};
// use embedded_hal::digital::v2::{OutputPin, InputPin};
use cortex_m::singleton;
pub static BAUDRATE: u32 = 19200;
use crate::motion_control;
use crate::mpu6050;
/// Controller parameters tunable at runtime over the Bluetooth link.
pub struct Pars {
    // Mechanical zero offset of the measured tilt angle (units as used by
    // `mpu6050` — TODO confirm degrees vs radians).
    pub angle_offset: f32,
    // Velocity-loop proportional gain.
    pub v_kp: f32,
    // Velocity-loop derivative gain.
    pub v_kd: f32,
    // Proportional gain of the second loop (presumably balance/turn —
    // verify against `motion_control`).
    pub b_kp: f32,
    // Integral gain of the second loop.
    pub b_ki: f32
}
impl Pars {
    /// Build a parameter set initialised from the compile-time defaults
    /// declared in the `mpu6050` and `motion_control` modules.
    pub fn new() -> Pars {
        Self {
            angle_offset: mpu6050::ANGLE_OFFSET,
            v_kp: motion_control::V_KP,
            v_kd: motion_control::V_KD,
            b_kp: motion_control::B_KP,
            b_ki: motion_control::B_KI,
        }
    }
}
/// Driver state for the HC-05 Bluetooth transceiver on USART2:
/// a TX handle for outgoing telemetry frames, a circular DMA buffer of
/// incoming 7-byte command frames, and references to the shared data that
/// the telemetry reports / the commands mutate.
pub struct HC05<'a> {
    pub tx: Tx<USART2>,
    // Double-buffered circular DMA receive buffer (2 x 7 bytes).
    pub rx_circbuf: dma::CircBuffer<[u8; 7], RxDma2>,
    mpu6050_data: &'a mpu6050::Data,
    mc_data: &'a motion_control::Data,
    pub pars: &'a mut Pars
}
impl<'a> HC05<'a> {
pub fn init(
usart: USART2,
txpin: PA2<Alternate<PushPull>>,
rxpin: PA3<Input<Floating>>,
mapr: &mut MAPR,
clocks: rcc::Clocks,
channels: Channels,
pars: &'a mut Pars,
mpu6050_data: &'a mpu6050::Data,
mc_data: &'a motion_control::Data
) -> Self {
let serial = Serial::usart2(
usart,
(txpin, rxpin),
mapr,
Config::default().baudrate(BAUDRATE.bps()),
clocks,
);
let (tx, rx) = serial.split();
let rx = rx.with_dma(channels.6);
let buf= singleton!(: [[u8; 7]; 2] = [[0; 7]; 2]).unwrap();
let rx_circbuf = rx.circ_read(buf);
HC05 {
tx,
rx_circbuf,
mpu6050_data,
mc_data,
pars
}
}
pub fn send_packets(&mut self) {
let angle_buf = f32_to_u8(self.mpu6050_data.angle);
let angle_i_buf = f32_to_u8(self.mc_data.angle_i);
let v_kp_buf = f32_to_u8(self.pars.v_kp);
let v_kd_buf = f32_to_u8(self.pars.v_kd);
let b_kp_buf = f32_to_u8(self.pars.b_kp);
let b_ki_buf = f32_to_u8(self.pars.b_ki);
let mut check: u32 = 0;
for i in 0..4 {
check += angle_buf[i] as u32;
check += angle_i_buf[i] as u32;
check += v_kp_buf[i] as u32;
check += v_kd_buf[i] as u32;
check += b_kp_buf[i] as u32;
check += b_ki_buf[i] as u32;
}
let buffer: [u8; 27] = [
0xA5,
angle_buf[0], angle_buf[1], angle_buf[2], angle_buf[3],
angle_i_buf[0], angle_i_buf[1], angle_i_buf[2], angle_i_buf[3],
v_kp_buf[0], v_kp_buf[1], v_kp_buf[2], v_kp_buf[3],
v_kd_buf[0], v_kd_buf[1], v_kd_buf[2], v_kd_buf[3],
b_kp_buf[0], b_kp_buf[1], b_kp_buf[2], b_kp_buf[3],
b_ki_buf[0], b_ki_buf[1], b_ki_buf[2], b_ki_buf[3],
get_the_lowest_byte(check),
0x5A
];
let str = unsafe {
core::intrinsics::transmute::<&[u8], &str>(&buffer)
};
self.tx.write_str(str).ok();
}
// pub fn fuck(&mut self) {
// let buf= singleton!(: [u8; 8] = [0; 8]).unwrap();
// self.rx.read(buf);
// }
pub fn packets_analyse(&mut self) {
let data = self.rx_circbuf.peek(|half, _| *half).unwrap();
if let Ok(_) = data_check(&data) {
if data[1] == 1 {
self.pars.angle_offset += 0.002;
} else if data[1] == 2 {
self.pars.angle_offset -= 0.002;
}
if data[2] == 1 {
self.pars.v_kp += 0.02;
} else if data[2] == 2 {
self.pars.v_kp -= 0.02;
} else if data[2] == 3 {
self.pars.v_kd += 0.02;
} else if data[2] == 4 {
self.pars.v_kd -= 0.02;
}
if data[3] == 1 {
self.pars.b_kp += 5.0;
} else if data[3] == 2 {
self.pars.b_kp -= 5.0;
}
if data[4] == 1 {
self.pars.b_ki += 2.0;
} else if data[4] == 2 {
self.pars.b_ki -= 2.0;
}
}
}
}
pub fn get_half(result: Result<dma::Half, dma::Error>) -> dma::Half{
match result {
Ok(h) => h,
Err(_) => dma::Half::Second
}
}
/// Reinterpret an `f32` as its 4 raw bytes in native byte order.
///
/// The previous implementation walked the value through raw pointers; the
/// safe `to_ne_bytes` (available in `core`) produces exactly the same bytes.
fn f32_to_u8(num: f32) -> [u8; 4] {
    num.to_ne_bytes()
}
/// Return the least-significant byte of `num`.
///
/// The previous implementation read the first byte of the value through a
/// raw pointer, which yields the lowest byte only on little-endian targets;
/// a plain truncating cast is safe, portable, and matches the function name.
fn get_the_lowest_byte(num: u32) -> u8 {
    num as u8
}
pub fn data_check(data: &[u8;7]) -> Result<u8,u8> {
if (data[0] != 0xA5) || (data[6] != 0x5A) {
return Err(1);
}
let check: u32 = (data[1] as u32) + (data[2] as u32) + (data[3] as u32) + (data[4] as u32);
if get_the_lowest_byte(check) != data[5] {
return Err(2);
}
Ok(1)
} |
#[macro_use]
extern crate cascade;
mod app;
mod background;
mod utils;
mod widgets;
use self::app::App;
use gio::prelude::*;
// Stable slotmap key identifying a single task in the task store.
slotmap::new_key_type! {
    pub struct TaskEntity;
}
// Messages exchanged between the UI thread and the background worker.
pub enum Event {
    // Insert a task, identified by its key.
    Insert(TaskEntity),
    // A previous task list has been fetched from a file on a background
    // thread; display it in the UI.
    Load(String), // SUGGESTION -> make this a Path
    // An entry (task) was modified; it should be saved at some point.
    Modified,
    // Remove a task identified by its key. NOTE(review): author questioned
    // whether this variant makes sense — confirm against the handlers.
    Remove(TaskEntity),
    // Collect the text from each task and pass it to a background thread to
    // save to a file. (Author suggestion: rename to SaveToDisk.)
    SyncToDisk,
    // The window has been closed; clean up and quit.
    Closed,
    Delete,
    Toggled(bool),
    // The state has been saved to disk and it is safe to quit.
    Quit,
}
// Reverse-DNS application id registered with GTK/GIO.
pub const APP_ID: &str = "me.mutuku.todo";
/// Entry point: register the GTK application, wire the UI/background event
/// channels, and run the main loop.
fn main() {
    let app_name = "gtk-todo";
    // `set_program_name` takes an `Option<&str>`; `.into()` wraps in `Some`.
    glib::set_program_name(app_name.into());
    glib::set_application_name(app_name);
    // Init the gtk application and register the app id.
    let app = gtk::Application::new(APP_ID.into(), Default::default());
    // After the app has been registered it will trigger an activate signal,
    // which gives us the okay to construct our App and set up the app logic.
    app.connect_activate(|app| {
        // Channel for UI events, consumed on the main thread.
        let (tx, rx) = async_channel::unbounded();
        // Channel for background events, consumed on the background thread.
        let (btx, brx) = async_channel::unbounded();
        // Hand a clone of the UI sender (tx) and ownership of the background
        // receiver (brx) to a dedicated worker thread.
        std::thread::spawn(glib::clone!(@strong tx => move || {
            // Fetch the executor registered for this thread…
            utils::thread_context()
                // …and block it on the background event-loop future.
                .block_on(background::run(tx, brx));
        }));
        let mut app = app::App::new(app, tx, btx);
        let event_handler = async move {
            while let Ok(event) = rx.recv().await {
                match event {
                    // Events are arranged in the order they are most likely
                    // to occur, most frequent first.
                    Event::Modified => app.modified(),
                    Event::Insert(entity) => app.insert(entity),
                    Event::Remove(entity) => app.remove(entity),
                    Event::SyncToDisk => app.sync_to_disk().await,
                    // NOTE(review): the next three variants are currently
                    // no-ops; the intended handlers are kept for reference.
                    Event::Toggled(active) => (),
                    // Event::Toggled(active) => app.toggled(active),
                    Event::Delete => (),
                    // Event::Delete => app.delete(),
                    Event::Load(data) => app.load(data),
                    Event::Closed => app.closed().await,
                    Event::Quit => (),
                    // Event::Quit => app.quit(),
                }
            }
        };
        utils::spawn(event_handler);
    });
    // NOTE(review): author flagged this call ("problem here too") — verify
    // the argument signature expected by this gtk-rs version.
    app.run();
}
|
use std::collections::HashMap;
/// Memoising wrapper around a pure function `T: Fn(U) -> V`: each distinct
/// argument is computed once and cached in a `HashMap`.
struct Cacher<T, U, V>
where
    T: Fn(U) -> V,
{
    // The wrapped computation.
    calculation: T,
    // Cache of previously computed results, keyed by argument.
    values: HashMap<U, V>,
}
impl<T, U, V> Cacher<T, U, V>
where
    T: Fn(U) -> V,
    U: std::hash::Hash + Eq + Copy,
    V: Copy,
{
    /// Wrap `calculation` with an empty memoisation table.
    fn new(calculation: T) -> Cacher<T, U, V> {
        Self {
            calculation,
            values: HashMap::new(),
        }
    }

    /// Return `calculation(arg)`, computing it only on the first call for a
    /// given `arg` and serving a cached copy afterwards.
    fn value(&mut self, arg: U) -> V {
        // The entry API performs a single hash lookup where the previous
        // `get` + `insert` pair hashed `arg` twice on every cache miss.
        // Borrowing the closure through a separate binding keeps the two
        // field borrows (`values` mutable, `calculation` shared) disjoint.
        let calculation = &self.calculation;
        *self.values.entry(arg).or_insert_with(|| calculation(arg))
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// A cached result for one key must not shadow lookups of another key.
    #[test]
    fn call_with_different_values() {
        let mut cache = Cacher::new(|n| n);
        cache.value(1);
        let second = cache.value(2);
        assert_eq!(second, 2);
    }

    /// The cache is fully generic: it works for any hashable key type and
    /// any copyable value type.
    #[test]
    fn call_with_different_types() {
        let mut char_cache = Cacher::new(|c: char| c.is_ascii_alphanumeric());
        assert_eq!(char_cache.value('a'), true);

        let mut len_cache = Cacher::new(|s: &str| s.len());
        assert_eq!(len_cache.value("hello"), 5);
    }
}
|
// Problem 7 - 10001st prime
//
// By listing the first six prime numbers: 2, 3, 5, 7, 11, and 13, we can see
// that the 6th prime is 13.
//
// What is the 10001st prime number?
/// Print the answer to Project Euler problem 7.
fn main() {
    let answer = solution();
    println!("{}", answer);
}
/// Return the 10001st prime by filtering the naturals from 2 upward.
///
/// `Iterator::nth(10000)` (0-based) is the idiomatic single-call equivalent
/// of the previous `skip(10000).take(1).next()` chain.
fn solution() -> i32 {
    (2..).filter(|&n| is_prime(n)).nth(10_000).unwrap()
}
/// Trial-division primality test over divisors up to floor(sqrt(n)).
///
/// Fixed: the previous version returned `true` for 0, 1 and negative inputs
/// because the divisor loop never runs for them (for negative n the sqrt is
/// NaN, which casts to 0). Unaffected callers start at 2.
fn is_prime(n: i32) -> bool {
    if n < 2 {
        return false;
    }
    let isqrn = (n as f64).sqrt().floor() as i32;
    (2..=isqrn).all(|k| n % k != 0)
}
|
--- cargo-crates/cpal-0.10.0/src/host/alsa/mod.rs.orig 2019-07-05 18:30:36 UTC
+++ cargo-crates/cpal-0.10.0/src/host/alsa/mod.rs
@@ -34,7 +34,7 @@ pub type SupportedOutputFormats = VecInt
mod enumerate;
-/// The default linux and freebsd host type.
+/// The default linux, dragonfly and freebsd host type.
#[derive(Debug)]
pub struct Host;
@@ -50,7 +50,7 @@ impl HostTrait for Host {
type EventLoop = EventLoop;
fn is_available() -> bool {
- // Assume ALSA is always available on linux/freebsd.
+ // Assume ALSA is always available on linux/dragonfly/freebsd.
true
}
|
use rocket::{ Request };
use rocket::http::{ Status };
use crate::api::server::{ SimpleResponse, JsonResponse };
#[catch(404)]
pub fn not_found(_: &Request) -> JsonResponse {
JsonResponse::parse_new(Status::NotFound, SimpleResponse {
message: "Method not found!"
})
} |
use image::{ImageBuffer, Rgba};
use std::sync::Arc;
use vulkano::buffer::{BufferUsage, CpuAccessibleBuffer};
use vulkano::command_buffer::{AutoCommandBufferBuilder, CommandBuffer};
use vulkano::device::{Device, Queue};
use vulkano::format::ClearValue;
use vulkano::format::Format;
use vulkano::image::{Dimensions, StorageImage};
use vulkano::sync::GpuFuture;
/// Create a 1024x1024 RGBA storage image on the GPU, clear it to opaque
/// cyan, copy it back into a host-visible buffer, and save it as
/// `image.png` in the working directory.
///
/// Panics (`unwrap`/`expect`) on any Vulkan or I/O failure — acceptable for
/// this example-style code.
pub fn image_clear_and_save(device: Arc<Device>, queue: Arc<Queue>) {
    // GPU-side destination image.
    let image = StorageImage::new(
        device.clone(),
        Dimensions::Dim2d {
            width: 1024,
            height: 1024,
        },
        Format::R8G8B8A8Unorm,
        Some(queue.family()),
    )
    .unwrap();
    // Host-visible staging buffer sized for 1024*1024 RGBA bytes.
    let image_dest_buffer = CpuAccessibleBuffer::from_iter(
        device.clone(),
        BufferUsage::all(),
        false,
        (0..1024 * 1024 * 4).map(|_| 0u8),
    )
    .expect("failed to create image_dest_buffer");
    // Record: clear to (R=0, G=1, B=1, A=1), then copy image -> buffer.
    let image_clear_cmd_buffer = AutoCommandBufferBuilder::new(device.clone(), queue.family())
        .unwrap()
        .clear_color_image(image.clone(), ClearValue::Float([0.0, 1.0, 1.0, 1.0]))
        .unwrap()
        .copy_image_to_buffer(image.clone(), image_dest_buffer.clone())
        .unwrap()
        .build()
        .unwrap();
    // Submit and block until the GPU signals completion; the buffer must not
    // be read before the fence fires.
    let image_finished = image_clear_cmd_buffer.execute(queue.clone()).unwrap();
    image_finished
        .then_signal_fence_and_flush()
        .unwrap()
        .wait(None)
        .unwrap();
    // Reinterpret the raw bytes as an RGBA image and write it to disk.
    let image_buffer_data = image_dest_buffer.read().unwrap();
    let image = ImageBuffer::<Rgba<u8>, _>::from_raw(1024, 1024, &image_buffer_data[..]).unwrap();
    image.save("image.png").unwrap();
}
|
extern crate advent_of_code_2019;
extern crate aoc_runner_derive;
extern crate aoc_runner;
extern crate failure;
use aoc_runner_derive::aoc_main;
aoc_main! { lib = advent_of_code_2019 } |
use alga::general::{AbstractMagma, AbstractGroup, AbstractLoop, AbstractMonoid, AbstractQuasigroup,
AbstractSemigroup, Real, Inverse, Multiplicative, Identity, Id};
use alga::linear::{Transformation, Similarity, AffineTransformation, DirectIsometry, Isometry,
Rotation, ProjectiveTransformation};
use core::ColumnVector;
use core::dimension::{DimName, U1};
use core::storage::OwnedStorage;
use core::allocator::OwnedAllocator;
use geometry::{IsometryBase, TranslationBase, PointBase};
/*
*
* Algebraic structures.
*
*/
// Identity element of isometry composition is the identity isometry.
impl<N, D: DimName, S, R> Identity<Multiplicative> for IsometryBase<N, D, S, R>
    where N: Real,
          S: OwnedStorage<N, D, U1>,
          R: Rotation<PointBase<N, D, S>>,
          S::Alloc: OwnedAllocator<N, D, U1, S> {
    #[inline]
    fn identity() -> Self {
        // Delegates to the inherent constructor of the same name.
        Self::identity()
    }
}
// Multiplicative inverse: delegates to the inherent inverse methods.
impl<N, D: DimName, S, R> Inverse<Multiplicative> for IsometryBase<N, D, S, R>
    where N: Real,
          S: OwnedStorage<N, D, U1>,
          R: Rotation<PointBase<N, D, S>>,
          S::Alloc: OwnedAllocator<N, D, U1, S> {
    #[inline]
    fn inverse(&self) -> Self {
        self.inverse()
    }
    #[inline]
    fn inverse_mut(&mut self) {
        self.inverse_mut()
    }
}
// The magma operation is isometry composition (operator `*`).
impl<N, D: DimName, S, R> AbstractMagma<Multiplicative> for IsometryBase<N, D, S, R>
    where N: Real,
          S: OwnedStorage<N, D, U1>,
          R: Rotation<PointBase<N, D, S>>,
          S::Alloc: OwnedAllocator<N, D, U1, S> {
    #[inline]
    fn operate(&self, rhs: &Self) -> Self {
        self * rhs
    }
}
// Marker-trait impls share identical bounds; this macro stamps them out.
macro_rules! impl_multiplicative_structures(
    ($($marker: ident<$operator: ident>),* $(,)*) => {$(
        impl<N, D: DimName, S, R> $marker<$operator> for IsometryBase<N, D, S, R>
            where N: Real,
                  S: OwnedStorage<N, D, U1>,
                  R: Rotation<PointBase<N, D, S>>,
                  S::Alloc: OwnedAllocator<N, D, U1, S> { }
    )*}
);
// Isometries form a group under composition; declare the whole tower.
impl_multiplicative_structures!(
    AbstractSemigroup<Multiplicative>,
    AbstractMonoid<Multiplicative>,
    AbstractQuasigroup<Multiplicative>,
    AbstractLoop<Multiplicative>,
    AbstractGroup<Multiplicative>
);
/*
 *
 * Transformation groups.
 *
 */
// Applying an isometry to points and vectors (vectors ignore translation).
impl<N, D: DimName, S, R> Transformation<PointBase<N, D, S>> for IsometryBase<N, D, S, R>
    where N: Real,
          S: OwnedStorage<N, D, U1>,
          R: Rotation<PointBase<N, D, S>>,
          S::Alloc: OwnedAllocator<N, D, U1, S> {
    #[inline]
    fn transform_point(&self, pt: &PointBase<N, D, S>) -> PointBase<N, D, S> {
        self * pt
    }
    #[inline]
    fn transform_vector(&self, v: &ColumnVector<N, D, S>) -> ColumnVector<N, D, S> {
        self * v
    }
}
// Inverse application without materialising the inverse isometry:
// un-translate then un-rotate for points; vectors only need un-rotation.
impl<N, D: DimName, S, R> ProjectiveTransformation<PointBase<N, D, S>> for IsometryBase<N, D, S, R>
    where N: Real,
          S: OwnedStorage<N, D, U1>,
          R: Rotation<PointBase<N, D, S>>,
          S::Alloc: OwnedAllocator<N, D, U1, S> {
    #[inline]
    fn inverse_transform_point(&self, pt: &PointBase<N, D, S>) -> PointBase<N, D, S> {
        self.rotation.inverse_transform_point(&(pt - &self.translation.vector))
    }
    #[inline]
    fn inverse_transform_vector(&self, v: &ColumnVector<N, D, S>) -> ColumnVector<N, D, S> {
        self.rotation.inverse_transform_vector(v)
    }
}
// An isometry is an affine transformation with no scaling component (`Id`).
impl<N, D: DimName, S, R> AffineTransformation<PointBase<N, D, S>> for IsometryBase<N, D, S, R>
    where N: Real,
          S: OwnedStorage<N, D, U1>,
          R: Rotation<PointBase<N, D, S>>,
          S::Alloc: OwnedAllocator<N, D, U1, S> {
    type Rotation = R;
    type NonUniformScaling = Id;
    type Translation = TranslationBase<N, D, S>;
    #[inline]
    fn decompose(&self) -> (TranslationBase<N, D, S>, R, Id, R) {
        (self.translation.clone(), self.rotation.clone(), Id::new(), R::identity())
    }
    #[inline]
    fn append_translation(&self, t: &Self::Translation) -> Self {
        t * self
    }
    #[inline]
    fn prepend_translation(&self, t: &Self::Translation) -> Self {
        self * t
    }
    #[inline]
    fn append_rotation(&self, r: &Self::Rotation) -> Self {
        // Appending a rotation also rotates the translation part.
        let shift = r.transform_vector(&self.translation.vector);
        IsometryBase::from_parts(TranslationBase::from_vector(shift), r.clone() * self.rotation.clone())
    }
    #[inline]
    fn prepend_rotation(&self, r: &Self::Rotation) -> Self {
        self * r
    }
    #[inline]
    fn append_scaling(&self, _: &Self::NonUniformScaling) -> Self {
        // The only scaling is the identity, so this is a no-op clone.
        self.clone()
    }
    #[inline]
    fn prepend_scaling(&self, _: &Self::NonUniformScaling) -> Self {
        self.clone()
    }
    #[inline]
    fn append_rotation_wrt_point(&self, r: &Self::Rotation, p: &PointBase<N, D, S>) -> Option<Self> {
        // Always succeeds for isometries; the Option is imposed by the trait.
        let mut res = self.clone();
        res.append_rotation_wrt_point_mut(r, p);
        Some(res)
    }
}
// As a similarity, an isometry's scaling factor is identically 1 (`Id`).
impl<N, D: DimName, S, R> Similarity<PointBase<N, D, S>> for IsometryBase<N, D, S, R>
    where N: Real,
          S: OwnedStorage<N, D, U1>,
          R: Rotation<PointBase<N, D, S>>,
          S::Alloc: OwnedAllocator<N, D, U1, S> {
    type Scaling = Id;
    #[inline]
    fn translation(&self) -> TranslationBase<N, D, S> {
        self.translation.clone()
    }
    #[inline]
    fn rotation(&self) -> R {
        self.rotation.clone()
    }
    #[inline]
    fn scaling(&self) -> Id {
        Id::new()
    }
}
// Stamp out the remaining marker-trait impls with identical bounds.
macro_rules! marker_impl(
    ($($Trait: ident),*) => {$(
        impl<N, D: DimName, S, R> $Trait<PointBase<N, D, S>> for IsometryBase<N, D, S, R>
            where N: Real,
                  S: OwnedStorage<N, D, U1>,
                  R: Rotation<PointBase<N, D, S>>,
                  S::Alloc: OwnedAllocator<N, D, U1, S> { }
    )*}
);
marker_impl!(Isometry, DirectIsometry);
|
#[doc = "Reader of register IC_CLR_RX_DONE"]
pub type R = crate::R<u32, super::IC_CLR_RX_DONE>;
#[doc = "Reader of field `CLR_RX_DONE`"]
pub type CLR_RX_DONE_R = crate::R<bool, bool>;
// Read accessor for the single-bit clear-on-read field.
impl R {
    #[doc = "Bit 0 - Read this register to clear the RX_DONE interrupt (bit 7) of the IC_RAW_INTR_STAT register.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn clr_rx_done(&self) -> CLR_RX_DONE_R {
        CLR_RX_DONE_R::new((self.bits & 0x01) != 0)
    }
}
|
use std::borrow::Cow;
use std::error::Error;
use std::str::FromStr;
use std::{fmt, io};
/// Supported block-compression codecs.
///
/// The `#[repr(u64)]` discriminants are round-tripped by `from_u64`
/// (presumably a persisted on-disk tag — do not renumber without checking
/// the storage format).
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u64)]
pub enum CompressionType {
    None = 0,
    Snappy = 1,
    Zlib = 2,
    Lz4 = 3,
    Lz4hc = 4,
    Zstd = 5,
}
impl CompressionType {
pub(crate) fn from_u64(value: u64) -> Option<CompressionType> {
match value {
0 => Some(CompressionType::None),
1 => Some(CompressionType::Snappy),
2 => Some(CompressionType::Zlib),
3 => Some(CompressionType::Lz4),
4 => Some(CompressionType::Lz4hc),
5 => Some(CompressionType::Zstd),
_ => None,
}
}
}
impl FromStr for CompressionType {
    type Err = InvalidCompressionType;

    /// Parse a codec name (lowercase) into a `CompressionType`.
    ///
    /// Also accepts `"none"`: `from_u64` round-trips the `None` variant, so
    /// the textual form should too (it was previously rejected).
    ///
    /// # Errors
    /// Returns `InvalidCompressionType` for any unrecognised name.
    fn from_str(name: &str) -> Result<Self, Self::Err> {
        match name {
            "none" => Ok(CompressionType::None),
            "snappy" => Ok(CompressionType::Snappy),
            "zlib" => Ok(CompressionType::Zlib),
            "lz4" => Ok(CompressionType::Lz4),
            "lz4hc" => Ok(CompressionType::Lz4hc),
            "zstd" => Ok(CompressionType::Zstd),
            _ => Err(InvalidCompressionType),
        }
    }
}
/// Error returned when parsing an unrecognised compression-type name.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct InvalidCompressionType;
impl fmt::Display for InvalidCompressionType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("Invalid compression type")
    }
}
impl Error for InvalidCompressionType {}
/// Dispatch to the decompressor for `type_`.
///
/// `None` passes the input through borrowed (zero-copy); codecs whose
/// feature is disabled, and the unimplemented LZ4 variants, yield an
/// `io::Error`.
pub fn decompress(type_: CompressionType, data: &[u8]) -> io::Result<Cow<[u8]>> {
    match type_ {
        CompressionType::None => Ok(Cow::Borrowed(data)),
        CompressionType::Snappy => snappy_decompress(data),
        CompressionType::Zlib => zlib_decompress(data),
        CompressionType::Zstd => zstd_decompress(data),
        other => {
            let message = format!("unsupported {:?} decompression", other);
            Err(io::Error::new(io::ErrorKind::Other, message))
        },
    }
}
/// Dispatch to the compressor for `type_` at the given `level`.
///
/// `None` passes the input through borrowed (zero-copy); codecs whose
/// feature is disabled, and the unimplemented LZ4 variants, yield an
/// `io::Error`.
///
/// Fixed: the fallback error message said "decompression" (copy-pasted from
/// `decompress`); it now correctly says "compression".
pub fn compress(type_: CompressionType, level: u32, data: &[u8]) -> io::Result<Cow<[u8]>> {
    match type_ {
        CompressionType::None => Ok(Cow::Borrowed(data)),
        CompressionType::Zlib => zlib_compress(data, level),
        CompressionType::Snappy => snappy_compress(data, level),
        CompressionType::Zstd => zstd_compress(data, level),
        other => {
            let error = format!("unsupported {:?} compression", other);
            Err(io::Error::new(io::ErrorKind::Other, error))
        },
    }
}
// --------- zlib ---------
// Inflate `data` fully into a fresh buffer.
#[cfg(feature = "zlib")]
fn zlib_decompress(data: &[u8]) -> io::Result<Cow<[u8]>> {
    use std::io::Read;
    let mut decoder = flate2::read::ZlibDecoder::new(data);
    let mut buffer = Vec::new();
    decoder.read_to_end(&mut buffer)?;
    Ok(Cow::Owned(buffer))
}
// Fallback when the `zlib` feature is disabled: always an error.
#[cfg(not(feature = "zlib"))]
fn zlib_decompress(_data: &[u8]) -> io::Result<Cow<[u8]>> {
    Err(io::Error::new(io::ErrorKind::Other, "unsupported zlib decompression"))
}
// Deflate `data` at the requested flate2 compression `level`.
#[cfg(feature = "zlib")]
fn zlib_compress(data: &[u8], level: u32) -> io::Result<Cow<[u8]>> {
    use std::io::Write;
    let compression = flate2::Compression::new(level);
    let mut encoder = flate2::write::ZlibEncoder::new(Vec::new(), compression);
    encoder.write_all(data)?;
    encoder.finish().map(Cow::Owned)
}
#[cfg(not(feature = "zlib"))]
fn zlib_compress(_data: &[u8], _level: u32) -> io::Result<Cow<[u8]>> {
    Err(io::Error::new(io::ErrorKind::Other, "unsupported zlib compression"))
}
// --------- snappy ---------
// Decode a raw (non-framed) snappy block into a fresh buffer.
#[cfg(feature = "snappy")]
fn snappy_decompress(data: &[u8]) -> io::Result<Cow<[u8]>> {
    snap::raw::Decoder::new()
        .decompress_vec(data)
        .map_err(Into::into)
        .map(Cow::Owned)
}
#[cfg(not(feature = "snappy"))]
fn snappy_decompress(_data: &[u8]) -> io::Result<Cow<[u8]>> {
    Err(io::Error::new(io::ErrorKind::Other, "unsupported snappy decompression"))
}
// Encode `data` as a raw snappy block. Snappy has no compression levels,
// so `_level` is ignored. (Local renamed: this is an Encoder, not a
// "decoder" as it was previously named.)
#[cfg(feature = "snappy")]
fn snappy_compress(data: &[u8], _level: u32) -> io::Result<Cow<[u8]>> {
    snap::raw::Encoder::new()
        .compress_vec(data)
        .map_err(Into::into)
        .map(Cow::Owned)
}
#[cfg(not(feature = "snappy"))]
fn snappy_compress(_data: &[u8], _level: u32) -> io::Result<Cow<[u8]>> {
    Err(io::Error::new(io::ErrorKind::Other, "unsupported snappy compression"))
}
// --------- zstd ---------
// Stream-decode zstd data into a fresh buffer.
#[cfg(feature = "zstd")]
fn zstd_decompress(data: &[u8]) -> io::Result<Cow<[u8]>> {
    let mut buffer = Vec::new();
    zstd::stream::copy_decode(data, &mut buffer)?;
    Ok(Cow::Owned(buffer))
}
// Fallback when the `zstd` feature is disabled: always an error.
#[cfg(not(feature = "zstd"))]
fn zstd_decompress(_data: &[u8]) -> io::Result<Cow<[u8]>> {
    Err(io::Error::new(io::ErrorKind::Other, "unsupported zstd decompression"))
}
// Stream-encode at `level` (zstd levels are signed, hence the cast).
#[cfg(feature = "zstd")]
fn zstd_compress(data: &[u8], level: u32) -> io::Result<Cow<[u8]>> {
    let mut buffer = Vec::new();
    zstd::stream::copy_encode(data, &mut buffer, level as i32)?;
    Ok(Cow::Owned(buffer))
}
#[cfg(not(feature = "zstd"))]
fn zstd_compress(_data: &[u8], _level: u32) -> io::Result<Cow<[u8]>> {
    Err(io::Error::new(io::ErrorKind::Other, "unsupported zstd compression"))
}
|
use std::path::PathBuf;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
/// Environment-variable names/values controlling the mock transport mode.
pub mod constants {
    // Variable selecting the mode; expected values are the two below.
    pub const TESTING_MODE_KEY: &str = "TESTING_MODE";
    pub const TESTING_MODE_REPLAY: &str = "REPLAY";
    pub const TESTING_MODE_RECORD: &str = "RECORD";
}
/// A named record/replay transaction with a shared, monotonically
/// increasing request counter (cloned handles share the same counter).
#[derive(Debug, Clone)]
pub(crate) struct MockTransaction {
    pub(crate) name: String,
    pub(crate) number: Arc<AtomicUsize>,
}
impl MockTransaction {
    /// Create a transaction with the given name and a request counter at 0.
    pub(crate) fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            number: Arc::new(AtomicUsize::new(0)),
        }
    }
    /// The transaction name (also its on-disk folder name).
    pub(crate) fn name(&self) -> &str {
        &self.name
    }
    /// Current value of the shared request counter.
    pub(crate) fn number(&self) -> usize {
        self.number.load(Ordering::SeqCst)
    }
    /// Post-increment the shared counter, returning the previous value.
    pub(crate) fn increment_number(&self) -> usize {
        self.number.fetch_add(1, Ordering::SeqCst)
    }
    /// Resolve `<workspace_root>/test/transactions/<name>`, optionally
    /// creating the folder when it does not exist.
    ///
    /// # Errors
    /// Fails when the workspace root cannot be determined, the folder cannot
    /// be created, or (when not creating) the folder is missing.
    ///
    /// # Panics
    /// Panics when the transaction name is empty.
    pub(crate) fn file_path(
        &self,
        create_when_not_exist: bool,
    ) -> Result<PathBuf, crate::MockFrameworkError> {
        let mut path = PathBuf::from(workspace_root().map_err(|e| {
            crate::MockFrameworkError::TransactionStorageError(format!(
                "could not read the workspace_root from the cargo metadata: {}",
                e,
            ))
        })?);
        path.push("test");
        path.push("transactions");
        let name = self.name();
        if name.is_empty() {
            panic!(
                "`ClientOptions` and `TransportOptions` must be created with a non-empty transaction \
                 name when using the `mock_transport_framework` feature. You can do this by using \
                 `ClientOptions::new_with_transaction_name`"
            );
        }
        path.push(name);
        if !path.exists() {
            if create_when_not_exist {
                std::fs::create_dir_all(&path).map_err(|e| {
                    crate::MockFrameworkError::IOError(
                        format!("cannot create transaction folder: {}", path.display()),
                        e,
                    )
                })?;
            } else {
                // Canonicalize for a clearer message; fall back to the raw
                // path when canonicalization itself fails.
                return Err(crate::MockFrameworkError::MissingTransaction(format!(
                    "the transaction location '{}' does not exist",
                    path.canonicalize().unwrap_or(path).display()
                )));
            }
        }
        Ok(path)
    }
}
/// Run `cargo metadata` and scrape the `workspace_root` value out of its
/// JSON output.
///
/// The output is searched textually rather than parsed as JSON, so this
/// relies on the path not containing escaped quotes.
///
/// # Errors
/// Fails when cargo cannot be spawned or the key/value is not found.
fn workspace_root() -> Result<String, Box<dyn std::error::Error>> {
    let output = std::process::Command::new("cargo")
        .arg("metadata")
        .output()?;
    let output = String::from_utf8_lossy(&output.stdout);
    let key = "workspace_root\":\"";
    // `ok_or` with a plain &str (converted to Box<dyn Error> by `?`)
    // replaces the previous no-argument `format!` calls.
    let index = output
        .find(key)
        .ok_or("workspace_root key not found in metadata")?;
    let value = &output[index + key.len()..];
    let end = value
        .find('"')
        .ok_or("workspace_root value was malformed")?;
    Ok(value[..end].into())
}
|
use axum::{
extract::{Path, State},
http::Uri,
response::{IntoResponse, Redirect, Response},
routing::*,
Router, Server,
};
use include_dir::*;
use miette::{Context, IntoDiagnostic};
use std::{net::SocketAddr, sync::Arc};
use tower_http::trace::TraceLayer;
use crate::{
posts::blog::{BlogPosts, ToCanonicalPath},
AppState,
};
pub use config::*;
use errors::*;
pub(crate) mod cmd;
// Server-rendered page handlers.
pub(crate) mod pages {
    pub mod blog;
    pub mod home;
    pub mod til;
}
// OAuth endpoints for external integrations.
mod api {
    pub mod external {
        pub mod github_oauth;
        pub mod twitch_oauth;
    }
}
mod config;
pub mod errors;
mod routes;
mod server_tracing;
mod templates;
// Compiled Tailwind CSS and the static asset tree are embedded into the
// binary at build time.
const TAILWIND_STYLES: &str = include_str!("../../../target/tailwind.css");
const STATIC_ASSETS: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/static");
// Handler result alias: an HTTP `Response` or a miette-backed error.
type ResponseResult<T = Response> = Result<T, MietteError>;
/// Build the axum router (with request tracing) and serve it on
/// `0.0.0.0:3000` until the server exits.
///
/// # Errors
/// Returns a diagnostic error when the server fails to bind or run.
pub(crate) async fn run_axum(config: AppState) -> miette::Result<()> {
    // CSS for syntax-highlighted code blocks, generated from the configured
    // syntect theme. `unwrap`: theme rendering is assumed infallible here.
    let syntax_css = syntect::html::css_for_theme_with_class_style(
        &config.markdown_to_html_context.theme,
        syntect::html::ClassStyle::Spaced,
    )
    .unwrap();
    // One Tracer instance handles both span creation and response events.
    let tracer = server_tracing::Tracer;
    let trace_layer = TraceLayer::new_for_http()
        .make_span_with(tracer)
        .on_response(tracer);
    let app = routes::make_router(syntax_css)
        .with_state(config)
        .layer(trace_layer);
    // Bind on all interfaces, port 3000.
    let addr = SocketAddr::from(([0, 0, 0, 0], 3000));
    tracing::debug!("listening on {}", addr);
    Server::bind(&addr)
        .serve(app.into_make_service())
        .await
        .into_diagnostic()
        .wrap_err("Failed to run server")
}
|
// Foreign declarations for the C standard library.
extern {
    // Our C function definitions!
    // `strcpy(dest, src)`: caller must guarantee `src` is NUL-terminated and
    // `dest` has room for it, including the NUL.
    pub fn strcpy(dest: *mut u8, src: *const u8) -> *mut u8;
    // `puts(s)`: caller must guarantee `s` is NUL-terminated.
    pub fn puts(s: *const u8) -> i32;
}
/// Copy a byte string with libc `strcpy`, then print it with `puts`.
fn main() {
    let src = b"Hello, world!\0"; // NUL-terminated source string
    let mut dest = [0u8; 32]; // stack buffer to copy the string into
    // SAFETY: `src` is NUL-terminated and `dest` (32 bytes) is large enough
    // for its 14 bytes, so `strcpy` stays in bounds; afterwards `dest`
    // holds a valid NUL-terminated string for `puts` to read.
    unsafe {
        strcpy(dest.as_mut_ptr(), src.as_ptr());
        puts(dest.as_ptr());
    }
}
|
#[doc = "Reader of register TRIM_LDO_1"]
pub type R = crate::R<u32, super::TRIM_LDO_1>;
#[doc = "Writer for register TRIM_LDO_1"]
pub type W = crate::W<u32, super::TRIM_LDO_1>;
#[doc = "Register TRIM_LDO_1 `reset()`'s with value 0x08"]
impl crate::ResetValue for super::TRIM_LDO_1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x08
    }
}
#[doc = "Reader of field `ACT_REF_BGR`"]
pub type ACT_REF_BGR_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ACT_REF_BGR`"]
pub struct ACT_REF_BGR_W<'a> {
    w: &'a mut W,
}
impl<'a> ACT_REF_BGR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // SAFETY: caller must keep the value within the 4-bit field range.
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
#[doc = "Reader of field `SB_BGRES`"]
pub type SB_BGRES_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `SB_BGRES`"]
pub struct SB_BGRES_W<'a> {
    w: &'a mut W,
}
impl<'a> SB_BGRES_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // SAFETY: caller must keep the value within the 4-bit field range.
        self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4);
        self.w
    }
}
// Read accessors: extract each 4-bit trim field from the register value.
impl R {
    #[doc = "Bits 0:3 - To trim active regulator reference voltage"]
    #[inline(always)]
    pub fn act_ref_bgr(&self) -> ACT_REF_BGR_R {
        ACT_REF_BGR_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - To trim standby regulator reference voltage"]
    #[inline(always)]
    pub fn sb_bgres(&self) -> SB_BGRES_R {
        SB_BGRES_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
}
// Write accessors: return field-writer proxies over this register writer.
impl W {
    #[doc = "Bits 0:3 - To trim active regulator reference voltage"]
    #[inline(always)]
    pub fn act_ref_bgr(&mut self) -> ACT_REF_BGR_W {
        ACT_REF_BGR_W { w: self }
    }
    #[doc = "Bits 4:7 - To trim standby regulator reference voltage"]
    #[inline(always)]
    pub fn sb_bgres(&mut self) -> SB_BGRES_W {
        SB_BGRES_W { w: self }
    }
}
|
// #![warn(clippy::all, clippy::pedantic, clippy::nursery, clippy::cargo)]
// #![allow(clippy::missing_const_for_fn)]
// #![allow(clippy::multiple_crate_versions)]
// #![allow(clippy::missing_errors_doc)]
// #![allow(clippy::module_name_repetitions)]
pub mod bracken;
pub mod data;
pub mod errors;
pub mod kraken;
pub mod parser;
pub mod taxonomy;
#[macro_use]
extern crate serde;
|
use std::cell::RefCell;
use std::borrow::BorrowMut;
use std::marker::PhantomData;
use rustc_serialize;
use hyper;
use oauth2;
use super::super::YouTube;
/// Represents all aspects of a YouTube video resource. May only be
/// partially available — hence every field is `Option`.
///
/// Field names are camelCase to mirror the YouTube API's JSON keys; the
/// `#[allow(non_snake_case)]` attribute (present on every sibling struct
/// but previously missing here) silences the resulting naming warning.
#[allow(non_snake_case)]
#[derive(RustcEncodable, RustcDecodable, Default, Clone)]
pub struct Video {
    pub snippet: Option<VideoSnippet>,
    pub recordingDetails: Option<VideoRecordingDetails>,
    pub status: Option<VideoStatus>,
}
/// Basic details about a video (title, description, tags, …), mirroring the
/// YouTube API's camelCase JSON field names.
#[allow(non_snake_case)]
#[derive(RustcEncodable, RustcDecodable, Default, Clone)]
pub struct VideoSnippet {
    pub categoryId: String,
    pub description: String,
    pub tags: Vec<String>,
    pub title: String,
    // NOTE(review): status/recordingDetails also exist on `Video` itself —
    // confirm which level the API actually populates.
    pub status: Option<VideoStatus>,
    pub recordingDetails: Option<VideoRecordingDetails>,
}
impl Video {
    /// Build the comma-separated `part` request parameter from whichever
    /// sub-resources are populated.
    ///
    /// Fixed: `"recordingDetails"` previously lacked its trailing comma, so
    /// a video with both recording details and a snippet produced the
    /// malformed part string `…recordingDetailssnippet,`. Every segment now
    /// ends with a comma, matching the existing `status,`/`snippet,` style.
    fn parts(&self) -> String {
        let mut res = String::new();
        if self.status.is_some() {
            res = res + "status,";
        }
        if self.recordingDetails.is_some() {
            res = res + "recordingDetails,";
        }
        if self.snippet.is_some() {
            res = res + "snippet,";
        }
        res
    }
}
/// Upload/privacy status of a video (camelCase mirrors the API's JSON).
#[allow(non_snake_case)]
#[derive(RustcEncodable, RustcDecodable, Default, Clone)]
pub struct VideoStatus {
    pub privacyStatus: String,
    pub embeddable: bool,
    pub license: String,
    pub publicStatsViewable: bool,
    pub publishAt: String,
}
/// Where and when a video was recorded. Fields are private (crate-internal).
#[allow(non_snake_case)]
#[derive(RustcEncodable, RustcDecodable, Default, Clone)]
pub struct VideoRecordingDetails {
    locationDescription: String,
    recordingDate: String,
}
/// A geographic point (altitude/latitude/longitude). Currently not
/// referenced by the structs above — presumably for future recording
/// location support.
#[allow(non_snake_case)]
#[derive(RustcEncodable, RustcDecodable, Default, Clone)]
pub struct GeoPoint {
    altitude: f64,
    latitude: f64,
    longitude: f64,
}
/// The videos service - provides actual functionality through builders.
pub struct Service<'a, C, NC, A>
    where NC: 'a,
          C: 'a,
          A: 'a, {
    // Shared handle to the hub that owns the HTTP client and authenticator.
    hub: &'a YouTube<C, NC, A>
}
impl<'a, C, NC, A> Service<'a, C, NC, A>
    where NC: hyper::net::NetworkConnector,
          C: BorrowMut<hyper::Client<NC>> + 'a,
          A: oauth2::GetToken + 'a {
    /// Create a videos service bound to the given hub.
    pub fn new(hub: &'a YouTube<C, NC, A>) -> Service<'a, C, NC, A> {
        Service { hub: hub }
    }
    /// Start an insert (upload) request for `video`, asking the API for the
    /// given comma-separated `parts`.
    pub fn insert(&self, parts: &str, video: &Video) -> VideosInsertBuilder<'a, C, NC, A> {
        VideosInsertBuilder {
            hub: self.hub,
            video: video.clone(),
            parts: parts.to_string(),
        }
    }
}
/// Builder for a videos.insert call; execution methods are not yet
/// implemented (see the empty impl below).
pub struct VideosInsertBuilder<'a, C, NC, A>
    where NC: 'a,
          C: 'a,
          A: 'a {
    hub: &'a YouTube<C, NC, A>,
    video: Video,
    parts: String,
}
impl<'a, C, NC, A> VideosInsertBuilder<'a, C, NC, A>
    where NC: hyper::net::NetworkConnector,
          C: BorrowMut<hyper::Client<NC>> + 'a,
          A: oauth2::GetToken + 'a {
    // TODO: request execution not yet implemented.
}
#[cfg(test)]
mod tests {
    // No tests yet; the imports keep the scaffolding ready.
    use std::default::Default;
    use super::*;
}
use super::utils::parse_string;
use crate::ast::rules;
/*#[test]
fn test_simple_statement() {
assert_eq!(
parse_statement(&mut make_lexer("break;")),
Ok(exp!(Statement::Break))
)
}*/
// Parse `return` statements (with and without an expression list) and
// compare the debug rendering of the resulting AST.
#[test]
fn test_return_statement() {
    assert_eq!(
        parse_string("return nil, false, 42;", rules::retstat),
        "[Single(Return(Some(Expressions([Nil, Boolean(false), Number(42.0)]))))]"
    );
    assert_eq!(
        parse_string("return;", rules::retstat),
        "[Single(Return(None))]"
    );
}
|
use apllodb_test_support::setup::setup_test_logger;
/// General test setup sequence: currently just installs the shared test
/// logger. Call at the start of each integration test.
pub fn test_setup() {
    setup_test_logger();
}
|
mod serde_impl;
use serde_derive::Deserialize;
use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Sub, SubAssign};
/// Represents a hex point in axial coordinate space.
#[derive(Debug, Clone, Default, Copy, Eq, PartialEq, Deserialize, Ord, PartialOrd, Hash)]
pub struct Axial {
    pub q: i32,
    pub r: i32,
}

impl std::fmt::Display for Axial {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "[{}, {}]", self.q, self.r)
    }
}
// Removed: `unsafe impl Send for Axial {}`. `Axial` consists of two `i32`s,
// so `Send` is implemented automatically by the compiler; the manual
// `unsafe` impl was redundant and an unnecessary use of `unsafe`.
impl Axial {
    /// The origin of the axial coordinate system.
    pub const ZERO: Axial = Axial { q: 0, r: 0 };
    /// Offsets of the six hex neighbours; the ordering defines the indices
    /// used by `hex_neighbours`, `hex_neighbour` and `neighbour_index`.
    pub const NEIGHBOURS: [Axial; 6] = [
        Axial::new(1, 0),
        Axial::new(1, -1),
        Axial::new(0, -1),
        Axial::new(-1, 0),
        Axial::new(-1, 1),
        Axial::new(0, 1),
    ];
    /// Construct a point from axial coordinates.
    pub const fn new(q: i32, r: i32) -> Self {
        Self { q, r }
    }
/// Return the "Manhattan" distance between two points in a hexagonal coordinate space
/// Interprets points as axial coordiantes
/// See https://www.redblobgames.com/grids/hexagons/#distances for more information
#[inline]
pub fn hex_distance(self, rhs: Axial) -> u32 {
let [ax, ay, az] = self.hex_axial_to_cube();
let [bx, by, bz] = rhs.hex_axial_to_cube();
let x = (ax - bx).abs();
let y = (ay - by).abs();
let z = (az - bz).abs();
x.max(y).max(z) as u32
}
    /// Convert self from a hexagonal axial vector to a hexagonal cube vector
    #[inline]
    pub const fn hex_axial_to_cube(self) -> [i32; 3] {
        let x = self.q;
        let z = self.r;
        // Cube coordinates satisfy x + y + z == 0 by construction.
        let y = -x - z;
        [x, y, z]
    }
    /// Convert a cube vector back to axial coordinates (the middle cube
    /// component is redundant and discarded).
    #[inline]
    pub const fn hex_cube_to_axial([q, _, r]: [i32; 3]) -> Self {
        Self { q, r }
    }
/// Get the neighbours of this point starting at top left and going counter-clockwise
#[inline]
pub const fn hex_neighbours(self) -> [Axial; 6] {
[
Axial::new(
self.q + Self::NEIGHBOURS[0].q,
self.r + Self::NEIGHBOURS[0].r,
),
Axial::new(
self.q + Self::NEIGHBOURS[1].q,
self.r + Self::NEIGHBOURS[1].r,
),
Axial::new(
self.q + Self::NEIGHBOURS[2].q,
self.r + Self::NEIGHBOURS[2].r,
),
Axial::new(
self.q + Self::NEIGHBOURS[3].q,
self.r + Self::NEIGHBOURS[3].r,
),
Axial::new(
self.q + Self::NEIGHBOURS[4].q,
self.r + Self::NEIGHBOURS[4].r,
),
Axial::new(
self.q + Self::NEIGHBOURS[5].q,
self.r + Self::NEIGHBOURS[5].r,
),
]
}
pub fn hex_neighbour(self, i: usize) -> Axial {
self + Self::NEIGHBOURS[i]
}
/// Return the index in `hex_neighbours` of the neighbour if applicable. None otherwise.
/// `q` and `r` must be in the set {-1, 0, 1}.
/// To get the index of the neighbour of a point
/// ```rust
/// use caolo_sim::geometry::Axial;
/// let point = Axial::new(42, 69);
/// let neighbour = Axial::new(42, 68);
/// // `neighbour - point` will result in the vector pointing from `point` to `neighbour`
/// let i = Axial::neighbour_index(neighbour - point);
/// assert_eq!(i, Some(2));
/// ```
#[inline]
pub fn neighbour_index(ax: Axial) -> Option<usize> {
Self::NEIGHBOURS
.iter()
.enumerate()
.find(|(_i, bx)| ax == **bx)
.map(|(i, _)| i)
}
#[inline]
pub fn rotate_right_around(self, center: Axial) -> Axial {
let p = self - center;
let p = p.rotate_right();
p + center
}
#[inline]
pub fn rotate_left_around(self, center: Axial) -> Axial {
let p = self - center;
let p = p.rotate_left();
p + center
}
#[inline]
pub const fn rotate_right(self) -> Axial {
let [x, y, z] = self.hex_axial_to_cube();
Self::hex_cube_to_axial([-z, -x, -y])
}
#[inline]
pub fn rotate_left(self) -> Axial {
let [x, y, z] = self.hex_axial_to_cube();
Self::hex_cube_to_axial([-y, -z, -x])
}
#[inline]
pub const fn as_array(self) -> [i32; 2] {
[self.q, self.r]
}
#[inline]
pub fn dist(self, other: Self) -> u32 {
self.hex_distance(other)
}
pub fn to_pixel_pointy(self, size: f32) -> [f32; 2] {
let Axial { q, r } = self;
let [q, r] = [q as f32, r as f32];
const SQRT_3: f32 = 1.732_050_8;
let x = size * (SQRT_3 * q + SQRT_3 / 2.0 * r);
let y = size * (3. / 2. * r);
[x, y]
}
}
impl Add for Axial {
    type Output = Self;
    /// Component-wise addition.
    fn add(self, rhs: Self) -> Self {
        Self { q: self.q + rhs.q, r: self.r + rhs.r }
    }
}
impl AddAssign for Axial {
    fn add_assign(&mut self, rhs: Self) {
        // `Axial` is `Copy`, so delegate to the binary operator.
        *self = *self + rhs;
    }
}
impl Sub for Axial {
    type Output = Self;
    /// Component-wise subtraction.
    fn sub(self, rhs: Self) -> Self {
        Self { q: self.q - rhs.q, r: self.r - rhs.r }
    }
}
impl SubAssign for Axial {
    fn sub_assign(&mut self, rhs: Self) {
        *self = *self - rhs;
    }
}
impl Mul<i32> for Axial {
    type Output = Self;
    /// Scalar multiplication of both components.
    fn mul(self, rhs: i32) -> Self {
        Self { q: self.q * rhs, r: self.r * rhs }
    }
}
impl MulAssign<i32> for Axial {
    fn mul_assign(&mut self, rhs: i32) {
        *self = *self * rhs;
    }
}
impl Div<i32> for Axial {
    type Output = Self;
    /// Scalar (truncating integer) division of both components.
    fn div(self, rhs: i32) -> Self {
        Self { q: self.q / rhs, r: self.r / rhs }
    }
}
impl DivAssign<i32> for Axial {
    fn div_assign(&mut self, rhs: i32) {
        *self = *self / rhs;
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Addition and subtraction are component-wise inverses.
    #[test]
    fn basic_arithmetic() {
        let p1 = Axial::new(0, 0);
        let p2 = Axial::new(-1, 2);
        let sum = p1 + p2;
        assert_eq!(sum, p2);
        assert_eq!(sum - p2, p1);
    }
    /// A known distance pair, plus: every neighbour is exactly 1 away.
    #[test]
    fn distance_simple() {
        let a = Axial::new(0, 0);
        let b = Axial::new(1, 3);
        assert_eq!(a.hex_distance(b), 4);
        for p in a.hex_neighbours().iter() {
            assert_eq!(p.hex_distance(a), 1);
        }
    }
    /// `neighbour_index` inverts `hex_neighbours` for all six directions.
    #[test]
    fn neighbour_indices() {
        let p = Axial::new(13, 42);
        let neighbours = p.hex_neighbours();
        for (i, n) in neighbours.iter().cloned().enumerate() {
            let j = Axial::neighbour_index(n - p);
            assert_eq!(j, Some(i));
        }
    }
}
|
extern crate advent_of_code_2017_day_9;
use advent_of_code_2017_day_9::*;
#[test]
/// Part 1 examples from the puzzle statement: each group scores its
/// nesting depth; garbage (`<...>`) and `!`-escaped characters score nothing.
fn part_1_example() {
    // {}, score of 1.
    assert_eq!(solve_puzzle_part_1("{}"), "1");
    // {{{}}}, score of 1 + 2 + 3 = 6.
    assert_eq!(solve_puzzle_part_1("{{{}}}"), "6");
    // {{},{}}, score of 1 + 2 + 2 = 5.
    assert_eq!(solve_puzzle_part_1("{{},{}}"), "5");
    // {{{},{},{{}}}}, score of 1 + 2 + 3 + 3 + 3 + 4 = 16.
    assert_eq!(solve_puzzle_part_1("{{{},{},{{}}}}"), "16");
    // {<a>,<a>,<a>,<a>}, score of 1.
    assert_eq!(solve_puzzle_part_1("{<a>,<a>,<a>,<a>}"), "1");
    // {{<ab>},{<ab>},{<ab>},{<ab>}}, score of 1 + 2 + 2 + 2 + 2 = 9.
    assert_eq!(solve_puzzle_part_1("{{<ab>},{<ab>},{<ab>},{<ab>}}"), "9");
    // {{<!!>},{<!!>},{<!!>},{<!!>}}, score of 1 + 2 + 2 + 2 + 2 = 9.
    assert_eq!(solve_puzzle_part_1("{{<!!>},{<!!>},{<!!>},{<!!>}}"), "9");
    // {{<a!>},{<a!>},{<a!>},{<ab>}}, score of 1 + 2 = 3.
    assert_eq!(solve_puzzle_part_1("{{<a!>},{<a!>},{<a!>},{<ab>}}"), "3");
}
#[test]
/// Part 2 examples: count non-cancelled characters inside garbage,
/// excluding the enclosing `<` `>` and any `!`-escaped pairs.
fn part_2_example() {
    // <>, 0 characters.
    assert_eq!(solve_puzzle_part_2("<>"), "0");
    // <random characters>, 17 characters.
    assert_eq!(solve_puzzle_part_2("<random characters>"), "17");
    // <<<<>, 3 characters.
    assert_eq!(solve_puzzle_part_2("<<<<>"), "3");
    // <{!>}>, 2 characters.
    assert_eq!(solve_puzzle_part_2("<{!>}>"), "2");
    // <!!>, 0 characters.
    assert_eq!(solve_puzzle_part_2("<!!>"), "0");
    // <!!!>>, 0 characters.
    assert_eq!(solve_puzzle_part_2("<!!!>>"), "0");
    // <{o"i!a,<{i<a>, 10 characters.
    assert_eq!(solve_puzzle_part_2("<{o\"i!a,<{i<a>"), "10");
}
|
use std::fmt;
use std::error::Error as StdError;
/// Minimal opaque error type.
/// NOTE(review): `detail` is never read by the `Display`/`description`
/// impls below — confirm whether it is populated or shown elsewhere.
#[derive(Debug)]
pub struct Error {
    detail: Option<String>
}
impl Error {
pub fn new() -> Error {
Error { detail: None }
}
}
impl fmt::Display for Error {
    /// Human-readable form. Writes the fixed text "Error" directly instead
    /// of routing through `StdError::description`, which is deprecated
    /// (since Rust 1.42) in favour of `Display`.
    /// NOTE(review): the `detail` field is not shown — confirm whether it
    /// should be appended when present.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("Error")
    }
}
impl StdError for Error {
    // `description` is deprecated since Rust 1.42 in favour of `Display`;
    // kept for backward compatibility with callers that still invoke it.
    fn description(&self) -> &str {
        "Error"
    }
}
|
extern crate regex;
mod util;
use regex::Regex;
use std::collections::HashMap;
use std::io::BufRead;
use util::error_exit;
/// Reaction graph for the AoC day-14 style problem: an edge
/// product -> ingredient carries the per-batch ingredient quantity, and
/// `units[id]` is how many units one reaction batch of `id` yields.
struct Graph {
    // Chemical name -> dense node id.
    node_list: HashMap<String, usize>,
    // adj_list[id] = (ingredient id, quantity needed per batch).
    adj_list: Vec<Vec<(usize, usize)>>,
    // units[id] = output quantity produced by one batch of id's reaction.
    units: Vec<usize>,
}
impl Graph {
    /// Create an empty graph.
    fn new() -> Graph {
        Graph {
            node_list: HashMap::new(),
            adj_list: Vec::new(),
            units: Vec::new(),
        }
    }
    /// Return the dense id for `name`, interning it (and allocating its
    /// adjacency/units slots) on first sight.
    ///
    /// Single hash lookup on the common (already-interned) path instead of
    /// the former `contains_key` + `insert` + index triple.
    fn get_node(&mut self, name: &str) -> usize {
        if let Some(&id) = self.node_list.get(name) {
            return id;
        }
        let id = self.node_list.len();
        self.node_list.insert(String::from(name), id);
        self.adj_list.push(Vec::new());
        self.units.push(0);
        id
    }
    /// Number of interned nodes.
    fn count(&self) -> usize {
        self.node_list.len()
    }
}
/// Post-order DFS helper for `topological_sort`: pushes `node` onto
/// `sorted` only after all of its children have been pushed.
/// NOTE(review): `visited` is set only after recursing, so a cyclic graph
/// would recurse forever — this assumes the reaction graph is a DAG.
fn _visit(node: usize, sorted: &mut Vec<usize>, visited: &mut Vec<bool>, graph: &Graph) {
    if visited[node] {
        return;
    }
    for &(child, _) in graph.adj_list[node].iter() {
        _visit(child, sorted, visited, graph);
    }
    visited[node] = true;
    sorted.push(node);
}
/// Topologically sort all nodes so every product precedes its ingredients.
fn topological_sort(graph: &Graph) -> Vec<usize> {
    // `vec![false; n]` is the idiomatic (single-allocation) form of the
    // former `[false].repeat(n)`.
    let mut visited = vec![false; graph.count()];
    // Every node ends up in the result exactly once.
    let mut sorted = Vec::with_capacity(graph.count());
    for node in 0..graph.count() {
        _visit(node, &mut sorted, &mut visited, graph);
    }
    // Post-order DFS yields leaves first; reverse for products-first order.
    sorted.reverse();
    sorted
}
/// Smallest batch count `m` such that `m * base >= min_val` (ceiling
/// division). Returns 0 when nothing is required: the former
/// `(min_val - 1) / base + 1` underflows `usize` for `min_val == 0`.
fn min_multiple(base: usize, min_val: usize) -> usize {
    if min_val == 0 {
        0
    } else {
        (min_val - 1) / base + 1
    }
}
/// Total ORE needed to produce `fuel` units of FUEL, walking nodes in
/// topological order (`sorted_id`) so each node's full demand is known
/// before its ingredients are charged.
fn required_ore(fuel: usize, sorted_id: &Vec<usize>, graph: &Graph) -> usize {
    let mut counts = [0usize].repeat(graph.count());
    counts[graph.node_list["FUEL"]] = fuel;
    // NOTE(review): skipping the final element assumes ORE (the only leaf)
    // sorts last — holds for well-formed puzzle input; confirm.
    for &node in sorted_id[0..sorted_id.len() - 1].iter() {
        let min_count = counts[node];
        // Reactions run in whole batches of `units[node]` outputs.
        let multiple = min_multiple(graph.units[node], min_count);
        for &(child_id, child_coef) in graph.adj_list[node].iter() {
            counts[child_id] += multiple * child_coef;
        }
    }
    counts[graph.node_list["ORE"]]
}
/// Parse reaction lines from stdin, then report the ore cost of one FUEL
/// (part 1) and the most FUEL producible from 1_000_000_000_000 ore (part 2).
fn main() {
    let matcher = Regex::new(r"(\d+) ([A-Z]+)").expect("Failed to build regex");
    let mut graph = Graph::new();
    for line in lines_from_stdin!() {
        // Every "<count> <NAME>" pair on the line; the last pair is the
        // product (right-hand side), all preceding pairs are ingredients.
        let components: Vec<(usize, usize)> = matcher
            .captures_iter(&line)
            .map(|cap| (graph.get_node(&cap[2]), cap[1].parse::<usize>().expect("_")))
            .collect();
        match components.split_last() {
            Some((&(rhs_id, rhs_coef), lhs)) => {
                graph.units[rhs_id] = rhs_coef;
                for &(id, coef) in lhs {
                    graph.adj_list[rhs_id].push((id, coef));
                }
            }
            _ => unreachable!(),
        }
    }
    let sorted = topological_sort(&graph);
    println!("PART1 {}", required_ore(1, &sorted, &graph));
    // Part 2: exponential search for an upper bound on fuel, then binary
    // search for the largest amount costing at most 1e12 ore.
    let mut hi = 1;
    while required_ore(hi, &sorted, &graph) < 1000000000000 {
        hi *= 2;
    }
    let mut lo = hi / 2;
    while hi - 1 > lo {
        let mid = (hi - lo) / 2 + lo;
        let ore_count = required_ore(mid, &sorted, &graph);
        if ore_count > 1000000000000 {
            hi = mid;
        } else if ore_count == 1000000000000 {
            // Exact hit: mid is the answer.
            lo = mid;
            break;
        } else {
            lo = mid;
        }
    }
    println!("PART2 {}", lo);
}
|
// Auto-generated (svd2rust-style) description of the RCC_MP_APB4LPENCLRR
// register: per-field reader/writer type aliases, read/write accessors, and
// the register spec. NOTE(review): generated code — regenerate from the SVD
// rather than hand-editing.
#[doc = "Register `RCC_MP_APB4LPENCLRR` reader"]
pub type R = crate::R<RCC_MP_APB4LPENCLRR_SPEC>;
#[doc = "Register `RCC_MP_APB4LPENCLRR` writer"]
pub type W = crate::W<RCC_MP_APB4LPENCLRR_SPEC>;
#[doc = "Field `LTDCLPEN` reader - LTDCLPEN"]
pub type LTDCLPEN_R = crate::BitReader;
#[doc = "Field `LTDCLPEN` writer - LTDCLPEN"]
pub type LTDCLPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DSILPEN` reader - DSILPEN"]
pub type DSILPEN_R = crate::BitReader;
#[doc = "Field `DSILPEN` writer - DSILPEN"]
pub type DSILPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DDRPERFMLPEN` reader - DDRPERFMLPEN"]
pub type DDRPERFMLPEN_R = crate::BitReader;
#[doc = "Field `DDRPERFMLPEN` writer - DDRPERFMLPEN"]
pub type DDRPERFMLPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IWDG2APBLPEN` reader - IWDG2APBLPEN"]
pub type IWDG2APBLPEN_R = crate::BitReader;
#[doc = "Field `IWDG2APBLPEN` writer - IWDG2APBLPEN"]
pub type IWDG2APBLPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USBPHYLPEN` reader - USBPHYLPEN"]
pub type USBPHYLPEN_R = crate::BitReader;
#[doc = "Field `USBPHYLPEN` writer - USBPHYLPEN"]
pub type USBPHYLPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `STGENROLPEN` reader - STGENROLPEN"]
pub type STGENROLPEN_R = crate::BitReader;
#[doc = "Field `STGENROLPEN` writer - STGENROLPEN"]
pub type STGENROLPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `STGENROSTPEN` reader - STGENROSTPEN"]
pub type STGENROSTPEN_R = crate::BitReader;
#[doc = "Field `STGENROSTPEN` writer - STGENROSTPEN"]
pub type STGENROSTPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: one bit-extractor per documented field position.
impl R {
    #[doc = "Bit 0 - LTDCLPEN"]
    #[inline(always)]
    pub fn ltdclpen(&self) -> LTDCLPEN_R {
        LTDCLPEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 4 - DSILPEN"]
    #[inline(always)]
    pub fn dsilpen(&self) -> DSILPEN_R {
        DSILPEN_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 8 - DDRPERFMLPEN"]
    #[inline(always)]
    pub fn ddrperfmlpen(&self) -> DDRPERFMLPEN_R {
        DDRPERFMLPEN_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 15 - IWDG2APBLPEN"]
    #[inline(always)]
    pub fn iwdg2apblpen(&self) -> IWDG2APBLPEN_R {
        IWDG2APBLPEN_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - USBPHYLPEN"]
    #[inline(always)]
    pub fn usbphylpen(&self) -> USBPHYLPEN_R {
        USBPHYLPEN_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 20 - STGENROLPEN"]
    #[inline(always)]
    pub fn stgenrolpen(&self) -> STGENROLPEN_R {
        STGENROLPEN_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - STGENROSTPEN"]
    #[inline(always)]
    pub fn stgenrostpen(&self) -> STGENROSTPEN_R {
        STGENROSTPEN_R::new(((self.bits >> 21) & 1) != 0)
    }
}
// Write accessors: one proxy writer per field, bit offset in the const param.
impl W {
    #[doc = "Bit 0 - LTDCLPEN"]
    #[inline(always)]
    #[must_use]
    pub fn ltdclpen(&mut self) -> LTDCLPEN_W<RCC_MP_APB4LPENCLRR_SPEC, 0> {
        LTDCLPEN_W::new(self)
    }
    #[doc = "Bit 4 - DSILPEN"]
    #[inline(always)]
    #[must_use]
    pub fn dsilpen(&mut self) -> DSILPEN_W<RCC_MP_APB4LPENCLRR_SPEC, 4> {
        DSILPEN_W::new(self)
    }
    #[doc = "Bit 8 - DDRPERFMLPEN"]
    #[inline(always)]
    #[must_use]
    pub fn ddrperfmlpen(&mut self) -> DDRPERFMLPEN_W<RCC_MP_APB4LPENCLRR_SPEC, 8> {
        DDRPERFMLPEN_W::new(self)
    }
    #[doc = "Bit 15 - IWDG2APBLPEN"]
    #[inline(always)]
    #[must_use]
    pub fn iwdg2apblpen(&mut self) -> IWDG2APBLPEN_W<RCC_MP_APB4LPENCLRR_SPEC, 15> {
        IWDG2APBLPEN_W::new(self)
    }
    #[doc = "Bit 16 - USBPHYLPEN"]
    #[inline(always)]
    #[must_use]
    pub fn usbphylpen(&mut self) -> USBPHYLPEN_W<RCC_MP_APB4LPENCLRR_SPEC, 16> {
        USBPHYLPEN_W::new(self)
    }
    #[doc = "Bit 20 - STGENROLPEN"]
    #[inline(always)]
    #[must_use]
    pub fn stgenrolpen(&mut self) -> STGENROLPEN_W<RCC_MP_APB4LPENCLRR_SPEC, 20> {
        STGENROLPEN_W::new(self)
    }
    #[doc = "Bit 21 - STGENROSTPEN"]
    #[inline(always)]
    #[must_use]
    pub fn stgenrostpen(&mut self) -> STGENROSTPEN_W<RCC_MP_APB4LPENCLRR_SPEC, 21> {
        STGENROSTPEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "This register is used by the MCU\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rcc_mp_apb4lpenclrr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rcc_mp_apb4lpenclrr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RCC_MP_APB4LPENCLRR_SPEC;
impl crate::RegisterSpec for RCC_MP_APB4LPENCLRR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rcc_mp_apb4lpenclrr::R`](R) reader structure"]
impl crate::Readable for RCC_MP_APB4LPENCLRR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rcc_mp_apb4lpenclrr::W`](W) writer structure"]
impl crate::Writable for RCC_MP_APB4LPENCLRR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RCC_MP_APB4LPENCLRR to value 0x0011_8111"]
impl crate::Resettable for RCC_MP_APB4LPENCLRR_SPEC {
    const RESET_VALUE: Self::Ux = 0x0011_8111;
}
|
use std::env;
use std::cmp::Ordering;
/// AoC 2019 day 4: count candidate passwords in the `start-end` range that
/// are monotonic and contain a repeated digit (part 1) / an exact double
/// (part 2).
///
/// Fix: the original text was HTML-entity mangled — every `&current`
/// argument had been corrupted to `¤t` (`&curren` is the ¤ entity),
/// which does not compile.
fn main() {
    // Parse the "######-######" argument into two digit vectors.
    let input = env::args().nth(1).expect("Expected argument ######-######")
        .trim().split('-')
        .map(|s| {
            s.chars().map(|c| {
                c.to_digit(10).unwrap_or_else(|| panic!("Invalid digit {}", c))
            }).collect::<Vec<u32>>()
        }).collect::<Vec<Vec<u32>>>();
    let end = &input[1];
    // Start from the first monotonic candidate >= the range start; only
    // monotonic sequences are ever enumerated.
    let mut current = first_monotonic_sequence(&input[0]);
    let mut repeating_count = 0;
    let mut doubled_count = 0;
    while is_sequence_le(&current, end) {
        if is_sequence_repeating(&current) {
            repeating_count += 1;
        }
        if is_sequence_doubled(&current) {
            doubled_count += 1;
        }
        increment_monotonic_sequence(&mut current);
    }
    println!("{} {}", repeating_count, doubled_count);
}
/// Clamp `input` to the first non-decreasing digit sequence that is >= it:
/// once a digit drops below the running maximum, every remaining position
/// is pinned to that maximum.
fn first_monotonic_sequence(input: &[u32]) -> Vec<u32> {
    let mut result = Vec::with_capacity(input.len());
    let mut ceiling = 0;
    let mut still_rising = true;
    for &digit in input {
        if digit < ceiling {
            // From here on the prefix maximum is frozen.
            still_rising = false;
        }
        if still_rising && digit > ceiling {
            ceiling = digit;
        }
        result.push(ceiling);
    }
    result
}
#[test]
/// Covers the already-monotonic case plus drops at each position.
fn test_first_monotonic_sequence() {
    assert_eq!(first_monotonic_sequence(&[1, 2, 3]), &[1, 2, 3]);
    assert_eq!(first_monotonic_sequence(&[1, 0, 3]), &[1, 1, 1]);
    assert_eq!(first_monotonic_sequence(&[1, 2, 0]), &[1, 2, 2]);
    assert_eq!(first_monotonic_sequence(&[2, 6, 4, 7, 9, 3]), &[2, 6, 6, 6, 6, 6]);
}
/// Lexicographic `a <= b` over digit sequences: decided by the first
/// position where they differ; equal (over the zipped length) counts as le.
fn is_sequence_le(a: &[u32], b: &[u32]) -> bool {
    match a.iter().zip(b.iter()).find(|(x, y)| x != y) {
        Some((x, y)) => x < y,
        None => true,
    }
}
#[test]
/// Equal, greater-in-last-digit, greater-in-first-digit, and smaller cases.
fn test_is_sequence_le() {
    assert_eq!(is_sequence_le(&[1, 2, 3], &[1, 2, 3]), true);
    assert_eq!(is_sequence_le(&[1, 2, 3], &[1, 2, 4]), true);
    assert_eq!(is_sequence_le(&[1, 2, 3], &[2, 0, 0]), true);
    assert_eq!(is_sequence_le(&[1, 2, 3], &[1, 2, 2]), false);
}
/// Advance to the next non-decreasing digit sequence in place: bump the
/// rightmost non-9 digit and roll every trailing 9 over to that new value
/// (all-9s wraps to all-0s).
fn increment_monotonic_sequence(sequence: &mut [u32]) {
    // Value the trailing 9s roll over to: one more than the rightmost
    // non-9 digit, or 0 when every digit is 9.
    let replacement = sequence
        .iter()
        .rev()
        .find(|&&digit| digit < 9)
        .map_or(0, |&digit| digit + 1);
    for digit in sequence.iter_mut().rev() {
        if *digit < 9 {
            *digit += 1;
            return;
        }
        *digit = replacement;
    }
}
#[cfg(test)]
/// Test helper: copy `input`, increment it, and compare against `output`.
fn test_increment_monotonic_sequence_helper(input: &[u32], output: &[u32]) {
    let mut copy = Vec::from(input);
    increment_monotonic_sequence(&mut copy);
    assert_eq!(copy, output);
}
#[test]
/// Exercises carries through trailing 9s, including the all-9s wraparound.
fn test_increment_monotonic_sequence() {
    test_increment_monotonic_sequence_helper(&[1, 7, 9], &[1, 8, 8]);
    test_increment_monotonic_sequence_helper(&[1, 8, 8], &[1, 8, 9]);
    test_increment_monotonic_sequence_helper(&[1, 8, 9], &[1, 9, 9]);
    test_increment_monotonic_sequence_helper(&[1, 9, 9], &[2, 2, 2]);
    test_increment_monotonic_sequence_helper(&[8, 9, 9], &[9, 9, 9]);
    test_increment_monotonic_sequence_helper(&[9, 9, 9], &[0, 0, 0]);
}
/// True when any two adjacent digits are equal (the part-1 rule).
///
/// Uses `windows(2)`, which yields nothing for slices shorter than 2 — the
/// old version indexed `sequence[0]` unconditionally and panicked on an
/// empty slice.
fn is_sequence_repeating(sequence: &[u32]) -> bool {
    sequence.windows(2).any(|pair| pair[0] == pair[1])
}
#[test]
/// All-same, leading-pair, and no-repeat cases.
fn test_is_sequence_repeating() {
    assert_eq!(is_sequence_repeating(&[1, 1, 1, 1, 1, 1]), true);
    assert_eq!(is_sequence_repeating(&[2, 2, 3, 4, 5, 0]), true);
    assert_eq!(is_sequence_repeating(&[1, 2, 3, 7, 8, 9]), false);
}
/// True when some digit run has length exactly 2 (the part-2 rule:
/// a double that is not part of a longer group).
///
/// Tracks the current run length directly; the old version indexed
/// `sequence[0]` unconditionally and panicked on an empty slice.
fn is_sequence_doubled(sequence: &[u32]) -> bool {
    let mut run_len = 1usize;
    let mut prev: Option<u32> = None;
    for &digit in sequence {
        match prev {
            Some(p) if p == digit => run_len += 1,
            Some(_) => {
                // Run ended: a run of exactly two qualifies.
                if run_len == 2 {
                    return true;
                }
                run_len = 1;
            }
            None => {}
        }
        prev = Some(digit);
    }
    // Covers a double sitting at the very end of the sequence.
    run_len == 2
}
#[test]
/// Doubles count; a triple alone does not; a double after a longer run does.
fn test_is_sequence_doubled() {
    assert_eq!(is_sequence_doubled(&[1, 1, 2, 2, 3, 3]), true);
    assert_eq!(is_sequence_doubled(&[1, 2, 3, 4, 4, 4]), false);
    assert_eq!(is_sequence_doubled(&[1, 1, 1, 1, 2, 2]), true);
}
#[cfg(test)]
/// Test-only predicate: digits never decrease left to right.
///
/// `windows(2)` yields nothing for slices shorter than 2, so empty and
/// single-digit sequences are trivially monotonic — the old indexed
/// version panicked on an empty slice.
fn is_sequence_monotonic(sequence: &[u32]) -> bool {
    sequence.windows(2).all(|pair| pair[0] <= pair[1])
}
#[test]
/// Flat, decreasing-at-end, and strictly increasing cases.
fn test_is_sequence_monotonic() {
    assert_eq!(is_sequence_monotonic(&[1, 1, 1, 1, 1, 1]), true);
    assert_eq!(is_sequence_monotonic(&[2, 2, 3, 4, 5, 0]), false);
    assert_eq!(is_sequence_monotonic(&[1, 2, 3, 7, 8, 9]), true);
}
|
#[macro_use] extern crate clap;
extern crate ini;
extern crate sha1;
use std::ffi::{OsStr, OsString};
use std::fmt;
use std::fs::{self, File};
use std::io::{self, Read, Write, BufWriter};
use std::iter::FromIterator;
use std::path::{Path, PathBuf};
use ini::Ini;
use ini::ini::Error as IniError;
// Raw 20-byte SHA-1 digest of a file's contents.
type Sha1DigestBytes = [u8; 20];
// Names of the mods enabled for a world (from world.mt).
type ModList = Vec<String>;
// All discovered media files with their content hashes.
type MediaSet = Vec<Asset>;
/// A single media file on disk together with the SHA-1 digest of its contents.
struct Asset {
    path: PathBuf,
    hash: Sha1DigestBytes,
}
impl Asset {
pub fn new(pb: PathBuf, h: Sha1DigestBytes) -> Self {
Asset {
path: pb,
hash: h,
}
}
}
/// Top-level error type: an I/O failure or a `world.mt` (INI) parse failure.
enum Error {
    Io(io::Error),
    Ini(IniError),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::Io(ref e) => write!(f, "IO error: {}", e),
Error::Ini(ref e) => write!(f, "Settings file error: {}", e),
}
}
}
impl From<io::Error> for Error {
fn from(e: io::Error) -> Self { Error::Io(e) }
}
impl From<IniError> for Error {
fn from(e: IniError) -> Self { Error::Ini(e) }
}
/// How assets are transferred into the media output directory.
enum AssetCopyMode {
    Symlink,
    Hardlink,
    Copy,
    // Write no media files at all (index-only run).
    None,
}
/// Render a byte slice as lowercase hex, two digits per byte.
///
/// Writes into one pre-sized `String` instead of allocating an intermediate
/// `format!` String per byte as the previous version did.
fn to_hex(input: &[u8]) -> String {
    use std::fmt::Write;
    let mut out = String::with_capacity(input.len() * 2);
    for byte in input {
        // Writing into a String cannot fail.
        let _ = write!(out, "{:02x}", byte);
    }
    out
}
/// Resolve `path` against the current directory when it is relative;
/// absolute paths are returned unchanged, and if the current directory is
/// unavailable the path is returned as-is (best effort, never fails).
///
/// Replaces the former `and_then(|cd| Ok(..))` / `or_else` / `unwrap`
/// chain with a plain match (`and_then` returning `Ok` is just `map`,
/// and the `or_else(..).unwrap()` pair obscured the infallibility).
fn make_absolute(path: &Path) -> PathBuf {
    if path.is_absolute() {
        return path.to_path_buf();
    }
    match std::env::current_dir() {
        Ok(cwd) => cwd.join(path),
        Err(_) => path.to_path_buf(),
    }
}
/// SHA-1 digest of a file's contents, streamed through an 8 KiB buffer so
/// arbitrarily large files use constant memory.
fn hash_file(path: &Path) -> io::Result<Sha1DigestBytes> {
    let mut file = File::open(&path)?;
    let mut hash = sha1::Sha1::new();
    let mut chunk = [0u8; 8192];
    loop {
        let len = file.read(&mut chunk)?;
        if len == 0 {
            // EOF reached.
            break;
        }
        hash.update(&chunk[..len]);
    }
    Ok(hash.digest().bytes())
}
/// Hash every regular file directly inside `path` and record it in `ms`
/// (non-files such as subdirectories are skipped).
fn search_media_dir(ms: &mut MediaSet, path: &Path) -> io::Result<()> {
    for entry in path.read_dir()? {
        let entry_path = entry?.path();
        if !entry_path.is_file() {
            continue;
        }
        let digest = hash_file(&entry_path)?;
        ms.push(Asset::new(entry_path, digest));
    }
    Ok(())
}
/// Collect media from a single mod directory: only the conventional
/// textures/models/sounds subdirectories are scanned.
fn search_mod_dir(ms: &mut MediaSet, path: &Path) -> io::Result<()> {
    static MEDIA_DIRS: &'static [&'static str] = &["textures", "models", "sounds"];
    for media_path in MEDIA_DIRS.iter().map(|name| path.join(name)) {
        if media_path.is_dir() {
            search_media_dir(ms, &media_path)?;
        }
    }
    Ok(())
}
/// Recursively collect media from every mod under `path`.
/// A child directory containing `modpack.txt` is recursed into as a nested
/// modpack; one containing `init.lua` is treated as a mod and searched,
/// subject to `mods` (the enabled-mod filter; `None` means "take all").
fn search_modpack_dir(ms: &mut MediaSet, path: &Path, mods: Option<&ModList>) -> io::Result<()> {
    for entry in path.read_dir()? {
        let entry_path = entry?.path();
        if !entry_path.is_dir() {
            continue;
        } else if entry_path.join("modpack.txt").exists() {
            search_modpack_dir(ms, entry_path.as_path(), mods)?;
        } else if entry_path.join("init.lua").exists() {
            if let Some(mod_list) = mods {
                // The directory name is the mod name used in world.mt.
                let mod_name = &entry_path.file_name()
                    .expect("Mod directory has no name!")
                    .to_str()
                    .expect("Mod directory name is not valid Unicode")
                    .to_string();
                if !mod_list.contains(mod_name) {
                    continue;
                }
            }
            search_mod_dir(ms, entry_path.as_path())?;
        }
        // Otherwise it's probably a VCS directory or something similar
    }
    Ok(())
}
/// Write the media index to `path`: a 6-byte header (`MTHS` + version 1)
/// followed by the raw SHA-1 digest of every asset, in `ms` order.
fn write_index(ms: &MediaSet, path: &Path) -> io::Result<()> {
    let file = File::create(&path)?;
    let mut writer = BufWriter::new(file);
    writer.write_all(b"MTHS\x00\x01")?;
    for asset in ms {
        writer.write_all(&asset.hash)?;
    }
    // Flush explicitly: BufWriter's Drop impl also flushes, but it silently
    // swallows any I/O error, so a failed write could go unreported.
    writer.flush()
}
/// Populate `path` with one file per asset, named by the hex form of its
/// digest. The transfer strategy (symlink/hardlink/copy/none) is chosen by
/// `mode`; files that already exist are skipped, making reruns cheap.
fn copy_assets(ms: &MediaSet, path: &Path, mode: AssetCopyMode) -> io::Result<()> {
    // Adapter: `fs::copy` returns the copied byte count, but all strategies
    // must share the `io::Result<()>` signature for the match below.
    fn copy_no_result<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> io::Result<()> {
        fs::copy(src, dst).map(|_| ())
    }
    #[cfg(unix)]
    fn symlink_file<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> io::Result<()> {
        std::os::unix::fs::symlink(src, dst)
    }
    #[cfg(windows)]
    fn symlink_file<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> io::Result<()> {
        std::os::windows::fs::symlink_file(src, dst)
    }
    // Fallback so the match always compiles; the CLI never exposes --symlink
    // on such platforms, so this is effectively unreachable.
    #[cfg(not(any(unix, windows)))]
    fn symlink_file<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> io::Result<()> {
        Err(io::Error::new(io::ErrorKind::Other,
                           "Symlinking not supported on this platform!"))
    }
    let copy_func = match mode {
        AssetCopyMode::Symlink => symlink_file,
        AssetCopyMode::Hardlink => fs::hard_link,
        AssetCopyMode::Copy => copy_no_result,
        AssetCopyMode::None => return Ok(()),
    };
    for asset in ms {
        let to_path = path.join(to_hex(&asset.hash));
        if !to_path.exists() {
            copy_func(&asset.path, to_path)?;
        }
    }
    Ok(())
}
/// Collect the names of all mods enabled in the world's `world.mt`
/// (keys of the form `load_mod_<name> = true`).
fn get_mod_list(path: &Path) -> Result<ModList, IniError> {
    const PREFIX: &str = "load_mod_";
    let world_mt = Ini::load_from_file(path.join("world.mt"))?;
    let main_sec = world_mt.general_section();
    let mut list: ModList = vec![];
    for (key, value) in main_sec {
        if value != "true" || !key.starts_with(PREFIX) {
            continue;
        }
        // Everything after the "load_mod_" prefix is the mod name.
        let mod_name = &key[PREFIX.len()..];
        list.push(mod_name.to_string());
    }
    Ok(list)
}
/// Build and parse the command line.
/// Validators only check existence (of the path, or of its parent for
/// to-be-created paths); `--out PATH` is a convenience equivalent to
/// `--index PATH/index.mth --media PATH`.
fn get_args<'a>() -> clap::ArgMatches<'a> {
    use clap::{App, Arg, ArgGroup};
    // True when `p` has an existing parent directory (so `p` is creatable).
    fn check_parent_dir(p: &Path) -> bool {
        if let Some(parent) = p.parent() {
            if parent.is_dir() {
                return true
            }
        }
        false
    }
    // Valid if the directory exists already or can be created.
    fn check_new_dir(s: &OsStr) -> Result<(), OsString> {
        let p = make_absolute(Path::new(&s));
        if p.is_dir() || check_parent_dir(&p) {
            Ok(())
        } else {
            Err("Invalid directory path.".into())
        }
    }
    fn check_existing_dir(s: &OsStr) -> Result<(), OsString> {
        if make_absolute(Path::new(&s)).is_dir() {
            Ok(())
        } else {
            Err("Invalid directory path.".into())
        }
    }
    // Valid if the file exists already or can be created.
    fn check_new_file(s: &OsStr) -> Result<(), OsString> {
        let p = make_absolute(Path::new(&s));
        if p.is_file() || check_parent_dir(&p) {
            Ok(())
        } else {
            Err("Invalid file path.".into())
        }
    }
    let app = clap_app! { @app (app_from_crate!())
        (version_short: "v")
        (@arg mod_paths: [PATHS] ... validator_os(check_existing_dir) "Additional mod paths to search.")
        (@arg world: -w --world <PATH> validator_os(check_existing_dir) "Path to the world directory.")
        (@arg game: -g --game <PATH> validator_os(check_existing_dir) "Path to the game directory.")
        (@group output =>
            (@attributes +multiple +required)
            (@arg out: -o --out [PATH] validator_os(check_new_dir) display_order(1001)
                conflicts_with_all(&["media", "index"])
                "Directory to output media files and index. \
                Convenience for --index PATH/index.mth --media PATH.")
            (@arg media: -m --media [PATH] validator_os(check_new_dir) display_order(1001)
                requires("media_transfer")
                "Directory to output media files.")
            (@arg index: -i --index [PATH] validator_os(check_new_file) display_order(1001)
                "Path to the index file to output."))
        // Group these together with display_order
        (@arg copy: -c --copy display_order(1000) requires("media_out") "Copy assets to output folder.")
        // Symlink added below if applicable
        (@arg hardlink: -l --hardlink display_order(1000) requires("media_out") "Hard link assets to output folder.")
    };
    // Add symlink option if supported
    #[cfg(not(any(unix, windows)))]
    let add_symlink_arg = |app| app;
    #[cfg(any(unix, windows))]
    fn add_symlink_arg<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
        app.arg(Arg::with_name("symlink")
            .short("s")
            .long("symlink")
            .display_order(1000)
            .requires("media_out")
            .help("Symbolically link assets to output folder."))
    }
    // The groups let other args say "requires some media output option" /
    // "requires some transfer mode" without naming each flag.
    let matches = add_symlink_arg(app)
        .group(ArgGroup::with_name("media_out")
            .args(&["out", "media"]))
        .group(ArgGroup::with_name("media_transfer")
            .args(&["copy", "symlink", "hardlink"]))
        .get_matches();
    matches
}
/// Orchestrate the export: resolve output paths from the CLI, gather assets
/// from world/game/extra mod directories, deduplicate by content hash, then
/// write the media files and/or the index.
fn run(args: clap::ArgMatches) -> Result<(), Error> {
    // These unwraps are safe since the values are required
    // and clap will exit if the value is missing.
    let world_opt = args.value_of_os("world").unwrap();
    let world_path = Path::new(&world_opt);
    let game_opt = args.value_of_os("game").unwrap();
    let game_path = Path::new(&game_opt);
    // --media and --index fall back to --out (and --out/index.mth).
    let out_path = args.value_of_os("out").map(|s| PathBuf::from(s));
    let media_path = if let Some(media_opt) = args.value_of_os("media") {
        Some(PathBuf::from(media_opt))
    } else if let Some(ref out_path) = out_path {
        Some(out_path.clone())
    } else {
        None
    };
    let index_path = if let Some(index_opt) = args.value_of_os("index") {
        Some(PathBuf::from(index_opt))
    } else if let Some(ref out_path) = out_path {
        Some(out_path.join("index.mth"))
    } else {
        None
    };
    // The flags are mutually exclusive (clap group), so order is arbitrary.
    let copy_type = if args.is_present("copy") {
        AssetCopyMode::Copy
    } else if args.is_present("symlink") {
        AssetCopyMode::Symlink
    } else if args.is_present("hardlink") {
        AssetCopyMode::Hardlink
    } else {
        AssetCopyMode::None
    };
    let mut ms = MediaSet::new();
    let mods = get_mod_list(world_path)?;
    // Search world mods.
    let worldmods_path = world_path.join("worldmods");
    if worldmods_path.exists() {
        search_modpack_dir(&mut ms, worldmods_path.as_path(), Some(&mods))?;
    }
    // Search game mods.
    // Note: Game mods can not currently be disabled.
    search_modpack_dir(&mut ms, game_path.join("mods").as_path(), None)?;
    if let Some(mod_paths) = args.values_of_os("mod_paths") {
        for mod_path in mod_paths {
            search_modpack_dir(&mut ms,
                               Path::new(&mod_path),
                               Some(&mods))?;
        }
    }
    // Deduplicate list. Otherwise linking will fail and the index will
    // be unnecessarily large.
    ms.sort_by_key(|a| a.hash);
    ms.dedup_by_key(|a| a.hash);
    if let Some(media_path) = media_path {
        if !media_path.exists() {
            fs::create_dir(media_path.as_path())?;
        }
        copy_assets(&ms, media_path.as_path(), copy_type)?;
    }
    if let Some(index_path) = index_path {
        write_index(&ms, index_path.as_path())?;
    }
    Ok(())
}
/// Entry point: run the export and report failures.
///
/// Fix: errors now go to stderr (`eprintln!`) instead of stdout, so piping
/// the tool's normal output does not swallow diagnostics; the exit code is
/// still nonzero on failure.
fn main() {
    if let Err(e) = run(get_args()) {
        eprintln!("{}", e);
        std::process::exit(1)
    }
}
|
// # Mech
// ## Prelude
extern crate mech_core;
extern crate mech_syntax;
extern crate mech_program;
extern crate mech_utilities;
mod repl;
pub use mech_core::{Core, TableIndex, ValueMethods, Change, Transaction, Transformation, hash_string, Block, Table, Value, Error, ErrorType};
pub use mech_core::QuantityMath;
pub use mech_syntax::compiler::Compiler;
pub use mech_syntax::parser::{Parser, Node as ParserNode};
pub use mech_program::{Program, ProgramRunner, RunLoop, ClientMessage};
pub use mech_utilities::{RunLoopMessage, MiniBlock, MechCode, WebsocketMessage};
pub use self::repl::{ReplCommand, parse_repl_command};
extern crate colored;
use colored::*;
extern crate bincode;
use std::io::{Write, BufReader, BufWriter};
use std::fs::{OpenOptions, File, canonicalize, create_dir};
extern crate core;
use std::path::{Path, PathBuf};
use std::io;
use std::io::prelude::*;
extern crate nom;
/// Load Mech code from a mix of sources: `https` URLs are fetched over the
/// network, directories have each entry loaded, and single files are loaded
/// directly. `.blx` files are bincode-encoded miniblocks; `.mec` files are
/// raw source text. Load failures are printed and skipped, not propagated.
pub async fn read_mech_files(mech_paths: &Vec<String>) -> Result<Vec<MechCode>, Box<dyn std::error::Error>> {
    let mut code: Vec<MechCode> = Vec::new();
    // Load one local file, dispatching on extension; anything else is
    // silently ignored.
    let read_file_to_code = |path: &Path| -> Vec<MechCode> {
        let mut code: Vec<MechCode> = Vec::new();
        match (path.to_str(), path.extension()) {
            (Some(name), Some(extension)) => {
                match extension.to_str() {
                    Some("blx") => {
                        match File::open(name) {
                            Ok(file) => {
                                println!("{} {}", "[Loading]".bright_green(), name);
                                let mut reader = BufReader::new(file);
                                match bincode::deserialize_from(&mut reader) {
                                    Ok(miniblocks) => {code.push(MechCode::MiniBlocks(miniblocks));},
                                    Err(err) => {
                                        println!("{} Failed to load {}", "[Error]".bright_red(), name);
                                    },
                                }
                            }
                            Err(err) => {
                                println!("{} Failed to load {}", "[Error]".bright_red(), name);
                            },
                        };
                    }
                    Some("mec") => {
                        match File::open(name) {
                            Ok(mut file) => {
                                println!("{} {}", "[Loading]".bright_green(), name);
                                let mut buffer = String::new();
                                // NOTE(review): this read's Result is ignored —
                                // a failed read would push an empty or partial
                                // string; confirm whether it should be handled.
                                file.read_to_string(&mut buffer);
                                code.push(MechCode::String(buffer));
                            }
                            Err(err) => {
                                println!("{} Failed to load {}", "[Error]".bright_red(), name);
                            },
                        };
                    }
                    _ => (),
                }
            },
            _ => {println!("{} Failed to load {:?}", "[Error]".bright_red(), path);},
        }
        code
    };
    for path_str in mech_paths {
        let path = Path::new(path_str);
        // Compile a .mec file on the web
        if path.to_str().unwrap().starts_with("https") {
            println!("{} {}", "[Downloading]".bright_green(), path.display());
            let program = reqwest::get(path.to_str().unwrap()).await?.text().await?;
            code.push(MechCode::String(program));
        } else {
            // Compile a directory of mech files
            if path.is_dir() {
                for entry in path.read_dir().expect("read_dir call failed") {
                    if let Ok(entry) = entry {
                        let path = entry.path();
                        let mut new_code = read_file_to_code(&path);
                        code.append(&mut new_code);
                    }
                }
            } else if path.is_file() {
                // Compile a single file
                let mut new_code = read_file_to_code(&path);
                code.append(&mut new_code);
            } else {
                println!("{} Failed to open {:?}", "[Error]".bright_red(), path);
            }
        };
    }
    Ok(code)
}
/// Compile a batch of `MechCode` into blocks: source strings go through the
/// full compiler, while pre-compiled `MiniBlocks` are rehydrated directly
/// into `Block`s. Returns all compiled blocks.
pub fn compile_code(code: Vec<MechCode>) -> Vec<Block> {
    print!("{}", "[Compiling] ".bright_green());
    let mut compiler = Compiler::new();
    for c in code {
        match c {
            MechCode::String(c) => {compiler.compile_string(c);},
            MechCode::MiniBlocks(c) => {
                let mut blocks: Vec<Block> = Vec::new();
                for miniblock in c {
                    // NOTE(review): 100 looks like a default capacity/size
                    // argument for Block::new — confirm its meaning.
                    let mut block = Block::new(100);
                    for tfm in miniblock.transformations {
                        block.register_transformations(tfm);
                    }
                    for tfm in miniblock.plan {
                        block.plan.push(tfm);
                    }
                    for error in miniblock.errors {
                        block.errors.insert(error);
                    }
                    block.id = miniblock.id;
                    blocks.push(block);
                }
                compiler.blocks.append(&mut blocks);
            },
        }
    }
    println!("Compiled {} blocks.", compiler.blocks.len());
    compiler.blocks
}
/// Strip blocks down to the serializable `MiniBlock` form: transformations,
/// plan, interned strings and number literals, errors, and the block id.
/// Inverse of the rehydration done in `compile_code`.
pub fn minify_blocks(blocks: &Vec<Block>) -> Vec<MiniBlock> {
    let mut miniblocks = Vec::new();
    for block in blocks {
        let mut miniblock = MiniBlock::new();
        miniblock.transformations = block.transformations.clone();
        miniblock.plan = block.plan.clone();
        for (k,v) in block.store.strings.iter() {
            miniblock.strings.push((k.clone(), v.clone()));
        }
        for (k,v) in block.store.number_literals.iter() {
            miniblock.number_literals.push((k.clone(), v.clone()));
        }
        for error in &block.errors {
            miniblock.errors.push(error.clone());
        }
        miniblock.id = block.id;
        miniblocks.push(miniblock);
    }
    miniblocks
}
// Identifier strings for the gas-usage form fields (presumably HTML/UI
// element ids — confirm against the consuming templates), plus the on-disk
// JSON data file name.
pub static GAS_CUBIC_METERS_ID: &'static str = "id_gas_meters";
pub static GAS_CUBIC_PRICE_ID: &'static str = "id_cubic_meter_price";
pub static GAS_DATE_ID: &'static str = "id_date";
pub static GAS_DATA_JSON_FILE: &'static str = "gas_data.json";
|
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;
/// Hash a byte slice with the standard library's `DefaultHasher`.
/// Deterministic within one program run; the exact value is not stable
/// across Rust releases, so use it only for in-process lookups.
pub fn quick_hash(bytes: &[u8]) -> u64 {
    let mut state = DefaultHasher::default();
    Hasher::write(&mut state, bytes);
    Hasher::finish(&state)
}
// Copyright 2016, NICTA
//
// This software may be distributed and modified according to the terms of
// the BSD 2-Clause license. Note that NO WARRANTY is provided.
// See "LICENSE_BSD2.txt" for details.
//
// @TAG(NICTA_BSD)
//
extern crate bindgen;
use bindgen::{Bindings, BindgenOptions};
use std::default::Default;
use std::fs;
use std::env;
use std::path::{PathBuf, Path};
/**
* This build.rs script is run before the rust source is compiled and we use it to
* generate rust bindings of the camkes.h symbols so that we can call them easier.
*
* The generated file can be found in (Must compile at least once):
* target/{target}/{debug|release}/build/{library or binary name}/out/generated.rs
*
* (note: The camkes.h file that gets used can be found at:
* build/{arm/imx31|or another target}/keyvalue/include/main_object/generated/camkes.h)
*/
/// Build-script entry point: generate `generated.rs` Rust bindings for the
/// camkes.h header so the crate can call those symbols. Panics (failing the
/// build) if the required env vars are missing or generation fails.
fn main() {
    // Setup build and stage paths from global env variables provided by kbuild
    let build_dir = PathBuf::from(&env::var("BUILD_DIR").expect("BUILD_DIR env var"));
    let stage_dir = PathBuf::from(&env::var("STAGE_DIR").expect("STAGE_DIR env var"));
    // Construct bindgen options
    // see: https://github.com/crabtw/rust-bindgen for config options
    let mut bindgen_opts = BindgenOptions::default();
    // Add the staging include directory to resolve #include files
    bindgen_opts.clang_args.push(format!("-I{}", stage_dir.join("include").display()));
    // Add camkes.h file that we generate bindings for
    bindgen_opts.clang_args.push(format!("{}",
                                         build_dir.join("include/main_object/generated/camkes.h")
                                             .display()));
    // Generate bindings for builtins
    bindgen_opts.builtins = true;
    // Generate bindings
    let bindings: Bindings = Bindings::generate(&bindgen_opts, None, None)
        .expect("Generating bindings");
    // Save bindings to generated.rs file in
    // target/{target}/{debug|release}/build/{library or binary name}/out/ directory.
    // This file is then imported into the rust source at compile time.
    let gen = fs::File::create(&Path::new(&env::var("OUT_DIR").expect("OUT_DIR env var"))
            .join("generated.rs"))
        .expect("Create file");
    bindings.write(Box::new(gen)).expect("Writing bindings to file")
}
|
/// Assume one value is less than another value.
///
/// * When true, return `Ok(true)`.
///
/// * Otherwise, return [`Err`] with a message and the values of the
///   expressions with their debug representations.
///
/// # Example
///
/// ```rust
/// # #[macro_use] extern crate assertable; fn main() {
/// let x = assume_lt!(1, 2);
/// //-> Ok(true)
/// # }
/// ```
///
/// ```rust
/// # #[macro_use] extern crate assertable; fn main() {
/// let x = assume_lt!(2, 1);
/// //-> Err("assumption failed: `assume_lt(left, right)`\n left: `2`\n right: `1`")
/// # }
/// ```
///
/// This macro has a second form where a custom message can be provided.
#[macro_export]
macro_rules! assume_lt {
    ($left:expr, $right:expr $(,)?) => ({
        match (&$left, &$right) {
            (left_val, right_val) => {
                if left_val < right_val {
                    Ok(true)
                } else {
                    // Fix: format the already-evaluated references rather than
                    // re-expanding `$left`/`$right`, so each operand expression
                    // is evaluated exactly once (side effects are not doubled).
                    Err(format!("assumption failed: `assume_lt(left, right)`\n left: `{:?}`\n right: `{:?}`", left_val, right_val))
                }
            }
        }
    });
    ($left:expr, $right:expr, $($arg:tt)+) => ({
        match (&($left), &($right)) {
            (left_val, right_val) => {
                if left_val < right_val {
                    Ok(true)
                } else {
                    Err($($arg)+)
                }
            }
        }
    });
}
#[cfg(test)]
mod tests {
    /// Two-argument form: success returns `Ok(true)`.
    #[test]
    fn test_assume_lt_x_arity_2_success() {
        assert_eq!(assume_lt!(1, 2).unwrap(), true);
    }
    /// Two-argument form: failure carries the default formatted message.
    #[test]
    fn test_assume_lt_x_arity_2_failure() {
        let failure = assume_lt!(2, 1);
        assert_eq!(
            failure.unwrap_err(),
            "assumption failed: `assume_lt(left, right)`\n left: `2`\n right: `1`"
        );
    }
    /// Three-argument form: success ignores the custom message.
    #[test]
    fn test_assume_lt_x_arity_3_success() {
        assert_eq!(assume_lt!(1, 2, "message").unwrap(), true);
    }
    /// Three-argument form: failure returns the custom message verbatim.
    #[test]
    fn test_assume_lt_x_arity_3_failure() {
        let failure = assume_lt!(2, 1, "message");
        assert_eq!(failure.unwrap_err(), "message");
    }
}
|
use syntax_base::syntax_kind::SyntaxKindId;
use syntax_base::parser::parser_api::ParserApi;
use crate::simple_lang::syntax::*;
use syntax_base::parser::parser_impl::sink::ParseEventSink;
use syntax_base::syntax_kind_set::SmallSyntaxKindSet;
/// Parse an entire file with `p` and feed the resulting parse events
/// into `sink`, returning whatever the sink builds.
pub fn parse_file<T, S: ParseEventSink<T>>(mut p: ParserApi, sink: S) -> T {
    parse_file_internal(&mut p);
    p.build(sink)
}
/// Parse a sequence of `fun` items until end-of-input, wrapping them in a
/// ROOT node. On anything that is not a `fun` keyword, report an error,
/// attempt recovery, and stop.
///
/// Fix: removed leftover `eprintln!` debug traces that polluted stderr on
/// every parse.
fn parse_file_internal(p: &mut ParserApi) {
    let mut file = p.start();
    while !p.at(SyntaxKindId::END) {
        if !p.at(FUN_KW) {
            p.err_recover("function expected", FUN_KW);
            break
        }
        parse_fun(p);
    }
    file.complete(p, SyntaxKindId::ROOT);
}
/// Parse a single function item: `fun <ID> ( <args> )`.
/// Caller must guarantee the parser is positioned at `FUN_KW`.
fn parse_fun(p: &mut ParserApi) {
    assert!(p.at(FUN_KW));
    let function = p.start();
    // Consume the `fun` keyword.
    p.bump();
    // NOTE(review): nothing here consumes the ID token itself when present —
    // presumably `parse_args`/`err_recover` handle positioning; confirm
    // against ParserApi semantics.
    if !p.at(ID) {
        p.err_recover("identifier expected", LPAR)
    }
    if !p.at(LPAR) {
        // NOTE(review): recovery with an empty error message — looks like a
        // placeholder; confirm whether a "`(` expected" message was intended.
        p.err_recover("", RPAR)
    }
    parse_args(p);
    function.complete(p, FUNCTION);
    // TODO body
}
/// Parse a parenthesised, comma-separated argument list into an ARGS node.
/// Caller must guarantee the parser is positioned at `LPAR`.
fn parse_args(p: &mut ParserApi) {
    assert!(p.at(LPAR));
    let args = p.start();
    p.leaf(LPAR);
    // NOTE(review): termination relies on `p.leaf(ID)` always advancing the
    // parser (even on a non-ID token); if it can fail without consuming,
    // this loop never ends on malformed input — confirm.
    while !p.at(RPAR) {
        p.leaf(ID);
        if p.at(COMMA) {
            p.leaf(COMMA);
        }
    }
    p.leaf(RPAR);
    args.complete(p, ARGS);
}
#[cfg(test)]
mod tests {
    use crate::simple_lang::SimpleLangSyntax;
    use syntax_base::syntax::SyntaxDefinition;
    use std::path::PathBuf;
    use crate::test_support::ParserTest;
    /// Data-driven parser test over fixture "1" (`1.simple` under BASE_PATH).
    #[test]
    fn test1() {
        by_file("1")
    }
    // Fixture files live at BASE_PATH/<name>.<EXT>.
    const EXT: &str = "simple";
    const BASE_PATH: &str = "./tests/data/simple/parser";
    /// Run the simple-lang lexer + parser over the named fixture via the
    /// shared `ParserTest` harness (presumably comparing against a stored
    /// expected tree — see test_support).
    fn by_file(name: &str) {
        let syntax = SimpleLangSyntax {};
        let lexer = syntax.lexer().unwrap();
        let parser = syntax.parser().unwrap();
        let test = ParserTest::new(
            &syntax,
            lexer.as_ref(),
            parser.as_ref(),
            EXT.to_string(),
            PathBuf::from(BASE_PATH)
        );
        test.test(name);
    }
}
|
use chrono::{DateTime, Utc};
/// Temperature reading (units not specified in this file — confirm with producer).
pub type Temperature = f64;
/// Humidity reading.
pub type Humidity = f64;
/// Pressure reading.
pub type Pressure = f64;
/// A single weather observation at a point in time.
#[derive(Debug)]
pub struct WeatherEvent {
    pub time: DateTime<Utc>,
    pub temperature: Temperature,
    pub humidity: Humidity,
    pub pressure: Pressure
}
impl super::ToInfluxDB for WeatherEvent {
    /// Render the event as an InfluxDB line-protocol record.
    ///
    /// NOTE(review): `timestamp()` yields seconds, while line protocol
    /// defaults to nanosecond precision — confirm the writer sets
    /// `precision=s` when submitting.
    fn to_line(&self) -> String {
        format!("weather temperature={},humidity={},pressure={} {}",
                self.temperature,
                self.humidity,
                self.pressure,
                self.time.timestamp()
        )
    }
}
|
use std::io::{stdin, Read, StdinLock};
use std::str::FromStr;
/// Whitespace-delimited token scanner over locked stdin.
#[allow(dead_code)]
struct Scanner<'a> {
    cin: StdinLock<'a>,
}
#[allow(dead_code)]
impl<'a> Scanner<'a> {
    /// Wrap an already-locked stdin handle.
    fn new(cin: StdinLock<'a>) -> Scanner<'a> {
        Scanner { cin }
    }
    /// Read the next whitespace-delimited token and parse it as `T`;
    /// `None` if parsing fails (including on an empty/exhausted token).
    fn read<T: FromStr>(&mut self) -> Option<T> {
        let word: String = self
            .cin
            .by_ref()
            .bytes()
            .map(|b| b.unwrap() as char)
            .skip_while(|ch| ch.is_whitespace())
            .take_while(|ch| !ch.is_whitespace())
            .collect();
        word.parse().ok()
    }
    /// Read one token, panicking if it cannot be parsed as `T`.
    fn input<T: FromStr>(&mut self) -> T {
        self.read().unwrap()
    }
    /// Read `len` tokens into a vector.
    fn vec<T: FromStr>(&mut self, len: usize) -> Vec<T> {
        (0..len).map(|_| self.input()).collect()
    }
    /// Read a `row` x `col` matrix of tokens.
    fn mat<T: FromStr>(&mut self, row: usize, col: usize) -> Vec<Vec<T>> {
        (0..row).map(|_| self.vec(col)).collect()
    }
}
/// Count room assignments for n people with constraint k, modulo 1e9+7,
/// reading `n` and `k` from stdin and printing the answer.
fn main() {
    let cin = stdin();
    let cin = cin.lock();
    let mut sc = Scanner::new(cin);
    const MOD: usize = 1_000_000_007;
    let n: usize = sc.input();
    let k: usize = sc.input();
    // Table size 1_400_001 must exceed every `n + k - 1` passed to hcom.
    let com = Combination::new(1_400_001, MOD);
    if k >= n {
        // Any arrangement is possible.
        let ans: usize = com.hcom(n, n);
        println!("{}", ans);
    } else if k == 1 {
        println!("{}", (n * (n - 1)) % MOD);
    } else {
        // If m (0..=k) rooms end up empty, those m people scatter across the
        // remaining n - m rooms.
        let mut ans = 0usize;
        for i in 0..k + 1 {
            let room = n - i;
            // Each term is < MOD (~1e9), so the running sum stays far below
            // usize::MAX on 64-bit; reduced once at the end.
            ans += com.com(n, i) * com.hcom(room, i) % MOD;
        }
        println!("{}", ans % MOD);
    }
}
/// Precomputed factorial tables for binomial coefficients modulo a prime.
struct Combination {
    /// Exclusive upper bound on table indices (kept for API compatibility;
    /// not read by the methods below).
    max_n: usize,
    /// Prime modulus used for all arithmetic.
    modulo: usize,
    /// fac[i] = i! mod modulo.
    fac: Vec<usize>,
    /// finv[i] = (i!)^-1 mod modulo.
    finv: Vec<usize>,
    /// inv[i] = i^-1 mod modulo.
    inv: Vec<usize>,
}
impl Combination {
    /// Build tables supporting arguments up to `max_n - 1`.
    ///
    /// Requires `max_n >= 2` (indices 0 and 1 are seeded unconditionally)
    /// and a prime `modulo` (the inverse recurrence assumes primality).
    fn new(max_n: usize, modulo: usize) -> Combination {
        let mut fac = vec![0; max_n];
        let mut finv = vec![0; max_n];
        let mut inv = vec![0; max_n];
        fac[0] = 1;
        fac[1] = 1;
        finv[0] = 1;
        finv[1] = 1;
        inv[1] = 1;
        for i in 2..max_n {
            fac[i] = fac[i - 1] * i % modulo;
            inv[i] = modulo - inv[modulo % i] * (modulo / i) % modulo;
            finv[i] = finv[i - 1] * inv[i] % modulo;
        }
        Combination {
            max_n: max_n,
            modulo: modulo,
            fac: fac,
            finv: finv,
            inv: inv,
        }
    }
    /// nCk mod modulo; 0 when k > n. Panics if `n >= max_n`.
    ///
    /// Fix: the original also tested `n < 0 || k < 0`, which is always false
    /// for `usize` (dead code flagged by clippy::absurd_extreme_comparisons)
    /// and has been removed.
    fn com(&self, n: usize, k: usize) -> usize {
        if n < k {
            0
        } else {
            self.fac[n] * (self.finv[k] * self.finv[n - k] % self.modulo) % self.modulo
        }
    }
    /// Multiset coefficient nHk = C(n + k - 1, k); defined as 1 for n = k = 0.
    fn hcom(&self, n: usize, k: usize) -> usize {
        if n == 0 && k == 0 {
            1
        } else {
            self.com(n + k - 1, k)
        }
    }
}
|
mod jobkorea;
mod posting;
mod saramin;
use reqwest;
use std::env;
use std::error::Error;
use tokio::time::{sleep, Duration};
#[tokio::main]
/// Poll the Saramin and JobKorea job boards forever, optionally posting
/// updates to a Mastodon instance.
async fn main() -> Result<(), Box<dyn Error>> {
    // Mastodon posting is enabled merely by MASTODON_ENABLE being set.
    let mstdn_enable = env::var("MASTODON_ENABLE").is_ok();
    let mstdn = if mstdn_enable {
        println!("[log] MASTODON ENABLE");
        Some(posting::mastodon::Mastodon::new(
            env::var("MASTODON_URL")?,
            env::var("MASTODON_BEARER_TOKEN")?,
        ))
    } else {
        println!("[log] MASTODON DISABLE");
        None
    };
    let http_client = reqwest::Client::new();
    // Seed the latest-seen ids — presumably so the first cycle only reports
    // postings newer than startup; confirm in saramin/jobkorea::init.
    let mut latest_saramin_id = saramin::init(&http_client).await?;
    let mut latest_jobkorea_id = jobkorea::init(&http_client).await?;
    // Poll both boards every 2 minutes, forever.
    loop {
        println!("[log] latest_saramin_id={}", latest_saramin_id);
        println!("[log] latest_jobkorea_id={}", latest_jobkorea_id);
        sleep(Duration::from_millis(120_000)).await;
        latest_saramin_id = saramin::cycle(latest_saramin_id, &http_client, &mstdn).await?;
        latest_jobkorea_id = jobkorea::cycle(latest_jobkorea_id, &http_client, &mstdn).await?;
    }
}
|
/// A doubly-linked node addressed by index into a backing vector.
struct Node {
    value: usize,
    next: usize,
    previous: usize,
}
/// Index-based circular doubly-linked list backed by a `Vec`, recycling
/// removed slots through a free list so no memory is ever deallocated.
struct CircularLinkedList {
    free: Vec<usize>,
    used: Vec<Node>,
    current_index: usize,
}
impl CircularLinkedList {
    /// Create a one-element circular list containing `initial`, reserving
    /// room for `capacity` nodes up front.
    fn new(initial: usize, capacity: usize) -> CircularLinkedList {
        let mut nodes = Vec::with_capacity(capacity);
        // The single node links to itself in both directions.
        nodes.push(Node { value: initial, previous: 0, next: 0 });
        CircularLinkedList { free: Vec::new(), used: nodes, current_index: 0 }
    }
    /// Return the index of a reusable slot, growing the arena when the
    /// free list is empty.
    fn get_free_node(&mut self) -> usize {
        if let Some(slot) = self.free.pop() {
            slot
        } else {
            self.used.push(Node { value: 0, previous: 0, next: 0 });
            self.used.len() - 1
        }
    }
    /// Splice `value` in immediately after the current node and make the
    /// new node current.
    fn insert(&mut self, value: usize) {
        let here = self.current_index;
        let after = self.used[here].next;
        let slot = self.get_free_node();
        // here -> slot
        self.used[here].next = slot;
        // fill in the new node: here <- slot -> after
        {
            let node = &mut self.used[slot];
            node.previous = here;
            node.next = after;
            node.value = value;
        }
        // slot <- after
        self.used[after].previous = slot;
        self.current_index = slot;
    }
    /// Unlink the current node and return its value; the following node
    /// becomes current and the slot goes onto the free list.
    fn remove(&mut self) -> usize {
        let here = self.current_index;
        let (previous, next, value) = {
            let node = &self.used[here];
            (node.previous, node.next, node.value)
        };
        self.used[previous].next = next;
        self.used[next].previous = previous;
        self.free.push(here);
        self.current_index = next;
        value
    }
    /// Advance the current pointer `count` steps clockwise.
    fn forward(&mut self, count: usize) {
        for _ in 0..count {
            let here = self.current_index;
            self.current_index = self.used[here].next;
        }
    }
    /// Move the current pointer `count` steps counter-clockwise.
    fn back(&mut self, count: usize) {
        for _ in 0..count {
            let here = self.current_index;
            self.current_index = self.used[here].previous;
        }
    }
}
/// Part 1: winning score for the puzzle input (459 players, last marble 71790).
pub fn part1() -> usize {
    calculate_score_fast(459, 71790)
}
/// Part 2: same input with the last marble value 100 times larger.
pub fn part2() -> usize {
    calculate_score_fast(459, 71790 * 100)
}
/// Naive O(n²) marble-game simulation on a flat `Vec` board.
/// Kept for reference; `calculate_score_fast` is the production path.
#[allow(dead_code)]
fn calculate_score(players: usize, last_marble: usize) -> usize {
    let mut scores: Vec<usize> = vec![0; players];
    let mut board = vec![0usize];
    let mut current = 0usize;
    for marble in 1..=last_marble {
        if marble % 23 != 0 {
            // Normal placement: two positions clockwise from current.
            current = (current + 2) % board.len();
            board.insert(current, marble);
        } else {
            // Scoring marble: keep it, plus the marble 7 counter-clockwise.
            current = (board.len() + current - 7) % board.len();
            scores[(marble - 1) % players] += marble + board.remove(current);
        }
    }
    scores.into_iter().max().unwrap()
}
/// O(n) marble-game simulation using the index-based circular list,
/// avoiding the `Vec::insert`/`remove` shifting of `calculate_score`.
fn calculate_score_fast(players: usize, last_marble: usize) -> usize {
    let mut scores: Vec<usize> = vec![0; players];
    let mut circle = CircularLinkedList::new(0, last_marble);
    for marble in 1..=last_marble {
        if marble % 23 != 0 {
            // Normal placement: step once clockwise, then insert after.
            circle.forward(1);
            circle.insert(marble);
        } else {
            // Scoring marble: current player keeps it plus the marble
            // seven positions counter-clockwise.
            circle.back(7);
            scores[(marble - 1) % players] += marble + circle.remove();
        }
    }
    scores.into_iter().max().unwrap()
}
#[cfg(test)]
mod tests {
    use super::*;
    use test::Bencher;
    /// Benchmark the full part-1 input.
    #[bench]
    fn part1_bench(b: &mut Bencher) {
        b.iter(part1);
    }
    /// Benchmark the 100x-longer part-2 input.
    #[bench]
    fn part2_bench(b: &mut Bencher) {
        b.iter(part2);
    }
    // Sample games with known winning scores.
    #[test]
    fn part1_example1() {
        assert_eq!(calculate_score_fast(9, 25), 32);
    }
    #[test]
    fn part1_example2() {
        assert_eq!(calculate_score_fast(10, 1618), 8317);
    }
    #[test]
    fn part1_example3() {
        assert_eq!(calculate_score_fast(13, 7999), 146373);
    }
    #[test]
    fn part1_example4() {
        assert_eq!(calculate_score_fast(17, 1104), 2764);
    }
    #[test]
    fn part1_example5() {
        assert_eq!(calculate_score_fast(21, 6111), 54718);
    }
    #[test]
    fn part1_example6() {
        assert_eq!(calculate_score_fast(30, 5807), 37305);
    }
}
|
#![allow(non_snake_case)]
#![deny(warnings)]
#[deny(unused_imports)]
use time;
mod config;
mod queue;
mod convert;
mod analyze;
mod analyze_result;
mod dbase;
pub use analyze_result::AnalyzeResult;
use analyze::read_analyze_dir;
pub use config::Config;
pub use queue::Queue;
pub use convert::Convert;
use dbase::DBase;
fn main() {
    let start = time::now(); // capture start time for the duration report
    // Load configuration; clone keeps `c` usable after the database
    // settings are moved into DBase.
    let c = Config::new("config.json");
    let cc = c.clone();
    let db = DBase::new(cc.database);
    // Walk and analyze the configured directory tree, optionally
    // multi-threaded.
    read_analyze_dir(&c.root_dir, db, c.multi_thread);
    let end = time::now(); // capture end time
    println!(
        "done!start : {:?},end :{:?},duration:{:?}",
        start,
        end,
        end - start
    );
}
|
//! Shader form.
use crate::input::{Light, Samples, Shader, Shadow, SkyBuilder};
use arctk::{access, err::Error, file::Build};
use arctk_attr::input;
use std::path::Path;
/// Shader settings.
///
/// NOTE(review): `#[input]` is an arctk-attr proc-macro — presumably derives
/// deserialisation for input files; confirm against arctk_attr docs.
#[input]
pub struct ShaderBuilder {
    /// Sky builder.
    sky: SkyBuilder,
    /// Lighting samples.
    samples: Samples,
    /// Lighting settings.
    light: Light,
    /// Shadowing settings.
    shadow: Shadow,
}
impl ShaderBuilder {
    // `access!` is an arctk macro generating an accessor per field —
    // presumably a by-reference getter named after the field; confirm in
    // arctk's docs.
    access!(sky, SkyBuilder);
    access!(samples, Samples);
    access!(light, Light);
    access!(shadow, Shadow);
    /// Construct a new instance.
    #[inline]
    #[must_use]
    pub const fn new(sky: SkyBuilder, samples: Samples, light: Light, shadow: Shadow) -> Self {
        Self {
            sky,
            samples,
            light,
            shadow,
        }
    }
}
impl Build for ShaderBuilder {
    type Inst = Shader;
    /// Build the runtime `Shader`. Only the sky builder needs `in_dir`
    /// (to resolve its input files); the remaining settings are moved
    /// across unchanged.
    #[inline]
    fn build(self, in_dir: &Path) -> Result<Self::Inst, Error> {
        Ok(Self::Inst::new(
            self.sky.build(in_dir)?,
            self.samples,
            self.light,
            self.shadow,
        ))
    }
}
|
#[cfg(not(feature = "geoip"))]
mod default_geoip {
    use protocol::FlagCode;
    use std::net::IpAddr;
    /// An empty lookup that always returns None
    ///
    /// Stand-in compiled when the `geoip` feature is disabled so callers can
    /// use `locate` unconditionally with the same signature.
    pub fn locate(_: &IpAddr) -> Option<FlagCode> {
        None
    }
}
#[cfg(feature = "geoip")]
mod full_geoip {
    extern crate geolocate_ip;
    use protocol::FlagCode;
    use std::net::IpAddr;
    /// Look up ISO-2 country code
    ///
    /// Returns `None` when the IP is not in the database or the code is
    /// unknown. IPv6 lookups are not supported yet.
    pub fn locate(addr: &IpAddr) -> Option<FlagCode> {
        match *addr {
            // Idiom: chain the two Option-producing steps instead of a
            // nested `match Some/None` ladder.
            IpAddr::V4(a) => geolocate_ip::lookup_ip(&a).and_then(FlagCode::from_str),
            // IP lookups not done for Ipv6 addresses yet
            IpAddr::V6(_) => None,
        }
    }
}
#[cfg(feature = "geoip")]
pub use self::full_geoip::*;
#[cfg(not(feature = "geoip"))]
pub use self::default_geoip::*;
|
// NOTE(review): svd2rust-generated register block — field order and offsets
// mirror the hardware register map; do not edit by hand.
#[doc = r"Register block"]
#[repr(C)]
pub struct CH {
    #[doc = "0x00 - AConfiguration register 1"]
    pub cr1: CR1,
    #[doc = "0x04 - AConfiguration register 2"]
    pub cr2: CR2,
    #[doc = "0x08 - AFRCR"]
    pub frcr: FRCR,
    #[doc = "0x0c - ASlot register"]
    pub slotr: SLOTR,
    #[doc = "0x10 - AInterrupt mask register2"]
    pub im: IM,
    #[doc = "0x14 - AStatus register"]
    pub sr: SR,
    #[doc = "0x18 - AClear flag register"]
    pub clrfr: CLRFR,
    #[doc = "0x1c - AData register"]
    pub dr: DR,
}
// NOTE(review): svd2rust-generated register accessor aliases and submodules —
// regenerate from the SVD rather than editing by hand.
#[doc = "CR1 (rw) register accessor: AConfiguration register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr1`]
module"]
pub type CR1 = crate::Reg<cr1::CR1_SPEC>;
#[doc = "AConfiguration register 1"]
pub mod cr1;
#[doc = "CR2 (rw) register accessor: AConfiguration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr2`]
module"]
pub type CR2 = crate::Reg<cr2::CR2_SPEC>;
#[doc = "AConfiguration register 2"]
pub mod cr2;
#[doc = "FRCR (rw) register accessor: AFRCR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`frcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`frcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`frcr`]
module"]
pub type FRCR = crate::Reg<frcr::FRCR_SPEC>;
#[doc = "AFRCR"]
pub mod frcr;
#[doc = "SLOTR (rw) register accessor: ASlot register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`slotr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`slotr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`slotr`]
module"]
pub type SLOTR = crate::Reg<slotr::SLOTR_SPEC>;
#[doc = "ASlot register"]
pub mod slotr;
#[doc = "IM (rw) register accessor: AInterrupt mask register2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`im::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`im::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`im`]
module"]
pub type IM = crate::Reg<im::IM_SPEC>;
#[doc = "AInterrupt mask register2"]
pub mod im;
#[doc = "SR (r) register accessor: AStatus register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`sr`]
module"]
pub type SR = crate::Reg<sr::SR_SPEC>;
#[doc = "AStatus register"]
pub mod sr;
#[doc = "CLRFR (w) register accessor: AClear flag register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`clrfr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`clrfr`]
module"]
pub type CLRFR = crate::Reg<clrfr::CLRFR_SPEC>;
#[doc = "AClear flag register"]
pub mod clrfr;
#[doc = "DR (rw) register accessor: AData register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dr`]
module"]
pub type DR = crate::Reg<dr::DR_SPEC>;
#[doc = "AData register"]
pub mod dr;
|
use super::*;
#[test]
fn test_controlled_get_from() -> Result<(), Error> {
    // Reading the attribute from an empty message must fail with "not found".
    let mut m = Message::new();
    let mut c = AttrControlled(4321);
    let result = c.get_from(&m);
    if let Err(err) = result {
        assert_eq!(err, *ERR_ATTRIBUTE_NOT_FOUND, "unexpected error");
    } else {
        panic!("expected error, but got ok");
    }
    // Round-trip: build a message carrying the attribute, re-parse it from
    // the raw bytes, and read the attribute back out.
    m.build(&[Box::new(BINDING_REQUEST), Box::new(c)])?;
    let mut m1 = Message::new();
    m1.write(&m.raw)?;
    let mut c1 = AttrControlled::default();
    c1.get_from(&m1)?;
    assert_eq!(c1, c, "not equal");
    //"IncorrectSize"
    {
        // A 100-byte value is over-sized for this attribute and must be
        // rejected with a size error.
        let mut m3 = Message::new();
        m3.add(ATTR_ICE_CONTROLLED, &[0; 100]);
        let mut c2 = AttrControlled::default();
        let result = c2.get_from(&m3);
        if let Err(err) = result {
            assert!(is_attr_size_invalid(&err), "should error");
        } else {
            panic!("expected error, but got ok");
        }
    }
    Ok(())
}
#[test]
fn test_controlling_get_from() -> Result<(), Error> {
    // Reading the attribute from an empty message must fail with "not found".
    let mut m = Message::new();
    let mut c = AttrControlling(4321);
    let result = c.get_from(&m);
    if let Err(err) = result {
        assert_eq!(err, *ERR_ATTRIBUTE_NOT_FOUND, "unexpected error");
    } else {
        panic!("expected error, but got ok");
    }
    // Round-trip: build a message carrying the attribute, re-parse it from
    // the raw bytes, and read the attribute back out.
    m.build(&[Box::new(BINDING_REQUEST), Box::new(c)])?;
    let mut m1 = Message::new();
    m1.write(&m.raw)?;
    let mut c1 = AttrControlling::default();
    c1.get_from(&m1)?;
    assert_eq!(c1, c, "not equal");
    //"IncorrectSize"
    {
        // A 100-byte value is over-sized for this attribute and must be
        // rejected with a size error.
        let mut m3 = Message::new();
        m3.add(ATTR_ICE_CONTROLLING, &[0; 100]);
        let mut c2 = AttrControlling::default();
        let result = c2.get_from(&m3);
        if let Err(err) = result {
            assert!(is_attr_size_invalid(&err), "should error");
        } else {
            panic!("expected error, but got ok");
        }
    }
    Ok(())
}
#[test]
fn test_control_get_from() -> Result<(), Error> {
    //"Blank"
    {
        // No attribute present at all -> "attribute not found".
        let m = Message::new();
        let mut c = AttrControl::default();
        let result = c.get_from(&m);
        if let Err(err) = result {
            assert_eq!(err, *ERR_ATTRIBUTE_NOT_FOUND, "unexpected error");
        } else {
            panic!("expected error, but got ok");
        }
    }
    //"Controlling"
    {
        let mut m = Message::new();
        let mut c = AttrControl::default();
        let result = c.get_from(&m);
        if let Err(err) = result {
            assert_eq!(err, *ERR_ATTRIBUTE_NOT_FOUND, "unexpected error");
        } else {
            panic!("expected error, but got ok");
        }
        // Round-trip a controlling attribute through raw bytes.
        c.role = Role::Controlling;
        c.tie_breaker = TieBreaker(4321);
        m.build(&[Box::new(BINDING_REQUEST), Box::new(c)])?;
        let mut m1 = Message::new();
        m1.write(&m.raw)?;
        let mut c1 = AttrControl::default();
        c1.get_from(&m1)?;
        assert_eq!(c1, c, "not equal");
        //"IncorrectSize"
        {
            // Over-sized attribute value must be rejected with a size error.
            let mut m3 = Message::new();
            m3.add(ATTR_ICE_CONTROLLING, &[0; 100]);
            let mut c2 = AttrControl::default();
            let result = c2.get_from(&m3);
            if let Err(err) = result {
                assert!(is_attr_size_invalid(&err), "should error");
            } else {
                panic!("expected error, but got ok");
            }
        }
    }
    //"Controlled"
    {
        let mut m = Message::new();
        let mut c = AttrControl::default();
        let result = c.get_from(&m);
        if let Err(err) = result {
            assert_eq!(err, *ERR_ATTRIBUTE_NOT_FOUND, "unexpected error");
        } else {
            panic!("expected error, but got ok");
        }
        // Round-trip a controlled attribute through raw bytes.
        c.role = Role::Controlled;
        c.tie_breaker = TieBreaker(1234);
        m.build(&[Box::new(BINDING_REQUEST), Box::new(c)])?;
        let mut m1 = Message::new();
        m1.write(&m.raw)?;
        let mut c1 = AttrControl::default();
        c1.get_from(&m1)?;
        assert_eq!(c1, c, "not equal");
        //"IncorrectSize"
        {
            let mut m3 = Message::new();
            // NOTE(review): this "Controlled" branch adds ATTR_ICE_CONTROLLING,
            // not ATTR_ICE_CONTROLLED — looks copy-pasted from the branch
            // above; confirm which attribute was intended.
            m3.add(ATTR_ICE_CONTROLLING, &[0; 100]);
            let mut c2 = AttrControl::default();
            let result = c2.get_from(&m3);
            if let Err(err) = result {
                assert!(is_attr_size_invalid(&err), "should error");
            } else {
                panic!("expected error, but got ok");
            }
        }
    }
    Ok(())
}
|
/// Entry point: prints worked examples of every scalar type, then the two
/// compound types (tuple and array). Output order is fixed by these calls.
fn main() {
    println!("\n============== Scalar Types ============\n");
    int_num();
    float_num();
    numeric_operations();
    bool_type();
    char_type();
    println!("\n============== Compound Types ============\n");
    compound_types_tuple();
    compound_types_array();
}
/// Print the same set of integers written in each literal notation
/// Rust supports (decimal, hex, octal, binary, byte).
fn int_num() {
    println!("******** Integer Types ********");
    let decimal = 98_300;
    let hex = 0xf1;
    let octal = 0o72;
    let binary = 0b1111_0000;
    let byte = b'&';
    println!("Decimal => {}", decimal); // 98300
    println!("Hex => {}", hex); // 241
    println!("Octal => {}", octal); // 58
    println!("Binary => {}", binary); // 240
    println!("Byte => {}", byte); // 38
}
/// Print a default-typed (f64) and an explicitly `f32` float.
fn float_num() {
    println!("******** Floating-Point Types ********");
    let default_float = 2.0; // f64 unless annotated
    let narrow: f32 = 3.0;
    println!("x => {}, y => {}", default_float, narrow);
}
/// Demonstrate the five arithmetic operators on integer and float operands.
fn numeric_operations() {
    println!("******** Numeric Operations ********");
    // Integer arithmetic: addition, multiplication, remainder.
    let (sum, product, remainder) = (5 + 10, 4 * 9, 49 % 3);
    // Float arithmetic: subtraction, division.
    let (difference, quotient) = (34.1 - 4.2, 25.6 / 3.4);
    println!("sum => {}; difference => {}; product => {}", sum, difference, product);
    println!("quotient => {}; remainder => {}", quotient, remainder);
}
/// Print an inferred and an explicitly-annotated boolean.
fn bool_type() {
    println!("******** Boolean Type ********");
    let inferred = true;
    let annotated: bool = false;
    println!("t => {}; f => {}", inferred, annotated);
}
/// Print `char` values: ASCII letter, symbol, and a multi-byte emoji
/// (a `char` is any Unicode scalar value).
fn char_type() {
    println!("******** Character Type ********");
    let letter = 'A';
    let symbol = '#';
    let emoji = '😝';
    println!("a => {}; b => {}; c => {}", letter, symbol, emoji);
}
/// Show tuple construction and both access styles: field index (`tup.0`)
/// and debug formatting of the whole tuple.
fn compound_types_tuple() {
    println!("******** Tuple Type ********");
    let tup: (i32, bool, f32, isize) = (300, true, 3.9, 10);
    println!("tup => {:#?}", tup);
    println!("a => {}", tup.0);
    println!("tup.2 => {}", tup.2);
}
/// Show fixed-size arrays: inferred and explicitly-typed, plus indexing.
fn compound_types_array() {
    println!("******** Array Type ********");
    let ints = [1, 2, 3, 4, 5];
    let floats: [f32; 3] = [1.2, 2.004, 4.3];
    println!("a => {:?}; b => {:?}", ints, floats);
    println!("a[1] => {}", ints[1]);
    // Indexing past the end (e.g. ints[5]) panics:
    // index out of bounds: the len is 5 but the index is 5
}
use std::{sync::mpsc, thread, time::Duration};
/// Send three strings from a spawned thread over an mpsc channel, one per
/// second, and print each as it arrives on the main thread.
fn main() {
    let (tx, rx) = mpsc::channel();
    // `move` transfers ownership of captured variables (here `tx`) into
    // the spawned thread's closure.
    let sender = thread::spawn(move || {
        let messages = vec![
            String::from("data"),
            String::from("data2"),
            String::from("data3"),
        ];
        for message in messages {
            tx.send(message).unwrap();
            thread::sleep(Duration::from_secs(1));
        }
        // `tx` is dropped here, which ends the receiver loop below.
    });
    for received in rx {
        println!("Got: {}", received);
    }
    sender.join().unwrap();
}
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::mem;
use std::rt::task::BlockedTask;
use std::time::Duration;
use libc;
use homing::HomingMissile;
use {access, uvll, raw, UvError, UvResult, EventLoop};
use raw::{Handle, Request};
/// Management of a timeout when gaining access to a portion of a duplex stream.
pub struct AccessTimeout<T> {
    inner: Box<Inner<T>>, // stored in a box to get a stable address
}
/// State shared between the owner and the raw timer callback, which keeps a
/// pointer to this struct (see `set_timeout`) — hence the stable boxed address.
struct Inner<T> {
    // Where we are in the timeout lifecycle.
    state: State,
    // Lazily-created timer, reused across successive timeouts.
    timer: Option<raw::Timer>,
    // User callback + payload invoked when the timeout interrupts a request
    // that had already been granted access.
    user_unblock: Option<fn(uint) -> Option<BlockedTask>>,
    user_payload: uint,
    // Queue granting exclusive access to one half of the stream.
    access: access::Access<T>,
}
/// RAII guard for granted access; its `Drop` downgrades a RequestPending
/// waiter back to NoWaiter.
pub struct Guard<'a, T: 'a> {
    state: &'a mut State,
    pub access: access::Guard<'a, T>,
    pub can_timeout: bool,
}
/// Timeout lifecycle: not set, armed (with waiter info), or already fired.
#[deriving(PartialEq)]
enum State {
    NoTimeout,
    TimeoutPending(Client),
    TimedOut,
}
/// Who, if anyone, an armed timeout would need to wake up when it fires.
#[deriving(PartialEq)]
enum Client {
    NoWaiter,
    AccessPending,
    RequestPending,
}
impl<T: Send> AccessTimeout<T> {
    /// Create a new access/timeout pair around `data` with no timeout
    /// armed and no timer allocated yet.
    pub fn new(data: T) -> AccessTimeout<T> {
        AccessTimeout {
            inner: box Inner {
                state: State::NoTimeout,
                timer: None,
                user_unblock: None,
                user_payload: 0,
                access: access::Access::new(data),
            },
        }
    }
    /// Grants access to half of a duplex stream, timing out if necessary.
    ///
    /// On success, Ok(Guard) is returned and access has been granted to the
    /// stream. If a timeout occurs, then Err is returned with an appropriate
    /// error.
    pub fn grant<'a>(&'a mut self, m: HomingMissile) -> UvResult<Guard<'a, T>> {
        // First, flag that we're attempting to acquire access. This will allow
        // us to cancel the pending grant if we timeout out while waiting for a
        // grant.
        let inner = &mut *self.inner;
        match inner.state {
            State::NoTimeout => {},
            State::TimeoutPending(ref mut client) => {
                *client = Client::AccessPending;
            }
            State::TimedOut => return Err(UvError(uvll::ECANCELED))
        }
        // The address of `inner` doubles as the dequeue token matched by
        // `timer_cb` below.
        let access = inner.access.grant(inner as *mut _ as uint, m);
        // After acquiring the grant, we need to flag ourselves as having a
        // pending request so the timeout knows to cancel the request.
        let can_timeout = match inner.state {
            State::NoTimeout => false,
            State::TimeoutPending(ref mut client) => {
                *client = Client::RequestPending; true
            }
            State::TimedOut => return Err(UvError(uvll::ECANCELED))
        };
        Ok(Guard {
            access: access,
            state: &mut inner.state,
            can_timeout: can_timeout
        })
    }
    /// Whether a previously armed timeout has already fired.
    pub fn timed_out(&self) -> bool {
        match self.inner.state {
            State::TimedOut => true,
            _ => false,
        }
    }
    /// Direct access to the underlying access queue.
    pub fn access(&mut self) -> &mut access::Access<T> { &mut self.inner.access }
    /// Sets the pending timeout to the value specified.
    ///
    /// The home/loop variables are used to construct a timer if one has not
    /// been previously constructed.
    ///
    /// The callback will be invoked if the timeout elapses, and the data of
    /// the time will be set to `data`.
    pub fn set_timeout(&mut self, dur: Option<Duration>,
                       uv_loop: raw::Loop,
                       cb: fn(uint) -> Option<BlockedTask>,
                       data: uint) {
        self.inner.state = State::NoTimeout;
        // `None` clears any existing timeout; a negative duration is clamped
        // to fire immediately (0 ms).
        let ms = match dur {
            Some(dur) if dur.num_milliseconds() < 0 => 0,
            Some(dur) => dur.num_milliseconds() as u64,
            None => return match self.inner.timer {
                Some(ref mut t) => t.stop().unwrap(),
                None => {}
            }
        };
        // If we have a timeout, lazily initialize the timer which will be used
        // to fire when the timeout runs out.
        if self.inner.timer.is_none() {
            let mut timer = unsafe { raw::Timer::new(&uv_loop).unwrap() };
            timer.set_data(&*self.inner as *const _ as *mut _);
            self.inner.timer = Some(timer);
        }
        // Update our local state and timer with the appropriate information for
        // the new timeout.
        self.inner.user_unblock = Some(cb);
        self.inner.user_payload = data;
        self.inner.state = State::TimeoutPending(Client::NoWaiter);
        let timer = self.inner.timer.as_mut().unwrap();
        timer.stop().unwrap();
        timer.start(ms, 0, timer_cb::<T>).unwrap();
        // When the timeout fires, we expect a TimeoutPending message and we
        // take an appropriate action depending on what state any waiter is in.
        extern fn timer_cb<T: Send>(timer: *mut uvll::uv_timer_t) {
            unsafe {
                let timer: raw::Timer = Handle::from_raw(timer);
                let inner: &mut Inner<T> = mem::transmute(timer.get_data());
                match mem::replace(&mut inner.state, State::TimedOut) {
                    State::TimedOut | State::NoTimeout => unreachable!(),
                    State::TimeoutPending(Client::NoWaiter) => {}
                    State::TimeoutPending(Client::AccessPending) => {
                        // A task is still queued waiting for access: pull it
                        // off the queue and wake it so it observes the timeout.
                        match inner.access.dequeue(inner as *mut _ as uint) {
                            Some(task) => task.reawaken(),
                            None => unreachable!(),
                        }
                    }
                    State::TimeoutPending(Client::RequestPending) => {
                        // Access was already granted: defer to the user's
                        // unblock callback to cancel the in-flight request.
                        match (inner.user_unblock.unwrap())(inner.user_payload) {
                            Some(task) => task.reawaken(),
                            None => unreachable!(),
                        }
                    }
                }
            }
        }
    }
}
impl<T: Send> Clone for AccessTimeout<T> {
    /// A clone shares the underlying access queue but deliberately carries
    /// no timeout state or timer — each clone arms timeouts independently.
    fn clone(&self) -> AccessTimeout<T> {
        AccessTimeout {
            inner: box Inner {
                access: self.inner.access.clone(),
                state: State::NoTimeout,
                timer: None,
                user_unblock: None,
                user_payload: 0,
            },
        }
    }
}
#[unsafe_destructor]
impl<'a, T> Drop for Guard<'a, T> {
    fn drop(&mut self) {
        match *self.state {
            // A guard only exists after `grant` upgraded any pending client
            // past these two states, so they are unreachable here.
            State::TimeoutPending(Client::NoWaiter) |
            State::TimeoutPending(Client::AccessPending) => unreachable!(),
            State::NoTimeout | State::TimedOut => {}
            // Releasing access downgrades an armed timeout to "no waiter" so
            // the timer callback won't try to cancel a finished request.
            State::TimeoutPending(Client::RequestPending) => {
                *self.state = State::TimeoutPending(Client::NoWaiter);
            }
        }
    }
}
#[unsafe_destructor]
impl<T> Drop for AccessTimeout<T> {
    fn drop(&mut self) {
        // Release the raw timer handle, if one was ever created.
        match self.inner.timer {
            Some(ref mut timer) => unsafe {
                timer.close_and_free();
            },
            None => {}
        }
    }
}
////////////////////////////////////////////////////////////////////////////////
// Connect timeouts
////////////////////////////////////////////////////////////////////////////////
/// Context shared between a pending connect request, its optional timeout
/// timer, and the task blocked waiting on the result.
pub struct ConnectCtx {
    // libuv status of the connect; -1 until a callback fires (see `new`).
    status: libc::c_int,
    // Task blocked waiting for the connect/timeout to complete.
    task: Option<BlockedTask>,
    // Timer armed only when a connect timeout was requested.
    timer: Option<raw::Timer>,
}
impl ConnectCtx {
    /// Creates a fresh context with no pending task or timer. `status` starts
    /// at -1 so "no callback ever fired" is distinguishable from success (0).
    pub fn new() -> ConnectCtx {
        ConnectCtx { status: -1, task: None, timer: None }
    }

    /// Issues a connect request via `f`, optionally bounded by `timeout`, and
    /// blocks the current task until either the connect callback or the
    /// timeout timer fires.
    ///
    /// Returns `obj` on success; on failure (timeout reported as `ECANCELED`)
    /// `obj` is dropped eagerly and the libuv error is returned.
    pub fn connect<T>(mut self, obj: T, timeout: Option<Duration>,
                      io: &mut EventLoop,
                      f: |raw::Connect, &T, uvll::uv_connect_cb| -> UvResult<()>)
                      -> UvResult<T> {
        // Issue the connect request
        let mut req = unsafe { Request::alloc() };
        match f(req, &obj, connect_cb) {
            Ok(()) => {}
            // The request was never handed to libuv, so free it here.
            Err(e) => unsafe { req.free(); return Err(e) },
        }
        // Hand the callbacks a raw pointer to this stack-allocated context.
        // NOTE(review): sound only because this task stays blocked (below)
        // until a callback fires — confirm no early return invalidates it.
        req.set_data(&self as *const _ as *mut _);

        // Apply any timeout by scheduling a timer to fire when the timeout
        // expires which will wake up the task.
        match timeout {
            Some(t) => unsafe {
                let t = t.num_milliseconds();
                // A non-positive timeout is treated as already expired.
                if t <= 0 { return Err(UvError(uvll::ECANCELED)) }
                let mut timer = raw::Timer::new(&io.uv_loop()).unwrap();
                timer.start(t as u64, 0, timer_cb).unwrap();
                timer.set_data(&self as *const _ as *mut _);
                self.timer = Some(timer);
            },
            None => {}
        }

        // Wait for some callback to fire.
        unsafe {
            ::block(io.uv_loop(), |task| {
                self.task = Some(task);
            });
        }

        // Make sure an erroneously fired callback doesn't have access
        // to the context any more.
        req.set_data(0 as *mut _);
        match self.timer {
            Some(ref mut t) => unsafe { t.close_and_free() },
            None => {}
        }

        // If we failed because of a timeout, drop the TcpWatcher as
        // soon as possible because it's data is now set to null and we
        // want to cancel the callback ASAP.
        return match self.status {
            0 => Ok(obj),
            n => { drop(obj); Err(UvError(n)) }
        };

        // Fired when the timeout expires before the connect completes.
        extern fn timer_cb(handle: *mut uvll::uv_timer_t) {
            // Don't close the corresponding request, just wake up the task
            // and let RAII take care of the pending watcher.
            unsafe {
                let raw: raw::Timer = Handle::from_raw(handle);
                let cx: &mut ConnectCtx = mem::transmute(raw.get_data());
                cx.status = uvll::ECANCELED;
                ::wakeup(&mut cx.task);
            }
        }

        // Fired by libuv when the connect request completes or is cancelled.
        extern fn connect_cb(req: *mut uvll::uv_connect_t, status: libc::c_int) {
            // This callback can be invoked with ECANCELED if the watcher is
            // closed by the timeout callback. In that case we just want to free
            // the request and be along our merry way.
            unsafe {
                let mut req: raw::Connect = Request::from_raw(req);
                if status == uvll::ECANCELED { req.free(); return }
                // Apparently on windows when the handle is closed this callback
                // may not be invoked with ECANCELED but rather another error
                // code. Either ways, if the data is null, then our timeout has
                // expired and there's nothing we can do.
                let data = req.get_data();
                if data.is_null() { req.free(); return }
                let cx: &mut ConnectCtx = &mut *(data as *mut ConnectCtx);
                cx.status = status;
                match cx.timer {
                    Some(ref mut t) => t.stop().unwrap(),
                    None => {}
                }
                // Note that the timer callback doesn't cancel the connect
                // request (that's the job of uv_close()), so it's possible for
                // this callback to get triggered after the timeout callback
                // fires, but before the task wakes up. In that case, we did
                // indeed successfully connect, but we don't need to wake
                // someone up. We updated the status above (correctly so), and
                // the task will pick up on this when it wakes up.
                if cx.task.is_some() {
                    ::wakeup(&mut cx.task);
                }
                req.free();
            }
        }
    }
}
/// An acceptor whose `accept` calls can be bounded by a timeout.
pub struct AcceptTimeout<T> {
    access: AccessTimeout<AcceptorState<T>>,
}
/// Producer half of the acceptor: pushes accepted connections into the
/// queue shared with an `AcceptTimeout`.
pub struct Pusher<T> {
    access: access::Access<AcceptorState<T>>,
}
/// Queue state shared between an `AcceptTimeout` and its `Pusher`s.
struct AcceptorState<T> {
    blocked_acceptor: Option<BlockedTask>,  // task parked in `accept`, if any
    pending: Vec<UvResult<T>>,              // accepted results not yet consumed
}
impl<T: Send> AcceptTimeout<T> {
    /// Creates an acceptor with an empty connection queue and no timeout set.
    pub fn new() -> AcceptTimeout<T> {
        AcceptTimeout {
            access: AccessTimeout::new(AcceptorState {
                blocked_acceptor: None,
                pending: Vec::new(),
            })
        }
    }

    /// Accepts one pending connection, blocking the current task until a
    /// `Pusher` supplies one, the acceptor is closed (`EOF`), or the timeout
    /// fires (`ECANCELED`).
    pub fn accept(&mut self,
                  missile: HomingMissile,
                  uv_loop: raw::Loop) -> UvResult<T> {
        // If we've timed out but we're not closed yet, poll the state of the
        // queue to see if we can peel off a connection.
        if self.access.timed_out() &&
           !self.access.inner.access.is_closed(&missile) {
            let tmp = self.access.inner.access.get_mut(&missile);
            return match tmp.pending.remove(0) {
                Some(msg) => msg,
                None => Err(UvError(uvll::ECANCELED))
            }
        }

        // Now that we're not polling, attempt to gain access and then peel off
        // a connection. If we have no pending connections, then we need to go
        // to sleep and wait for one.
        //
        // Note that if we're woken up for a pending connection then we're
        // guaranteed that the check above will not steal our connection due to
        // the single-threaded nature of the event loop.
        let mut guard = try!(self.access.grant(missile));
        if guard.access.is_closed() {
            return Err(UvError(uvll::EOF))
        }
        match guard.access.pending.remove(0) {
            Some(msg) => return msg,
            None => {}
        }

        // Park this task; a `Pusher`, `close`, or the timeout wakes us up.
        ::block(uv_loop, |task| {
            guard.access.blocked_acceptor = Some(task);
        });

        // Closed-ness takes precedence over anything that raced into the
        // queue while we slept.
        match guard.access.pending.remove(0) {
            _ if guard.access.is_closed() => Err(UvError(uvll::EOF)),
            Some(msg) => msg,
            None => Err(UvError(uvll::ECANCELED))
        }
    }

    /// Hands out a `Pusher` sharing this acceptor's queue.
    pub fn pusher(&self) -> Pusher<T> {
        Pusher { access: self.access.inner.access.clone() }
    }

    /// Installs (or clears) the accept timeout. When it fires,
    /// `cancel_accept` recovers and wakes the blocked acceptor task, if any.
    pub fn set_timeout(&mut self,
                       dur: Option<Duration>,
                       uv_loop: raw::Loop) {
        // Smuggle the raw state pointer through an integer payload.
        let data = self.access.inner.access.unsafe_get() as uint;
        self.access.set_timeout(dur, uv_loop, cancel_accept::<T>, data);

        // Recovers the acceptor state from the pointer stashed above and
        // extracts the parked task so the timeout machinery can wake it.
        fn cancel_accept<T: Send>(me: uint) -> Option<BlockedTask> {
            unsafe {
                let me: &mut AcceptorState<T> = mem::transmute(me);
                me.blocked_acceptor.take()
            }
        }
    }

    /// Closes the acceptor and wakes any task blocked in `accept`, which will
    /// then observe the closed state and return `EOF`.
    pub fn close(&mut self, m: HomingMissile) {
        self.access.inner.access.close(&m);
        let task = self.access.inner.access.get_mut(&m).blocked_acceptor.take();
        drop(m);
        let _ = task.map(|t| t.reawaken());
    }
}
impl<T: Send> Pusher<T> {
    /// Enqueues an accepted connection (or error) and wakes the blocked
    /// acceptor, if one is parked.
    ///
    /// # Safety
    ///
    /// Bypasses the access guard via `unsafe_get`; the caller must ensure the
    /// state is not mutated concurrently while this runs.
    pub unsafe fn push(&self, t: UvResult<T>) {
        let state = self.access.unsafe_get();
        (*state).pending.push(t);
        // Wake the task blocked in `accept`, if any.
        let _ = (*state).blocked_acceptor.take().map(|t| t.reawaken());
    }
}
impl<T: Send> Clone for AcceptTimeout<T> {
    /// Clones share the same underlying acceptor state.
    fn clone(&self) -> AcceptTimeout<T> {
        let access = self.access.clone();
        AcceptTimeout { access: access }
    }
}
|
use super::StreamDevice;
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
use cpal::{Device, Host, Stream, StreamConfig, SupportedStreamConfig};
use ringbuf::Producer;
use std::fmt;
use super::Log;
use std::process;
/// Wraps the host's default audio input device together with its stream
/// configuration and, once built, the capture stream itself.
pub struct Input {
    device: Device,
    name: String,                                    // human-readable device name
    supported_stream_config: SupportedStreamConfig,  // as reported by cpal
    pub stream_config: StreamConfig,                 // concrete config used to build the stream
    stream: Option<Stream>,                          // None until `build_stream` succeeds
}
impl Input {
    /// Builds an `Input` around the host's default input device.
    ///
    /// # Errors
    ///
    /// Returns an error when no default input device exists or when the
    /// device cannot report a default input configuration.
    pub(super) fn new(host: &Host) -> Result<Input, anyhow::Error> {
        // Propagate the failure instead of calling `process::exit(1)`: the
        // signature already returns a Result, so the caller decides how (and
        // whether) to terminate.
        let device = host
            .default_input_device()
            .ok_or_else(|| anyhow::anyhow!("No input device available"))?;
        // A failure to read the name is non-fatal; fall back to a placeholder.
        let name = match device.name() {
            Ok(name) => name,
            Err(err) => {
                Log::error(format!("Error getting input device name: {}", err));
                String::from("Default")
            }
        };
        let supported_stream_config = device.default_input_config()?;
        // Derive the concrete stream config from a copy; the supported config
        // is kept as-is for display purposes.
        let stream_config: StreamConfig = supported_stream_config.clone().into();
        Ok(Input {
            device,
            name,
            supported_stream_config,
            stream_config,
            stream: None,
        })
    }

    /// Creates (but does not start) the capture stream, forwarding every
    /// sample into `producer`.
    ///
    /// # Errors
    ///
    /// Returns an error if cpal fails to build the input stream.
    pub fn build_stream(&mut self, mut producer: Producer<f32>) -> Result<(), anyhow::Error> {
        let err_fn = |err: cpal::StreamError| {
            Log::error(format!("an error occurred on stream: {}", err));
        };
        let data_callback = move |data: &[f32], _: &cpal::InputCallbackInfo| {
            // Push samples into the ring buffer; a full buffer means the
            // consumer is not draining fast enough.
            let mut output_fell_behind = false;
            for &sample in data {
                if producer.push(sample).is_err() {
                    output_fell_behind = true;
                }
            }
            if output_fell_behind {
                Log::warn("output stream fell behind: try increasing latency".to_string());
            }
        };
        self.stream = Some(self.device.build_input_stream(
            &self.stream_config,
            data_callback,
            err_fn,
        )?);
        Ok(())
    }
}
impl StreamDevice for Input {
    /// Starts the capture stream.
    ///
    /// # Errors
    ///
    /// Returns an error if the stream fails to start, or if `build_stream`
    /// was never called (previously this case only logged and then reported
    /// success, silently swallowing the failure for the caller).
    fn play(&self) -> Result<(), anyhow::Error> {
        match &self.stream {
            Some(s) => {
                s.play()?;
                Ok(())
            }
            None => Err(anyhow::anyhow!("Stream not created")),
        }
    }
}
impl fmt::Display for Input {
    /// Formats as `<name> - <supported config debug>` followed by a newline.
    ///
    /// Writes straight to the formatter instead of building an intermediate
    /// `String` with `format!`, avoiding a needless allocation per call.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} - {:?}\n", self.name, self.supported_stream_config)
    }
}
|
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate crossbeam_channel;
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate failure;
#[macro_use]
extern crate log;
#[macro_use]
extern crate observer_attribute;
// Reject builds that enable more than one default database backend: exactly
// one of `postgre_default`, `mysql_default`, `sqlite_default` may be active.
#[cfg(any(
    all(
        feature = "postgre_default",
        any(feature = "mysql_default", feature = "sqlite_default")
    ),
    all(
        feature = "mysql_default",
        any(feature = "postgre_default", feature = "sqlite_default")
    ),
    all(
        feature = "sqlite_default",
        any(feature = "mysql_default", feature = "postgre_default")
    ),
))]
compile_error!("only one of postgre_default, mysql_default or sqlite_default can be activated");
pub mod base;
mod context;
pub mod iframe;
mod mode;
mod page;
pub mod request_config;
mod response;
pub mod serve;
pub mod serve_static;
pub mod storybook;
pub mod test;
mod urls;
pub mod utils;
pub mod watcher;
pub use crate::context::Context;
pub use crate::mode::Mode;
pub use crate::page::{Page, PageSpec};
pub use crate::request_config::RequestConfig;
pub use crate::response::{json, json_with_context};
pub use crate::serve::{http_to_hyper, THREAD_POOL};
pub use crate::serve_static::serve_static;
pub use crate::urls::{handle, is_realm_url};
pub use crate::response::Response;
/// Crate-wide result type: a realm `Response` or a `failure` error.
pub type Result = std::result::Result<crate::response::Response, failure::Error>;
/// Incoming HTTP request with a fully-buffered `Vec<u8>` body.
pub type Request = http::request::Request<Vec<u8>>;
/// Marker trait for askama "subject" templates.
pub trait Subject: askama::Template {}
/// Marker trait for askama plain-text templates.
pub trait Text: askama::Template {}
/// Marker trait for askama HTML templates.
pub trait HTML: askama::Template {}
/// Data that can round-trip through its string representation.
pub trait UserData: std::string::ToString + std::str::FromStr {}
/// All error categories surfaced by this crate, each with a human-readable
/// `failure` display message and, where applicable, a `#[cause]` chain.
#[derive(Fail, Debug)]
pub enum Error {
    /// The requested page does not exist.
    #[fail(display = "404 Page Not Found: {}", message)]
    PageNotFound { message: String },
    /// Request input failed to parse or validate.
    #[fail(display = "Input Error: {:?}", error)]
    InputError {
        #[cause]
        error: crate::request_config::Error,
    },
    /// Per-field form validation failures, keyed by field name.
    #[fail(display = "Form Error: {:?}", errors)]
    FormError {
        errors: std::collections::HashMap<String, String>,
    },
    /// Catch-all internal error carrying a custom message.
    #[fail(display = "Internal Server Error: {}", message)]
    CustomError { message: String },
    /// Error bubbled up from the `http` crate.
    #[fail(display = "HTTP Error: {}", error)]
    HttpError {
        #[cause]
        error: http::Error,
    },
    /// A required environment variable was missing or malformed.
    #[fail(display = "Env Var Error: {}", error)]
    VarError {
        #[cause]
        error: std::env::VarError,
    },
    /// Database error from diesel.
    #[fail(display = "Diesel Error: {}", error)]
    DieselError {
        #[cause]
        error: diesel::result::Error,
    },
}
pub fn error<T>(key: &str, message: &str) -> std::result::Result<T, failure::Error> {
let mut e = std::collections::HashMap::new();
e.insert(key.into(), message.into());
Err(Error::FormError { errors: e }.into())
}
impl From<diesel::result::Error> for Error {
fn from(error: diesel::result::Error) -> Error {
Error::DieselError { error }
}
}
impl From<std::env::VarError> for Error {
fn from(error: std::env::VarError) -> Error {
Error::VarError { error }
}
}
impl From<http::Error> for Error {
fn from(error: http::Error) -> Error {
Error::HttpError { error }
}
}
impl From<crate::request_config::Error> for Error {
fn from(error: crate::request_config::Error) -> Error {
Error::InputError { error }
}
}
/// Extension trait for converting any failure into a 404 page-not-found error.
pub trait Or404<T> {
    /// Maps the `Err` case to `Error::PageNotFound`, preserving the message.
    fn or_404(self) -> std::result::Result<T, failure::Error>;
}
impl<T> Or404<T> for std::result::Result<T, failure::Error> {
fn or_404(self) -> std::result::Result<T, failure::Error> {
self.map_err(|e| {
Error::PageNotFound {
message: e.to_string(),
}
.into()
})
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.