text stringlengths 8 4.13M |
|---|
use board_formatter;
use lines;
use board::Board;
/// 1-based offset used both for cell numbering and "last index" arithmetic.
const OFFSET: usize = 1;
/// Renders the board as a human-readable grid: numbered blanks, marks,
/// `|` separators within a row and a dashed divider line between rows.
pub fn format_board(board: &Board) -> String {
    let split_board = lines::split_board_into_rows(
        &number_spaces(&board_formatter::expand_board(board)),
        board.get_size().abs(),
    );
    let mut formatted_board: String = "".to_string();
    let row_count = split_board.len();
    for (index, row) in split_board.iter().enumerate() {
        // `row` already coerces to `&[String]`; the old `row.to_vec()` cloned
        // every cell for nothing.
        let formatted_row = format_row(row);
        let length = formatted_row.len();
        formatted_board += &formatted_row;
        // Print a divider after every row except the last. The original
        // compared `index` against `row.len()` (cells per row), which is only
        // correct for square boards; compare against the row count instead.
        if index < row_count - OFFSET {
            // `length` includes the trailing '\n', hence the -OFFSET.
            formatted_board += &"-".repeat(length - OFFSET);
            formatted_board += "\n";
        }
    }
    formatted_board
}
/// Formats one row of cells as e.g. `" 1 | X | 3 \n"`: each cell is padded
/// with spaces (single-character cells get a trailing pad too), cells are
/// joined with `|`, and the row ends with a newline.
fn format_row(row: &[String]) -> String {
    let last = row.len().saturating_sub(OFFSET);
    let mut out = String::new();
    for (i, mark) in row.iter().enumerate() {
        out.push(' ');
        out.push_str(mark);
        // Single-character cells get an extra pad so columns line up.
        if mark.len() == OFFSET {
            out.push(' ');
        }
        out.push(' ');
        out.push_str(if i < last { "|" } else { "\n" });
    }
    out
}
/// Replaces each blank cell (`" "`) with its 1-based position number and
/// leaves every other cell (a player mark) unchanged.
///
/// Cleanups: drops the redundant `as usize` cast on a value that is already
/// `usize`, and builds the result directly with `map`/`collect` instead of
/// pre-filling a placeholder vector and overwriting every slot.
fn number_spaces(spaces: &[String]) -> Vec<String> {
    spaces
        .iter()
        .enumerate()
        .map(|(index, space)| {
            if space == " " {
                (index + OFFSET).to_string()
            } else {
                space.to_string()
            }
        })
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    use board::tests::set_up_board;
    #[test]
    fn displays_an_empty_3_by_3_board() {
        // An empty board renders the 1-based cell numbers.
        let board: Board = set_up_board(3, vec![]);
        let blank_board: String =
            " 1 | 2 | 3 \n--------------\n 4 | 5 | 6 \n--------------\n 7 | 8 | 9 \n"
                .to_string();
        assert_eq!(blank_board, format_board(&board));
    }
    #[test]
    fn displays_a_full_3_by_3_board() {
        // Marks alternate per move order handed to set_up_board.
        let board: Board = set_up_board(3, vec![0, 4, 8, 2, 6, 7, 1, 3, 5]);
        let blank_board: String =
            " X | X | O \n--------------\n O | O | X \n--------------\n X | O | X \n"
                .to_string();
        assert_eq!(blank_board, format_board(&board));
    }
    #[test]
    fn formats_a_row() {
        // A row is space-padded cells joined by '|', ending in '\n'.
        let row: String = " 1 | 2 | 3 \n".to_string();
        assert_eq!(
            row,
            format_row(&vec!["1".to_string(), "2".to_string(), "3".to_string()])
        );
    }
    #[test]
    fn formats_numbers() {
        // Blank cells (" ") become their 1-based position numbers.
        let numbered_spaces: Vec<String> = vec![
            "1".to_string(),
            "2".to_string(),
            "3".to_string(),
            "4".to_string(),
            "5".to_string(),
            "6".to_string(),
            "7".to_string(),
            "8".to_string(),
            "9".to_string(),
        ];
        assert_eq!(
            numbered_spaces,
            number_spaces(&vec![
                " ".to_string(),
                " ".to_string(),
                " ".to_string(),
                " ".to_string(),
                " ".to_string(),
                " ".to_string(),
                " ".to_string(),
                " ".to_string(),
                " ".to_string(),
            ])
        );
    }
}
|
#![allow(dead_code)]
use {Timer, EventEntry, now_micro};
use sys::Selector;
use {EventFlags, EventBuffer, TimerCb, AcceptCb, EventCb, EndCb};
use std::io;
use std::any::Any;
use psocket::{TcpSocket, SOCKET};
/// Return value of callbacks. `OK` and `CONTINUE` get the default handling;
/// `OVER` ends the current loop — e.g. a read callback stops reading, and a
/// repeating timer is cancelled.
pub enum RetValue {
    /// Handled; apply the default follow-up behaviour.
    OK,
    /// Keep going (a repeating timer gets rescheduled with the returned step).
    CONTINUE,
    /// Stop: end the read loop / cancel the timer.
    OVER,
}
/// Configure EventLoop runtime details
#[derive(Copy, Clone, Debug)]
pub struct EventLoopConfig {
    /// Poll timeout handed to the selector, in milliseconds.
    pub io_poll_timeout_ms: usize,
    /// Selector capacity. NOTE(review): "catacity" is a typo for "capacity",
    /// kept as-is because renaming this public field would break callers.
    pub select_catacity: usize,
    /// Capacity in bytes of each per-socket `EventBuffer`.
    pub buffer_capacity: usize,
    // == Timer ==
    /// Upper bound for timer ids handed to the `Timer`.
    pub time_max_id: u32,
}
impl Default for EventLoopConfig {
    /// Defaults: 1 ms poll timeout, 1024 selector slots, 64 KiB buffers,
    /// and half of the `u32` range for timer ids.
    fn default() -> EventLoopConfig {
        EventLoopConfig {
            io_poll_timeout_ms: 1,
            select_catacity: 1024,
            buffer_capacity: 65_536,
            time_max_id: u32::max_value() / 2,
        }
    }
}
/// Single threaded IO event loop.
// #[derive(Debug)]
pub struct EventLoop {
    // `run()` keeps iterating while this flag is true; cleared by `shutdown`.
    run: bool,
    // Pending timers, keyed by timer id.
    timer: Timer,
    pub selector: Selector,
    config: EventLoopConfig,
}
impl EventLoop {
    /// Creates an event loop with the default [`EventLoopConfig`].
    pub fn new() -> io::Result<EventLoop> {
        EventLoop::configured(Default::default())
    }
    /// Creates an event loop from an explicit configuration, allocating the
    /// timer and the platform selector up front.
    pub fn configured(config: EventLoopConfig) -> io::Result<EventLoop> {
        let timer = Timer::new(config.time_max_id);
        let selector = Selector::new(config.select_catacity)?;
        Ok(EventLoop {
            run: true,
            timer,
            selector,
            config,
        })
    }
    /// Shuts down the main loop; it exits on the next pass of `run`.
    pub fn shutdown(&mut self) {
        self.run = false;
    }
    /// Returns whether the main loop is currently flagged as running.
    pub fn is_running(&self) -> bool {
        self.run
    }
    /// Runs the event-processing loop until [`shutdown`](EventLoop::shutdown)
    /// is called.
    pub fn run(&mut self) -> io::Result<()> {
        self.run = true;
        while self.run {
            // If this pass processed no data at all, sleep for 1 ms so an
            // idle loop does not pin a CPU core at 100%.
            if !self.run_once()? {
                ::std::thread::sleep(::std::time::Duration::from_millis(1));
            }
        }
        Ok(())
    }
    /// Performs one pass of processing: polls the sockets, then fires any due
    /// timers. Returns `true` when any socket or timer work was done.
    pub fn run_once(&mut self) -> io::Result<bool> {
        let timeout_ms = self.config.io_poll_timeout_ms;
        let size = Selector::do_select(self, timeout_ms)?;
        let is_op = self.timer_process();
        // Bug fix: this previously returned `size != 0 || !is_op`, which
        // reports "work done" exactly when the loop was idle (no sockets, no
        // timers), defeating the 1 ms anti-spin sleep in `run`.
        Ok(size != 0 || is_op)
    }
    /// Builds an [`EventBuffer`] for `socket` with the configured capacity.
    pub fn new_buff(&self, socket: TcpSocket) -> EventBuffer {
        EventBuffer::new(socket, self.config.buffer_capacity)
    }
    /// Adds a timer entry. Per the timer contract, adding fails when the
    /// entry's tick step is 0.
    pub fn add_timer(&mut self, entry: EventEntry) -> u32 {
        self.timer.add_timer(entry)
    }
    /// Adds a timer. `tick_step` is the callback interval in ms;
    /// `tick_repeat` makes it fire every `tick_step` ms until the callback
    /// returns `RetValue::OVER` or the timer is deleted. Adding fails when
    /// `tick_step` is 0.
    pub fn add_new_timer(
        &mut self,
        tick_step: u64,
        tick_repeat: bool,
        timer_cb: Option<TimerCb>,
        data: Option<Box<dyn Any>>,
    ) -> u32 {
        self.timer.add_first_timer(EventEntry::new_timer(
            tick_step,
            tick_repeat,
            timer_cb,
            data,
        ))
    }
    /// Adds a one-shot timer firing at the absolute time `tick_time`.
    pub fn add_new_timer_at(
        &mut self,
        tick_time: u64,
        timer_cb: Option<TimerCb>,
        data: Option<Box<dyn Any>>,
    ) -> u32 {
        self.timer.add_first_timer(EventEntry::new_timer_at(
            tick_time,
            timer_cb,
            data,
        ))
    }
    /// Deletes the timer with `time_id` (O(log n) on the internal tree) and
    /// returns its entry if it existed.
    pub fn del_timer(&mut self, time_id: u32) -> Option<EventEntry> {
        self.timer.del_timer(time_id)
    }
    /// Registers a socket for event monitoring.
    pub fn register_socket(&mut self, buffer: EventBuffer, entry: EventEntry) -> io::Result<()> {
        let _ = Selector::register_socket(self, buffer, entry)?;
        Ok(())
    }
    /// Modifies (or, with `is_del`, removes) an existing socket registration.
    pub fn modify_socket(&mut self, is_del: bool, socket: SOCKET, entry: EventEntry) -> io::Result<()> {
        let _ = Selector::modify_socket(self, is_del, socket, entry)?;
        Ok(())
    }
    /// Removes the handle information of the given socket.
    pub fn unregister_socket(&mut self, ev_fd: SOCKET) -> io::Result<()> {
        let _ = Selector::unregister_socket(self, ev_fd)?;
        Ok(())
    }
    /// Sends data to the given socket, returning the number of bytes written.
    pub fn send_socket(&mut self, ev_fd: &SOCKET, data: &[u8]) -> io::Result<usize> {
        Selector::send_socket(self, ev_fd, data)
    }
    /// Registers read/write/error callbacks for `socket`; `ev_events` selects
    /// the read / write / persist flags to monitor.
    pub fn add_new_event(
        &mut self,
        socket: TcpSocket,
        ev_events: EventFlags,
        read: Option<EventCb>,
        write: Option<EventCb>,
        error: Option<EndCb>,
        data: Option<Box<dyn Any>>,
    ) -> io::Result<()> {
        let ev_fd = socket.as_raw_socket();
        let buffer = self.new_buff(socket);
        self.register_socket(buffer, EventEntry::new_event(ev_fd, ev_events, read, write, error, data))
    }
    /// Registers an accept callback for a listening `socket`.
    pub fn add_new_accept(
        &mut self,
        socket: TcpSocket,
        ev_events: EventFlags,
        accept: Option<AcceptCb>,
        error: Option<EndCb>,
        data: Option<Box<dyn Any>>,
    ) -> io::Result<()> {
        let ev_fd = socket.as_raw_socket();
        let buffer = self.new_buff(socket);
        self.register_socket(buffer, EventEntry::new_accept(ev_fd, ev_events, accept, error, data))
    }
    /// Timer processing:
    /// 1. Take the earliest due timer; stop once the earliest is later than
    ///    `now`.
    /// 2. Invoke its callback. If it returns `OVER`, or the timer is not
    ///    persistent, drop it; otherwise re-add it (with the new step, when
    ///    the callback returned `CONTINUE`).
    ///
    /// Returns `true` when at least one timer fired.
    fn timer_process(&mut self) -> bool {
        let now = now_micro();
        let mut is_op = false;
        loop {
            match self.timer.tick_time(now) {
                Some(mut entry) => {
                    is_op = true;
                    let time_id = entry.time_id;
                    let is_over = match entry.timer_cb(self, time_id) {
                        (RetValue::OVER, _) => true,
                        (RetValue::CONTINUE, time) => {
                            // The callback supplied the next interval.
                            entry.tick_step = time;
                            false
                        },
                        _ => !entry.ev_events.contains(EventFlags::FLAG_PERSIST),
                    };
                    if !is_over {
                        let _ = self.add_timer(entry);
                    }
                }
                _ => return is_op,
            }
        }
    }
}
// NOTE(review): this asserts `EventLoop` is safe to share between threads,
// but nothing visible here proves `Timer`/`Selector` are — confirm and add a
// SAFETY comment stating the invariant; an undocumented `unsafe impl` is a smell.
unsafe impl Sync for EventLoop {}
|
/// Type-inference fixture: the untyped literal must be inferred as `u16`
/// from the annotation on the second binding.
fn main() {
    let value = 0;
    let _j: u16 = value;
}
|
/// Simplifies an absolute Unix-style path: collapses `//` and `.`, resolves
/// `..` (which cannot climb above the root), and strips any trailing slash.
/// Returns `"/"` when nothing remains.
///
/// Cleanups: `is_empty()` instead of `== ""` / `len() > 0`, a `match` on the
/// segment, and `join` instead of an O(n²) fold with string `+`.
pub fn simplify_path(path: String) -> String {
    let mut directories: Vec<&str> = Vec::new();
    for segment in path.split('/') {
        match segment {
            // Empty segments (leading/trailing/double slashes) and "." are no-ops.
            "" | "." => {}
            // `pop` on an empty Vec is already a harmless no-op, so ".."
            // at the root needs no guard.
            ".." => {
                directories.pop();
            }
            name => directories.push(name),
        }
    }
    if directories.is_empty() {
        "/".to_string()
    } else {
        format!("/{}", directories.join("/"))
    }
}
use crate::completions::{Completer, CompletionOptions, SortBy};
use nu_engine::eval_call;
use nu_protocol::{
ast::{Argument, Call, Expr, Expression},
engine::{EngineState, Stack, StateWorkingSet},
PipelineData, Span, Type, Value, CONFIG_VARIABLE_ID,
};
use reedline::Suggestion;
use std::sync::Arc;
/// Completer that delegates to a user-defined completion declaration.
pub struct CustomCompletion {
    engine_state: Arc<EngineState>,
    stack: Stack,
    // Optional config record installed into the stack before evaluation.
    config: Option<Value>,
    // Declaration id of the custom completer to evaluate.
    decl_id: usize,
    // The current input line, passed to the completer as its first argument.
    line: String,
}
impl CustomCompletion {
    /// Bundles everything needed to invoke a custom completer declaration.
    pub fn new(
        engine_state: Arc<EngineState>,
        stack: Stack,
        config: Option<Value>,
        decl_id: usize,
        line: String,
    ) -> Self {
        Self {
            engine_state,
            stack,
            config,
            decl_id,
            line,
        }
    }
    /// Converts the values returned by the custom completer into reedline
    /// suggestions. Values that are not strings are silently skipped.
    fn map_completions<'a>(
        &self,
        list: impl Iterator<Item = &'a Value>,
        span: Span,
        offset: usize,
    ) -> Vec<Suggestion> {
        list.filter_map(move |x| {
            let s = x.as_string();
            match s {
                Ok(s) => Some(Suggestion {
                    value: s,
                    description: None,
                    extra: None,
                    // Translate the engine span into line-buffer coordinates.
                    // NOTE(review): underflows if offset > span.start —
                    // confirm callers uphold that invariant.
                    span: reedline::Span {
                        start: span.start - offset,
                        end: span.end - offset,
                    },
                }),
                Err(_) => None,
            }
        })
        .collect()
    }
}
impl Completer for CustomCompletion {
    /// Evaluates the custom completer declaration with `(line, cursor)` as
    /// positional arguments and converts its output — either a bare list of
    /// completions, or a record with `completions` and optional `options`
    /// keys — into suggestions plus completion options.
    fn fetch(
        &mut self,
        _: &StateWorkingSet,
        _: Vec<u8>,
        span: Span,
        offset: usize,
        pos: usize,
    ) -> (Vec<Suggestion>, CompletionOptions) {
        // Line position
        // NOTE(review): underflows if offset > pos — confirm callers.
        let line_pos = pos - offset;
        // Set up our initial config to start from
        if let Some(conf) = &self.config {
            self.stack.vars.insert(CONFIG_VARIABLE_ID, conf.clone());
        } else {
            // No config available: install an empty record so the callee
            // still finds a config variable.
            self.stack.vars.insert(
                CONFIG_VARIABLE_ID,
                Value::Record {
                    cols: vec![],
                    vals: vec![],
                    span: Span { start: 0, end: 0 },
                },
            );
        }
        // Call custom declaration
        let result = eval_call(
            &self.engine_state,
            &mut self.stack,
            &Call {
                decl_id: self.decl_id,
                head: span,
                arguments: vec![
                    // First positional: the full input line.
                    Argument::Positional(Expression {
                        span: Span { start: 0, end: 0 },
                        ty: Type::String,
                        expr: Expr::String(self.line.clone()),
                        custom_completion: None,
                    }),
                    // Second positional: cursor position within the line.
                    Argument::Positional(Expression {
                        span: Span { start: 0, end: 0 },
                        ty: Type::Int,
                        expr: Expr::Int(line_pos as i64),
                        custom_completion: None,
                    }),
                ],
                redirect_stdout: true,
                redirect_stderr: true,
            },
            PipelineData::new(span),
        );
        // Parse result
        let (suggestions, options) = match result {
            Ok(pd) => {
                let value = pd.into_value(span);
                match &value {
                    // Record shape: { completions: [...], options: {...} }
                    Value::Record { .. } => {
                        let completions = value
                            .get_data_by_key("completions")
                            .and_then(|val| {
                                val.as_list()
                                    .ok()
                                    .map(|it| self.map_completions(it.iter(), span, offset))
                            })
                            .unwrap_or_default();
                        let options = value.get_data_by_key("options");
                        let options = if let Some(Value::Record { .. }) = &options {
                            let options = options.unwrap_or_default();
                            // Each option falls back to its default when the
                            // key is missing or not a bool.
                            let should_sort = options
                                .get_data_by_key("sort")
                                .and_then(|val| val.as_bool().ok())
                                .unwrap_or(false);
                            CompletionOptions {
                                case_sensitive: options
                                    .get_data_by_key("case_sensitive")
                                    .and_then(|val| val.as_bool().ok())
                                    .unwrap_or(true),
                                positional: options
                                    .get_data_by_key("positional")
                                    .and_then(|val| val.as_bool().ok())
                                    .unwrap_or(true),
                                sort_by: if should_sort {
                                    SortBy::Ascending
                                } else {
                                    SortBy::None
                                },
                            }
                        } else {
                            CompletionOptions::default()
                        };
                        (completions, options)
                    }
                    // Bare list: every element is a completion; default options.
                    Value::List { vals, .. } => {
                        let completions = self.map_completions(vals.iter(), span, offset);
                        (completions, CompletionOptions::default())
                    }
                    _ => (vec![], CompletionOptions::default()),
                }
            }
            // Evaluation failure yields no suggestions rather than an error.
            _ => (vec![], CompletionOptions::default()),
        };
        (suggestions, options)
    }
}
|
$NetBSD: patch-third__party_rust_authenticator_src_netbsd_mod.rs,v 1.1 2023/02/05 08:32:24 he Exp $
--- third_party/rust/authenticator/src/netbsd/mod.rs.orig 2020-09-02 20:55:30.875841045 +0000
+++ third_party/rust/authenticator/src/netbsd/mod.rs
@@ -0,0 +1,10 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+pub mod device;
+pub mod transaction;
+
+mod fd;
+mod monitor;
+mod uhid;
|
//!
//!
//! This module provides traits representing open devices
//! and their surfaces to render contents.
//!
//! ---
//!
//! Initialization of devices happens through an open file descriptor
//! of a drm device.
//!
//! ---
//!
//! Initialization of surfaces happens through the types provided by
//! [`drm-rs`](drm).
//!
//! Four entities are relevant for the initialization procedure.
//!
//! [`crtc`](drm::control::crtc)s represent scanout engines
//! of the device pointer to one framebuffer.
//! Their responsibility is to read the data of the framebuffer and export it into an "Encoder".
//! The number of crtc's represents the number of independent output devices the hardware may handle.
//!
//! An [`encoder`](drm::control::encoder) encodes the data of
//! connected crtcs into a video signal for a fixed set of connectors.
//! E.g. you might have an analog encoder based on a DAC for VGA ports, but another one for digital ones.
//! Also not every encoder might be connected to every crtc.
//!
//! A [`connector`](drm::control::connector) represents a port
//! on your computer, possibly with a connected monitor, TV, capture card, etc.
//!
//! On surface creation a matching encoder for your `encoder`-`connector` is automatically selected,
//! if it exists, which means you still need to check your configuration.
//!
//! At last a [`Mode`](drm::control::Mode) needs to be selected,
//! supported by the `crtc` in question.
//!
use drm::{
control::{connector, crtc, framebuffer, Device as ControlDevice, Mode, ResourceHandles, ResourceInfo},
Device as BasicDevice,
};
use nix::libc::dev_t;
use std::error::Error;
use std::iter::IntoIterator;
use std::os::unix::io::AsRawFd;
use std::path::PathBuf;
use calloop::generic::{EventedFd, Generic};
use calloop::mio::Ready;
use calloop::InsertError;
use calloop::{LoopHandle, Source};
use super::graphics::SwapBuffersError;
#[cfg(feature = "backend_drm_egl")]
pub mod egl;
#[cfg(feature = "backend_drm_gbm")]
pub mod gbm;
#[cfg(feature = "backend_drm_legacy")]
pub mod legacy;
/// Trait to receive events of a bound [`Device`]
///
/// See [`device_bind`]
pub trait DeviceHandler {
    /// The [`Device`] type this handler can handle
    type Device: Device + ?Sized;
    /// A vblank event on the provided crtc has happened
    fn vblank(&mut self, crtc: crtc::Handle);
    /// An error happened while processing events
    fn error(&mut self, error: <<<Self as DeviceHandler>::Device as Device>::Surface as Surface>::Error);
}
/// An open drm device
pub trait Device: AsRawFd + DevPath {
    /// Associated [`Surface`] of this [`Device`] type
    type Surface: Surface;
    /// Returns the id of this device node.
    fn device_id(&self) -> dev_t;
    /// Assigns a [`DeviceHandler`] called during event processing.
    ///
    /// See [`device_bind`] and [`DeviceHandler`]
    fn set_handler(&mut self, handler: impl DeviceHandler<Device = Self> + 'static);
    /// Clear a set [`DeviceHandler`](trait.DeviceHandler.html), if any
    fn clear_handler(&mut self);
    /// Creates a new rendering surface.
    ///
    /// Initialization of surfaces happens through the types provided by
    /// [`drm-rs`](drm).
    ///
    /// [`crtc`](drm::control::crtc)s represent scanout engines
    /// of the device pointer to one framebuffer.
    /// Their responsibility is to read the data of the framebuffer and export it into an "Encoder".
    /// The number of crtc's represents the number of independent output devices the hardware may handle.
    fn create_surface(
        &mut self,
        // Renamed from the typo `ctrc`; trait-declaration parameter names
        // do not bind implementations, so this is source-compatible.
        crtc: crtc::Handle,
    ) -> Result<Self::Surface, <Self::Surface as Surface>::Error>;
    /// Processes any open events of the underlying file descriptor.
    ///
    /// You should not call this function manually, but rather use
    /// [`device_bind`] to register the device
    /// to an [`EventLoop`](calloop::EventLoop)
    /// to synchronize your rendering to the vblank events of the open crtc's
    fn process_events(&mut self);
    /// Load the resource from a [`Device`] given its
    /// [`ResourceHandle`](drm::control::ResourceHandle)
    fn resource_info<T: ResourceInfo>(
        &self,
        handle: T::Handle,
    ) -> Result<T, <Self::Surface as Surface>::Error>;
    /// Attempts to acquire a copy of the [`Device`]'s
    /// [`ResourceHandle`](drm::control::ResourceHandle)
    fn resource_handles(&self) -> Result<ResourceHandles, <Self::Surface as Surface>::Error>;
}
/// Marker trait for [`Device`]s able to provide [`RawSurface`]s
pub trait RawDevice: Device<Surface = <Self as RawDevice>::Surface> {
    /// Associated [`RawSurface`] of this [`RawDevice`] type
    ///
    /// The equality constraint in the supertrait bound ties this to
    /// [`Device::Surface`], so both name the same concrete type.
    type Surface: RawSurface;
}
/// An open crtc that can be used for rendering
pub trait Surface {
    /// Type representing a collection of
    /// [`connector`](drm::control::connector)s
    /// returned by [`current_connectors`](Surface::current_connectors) and
    /// [`pending_connectors`](Surface::pending_connectors)
    type Connectors: IntoIterator<Item = connector::Handle>;
    /// Error type returned by methods of this trait
    type Error: Error + Send;
    /// Returns the underlying [`crtc`](drm::control::crtc) of this surface
    fn crtc(&self) -> crtc::Handle;
    /// Currently used [`connector`](drm::control::connector)s of this `Surface`
    fn current_connectors(&self) -> Self::Connectors;
    /// Returns the pending [`connector`](drm::control::connector)s
    /// used after the next [`commit`](RawSurface::commit) of this [`Surface`]
    ///
    /// *Note*: Only on a [`RawSurface`] you may directly trigger
    /// a [`commit`](RawSurface::commit). Other `Surface`s provide their
    /// own methods that *may* trigger a commit, you will need to read their docs.
    fn pending_connectors(&self) -> Self::Connectors;
    /// Tries to add a new [`connector`](drm::control::connector)
    /// to be used after the next commit.
    ///
    /// Fails if the `connector` is not compatible with the underlying [`crtc`](drm::control::crtc)
    /// (e.g. no suitable [`encoder`](drm::control::encoder) may be found)
    /// or is not compatible with the currently pending
    /// [`Mode`](drm::control::Mode).
    fn add_connector(&self, connector: connector::Handle) -> Result<(), Self::Error>;
    /// Tries to mark a [`connector`](drm::control::connector)
    /// for removal on the next commit.
    fn remove_connector(&self, connector: connector::Handle) -> Result<(), Self::Error>;
    /// Returns the currently active [`Mode`](drm::control::Mode)
    /// of the underlying [`crtc`](drm::control::crtc)
    /// if any.
    fn current_mode(&self) -> Option<Mode>;
    /// Returns the currently pending [`Mode`](drm::control::Mode)
    /// to be used after the next commit, if any.
    fn pending_mode(&self) -> Option<Mode>;
    /// Tries to set a new [`Mode`](drm::control::Mode)
    /// to be used after the next commit.
    ///
    /// Fails if the mode is not compatible with the underlying
    /// [`crtc`](drm::control::crtc) or any of the
    /// pending [`connector`](drm::control::connector)s.
    ///
    /// *Note*: Only on a [`RawSurface`] you may directly trigger
    /// a [`commit`](RawSurface::commit). Other [`Surface`]s provide their
    /// own methods that *may* trigger a commit, you will need to read their docs.
    fn use_mode(&self, mode: Option<Mode>) -> Result<(), Self::Error>;
}
/// An open bare crtc without any rendering abstractions
pub trait RawSurface: Surface + ControlDevice + BasicDevice {
    /// Returns true whenever any state changes are pending to be committed
    ///
    /// The following functions may trigger a pending commit:
    /// - [`add_connector`](Surface::add_connector)
    /// - [`remove_connector`](Surface::remove_connector)
    /// - [`use_mode`](Surface::use_mode)
    fn commit_pending(&self) -> bool;
    /// Commit the pending state rendering a given framebuffer.
    ///
    /// *Note*: This will trigger a full modeset on the underlying device,
    /// potentially causing some flickering. Check before performing this
    /// operation if a commit really is necessary using [`commit_pending`](RawSurface::commit_pending).
    ///
    /// This operation is blocking until the crtc is in the desired state.
    fn commit(&self, framebuffer: framebuffer::Handle) -> Result<(), <Self as Surface>::Error>;
    /// Page-flip the underlying [`crtc`](drm::control::crtc)
    /// to a new given [`framebuffer`].
    ///
    /// This will not cause the crtc to modeset.
    ///
    /// This operation is not blocking and will produce a `vblank` event once swapping is done.
    /// Make sure to [set a `DeviceHandler`](Device::set_handler) and
    /// [register the belonging `Device`](device_bind) before to receive the event in time.
    fn page_flip(&self, framebuffer: framebuffer::Handle) -> Result<(), SwapBuffersError>;
}
/// Trait representing open devices that *may* return a `Path`
pub trait DevPath {
    /// Returns the path of the open device if possible
    fn dev_path(&self) -> Option<PathBuf>;
}
impl<A: AsRawFd> DevPath for A {
    /// Resolves the device path by reading the `/proc/self/fd/<fd>` symlink;
    /// returns `None` where that fails (e.g. no procfs available).
    fn dev_path(&self) -> Option<PathBuf> {
        use std::fs;
        // `{}` instead of `{:?}`: the raw fd is a plain integer, so `Display`
        // is the intended formatter (output is identical, intent is clearer).
        fs::read_link(format!("/proc/self/fd/{}", self.as_raw_fd())).ok()
    }
}
/// Bind a `Device` to an [`EventLoop`](calloop::EventLoop),
///
/// This will cause it to receive events and feed them into a previously
/// set [`DeviceHandler`](DeviceHandler).
pub fn device_bind<D: Device + 'static, Data>(
    handle: &LoopHandle<Data>,
    device: D,
) -> ::std::result::Result<Source<Generic<EventedFd<D>>>, InsertError<Generic<EventedFd<D>>>>
where
    // The duplicate `D: Device` bound was removed — it is already stated in
    // the generic parameter list above.
    Data: 'static,
{
    let mut source = Generic::from_fd_source(device);
    source.set_interest(Ready::readable());
    handle.insert_source(source, |evt, _| {
        // Forward readiness to the wrapped device so it can drain pending
        // drm events and invoke the registered handler.
        evt.source.borrow_mut().0.process_events();
    })
}
|
use thiserror::Error;
/// Library errors.
///
/// A thin newtype over the message string; the derived `Display` prefixes it
/// with "libsystemd error: ".
#[derive(Error, Debug)]
#[error("libsystemd error: {0}")]
pub struct SdError(pub(crate) String);
impl From<&str> for SdError {
fn from(arg: &str) -> Self {
Self(arg.to_string())
}
}
impl From<String> for SdError {
fn from(arg: String) -> Self {
Self(arg)
}
}
|
use hydroflow::hydroflow_syntax;
fn main() {
    // NOTE(review): `source_iter()` and `for_each()` are invoked with no
    // arguments here — this looks like a compile-fail / diagnostics fixture
    // for the `hydroflow_syntax!` macro. Confirm before "fixing" it.
    let mut df = hydroflow_syntax! {
        source_iter() -> for_each();
    };
    df.run_available();
}
|
extern crate libloading as lib;
extern crate patronus_provider;
use self::error::Error;
use patronus_provider as provider;
pub use patronus_provider::AnnotationKind;
use std::borrow::Cow;
use std::env;
use std::ffi::CStr;
use std::ffi::CString;
use std::fs;
use std::os::raw::c_int;
use std::path::Path;
use std::path::PathBuf;
mod error;
/// Represents a profile to be passed down to checkers.
/// Currently only primary language is supported.
pub struct Properties {
    /// Primary language of the checked text, handed to each provider.
    pub primary_language: String,
}
/// Unified annotation produced by the checkers.
#[derive(Debug)]
pub struct Annotation {
    /// Start of the annotated span, as reported by the provider.
    pub offset: usize,
    /// Length of the annotated span.
    pub length: usize,
    /// Human-readable description of the problem.
    pub message: String,
    /// Category of the annotation.
    pub kind: AnnotationKind,
    /// Suggested replacements; may be empty.
    pub suggestions: Vec<String>,
}
// NUL-terminated C symbol names looked up in each candidate provider library.
const PROVIDER_VERSION_FUNCTION: &[u8] = b"patronus_provider_version\0";
const PROVIDER_INIT_FUNCTION: &[u8] = b"patronus_provider_init\0";
/// Provider wrapper.
/// Keeps the associated dynamically loaded library so it could be properly freed.
pub struct Provider {
    // Raw provider object allocated by the loaded library; released via its
    // `free_provider` callback in Drop.
    internal: *mut provider::Provider,
    // Leaked Box holding the library; reclaimed in Drop so the shared object
    // stays loaded while `internal` is in use.
    library: *mut lib::Library,
}
impl Provider {
    /// Checks a text for mistakes using given provider.
    ///
    /// Crosses the C ABI: the text is passed as a NUL-terminated string and
    /// the returned annotation/suggestion arrays are copied into owned Rust
    /// values before returning.
    pub fn check(&self, props: *const provider::Properties, text: &Cow<str>) -> Vec<Annotation> {
        // NOTE(review): panics if the text contains interior NUL bytes.
        let text = CString::new(text.clone().into_owned()).expect("cannot create C string");
        let response = unsafe { (*self.internal).check(props, text.as_ptr()) };
        let annotations = unsafe { &*response.annotations };
        let length = annotations.len;
        let mut anns = Vec::with_capacity(length);
        unsafe {
            // A null data pointer means "no annotations", regardless of `len`.
            if !annotations.data.is_null() {
                for i in 0..length {
                    let provider::Annotation {
                        offset,
                        length,
                        message,
                        kind,
                        suggestions,
                    } = *annotations.data.offset(i as isize);
                    let suggestions = &*suggestions;
                    // Copy the C suggestion strings into owned Rust strings.
                    let suggestions = {
                        let length = suggestions.len;
                        let mut suggs = Vec::with_capacity(length);
                        if !suggestions.data.is_null() {
                            for i in 0..length {
                                let sugg = *suggestions.data.offset(i as isize);
                                suggs.push(CStr::from_ptr(sugg).to_string_lossy().into_owned())
                            }
                        }
                        suggs
                    };
                    anns.push(Annotation {
                        offset,
                        length,
                        message: CStr::from_ptr(message).to_string_lossy().into_owned(),
                        kind,
                        suggestions,
                    });
                }
            }
        }
        // NOTE(review): nothing here frees `response` — confirm the provider
        // owns and reclaims it, otherwise every call leaks.
        anns
    }
    /// Gets the name of the provider (copied from the C string it returns).
    pub fn name(&self) -> Cow<str> {
        unsafe { CStr::from_ptr((*self.internal).name()).to_string_lossy() }
    }
}
impl Drop for Provider {
    fn drop(&mut self) {
        unsafe {
            // Let the provider free itself first (it was allocated inside the
            // library), then reclaim the leaked Box so the shared object is
            // dropped/unloaded last.
            ((*self.internal).free_provider)(self.internal);
            Box::from_raw(self.library);
        }
    }
}
/// Main struct holding providers and other relevant data.
pub struct Patronus {
    /// All successfully loaded providers, in discovery order.
    pub providers: Vec<Provider>,
}
impl Patronus {
    /// Initializes Patronus and loads the providers.
    ///
    /// Panics when provider discovery fails (I/O or library-loading error).
    pub fn new() -> Self {
        Self {
            providers: Self::load_providers().expect("cannot load providers"),
        }
    }
    /// Checks a text for mistakes using all loaded providers, collecting
    /// every annotation they produce.
    pub fn check(&self, props: &Properties, text: &Cow<str>) -> Vec<Annotation> {
        // The CString must stay alive for the whole loop: `properties`
        // stores a raw pointer into it.
        let primary_language =
            CString::new(&*props.primary_language).expect("Cannot create language C String");
        let properties = provider::Properties {
            primary_language: primary_language.as_ptr(),
        };
        let mut res = Vec::new();
        for provider in &self.providers {
            res.extend(provider.check(&properties, text))
        }
        res
    }
    /// Traverses provider directories and tries to load all shared libraries.
    /// The main provider directory is set during compile time from `PATRONUS_PROVIDER_DIR`
    /// environment variable (/usr/lib/patronus by default).
    /// Additionally, the directories listed in `PATRONUS_PROVIDER_PATH` at runtime are crawled as well.
    fn load_providers() -> Result<Vec<Provider>, Error> {
        let mut provider_locations = vec![PathBuf::from(env!("PATRONUS_PROVIDER_DIR"))];
        if let Some(provider_path) = env::var_os("PATRONUS_PROVIDER_PATH") {
            for path in env::split_paths(&provider_path) {
                provider_locations.push(path);
            }
        }
        let mut result = Vec::new();
        for location in provider_locations {
            if location.is_dir() {
                for entry in fs::read_dir(location).map_err(|source| Error::IoError { source })? {
                    let path = entry.map_err(|source| Error::IoError { source })?.path();
                    if path.is_file() && path.is_dylib() {
                        // Box the library so a stable raw pointer can be
                        // stashed in `Provider` and reclaimed in its Drop.
                        let lib = Box::new(unsafe {
                            lib::Library::new(&path)
                                .map_err(|source| Error::LibloadingError { source })?
                        });
                        // Libraries without the version symbol are skipped —
                        // they are not patronus providers.
                        let version = unsafe {
                            match lib.get(PROVIDER_VERSION_FUNCTION)
                                as Result<lib::Symbol<unsafe extern "C" fn() -> c_int>, lib::Error>
                            {
                                Ok(get_version) => get_version(),
                                Err(_) => continue,
                            }
                        };
                        match version {
                            1 => {
                                let internal_provider = unsafe {
                                    let init_provider: lib::Symbol<
                                        unsafe extern "C" fn() -> *mut provider::Provider,
                                    > = lib
                                        .get(PROVIDER_INIT_FUNCTION)
                                        .map_err(|source| Error::LibloadingError { source })?;
                                    init_provider()
                                };
                                result.push(Provider {
                                    internal: internal_provider,
                                    library: Box::into_raw(lib),
                                });
                            }
                            _ => panic!(
                                "Unsupported provider version {} for provider {:?}",
                                version, path
                            ),
                        }
                    }
                }
            }
        }
        Ok(result)
    }
}
/// File-system objects that can be probed for "looks like a dynamic library".
trait DylibTestable {
    /// Checks whether given object is a dynamic library.
    fn is_dylib(&self) -> bool;
}
/// `Path` is probably a dynamic library when it ends with a certain extension.
/// The extension is platform specific – `dylib` for MacOS, `dll` for Windows
/// and `so` everywhere else.
impl DylibTestable for Path {
    #[cfg(target_os = "macos")]
    fn is_dylib(&self) -> bool {
        matches!(self.extension(), Some(ext) if ext == "dylib")
    }
    #[cfg(target_os = "windows")]
    fn is_dylib(&self) -> bool {
        matches!(self.extension(), Some(ext) if ext == "dll")
    }
    #[cfg(not(any(target_os = "windows", target_os = "macos")))]
    fn is_dylib(&self) -> bool {
        matches!(self.extension(), Some(ext) if ext == "so")
    }
}
|
#![allow(dead_code)]
#[macro_use] extern crate failure;
pub mod config; |
#[derive(Debug, PartialEq, Eq, Clone)]
/// A comment, effectively should be treated
/// as white space. There are 4 kinds of comments
/// according to the specification.
///
/// - Single line comments: //comment
/// - Multi line comments: /* comment */
/// - HTML comments: <!-- comment --> plus more!
/// - Hashbang comments: #!comment
pub struct Comment<T> {
    // Which of the four comment syntaxes this is.
    pub kind: CommentKind,
    // The comment text, excluding the delimiters.
    pub content: T,
    // Trailing content after the close of an HTML comment, if any.
    pub tail_content: Option<T>,
}
impl<T> Comment<T> {
    /// Assembles a comment directly from its parts.
    pub fn from_parts(content: T, kind: CommentKind, tail_content: Option<T>) -> Self {
        Self {
            kind,
            content,
            tail_content,
        }
    }
    /// A `//`-style comment.
    pub fn new_single_line(content: T) -> Self {
        Self::from_parts(content, CommentKind::Single, None)
    }
    /// A `/* */`-style comment.
    pub fn new_multi_line(content: T) -> Self {
        Self::from_parts(content, CommentKind::Multi, None)
    }
    /// An HTML comment, optionally carrying trailing content.
    pub fn new_html(content: T, tail_content: Option<T>) -> Self {
        Self::from_parts(content, CommentKind::Html, tail_content)
    }
    /// An HTML comment with no trailing content.
    pub fn new_html_no_tail(content: T) -> Self {
        Self::new_html(content, None)
    }
    /// An HTML comment with trailing content.
    pub fn new_html_with_tail(content: T, tail: T) -> Self {
        Self::new_html(content, Some(tail))
    }
    /// A `#!` hashbang comment.
    pub fn new_hashbang(content: T) -> Self {
        Self::from_parts(content, CommentKind::Hashbang, None)
    }
    /// True for `/* */` comments.
    pub fn is_multi_line(&self) -> bool {
        matches!(self.kind, CommentKind::Multi)
    }
    /// True for `//` comments.
    pub fn is_single_line(&self) -> bool {
        matches!(self.kind, CommentKind::Single)
    }
    /// True for HTML comments.
    pub fn is_html(&self) -> bool {
        matches!(self.kind, CommentKind::Html)
    }
    /// True for hashbang comments.
    pub fn is_hashbang(&self) -> bool {
        matches!(self.kind, CommentKind::Hashbang)
    }
}
/// Renders the comment back to source form, including its delimiters.
///
/// Implemented as `Display` rather than a direct `ToString` impl: the
/// standard library recommends implementing `Display`, which provides
/// `to_string()` for free through the blanket `ToString` impl — so existing
/// callers keep working and the type additionally becomes usable in
/// `format!`/`write!`.
impl<T> std::fmt::Display for Comment<T>
where
    T: AsRef<str>,
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.kind {
            CommentKind::Single => write!(f, "//{}", self.content.as_ref()),
            CommentKind::Multi => write!(f, "/*{}*/", self.content.as_ref()),
            CommentKind::Html => write!(f, "<!--{}-->", self.content.as_ref()),
            CommentKind::Hashbang => write!(f, "#!{}", self.content.as_ref()),
        }
    }
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
/// The 4 kinds of comments
pub enum CommentKind {
    /// `//` line comment
    Single,
    /// `/* */` block comment
    Multi,
    /// `<!-- -->` HTML comment
    Html,
    /// `#!` hashbang comment
    Hashbang,
}
|
/// Units of time.
///
/// NOTE(review): idiomatic Rust variants would be CamelCase (`Seconds`, …);
/// renaming would break existing callers, so only noting it here.
pub enum TimeUnit {
    SECONDS,
    MINUTES,
    HOURS,
}
/// Parser for date strings.
struct DateParser;
impl DateParser {
    /// Parses `date`.
    ///
    /// NOTE(review): currently an empty stub — it ignores its input, returns
    /// `()`, and cannot fail or panic yet.
    pub fn parse(date: &str) {
    }
}
#[cfg(test)]
mod test {
    use super::DateParser;
    // NOTE(review): the panic here comes from `assert!(false)`, not from
    // `DateParser::parse` (a no-op stub) — the `#[should_panic]` expectation
    // is satisfied vacuously. Tighten once `parse` is implemented.
    #[test] #[should_panic]
    fn date_parser_panic() {
        DateParser::parse("");
        assert!(false);
    }
}
//! Representation of the JSON format used by CNI. See the [CNI Specification](https://github.com/containernetworking/cni/blob/master/SPEC.md).
use std::net::IpAddr;
use std::{collections::HashMap, fmt};
use serde::{de, Deserialize, Serialize};
use serde_json::Value;
/// A versioned CNI object. Many objects in the CNI specification are reused, but only the top-level object generally specifies a version. This wrapper allows
/// reusing the corresponding type definitions.
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct Versioned<T> {
    /// Semantic Version 2.0 of the CNI specification to which this object conforms.
    #[serde(rename = "cniVersion")]
    cni_version: String,
    /// The wrapped object; its fields (de)serialize at the same level as `cniVersion`.
    #[serde(flatten)]
    payload: T,
}
/// CNI network configuration
///
/// [Specification](https://github.com/containernetworking/cni/blob/master/SPEC.md#network-configuration).
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct NetworkConfiguration {
    /// Network name. This should be unique across all containers on the host (or other administrative domain).
    /// Must start with a alphanumeric character, optionally followed by any combination of one or more alphanumeric
    /// characters, underscore (_), dot (.) or hyphen (-).
    name: String,
    /// The single plugin's configuration, flattened to the same JSON level as `name`.
    #[serde(flatten)]
    plugin: PluginConfiguration,
}
/// CNI network configuration list.
///
/// [Specification](https://github.com/containernetworking/cni/blob/master/SPEC.md#network-configuration-lists)
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct NetworkConfigurationList {
    /// Network name. This should be unique across all containers on the host (or other administrative domain).
    /// Must start with a alphanumeric character, optionally followed by any combination of one or more alphanumeric
    /// characters, underscore (_), dot (.) or hyphen (-).
    name: String,
    /// If disableCheck is true, runtimes must not call CHECK for this network configuration list. This allows an administrator to prevent CHECKing where a combination of plugins is known to return spurious errors.
    #[serde(skip_serializing_if = "is_false")]
    #[serde(rename = "disableCheck")]
    #[serde(default)]
    disable_check: bool,
    /// A list of standard CNI network plugin configurations.
    plugins: Vec<PluginConfiguration>,
}
/// Configuration for a single CNI plugin. This may be included in either a single-plugin [`NetworkConfiguration`] or a multi-plugin
/// [`NetworkConfigurationList`].
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct PluginConfiguration {
    /// Refers to the filename of the CNI plugin executable.
    #[serde(rename = "type")]
    plugin_type: String,
    /// Additional arguments provided by the container runtime. For example a dictionary of labels could be passed to CNI
    /// plugins by adding them to a labels field under args.
    #[serde(default)]
    #[serde(skip_serializing_if = "HashMap::is_empty")]
    args: HashMap<String, Value>,
    /// If supported by the plugin, sets up an IP masquerade on the host for this network.
    /// This is necessary if the host will act as a gateway to subnets that are not able to route to the IP assigned to the container.
    #[serde(rename = "ipMasq")]
    #[serde(default)]
    #[serde(skip_serializing_if = "is_false")]
    ip_masq: bool,
    /// Optional IPAM (IP Address Management) configuration.
    #[serde(default)]
    #[serde(skip_serializing_if = "Option::is_none")]
    ipam: Option<IpamConfiguration>,
    /// DNS-specific configuration
    #[serde(default)]
    #[serde(skip_serializing_if = "Option::is_none")]
    dns: Option<DnsConfiguration>,
    /// Additional plugin-specific fields. Plugins may define additional fields that they accept and may generate an error if called with unknown fields.
    /// However, plugins should ignore fields in [`args`] if they are not understood.
    #[serde(flatten)]
    other: HashMap<String, Value>,
}
/// IPAM (IP Address Management) plugin configuration.
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct IpamConfiguration {
    /// Refers to the filename of the IPAM plugin executable.
    #[serde(rename = "type")]
    plugin_type: String,
    /// Additional plugin-specific fields. Plugins may define additional fields that they accept and may generate an error if called with unknown fields.
    /// Flattened, so these appear directly inside the `ipam` JSON object.
    #[serde(flatten)]
    other: HashMap<String, Value>,
}
/// Common DNS information.
///
/// All fields are optional; empty/absent fields are omitted when serializing.
///
/// [DNS well-known type](https://github.com/containernetworking/cni/blob/master/SPEC.md#dns).
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct DnsConfiguration {
    /// A priority-ordered list of DNS nameservers that this network is aware of
    #[serde(default)]
    #[serde(skip_serializing_if = "Vec::is_empty")]
    nameservers: Vec<IpAddr>,
    /// The local domain used for short hostname lookups
    #[serde(default)]
    #[serde(skip_serializing_if = "Option::is_none")]
    domain: Option<String>,
    /// List of priority-ordered search domains for short hostname lookups. Will be preferred over [`Self::domain`]
    /// by most resolvers.
    #[serde(default)]
    #[serde(skip_serializing_if = "Vec::is_empty")]
    search: Vec<String>,
    /// List of options that can be passed to the resolver.
    #[serde(default)]
    #[serde(skip_serializing_if = "Vec::is_empty")]
    options: Vec<String>,
}
/// Result of a CNI plugin invocation.
///
/// [Result specification](https://github.com/containernetworking/cni/blob/master/SPEC.md#result).
#[derive(Debug, Deserialize)]
pub struct PluginResult {
    /// Specific network interfaces the plugin created. If the `CNI_IFNAME` variable exists the plugin must use that name for the sandbox/hypervisor
    /// interface or return an error if it cannot.
    #[serde(default)]
    interfaces: Vec<Interface>,
    /// IP configurations the plugin assigned; each references an entry in
    /// `interfaces` by index.
    #[serde(default)]
    ips: Vec<IpConfiguration>,
    /// Routes the plugin created.
    #[serde(default)]
    routes: Vec<RouteConfiguration>,
    /// DNS information returned by the plugin.
    #[serde(default)]
    dns: Option<DnsConfiguration>,
}
/// A network interface created by a CNI plugin.
#[derive(Debug, Deserialize)]
pub struct Interface {
    /// Network interface name.
    name: String,
    /// The hardware address of the interface. If L2 addresses are not meaningful for the plugin then this field is optional.
    #[serde(default)]
    mac: Option<String>,
    /// Container/namespace-based environments should return the full filesystem path to the network namespace of that sandbox.
    /// Hypervisor/VM-based plugins should return an ID unique to the virtualized sandbox the interface was created in. This
    /// item must be provided for interfaces created or moved into a sandbox like a network namespace or a hypervisor/VM.
    sandbox: String,
}
/// IP configuration information provided by a CNI plugin.
///
/// [IP well-known structure](https://github.com/containernetworking/cni/blob/master/SPEC.md#ips).
#[derive(Debug, Deserialize)]
pub struct IpConfiguration {
    /// IP address range in CIDR notation
    address: String,
    /// The default gateway for this subnet, if one exists. It does not instruct the CNI plugin to add any routes with this gateway:
    /// routes to add are specified separately via the routes field. An example use of this value is for the CNI bridge plugin to add
    /// this IP address to the Linux bridge to make it a gateway.
    #[serde(default)]
    gateway: Option<String>,
    /// Index into the [`PluginResult::interfaces`] list of a CNI plugin result indicating which interface this IP configuration should be applied
    /// to.
    interface: usize,
}
/// IP routing configuration. Each `RouteConfiguration` must be relevant to the sandbox interface specified by `CNI_IFNAME`.
/// Routes are expected to be added with a 0 metric. A default route may be specified via "0.0.0.0/0". Since another network
/// might have already configured the default route, the CNI plugin should be prepared to skip over its default route definition.
#[derive(Debug, Deserialize)]
pub struct RouteConfiguration {
    /// Destination subnet specified in CIDR notation.
    #[serde(rename = "dst")]
    destination: String,
    /// IP of the gateway. If omitted, a default gateway is assumed (as determined by the CNI plugin).
    #[serde(rename = "gw")]
    #[serde(default)]
    gateway: Option<String>,
}
/// Abbreviated form of [`PluginResult`] returned by IPAM plugins.
///
/// [IP Allocation specification](https://github.com/containernetworking/cni/blob/master/SPEC.md#ip-allocation).
#[derive(Debug, Deserialize)]
pub struct IpamResult {
    /// IP configuration
    ips: Vec<IpamIpConfiguration>,
    /// Route configuration.
    #[serde(default)]
    routes: Vec<RouteConfiguration>,
    /// Common DNS information.
    dns: Option<DnsConfiguration>,
}
/// Version of [`IpConfiguration`] that omits fields that should not be returned by IPAM plugins
/// (notably the `interface` index, which only makes sense for full plugin results).
#[derive(Debug, Deserialize)]
pub struct IpamIpConfiguration {
    /// IP address range in CIDR notation
    address: String,
    /// The default gateway for this subnet, if one exists. It does not instruct the CNI plugin to add any routes with this gateway:
    /// routes to add are specified separately via the routes field. An example use of this value is for the CNI bridge plugin to add
    /// this IP address to the Linux bridge to make it a gateway.
    #[serde(default)]
    gateway: Option<String>,
}
/// A CNI plugin error. Note that plugins may also log unstructured information to stderr.
#[derive(Debug, Deserialize)]
pub struct Error {
    /// Numeric error code, decoded into [`ErrorCode`].
    code: ErrorCode,
    /// Short human-readable error message.
    #[serde(rename = "msg")]
    message: String,
    /// Optional longer explanation of the failure.
    #[serde(default)]
    details: Option<String>,
}
/// A CNI error code. See the [Well-known Error Codes](https://github.com/containernetworking/cni/blob/master/SPEC.md#well-known-error-codes).
///
/// The numeric mapping below matches this type's `Deserialize` implementation.
#[derive(Debug)]
pub enum ErrorCode {
    /// Code 1: incompatible CNI version.
    IncompatibleCniVersion,
    /// Code 2: unsupported field in network configuration.
    UnsupportedConfigurationField,
    /// Code 3: container unknown or does not exist.
    ContainerUnknown,
    /// Code 4: invalid necessary environment variables.
    InvalidEnvironmentVariable,
    /// Code 5: I/O failure.
    Io,
    /// Code 6: failed to decode content.
    Decode,
    /// Code 7: invalid network config.
    InvalidNetworkConfiguration,
    /// Code 11: transient failure, try again later.
    Transient,
    /// Codes 8, 9 and 12–99: reserved for future well-known errors (raw code kept).
    Reserved(u32),
    /// Any other code: plugin-specific error (raw code kept).
    Plugin(u32),
}
impl fmt::Display for Error {
    /// Renders as `CNI error (<code>): <message>`, with ` (<details>)`
    /// appended when details are present.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "CNI error ({}): {}", self.code, self.message)?;
        match self.details.as_ref() {
            Some(details) => write!(f, " ({})", details),
            None => Ok(()),
        }
    }
}

impl std::error::Error for Error {}
impl<'de> Deserialize<'de> for ErrorCode {
fn deserialize<D>(deserializer: D) -> Result<ErrorCode, D::Error>
where
D: de::Deserializer<'de>,
{
struct ErrorCodeVisitor;
impl<'de> de::Visitor<'de> for ErrorCodeVisitor {
type Value = ErrorCode;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("a CNI error code")
}
fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E> {
match value {
1 => Ok(ErrorCode::IncompatibleCniVersion),
2 => Ok(ErrorCode::UnsupportedConfigurationField),
3 => Ok(ErrorCode::ContainerUnknown),
4 => Ok(ErrorCode::InvalidEnvironmentVariable),
5 => Ok(ErrorCode::Io),
6 => Ok(ErrorCode::Decode),
7 => Ok(ErrorCode::InvalidNetworkConfiguration),
11 => Ok(ErrorCode::Transient),
8 | 9 | 12..=99 => Ok(ErrorCode::Reserved(value)),
_ => Ok(ErrorCode::Plugin(1)),
}
}
}
deserializer.deserialize_u32(ErrorCodeVisitor)
}
}
impl fmt::Display for ErrorCode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ErrorCode::IncompatibleCniVersion => f.write_str("Incompatible CNI version"),
ErrorCode::UnsupportedConfigurationField => {
f.write_str("Unsupported field in network configuration")
}
ErrorCode::ContainerUnknown => f.write_str("Container unknown or does not exist"),
ErrorCode::InvalidEnvironmentVariable => {
f.write_str("Invalid necessary environment variables")
}
ErrorCode::Io => f.write_str("I/O failure"),
ErrorCode::Decode => f.write_str("Failed to decode content"),
ErrorCode::InvalidNetworkConfiguration => f.write_str("Invalid network config"),
ErrorCode::Transient => f.write_str("Try again later"),
ErrorCode::Reserved(code) => write!(f, "reserved error {}", code),
ErrorCode::Plugin(code) => write!(f, "plugin-specific error {}", code),
}
}
}
/// Helper for Serde's `skip_serializing_if` attribute: true when `v` is false.
fn is_false(v: &bool) -> bool {
    !v
}
#[cfg(test)]
mod tests {
    use std::fmt::Debug;
    use serde::{Serialize, de::DeserializeOwned};
    use serde_json::{self, Value, json};
    use super::*;

    /// Helper to assert that a value deserializes from / serializes to the expected JSON.
    /// Panics with a pretty-printed diff of expected vs. actual on mismatch.
    fn assert_roundtrip<T>(value: T, expected: Value) where T: DeserializeOwned + Serialize + Eq + Debug {
        let encoded = serde_json::to_value(&value).expect("encode failed");
        if encoded != expected {
            // Raw string is kept flush-left: its indentation is part of the
            // panic output.
            panic!(r"Value did not serialize as expected!
Value:
{:?}
Expected JSON:
{:#}
Actual JSON:
{:#}
", value, expected, encoded);
        }
        let decoded: T = serde_json::from_value(expected.clone()).expect("decode failed");
        if decoded != value {
            panic!(r"JSON did not deserialize as expected!
JSON:
{:#}
Expected value:
{:?}
Actual value:
{:?}", expected, value, decoded);
        }
    }

    /// Single-plugin configuration round-trips with the plugin's fields
    /// flattened to the top level.
    #[test]
    fn test_single_plugin() {
        // Taken from https://github.com/containernetworking/cni/blob/master/SPEC.md#example-bridge-configuration
        let config = Versioned {
            cni_version: "1.0.0".into(),
            payload: NetworkConfiguration {
                name: "dbnet".into(),
                plugin: PluginConfiguration {
                    plugin_type: "bridge".into(),
                    other: {
                        let mut map = HashMap::new();
                        map.insert("bridge".into(), json!("cni0"));
                        map
                    },
                    ipam: Some(IpamConfiguration {
                        plugin_type: "host-local".into(),
                        other: {
                            let mut map = HashMap::new();
                            map.insert("subnet".into(), json!("10.1.0.0/16"));
                            map.insert("gateway".into(), json!("10.1.0.1"));
                            map
                        },
                    }),
                    ip_masq: false,
                    dns: Some(DnsConfiguration {
                        nameservers: vec!["10.1.0.1".parse().unwrap()],
                        domain: None,
                        search: Vec::new(),
                        options: Vec::new(),
                    }),
                    args: HashMap::new(),
                },
            },
        };
        let json = json!({
            "cniVersion": "1.0.0",
            "name": "dbnet",
            "type": "bridge",
            "bridge": "cni0",
            "ipam": {
                "type": "host-local",
                "subnet": "10.1.0.0/16",
                "gateway": "10.1.0.1"
            },
            "dns": {
                "nameservers": [ "10.1.0.1" ]
            }
        });
        assert_roundtrip(config, json);
    }

    /// Plugin-list configuration round-trips; note that default/empty fields
    /// (false `ipMasq`, empty `args`, `disableCheck: false`) are omitted from
    /// the JSON.
    #[test]
    fn test_plugin_list() {
        // Taken from https://github.com/containernetworking/cni/blob/master/SPEC.md#example-network-configuration-lists
        let config = Versioned {
            cni_version: "1.0.0".into(),
            payload: NetworkConfigurationList {
                name: "dbnet".into(),
                disable_check: false,
                plugins: vec![
                    PluginConfiguration {
                        plugin_type: "bridge".into(),
                        other: {
                            let mut map = HashMap::new();
                            map.insert("bridge".into(), json!("cni0"));
                            map
                        },
                        args: {
                            let mut map = HashMap::new();
                            map.insert("labels".into(), json!({
                                "appVersion": "1.0"
                            }));
                            map
                        },
                        ipam: Some(IpamConfiguration {
                            plugin_type: "host-local".into(),
                            other: {
                                let mut map = HashMap::new();
                                map.insert("subnet".into(), json!("10.1.0.0/16"));
                                map.insert("gateway".into(), json!("10.1.0.1"));
                                map
                            },
                        }),
                        dns: Some(DnsConfiguration {
                            nameservers: vec!["10.1.0.1".parse().unwrap()],
                            domain: None,
                            search: Vec::new(),
                            options: Vec::new(),
                        }),
                        ip_masq: false,
                    },
                    PluginConfiguration {
                        plugin_type: "tuning".into(),
                        other: {
                            let mut map = HashMap::new();
                            map.insert("sysctl".into(), json!({
                                "net.core.somaxconn": "500"
                            }));
                            map
                        },
                        args: HashMap::new(),
                        ipam: None,
                        ip_masq: false,
                        dns: None,
                    }
                ]
            }
        };
        let json = json!({
            "cniVersion": "1.0.0",
            "name": "dbnet",
            "plugins": [
                {
                    "type": "bridge",
                    "bridge": "cni0",
                    "args": {
                        "labels": {
                            "appVersion": "1.0"
                        }
                    },
                    "ipam": {
                        "type": "host-local",
                        "subnet": "10.1.0.0/16",
                        "gateway": "10.1.0.1"
                    },
                    "dns": {
                        "nameservers": [ "10.1.0.1" ]
                    }
                },
                {
                    "type": "tuning",
                    "sysctl": {
                        "net.core.somaxconn": "500"
                    }
                }
            ]
        });
        assert_roundtrip(config, json);
    }
}
|
#![cfg(feature = "alter-table")]
use std::prelude::v1::*;
use {
super::MemoryStorage,
crate::{
ast::ColumnDef,
result::{Error, MutResult},
store::AlterTable,
},
async_trait::async_trait,
};
/// Builds the uniform "unsupported" result used by every `AlterTable` method
/// below: `MemoryStorage` does not support schema alteration at all, so each
/// operation hands the storage back alongside the same `StorageMsg` error.
fn alter_table_not_supported<T>(storage: T) -> MutResult<T, ()> {
    Err((
        storage,
        Error::StorageMsg("[MemoryStorage] alter-table is not supported".to_owned()),
    ))
}

#[async_trait(?Send)]
impl AlterTable for MemoryStorage {
    /// Unsupported; always returns the `StorageMsg` error.
    async fn rename_schema(self, _table_name: &str, _new_table_name: &str) -> MutResult<Self, ()> {
        alter_table_not_supported(self)
    }

    /// Unsupported; always returns the `StorageMsg` error.
    async fn rename_column(
        self,
        _table_name: &str,
        _old_column_name: &str,
        _new_column_name: &str,
    ) -> MutResult<Self, ()> {
        alter_table_not_supported(self)
    }

    /// Unsupported; always returns the `StorageMsg` error.
    async fn add_column(self, _table_name: &str, _column_def: &ColumnDef) -> MutResult<Self, ()> {
        alter_table_not_supported(self)
    }

    /// Unsupported; always returns the `StorageMsg` error.
    async fn drop_column(
        self,
        _table_name: &str,
        _column_name: &str,
        _if_exists: bool,
    ) -> MutResult<Self, ()> {
        alter_table_not_supported(self)
    }
}
|
fn main() {
    // Statements perform an action without producing a value; expressions
    // evaluate to one. A block is itself an expression: its value is the
    // final expression without a trailing semicolon (`x + 1` here), so the
    // whole block assigns 4 to `y`.
    let y = {
        let x = 3;
        x + 1
    };
    println!("y is {}", y);

    // Functions can take multiple typed parameters.
    print_labeled_measurement(5, 'h');
}
/// Prints a measurement as `The measurement is: <value><unit_label>`,
/// e.g. `5` + `'h'` → "The measurement is: 5h".
fn print_labeled_measurement(value: i32, unit_label: char) {
    let measurement = format!("{}{}", value, unit_label);
    println!("The measurement is: {}", measurement);
}
|
use syn::{parse_quote, ItemImpl};
use crate::generate::context::Context;
impl Context {
    /// Generates an inherent `impl` block for the generated context type that
    /// exposes partition-level a653rs operations: `get_partition_status`
    /// (delegates to `Partition::get_status`) and `set_partition_mode`
    /// (delegates to `Partition::set_mode`).
    ///
    /// NOTE(review): the quoted code references `Partition` and `Error`
    /// unqualified — presumably they are in scope wherever the generated code
    /// is emitted; confirm at the emission site.
    pub fn gen_partition(&self) -> ItemImpl {
        let ctx = self.get_context_ident();
        parse_quote! {
            impl<'a, H> #ctx <'a, H> {
                pub fn get_partition_status(&self) -> a653rs::prelude::PartitionStatus {
                    Partition::get_status()
                }

                pub fn set_partition_mode(&self, mode: a653rs::prelude::OperatingMode) -> Result<(), Error> {
                    Partition::set_mode(mode)
                }
            }
        }
    }
}
|
use super::Sha512;
use super::K64;
// FIPS 180-4 SHA-512 helper functions. S0/S1 are the message-schedule small
// sigmas (sigma0, sigma1); S2/S3 are the round big Sigmas (Sigma0, Sigma1).
macro_rules! S0 {
    ($v:expr) => {
        $v.rotate_right(1) ^ $v.rotate_right(8) ^ ($v >> 7)
    };
}
macro_rules! S1 {
    ($v:expr) => {
        $v.rotate_right(19) ^ $v.rotate_right(61) ^ ($v >> 6)
    };
}
// NOTE(review): S2/S3 are unused in this file — EP0!/EP1! below are
// byte-identical copies that `transform` uses instead.
macro_rules! S2 {
    ($v:expr) => {
        $v.rotate_right(28) ^ $v.rotate_right(34) ^ $v.rotate_right(39)
    };
}
macro_rules! S3 {
    ($v:expr) => {
        $v.rotate_right(14) ^ $v.rotate_right(18) ^ $v.rotate_right(41)
    };
}
// NOTE(review): F0/F1 are alternative formulations of the majority (MAJ!) and
// choose (CH!) functions; both are unused in this file.
macro_rules! F0 {
    ($x:expr, $y:expr, $z:expr) => {
        (($x) & ($y)) | (($z) & (($x) | ($y)))
    };
}
macro_rules! F1 {
    ($x:expr, $y:expr, $z:expr) => {
        (($z) ^ (($x) & (($y) ^ ($z))))
    };
}
// Choose function Ch(x,y,z): used in each round of `transform`.
macro_rules! CH {
    ($x:expr, $y:expr, $z:expr) => {
        (($x) & ($y)) ^ (!($x) & ($z))
    };
}
// Majority function Maj(x,y,z): used in each round of `transform`.
macro_rules! MAJ {
    ($x:expr, $y:expr, $z:expr) => {
        (($x) & ($y)) ^ (($x) & ($z)) ^ (($y) & ($z))
    };
}
// Big Sigma0/Sigma1, used in the round computation (duplicates of S2!/S3!).
macro_rules! EP0 {
    ($v:expr) => {
        $v.rotate_right(28) ^ $v.rotate_right(34) ^ $v.rotate_right(39)
    };
}
macro_rules! EP1 {
    ($v:expr) => {
        $v.rotate_right(14) ^ $v.rotate_right(18) ^ $v.rotate_right(41)
    };
}
// NOTE(review): SIG0/SIG1 duplicate S0!/S1! and are unused — `transform` uses
// S0!/S1! for the message schedule.
macro_rules! SIG0 {
    ($v:expr) => {
        $v.rotate_right(1) ^ $v.rotate_right(8) ^ ($v >> 7)
    };
}
macro_rules! SIG1 {
    ($v:expr) => {
        $v.rotate_right(19) ^ $v.rotate_right(61) ^ ($v >> 6)
    };
}
/// SHA-512 compression function: folds one 128-byte block into `state`.
#[inline]
pub fn transform(state: &mut [u64; 8], block: &[u8]) {
    debug_assert_eq!(state.len(), 8);
    debug_assert_eq!(block.len(), Sha512::BLOCK_LEN);

    // Message schedule: the first 16 words come straight from the block
    // (big-endian), the remaining 64 are derived per FIPS 180-4.
    let mut w = [0u64; 80];
    for (word, chunk) in w.iter_mut().take(16).zip(block.chunks_exact(8)) {
        let mut be = [0u8; 8];
        be.copy_from_slice(chunk);
        *word = u64::from_be_bytes(be);
    }
    for i in 16..80 {
        w[i] = S1!(w[i - 2])
            .wrapping_add(w[i - 7])
            .wrapping_add(S0!(w[i - 15]))
            .wrapping_add(w[i - 16]);
    }

    // Working variables a..h, then 80 rounds of the compression function.
    let [mut a, mut b, mut c, mut d, mut e, mut f, mut g, mut h] = *state;
    for i in 0..80 {
        let t1 = h
            .wrapping_add(EP1!(e))
            .wrapping_add(CH!(e, f, g))
            .wrapping_add(K64[i])
            .wrapping_add(w[i]);
        let t2 = EP0!(a).wrapping_add(MAJ!(a, b, c));
        h = g;
        g = f;
        f = e;
        e = d.wrapping_add(t1);
        d = c;
        c = b;
        b = a;
        a = t1.wrapping_add(t2);
    }

    // Fold the working variables back into the chaining state.
    for (s, v) in state.iter_mut().zip([a, b, c, d, e, f, g, h].iter()) {
        *s = (*s).wrapping_add(*v);
    }
}
|
#![allow(unused)]
use std::{
future::Future,
ops::Deref,
pin::Pin,
sync::{Arc, Mutex},
task::{Context, Poll, Waker},
time::Duration,
};
use bevy::tasks::{Task, TaskPool};
use futures::{stream, Stream};
use turbulence::{
buffer::BufferPool,
packet::{Packet, PacketPool},
packet_multiplexer::{MuxPacket, MuxPacketPool},
runtime::Runtime,
};
/// Buffer "pool" whose inner `usize` is the buffer length in bytes; every
/// acquire allocates a fresh boxed buffer (no actual reuse happens).
#[derive(Clone, Debug)]
pub struct SimpleBufferPool(pub usize);
impl BufferPool for SimpleBufferPool {
type Buffer = Box<[u8]>;
fn acquire(&self) -> Self::Buffer {
vec![0; self.0].into_boxed_slice()
}
}
/// Cheaply-cloneable handle implementing turbulence's `Runtime` on top of a
/// bevy `TaskPool`.
#[derive(Clone)]
pub struct TaskPoolRuntime(Arc<TaskPoolRuntimeInner>);

pub struct TaskPoolRuntimeInner {
    // Pool used both for spawned futures and for delay tasks.
    pool: TaskPool,
    // Handles of spawned tasks, retained so the tasks stay alive
    // (presumably dropping a bevy `Task` cancels it — confirm against
    // bevy_tasks).
    tasks: Mutex<Vec<Task<()>>>, // FIXME: cleanup finished
}
impl TaskPoolRuntime {
    /// Wraps a bevy `TaskPool` in a shareable runtime handle with an empty
    /// task list.
    pub fn new(pool: TaskPool) -> Self {
        let inner = TaskPoolRuntimeInner {
            pool,
            tasks: Mutex::new(Vec::new()),
        };
        TaskPoolRuntime(Arc::new(inner))
    }
}
impl Deref for TaskPoolRuntime {
    type Target = TaskPoolRuntimeInner;

    /// Forwards field access (`pool`, `tasks`) to the shared inner state.
    fn deref(&self) -> &Self::Target {
        self.0.as_ref()
    }
}
impl Runtime for TaskPoolRuntime {
    type Instant = instant::Instant;
    /// Boxed future that resolves once the delay elapses; see `delay`.
    type Delay = Pin<Box<dyn Future<Output = ()> + Send>>;
    /// Boxed stream yielding the completion time of each successive delay.
    type Interval = Pin<Box<dyn Stream<Item = instant::Instant> + Send>>;

    /// Spawns `f` on the task pool, stashing the handle so the task is kept
    /// alive (handles are never removed — see the FIXME on the struct).
    fn spawn<F: Future<Output = ()> + Send + 'static>(&self, f: F) {
        self.tasks
            .lock()
            .unwrap()
            .push(self.pool.spawn(Box::pin(f)));
    }

    fn now(&self) -> Self::Instant {
        Self::Instant::now()
    }

    fn elapsed(&self, instant: Self::Instant) -> Duration {
        instant.elapsed()
    }

    fn duration_between(&self, earlier: Self::Instant, later: Self::Instant) -> Duration {
        later.duration_since(earlier)
    }

    /// Future completing after roughly `duration`; delegates to `do_delay`,
    /// which blocks a pool thread for the whole wait (see the note there).
    fn delay(&self, duration: Duration) -> Self::Delay {
        let state = Arc::clone(&self.0);
        Box::pin(async move {
            do_delay(state, duration).await;
        })
    }

    /// Endless stream built by unfolding repeated `do_delay` calls.
    fn interval(&self, duration: Duration) -> Self::Interval {
        Box::pin(stream::unfold(
            Arc::clone(&self.0),
            move |state| async move {
                let time = do_delay(Arc::clone(&state), duration).await;
                Some((time, state))
            },
        ))
    }
}
/// Waits `duration` by spawning a pool task that calls `std::thread::sleep`,
/// then returns the completion time.
///
/// NOTE(review): the blocking sleep occupies one task-pool thread for the
/// entire duration; many concurrent delays could starve the pool. An async
/// timer would avoid this — verify the pool sizing makes this acceptable.
async fn do_delay(state: Arc<TaskPoolRuntimeInner>, duration: Duration) -> instant::Instant {
    state
        .pool
        .spawn(async move {
            std::thread::sleep(duration);
        })
        .await;
    instant::Instant::now()
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// DashboardListDeleteResponse : Deleted dashboard details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DashboardListDeleteResponse {
    /// ID of the deleted dashboard list. Omitted from JSON when `None`.
    #[serde(rename = "deleted_dashboard_list_id", skip_serializing_if = "Option::is_none")]
    pub deleted_dashboard_list_id: Option<i64>,
}
impl DashboardListDeleteResponse {
    /// Deleted dashboard details. Creates a response with no deleted-list id.
    pub fn new() -> DashboardListDeleteResponse {
        DashboardListDeleteResponse {
            deleted_dashboard_list_id: None,
        }
    }
}

/// `Default` mirrors `new()` (clippy `new_without_default`), enabling use with
/// `unwrap_or_default()` and struct-update syntax.
impl Default for DashboardListDeleteResponse {
    fn default() -> Self {
        Self::new()
    }
}
|
use crate::{
animation::Animation,
frame::Frame,
sdf::{render_sdf, MultiUnion},
};
use itertools::Itertools;
use nalgebra::{vector, SMatrix, Vector3};
use palette::{LinSrgba, Mix};
use rand::Rng;
use sdfu::SDF;
/// Water-surface animation: a discrete 2D wave simulation over an 8x8 height
/// field, with randomly spawned falling drops rendered as SDF spheres.
#[cfg_attr(feature = "visual", derive(bevy_inspector_egui::Inspectable))]
pub struct Waves {
    // Wave-equation constants; presumably c² (propagation speed squared) and
    // h² (grid spacing squared) — TODO confirm intended units.
    c2: f32,
    h2: f32,
    // How far a drop falls per frame.
    drop_speed: f32,
    // Integration time step.
    delta_t: f32,
    // Height added to a cell when a drop lands on it.
    drop_volume: f32,
    // Height uniformly removed per time unit (water drains away).
    drain_rate: f32,
    // Clamp on the per-cell acceleration to keep the simulation stable.
    #[cfg_attr(feature = "visual", inspectable(min = 0.0, max = 100.0))]
    max_gradient: f32,
    // Sum of the height field, recomputed each frame (inspector read-out).
    #[cfg_attr(feature = "visual", inspectable(read_only))]
    current_volume: f32,
    // Height field, its double-buffer, and the velocity field.
    #[cfg_attr(feature = "visual", inspectable(ignore))]
    u: SMatrix<f32, 8, 8>,
    #[cfg_attr(feature = "visual", inspectable(ignore))]
    u_new: SMatrix<f32, 8, 8>,
    #[cfg_attr(feature = "visual", inspectable(ignore))]
    v: SMatrix<f32, 8, 8>,
    // In-flight drops as (x, z, height).
    #[cfg_attr(feature = "visual", inspectable(ignore))]
    drops: Vec<(u8, u8, f32)>,
    // Per-frame scratch list of drop spheres for the SDF renderer.
    #[cfg_attr(feature = "visual", inspectable(ignore))]
    sdf_cache: Vec<sdfu::mods::Translate<Vector3<f32>, sdfu::Sphere<f32>>>,
}
impl Default for Waves {
    /// Starting configuration: tuned simulation constants, zeroed fields and
    /// no drops in flight.
    fn default() -> Self {
        Self {
            // Simulation constants.
            c2: 0.3,
            h2: 3.0,
            delta_t: 0.2,
            max_gradient: 20.0,
            // Drop behaviour.
            drop_speed: 0.1,
            drop_volume: 10.0,
            drain_rate: 0.1,
            // Mutable state starts zeroed / empty.
            current_volume: 0.0,
            u: Default::default(),
            u_new: Default::default(),
            v: Default::default(),
            drops: Default::default(),
            sdf_cache: Default::default(),
        }
    }
}
impl std::fmt::Debug for Waves {
    /// All fields are elided; only the type name is printed.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Waves")
    }
}
impl Waves {
fn u(&self, x: usize, dx: isize, y: usize, dy: isize) -> f32 {
fn u_inner(this: &Waves, x: usize, dx: isize, y: usize, dy: isize) -> Option<f32> {
let x = x.checked_add_signed(dx)?;
let y = y.checked_add_signed(dy)?;
this.u.get((x, y)).copied()
}
u_inner(self, x, dx, y, dy).unwrap_or_else(|| self.u[(x, y)])
}
}
impl Animation for Waves {
    /// Advances the wave simulation one step and renders it into `frame`.
    fn next_frame(&mut self, frame: &mut Frame) {
        let mut rng = rand::thread_rng();
        // Discrete wave equation: acceleration from the 4-neighbour
        // Laplacian (clamped to max_gradient), integrated into velocity `v`
        // and then into the double-buffered height `u_new` (floored at 0).
        for (x, z) in (0..8).cartesian_product(0..8) {
            let f = self.c2
                * (self.u(x, 1, z, 0)
                    + self.u(x, -1, z, 0)
                    + self.u(x, 0, z, 1)
                    + self.u(x, 0, z, -1)
                    - 4.0 * self.u(x, 0, z, 0))
                / self.h2;
            let f = f.clamp(-self.max_gradient, self.max_gradient);
            self.v[(x, z)] += f * self.delta_t;
            self.u_new[(x, z)] = (self.u[(x, z)] + self.v[(x, z)] * self.delta_t).max(0.0);
            // Velocity damping so the surface settles instead of ringing.
            self.v[(x, z)] *= 0.97;
        }
        self.u.copy_from(&self.u_new);
        // Uniform drain, then track total volume for the inspector read-out.
        self.u.add_scalar_mut(-self.drain_rate * self.delta_t);
        self.current_volume = self.u.sum();
        // Randomly spawn a new drop (at most 3 in flight), starting at
        // height 8.
        if self.drops.len() < 3 && rng.gen_bool(0.3) {
            self.drops
                .push((rng.gen_range(0..8), rng.gen_range(0..8), 8.0));
        }
        // Move each drop down; on impact transfer its volume into the height
        // field and remove it.
        let pred = |drop: &mut (u8, u8, f32)| {
            drop.2 -= self.drop_speed;
            if drop.2 < 0.0 {
                self.u[(drop.0 as usize, drop.1 as usize)] += self.drop_volume;
                false
            } else {
                true
            }
        };
        self.drops.retain_mut(pred);
        // Render in-flight drops as small spheres via the SDF renderer
        // (which also clears the frame); with no drops just zero the frame.
        self.sdf_cache.clear();
        for &(x, z, y) in &self.drops {
            let sdf = sdfu::Sphere::new(0.1, LinSrgba::new(0.0, 0.2, 0.9, 1.0))
                .translate(vector![x as f32, y, z as f32]);
            self.sdf_cache.push(sdf);
        }
        if self.sdf_cache.is_empty() {
            frame.zero();
        } else {
            let union = MultiUnion::hard(&self.sdf_cache);
            render_sdf(union, frame);
        }
        // Draw the water column per cell: full LEDs bottom-up, then one
        // partially-filled LED whose colour blends toward white with the
        // remaining fraction.
        'outer: for (x, z) in (0..8).cartesian_product(0..8) {
            let mut height = self.u[(x, z)];
            let mut y = 0;
            let led_volume = 6.0;
            while height > led_volume {
                frame.set(x, y, z, LinSrgba::new(0.0, 0.2, 0.9, 0.8));
                y += 1;
                height -= led_volume;
                if y >= 8 {
                    // Column is full; skip the partial LED for this cell.
                    continue 'outer;
                }
            }
            let colour = LinSrgba::new(0.0, 0.2, 0.9, 0.8).mix(
                &LinSrgba::new(1.0, 1.0, 1.0, 0.8),
                height.max(0.0) / led_volume,
            );
            frame.set(x, y, z, colour);
        }
    }

    /// Restores all tuning parameters and simulation state to their defaults.
    fn reset(&mut self) {
        *self = Self::default();
    }
}
|
use {check_len, Error, Result, TryRead, TryWrite};
/// Context for &[u8] to determine where the slice ends.
///
/// Pattern will be included in the result
///
/// # Example
///
/// ```
/// use byte::*;
/// use byte::ctx::*;
///
/// let bytes: &[u8] = &[0xde, 0xad, 0xbe, 0xef, 0x00, 0xff];
///
/// let sub: &[u8] = bytes.read_with(&mut 0, Bytes::Len(2)).unwrap();
/// assert_eq!(sub, &[0xde, 0xad]);
///
/// static PATTERN: &'static [u8; 2] = &[0x00, 0xff];
///
/// let sub: &[u8] = bytes.read_with(&mut 0, Bytes::Pattern(PATTERN)).unwrap();
/// assert_eq!(sub, &[0xde, 0xad, 0xbe, 0xef, 0x00, 0xff]);
///
/// let sub: &[u8] = bytes.read_with(&mut 0, Bytes::PatternUntil(PATTERN, 4)).unwrap();
/// assert_eq!(sub, &[0xde, 0xad, 0xbe, 0xef]);
/// ```
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Bytes {
    /// Take fixed-length bytes
    Len(usize),
    /// Take bytes until reaching a byte pattern (pattern included)
    Pattern(&'static [u8]),
    /// Take bytes until either byte pattern or length reached
    PatternUntil(&'static [u8], usize),
}
impl<'a> TryRead<'a, Bytes> for &'a [u8] {
    /// Reads a sub-slice according to `ctx`:
    /// - `Len(n)`: the first `n` bytes (error if fewer are available);
    /// - `Pattern(p)`: up to and including the first occurrence of `p`
    ///   (`Incomplete` if never found);
    /// - `PatternUntil(p, len)`: like `Pattern`, but the search window is
    ///   capped so the result never exceeds `len` bytes; falls back to `len`
    ///   bytes when no match occurs. Note: `check_len(bytes, len)` is
    ///   evaluated unconditionally, so the buffer must hold at least `len`
    ///   bytes even when the pattern matches earlier (preserves the original
    ///   eager-argument behavior of `unwrap_or`).
    #[inline]
    fn try_read(bytes: &'a [u8], ctx: Bytes) -> Result<(Self, usize)> {
        let len = match ctx {
            Bytes::Len(len) => check_len(bytes, len)?,
            Bytes::Pattern(pattern) => {
                if pattern.is_empty() {
                    return Err(Error::BadInput {
                        err: "Pattern is empty",
                    });
                }
                check_len(bytes, pattern.len())?;
                // Find the first window that starts with the pattern.
                (0..=bytes.len() - pattern.len())
                    .position(|n| bytes[n..].starts_with(pattern))
                    .map(|start| start + pattern.len())
                    .ok_or(Error::Incomplete)?
            }
            Bytes::PatternUntil(pattern, len) => {
                if pattern.is_empty() {
                    return Err(Error::BadInput {
                        err: "Pattern is empty",
                    });
                }
                if pattern.len() > len {
                    return Err(Error::BadInput {
                        err: "Pattern is longer than restricted length",
                    });
                }
                check_len(bytes, pattern.len())?;
                let found = (0..=bytes.len() - pattern.len())
                    .take(len - pattern.len())
                    .position(|n| bytes[n..].starts_with(pattern));
                // Evaluated regardless of a match, exactly as before.
                let capped = check_len(bytes, len)?;
                found.map(|start| start + pattern.len()).unwrap_or(capped)
            }
        };
        Ok((&bytes[..len], len))
    }
}
impl<'a> TryWrite for &'a [u8] {
    /// Writes the whole slice at the start of `bytes`, erroring when it does
    /// not fit; returns the number of bytes written.
    #[inline]
    fn try_write(self, bytes: &mut [u8], _ctx: ()) -> Result<usize> {
        check_len(bytes, self.len())?;
        // `u8: Copy`, so `copy_from_slice` (a memcpy) is equivalent to and
        // cheaper than `clone_from_slice`.
        bytes[..self.len()].copy_from_slice(self);
        Ok(self.len())
    }
}
|
#![feature(exact_chunks)]
#[macro_use]
extern crate serde_derive;
extern crate image;
extern crate serde;
extern crate serde_json;
extern crate byteorder;
use std::mem;
use byteorder::{ WriteBytesExt, BigEndian};
use image::{RgbaImage, imageops};
/// Host-supplied metadata accompanying an image, decoded from JSON in
/// `transform`. Every field is optional.
#[derive(Deserialize, Debug)]
struct Metadata {
    // Arbitrary string key/value context — intended contents not specified
    // here; confirm with the host side.
    context: Option<std::collections::HashMap<String, String>>,
    // Free-form tags.
    tags: Option<Vec<String>>,
    // Named integer parameters (unused by `blur` in this file).
    variables: Option<std::collections::HashMap<String, i32>>,
}
/// Allocates `size` bytes on the module heap and leaks them, returning the
/// raw pointer so the host can write into linear memory. Ownership is
/// reclaimed later via `dealloc` (or `transform`, which rebuilds a `Vec`
/// from the pointer).
#[no_mangle]
pub extern "C" fn alloc(size: usize) -> *mut u8 {
    // ManuallyDrop leaks the allocation (same effect as mem::forget) so it
    // survives this function's return; the host owns it from here.
    let mut buf = mem::ManuallyDrop::new(Vec::<u8>::with_capacity(size));
    buf.as_mut_ptr()
}
/// Reclaims ownership of a buffer previously leaked by `alloc`, letting Rust
/// free it.
#[no_mangle]
pub extern "C" fn dealloc(ptr: *mut u8, cap: usize) {
    // SAFETY: relies on the caller passing a pointer obtained from `alloc`
    // with the matching capacity; length 0 because the contents don't matter.
    drop(unsafe { Vec::from_raw_parts(ptr, 0, cap) });
}
/// Wasm entry point: takes ownership of an RGBA image buffer and a JSON
/// metadata buffer previously written by the host (via `alloc`), blurs the
/// image, and returns a pointer to a new leaked buffer laid out as
/// `[width: u32 BE][height: u32 BE][RGBA bytes…]`. The host is responsible
/// for reading and freeing the result.
///
/// Panics if the metadata is not valid JSON for `Metadata`.
#[no_mangle]
pub extern "C" fn transform(width: u32, height: u32, image_ptr: *mut u8, meta_ptr: *mut u8, meta_size: usize) -> u32 {
    // 4 bytes per pixel (RGBA).
    let size = (width * height * 4) as usize;
    // Reclaim ownership of the host-written buffers; `bytes` is consumed by
    // `blur`, `meta_bytes` is freed when it drops at the end of this call.
    let bytes = unsafe { Vec::from_raw_parts(image_ptr, size, size) };
    let meta_bytes = unsafe { Vec::from_raw_parts(meta_ptr, meta_size, meta_size) };
    let metadata: Metadata = serde_json::from_slice(&meta_bytes).expect("Failed to deserialize metadata json");
    host_trace(format!("{:?}", metadata));
    let (out_w, out_h, mut out_buffer) = blur(width, height, bytes, metadata);
    // Prefix the pixel data with the output dimensions as big-endian u32s.
    let mut dims = vec![];
    let _ = dims.write_u32::<BigEndian>(out_w);
    let _ = dims.write_u32::<BigEndian>(out_h);
    dims.append(&mut out_buffer);
    let out_buffer = dims;
    let out_ptr = out_buffer.as_ptr() as u32;
    // Leak the result so it outlives this call; the host reads it from here.
    mem::forget(out_buffer);
    out_ptr
}
/// Sends a UTF-8 string to the host's imported `trace` hook as a
/// (pointer, length) pair into linear memory.
fn host_trace(x: String) {
    let buf = x.into_bytes();
    // `buf` stays alive across the call, so the pointer remains valid.
    unsafe { trace(buf.as_ptr() as u32, buf.len() as u32) }
}
// Host-imported logging hook: reads `length` bytes of UTF-8 starting at
// offset `x` in this module's linear memory.
extern "C" {
    pub fn trace(x: u32, length: u32);
}
fn blur(width: u32, height: u32, bytes: Vec<u8>, _metadata: Metadata) -> (u32, u32, Vec<u8>) {
let ref img = RgbaImage::from_raw(width, height, bytes).unwrap();
let subimg = imageops::blur(img, 2.5);
let out_w = subimg.width();
let out_h = subimg.height();
let out_buffer = subimg.into_raw();
(out_w, out_h, out_buffer)
} |
use serde::{Deserialize, Serialize};
/// Timestamp type for log records: an integer epoch value. The unit is not
/// specified here — presumably Unix seconds; confirm with the producer.
pub type Date = i64;

/// One access-log entry; field names mirror the Common Log Format
/// (remote host, RFC 931 ident, authenticated user, timestamp, request line,
/// status code, response size in bytes).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Record {
    pub remotehost: String,
    pub rfc931: String,
    pub authuser: String,
    pub date: Date,
    pub request: String,
    pub status: u16,
    pub bytes: u64,
}
|
use crate::ast::*;
use crate::errors::MomoaError;
use crate::location::*;
use crate::tokens::*;
use crate::Mode;
use std::collections::HashMap;
//-----------------------------------------------------------------------------
// Parser
//-----------------------------------------------------------------------------
/// Recursive-descent JSON/JSONC parser over a token stream.
struct Parser<'a> {
    /// Source text; token locations index into this via byte offsets.
    text: &'a str,
    /// Underlying tokenizer.
    it: Tokens<'a>,
    /// Start location of the most recently consumed token; used for
    /// unexpected-end-of-input errors.
    loc: Location,
    /// Every token consumed so far, including comments (JSONC); cloned into
    /// the resulting document node.
    tokens: Vec<Token>,
    /// Single-token lookahead buffer filled by `peek_token`.
    peeked: Option<Token>,
}
impl<'a> Parser<'a> {
/// Creates a parser positioned at the very start of `text` (line 1,
/// column 1). `mode` selects strict JSON vs. JSONC tokenization.
pub fn new(text: &'a str, mode: Mode) -> Self {
    let start = Location {
        line: 1,
        column: 1,
        offset: 0,
    };
    Parser {
        text,
        it: Tokens::new(text, mode),
        tokens: Vec::new(),
        loc: start,
        peeked: None,
    }
}
/// Returns the source range covered by a node of any kind.
fn get_value_loc(&self, value: &Node) -> LocationRange {
    match value {
        Node::Array(node) => node.loc,
        Node::Boolean(node) => node.loc,
        Node::Document(node) => node.loc,
        Node::Element(node) => node.loc,
        Node::Member(node) => node.loc,
        Node::Null(node) => node.loc,
        Node::Number(node) => node.loc,
        Node::Object(node) => node.loc,
        Node::String(node) => node.loc,
    }
}
/// Parses the text contained in the parser into a `Node`.
pub fn parse(&mut self) -> Result<Node, MomoaError> {
    let body = self.parse_value()?;
    let loc = self.get_value_loc(&body);

    // Regular JSON allows nothing after the body; JSONC may still have
    // trailing comments, which are skipped here (note: unlike comments seen
    // via `peek_token`, these are not recorded in `self.tokens`).
    while let Some(token_result) = self.next_token() {
        let token = token_result?;
        if matches!(
            token.kind,
            TokenKind::LineComment | TokenKind::BlockComment
        ) {
            continue;
        }
        return Err(MomoaError::UnexpectedToken {
            unexpected: token.kind,
            line: token.loc.start.line,
            column: token.loc.start.column,
        });
    }

    Ok(Node::Document(Box::new(DocumentNode {
        body,
        loc,
        tokens: self.tokens.clone(),
    })))
}
/// Parses any JSON value at the current position, dispatching on the next
/// (comment-skipping) peeked token.
fn parse_value(&mut self) -> Result<Node, MomoaError> {
    // while loop instead of if because we need to account for comments
    while let Some(token_result) = self.peek_token() {
        match token_result {
            Ok(token) => match token.kind {
                TokenKind::LBrace => return self.parse_object(),
                TokenKind::LBracket => return self.parse_array(),
                TokenKind::Boolean => return self.parse_boolean(),
                TokenKind::Number => return self.parse_number(),
                TokenKind::Null => return self.parse_null(),
                TokenKind::String => return self.parse_string(),
                // Punctuation such as `}` or `,` cannot start a value.
                // Report a parse error instead of panicking (the previous
                // `panic!("Not implemented")` aborted the whole process on
                // malformed input).
                _ => {
                    return Err(MomoaError::UnexpectedToken {
                        unexpected: token.kind,
                        line: token.loc.start.line,
                        column: token.loc.start.column,
                    })
                }
            },
            Err(error) => return Err(error),
        }
    }
    // otherwise we've hit an unexpected end of input
    Err(MomoaError::UnexpectedEndOfInput {
        line: self.loc.line,
        column: self.loc.column,
    })
}
/// Advances to the next token, discarding it.
fn eat_token(&mut self) {
    let _ = self.next_token();
}
/// Advances to the next token and returns it or errors.
///
/// Drains the lookahead buffer first, then falls back to the tokenizer.
fn next_token(&mut self) -> Option<Result<Token, MomoaError>> {
    match self.peeked.take() {
        Some(token) => Some(Ok(token)),
        None => self.it.next(),
    }
}
/// Returns the next token or error without advancing the iterator.
/// Multiple calls always return the same result.
fn peek_token(&mut self) -> Option<Result<Token, MomoaError>> {
    // if there's a peeked token, return it and don't overwrite it
    if let Some(token) = self.peeked {
        return Some(Ok(token));
    }
    // if there's no peeked token, try to get a new one
    while let Some(token_result) = self.it.next() {
        match token_result {
            /*
             * JSON vs. JSONC: Only the JSONC tokenization will return
             * a comment. The JSON tokenization throws an error if it
             * finds a comment, so it's safe to not verify if comments
             * are allowed here.
             */
            Ok(token)
                if token.kind == TokenKind::LineComment
                    || token.kind == TokenKind::BlockComment =>
            {
                // Comments are recorded in `tokens` immediately but never
                // surface to the parser proper.
                self.loc = token.loc.start;
                self.tokens.push(token);
                continue;
            }
            Ok(token) => {
                // Stash the token; `next_token` drains it later.
                self.peeked = Some(token);
                return Some(Ok(token));
            }
            Err(error) => {
                return Some(Err(error));
            }
        }
    }
    None
}
/// Advance only if the next token matches the given `kind`; returns `None`
/// on a non-matching token or end of input, and the tokenizer error if one
/// occurs.
fn maybe_match(&mut self, kind: TokenKind) -> Option<Result<Token, MomoaError>> {
    let next_token = match self.peek_token()? {
        Ok(token) => token,
        Err(error) => return Some(Err(error)),
    };
    if next_token.kind == kind {
        // Consume and record the matched token.
        self.eat_token();
        self.loc = next_token.loc.start;
        self.tokens.push(next_token);
        Some(Ok(next_token))
    } else {
        None
    }
}
/// Advance to the next token and throw an error if it doesn't match
/// `kind`. Returns `UnexpectedEndOfInput` when no tokens remain.
fn must_match(&mut self, kind: TokenKind) -> Result<Token, MomoaError> {
    // check if there is a token first
    if let Some(next_token_result) = self.next_token() {
        // Propagate tokenizer errors instead of the previous `.unwrap()`,
        // which panicked on invalid input.
        let next_token = next_token_result?;
        if next_token.kind == kind {
            self.loc = next_token.loc.start;
            self.tokens.push(next_token);
            return Ok(next_token);
        }
        return Err(MomoaError::UnexpectedToken {
            unexpected: next_token.kind,
            line: next_token.loc.start.line,
            column: next_token.loc.start.column,
        });
    }
    Err(MomoaError::UnexpectedEndOfInput {
        line: self.loc.line,
        column: self.loc.column,
    })
}
/// Returns the slice of the source text between byte offsets `start`
/// (inclusive) and `end` (exclusive).
/// Panics if the offsets are out of bounds or split a UTF-8 character;
/// callers pass token offsets produced by the tokenizer, which are valid.
fn get_text(&self, start: usize, end: usize) -> &str {
&self.text[start..end]
}
/// Parses a `true`/`false` token into a boolean value node.
fn parse_boolean(&mut self) -> Result<Node, MomoaError> {
    let token = self.must_match(TokenKind::Boolean)?;
    let text = self.get_text(token.loc.start.offset, token.loc.end.offset);
    Ok(Node::Boolean(Box::new(ValueNode {
        value: text == "true",
        loc: token.loc,
    })))
}
/// Parses a number token into an `f64` value node.
fn parse_number(&mut self) -> Result<Node, MomoaError> {
    let token = self.must_match(TokenKind::Number)?;
    let raw = self.get_text(token.loc.start.offset, token.loc.end.offset);
    // the tokenizer only emits well-formed numbers, so parsing cannot fail
    let value: f64 = raw.parse().unwrap();
    Ok(Node::Number(Box::new(ValueNode {
        value,
        loc: token.loc,
    })))
}
/// Parses a `null` token into a null node.
fn parse_null(&mut self) -> Result<Node, MomoaError> {
    let loc = self.must_match(TokenKind::Null)?.loc;
    Ok(Node::Null(Box::new(NullNode { loc })))
}
/// Parses a string token into a `Node::String`, interpreting the JSON
/// escape sequences (\", \\, \/, \b, \f, \n, \r, \t, and \uXXXX).
fn parse_string(&mut self) -> Result<Node, MomoaError> {
    let token = self.must_match(TokenKind::String)?;
    let text = self.get_text(token.loc.start.offset, token.loc.end.offset);
    // TODO: Find a way to move this elsewhere
    // for easier lookup of token kinds for characters
    let escaped_chars: HashMap<&char, char> = HashMap::from([
        (&'"', '"'),
        (&'\\', '\\'),
        (&'/', '/'),
        (&'b', '\u{0008}'),
        (&'f', '\u{000c}'),
        (&'n', '\n'),
        (&'r', '\r'),
        (&'t', '\t'),
    ]);
    /*
     * Because we are building up a string, we want to avoid unnecessary
     * reallocations as data is added. So we create a string with an initial
     * capacity of the length of the text minus 2 (for the two quote
     * characters), which will always be enough room to represent the string
     * value. (`saturating_sub` guards against a degenerate short token.)
     */
    let mut value = String::with_capacity(text.len().saturating_sub(2));
    /*
     * We need to build up a string from the characters because we need to
     * interpret certain escape characters that may occur inside the string
     * like \t and \n. We know that all escape sequences are valid because
     * the tokenizer would have already thrown an error otherwise.
     */
    let mut it = text.trim_matches('"').chars();
    while let Some(c) = &it.next() {
        match c {
            '\\' => {
                // will never be false, just need to grab the character
                if let Some(nc) = &it.next() {
                    match nc {
                        // read hexadecimals
                        'u' => {
                            let mut hex_sequence = String::with_capacity(4);
                            for _ in 0..4 {
                                match &it.next() {
                                    Some(hex_digit) => hex_sequence.push(*hex_digit),
                                    _ => panic!("Should never reach here."),
                                }
                            }
                            let char_code =
                                u32::from_str_radix(hex_sequence.as_str(), 16).unwrap();
                            /*
                             * JSON permits lone-surrogate escapes such as
                             * \uD800, which are NOT valid `char` values, so
                             * the previous `char::from_u32_unchecked` call
                             * was undefined behavior for them. Fall back to
                             * U+FFFD REPLACEMENT CHARACTER for any code
                             * point that cannot be represented as a char.
                             */
                            let unicode_char = char::from_u32(char_code)
                                .unwrap_or(std::char::REPLACEMENT_CHARACTER);
                            value.push(unicode_char);
                        }
                        c => match escaped_chars.get(c) {
                            Some(nc) => value.push(*nc),
                            _ => {}
                        },
                    }
                }
            }
            c => {
                value.push(*c);
            }
        }
    }
    Ok(Node::String(Box::new(ValueNode {
        value,
        loc: token.loc,
    })))
}
/// Parses arrays in the format of [value, value].
fn parse_array(&mut self) -> Result<Node, MomoaError> {
    let start = self.must_match(TokenKind::LBracket)?.loc.start;
    let mut elements = Vec::<Node>::new();
    let mut comma_dangle = false;
    while let Some(peeked) = self.peek_token() {
        match peeked {
            // a comma may not appear before the first element or twice in a row
            Ok(token) if token.kind == TokenKind::Comma => {
                return Err(MomoaError::UnexpectedToken {
                    unexpected: token.kind,
                    line: token.loc.start.line,
                    column: token.loc.start.column,
                })
            }
            Ok(token) if token.kind == TokenKind::RBracket => {
                // a trailing comma directly before `]` is invalid
                if comma_dangle {
                    return Err(MomoaError::UnexpectedToken {
                        unexpected: token.kind,
                        line: token.loc.start.line,
                        column: token.loc.start.column,
                    });
                }
                break;
            }
            Ok(_) => {
                // anything else must be a value
                let value = self.parse_value()?;
                elements.push(Node::Element(Box::new(ValueNode {
                    loc: self.get_value_loc(&value),
                    value,
                })));
            }
            Err(error) => return Err(error),
        }
        // elements are separated by commas; no comma means the array is done
        comma_dangle = self.maybe_match(TokenKind::Comma).is_some();
        if !comma_dangle {
            break;
        }
    }
    // now there must be a right bracket
    let end = self.must_match(TokenKind::RBracket)?.loc.end;
    Ok(Node::Array(Box::new(ArrayNode {
        elements,
        loc: LocationRange { start, end },
    })))
}
/// Parses objects in the format of { "key": value, "key": value }.
fn parse_object(&mut self) -> Result<Node, MomoaError> {
let start;
let end;
let lbrace = self.must_match(TokenKind::LBrace)?;
start = lbrace.loc.start;
let mut members = Vec::<Node>::new();
let mut comma_dangle = false;
while let Some(peek_token_result) = self.peek_token() {
match peek_token_result {
// a comma may not appear before the first member or twice in a row
Ok(token) if token.kind == TokenKind::Comma => {
return Err(MomoaError::UnexpectedToken {
unexpected: token.kind,
line: token.loc.start.line,
column: token.loc.start.column,
})
}
// a trailing comma directly before `}` is invalid
Ok(token) if token.kind == TokenKind::RBrace => {
if comma_dangle {
return Err(MomoaError::UnexpectedToken {
unexpected: token.kind,
line: token.loc.start.line,
column: token.loc.start.column,
});
}
break;
}
Ok(_) => {
// name: value
let name = self.parse_string()?;
self.must_match(TokenKind::Colon)?;
let value = self.parse_value()?;
// the member spans from the start of its name to the end of its value
members.push(Node::Member(Box::new(MemberNode {
loc: LocationRange {
start: self.get_value_loc(&name).start,
end: self.get_value_loc(&value).end,
},
name,
value,
})));
}
Err(error) => return Err(error),
}
// only a comma or right brace is valid here
comma_dangle = self.maybe_match(TokenKind::Comma).is_some();
if !comma_dangle {
break;
}
}
// now there must be a right brace
let rbracket = self.must_match(TokenKind::RBrace)?;
end = rbracket.loc.end;
return Ok(Node::Object(Box::new(ObjectNode {
members,
loc: LocationRange { start, end },
})));
}
}
/// Parses `text` as JSON (or JSONC, depending on `mode`) and returns the
/// root AST node.
pub fn parse(text: &str, mode: Mode) -> Result<Node, MomoaError> {
    Parser::new(text, mode).parse()
}
|
/// A raw image description: pixel dimensions plus an offset/handle into the
/// pixel data; the semantics of `pointer` are defined by the caller.
pub struct Image {
    pub width: u32,
    pub height: u32,
    pub pointer: usize,
}

impl Image {
    /// Creates an image with the given dimensions and data pointer.
    pub fn new(width: u32, height: u32, pointer: usize) -> Self {
        Self {
            width,
            height,
            pointer,
        }
    }
}
|
/// Converts `n` into "raindrop speak": concatenates "Pling", "Plang" and
/// "Plong" for divisibility by 3, 5 and 7 respectively, or returns the
/// number itself when none divide it.
pub fn raindrops(n: usize) -> String {
    const DROPS: [(usize, &str); 3] = [(3, "Pling"), (5, "Plang"), (7, "Plong")];
    let sounds: String = DROPS
        .iter()
        .filter(|(factor, _)| n % factor == 0)
        .map(|(_, sound)| *sound)
        .collect();
    if sounds.is_empty() {
        n.to_string()
    } else {
        sounds
    }
}
|
use std::ops::{Add,Mul,Sub};
/// A 2-D point with f64 coordinates; also used as a vector via the
/// `Vector` alias below. `PartialOrd` is derived, so comparisons are
/// lexicographic on (x, y).
#[derive(Debug, PartialEq, PartialOrd, Clone, Copy)]
struct Point {
x: f64,
y: f64,
}
impl Add for Point {
type Output = Self;
fn add(self, other: Self) -> Self {
Point {
x: self.x + other.x,
y: self.y + other.y,
}
}
}
impl Sub for Point {
type Output = Self;
fn sub(self, other: Self) -> Self {
Point {
x: self.x - other.x,
y: self.y - other.y,
}
}
}
impl Mul<f64> for Point {
type Output = Self;
fn mul(self, k: f64) -> Self {
Point {
x: self.x * k,
y: self.y * k,
}
}
}
/// Points double as 2-D vectors.
type Vector = Point;

impl Vector {
    /// Squared Euclidean length of the vector.
    fn norm(&self) -> f64 {
        self.dot(self)
    }
    /// Euclidean length of the vector.
    fn abs(&self) -> f64 {
        self.norm().sqrt()
    }
    /// Dot (inner) product.
    fn dot(&self, other: &Self) -> f64 {
        self.x * other.x + self.y * other.y
    }
    /// 2-D cross product (the z component of the 3-D cross product).
    fn cross(&self, other: &Self) -> f64 {
        self.x * other.y - self.y * other.x
    }
    /// True when the vectors are perpendicular (exact float comparison).
    fn is_orthogonal(&self, other: &Self) -> bool {
        self.dot(other) == 0.0
    }
    /// True when the vectors are parallel (exact float comparison).
    fn is_parallel(&self, other: &Self) -> bool {
        self.cross(other) == 0.0
    }
}
/// A line segment between two endpoints.
struct Segment {
p1: Point,
p2: Point,
}
/// An (infinite) line, represented by two points it passes through.
type Line = Segment;
/// A circle with center `c` and radius `r`.
struct Circle {
c: Point,
r: f64,
}
impl Circle {
/// Creates a circle from its center and radius.
fn new(c: Point, r: f64) -> Self {
Circle { c, r }
}
}
/// A polygon as an ordered list of vertices.
type Polygon = Vec<Point>;
#[cfg(test)]
mod test {
    use super::*;

    /// Exercises the vector operators, products and derived comparisons.
    #[test]
    fn vector() {
        let v1 = Vector { x: 1.0, y: 2.0 };
        let v2 = Vector { x: 3.0, y: 4.0 };
        // arithmetic operators
        assert_eq!(v1 + v2, Vector { x: 4.0, y: 6.0 });
        assert_eq!(v2 - v1, Vector { x: 2.0, y: 2.0 });
        assert_eq!(v1 * 2.0, Vector { x: 2.0, y: 4.0 });
        // lengths and products
        assert_eq!(v2.norm(), 25.0);
        assert_eq!(v2.abs(), 5.0);
        assert_eq!(v1.dot(&v2), 11.0);
        assert_eq!(v1.cross(&v2), -2.0);
        // derived (lexicographic) ordering and equality
        assert!(v1 < v2);
        assert!(!(v1 > v2));
        assert!(v1 == v1);
        assert!(v1 != v2);
    }
}
|
/// Evaluates an arithmetic expression over single-digit numbers in which
/// addition binds tighter than multiplication; parentheses override both.
/// Panics on malformed input or trailing garbage.
pub fn evaluate(x: &[u8]) -> usize {
    let (remaining, result) = evaluate_expr(x);
    assert!(remaining.is_empty());
    result
}

/// Evaluates one expression at the start of `xs`; returns the unconsumed
/// input and the value.
fn evaluate_expr(xs: &[u8]) -> (&[u8], usize) {
    match xs {
        // skip leading whitespace
        [b' ', rest @ ..] => evaluate_expr(rest),
        // a single digit begins a partial expression
        [digit @ b'0'..=b'9', rest @ ..] => {
            evaluate_partial(rest, usize::from(*digit - b'0'))
        }
        // a parenthesized sub-expression also begins a partial expression
        [b'(', rest @ ..] => {
            let (rest, value) = evaluate_group(rest);
            evaluate_partial(rest, value)
        }
        _ => panic!("Invalid expression"),
    }
}

/// Evaluates a parenthesized group; expects and consumes the closing `)`.
fn evaluate_group(xs: &[u8]) -> (&[u8], usize) {
    match evaluate_expr(xs) {
        ([b')', rest @ ..], value) => (rest, value),
        _ => panic!("Invalid group"),
    }
}

/// Continues an expression whose left-hand value is `acc`.
fn evaluate_partial(xs: &[u8], acc: usize) -> (&[u8], usize) {
    match xs {
        // end of input, or end of an enclosing group: yield the accumulator
        [] | [b')', ..] => (xs, acc),
        [b' ', rest @ ..] => evaluate_partial(rest, acc),
        [b'+', rest @ ..] => evaluate_partial_sum(rest, acc),
        [b'*', rest @ ..] => evaluate_partial_product(rest, acc),
        _ => panic!("Invalid partial"),
    }
}

/// Applies `acc + <next term>`; addition consumes only a single term, which
/// gives it higher precedence than multiplication.
fn evaluate_partial_sum(xs: &[u8], acc: usize) -> (&[u8], usize) {
    match xs {
        [b' ', rest @ ..] => evaluate_partial_sum(rest, acc),
        [digit @ b'0'..=b'9', rest @ ..] => {
            evaluate_partial(rest, acc + usize::from(*digit - b'0'))
        }
        [b'(', rest @ ..] => {
            let (rest, value) = evaluate_group(rest);
            evaluate_partial(rest, acc + value)
        }
        _ => panic!("Invalid partial sum"),
    }
}

/// Applies `acc * <rest of expression>`; multiplication consumes everything
/// to its right, which gives it the lowest precedence.
fn evaluate_partial_product(xs: &[u8], acc: usize) -> (&[u8], usize) {
    let (rest, value) = evaluate_expr(xs);
    (rest, acc * value)
}
|
use bevy::prelude::*;
use crate::{
Materials,
collider::{BallHitEvent},
};
/// Bevy plugin wiring up all audio behavior: music spawning at startup plus
/// the per-frame music-toggle and sound-effect systems.
pub struct SoundPlugin;
impl Plugin for SoundPlugin {
fn build(&self, app: &mut AppBuilder) {
// "spawn" is a custom startup stage; the button entity and the
// `Sounds` resource must exist before the per-frame systems run
app
.add_startup_system_to_stage("spawn", spawn_music.system())
.add_system(play_or_stop_music.system())
.add_system(sound_effects.system());
}
}
/// Handles to the loaded audio assets, stored as a resource by `spawn_music`.
pub struct Sounds {
// background music track
song_1: Handle<AudioSource>,
// ball-impact sound effect
hit_1: Handle<AudioSource>,
}
/// Startup system: spawns the on-screen sound-toggle button, starts the
/// background music, and stores the audio handles as a `Sounds` resource.
pub fn spawn_music(
mut commands: Commands,
materials: Res<Materials>,
asset_server: Res<AssetServer>,
audio: ResMut<Audio>
) {
// sound-toggle button anchored to the bottom-right corner of the screen
commands.spawn(ButtonComponents {
style: Style {
size: Size::new(Val::Px(80.0), Val::Px(80.0)),
position_type: PositionType::Absolute,
position: Rect {
right: Val::Px(50.),
bottom: Val::Px(50.),
..Default::default()
},
..Default::default()
},
material: materials.sound_button.clone(),
..Default::default()
});
// start the background track immediately
let music = asset_server.load("music/08 Stage A.mp3");
audio.play(music.clone());
let hit_1 = asset_server.load("music/hit_1.mp3");
// keep the handles alive and accessible to the other sound systems
commands.insert_resource(Sounds {
song_1: music,
hit_1,
});
}
/// Toggles the background music whenever the sound button is clicked.
pub fn play_or_stop_music(
mut audio: ResMut<Audio>,
sounds: Res<Sounds>,
interaction_q: Query<&Interaction>,
) {
if let Some(Interaction::Clicked) = interaction_q.iter().next() {
// Check if music is playing and either play or remove it
// NOTE(review): debug logging left in throughout this function
println!("{:?}", audio.queue);
let has_songs = audio.queue.read().iter().next().is_some();
println!("Clicked on music button, currently songs: {}", has_songs);
if has_songs {
println!("Removed song");
audio.queue.get_mut().pop_front();
} else {
println!("Playing song");
audio.play(sounds.song_1.clone());
// NOTE(review): the two pops below drain the queue right after the
// song was queued — this looks like leftover debugging; confirm the
// song actually keeps playing after this branch runs.
let has_songs = audio.queue.read().iter().next().is_some();
println!("Clicked on music button, currently songs: {}", has_songs);
audio.queue.get_mut().pop_front();
audio.queue.get_mut().pop_front();
}
}
}
pub fn sound_effects (
mut reader: Local<EventReader<BallHitEvent>>,
ball_hit_events: Res<Events<BallHitEvent>>,
audio: Res<Audio>,
sounds: Res<Sounds>,
) {
if let Some(_) = reader.iter(&ball_hit_events).next() {
println!("Playing ball hit sound");
audio.play(sounds.hit_1.clone());
}
} |
/// Tracks which UI tab is currently selected.
pub struct TabsState {
    /// Index into `TabsState::TITLES` of the active tab.
    pub index: usize,
}

impl TabsState {
    /// Display names of the available tabs.
    pub const TITLES: &'static [&'static str] = &["Subscriptions", "Stream", "Retain"];

    /// Starts on the first tab.
    pub fn default() -> TabsState {
        TabsState { index: 0 }
    }

    /// Selects the following tab, wrapping around to the first.
    pub fn next(&mut self) {
        self.index = (self.index + 1) % Self::TITLES.len();
    }

    /// Selects the preceding tab, wrapping around to the last.
    pub fn previous(&mut self) {
        self.index = self
            .index
            .checked_sub(1)
            .unwrap_or(Self::TITLES.len() - 1);
    }
}
|
use connect_four::*;
/// Entry point: runs a full game-tree search from the empty board and prints
/// the result.
fn main() {
    let board = Board::new();
    let outcome = full_search(&board);
    println!("{}", outcome);
}
|
use std::ffi::OsStr;
use std::path::PathBuf;
use super::{debug_tool_message, ToolCommand};
use crate::error::{ErrorKind, Fallible};
use crate::layout::volta_home;
use crate::platform::{CliPlatform, Platform, Sourced};
use crate::session::{ActivityKind, Session};
use crate::tool::bin_full_path;
use crate::tool::{BinConfig, BinLoader};
use log::debug;
/// Builds the `ToolCommand` used to execute the third-party binary `exe`,
/// resolving it in priority order: the current project's direct
/// dependencies, then the user's default toolchain, then the existing PATH.
pub(crate) fn command(
exe: &OsStr,
cli: CliPlatform,
session: &mut Session,
) -> Fallible<ToolCommand> {
session.add_event_start(ActivityKind::Binary);
// first try to use the project toolchain
if let Some(project) = session.project()? {
// check if the executable is a direct dependency
if project.has_direct_bin(&exe)? {
let path_to_bin =
project
.find_bin(&exe)
.ok_or_else(|| ErrorKind::ProjectLocalBinaryNotFound {
command: exe.to_string_lossy().to_string(),
})?;
debug!(
"Found {} in project at '{}'",
exe.to_string_lossy(),
path_to_bin.display()
);
let path_to_bin = path_to_bin.as_os_str();
// run the project-local binary under the resolved platform's PATH
if let Some(platform) = Platform::with_cli(cli, session)? {
debug_tool_message("node", &platform.node);
let image = platform.checkout(session)?;
let path = image.path()?;
return Ok(ToolCommand::project_local(&path_to_bin, &path));
}
// if there's no platform available, pass through to existing PATH.
debug!("Could not find Volta configuration, delegating to system");
return ToolCommand::passthrough(&path_to_bin, ErrorKind::NoPlatform);
}
}
// try to use the default toolchain
if let Some(default_tool) = DefaultBinary::from_name(&exe, session)? {
// CLI-provided platform settings override the tool's own platform
let image = cli.merge(default_tool.platform).checkout(session)?;
debug!(
"Found default {} in '{}'",
exe.to_string_lossy(),
default_tool.bin_path.display()
);
debug_tool_message("node", &image.node);
let path = image.path()?;
let tool_path = default_tool.bin_path.into_os_string();
// binaries with a loader (e.g. a shebang interpreter) are run through
// the loader command with the tool path appended as its last argument
let cmd = match default_tool.loader {
Some(loader) => {
let mut command = ToolCommand::direct(loader.command.as_ref(), &path);
command.args(loader.args);
command.arg(tool_path);
command
}
None => ToolCommand::direct(&tool_path, &path),
};
return Ok(cmd);
}
// at this point, there is no project or default toolchain
// Pass through to the existing PATH
debug!(
"Could not find {}, delegating to system",
exe.to_string_lossy()
);
ToolCommand::passthrough(
&exe,
ErrorKind::BinaryNotFound {
name: exe.to_string_lossy().to_string(),
},
)
}
/// Information about the location and execution context of default binaries
///
/// Fetched from the config files in the Volta directory, represents the binary that is executed
/// when the user is outside of a project that has the given bin as a dependency.
pub struct DefaultBinary {
/// Path to the binary on disk.
pub bin_path: PathBuf,
/// Node/npm/Yarn platform the binary should run under.
pub platform: Platform,
/// Optional loader command (interpreter) used to execute the binary.
pub loader: Option<BinLoader>,
}
impl DefaultBinary {
/// Builds a `DefaultBinary` from an on-disk bin config, filling in the
/// binary's full path and resolving its platform.
pub fn from_config(bin_config: BinConfig, session: &mut Session) -> Fallible<Self> {
let bin_path = bin_full_path(
&bin_config.package,
&bin_config.version,
&bin_config.name,
&bin_config.path,
)?;
// If the user does not have yarn set in the platform for this binary, use the default
// This is necessary because some tools (e.g. ember-cli with the `--yarn` option) invoke `yarn`
let yarn = match bin_config.platform.yarn {
Some(yarn) => Some(yarn),
None => session
.default_platform()?
.and_then(|ref plat| plat.yarn.clone()),
};
let platform = Platform {
node: Sourced::with_binary(bin_config.platform.node),
npm: bin_config.platform.npm.map(Sourced::with_binary),
yarn: yarn.map(Sourced::with_binary),
};
Ok(DefaultBinary {
bin_path,
platform,
loader: bin_config.loader,
})
}
/// Looks up the default binary named `tool_name` in the Volta directory.
/// Returns `Ok(None)` when the name is not valid UTF-8 or when no config
/// file exists (i.e. the tool is not installed).
pub fn from_name(tool_name: &OsStr, session: &mut Session) -> Fallible<Option<Self>> {
let bin_config_file = match tool_name.to_str() {
Some(name) => volta_home()?.default_tool_bin_config(name),
None => return Ok(None),
};
if bin_config_file.exists() {
let bin_config = BinConfig::from_file(bin_config_file)?;
DefaultBinary::from_config(bin_config, session).map(Some)
} else {
Ok(None) // no config means the tool is not installed
}
}
}
|
// Coding up the calculator example from this helpful video - superb playlist by engineer man!
// https://youtu.be/RYTMn_kLItw
use std::io::{ stdin, stdout, Write};
/// Interactive calculator loop: reads two numbers and an operator from
/// stdin, prints the result, and repeats forever.
///
/// Panics if a number fails to parse or no operator character is entered.
fn main() {
    println!("Welcome to the calculator!");
    println!("--------------");
    loop {
        // Fresh buffers every iteration: `read_line` APPENDS to its buffer,
        // so reusing the same Strings across iterations made the second
        // parse see e.g. "5\n7\n" and panic with ParseFloatError — the
        // exact failure recorded in the transcript at the end of this file.
        let mut num1 = String::new();
        let mut num2 = String::new();
        let mut operator = String::new();
        println!("What is the first number?");
        read(&mut num1);
        println!("What is the second number?");
        read(&mut num2);
        println!("What operation would you like to do [ + - / * ]?");
        read(&mut operator);
        let num1: f32 = num1.trim().parse().unwrap();
        let num2: f32 = num2.trim().parse().unwrap();
        let operator: char = operator.trim().chars().next().unwrap();
        println!("{} {} {}", num1, num2, operator);
        // reject anything that is not one of the four supported operators
        if !"+-*/".contains(operator) {
            println!("Unknown operator!");
            continue;
        }
        let result = match operator {
            '+' => num1 + num2,
            '-' => num1 - num2,
            '*' => num1 * num2,
            '/' => num1 / num2,
            // unreachable: the `contains` check above filtered other chars
            _ => panic!("Error in operator")
        };
        println!("Result of {} {} {} = {}", num1, operator, num2, result);
    }
}
/// Reads one line from stdin into `input` (the trailing newline is kept).
/// Flushes stdout first so any pending prompt text is visible.
/// NOTE: `read_line` appends to `input`; callers must pass an empty or
/// freshly cleared String.
fn read(input: &mut String) {
stdout().flush().expect("failed to flush");
stdin().read_line(input).expect("failed to read line");
}
// Program generated the intermittent errors shown below; root cause: the input
// Strings were reused across loop iterations, and `read_line` appends rather
// than overwrites, so the second pass tried to parse e.g. "5\n7\n".
/*
What is the first number?
5
What is the second number?
6
What operation would you like to do [ + - / * ]?
/
thread 'main' panicked at 'called `Result::unwrap()` on an `Err` value: ParseFloatError { kind: Invalid }', src\main.rs:25:25
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
error: process didn't exit successfully: `target\debug\calculator.exe` (exit code: 101)
*/ |
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use structs;
use utils::*;
/// Returns true if the given operand kind can potentially have additional
/// parameters (i.e. any of its enumerants declares parameters).
#[inline(always)]
pub fn has_additional_params(grammar: &structs::OperandKind) -> bool {
    !grammar
        .enumerants
        .iter()
        .all(|e| e.parameters.is_empty())
}
/// Returns true if the given operand can potentially have additional
/// parameters, by looking up its kind in `kinds`.
pub fn operand_has_additional_params(operand: &structs::Operand,
                                     kinds: &[structs::OperandKind])
                                     -> bool {
    match kinds.iter().find(|kind| kind.kind == operand.kind) {
        Some(kind) => has_additional_params(kind),
        None => false,
    }
}
/// Returns the Rust parameter list (as `name: type` strings) for a build
/// method; the result id parameter is included only when `keep_result_id`
/// is true. Appends an `additional_params` slice parameter when the last
/// operand's kind can carry extra parameters.
fn get_param_list(params: &[structs::Operand],
keep_result_id: bool,
kinds: &[structs::OperandKind])
-> Vec<String> {
let mut list: Vec<String> = params.iter().filter_map(|param| {
let name = get_param_name(param);
let kind = get_enum_underlying_type(&param.kind, true);
if param.kind == "IdResult" {
if keep_result_id {
Some("result_id: Option<spirv::Word>".to_string())
} else {
None
}
} else {
// quantifier "" = exactly once, "?" = optional, otherwise repeated
Some(if param.quantifier == "" {
format!("{}: {}", name, kind)
} else if param.quantifier == "?" {
format!("{}: Option<{}>", name, kind)
} else {
format!("{}: &[{}]", name, kind)
})
}
}).collect();
// The last operand may require additional parameters.
if let Some(o) = params.last() {
if operand_has_additional_params(o, kinds) {
list.push("additional_params: &[mr::Operand]".to_string());
}
}
list
}
/// Returns a suitable function name for the given `opname`: the two special
/// return opcodes keep short names, everything else is the opname without
/// its "Op" prefix, snake-cased.
fn get_function_name(opname: &str) -> String {
    match opname {
        "OpReturn" => "ret".to_string(),
        "OpReturnValue" => "ret_value".to_string(),
        _ => snake_casify(&opname[2..]),
    }
}
/// Returns the initializer list for all the parameters required to appear
/// once and only once (quantifier ""); quantified parameters are handled by
/// `get_push_extras` instead.
fn get_init_list(params: &[structs::Operand]) -> Vec<String> {
params.iter().filter_map(|param| {
if param.quantifier == "" {
if param.kind == "IdResult" || param.kind == "IdResultType" {
// These two operands are not stored in the operands field.
None
} else {
let name = get_param_name(param);
let kind = get_mr_operand_kind(&param.kind);
// LiteralString parameters are generic Into<String> and need .into()
Some(if kind == "LiteralString" {
format!("mr::Operand::LiteralString({}.into())", name)
} else {
format!("mr::Operand::{}({})", kind, name)
})
}
} else {
None
}
}).collect()
}
/// Returns generated code snippets that push quantified ("?" optional and
/// "*" repeated) operands — plus the trailing `additional_params`, when the
/// last operand's kind allows them — into the named `container` expression.
fn get_push_extras(params: &[structs::Operand],
kinds: &[structs::OperandKind],
container: &str)
-> Vec<String> {
let mut list: Vec<String> = params.iter().filter_map(|param| {
let name = get_param_name(param);
if param.quantifier == "" {
// required-once operands are emitted by get_init_list instead
None
} else if param.quantifier == "?" {
let kind = get_mr_operand_kind(&param.kind);
Some(format!(
"{s:8}if let Some(v) = {name} {{\n\
{s:12}{container}.push(mr::Operand::{kind}(v{into}));\n\
{s:8}}}",
s = "",
kind = kind,
name = name,
into = if kind == "LiteralString" {
".into()"
} else {
""
},
container = container))
} else {
// TODO: Ouch! Bad smell. This has special case treatment yet
// still doesn't solve 64-bit selectors in OpSwitch.
// Pair kinds are flattened into two pushed operands per element.
if param.kind == "PairLiteralIntegerIdRef" {
Some(format!(
"{s:8}for v in {name} {{\n\
{s:12}{container}.push(mr::Operand::LiteralInt32(v.0));\n\
{s:12}{container}.push(mr::Operand::IdRef(v.1));\n\
{s:8}}}",
s = "",
name = name,
container = container))
} else if param.kind == "PairIdRefLiteralInteger" {
Some(format!(
"{s:8}for v in {name} {{\n\
{s:12}{container}.push(mr::Operand::IdRef(v.0));\n\
{s:12}{container}.push(mr::Operand::LiteralInt32(v.1));\n\
{s:8}}}",
s = "",
name = name,
container = container))
} else if param.kind == "PairIdRefIdRef" {
Some(format!(
"{s:8}for v in {name} {{\n\
{s:12}{container}.push(mr::Operand::IdRef(v.0));\n\
{s:12}{container}.push(mr::Operand::IdRef(v.1));\n\
{s:8}}}",
s = "",
name = name,
container = container))
} else {
let kind = get_mr_operand_kind(&param.kind);
Some(format!(
"{s:8}for v in {name} {{\n\
{s:12}{container}.push(mr::Operand::{kind}(*v))\n\
{s:8}}}",
s = "",
kind = kind,
name = name,
container = container))
}
}
}).collect();
// The last operand may require additional parameters.
if let Some(o) = params.last() {
if operand_has_additional_params(o, kinds) {
list.push(format!("{s:8}{container}.extend_from_slice(additional_params)",
s = "", container = container));
}
}
list
}
/// Returns the generated mr::Operand and its fmt::Display implementation by
/// walking the given SPIR-V operand kinds `grammar`.
// NOTE(review): `&Vec<_>` would conventionally be `&[_]`; left unchanged to
// keep the public signature identical.
pub fn gen_mr_operand_kinds(grammar: &Vec<structs::OperandKind>) -> String {
let mut ret = String::new();
let kinds: Vec<&str> = grammar.iter().map(|element| {
element.kind.as_str()
}).filter(|element| {
// Pair kinds are not used in mr::Operand.
// LiteralContextDependentNumber is replaced by suitable literals.
// LiteralInteger is replaced by LiteralInt32.
// IdResult and IdResultType are not stored as operands in mr.
!(element.starts_with("Pair") ||
*element == "LiteralContextDependentNumber" ||
*element == "LiteralInteger" ||
*element == "IdResult" ||
*element == "IdResultType")
}).collect();
{ // Enum for all operand kinds in data representation.
// Id* kinds wrap a spirv::Word
let id_kinds: Vec<String> = kinds.iter().filter(|element| {
element.starts_with("Id")
}).map(|element| {
format!(" {}(spirv::Word),", element)
}).collect();
// numeric literal variants are fixed, not derived from the grammar
let num_kinds: Vec<&str> = vec![
" LiteralInt32(u32),",
" LiteralInt64(u64),",
" LiteralFloat32(f32),",
" LiteralFloat64(f64),",
" LiteralExtInstInteger(u32),",
" LiteralSpecConstantOpInteger(spirv::Op),"];
let str_kinds: Vec<String> = kinds.iter().filter(|element| {
element.ends_with("String")
}).map(|element| {
format!(" {}(String),", element)
}).collect();
// everything else maps onto the corresponding spirv enum type
let enum_kinds: Vec<String> = kinds.iter().filter(|element| {
!(element.starts_with("Id") ||
element.ends_with("String") ||
element.ends_with("Integer") ||
element.ends_with("Number"))
}).map(|element| {
format!(" {k}(spirv::{k}),", k=element)
}).collect();
let kind_enum = format!(
"/// Data representation of a SPIR-V operand.\n\
#[derive(Clone, Debug, PartialEq, From)]\n\
pub enum Operand {{\n\
{enum_kinds}\n{id_kinds}\n{num_kinds}\n{str_kinds}\n\
}}\n\n",
enum_kinds = enum_kinds.join("\n"),
id_kinds = id_kinds.join("\n"),
num_kinds = num_kinds.join("\n"),
str_kinds = str_kinds.join("\n"));
ret.push_str(&kind_enum);
}
{ // impl fmt::Display for mr::Operand.
// the manually added numeric variants also need Display arms
let mut kinds = kinds;
kinds.append(&mut vec!["LiteralInt32", "LiteralInt64",
"LiteralFloat32", "LiteralFloat64"]);
let cases: Vec<String> =
kinds.iter().map(|element| {
format!("{s:12}Operand::{kind}(ref v) => \
write!(f, \"{{:?}}\", v),",
s = "",
kind = element)
}).collect();
let impl_code = format!(
"impl fmt::Display for Operand {{\n\
{s:4}fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {{\n\
{s:8}match *self {{\n{cases}\n{s:8}}}\n{s:4}}}\n}}\n",
s = "",
cases = cases.join("\n"));
ret.push_str(&impl_code);
}
ret
}
/// Returns the generated build methods for SPIR-V types by walking the given
/// SPIR-V instructions `grammar`. The three excluded opcodes have
/// hand-written builders elsewhere.
pub fn gen_mr_builder_types(grammar: &structs::Grammar) -> String {
let kinds = &grammar.operand_kinds;
// Generate build methods for all types.
let elements: Vec<String> = grammar.instructions.iter().filter(|inst| {
inst.class == "Type" && inst.opname != "OpTypeForwardPointer" &&
inst.opname != "OpTypePointer" && inst.opname != "OpTypeOpaque"
}).map(|inst| {
// Parameter list for this build method.
let param_list = get_param_list(&inst.operands, false, kinds).join(", ");
// Initializer list for constructing the operands parameter
// for Instruction. Skip operand 0 (the IdResult).
let init_list = get_init_list(&inst.operands[1..]).join(", ");
// Parameters that are not single values thus need special treatment.
let extras = get_push_extras(&inst.operands[1..],
kinds,
"self.module.types_global_values.last_mut()\
.expect(\"interal error\").operands").join(";\n");
format!("{s:4}/// Appends an Op{opcode} instruction and returns the result id.\n\
{s:4}pub fn {name}(&mut self{sep}{param}) -> spirv::Word {{\n\
{s:8}let id = self.id();\n\
{s:8}self.module.types_global_values.push(\
mr::Instruction::new(spirv::Op::{opcode}, \
None, Some(id), vec![{init}]));\n\
{extras}{x}\
{s:8}id\n\
{s:4}}}",
s = "",
sep = if param_list.len() != 0 { ", " } else { "" },
opcode = &inst.opname[2..],
name = snake_casify(&inst.opname[2..]),
param = param_list,
init = init_list,
extras = extras,
x = if extras.len() != 0 { ";\n" } else { "" })
}).collect();
format!("impl Builder {{\n{}\n}}", elements.join("\n\n"))
}
/// Returns the generated build methods for SPIR-V terminator instructions
/// (those that end a basic block) by walking the given `grammar`.
pub fn gen_mr_builder_terminator(grammar: &structs::Grammar) -> String {
let kinds = &grammar.operand_kinds;
// Generate build methods for all terminator instructions.
let elements: Vec<String> = grammar.instructions.iter().filter(|inst| {
inst.class == "Terminator"
}).map(|inst| {
let params = get_param_list(&inst.operands, false, kinds).join(", ");
let extras = get_push_extras(&inst.operands, kinds, "inst.operands").join(";\n");
format!("{s:4}/// Appends an Op{opcode} instruction and ends the current basic block.\n\
{s:4}pub fn {name}(&mut self{x}{params}) -> BuildResult<()> {{\n\
{s:8}let {m}inst = mr::Instruction::new(\
spirv::Op::{opcode}, None, None, vec![{init}]);\n\
{extras}{y}\
{s:8}self.end_basic_block(inst)\n\
{s:4}}}",
s = "",
name = get_function_name(&inst.opname),
params = params,
extras = extras,
m = if extras.len() == 0 { "" } else { "mut " },
x = if params.len() == 0 { "" } else { ", " },
y = if extras.len() != 0 { ";\n" } else { "" },
init = get_init_list(&inst.operands).join(", "),
opcode = &inst.opname[2..])
}).collect();
format!("impl Builder {{\n{}\n}}", elements.join("\n\n"))
}
/// Returns the generated build methods for normal instructions — those with
/// an empty class string, which must appear inside a basic block — by
/// walking the given `grammar`.
pub fn gen_mr_builder_normal_insts(grammar: &structs::Grammar) -> String {
let kinds = &grammar.operand_kinds;
// Generate build methods for all normal instructions (instructions must be
// in some basic block).
let elements: Vec<String> = grammar.instructions.iter().filter(|inst| {
inst.class == ""
}).map(|inst| {
let params = get_param_list(&inst.operands, true, kinds).join(", ");
let extras = get_push_extras(&inst.operands, kinds, "inst.operands").join(";\n");
if !inst.operands.is_empty() && inst.operands[0].kind == "IdResultType" {
// For normal instructions, they either have both result type and
// result id or have none.
format!("{s:4}/// Appends an Op{opcode} instruction to the current basic block.\n\
{s:4}pub fn {name}(&mut self{x}{params}) -> BuildResult<spirv::Word> {{\n\
{s:8}if self.basic_block.is_none() {{\n\
{s:12}return Err(Error::DetachedInstruction);\n\
{s:8}}}\n\
{s:8}let id = match result_id {{\n\
{s:12}Some(v) => v,\n\
{s:12}None => self.id(),\n\
{s:8}}};\n\
{s:8}let {m}inst = mr::Instruction::new(\
spirv::Op::{opcode}, Some(result_type), Some(id), vec![{init}]);\n\
{extras}{y}\
{s:8}self.basic_block.as_mut().unwrap().instructions.push(inst);\n\
{s:8}Ok(id)\n\
{s:4}}}",
s = "",
name = get_function_name(&inst.opname),
extras = extras,
params = params,
x = if params.len() == 0 { "" } else { ", " },
m = if extras.len() == 0 { "" } else { "mut " },
y = if extras.len() != 0 { ";\n" } else { "" },
init = get_init_list(&inst.operands).join(", "),
opcode = &inst.opname[2..])
} else {
// instructions without a result: return unit instead of an id
format!("{s:4}/// Appends an Op{opcode} instruction to the current basic block.\n\
{s:4}pub fn {name}(&mut self{x}{params}) -> BuildResult<()> {{\n\
{s:8}if self.basic_block.is_none() {{\n\
{s:12}return Err(Error::DetachedInstruction);\n\
{s:8}}}\n\
{s:8}let {m}inst = mr::Instruction::new(\
spirv::Op::{opcode}, None, None, vec![{init}]);\n\
{extras}{y}\
{s:8}Ok(self.basic_block.as_mut().unwrap().instructions.push(inst))\n\
{s:4}}}",
s = "",
name = get_function_name(&inst.opname),
extras = extras,
params = params,
x = if params.len() == 0 { "" } else { ", " },
m = if extras.len() == 0 { "" } else { "mut " },
y = if extras.len() != 0 { ";\n" } else { "" },
init = get_init_list(&inst.operands).join(", "),
opcode = &inst.opname[2..])
}
}).collect();
format!("impl Builder {{\n{}\n}}", elements.join("\n\n"))
}
/// Returns the generated build methods for constant instructions (excluding
/// OpConstant and OpSpecConstant, which have hand-written builders) by
/// walking the given `grammar`.
pub fn gen_mr_builder_constants(grammar: &structs::Grammar) -> String {
let kinds = &grammar.operand_kinds;
// Generate build methods for all constants.
let elements: Vec<String> = grammar.instructions.iter().filter(|inst| {
inst.class == "Constant" && inst.opname != "OpConstant" && inst.opname != "OpSpecConstant"
}).map(|inst| {
let params = get_param_list(&inst.operands, false, kinds).join(", ");
let extras = get_push_extras(&inst.operands, kinds, "inst.operands").join(";\n");
format!("{s:4}/// Appends an Op{opcode} instruction.\n\
{s:4}pub fn {name}(&mut self{x}{params}) -> spirv::Word {{\n\
{s:8}let id = self.id();\n\
{s:8}let {m}inst = mr::Instruction::new(\
spirv::Op::{opcode}, Some(result_type), Some(id), vec![{init}]);\n\
{extras}{y}\
{s:8}self.module.types_global_values.push(inst);\n\
{s:8}id\n\
{s:4}}}",
s = "",
name = get_function_name(&inst.opname),
extras = extras,
params = params,
x = if params.len() == 0 { "" } else { ", " },
m = if extras.len() == 0 { "" } else { "mut " },
y = if extras.len() != 0 { ";\n" } else { "" },
init = get_init_list(&inst.operands).join(", "),
opcode = &inst.opname[2..])
}).collect();
format!("impl Builder {{\n{}\n}}", elements.join("\n\n"))
}
/// Returns the generated build methods for debug instructions (excluding
/// OpString, which has a hand-written builder) by walking the given
/// `grammar`.
pub fn gen_mr_builder_debug(grammar: &structs::Grammar) -> String {
let kinds = &grammar.operand_kinds;
// Generate build methods for all debug instructions.
let elements: Vec<String> = grammar.instructions.iter().filter(|inst| {
inst.class == "Debug" && inst.opname != "OpString"
}).map(|inst| {
let params = get_param_list(&inst.operands, false, kinds).join(", ");
let extras = get_push_extras(&inst.operands, kinds, "inst.operands").join(";\n");
format!("{s:4}/// Appends an Op{opcode} instruction.\n\
{s:4}pub fn {name}<T: Into<String>>(&mut self{x}{params}) {{\n\
{s:8}let {m}inst = mr::Instruction::new(\
spirv::Op::{opcode}, None, None, vec![{init}]);\n\
{extras}{y}\
{s:8}self.module.debugs.push(inst);\n\
{s:4}}}",
s = "",
name = get_function_name(&inst.opname),
extras = extras,
params = params,
x = if params.len() == 0 { "" } else { ", " },
m = if extras.len() == 0 { "" } else { "mut " },
y = if extras.len() != 0 { ";\n" } else { "" },
init = get_init_list(&inst.operands).join(", "),
opcode = &inst.opname[2..])
}).collect();
format!("impl Builder {{\n{}\n}}", elements.join("\n\n"))
}
/// Returns the generated build methods for annotation instructions
/// (excluding OpDecorationGroup, which has a hand-written builder) by
/// walking the given `grammar`.
pub fn gen_mr_builder_annotation(grammar: &structs::Grammar) -> String {
let kinds = &grammar.operand_kinds;
// Generate build methods for all annotation instructions.
let elements: Vec<String> = grammar.instructions.iter().filter(|inst| {
inst.class == "Annotation" && inst.opname != "OpDecorationGroup"
}).map(|inst| {
let params = get_param_list(&inst.operands, false, kinds).join(", ");
let extras = get_push_extras(&inst.operands, kinds, "inst.operands").join(";\n");
format!("{s:4}/// Appends an Op{opcode} instruction.\n\
{s:4}pub fn {name}(&mut self{x}{params}) {{\n\
{s:8}let {m}inst = mr::Instruction::new(\
spirv::Op::{opcode}, None, None, vec![{init}]);\n\
{extras}{y}\
{s:8}self.module.annotations.push(inst);\n\
{s:4}}}",
s = "",
name = get_function_name(&inst.opname),
extras = extras,
params = params,
x = if params.len() == 0 { "" } else { ", " },
m = if extras.len() == 0 { "" } else { "mut " },
y = if extras.len() != 0 { ";\n" } else { "" },
init = get_init_list(&inst.operands).join(", "),
opcode = &inst.opname[2..])
}).collect();
format!("impl Builder {{\n{}\n}}", elements.join("\n\n"))
}
|
use crate::distribution::{Continuous, ContinuousCDF};
use crate::function::{beta, gamma};
use crate::is_zero;
use crate::statistics::*;
use crate::{Result, StatsError};
use core::f64::INFINITY as INF;
use rand::Rng;
/// Implements the [Beta](https://en.wikipedia.org/wiki/Beta_distribution)
/// distribution
///
/// # Examples
///
/// ```
/// use statrs::distribution::{Beta, Continuous};
/// use statrs::statistics::*;
/// use statrs::prec;
///
/// let n = Beta::new(2.0, 2.0).unwrap();
/// assert_eq!(n.mean().unwrap(), 0.5);
/// assert!(prec::almost_eq(n.pdf(0.5), 1.5, 1e-14));
/// ```
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Beta {
    // First shape parameter (α). `Beta::new` rejects NaN and non-positive
    // values; +∞ is allowed as long as `shape_b` is finite.
    shape_a: f64,
    // Second shape parameter (β); same constraints as `shape_a`.
    shape_b: f64,
}
impl Beta {
    /// Constructs a new beta distribution with shapeA (α) of `shape_a`
    /// and shapeB (β) of `shape_b`
    ///
    /// # Errors
    ///
    /// Returns an error if `shape_a` or `shape_b` are `NaN`.
    /// Also returns an error if `shape_a <= 0.0` or `shape_b <= 0.0`,
    /// or if both shapes are infinite at the same time.
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::Beta;
    ///
    /// let mut result = Beta::new(2.0, 2.0);
    /// assert!(result.is_ok());
    ///
    /// result = Beta::new(0.0, 0.0);
    /// assert!(result.is_err());
    /// ```
    pub fn new(shape_a: f64, shape_b: f64) -> Result<Beta> {
        // A single infinite shape is accepted (degenerate point-mass limit),
        // but both infinite simultaneously is rejected.
        if shape_a.is_nan()
            || shape_b.is_nan()
            || shape_a.is_infinite() && shape_b.is_infinite()
            || shape_a <= 0.0
            || shape_b <= 0.0
        {
            return Err(StatsError::BadParams);
        };
        Ok(Beta { shape_a, shape_b })
    }
    /// Returns the shapeA (α) of the beta distribution
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::Beta;
    ///
    /// let n = Beta::new(2.0, 2.0).unwrap();
    /// assert_eq!(n.shape_a(), 2.0);
    /// ```
    pub fn shape_a(&self) -> f64 {
        self.shape_a
    }
    /// Returns the shapeB (β) of the beta distribution
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::Beta;
    ///
    /// let n = Beta::new(2.0, 2.0).unwrap();
    /// assert_eq!(n.shape_b(), 2.0);
    /// ```
    pub fn shape_b(&self) -> f64 {
        self.shape_b
    }
}
impl ::rand::distributions::Distribution<f64> for Beta {
    /// Draws a Beta(α, β) sample by drawing two independent gamma variates,
    /// Gamma(α, 1) and Gamma(β, 1), and normalizing the first by their sum.
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> f64 {
        let ga = super::gamma::sample_unchecked(rng, self.shape_a, 1.0);
        let gb = super::gamma::sample_unchecked(rng, self.shape_b, 1.0);
        ga / (ga + gb)
    }
}
impl ContinuousCDF<f64, f64> for Beta {
    /// Calculates the cumulative distribution function for the beta
    /// distribution
    /// at `x`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// I_x(α, β)
    /// ```
    ///
    /// where `α` is shapeA, `β` is shapeB, and `I_x` is the regularized
    /// lower incomplete beta function
    fn cdf(&self, x: f64) -> f64 {
        if x < 0.0 {
            0.0
        } else if x >= 1.0 {
            1.0
        } else if self.shape_a.is_infinite() {
            // α = ∞ puts all mass at x = 1. Note: since `x >= 1.0` already
            // returned above, this inner `else` arm is only reachable when
            // `x` is NaN (both comparisons false) — it is not dead code.
            if x < 1.0 {
                0.0
            } else {
                1.0
            }
        } else if self.shape_b.is_infinite() {
            // β = ∞ puts all mass at x = 0, so the CDF is 1 for any x >= 0.
            1.0
        } else if ulps_eq!(self.shape_a, 1.0) && ulps_eq!(self.shape_b, 1.0) {
            // Beta(1, 1) is the standard uniform distribution: CDF(x) = x.
            x
        } else {
            beta::beta_reg(self.shape_a, self.shape_b, x)
        }
    }
}
impl Min<f64> for Beta {
    /// Lower bound of the beta distribution's support.
    ///
    /// The beta distribution is defined on the unit interval, so the
    /// minimum representable value is always `0.0`.
    fn min(&self) -> f64 {
        0.0
    }
}
impl Max<f64> for Beta {
    /// Upper bound of the beta distribution's support.
    ///
    /// The beta distribution is defined on the unit interval, so the
    /// maximum representable value is always `1.0`.
    fn max(&self) -> f64 {
        1.0
    }
}
impl Distribution<f64> for Beta {
    /// Returns the mean of the beta distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// α / (α + β)
    /// ```
    ///
    /// where `α` is shapeA and `β` is shapeB
    fn mean(&self) -> Option<f64> {
        // α = ∞ is the point mass at 1; otherwise the ratio handles the
        // β = ∞ case naturally (α / ∞ = 0).
        let mean = if self.shape_a.is_infinite() {
            1.0
        } else {
            self.shape_a / (self.shape_a + self.shape_b)
        };
        Some(mean)
    }
    /// Returns the variance of the beta distribution
    ///
    /// # Remarks
    ///
    /// # Formula
    ///
    /// ```ignore
    /// (α * β) / ((α + β)^2 * (α + β + 1))
    /// ```
    ///
    /// where `α` is shapeA and `β` is shapeB
    fn variance(&self) -> Option<f64> {
        // Either shape infinite => degenerate point mass => zero variance.
        let var = if self.shape_a.is_infinite() || self.shape_b.is_infinite() {
            0.0
        } else {
            self.shape_a * self.shape_b
                / ((self.shape_a + self.shape_b)
                    * (self.shape_a + self.shape_b)
                    * (self.shape_a + self.shape_b + 1.0))
        };
        Some(var)
    }
    /// Returns the entropy of the beta distribution, or `None` when either
    /// shape is infinite (degenerate limit, unsupported)
    ///
    /// # Formula
    ///
    /// ```ignore
    /// ln(B(α, β)) - (α - 1)ψ(α) - (β - 1)ψ(β) + (α + β - 2)ψ(α + β)
    /// ```
    ///
    /// where `α` is shapeA, `β` is shapeB and `ψ` is the digamma function
    fn entropy(&self) -> Option<f64> {
        // Guard clause instead of the previous `let entr = if … { return None }
        // else { … }` construct, which buried an early return inside a binding.
        if self.shape_a.is_infinite() || self.shape_b.is_infinite() {
            // unsupported limit
            return None;
        }
        let entr = beta::ln_beta(self.shape_a, self.shape_b)
            - (self.shape_a - 1.0) * gamma::digamma(self.shape_a)
            - (self.shape_b - 1.0) * gamma::digamma(self.shape_b)
            + (self.shape_a + self.shape_b - 2.0) * gamma::digamma(self.shape_a + self.shape_b);
        Some(entr)
    }
    /// Returns the skewness of the Beta distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// 2(β - α) * sqrt(α + β + 1) / ((α + β + 2) * sqrt(αβ))
    /// ```
    ///
    /// where `α` is shapeA and `β` is shapeB
    fn skewness(&self) -> Option<f64> {
        // Infinite-shape limits: mass at 1 (α = ∞) is maximally left-skewed,
        // mass at 0 (β = ∞) maximally right-skewed.
        let skew = if self.shape_a.is_infinite() {
            -2.0
        } else if self.shape_b.is_infinite() {
            2.0
        } else {
            2.0 * (self.shape_b - self.shape_a) * (self.shape_a + self.shape_b + 1.0).sqrt()
                / ((self.shape_a + self.shape_b + 2.0) * (self.shape_a * self.shape_b).sqrt())
        };
        Some(skew)
    }
}
impl Mode<Option<f64>> for Beta {
    /// Returns the mode of the Beta distribution.
    ///
    /// # Remarks
    ///
    /// Since the mode is technically only calculated for `α > 1, β > 1`, those
    /// are the only values we allow. We may consider relaxing this constraint
    /// in
    /// the future.
    ///
    /// Returns `None` if `α <= 1` or `β <= 1` (the previous doc claimed this
    /// panics, but the method signature returns an `Option` instead).
    ///
    /// # Formula
    ///
    /// ```ignore
    /// (α - 1) / (α + β - 2)
    /// ```
    ///
    /// where `α` is shapeA and `β` is shapeB
    fn mode(&self) -> Option<f64> {
        // TODO: perhaps relax constraint in order to allow calculation
        // of 'anti-mode'
        if self.shape_a <= 1.0 || self.shape_b <= 1.0 {
            None
        } else if self.shape_a.is_infinite() {
            // α = ∞: all mass concentrates at x = 1.
            Some(1.0)
        } else {
            Some((self.shape_a - 1.0) / (self.shape_a + self.shape_b - 2.0))
        }
    }
}
impl Continuous<f64, f64> for Beta {
    /// Calculates the probability density function for the beta distribution
    /// at `x`.
    ///
    /// # Formula
    ///
    /// ```ignore
    /// let B(α, β) = Γ(α)Γ(β)/Γ(α + β)
    ///
    /// x^(α - 1) * (1 - x)^(β - 1) / B(α, β)
    /// ```
    ///
    /// where `α` is shapeA, `β` is shapeB, and `Γ` is the gamma function
    fn pdf(&self, x: f64) -> f64 {
        if !(0.0..=1.0).contains(&x) {
            // Zero density outside the support [0, 1].
            0.0
        } else if self.shape_a.is_infinite() {
            // α = ∞: point mass at x = 1.
            if ulps_eq!(x, 1.0) {
                INF
            } else {
                0.0
            }
        } else if self.shape_b.is_infinite() {
            // β = ∞: point mass at x = 0.
            if is_zero(x) {
                INF
            } else {
                0.0
            }
        } else if ulps_eq!(self.shape_a, 1.0) && ulps_eq!(self.shape_b, 1.0) {
            // Beta(1, 1) is the standard uniform: constant density 1.
            1.0
        } else if self.shape_a > 80.0 || self.shape_b > 80.0 {
            // For large shapes, evaluate in log space — presumably to keep
            // Γ(α + β) in the direct formula from overflowing f64.
            self.ln_pdf(x).exp()
        } else {
            let bb = gamma::gamma(self.shape_a + self.shape_b)
                / (gamma::gamma(self.shape_a) * gamma::gamma(self.shape_b));
            bb * x.powf(self.shape_a - 1.0) * (1.0 - x).powf(self.shape_b - 1.0)
        }
    }
    /// Calculates the log probability density function for the beta
    /// distribution at `x`.
    ///
    /// # Formula
    ///
    /// ```ignore
    /// let B(α, β) = Γ(α)Γ(β)/Γ(α + β)
    ///
    /// ln(x^(α - 1) * (1 - x)^(β - 1) / B(α, β))
    /// ```
    ///
    /// where `α` is shapeA, `β` is shapeB, and `Γ` is the gamma function
    fn ln_pdf(&self, x: f64) -> f64 {
        if !(0.0..=1.0).contains(&x) {
            -INF
        } else if self.shape_a.is_infinite() {
            if ulps_eq!(x, 1.0) {
                INF
            } else {
                -INF
            }
        } else if self.shape_b.is_infinite() {
            if is_zero(x) {
                INF
            } else {
                -INF
            }
        } else if ulps_eq!(self.shape_a, 1.0) && ulps_eq!(self.shape_b, 1.0) {
            // ln(1) = 0 for the uniform case.
            0.0
        } else {
            // aa = -ln B(α, β), via ln-gamma to avoid overflow.
            let aa = gamma::ln_gamma(self.shape_a + self.shape_b)
                - gamma::ln_gamma(self.shape_a)
                - gamma::ln_gamma(self.shape_b);
            // bb = (α - 1) ln x, with 0 * ln 0 treated as 0 when α = 1.
            let bb = if ulps_eq!(self.shape_a, 1.0) && is_zero(x) {
                0.0
            } else if is_zero(x) {
                -INF
            } else {
                (self.shape_a - 1.0) * x.ln()
            };
            // cc = (β - 1) ln(1 - x), with 0 * ln 0 treated as 0 when β = 1.
            let cc = if ulps_eq!(self.shape_b, 1.0) && ulps_eq!(x, 1.0) {
                0.0
            } else if ulps_eq!(x, 1.0) {
                -INF
            } else {
                (self.shape_b - 1.0) * (1.0 - x).ln()
            };
            aa + bb + cc
        }
    }
}
#[rustfmt::skip]
#[cfg(all(test, feature = "nightly"))]
mod tests {
    use super::*;
    use crate::consts::ACC;
    use crate::distribution::internal::*;
    use crate::statistics::*;
    use crate::testing_boiler;
    testing_boiler!((f64, f64), Beta);
    #[test]
    fn test_create() {
        let valid = [(1.0, 1.0), (9.0, 1.0), (5.0, 100.0), (1.0, INF), (INF, 1.0)];
        for &arg in valid.iter() {
            try_create(arg);
        }
    }
    #[test]
    fn test_bad_create() {
        let invalid = [
            (0.0, 0.0),
            (0.0, 0.1),
            (1.0, 0.0),
            (0.0, INF),
            (INF, 0.0),
            (f64::NAN, 1.0),
            (1.0, f64::NAN),
            (f64::NAN, f64::NAN),
            (1.0, -1.0),
            (-1.0, 1.0),
            (-1.0, -1.0),
            (INF, INF),
        ];
        for &arg in invalid.iter() {
            bad_create_case(arg);
        }
    }
    #[test]
    fn test_mean() {
        let f = |x: Beta| x.mean().unwrap();
        let test = [
            ((1.0, 1.0), 0.5),
            ((9.0, 1.0), 0.9),
            ((5.0, 100.0), 0.047619047619047619047616),
            ((1.0, INF), 0.0),
            ((INF, 1.0), 1.0),
        ];
        for &(arg, res) in test.iter() {
            test_case(arg, res, f);
        }
    }
    #[test]
    fn test_variance() {
        let f = |x: Beta| x.variance().unwrap();
        let test = [
            ((1.0, 1.0), 1.0 / 12.0),
            ((9.0, 1.0), 9.0 / 1100.0),
            ((5.0, 100.0), 500.0 / 1168650.0),
            ((1.0, INF), 0.0),
            ((INF, 1.0), 0.0),
        ];
        for &(arg, res) in test.iter() {
            test_case(arg, res, f);
        }
    }
    #[test]
    fn test_entropy() {
        let f = |x: Beta| x.entropy().unwrap();
        let test = [
            ((9.0, 1.0), -1.3083356884473304939016015),
            ((5.0, 100.0), -2.52016231876027436794592),
        ];
        for &(arg, res) in test.iter() {
            test_case(arg, res, f);
        }
        test_case_special((1.0, 1.0), 0.0, 1e-14, f);
        // Infinite shapes are an unsupported limit and must yield None.
        let entropy = |x: Beta| x.entropy();
        test_none((1.0, INF), entropy);
        test_none((INF, 1.0), entropy);
    }
    #[test]
    fn test_skewness() {
        let skewness = |x: Beta| x.skewness().unwrap();
        test_case((1.0, 1.0), 0.0, skewness);
        test_case((9.0, 1.0), -1.4740554623801777107177478829, skewness);
        test_case((5.0, 100.0), 0.817594109275534303545831591, skewness);
        test_case((1.0, INF), 2.0, skewness);
        test_case((INF, 1.0), -2.0, skewness);
    }
    #[test]
    fn test_mode() {
        let mode = |x: Beta| x.mode().unwrap();
        test_case((5.0, 100.0), 0.038834951456310676243255386, mode);
        test_case((92.0, INF), 0.0, mode);
        test_case((INF, 2.0), 1.0, mode);
    }
    #[test]
    #[should_panic]
    fn test_mode_shape_a_lte_1() {
        // mode() returns None for α <= 1; the panic comes from unwrap().
        let mode = |x: Beta| x.mode().unwrap();
        get_value((1.0, 5.0), mode);
    }
    #[test]
    #[should_panic]
    fn test_mode_shape_b_lte_1() {
        // mode() returns None for β <= 1; the panic comes from unwrap().
        let mode = |x: Beta| x.mode().unwrap();
        get_value((5.0, 1.0), mode);
    }
    #[test]
    fn test_min_max() {
        let min = |x: Beta| x.min();
        let max = |x: Beta| x.max();
        test_case((1.0, 1.0), 0.0, min);
        test_case((1.0, 1.0), 1.0, max);
    }
    #[test]
    fn test_pdf() {
        let f = |arg: f64| move |x: Beta| x.pdf(arg);
        // NOTE: a duplicate ((5.0, 100.0), 1.0, 0.0) row was removed.
        let test = [
            ((1.0, 1.0), 0.0, 1.0),
            ((1.0, 1.0), 0.5, 1.0),
            ((1.0, 1.0), 1.0, 1.0),
            ((9.0, 1.0), 0.0, 0.0),
            ((9.0, 1.0), 0.5, 0.03515625),
            ((9.0, 1.0), 1.0, 9.0),
            ((5.0, 100.0), 0.0, 0.0),
            ((5.0, 100.0), 0.5, 4.534102298350337661e-23),
            ((5.0, 100.0), 1.0, 0.0),
            ((1.0, INF), 0.0, INF),
            ((1.0, INF), 0.5, 0.0),
            ((1.0, INF), 1.0, 0.0),
            ((INF, 1.0), 0.0, 0.0),
            ((INF, 1.0), 0.5, 0.0),
            ((INF, 1.0), 1.0, INF),
        ];
        for &(arg, x, expect) in test.iter() {
            test_case(arg, expect, f(x));
        }
    }
    #[test]
    fn test_pdf_input_lt_0() {
        let pdf = |arg: f64| move |x: Beta| x.pdf(arg);
        test_case((1.0, 1.0), 0.0, pdf(-1.0));
    }
    // Renamed from test_pdf_input_gt_0: the input under test is 2.0, i.e.
    // greater than 1, matching test_ln_pdf_input_gt_1 / test_cdf_input_gt_1.
    #[test]
    fn test_pdf_input_gt_1() {
        let pdf = |arg: f64| move |x: Beta| x.pdf(arg);
        test_case((1.0, 1.0), 0.0, pdf(2.0));
    }
    #[test]
    fn test_ln_pdf() {
        let f = |arg: f64| move |x: Beta| x.ln_pdf(arg);
        let test = [
            ((1.0, 1.0), 0.0, 0.0),
            ((1.0, 1.0), 0.5, 0.0),
            ((1.0, 1.0), 1.0, 0.0),
            ((9.0, 1.0), 0.0, -INF),
            ((9.0, 1.0), 0.5, -3.347952867143343092547366497),
            ((9.0, 1.0), 1.0, 2.1972245773362193827904904738),
            ((5.0, 100.0), 0.0, -INF),
            ((5.0, 100.0), 0.5, -51.447830024537682154565870),
            ((5.0, 100.0), 1.0, -INF),
            ((1.0, INF), 0.0, INF),
            ((1.0, INF), 0.5, -INF),
            ((1.0, INF), 1.0, -INF),
            ((INF, 1.0), 0.0, -INF),
            ((INF, 1.0), 0.5, -INF),
            ((INF, 1.0), 1.0, INF),
        ];
        for &(arg, x, expect) in test.iter() {
            test_case(arg, expect, f(x));
        }
    }
    #[test]
    fn test_ln_pdf_input_lt_0() {
        let ln_pdf = |arg: f64| move |x: Beta| x.ln_pdf(arg);
        test_case((1.0, 1.0), -INF, ln_pdf(-1.0));
    }
    #[test]
    fn test_ln_pdf_input_gt_1() {
        let ln_pdf = |arg: f64| move |x: Beta| x.ln_pdf(arg);
        test_case((1.0, 1.0), -INF, ln_pdf(2.0));
    }
    #[test]
    fn test_cdf() {
        let cdf = |arg: f64| move |x: Beta| x.cdf(arg);
        let test = [
            ((1.0, 1.0), 0.0, 0.0),
            ((1.0, 1.0), 0.5, 0.5),
            ((1.0, 1.0), 1.0, 1.0),
            ((9.0, 1.0), 0.0, 0.0),
            ((9.0, 1.0), 0.5, 0.001953125),
            ((9.0, 1.0), 1.0, 1.0),
            ((5.0, 100.0), 0.0, 0.0),
            ((5.0, 100.0), 0.5, 1.0),
            ((5.0, 100.0), 1.0, 1.0),
            ((1.0, INF), 0.0, 1.0),
            ((1.0, INF), 0.5, 1.0),
            ((1.0, INF), 1.0, 1.0),
            ((INF, 1.0), 0.0, 0.0),
            ((INF, 1.0), 0.5, 0.0),
            ((INF, 1.0), 1.0, 1.0),
        ];
        for &(arg, x, expect) in test.iter() {
            test_case(arg, expect, cdf(x));
        }
    }
    #[test]
    fn test_cdf_input_lt_0() {
        let cdf = |arg: f64| move |x: Beta| x.cdf(arg);
        test_case((1.0, 1.0), 0.0, cdf(-1.0));
    }
    #[test]
    fn test_cdf_input_gt_1() {
        let cdf = |arg: f64| move |x: Beta| x.cdf(arg);
        test_case((1.0, 1.0), 1.0, cdf(2.0));
    }
    #[test]
    fn test_continuous() {
        test::check_continuous_distribution(&try_create((1.2, 3.4)), 0.0, 1.0);
        test::check_continuous_distribution(&try_create((4.5, 6.7)), 0.0, 1.0);
    }
}
|
// Let's not care if the TERM variable is 'dumb'
// since we depend on escape sequences

// Switch to / back from the terminal's alternate screen buffer
// (xterm private mode 1049).
pub const ALTERNATE_ON: &str = "\x1b[?1049h";
pub const ALTERNATE_OFF: &str = "\x1b[?1049l";
// Show / hide the text cursor (DECTCEM, private mode 25).
pub const CURSOR_SHOW: &str = "\x1b[?25h";
pub const CURSOR_HIDE: &str = "\x1b[?25l";
// Move the cursor to the home position (row 1, column 1).
pub const CURSOR_TOP_LEFT: &str = "\x1b[;H";
// SGR sequences: reset all attributes, then standard red / green foreground.
pub const COLOR_RESET: &str = "\x1b[0;0m";
pub const COLOR_RED: &str = "\x1b[0;31m";
pub const COLOR_GREEN: &str = "\x1b[0;32m";
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io::net::ip;
use std::mem;
use std::num::Int;
use std::rt::heap;
use libc;
use {uvll, UvResult};
pub use self::async::Async;
pub use self::connect::Connect;
pub use self::fs::Fs;
pub use self::getaddrinfo::GetAddrInfo;
pub use self::idle::Idle;
pub use self::loop_::Loop;
pub use self::pipe::Pipe;
pub use self::process::Process;
pub use self::shutdown::Shutdown;
pub use self::signal::Signal;
pub use self::tcp::Tcp;
pub use self::timer::Timer;
pub use self::tty::Tty;
pub use self::udp::Udp;
pub use self::udp_send::UdpSend;
pub use self::write::Write;
// Converts a libuv-style status return into a `Result`: negative values
// are libuv error codes (wrapped in `UvError`), zero/positive is success.
// (Old pre-1.0 `macro_rules!` invocation syntax with parentheses.)
macro_rules! call( ($e:expr) => (
    match $e {
        n if n < 0 => Err(::UvError(n)),
        n => Ok(n),
    }
) )
mod async;
mod connect;
mod fs;
mod getaddrinfo;
mod idle;
mod loop_;
mod pipe;
mod process;
mod shutdown;
mod signal;
mod tcp;
mod timer;
mod tty;
mod udp;
mod udp_send;
mod write;
/// Types with a known size for the underlying libuv struct, enabling raw
/// allocation through `Raw<T>`.
pub trait Allocated {
    // Returns the size in bytes of the underlying struct. `_self` is a
    // type-selection dummy (pre-associated-function idiom) and is never read.
    fn size(_self: Option<Self>) -> uint;
}
/// Owning wrapper around a manually-allocated raw pointer; the allocation
/// is freed when the wrapper drops, unless `unwrap` cancels the cleanup.
pub struct Raw<T> {
    // Null after `unwrap`, which signals `Drop` to do nothing.
    ptr: *mut T,
}
// FIXME: this T should be an associated type
/// Common operations for libuv handle wrappers backed by a raw `*mut T`.
pub trait Handle<T: Allocated> {
    /// Returns the raw libuv handle pointer backing this object.
    fn raw(&self) -> *mut T;
    /// Wraps a raw libuv handle pointer; the caller must guarantee validity.
    unsafe fn from_raw(t: *mut T) -> Self;
    /// Returns the event loop this handle is attached to.
    fn uv_loop(&self) -> Loop {
        unsafe {
            let loop_ = uvll::rust_uv_get_loop_for_uv_handle(self.raw() as *mut _);
            Loop::from_raw(loop_)
        }
    }
    /// Reads the user-data pointer stored on the underlying uv handle.
    fn get_data(&self) -> *mut libc::c_void {
        unsafe { uvll::rust_uv_get_data_for_uv_handle(self.raw() as *mut _) }
    }
    /// Stores a user-data pointer on the underlying uv handle.
    fn set_data(&mut self, data: *mut libc::c_void) {
        unsafe {
            uvll::rust_uv_set_data_for_uv_handle(self.raw() as *mut _, data)
        }
    }
    /// Invokes uv_close
    ///
    /// This is unsafe as there is no guarantee that this handle is not actively
    /// being used by other objects.
    unsafe fn close(&mut self, thunk: Option<uvll::uv_close_cb>) {
        uvll::uv_close(self.raw() as *mut _, thunk)
    }
    /// Deallocate this handle.
    ///
    /// This is unsafe as there is no guarantee that no one else is using this
    /// handle currently.
    unsafe fn free(&mut self) { drop(Raw::wrap(self.raw())) }
    /// Invoke uv_close, and then free the handle when the close operation is
    /// done.
    ///
    /// This is unsafe for the same reasons as `close` and `free`.
    unsafe fn close_and_free(&mut self) {
        // Runs after libuv finishes closing; wrapping the pointer in `Raw`
        // and dropping it reclaims the allocation.
        extern fn done<T: Allocated>(t: *mut uvll::uv_handle_t) {
            unsafe { drop(Raw::wrap(t as *mut T)) }
        }
        self.close(Some(done::<T>))
    }
    /// Calls `uv_ref` on the underlying handle.
    fn uv_ref(&self) { unsafe { uvll::uv_ref(self.raw() as *mut _) } }
    /// Calls `uv_unref` on the underlying handle.
    fn uv_unref(&self) { unsafe { uvll::uv_unref(self.raw() as *mut _) } }
}
// FIXME: this T should be an associated type
/// Common operations for libuv request wrappers backed by a raw `*mut T`.
pub trait Request<T: Allocated> {
    /// Returns the raw libuv request pointer backing this object.
    fn raw(&self) -> *mut T;
    /// Wraps a raw libuv request pointer; the caller must guarantee validity.
    unsafe fn from_raw(t: *mut T) -> Self;
    /// Reads the user-data pointer stored on the underlying uv request.
    fn get_data(&self) -> *mut libc::c_void {
        unsafe { uvll::rust_uv_get_data_for_req(self.raw() as *mut _) }
    }
    /// Stores a user-data pointer on the underlying uv request.
    fn set_data(&mut self, data: *mut libc::c_void) {
        unsafe {
            uvll::rust_uv_set_data_for_req(self.raw() as *mut _, data)
        }
    }
    /// Allocate a new uninitialized request.
    ///
    /// This function is unsafe as there is no scheduled destructor for the
    /// returned value.
    unsafe fn alloc() -> Self {
        Request::from_raw(Raw::<T>::new().unwrap())
    }
    /// Invokes uv_cancel, attempting to cancel this pending request.
    ///
    /// (The previous doc was copy-pasted from `close`; this method calls
    /// `uv_cancel` and is not `unsafe`.)
    fn cancel(&mut self) -> UvResult<()> {
        unsafe { try!(call!(uvll::uv_cancel(self.raw() as *mut _))); }
        Ok(())
    }
    /// Deallocate this handle.
    ///
    /// This is unsafe as there is no guarantee that no one else is using this
    /// handle currently.
    unsafe fn free(&mut self) { drop(Raw::wrap(self.raw())) }
}
/// Stream-oriented operations (listen/accept/read) layered over `Handle`.
pub trait Stream<T: Allocated>: Handle<T> {
    /// Starts listening for incoming connections; `cb` fires per connection.
    fn listen(&mut self, backlog: libc::c_int,
              cb: uvll::uv_connection_cb) -> UvResult<()> {
        unsafe {
            try!(call!(uvll::uv_listen(self.raw() as *mut _, backlog, cb)));
            Ok(())
        }
    }
    /// Accepts a pending connection into `other` (a fresh stream handle).
    fn accept(&mut self, other: Self) -> UvResult<()> {
        unsafe {
            try!(call!(uvll::uv_accept(self.raw() as *mut _,
                                       other.raw() as *mut _)));
            Ok(())
        }
    }
    /// Begins reading: `alloc_cb` supplies buffers, `read_cb` receives data.
    fn read_start(&mut self, alloc_cb: uvll::uv_alloc_cb,
                  read_cb: uvll::uv_read_cb) -> UvResult<()> {
        unsafe {
            try!(call!(uvll::uv_read_start(self.raw() as *mut _, alloc_cb,
                                           read_cb)));
            Ok(())
        }
    }
    /// Stops a previously started read.
    fn read_stop(&mut self) -> UvResult<()> {
        unsafe {
            try!(call!(uvll::uv_read_stop(self.raw() as *mut _)));
            Ok(())
        }
    }
}
impl<T: Allocated> Raw<T> {
    /// Allocates a new instance of the underlying pointer.
    fn new() -> Raw<T> {
        // Size is supplied by the `Allocated` impl for `T`.
        let size = Allocated::size(None::<T>);
        unsafe {
            // 8-byte alignment for the raw allocation.
            Raw { ptr: heap::allocate(size as uint, 8) as *mut T }
        }
    }
    /// Wrap a pointer, scheduling it for deallocation when the returned value
    /// goes out of scope.
    unsafe fn wrap(ptr: *mut T) -> Raw<T> { Raw { ptr: ptr } }
    /// Returns the wrapped pointer without affecting ownership.
    fn get(&self) -> *mut T { self.ptr }
    /// Unwrap this raw pointer, cancelling its deallocation.
    ///
    /// This method is unsafe because it will leak the returned pointer.
    unsafe fn unwrap(mut self) -> *mut T {
        let ret = self.ptr;
        // Null the pointer so `Drop` skips deallocation.
        self.ptr = 0 as *mut T;
        return ret;
    }
}
#[unsafe_destructor]
impl<T: Allocated> Drop for Raw<T> {
    // Frees the allocation made in `Raw::new`, unless ownership was
    // relinquished via `unwrap` (null pointer).
    fn drop(&mut self) {
        if self.ptr.is_null() { return }
        // Must mirror the size/alignment used at allocation time.
        let size = Allocated::size(None::<T>);
        unsafe {
            heap::deallocate(self.ptr as *mut u8, size as uint, 8)
        }
    }
}
/// Views a byte slice as a libuv `uv_buf_t` (no copy; the buffer borrows
/// `v`'s storage, so `v` must outlive any use of the returned buf).
pub fn slice_to_uv_buf(v: &[u8]) -> uvll::uv_buf_t {
    let data = v.as_ptr();
    uvll::uv_buf_t { base: data as *mut u8, len: v.len() as uvll::uv_buf_len_t }
}
/// Queries a socket name via a libuv getter (`uv_tcp_getsockname`-style
/// function `f`) and converts the result into a Rust `SocketAddr`.
fn socket_name<T>(handle: *const T,
                  f: unsafe extern fn(*const T, *mut libc::sockaddr,
                                      *mut libc::c_int) -> libc::c_int)
                  -> UvResult<ip::SocketAddr> {
    // Allocate a sockaddr_storage since we don't know if it's ipv4 or ipv6
    let mut sockaddr: libc::sockaddr_storage = unsafe { mem::zeroed() };
    let mut namelen = mem::size_of::<libc::sockaddr_storage>() as libc::c_int;
    let sockaddr_p = &mut sockaddr as *mut libc::sockaddr_storage;
    unsafe {
        // `f` fills `sockaddr` and updates `namelen` to the actual size.
        try!(call!(f(&*handle, sockaddr_p as *mut _, &mut namelen)));
    }
    Ok(sockaddr_to_addr(&sockaddr, namelen as uint))
}
/// Decodes a C `sockaddr_storage` (of `len` bytes) into a Rust
/// `ip::SocketAddr`, supporting AF_INET and AF_INET6.
///
/// Panics on any other address family.
pub fn sockaddr_to_addr(storage: &libc::sockaddr_storage,
                        len: uint) -> ip::SocketAddr {
    // Network-to-host order for 16-bit fields (ports, v6 groups).
    fn ntohs(u: u16) -> u16 { Int::from_be(u) }
    match storage.ss_family as libc::c_int {
        libc::AF_INET => {
            assert!(len as uint >= mem::size_of::<libc::sockaddr_in>());
            let storage: &libc::sockaddr_in = unsafe {
                mem::transmute(storage)
            };
            // s_addr is in network (big-endian) order; extract the four
            // octets from most to least significant.
            let ip = (storage.sin_addr.s_addr as u32).to_be();
            let a = (ip >> 24) as u8;
            let b = (ip >> 16) as u8;
            let c = (ip >> 8) as u8;
            let d = (ip >> 0) as u8;
            ip::SocketAddr {
                ip: ip::Ipv4Addr(a, b, c, d),
                port: ntohs(storage.sin_port),
            }
        }
        libc::AF_INET6 => {
            assert!(len as uint >= mem::size_of::<libc::sockaddr_in6>());
            let storage: &libc::sockaddr_in6 = unsafe {
                mem::transmute(storage)
            };
            // NOTE(review): s6_addr is indexed as eight 16-bit groups here —
            // this relies on the old libc binding declaring it as [u16; 8];
            // verify against the uvll/libc definition in use.
            let a = ntohs(storage.sin6_addr.s6_addr[0]);
            let b = ntohs(storage.sin6_addr.s6_addr[1]);
            let c = ntohs(storage.sin6_addr.s6_addr[2]);
            let d = ntohs(storage.sin6_addr.s6_addr[3]);
            let e = ntohs(storage.sin6_addr.s6_addr[4]);
            let f = ntohs(storage.sin6_addr.s6_addr[5]);
            let g = ntohs(storage.sin6_addr.s6_addr[6]);
            let h = ntohs(storage.sin6_addr.s6_addr[7]);
            ip::SocketAddr {
                ip: ip::Ipv6Addr(a, b, c, d, e, f, g, h),
                port: ntohs(storage.sin6_port),
            }
        }
        n => {
            panic!("unknown family {}", n);
        }
    }
}
/// Encodes a Rust `ip::SocketAddr` into the given C `sockaddr_storage`,
/// returning the number of bytes written (size of the concrete sockaddr).
pub fn addr_to_sockaddr(addr: ip::SocketAddr,
                        storage: &mut libc::sockaddr_storage)
                        -> libc::socklen_t {
    // Host-to-network order for 16-bit fields (ports, v6 groups).
    fn htons(u: u16) -> u16 { u.to_be() }
    unsafe {
        let len = match addr.ip {
            ip::Ipv4Addr(a, b, c, d) => {
                // Pack the four octets most-significant first, then convert
                // to network order for s_addr.
                let ip = (a as u32 << 24) |
                         (b as u32 << 16) |
                         (c as u32 << 8) |
                         (d as u32 << 0);
                let storage = storage as *mut _ as *mut libc::sockaddr_in;
                (*storage).sin_family = libc::AF_INET as libc::sa_family_t;
                (*storage).sin_port = htons(addr.port);
                (*storage).sin_addr = libc::in_addr {
                    s_addr: Int::from_be(ip),
                };
                mem::size_of::<libc::sockaddr_in>()
            }
            ip::Ipv6Addr(a, b, c, d, e, f, g, h) => {
                let storage = storage as *mut _ as *mut libc::sockaddr_in6;
                (*storage).sin6_family = libc::AF_INET6 as libc::sa_family_t;
                // Fix: `sin6_port` was previously assigned twice in a row;
                // the redundant duplicate assignment has been removed.
                (*storage).sin6_port = htons(addr.port);
                (*storage).sin6_addr = libc::in6_addr {
                    s6_addr: [
                        htons(a),
                        htons(b),
                        htons(c),
                        htons(d),
                        htons(e),
                        htons(f),
                        htons(g),
                        htons(h),
                    ]
                };
                mem::size_of::<libc::sockaddr_in6>()
            }
        };
        return len as libc::socklen_t
    }
}
|
#[macro_use]
extern crate log;
extern crate getopts;
extern crate rand;
extern crate sdl2;
extern crate simplelog;
// External
use getopts::Options;
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
// Std
use std::env;
use std::net::{IpAddr, Ipv4Addr, SocketAddr};
use std::path::Path;
use std::time::{Duration, Instant};
// Internal
use block_peers::logging;
use block_peers::net::{ServerMessage, Socket};
use block_peers::render::{Renderer, VIEWPORT_HEIGHT, VIEWPORT_WIDTH};
use block_peers::scene::{AppLifecycleEvent, GameSoundEvent, Scene};
use block_peers::scenes::TitleScene;
use block_peers::sound::{AudioManager, SoundEffect};
// Constants
const WINDOW_WIDTH: u32 = VIEWPORT_WIDTH;
const WINDOW_HEIGHT: u32 = VIEWPORT_HEIGHT;
// Fixed-timestep simulation rate: one update "tick" every 1/60 s.
const TICKS_PER_SECOND: u64 = 60;
const MICROSECONDS_PER_SECOND: u64 = 1_000_000;
const MICROSECONDS_PER_TICK: u64 = MICROSECONDS_PER_SECOND / TICKS_PER_SECOND;
/// Client entry point: initializes SDL (video/audio/image/ttf), opens the
/// window, then runs a fixed-timestep game loop — drain network messages,
/// handle input, run pending update ticks, play queued sounds, render —
/// until the scene requests quit or the user closes/escapes.
pub fn main() {
    logging::init();
    let options = get_options();
    let server_addr = SocketAddr::new(options.host, options.port);
    // Subsystems Init
    // Note: handles must stay in scope until end of program due to dropping.
    let sdl_context = sdl2::init().unwrap();
    let video_subsystem = sdl_context.video().unwrap();
    let _audio = sdl_context.audio().unwrap();
    let _image = sdl2::image::init(sdl2::image::InitFlag::PNG).unwrap();
    let ttf = sdl2::ttf::init().unwrap();
    // Draw
    let mut window_builder = video_subsystem.window("Block Wars", WINDOW_WIDTH, WINDOW_HEIGHT);
    window_builder.opengl();
    if options.fullscreen {
        window_builder.fullscreen();
    } else {
        window_builder.position_centered().resizable();
    }
    let window = window_builder.build().unwrap();
    let mut renderer = Renderer::new(
        window.into_canvas().present_vsync().build().unwrap(),
        Path::new("assets/textures.png"),
        Path::new("assets/VT323-Regular.ttf"),
        &ttf,
    );
    // Input
    let mut event_pump = sdl_context.event_pump().unwrap();
    // Timing
    let tick_duration = Duration::from_micros(MICROSECONDS_PER_TICK);
    let mut previous_instant = Instant::now();
    // Frame/update counters; reset every second when logged.
    let mut fps = 0;
    let mut ups = 0;
    let mut fps_timer = Instant::now();
    // Network
    let mut socket = Socket::new().expect("could not open a new socket");
    // Scene
    let mut scene: Box<dyn Scene> = Box::new(TitleScene::new(server_addr));
    // Audio
    let mut audio_manager = AudioManager::new();
    let mut sound_events = Vec::new();
    if options.no_sound {
        audio_manager.dev_turn_sound_off();
    }
    audio_manager.set_volume(0.20);
    audio_manager.play_bg_music();
    'running: loop {
        // Network: drain every pending server message before this frame.
        loop {
            match socket.receive::<ServerMessage>() {
                Ok(Some((source_addr, message))) => {
                    scene = scene.handle_message(&mut socket, source_addr, message);
                }
                Ok(None) => {
                    break;
                }
                Err(_) => {
                    error!("received unknown message");
                }
            }
        }
        // Input
        for event in event_pump.poll_iter() {
            match event {
                // Quit, Escape, or Q all shut the app down.
                Event::Quit { .. }
                | Event::KeyDown {
                    keycode: Some(Keycode::Escape),
                    ..
                }
                | Event::KeyDown {
                    keycode: Some(Keycode::Q),
                    ..
                } => {
                    trace!("app asked to shutdown");
                    scene.lifecycle(&mut socket, AppLifecycleEvent::Shutdown);
                    break 'running;
                }
                event => {
                    scene = scene.input(&mut socket, event);
                }
            }
        }
        // Update: fixed timestep — run as many ticks as wall time owes us.
        let current_instant = Instant::now();
        while current_instant - previous_instant >= tick_duration {
            scene = scene.update(&mut socket, &mut sound_events);
            previous_instant += tick_duration;
            ups += 1;
        }
        // Handle any sounds due to update
        for event in sound_events.iter() {
            match event {
                GameSoundEvent::LinesCleared(count) => match count {
                    1 => audio_manager.play_sfx(SoundEffect::SmokeOne),
                    2 => audio_manager.play_sfx(SoundEffect::SmokeTwo),
                    3 => audio_manager.play_sfx(SoundEffect::SmokeThree),
                    4 => audio_manager.play_sfx(SoundEffect::SmokeFour),
                    _ => unreachable!("tried to clear illegal number of lines"),
                },
                GameSoundEvent::MovePieceDown => {
                    audio_manager.play_sfx(SoundEffect::Whoosh);
                }
                GameSoundEvent::TurnSoundsOff => {
                    audio_manager.ui_turn_sound_off();
                }
                GameSoundEvent::TurnSoundsOn => {
                    audio_manager.ui_turn_sound_on();
                }
            }
        }
        sound_events.clear();
        if scene.should_quit() {
            break 'running;
        }
        // Render
        renderer.clear();
        scene.render(&mut renderer);
        fps += 1;
        // Log frame/update rates once per second, then reset counters.
        if fps_timer.elapsed().as_millis() >= 1000 {
            trace!("fps {} ups {}", fps, ups);
            fps = 0;
            ups = 0;
            fps_timer = Instant::now();
        }
        renderer.present();
    }
}
// Defaults used when -p/--port or -h/--host are not supplied.
const DEFAULT_PORT: u16 = 4485;
const DEFAULT_HOST: IpAddr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
/// Parsed command-line configuration for the client.
struct ClientOptions {
    // Server port to connect to.
    port: u16,
    // Server address to connect to.
    host: IpAddr,
    // Open the window fullscreen instead of centered/resizable.
    fullscreen: bool,
    // Disable all audio output (dev convenience).
    no_sound: bool,
}
/// Parses command-line flags into a `ClientOptions`, falling back to the
/// defaults (port 4485, host 127.0.0.1, windowed, sound enabled).
///
/// # Panics
///
/// Panics if the arguments cannot be parsed, or if the supplied port or
/// host value is invalid.
fn get_options() -> ClientOptions {
    let args: Vec<String> = env::args().collect();
    let mut opts = Options::new();
    opts.optopt(
        "p",
        "port",
        "connect to server on specified port (default 4485)",
        "PORT",
    );
    opts.optopt(
        "h",
        "host",
        "connect to host at specified address (default 127.0.0.1)",
        "HOST",
    );
    opts.optflag("f", "fullscreen", "open the game in a fullscreen window");
    opts.optflag("s", "no-sound", "open the game without sound");
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        // `panic!` with a non-literal payload (`panic!(f.to_string())`) is
        // deprecated and a hard error in edition 2021; format it instead.
        Err(f) => panic!("{}", f),
    };
    let port: u16 = match matches.opt_get("port") {
        Ok(Some(port)) => port,
        Ok(None) => DEFAULT_PORT,
        Err(_) => panic!("specified port not valid"),
    };
    let host: IpAddr = match matches.opt_get("host") {
        Ok(Some(host)) => host,
        Ok(None) => DEFAULT_HOST,
        // Message grammar fixed ("specific host was not valid socket address").
        Err(_) => panic!("specified host was not a valid socket address"),
    };
    let fullscreen: bool = matches.opt_present("fullscreen");
    let no_sound: bool = matches.opt_present("no-sound");
    ClientOptions {
        host,
        port,
        fullscreen,
        no_sound,
    }
}
|
extern crate bodyparser;
use std::sync::Arc;
use rustc_serialize::json;
use iron::prelude::*;
use iron::status;
use iron::middleware::Handler;
use router::Router;
use uuid::Uuid;
use repository::Repository;
use todo::Todo;
// Declares a named handler struct that holds a shared todo repository,
// plus a `new` constructor — boilerplate shared by every route handler.
macro_rules! handler {
    ($x:ident) => {
        pub struct $x {
            // Shared, reference-counted access to the todo store.
            repository: Arc<Repository<Todo>>,
        }
        impl $x {
            pub fn new(repository: Arc<Repository<Todo>>) -> $x {
                $x { repository: repository }
            }
        }
    }
}
// == GET /todos
handler!(GETTodosHandler);
impl Handler for GETTodosHandler {
    /// Responds 200 OK with the JSON-encoded list of every stored todo.
    fn handle(&self, _req: &mut Request) -> IronResult<Response> {
        let todos = self.repository.all();
        let body = json::encode(&todos).unwrap();
        Ok(Response::with((status::Ok, body)))
    }
}
// == POST /todos
handler!(POSTTodosHandler);
impl Handler for POSTTodosHandler {
    /// Creates a todo from the JSON request body and responds 201 Created
    /// with the stored todo. Missing `title`/`order` fields default to
    /// `""` and `0`. Panics on a missing or unparseable body (as before).
    fn handle(&self, req: &mut Request) -> IronResult<Response> {
        match req.get::<bodyparser::Json>() {
            Ok(Some(body)) => {
                let id = Uuid::new_v4().hyphenated().to_string();
                let object = body.as_object().unwrap();
                // Absent title => empty string (present-but-non-string still panics).
                let new_title: String = match object.get("title") {
                    Some(value) => String::from(value.as_str().unwrap()),
                    None => String::from(""),
                };
                // Absent order => 0.
                let new_order: u64 = match object.get("order") {
                    Some(value) => value.as_u64().unwrap(),
                    None => 0,
                };
                let todo = Todo::new(id.clone(), new_title, false, new_order);
                let todo = self.repository.add(id, todo);
                Ok(Response::with((status::Created, json::encode(&todo).unwrap())))
            }
            Ok(None) => panic!("No body"),
            Err(err) => panic!("Error: {:?}", err)
        }
    }
}
// == DELETE /todos
handler!(DELETETodosHandler);
impl Handler for DELETETodosHandler {
    /// Removes every stored todo and responds 200 OK with an empty body.
    fn handle(&self, _req: &mut Request) -> IronResult<Response> {
        self.repository.delete_all();
        Ok(Response::with(status::Ok))
    }
}
// == GET /todos/:id
handler!(GETTodoHandler);
impl Handler for GETTodoHandler {
    /// Looks up the todo identified by the `:id` route parameter and
    /// responds 200 OK with its JSON encoding.
    fn handle(&self, req: &mut Request) -> IronResult<Response> {
        let route = req.extensions.get::<Router>().unwrap();
        let id = route.find("id").unwrap();
        let todo = self.repository.get(String::from(id));
        let body = json::encode(&todo).unwrap();
        Ok(Response::with((status::Ok, body)))
    }
}
// == PATCH /todos/:id
handler!(PATCHTodoHandler);
impl Handler for PATCHTodoHandler {
    /// Partially updates the todo identified by the `:id` route parameter:
    /// `title`, `completed`, and `order` fields present in the JSON body
    /// replace the stored values; absent fields keep the old values.
    /// Panics on a missing or unparseable body (as before).
    fn handle(&self, req: &mut Request) -> IronResult<Response> {
        match req.get::<bodyparser::Json>() {
            Ok(Some(body)) => {
                let id = String::from(req.extensions.get::<Router>().unwrap().find("id").unwrap());
                let object = body.as_object().unwrap();
                let old_todo = self.repository.get(id.clone());
                let new_title: String = match object.get("title") {
                    Some(value) => String::from(value.as_str().unwrap()),
                    None => old_todo.title.clone(),
                };
                let new_completed: bool = match object.get("completed") {
                    Some(value) => value.as_bool().unwrap(),
                    None => old_todo.completed,
                };
                let new_order: u64 = match object.get("order") {
                    Some(value) => value.as_u64().unwrap(),
                    None => old_todo.order,
                };
                let todo = Todo::new(id.clone(), new_title, new_completed, new_order);
                let todo = self.repository.update(id, todo);
                Ok(Response::with((status::Ok, json::encode(&todo).unwrap())))
            }
            Ok(None) => panic!("No body"),
            Err(err) => panic!("Error: {:?}", err)
        }
    }
}
// == DELETE /todos/:id
handler!(DELETETodoHandler);
impl Handler for DELETETodoHandler {
    /// Deletes the todo identified by the `:id` route parameter and
    /// responds 200 OK with an empty body.
    fn handle(&self, req: &mut Request) -> IronResult<Response> {
        let route = req.extensions.get::<Router>().unwrap();
        let id = String::from(route.find("id").unwrap());
        self.repository.delete(id);
        Ok(Response::with(status::Ok))
    }
}
|
#[doc = "Reader of register IPTAT_TRIM0"]
pub type R = crate::R<u32, super::IPTAT_TRIM0>;
#[doc = "Writer for register IPTAT_TRIM0"]
pub type W = crate::W<u32, super::IPTAT_TRIM0>;
#[doc = "Register IPTAT_TRIM0 `reset()`'s with value 0"]
impl crate::ResetValue for super::IPTAT_TRIM0 {
    type Type = u32;
    // The whole register resets to 0.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `IPTAT_CORE_TRIM`"]
pub type IPTAT_CORE_TRIM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IPTAT_CORE_TRIM`"]
pub struct IPTAT_CORE_TRIM_W<'a> {
    // Mutable handle to the whole-register writer this proxy updates.
    w: &'a mut W,
}
impl<'a> IPTAT_CORE_TRIM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits [3:0]: clear them, then OR in the low 4 bits
        // of `value` (extra bits of `value` are masked off).
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
#[doc = "Reader of field `IPTAT_CTBM_TRIM`"]
pub type IPTAT_CTBM_TRIM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IPTAT_CTBM_TRIM`"]
pub struct IPTAT_CTBM_TRIM_W<'a> {
    // Mutable handle to the whole-register writer this proxy updates.
    w: &'a mut W,
}
impl<'a> IPTAT_CTBM_TRIM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits [7:4]: clear them, then OR in the low 4 bits
        // of `value` shifted into position.
        self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:3 - IPTAT trim 0x0 : Minimum IPTAT current (~150nA at room) 0xF : Maximum IPTAT current (~350nA at room)"]
    #[inline(always)]
    pub fn iptat_core_trim(&self) -> IPTAT_CORE_TRIM_R {
        // Extract bits [3:0] of the register value.
        IPTAT_CORE_TRIM_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - CTMB PTAT Current Trim 0x0 : Minimum CTMB IPTAT Current (~875nA) 0xF : Maximum CTMB IPTAT Current (~1.1uA)"]
    #[inline(always)]
    pub fn iptat_ctbm_trim(&self) -> IPTAT_CTBM_TRIM_R {
        // Extract bits [7:4] of the register value.
        IPTAT_CTBM_TRIM_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:3 - IPTAT trim 0x0 : Minimum IPTAT current (~150nA at room) 0xF : Maximum IPTAT current (~350nA at room)"]
    #[inline(always)]
    pub fn iptat_core_trim(&mut self) -> IPTAT_CORE_TRIM_W {
        // Returns a write proxy scoped to bits [3:0].
        IPTAT_CORE_TRIM_W { w: self }
    }
    #[doc = "Bits 4:7 - CTMB PTAT Current Trim 0x0 : Minimum CTMB IPTAT Current (~875nA) 0xF : Maximum CTMB IPTAT Current (~1.1uA)"]
    #[inline(always)]
    pub fn iptat_ctbm_trim(&mut self) -> IPTAT_CTBM_TRIM_W {
        // Returns a write proxy scoped to bits [7:4].
        IPTAT_CTBM_TRIM_W { w: self }
    }
}
|
/// InternalTracker represents settings for internal tracker.
/// All fields are optional; `None` means "leave the server-side setting unchanged".
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct InternalTracker {
    /// Let only contributors track time (Built-in issue tracker)
    pub allow_only_contributors_to_track_time: Option<bool>,
    /// Enable dependencies for issues and pull requests (Built-in issue tracker)
    pub enable_issue_dependencies: Option<bool>,
    /// Enable time tracking (Built-in issue tracker)
    pub enable_time_tracker: Option<bool>,
}
impl InternalTracker {
/// Create a builder for this object.
#[inline]
pub fn builder() -> InternalTrackerBuilder {
InternalTrackerBuilder {
body: Default::default(),
}
}
}
/// Conversion from the builder into the finished object.
///
/// Implemented as `From` rather than a hand-written `Into`: the standard
/// library's blanket impl keeps `builder.into()` working for existing callers
/// while additionally enabling `InternalTracker::from(builder)`.
impl From<InternalTrackerBuilder> for InternalTracker {
    fn from(builder: InternalTrackerBuilder) -> Self {
        builder.body
    }
}
/// Builder for [`InternalTracker`](./struct.InternalTracker.html) object.
#[derive(Debug, Clone)]
pub struct InternalTrackerBuilder {
body: self::InternalTracker,
}
impl InternalTrackerBuilder {
/// Let only contributors track time (Built-in issue tracker)
#[inline]
pub fn allow_only_contributors_to_track_time(mut self, value: impl Into<bool>) -> Self {
self.body.allow_only_contributors_to_track_time = Some(value.into());
self
}
/// Enable dependencies for issues and pull requests (Built-in issue tracker)
#[inline]
pub fn enable_issue_dependencies(mut self, value: impl Into<bool>) -> Self {
self.body.enable_issue_dependencies = Some(value.into());
self
}
/// Enable time tracking (Built-in issue tracker)
#[inline]
pub fn enable_time_tracker(mut self, value: impl Into<bool>) -> Self {
self.body.enable_time_tracker = Some(value.into());
self
}
}
|
/// Demonstrates creating a `Vec` with the `vec!` macro.
fn main() {
    // The original one-liner referenced undefined `val1`/`val2`/`val3` and was
    // missing the trailing semicolon, so it did not compile; bind concrete
    // values first and print the result so the vector is actually used.
    let (val1, val2, val3) = (1, 2, 3);
    let vector_name = vec![val1, val2, val3];
    println!("{:?}", vector_name);
}
use crate::problem_datatypes::Solution;
use crate::problem_datatypes::DataPoints;
use crate::problem_datatypes::Constraints;
use crate::fitness_evaluation_result::FitnessEvaluationResult;
use crate::arg_parser::SearchType;
use rand::Rng;
use rand::rngs::StdRng;
use rand::seq::SliceRandom;
use std::io::{stdin, stdout, Read, Write};
// Para usar una cola con prioridad
use priority_queue::PriorityQueue;
use ordered_float::OrderedFloat;
use std::collections::HashSet;
/// Represents a population for the genetic algorithms.
#[derive(Debug, Clone)]
pub struct Population<'a, 'b>{
    /// Individuals (candidate solutions) of the population
    individuals: Vec<Solution<'a, 'b> >,
}
/// Implementacion para la parte de los algoritmos geneticos
impl<'a, 'b> Population<'a, 'b>{
/// Creates an empty population, with no individuals.
pub fn new_empty_population() -> Self{
    return Self{
        individuals: vec![]
    };
}
/// Generates a new random population of `population_size` randomly
/// generated solutions over the given data points and constraints.
pub fn new_random_population(data_points: &'a DataPoints, constraints: &'b Constraints, number_of_clusters: i32, population_size: i32, rng: &mut StdRng) -> Self{
    let mut rand_population = Self{individuals: vec![]};
    // Add the random solutions to the population
    for _ in 0..population_size{
        let new_individual = Solution::generate_random_solution(data_points, constraints, number_of_clusters, rng);
        rand_population.individuals.push(new_individual);
    }
    return rand_population;
}
/// Returns the number of individuals in our population.
pub fn population_size(&self) -> usize{
    return self.individuals.len();
}
/// Returns a reference to the individual stored at `index`.
/// Panics if `index` is out of bounds.
pub fn get_individual(&self, index: usize) -> &Solution<'a, 'b>{
    return &self.individuals[index];
}
/// Returns the best (lowest-fitness) solution of the population together with
/// the index where it is stored, plus the number of fitness evaluations
/// consumed by the lookup.
/// The population must contain at least one individual.
pub fn get_best_individual(&self) -> FitnessEvaluationResult<(&Solution<'a, 'b>, u32)>{
    let mut fit_eval_consumed = 0;
    // Initial safety check
    assert!(self.population_size() > 0, "La poblacion no puede ser nula en get_best_individual");
    let (mut best_fitness, fit_cons) = self.individuals[0].fitness_and_consumed();
    fit_eval_consumed += fit_cons;
    let mut best_index = 0;
    for (index, individual) in self.individuals.iter().enumerate(){
        let (individual_fitness, fit_cons) = individual.fitness_and_consumed();
        fit_eval_consumed += fit_cons;
        if individual_fitness < best_fitness{
            best_index = index;
            // Reuse the fitness value we just obtained instead of calling
            // `individual.fitness()` a second time (the old code did the
            // latter, inconsistently with get_index_worst_individual)
            best_fitness = individual_fitness;
        }
    }
    return FitnessEvaluationResult::new((self.get_individual(best_index), best_index as u32), fit_eval_consumed);
}
/// Computes the index of the individual with the worst (highest) fitness.
/// There must be at least one individual in the population.
pub fn get_index_worst_individual(&self) -> FitnessEvaluationResult<usize>{
    // Initial safety check
    debug_assert!(self.population_size() > 0, "La poblacion no puede ser nula en get_index_worst_individual");
    let mut fit_eval_consumed = 0;
    let (mut worst_fitness, fit_cons) = self.individuals[0].fitness_and_consumed();
    fit_eval_consumed += fit_cons;
    let mut worst_index = 0;
    for (index, individual) in self.individuals.iter().enumerate(){
        let (individual_fitness, fit_cons) = individual.fitness_and_consumed();
        fit_eval_consumed += fit_cons;
        if individual_fitness > worst_fitness{
            worst_index = index;
            worst_fitness = individual_fitness;
        }
    }
    return FitnessEvaluationResult::new(worst_index, fit_eval_consumed);
}
/// Replaces the individual at the given position.
/// Requires 0 <= index < population_size, otherwise it panics.
pub fn set_individual(&mut self, index: usize, individual: Solution<'a, 'b>){
    self.individuals[index] = individual;
}
/// Generates, from a population, a new selection population of a given size by
/// repeating a binary tournament new_population_size times.
/// Common values for new_population_size are either the size of the given
/// population or 2, for the stationary model.
pub fn select_population_binary_tournament(&self, new_population_size: i32, rng: &mut StdRng) -> FitnessEvaluationResult<Self>{
    let mut new_pop = Self::new_empty_population();
    let mut fit_ev_consumed = 0;
    // Add individuals using the binary tournament
    for _ in 0..new_population_size{
        // The two individuals that will compete in the tournament
        // (sampled with replacement, so they may be the same individual)
        let first_candidate = self.individuals.choose(rng).expect("La poblacion no puede estar vacia para hacer el tornero binario");
        let second_candidate = self.individuals.choose(rng).expect("La poblacion no puede estar vacia para hacer el tornero binario");
        // Select the winner
        let (winner, fit_consumed) = Solution::binary_tournament(first_candidate, second_candidate);
        new_pop.individuals.push(winner.clone());
        fit_ev_consumed += fit_consumed;
    }
    return FitnessEvaluationResult::new(new_pop, fit_ev_consumed);
}
/// Generates a crossover population from a population (which should come from
/// selection, since we rely on selection having already introduced randomness).
/// The new population has the same size as the original one.
/// The first n elements are crossed pairwise; this order is considered random
/// because it comes from a selection process, as noted above.
pub fn cross_population_uniform(&self, crossover_probability: f64, rng: &mut StdRng) -> FitnessEvaluationResult<Self>{
    // Start from a population identical to the given one
    let mut new_population = self.clone();
    // Cross the expected number of individuals (mathematical expectation)
    // instead of rolling a random number per pair, saving random draws
    let individuals_to_cross = (crossover_probability * self.population_size() as f64) as usize;
    // Cross the first individuals_to_cross individuals, pair by pair.
    // `index + 1 < individuals_to_cross` avoids the usize underflow that the
    // old `index < individuals_to_cross - 1` hit when individuals_to_cross == 0
    // (panic in debug builds, near-infinite loop bound in release).
    let mut index = 0;
    while index + 1 < individuals_to_cross{
        // Take the two parents
        let first_parent = new_population.individuals[index].clone();
        let second_parent = new_population.individuals[index + 1].clone();
        // Generate the two children from the two parents
        let first_child = Solution::uniform_cross(&first_parent, &second_parent, rng);
        let second_child = Solution::uniform_cross(&second_parent, &first_parent, rng);
        // Replace the two individuals
        new_population.individuals[index] = first_child;
        new_population.individuals[index + 1] = second_child;
        // Advance to the next pair
        index = index + 2;
    }
    // This step performs no fitness evaluations at all
    let fit_evals_consumed = 0;
    return FitnessEvaluationResult::new(new_population, fit_evals_consumed);
}
/// Generates a crossover population from a population (which should come from
/// selection, since we rely on selection having already introduced randomness).
/// The new population has the same size as the original one.
/// The first n elements are crossed pairwise; this order is considered random
/// because it comes from a selection process, as noted above.
pub fn cross_population_segment(&self, crossover_probability: f64, rng: &mut StdRng) -> FitnessEvaluationResult<Self>{
    // Start from a population identical to the given one
    let mut new_population = self.clone();
    // Cross the expected number of individuals (mathematical expectation)
    // instead of rolling a random number per pair, saving random draws
    let individuals_to_cross = (crossover_probability * self.population_size() as f64) as usize;
    // Cross the first individuals_to_cross individuals, pair by pair.
    // `index + 1 < individuals_to_cross` avoids the usize underflow that the
    // old `index < individuals_to_cross - 1` hit when individuals_to_cross == 0
    // (panic in debug builds, near-infinite loop bound in release).
    let mut index = 0;
    while index + 1 < individuals_to_cross{
        // Take the two parents
        let first_parent = new_population.individuals[index].clone();
        let second_parent = new_population.individuals[index + 1].clone();
        // Generate the two children from the two parents
        let first_child = Solution::cross_segment(&first_parent, &second_parent, rng);
        let second_child = Solution::cross_segment(&second_parent, &first_parent, rng);
        // Replace the two individuals
        new_population.individuals[index] = first_child;
        new_population.individuals[index + 1] = second_child;
        // Advance to the next pair
        index = index + 2;
    }
    // This step performs no fitness evaluations at all
    let fit_evals_consumed = 0;
    return FitnessEvaluationResult::new(new_population, fit_evals_consumed);
}
/// Mutates a population that has already been selected and crossed.
/// This operation consumes no fitness evaluations.
/// We use the mathematical expectation to avoid spending too many random
/// draws, so instead of passing the mutation probability we pass the number
/// of individuals to mutate.
/// Note that a single individual may mutate more than once (positions are
/// sampled with replacement).
pub fn mutate_population(&self, individuals_to_mutate: i32, rng: &mut StdRng) -> Self{
    let mut new_pop = self.clone();
    // Positions we can choose from at random
    let positions: Vec<usize> = (0..self.population_size() as usize).collect();
    for _ in 0..individuals_to_mutate as usize{
        let random_index = *positions.choose(rng).expect("No se ha podido escoger un valor aleatorio");
        new_pop.individuals[random_index] = new_pop.individuals[random_index].mutated(rng);
    }
    return new_pop;
}
/// Mutates a population that has already been selected and crossed.
/// This operation consumes no fitness evaluations.
/// Unlike mutate_population, we do not use the expected number of mutations
/// but one random roll per gene. Therefore the population we work with should
/// not be too large.
pub fn mutate_population_given_prob(&self, mutation_probability_per_gen: f64, rng: &mut StdRng) -> Self{
    let mut new_pop = self.clone();
    // Number of random rolls per individual: one per gene, because the given
    // probability is per gene. Hoisted out of the loop: it is invariant.
    let number_of_genes = self.individuals[0].get_cluster_indexes().len();
    // Iterate over the individuals and decide randomly whether to mutate
    for (index, _individual) in self.individuals.iter().enumerate(){
        for _ in 0..number_of_genes{
            // `if rng.gen::<f64>() <= p` replaces the old `== true` comparison
            if rng.gen::<f64>() <= mutation_probability_per_gen{
                new_pop.individuals[index] = new_pop.individuals[index].mutated(rng);
            }
        }
    }
    return new_pop;
}
/// Given an original population, checks whether its best individual is present
/// in this population. When it is not, it is inserted into the new population
/// at the position it occupied in the original population.
pub fn preserve_best_past_parent(&self, original_population: &Population<'a, 'b>) -> FitnessEvaluationResult<Self>{
    let mut new_pop = self.clone();
    let mut fit_eval_cons = 0;
    // Take the best individual of the original population.
    // Account for the evaluations this consumes; it should be zero because
    // that population should already be evaluated
    let best_individual_at_original_pop_result= original_population.get_best_individual();
    let (best_individual_at_original_pop, best_individual_index_original_pop) = best_individual_at_original_pop_result.get_result();
    fit_eval_cons += best_individual_at_original_pop_result.get_iterations_consumed();
    // Check whether it is already inside this population.
    // This consumes no evaluations: we are only comparing assignment vectors
    let search_result = self.search_individual_with_same_cluster_indixes(best_individual_at_original_pop.get_cluster_indexes());
    // The best past individual survived: return the population unmodified
    // along with the consumed evaluations. (`is_some()` guard replaces the
    // old `match Some(_)/None => ()` construction.)
    if search_result.is_some(){
        return FitnessEvaluationResult::new(self.clone(), fit_eval_cons);
    }
    // The best past individual is not in the new population: insert it at the
    // position it had in the original population
    new_pop.individuals[*best_individual_index_original_pop as usize] = (*best_individual_at_original_pop).clone();
    return FitnessEvaluationResult::new(new_pop, fit_eval_cons);
}
/// Searches the population for the individual with the exact same cluster
/// assignment, returning its position when found.
fn search_individual_with_same_cluster_indixes(&self, cluster_indixes: Vec<u32>) -> Option<u32>{
    // Scan the population and report the first matching position
    self.individuals
        .iter()
        .position(|individual| individual.get_cluster_indexes() == cluster_indixes)
        .map(|index| index as u32)
}
// Given an original population and a new candidate population, the individuals
// of the candidate population fight against the worst individuals of the
// original population (&self) to stay in that population.
// The original population is not modified; a copy with the resulting
// population is returned.
pub fn compete_with_new_individuals(&self, candidate_population: &Population<'a, 'b>) -> FitnessEvaluationResult<Self>{
    let mut new_pop = self.clone();
    let mut fit_eval_cons = 0;
    for candidate in candidate_population.individuals.iter(){
        // Take the worst individual of the population (recomputed each round
        // because a previous candidate may have replaced it)
        let worst_individual_result = new_pop.get_index_worst_individual();
        let worst_individual_index = worst_individual_result.get_result();
        fit_eval_cons += worst_individual_result.get_iterations_consumed();
        // Evaluate the fitness of the worst individual and of the candidate.
        // In both cases we account for the evaluations this may consume. The
        // worst individual should already be evaluated, while the candidate
        // should not be. We do both computations for safety
        let (worst_fitness, worst_it_cons) = new_pop.individuals[*worst_individual_index].fitness_and_consumed();
        let (candidate_fitness, candidate_it_cons) = candidate.fitness_and_consumed();
        fit_eval_cons += worst_it_cons + candidate_it_cons;
        // Safety check: the candidate must arrive with its fitness not yet evaluated
        debug_assert!(candidate_it_cons == 1, "El candidato debe tener el fitness sin evaluar, el valor de consumiciones es {}", candidate_it_cons);
        // Decide whether the candidate enters the population
        if candidate_fitness < worst_fitness {
            new_pop.individuals[*worst_individual_index] = candidate.clone();
        }
    }
    return FitnessEvaluationResult::new(new_pop, fit_eval_cons);
}
/// Evaluates every individual of the population.
/// Returns the fitness evaluations consumed. This may be a high value, since
/// we arrive with a new population that has been partly crossed and mutated;
/// only the new individuals contribute to the count — unchanged individuals
/// from the previous population contribute nothing.
///
/// Note the individuals use interior mutability for their fitness cache, so
/// no mutable reference is needed here.
pub fn evaluate_all_individuals(&self) -> FitnessEvaluationResult<()>{
    let mut fit_evals_consumed = 0;
    for individual in self.individuals.iter(){
        fit_evals_consumed += individual.fitness_and_consumed().1;
    }
    FitnessEvaluationResult::new((), fit_evals_consumed)
}
/// Checks whether every individual of the population has its fitness value
/// *not* yet computed. Used to debug the candidate population in the
/// stationary genetic algorithm.
pub fn all_population_is_not_cached(&self) -> bool{
    // `iter().all` with a negated predicate replaces the old manual loop with
    // its `== true` comparison: true iff no individual has a cached fitness
    self.individuals.iter().all(|individual| !individual.is_fitness_cached())
}
/// Checks that every individual of the population has its fitness value cached.
/// Note this is not the same as checking `all_population_is_not_cached() == false`.
pub fn all_population_is_cached(&self) -> bool{
    self.individuals.iter().all(|individual| individual.is_fitness_cached())
}
/// Prints the cluster assignments of the individuals of the population
/// (at most the first 30 genes of each individual).
/// Debugging aid: used because our population converges too fast towards
/// repeating the same individual.
pub fn show_population(&self){
    let max_values_in_row = 30;
    for (index, individual) in self.individuals.iter().enumerate(){
        print!("{}:\t", index);
        // `take` instead of indexing 0..max_values_in_row: the old code
        // panicked whenever an individual had fewer than 30 genes
        for value in individual.get_cluster_indexes().iter().take(max_values_in_row){
            print!("{} ", value);
        }
        println!();
    }
    // Wait for the user to press a key
    Population::wait_for_user_input();
}
/// Blocks until the user presses Enter (reads a single byte from stdin).
fn wait_for_user_input() {
    let mut stdout = stdout();
    // write_all instead of write: `write` may perform a partial write, and
    // its returned byte count was being discarded
    stdout.write_all(b"Press Enter to continue...").unwrap();
    stdout.flush().unwrap();
    stdin().read(&mut [0]).unwrap();
}
/// Tries to measure the variety of our population. Variety is measured as the
/// number of distinct fitness values among the individuals.
/// WARNING -- debugging aid only. Do not use in final code because it can
/// evaluate a population without accounting for the fitness evaluations.
pub fn measure_variety(&self) -> u32{
    // Deliberately disabled so this function cannot be called.
    // Remove this while debugging; a test asserts this function panics.
    panic!("function disabled");
    // NOTE: everything below is intentionally unreachable while the panic
    // above is in place.
    let mut fitness_values = HashSet::new();
    for individual in self.individuals.iter(){
        fitness_values.insert(OrderedFloat::<f64>::from(individual.fitness()));
    }
    return fitness_values.len() as u32;
}
}
/// Implementacion para la parte de algoritmos memeticos
impl<'a, 'b> Population<'a, 'b>{
/// Applies the soft local search, according to the criterion indicated by
/// memetic_type, to the population, generating a new population.
/// Panics if memetic_type is not one of the memetic variants.
pub fn soft_local_search(&self, memetic_type: SearchType, max_fails: i32, rng: &mut StdRng) -> FitnessEvaluationResult<Self>{
    // Dispatch to the corresponding soft local search variant
    match memetic_type{
        SearchType::MemeticAll => {
            return self.soft_local_search_all(max_fails, rng);
        }
        SearchType::MemeticRandom => {
            // Intensify over a random 10% of the population
            let search_percentage = 0.1;
            return self.soft_local_search_random(max_fails, search_percentage, rng);
        }
        SearchType::MemeticElitist => {
            // Intensify over the best 10% of the population
            let search_percentage = 0.1;
            return self.soft_local_search_elitist(max_fails, search_percentage, rng);
        }
        _ => {
            panic!("Valor erroneo para memetic_type")
        }
    }
}
// Applies the soft local search to every individual of the population.
fn soft_local_search_all(&self, max_fails: i32, rng: &mut StdRng) -> FitnessEvaluationResult<Self>{
    let mut new_pop = self.clone();
    let mut fit_eval_cons = 0;
    // Apply the soft local search to all individuals of the population.
    // We iterate over indexes so we can iterate over self and avoid
    // mutability problems with new_pop
    for (index, _individual) in self.individuals.iter().enumerate(){
        let new_individual_result = new_pop.individuals[index].soft_local_search(max_fails, rng);
        let new_individual = new_individual_result.get_result();
        fit_eval_cons += new_individual_result.get_iterations_consumed();
        new_pop.individuals[index] = new_individual.clone();
    }
    return FitnessEvaluationResult::new(new_pop, fit_eval_cons);
}
// Applies the soft local search to a percentage of random individuals of the
// population.
fn soft_local_search_random(&self, max_fails: i32, search_percentage: f64, rng: &mut StdRng) -> FitnessEvaluationResult<Self>{
    let mut new_pop = self.clone();
    let mut fit_eval_cons = 0;
    // Number of individuals on which we are going to run the soft local search
    let number_of_individuals_to_intensify = (self.individuals.len() as f64 * search_percentage) as i32;
    // Indexes of all individuals, randomly shuffled
    let mut individuals_indixes: Vec<u32> = (0..self.individuals.len() as u32).collect();
    individuals_indixes.shuffle(rng);
    // Apply the local search only to the given number of individuals. Using
    // the shuffled indexes, those individuals are chosen at random
    for i in 0..number_of_individuals_to_intensify{
        // Pick the individual at random
        let index = individuals_indixes[i as usize] as usize;
        // Apply the local search to that individual
        let new_individual_result = new_pop.individuals[index].soft_local_search(max_fails, rng);
        let new_individual = new_individual_result.get_result();
        fit_eval_cons += new_individual_result.get_iterations_consumed();
        new_pop.individuals[index] = new_individual.clone();
    }
    return FitnessEvaluationResult::new(new_pop, fit_eval_cons);
}
// Applies the soft local search to the best percentage of individuals of the
// population.
fn soft_local_search_elitist(&self, max_fails: i32, search_percentage: f64, rng: &mut StdRng) -> FitnessEvaluationResult<Self>{
    let mut new_pop = self.clone();
    let mut fit_eval_cons = 0;
    // Number of individuals on which we are going to run the soft local search
    let number_of_individuals_to_intensify = (self.individuals.len() as f64 * search_percentage) as i32;
    // Select the indexes of the best percentage of the population
    let best_indixes_result = self.select_best_indixes(number_of_individuals_to_intensify);
    let best_indixes = best_indixes_result.get_result();
    fit_eval_cons += best_indixes_result.get_iterations_consumed();
    // Apply the local search to this best percentage of individuals
    for index in best_indixes{
        // Apply the local search to that individual
        let new_individual_result = new_pop.individuals[*index as usize].soft_local_search(max_fails, rng);
        let new_individual = new_individual_result.get_result();
        fit_eval_cons += new_individual_result.get_iterations_consumed();
        new_pop.individuals[*index as usize] = new_individual.clone();
    }
    return FitnessEvaluationResult::new(new_pop, fit_eval_cons);
}
/// Given a number of individuals, selects the indexes of the best individuals
/// of the population, i.e. the indexes of the individuals with the best
/// (lowest) fitness values.
// TODO -- fairly easy to test
fn select_best_indixes(&self, number_of_individuals: i32) -> FitnessEvaluationResult<Vec<u32>>{
    let mut fit_evals_cons = 0;
    let mut best_indixes = vec![];
    // The whole population needs to be evaluated before we can sort its individuals
    let eval_result = self.evaluate_all_individuals();
    fit_evals_cons += eval_result.get_iterations_consumed();
    debug_assert!(self.all_population_is_cached());
    // Store the individuals of the population in a priority queue.
    // In truth we only need to store the individuals' indexes ordered by their
    // fitness value.
    // We can push every element knowing that the fitness is already evaluated,
    // because we evaluated the whole population above.
    let mut priority_queue = PriorityQueue::new();
    for (index, individual) in self.individuals.iter().enumerate(){
        // -1.0 * so we get the best individuals first, not the worst
        priority_queue.push(index, OrderedFloat::<f64>::from(-1.0 * individual.fitness()));
    }
    // Pop the first number_of_individuals individuals from the priority queue
    for (it, index) in priority_queue.into_sorted_iter().enumerate(){
        // Take the index value. `index` holds the index and its fitness
        // value, which we do not care about here
        best_indixes.push(index.0 as u32);
        // Only keep the requested number of individuals
        if it as i32 >= number_of_individuals - 1{
            break;
        }
    }
    // Safety check
    debug_assert!(best_indixes.len() == number_of_individuals as usize);
    return FitnessEvaluationResult::new(best_indixes, fit_evals_cons);
}
}
/// Regression tests for `Population`.
// `#[cfg(test)]` was missing: without it the module (and its `use`) is
// compiled into non-test builds as well.
#[cfg(test)]
mod test{
    use crate::problem_datatypes::population::Population;
    #[test]
    #[should_panic(expected="function disabled")]
    /// Checks that measure_variety stays disabled: if it could be called it
    /// could affect the behavior of the algorithms (it evaluates the fitness
    /// of every element of the population without reporting how many
    /// effective fitness evaluations are performed).
    fn test_measure_variety_disabled(){
        let pop = Population::new_empty_population();
        pop.measure_variety();
    }
}
|
use crate::{
ast::{
dec::{Dec, DecData},
exp::ASTExpData,
field::Field_,
stm::{AssignType, StmList, Stm_},
ty::{Type, TypeData, Type_},
var::Var_,
},
wasm::il::{
module::{ModuleList, Module_},
stm::Stm_ as WASMStm_,
util::WasmExpTy,
},
};
use super::{
entry_map::{EntryMap, EnvEntry, TemplateMap},
laze_type::LazeType_,
semantic_param::SemanticParam,
trans_funcdec::trans_funcdec,
trans_stm::trans_stm,
trans_ty::{trans_params, trans_result, trans_ty, trans_var_ty},
trans_var::get_var_name,
};
pub fn trans_dec(
dec: &Dec,
parent_class: Option<&Type>,
semantic_data: &mut SemanticParam,
) -> WasmExpTy {
let mut _result_list: ModuleList = vec![];
match &dec.data {
DecData::Var(var, var_ty, init) => {
let (new_var, new_var_ty, _object_explist) = trans_var_ty(var, var_ty);
let var_lazetype = trans_ty(&new_var_ty, semantic_data);
let new_var_access = semantic_data.frame.last_mut().unwrap().alloc(&var_lazetype);
semantic_data.venv.add_data(
get_var_name(new_var),
EnvEntry::Var(var_lazetype, new_var_access),
);
WasmExpTy::new_stm(
LazeType_::none_type(),
trans_stm(
&Stm_::assign_stm(dec.pos, new_var.clone(), init.clone(), AssignType::Normal),
semantic_data,
),
)
}
DecData::Func(func_name, params, result, func_body)
| DecData::Oper(func_name, params, result, func_body) => {
if let DecData::Oper(_, _, _, _) = &dec.data {
if let None = parent_class {
return WasmExpTy::none();
}
}
semantic_data.new_frame(&func_name, parent_class);
let params_lazetype = trans_params(¶ms, semantic_data);
let (return_var, return_lazetype) = trans_result(dec.pos, result, semantic_data);
// TODO: need to implement function overloading
semantic_data.venv.add_data(
func_name.clone(),
EnvEntry::Func(
semantic_data.func_num,
params_lazetype.clone(),
return_lazetype.clone(),
),
);
let export_name = if func_name == "実行" {
Some("main".to_string())
} else {
None
};
let func_mod = trans_funcdec(
func_body,
params,
¶ms_lazetype,
return_var,
&return_lazetype,
export_name,
semantic_data,
);
semantic_data.result_modlist.push(func_mod);
semantic_data.func_num += 1;
WasmExpTy::none()
}
DecData::JsImport(func_name, params, result, module_name, name) => {
semantic_data.new_frame(&func_name, parent_class);
let params_lazetype = trans_params(¶ms, semantic_data);
let (_, return_lazetype) = trans_result(dec.pos, result, semantic_data);
let _return_access = semantic_data
.frame
.last_mut()
.unwrap()
.alloc(&return_lazetype);
semantic_data.venv.add_data(
func_name.clone(),
EnvEntry::Func(
semantic_data.func_num,
params_lazetype.clone(),
return_lazetype.clone(),
),
);
let import_mod = Module_::jsimport_mod(
name.clone(),
module_name.clone(),
Module_::func_mod(
semantic_data.func_num,
LazeType_::list_to_wasm_type(¶ms_lazetype),
vec![],
return_lazetype.to_wasm_type(),
WASMStm_::none_stm(),
None,
),
);
semantic_data.result_modlist.push(import_mod);
semantic_data.func_num += 1;
WasmExpTy::none()
}
DecData::JsExport(func_name, export_name) => {
let entry = semantic_data.venv.get_data(&func_name).expect(
format_args!(
"Could not find function {:?} to export: {:?}",
func_name, dec.pos
)
.as_str()
.unwrap(),
);
let export_mod = if let EnvEntry::Func(index, _, _) = entry {
Module_::jsexport_mod(export_name.clone(), *index)
} else {
Module_::none_mod()
};
semantic_data.result_modlist.push(export_mod);
WasmExpTy::none()
}
DecData::Class(class_name, member_list, _inheritance) => {
let mut members_entrymap = EntryMap::new();
let mut class_size = 0;
let mut default_assignlist: StmList = vec![];
// enter all members into members_entrymap
for member in member_list {
match &member.dec.data {
DecData::Var(var, ty, init) => {
let member_ty = trans_ty(ty, semantic_data);
class_size += member_ty.size;
members_entrymap.add_data(
get_var_name(var),
EnvEntry::Member(member.specifier, member_ty, class_size),
);
// initializtion in constructor
match &init.data {
ASTExpData::None => {}
_ => {
default_assignlist.push(Stm_::assign_stm(
var.pos,
var.clone(),
init.clone(),
AssignType::Normal,
));
}
}
}
DecData::Func(func_name, params, result, _)
| DecData::Oper(func_name, params, result, _) => {
let mut params_lazetype = trans_params(¶ms, semantic_data);
params_lazetype.insert(0, LazeType_::pointer_type(LazeType_::void_type()));
let (_, return_type) = trans_result(dec.pos, result, semantic_data);
members_entrymap.add_data(
func_name.clone(),
EnvEntry::Method(
member.specifier,
semantic_data.func_num,
params_lazetype,
return_type,
),
);
}
_ => {}
}
}
let class_entry = semantic_data.tenv.get_mut_data(class_name);
let parent_class_type: Type;
if let Some(EnvEntry::Template(_, template_map, _, _)) = class_entry {
if let Some(class_type) = parent_class {
if let TypeData::Template(_, type_params) = &class_type.data {
parent_class_type =
Type_::template_type(dec.pos, class_name.clone(), type_params.clone());
template_map.add_data(
type_params.clone(),
EnvEntry::Class(
class_name.clone(),
members_entrymap.clone(),
class_size,
),
);
} else {
return WasmExpTy::none();
}
} else {
return WasmExpTy::none();
}
} else {
parent_class_type = Type_::name_type(dec.pos, class_name.clone());
semantic_data.tenv.add_data(
class_name.clone(),
EnvEntry::Class(class_name.clone(), members_entrymap.clone(), class_size),
);
}
for member in member_list {
match &member.dec.data {
DecData::Func(func_name, params, result, func_body)
| DecData::Oper(func_name, params, result, func_body) => {
let _new_frame =
semantic_data.new_frame(func_name, Some(&parent_class_type));
let self_param = Field_::new(
dec.pos,
Var_::simple_var(dec.pos, "self".to_string()),
Type_::pointer_type(dec.pos, parent_class_type.clone()),
);
let mut params_with_self = vec![self_param];
params_with_self.append(&mut params.clone());
let params_lazetype = trans_params(¶ms_with_self, semantic_data);
let (return_var, return_type) =
trans_result(dec.pos, result, semantic_data);
let func_mod = trans_funcdec(
func_body,
¶ms_with_self,
¶ms_lazetype,
return_var,
&return_type,
None,
semantic_data,
);
semantic_data.result_modlist.push(func_mod);
semantic_data.func_num += 1;
}
_ => {}
}
}
WasmExpTy::none()
}
DecData::Template(original_dec, type_params) => {
match &original_dec.data {
DecData::Class(name, _, _) | DecData::Func(name, _, _, _) => {
semantic_data.tenv.add_data(
name.clone(),
EnvEntry::Template(
original_dec.clone(),
TemplateMap::new(),
semantic_data.venv.clone(),
type_params.clone(),
),
);
}
_ => {}
};
WasmExpTy::none()
}
DecData::None => WasmExpTy::none(),
}
}
|
//! Game Management API.
use crate::{
db::{self, Pool},
sync_match::{CurrentMatchState, MatchParameters, SynchronizedMatch},
timer::TimerConfig,
ws, AppState, ServerError,
};
use axum::{
extract::{Path, State},
routing::{get, post},
Json, Router,
};
use serde::Deserialize;
/// Adds the game management API to the given router.
/// This is expected to be nested at "/api".
/// Adds the game management API to the given router.
/// This is expected to be nested at "/api".
pub fn add_to_router(api_router: Router<AppState>) -> Router<AppState> {
    // Register each endpoint in turn; the static "/game/recent" route is
    // matched by specificity, not registration order, so it coexists with
    // the "/game/:key" capture.
    let router = api_router.route("/create_game", post(create_game));
    let router = router.route("/game/:key", get(get_game));
    let router = router.route("/ai/game/:key", post(post_action_to_game));
    let router = router.route("/game/recent", get(recently_created_games));
    router.route("/branch_game", post(branch_game))
}
/// Create a match on the database and return the id.
/// The Websocket will then have to load it on its own. While that is
/// mildly inefficient, it decouples the websocket server from the http
/// server a bit.
/// Create a match on the database and return the id.
/// The Websocket will then have to load it on its own. While that is
/// mildly inefficient, it decouples the websocket server from the http
/// server a bit.
async fn create_game(
    pool: State<Pool>,
    game_parameters: Json<MatchParameters>,
) -> Result<String, ServerError> {
    info!("Creating a new game on client request.");
    // The placeholder key "0" is replaced by the database on insert.
    let params = game_parameters.0.sanitize();
    let mut new_game = SynchronizedMatch::new_with_key("0", params);
    let mut conn = pool.conn().await?;
    db::game::insert(&mut new_game, &mut conn).await?;
    info!("Game created with id {}.", new_game.key);
    Ok(new_game.key.to_string())
}
/// Returns the current state of the given game. This is intended for use by the
/// replay page.
/// Returns the current state of the given game. This is intended for use by the
/// replay page.
async fn get_game(
    Path(key): Path<String>,
    pool: State<Pool>,
) -> Result<Json<CurrentMatchState>, ServerError> {
    // Keys are numeric database ids; a non-numeric key is a client error.
    let id: i64 = key.parse()?;
    let mut conn = pool.conn().await?;
    match db::game::select(id, &mut conn).await? {
        Some(game) => Ok(Json(game.current_state()?)),
        None => Err(ServerError::NotFound),
    }
}
/// Forwards an AI-submitted action for the given game to the websocket
/// logic thread. Fire-and-forget: no result is reported to the caller.
async fn post_action_to_game(Path(key): Path<String>, action: Json<pacosako::PacoAction>) {
    let message = ws::LogicMsg::AiAction {
        key,
        action: action.0,
    };
    ws::to_logic(message);
}
/// Lists the most recently created games, each resolved to its current state.
/// Fails with the first state-computation error encountered.
async fn recently_created_games(
    pool: State<Pool>,
) -> Result<Json<Vec<CurrentMatchState>>, ServerError> {
    let mut conn = pool.conn().await?;
    let games = db::game::latest(&mut conn).await?;
    let mut states = Vec::with_capacity(games.len());
    for game in games.iter() {
        states.push(game.current_state()?);
    }
    Ok(Json(states))
}
/// Request body for `branch_game`: identifies the source match, where to cut
/// its action history, and an optional timer for the new match.
#[derive(Deserialize, Clone)]
struct BranchParameters {
    // Key of the existing match to branch from; parsed as a numeric id.
    source_key: String,
    // Number of actions from the source match to keep (the rest are truncated).
    action_index: usize,
    // Timer for the branched match; `None` creates an untimed match.
    timer: Option<TimerConfig>,
}
/// Create a match from an existing match
async fn branch_game(
pool: State<Pool>,
game_branch_parameters: Json<BranchParameters>,
) -> Result<String, ServerError> {
info!("Creating a new game on client request.");
let mut conn = pool.conn().await?;
let game = db::game::select(game_branch_parameters.source_key.parse()?, &mut conn).await?;
if let Some(mut game) = game {
game.actions.truncate(game_branch_parameters.action_index);
game.timer = game_branch_parameters.timer.clone().map(|o| o.into());
db::game::insert(&mut game, &mut conn).await?;
Ok(game.key)
} else {
Err(ServerError::NotFound)
}
}
|
use std::io::Result;
/// Enable ANSI/VT escape-sequence handling on the Windows console.
/// On non-Windows targets this is a no-op that always succeeds.
pub fn enable_vt_processing() -> Result<()> {
    #[cfg(windows)]
    {
        use crossterm_winapi::{ConsoleMode, Handle};

        // Console mode flags, see the Windows SetConsoleMode documentation.
        const ENABLE_PROCESSED_OUTPUT: u32 = 0x0001;
        const ENABLE_VIRTUAL_TERMINAL_PROCESSING: u32 = 0x0004;

        let console_mode = ConsoleMode::from(Handle::current_out_handle()?);
        let current_mode = console_mode.mode()?;
        // Research in the Windows Terminal repo showed both flags should be
        // set together, and that setting them unconditionally (rather than
        // first testing whether they are already present) is the safe path.
        let desired_mode =
            current_mode | ENABLE_PROCESSED_OUTPUT | ENABLE_VIRTUAL_TERMINAL_PROCESSING;
        console_mode.set_mode(desired_mode)?;
    }
    Ok(())
}
|
/// Entry point: runs the puzzle's intcode program and prints the value left
/// at position 0 when it halts.
fn main() {
    let program = vec![1,0,0,3,1,1,2,3,1,3,4,3,1,5,0,3,2,1,9,19,1,19,5,23,2,6,23,27,1,6,27,31,2,31,9,35,1,35,6,39,1,10,39,43,2,9,43,47,1,5,47,51,2,51,6,55,1,5,55,59,2,13,59,63,1,63,5,67,2,67,13,71,1,71,9,75,1,75,6,79,2,79,6,83,1,83,5,87,2,87,9,91,2,9,91,95,1,5,95,99,2,99,13,103,1,103,5,107,1,2,107,111,1,111,5,0,99,2,14,0,0];
    let final_state = run_intcode(program);
    println!("{}", final_state[0]);
}
/// The operations understood by this intcode machine (Advent of Code 2019, day 2).
enum Opcode {
    Add,
    Multiply,
    EndOfProgram,
}

/// Decode a raw opcode cell. 1 is add and 2 is multiply; anything else
/// (including the documented halt code 99) ends the program.
fn int_to_opcode(int: i32) -> Opcode {
    match int {
        1 => Opcode::Add,
        2 => Opcode::Multiply,
        _ => Opcode::EndOfProgram,
    }
}

/// Execute `program` until a halt opcode and return the final memory image.
///
/// Each instruction occupies 4 cells: opcode, two operand addresses and a
/// destination address. Operands are addressed indirectly (position mode).
/// Panics on out-of-bounds addresses, same as the original implementation.
fn run_intcode(mut program: Vec<i32>) -> Vec<i32> {
    let mut pc = 0;
    loop {
        let opcode = int_to_opcode(program[pc]);
        if let Opcode::EndOfProgram = opcode {
            break;
        }
        let lhs = program[program[pc + 1] as usize];
        let rhs = program[program[pc + 2] as usize];
        let dest = program[pc + 3] as usize;
        program[dest] = match opcode {
            Opcode::Add => lhs + rhs,
            Opcode::Multiply => lhs * rhs,
            // Halt is handled by the break above.
            Opcode::EndOfProgram => unreachable!(),
        };
        pc += 4;
    }
    program
}
#[cfg(test)]
mod tests {
    use super::*;

    // The four example programs and their expected final memory states come
    // from the Advent of Code 2019 day 2 problem statement.

    #[test]
    fn example_program_one() {
        let program = vec![1,0,0,0,99];
        let answer = run_intcode(program);
        assert_eq!(answer, vec![2,0,0,0,99]);
    }

    #[test]
    fn example_program_two() {
        let program = vec![2,3,0,3,99];
        let answer = run_intcode(program);
        assert_eq!(answer, vec![2,3,0,6,99]);
    }

    #[test]
    fn example_program_three() {
        let program = vec![2,4,4,5,99,0];
        let answer = run_intcode(program);
        assert_eq!(answer, vec![2,4,4,5,99,9801]);
    }

    #[test]
    fn example_program_four() {
        // Exercises a program that overwrites one of its own instructions.
        let program = vec![1,1,1,4,99,5,6,0,99];
        let answer = run_intcode(program);
        assert_eq!(answer, vec![30,1,1,4,2,5,6,0,99]);
    }
}
|
#[macro_use]
mod leak;
use core::sync::atomic::{AtomicBool, AtomicPtr, Ordering};
use crate::song::{PlayData, Song, PlaybackCmd, CallbackState};
use crate::module_reader::{SongData, read_module};
use crate::producer_consumer_queue::{PCQHolder, ProducerConsumerQueue};
use std::sync::{mpsc, Mutex, Arc};
use core::option::Option::None;
use core::option::Option;
use std::thread::{spawn, sleep, JoinHandle};
use core::time::Duration;
use crate::triple_buffer::State::StateNoChange;
use crate::song::PlaybackCmd::Quit;
use crate::triple_buffer::{TripleBufferReader, TripleBuffer};
use std::sync::mpsc::{Sender, Receiver};
use std::ops::{DerefMut};
use crate::instrument::Instrument;
use simple_error::{SimpleResult};
use crate::song::InterleavedBufferAdaptar;
/// A cloneable handle to a heap value shared between threads through a raw
/// pointer. Cloning copies the `Arc`-wrapped pointer, not the value.
///
/// NOTE(review): the boxed value is never reclaimed (`Box::into_raw` with no
/// matching `Box::from_raw`), so every `StructHolder::new` leaks — presumably
/// intentional for 'static thread access, but worth confirming.
#[derive(Clone)]
pub struct StructHolder<T> {
    // Raw pointer to the leaked box; the Arc only shares the pointer cell.
    t: Arc<AtomicPtr<T>>,
}

impl<T> StructHolder<T> {
    /// Take ownership of `arg` and leak it so all clones can reference it.
    pub fn new(arg: Box<T>) -> Self {
        Self { t: Arc::new(AtomicPtr::new(Box::into_raw(arg))) }
    }

    /// Mutable access to the shared value.
    ///
    /// NOTE(review): this hands out `&mut T` derived from a pointer that other
    /// clones (possibly on other threads) can read or mutate concurrently —
    /// aliased mutable access is undefined behavior under Rust's rules. The
    /// callers appear to rely on disjoint usage; verify.
    pub fn get_mut(&mut self) -> &mut T {
        // SAFETY-claim of the original author: pointer is non-null (set in
        // `new`) and never freed.
        unsafe { &mut *self.t.load(Ordering::Acquire) }
    }

    /// Shared access to the value.
    pub fn get(&self) -> &T {
        unsafe { &*self.t.load(Ordering::Acquire) }
    }
}
/// All shared state for one loaded song: the decoded module data, the playing
/// `Song`, the command channel, the audio queue and the UI snapshot buffer.
/// Clones share the same channels/buffers via `Arc`s.
#[derive(Clone)]
pub struct SongState {
    // Set to true when playback has finished or `close` was called.
    pub stopped: Arc<AtomicBool>,
    // Reader side of the triple buffer the player publishes `PlayData` into.
    triple_buffer_reader: Arc<Mutex<TripleBufferReader<PlayData>>>,
    pub song_data: SongData,
    pub song: Arc<Mutex<Song>>,
    // Sender for playback commands (seek, quit, ...).
    tx: Sender<PlaybackCmd>,
    // Receiver end, consumed by the playback callback.
    rx: Arc<Mutex<Receiver<PlaybackCmd>>>,
    // Producer/consumer queue carrying rendered audio buffers.
    q: PCQHolder,
    // Optional UI callback invoked from the display thread.
    display_cb: Option<fn (&PlayData, &Vec<Instrument>)>,
    // Self-referential handle so worker threads can be given clones; set
    // right after construction in `new`.
    self_ref: Option<StructHolder<SongState>>,
}
pub type SongHandle = StructHolder<SongState>;
impl SongState {
    /// Load the module at `path` and wire up the playback pipeline:
    /// a triple buffer for UI snapshots, an mpsc channel for playback
    /// commands, and a producer/consumer queue for audio buffers.
    /// Returns a leaked, cloneable handle (see `StructHolder`).
    pub fn new(path: String) -> SimpleResult<SongHandle> {
        let song_data = read_module(path.as_str())?;
        let triple_buffer = TripleBuffer::<PlayData>::new();
        let (triple_buffer_reader, triple_buffer_writer) = triple_buffer.split();
        // 48000.0 — presumably the output sample rate in Hz; TODO confirm
        // this matches the audio backend's configuration.
        let song = Arc::new(Mutex::new(Song::new(&song_data, triple_buffer_writer, 48000.0)));
        let (tx, rx): (Sender<PlaybackCmd>, Receiver<PlaybackCmd>) = mpsc::channel();
        let stopped = Arc::new(AtomicBool::from(false));
        let mut sh = StructHolder::new(Box::new(Self {
            stopped,
            triple_buffer_reader: Arc::new(Mutex::new(triple_buffer_reader)),
            song_data,
            song,
            tx,
            rx: Arc::new(Mutex::new(rx)),
            q: ProducerConsumerQueue::new(),
            display_cb: None,
            self_ref: None
        }));
        // Stash a handle to ourselves so worker threads can be handed clones
        // later (see `clone`/`start`).
        sh.get_mut().self_ref = Option::from(sh.clone());
        Ok(sh)
    }

    /// Jump playback to pattern-order `order`: drop any queued audio and ask
    /// the player to reposition.
    pub fn set_order(&mut self, order: u32) {
        self.q.get().drain();
        // A send error just means the player is gone; deliberately ignored.
        if let Ok(_) = self.tx.send(PlaybackCmd::SetPosition(order)) {}
    }

    /// Audio production loop (runs on the play thread): renders ticks into
    /// queue buffers until the song reports `Complete`, then flags `stopped`.
    fn callback(&mut self) {
        // Both locks are held for the whole playback run.
        let mut song = self.song.lock().unwrap();
        let mut rx = self.rx.lock().unwrap();
        self.q.get().produce(|buf: &mut [f32]| -> bool {
            let mut adaptar = InterleavedBufferAdaptar{buf};
            // Returning false stops the producer loop.
            if let CallbackState::Complete = song.get_next_tick(&mut adaptar, rx.deref_mut()) { return false; }
            true
        });
        self.stopped.store(true, Ordering::Release);
    }

    // fn callback_planar(&mut self) {
    // let mut song = self.song.lock().unwrap();
    // let mut rx = self.rx.lock().unwrap();
    // self.q.get().produce(|buf: &mut [f32]| -> bool {
    // let adaptar = PlanarBufferAdaptar::new(buf);
    // if let CallbackState::Complete = song.get_next_tick(adaptar, rx.deref_mut()) { return false; }
    // true
    // });
    // self.stopped.store(true, Ordering::Release);
    // }

    /// True once playback has finished or `close` was called.
    pub fn is_stopped(&self) -> bool {
        self.stopped.load(Ordering::Acquire)
    }

    /// Hand out another handle to this state via the self-reference captured
    /// in `new`. Panics if called before `new` completed.
    fn clone(&mut self) -> SongHandle {
        self.self_ref.as_mut().unwrap().clone()
    }

    /// Spawn the play thread and (since `display_cb` was just set, always) a
    /// display thread that polls the triple buffer ~every 30ms and invokes
    /// `display_cb` whenever the row or tick changed.
    /// Returns the join handles for both threads.
    pub fn start(&mut self, display_cb: fn (&PlayData, &Vec<Instrument>)) -> (Option<JoinHandle<()>>, Option<JoinHandle<()>>) {
        self.display_cb = Option::from(display_cb);
        let mut s1 = self.clone();
        let play_thread = Option::from(spawn(move || Self::callback(s1.get_mut())));
        let mut display_thread: Option<JoinHandle<()>> = None;
        let mut s2 = self.clone();
        if self.display_cb.is_some() {
            display_thread = Option::from(spawn(move || {
                let s = s2.get_mut();
                let tb_guard = s.triple_buffer_reader.clone();
                let mut triple_buffer_reader = tb_guard.lock().unwrap().get();
                // let mut triple_buffer_reader = triple_buffer_reader.lock().unwrap();
                let mut song_row = 0;
                // 2000 — sentinel that no real tick should match, so the first
                // snapshot always triggers a redraw; TODO confirm.
                let mut song_tick = 2000;
                loop {
                    if s.is_stopped() {
                        break;
                    }
                    sleep(Duration::from_millis(30));
                    let (play_data, state) = triple_buffer_reader.read();
                    if StateNoChange == state { continue; }
                    // Redraw only when the playback position actually moved.
                    if play_data.tick != song_tick || play_data.row != song_row {
                        (s.display_cb.unwrap())(play_data, &s.song_data.instruments);
                        song_row = play_data.row;
                        song_tick = play_data.tick;
                    }
                }
            }));
        }
        (play_thread, display_thread)
    }

    /// Shared handle to the audio buffer queue.
    pub fn get_queue(&mut self) -> PCQHolder {
        return self.q.clone();
    }

    /// Mutable access to the playback command sender.
    pub fn get_sender(&mut self) -> &mut Sender<PlaybackCmd> {
        return &mut self.tx;
    }

    /// Shared handle to the UI snapshot reader.
    pub fn get_triple_buffer_reader(&self) -> Arc<Mutex<TripleBufferReader<PlayData>>> {
        return self.triple_buffer_reader.clone();
    }

    /// Stop playback: flag `stopped`, tell the player to quit and wake the
    /// queue so both worker threads can exit.
    pub fn close(&mut self) {
        self.stopped.store(true, Ordering::Release);
        self.tx.send(Quit).unwrap();
        self.q.get().quit();
        // if handle.0.is_some() {
        // handle.0.unwrap().join().unwrap();
        // }
        // if handle.1.is_some() {
        // handle.1.unwrap().join().unwrap();
        // }
    }
}
// pub struct SongHandleLockGuard<'a>{
// song_state: &'a mut SongState,
// mutex_guard: MutexGuard<'a, u32>,
// _nosend: PhantomData<*mut ()>
// }
//
// impl<'a> Deref for SongHandleLockGuard<'a> {
// type Target = SongState;
// fn deref(&self) -> &SongState { (*self.song_state).as_ref() }
// }
//
// impl<'a> DerefMut for SongHandleLockGuard<'a> {
// fn deref_mut(&mut self) -> &mut SongState { (*self.song_state).as_mut() }
// }
//
// impl<'a> Drop for SongHandleLockGuard<'a> {
// fn drop(&mut self) {
// mem::drop(self.mutex_guard);
// }
// }
//
// #[derive(Clone)]
// pub struct SongHandle {
// song_state: *mut c_void,
// mutex: Mutex<u32>,
// }
//
// impl SongHandle {
// pub fn new(path: String) -> Self {
// Self { song_state: leak!(SongState::new(path)), mutex: Mutex::new(0) }
// }
//
// pub fn lock(&mut self) -> SongHandleLockGuard {
// let guard = self.mutex.lock().unwrap();
// SongHandleLockGuard{ song_state: self.song_state as &mut SongState, mutex_guard: guard, _nosend: Default::default() }
// }
// } |
extern crate clap;
extern crate env_logger;
#[macro_use]
extern crate log;
extern crate atty;
extern crate shlex;
extern crate skim;
extern crate time;
use derive_builder::Builder;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader, BufWriter, Write};
use clap::{crate_version, App, Arg, ArgMatches};
use skim::prelude::*;
// Help text printed for -h/--help. Fixed the repeated "seperated" typo in the
// user-visible text; everything else is unchanged.
const USAGE: &str = "
Usage: sk [options]
Options
-h, --help print this help menu
--version print out the current version of skim
Search
--tac reverse the order of search result
--no-sort Do not sort the result
-t, --tiebreak [score,begin,end,-score,length...]
comma separated criteria
-n, --nth 1,2..5 specify the fields to be matched
--with-nth 1,2..5 specify the fields to be transformed
-d, --delimiter \\t specify the delimiter(in REGEX) for fields
-e, --exact start skim in exact mode
--regex use regex instead of fuzzy match
--algo=TYPE Fuzzy matching algorithm:
[skim_v1|skim_v2|clangd] (default: skim_v2)
--case [respect,ignore,smart] (default: smart)
case sensitive or not
Interface
-b, --bind KEYBINDS comma separated keybindings, in KEY:ACTION
such as 'ctrl-j:accept,ctrl-k:kill-line'
-m, --multi Enable Multiple Selection
--no-multi Disable Multiple Selection
--no-mouse Disable mouse events
-c, --cmd ag command to invoke dynamically
-i, --interactive Start skim in interactive(command) mode
--color [BASE][,COLOR:ANSI]
change color theme
--no-hscroll Disable horizontal scroll
--keep-right Keep the right end of the line visible on overflow
--skip-to-pattern Line starts with the start of matched pattern
--no-clear-if-empty Do not clear previous items if command returns empty result
--no-clear-start Do not clear on start
--show-cmd-error Send command error message if command fails
Layout
--layout=LAYOUT Choose layout: [default|reverse|reverse-list]
--height=HEIGHT Height of skim's window (--height 40%)
--no-height Disable height feature
--min-height=HEIGHT Minimum height when --height is given by percent
(default: 10)
--margin=MARGIN Screen Margin (TRBL / TB,RL / T,RL,B / T,R,B,L)
e.g. (sk --margin 1,10%)
-p, --prompt '> ' prompt string for query mode
--cmd-prompt '> ' prompt string for command mode
Display
--ansi parse ANSI color codes for input strings
--tabstop=SPACES Number of spaces for a tab character (default: 8)
--inline-info Display info next to query
--header=STR Display STR next to info
--header-lines=N The first N lines of the input are treated as header
History
--history=FILE History file
--history-size=N Maximum number of query history entries (default: 1000)
--cmd-history=FILE command History file
--cmd-history-size=N Maximum number of command history entries (default: 1000)
Preview
--preview=COMMAND command to preview current highlighted line ({})
We can specify the fields. e.g. ({1}, {..3}, {0..})
--preview-window=OPT Preview window layout (default: right:50%)
[up|down|left|right][:SIZE[%]][:hidden][:+SCROLL[-OFFSET]]
Scripting
-q, --query \"\" specify the initial query
--cmd-query \"\" specify the initial query for interactive mode
--expect KEYS comma separated keys that can be used to complete skim
--read0 Read input delimited by ASCII NUL(\\0) characters
--print0 Print output delimited by ASCII NUL(\\0) characters
--no-clear-start Do not clear screen on start
--no-clear Do not clear screen on exit
--print-query Print query as the first line
--print-cmd Print command query as the first line (after --print-query)
--print-score Print matching score in filter output (with --filter)
-1, --select-1 Automatically select the only match
-0, --exit-0 Exit immediately when there's no match
--sync Synchronous search for multi-staged filtering
--pre-select-n=NUM Pre-select the first n items in multi-selection mode
--pre-select-pat=REGEX
Pre-select the matched items in multi-selection mode
--pre-select-items=$'item1\\nitem2'
Pre-select the items separated by newline character
--pre-select-file=FILENAME
Pre-select the items read from file
Environment variables
SKIM_DEFAULT_COMMAND Default command to use when input is tty
SKIM_DEFAULT_OPTIONS Default options (e.g. '--ansi --regex')
You should not include other environment variables
(e.g. '-c \"$HOME/bin/ag\"')
Removed
-I replstr replace `replstr` with the selected item
Reserved (not used for now)
--extended
--literal
--cycle
--hscroll-off=COL
--filepath-word
--jump-labels=CHARS
--border
--no-bold
--info
--pointer
--marker
--phony
";
// Fallback for --history-size / --cmd-history-size when absent or unparsable.
const DEFAULT_HISTORY_SIZE: usize = 1000;
//------------------------------------------------------------------------------
/// Entry point: set up logging, run the real program and translate its
/// outcome into a process exit code.
fn main() {
    env_logger::builder().format_timestamp_nanos().init();
    let exit_code = match real_main() {
        Ok(code) => code,
        // If the downstream pipe is closed, exit silently, see PR#279.
        Err(err) if err.kind() == std::io::ErrorKind::BrokenPipe => 0,
        Err(_) => 2,
    };
    std::process::exit(exit_code);
}
#[rustfmt::skip]
/// Parse command line options (with $SKIM_DEFAULT_OPTIONS applied first so
/// real argv wins), configure the item reader, history and pre-selection,
/// then run either filter mode or the interactive finder and print results.
///
/// Exit codes: 0 = selection made, 1 = nothing selected, 130 = aborted,
/// 135 = internal error starting skim.
fn real_main() -> Result<i32, std::io::Error> {
    let mut stdout = std::io::stdout();
    let mut args = Vec::new();
    args.push(env::args().next().expect("there should be at least one arg: the application name"));
    // Defaults from the environment are inserted before argv so that flags
    // given on the command line override them (last occurrence wins).
    args.extend(env::var("SKIM_DEFAULT_OPTIONS")
        .ok()
        .and_then(|val| shlex::split(&val))
        .unwrap_or_default());
    for arg in env::args().skip(1) {
        args.push(arg);
    }
    //------------------------------------------------------------------------------
    // parse options
    let opts = App::new("sk")
        .author("Jinzhou Zhang<lotabout@gmail.com>")
        .version(crate_version!())
        .arg(Arg::with_name("help").long("help").short('h'))
        .arg(Arg::with_name("bind").long("bind").short('b').multiple(true).takes_value(true))
        .arg(Arg::with_name("multi").long("multi").short('m').multiple(true))
        .arg(Arg::with_name("no-multi").long("no-multi").multiple(true))
        .arg(Arg::with_name("prompt").long("prompt").short('p').multiple(true).takes_value(true).default_value("> "))
        .arg(Arg::with_name("cmd-prompt").long("cmd-prompt").multiple(true).takes_value(true).default_value("c> "))
        .arg(Arg::with_name("expect").long("expect").multiple(true).takes_value(true))
        .arg(Arg::with_name("tac").long("tac").multiple(true))
        .arg(Arg::with_name("tiebreak").long("tiebreak").short('t').multiple(true).takes_value(true))
        .arg(Arg::with_name("ansi").long("ansi").multiple(true))
        .arg(Arg::with_name("exact").long("exact").short('e').multiple(true))
        .arg(Arg::with_name("cmd").long("cmd").short('c').multiple(true).takes_value(true))
        .arg(Arg::with_name("interactive").long("interactive").short('i').multiple(true))
        .arg(Arg::with_name("query").long("query").short('q').multiple(true).takes_value(true))
        .arg(Arg::with_name("cmd-query").long("cmd-query").multiple(true).takes_value(true))
        .arg(Arg::with_name("regex").long("regex").multiple(true))
        .arg(Arg::with_name("delimiter").long("delimiter").short('d').multiple(true).takes_value(true))
        .arg(Arg::with_name("nth").long("nth").short('n').multiple(true).takes_value(true))
        .arg(Arg::with_name("with-nth").long("with-nth").multiple(true).takes_value(true))
        .arg(Arg::with_name("replstr").short('I').multiple(true).takes_value(true))
        .arg(Arg::with_name("color").long("color").multiple(true).takes_value(true))
        .arg(Arg::with_name("margin").long("margin").multiple(true).takes_value(true).default_value("0,0,0,0"))
        .arg(Arg::with_name("min-height").long("min-height").multiple(true).takes_value(true).default_value("10"))
        .arg(Arg::with_name("height").long("height").multiple(true).takes_value(true).default_value("100%"))
        .arg(Arg::with_name("no-height").long("no-height").multiple(true))
        .arg(Arg::with_name("no-clear").long("no-clear").multiple(true))
        .arg(Arg::with_name("no-clear-start").long("no-clear-start").multiple(true))
        .arg(Arg::with_name("no-mouse").long("no-mouse").multiple(true))
        .arg(Arg::with_name("preview").long("preview").multiple(true).takes_value(true))
        .arg(Arg::with_name("preview-window").long("preview-window").multiple(true).takes_value(true).default_value("right:50%"))
        .arg(Arg::with_name("reverse").long("reverse").multiple(true))
        .arg(Arg::with_name("algorithm").long("algo").multiple(true).takes_value(true).default_value("skim_v2"))
        .arg(Arg::with_name("case").long("case").multiple(true).takes_value(true).default_value("smart"))
        .arg(Arg::with_name("literal").long("literal").multiple(true))
        .arg(Arg::with_name("cycle").long("cycle").multiple(true))
        .arg(Arg::with_name("no-hscroll").long("no-hscroll").multiple(true))
        .arg(Arg::with_name("hscroll-off").long("hscroll-off").multiple(true).takes_value(true).default_value("10"))
        .arg(Arg::with_name("filepath-word").long("filepath-word").multiple(true))
        .arg(Arg::with_name("jump-labels").long("jump-labels").multiple(true).takes_value(true).default_value("abcdefghijklmnopqrstuvwxyz"))
        .arg(Arg::with_name("border").long("border").multiple(true))
        .arg(Arg::with_name("inline-info").long("inline-info").multiple(true))
        .arg(Arg::with_name("header").long("header").multiple(true).takes_value(true).default_value(""))
        .arg(Arg::with_name("header-lines").long("header-lines").multiple(true).takes_value(true).default_value("0"))
        .arg(Arg::with_name("tabstop").long("tabstop").multiple(true).takes_value(true).default_value("8"))
        .arg(Arg::with_name("no-bold").long("no-bold").multiple(true))
        .arg(Arg::with_name("history").long("history").multiple(true).takes_value(true))
        .arg(Arg::with_name("cmd-history").long("cmd-history").multiple(true).takes_value(true))
        .arg(Arg::with_name("history-size").long("history-size").multiple(true).takes_value(true).default_value("1000"))
        .arg(Arg::with_name("cmd-history-size").long("cmd-history-size").multiple(true).takes_value(true).default_value("1000"))
        .arg(Arg::with_name("print-query").long("print-query").multiple(true))
        .arg(Arg::with_name("print-cmd").long("print-cmd").multiple(true))
        .arg(Arg::with_name("print-score").long("print-score").multiple(true))
        .arg(Arg::with_name("read0").long("read0").multiple(true))
        .arg(Arg::with_name("print0").long("print0").multiple(true))
        .arg(Arg::with_name("sync").long("sync").multiple(true))
        .arg(Arg::with_name("extended").long("extended").short('x').multiple(true))
        .arg(Arg::with_name("no-sort").long("no-sort").multiple(true))
        .arg(Arg::with_name("select-1").long("select-1").short('1').multiple(true))
        .arg(Arg::with_name("exit-0").long("exit-0").short('0').multiple(true))
        .arg(Arg::with_name("filter").long("filter").short('f').takes_value(true).multiple(true))
        .arg(Arg::with_name("layout").long("layout").multiple(true).takes_value(true).default_value("default"))
        .arg(Arg::with_name("keep-right").long("keep-right").multiple(true))
        .arg(Arg::with_name("skip-to-pattern").long("skip-to-pattern").multiple(true).takes_value(true).default_value(""))
        .arg(Arg::with_name("pre-select-n").long("pre-select-n").multiple(true).takes_value(true).default_value("0"))
        .arg(Arg::with_name("pre-select-pat").long("pre-select-pat").multiple(true).takes_value(true).default_value(""))
        .arg(Arg::with_name("pre-select-items").long("pre-select-items").multiple(true).takes_value(true))
        .arg(Arg::with_name("pre-select-file").long("pre-select-file").multiple(true).takes_value(true).default_value(""))
        .arg(Arg::with_name("no-clear-if-empty").long("no-clear-if-empty").multiple(true))
        .arg(Arg::with_name("show-cmd-error").long("show-cmd-error").multiple(true))
        .get_matches_from(args);
    if opts.is_present("help") {
        write!(stdout, "{}", USAGE)?;
        return Ok(0);
    }
    //------------------------------------------------------------------------------
    let mut options = parse_options(&opts);
    // Multiple --preview-window occurrences are joined with ':' into one spec.
    let preview_window_joined = opts.values_of("preview-window").map(|x| x.collect::<Vec<_>>().join(":"));
    options.preview_window = preview_window_joined.as_deref();
    //------------------------------------------------------------------------------
    // initialize collector
    let item_reader_option = SkimItemReaderOption::default()
        .ansi(opts.is_present("ansi"))
        .delimiter(opts.values_of("delimiter").and_then(|vals| vals.last()).unwrap_or(""))
        .with_nth(opts.values_of("with-nth").and_then(|vals| vals.last()).unwrap_or(""))
        .nth(opts.values_of("nth").and_then(|vals| vals.last()).unwrap_or(""))
        .read0(opts.is_present("read0"))
        .show_error(opts.is_present("show-cmd-error"))
        .build();
    let cmd_collector = Rc::new(RefCell::new(SkimItemReader::new(item_reader_option)));
    options.cmd_collector = cmd_collector.clone();
    //------------------------------------------------------------------------------
    // read in the history file
    let fz_query_histories = opts.values_of("history").and_then(|vals| vals.last());
    let cmd_query_histories = opts.values_of("cmd-history").and_then(|vals| vals.last());
    // Unreadable history files are silently treated as empty histories.
    let query_history = fz_query_histories.and_then(|filename| read_file_lines(filename).ok()).unwrap_or_default();
    let cmd_history = cmd_query_histories.and_then(|filename| read_file_lines(filename).ok()).unwrap_or_default();
    if fz_query_histories.is_some() || cmd_query_histories.is_some() {
        options.query_history = &query_history;
        options.cmd_history = &cmd_history;
        // bind ctrl-n and ctrl-p to handle history
        options.bind.insert(0, "ctrl-p:previous-history,ctrl-n:next-history");
    }
    //------------------------------------------------------------------------------
    // handle pre-selection options
    let pre_select_n: Option<usize> = opts.values_of("pre-select-n").and_then(|vals| vals.last()).and_then(|s| s.parse().ok());
    let pre_select_pat = opts.values_of("pre-select-pat").and_then(|vals| vals.last());
    let pre_select_items: Option<Vec<String>> = opts.values_of("pre-select-items").map(|vals| vals.flat_map(|m|m.split('\n')).map(|s|s.to_string()).collect());
    let pre_select_file = opts.values_of("pre-select-file").and_then(|vals| vals.last());
    if pre_select_n.is_some() || pre_select_pat.is_some() || pre_select_items.is_some() || pre_select_file.is_some() {
        let first_n = pre_select_n.unwrap_or(0);
        let pattern = pre_select_pat.unwrap_or("");
        let preset_items = pre_select_items.unwrap_or_default();
        let preset_file = pre_select_file.and_then(|filename| read_file_lines(filename).ok()).unwrap_or_default();
        let selector = DefaultSkimSelector::default()
            .first_n(first_n)
            .regex(pattern)
            .preset(preset_items)
            .preset(preset_file);
        options.selector = Some(Rc::new(selector));
    }
    // Options are fully built; freeze them.
    let options = options;
    //------------------------------------------------------------------------------
    let bin_options = BinOptionsBuilder::default()
        .filter(opts.values_of("filter").and_then(|vals| vals.last()))
        .print_query(opts.is_present("print-query"))
        .print_cmd(opts.is_present("print-cmd"))
        .output_ending(if opts.is_present("print0") { "\0" } else { "\n" })
        .build()
        .expect("");
    //------------------------------------------------------------------------------
    // read from pipe or command
    // When stdin is not a tty, items come from the pipe; otherwise skim will
    // run the default command itself (rx_item = None).
    let rx_item = if atty::isnt(atty::Stream::Stdin) {
        let rx_item = cmd_collector.borrow().of_bufread(BufReader::new(std::io::stdin()));
        Some(rx_item)
    } else {
        None
    };
    //------------------------------------------------------------------------------
    // filter mode
    if opts.is_present("filter") {
        return filter(&bin_options, &options, rx_item);
    }
    //------------------------------------------------------------------------------
    let output = Skim::run_with(&options, rx_item);
    if output.is_none() { // error
        return Ok(135);
    }
    //------------------------------------------------------------------------------
    // output
    let output = output.unwrap();
    if output.is_abort {
        return Ok(130);
    }
    // output query
    if bin_options.print_query {
        write!(stdout, "{}{}", output.query, bin_options.output_ending)?;
    }
    if bin_options.print_cmd {
        write!(stdout, "{}{}", output.cmd, bin_options.output_ending)?;
    }
    // With --expect, print which key accepted the selection (blank if plain accept).
    if opts.is_present("expect") {
        match output.final_event {
            Event::EvActAccept(Some(accept_key)) => {
                write!(stdout, "{}{}", accept_key, bin_options.output_ending)?;
            }
            Event::EvActAccept(None) => {
                write!(stdout, "{}", bin_options.output_ending)?;
            }
            _ => {}
        }
    }
    for item in output.selected_items.iter() {
        write!(stdout, "{}{}", item.output(), bin_options.output_ending)?;
    }
    //------------------------------------------------------------------------------
    // write the history with latest item
    if let Some(file) = fz_query_histories {
        let limit = opts.values_of("history-size").and_then(|vals| vals.last())
            .and_then(|size| size.parse::<usize>().ok())
            .unwrap_or(DEFAULT_HISTORY_SIZE);
        write_history_to_file(&query_history, &output.query, limit, file)?;
    }
    if let Some(file) = cmd_query_histories {
        let limit = opts.values_of("cmd-history-size").and_then(|vals| vals.last())
            .and_then(|size| size.parse::<usize>().ok())
            .unwrap_or(DEFAULT_HISTORY_SIZE);
        write_history_to_file(&cmd_history, &output.cmd, limit, file)?;
    }
    Ok(if output.selected_items.is_empty() { 1 } else { 0 })
}
/// Translate parsed clap matches into a `SkimOptions` value.
///
/// Every repeatable flag follows last-occurrence-wins semantics (via
/// `values_of(..).last()`), which lets $SKIM_DEFAULT_OPTIONS be overridden by
/// the command line. Fixes applied: removed a duplicated `.layout(...)`
/// builder call, and made `--case` use last-occurrence-wins like every other
/// repeatable flag (it previously used `value_of`, i.e. first occurrence).
fn parse_options(options: &ArgMatches) -> SkimOptions<'_> {
    SkimOptionsBuilder::default()
        .color(options.values_of("color").and_then(|vals| vals.last()))
        .min_height(options.values_of("min-height").and_then(|vals| vals.last()))
        .no_height(options.is_present("no-height"))
        .height(options.values_of("height").and_then(|vals| vals.last()))
        .margin(options.values_of("margin").and_then(|vals| vals.last()))
        .preview(options.values_of("preview").and_then(|vals| vals.last()))
        .cmd(options.values_of("cmd").and_then(|vals| vals.last()))
        .query(options.values_of("query").and_then(|vals| vals.last()))
        .cmd_query(options.values_of("cmd-query").and_then(|vals| vals.last()))
        .interactive(options.is_present("interactive"))
        .prompt(options.values_of("prompt").and_then(|vals| vals.last()))
        .cmd_prompt(options.values_of("cmd-prompt").and_then(|vals| vals.last()))
        // All --bind occurrences are kept: later bindings override earlier ones.
        .bind(
            options
                .values_of("bind")
                .map(|x| x.collect::<Vec<_>>())
                .unwrap_or_default(),
        )
        .expect(options.values_of("expect").map(|x| x.collect::<Vec<_>>().join(",")))
        // --no-multi always wins over --multi.
        .multi(if options.is_present("no-multi") {
            false
        } else {
            options.is_present("multi")
        })
        .layout(options.values_of("layout").and_then(|vals| vals.last()).unwrap_or(""))
        .reverse(options.is_present("reverse"))
        .no_hscroll(options.is_present("no-hscroll"))
        .no_mouse(options.is_present("no-mouse"))
        .no_clear(options.is_present("no-clear"))
        .no_clear_start(options.is_present("no-clear-start"))
        .tabstop(options.values_of("tabstop").and_then(|vals| vals.last()))
        .tiebreak(options.values_of("tiebreak").map(|x| x.collect::<Vec<_>>().join(",")))
        .tac(options.is_present("tac"))
        .nosort(options.is_present("no-sort"))
        .exact(options.is_present("exact"))
        .regex(options.is_present("regex"))
        .delimiter(options.values_of("delimiter").and_then(|vals| vals.last()))
        .inline_info(options.is_present("inline-info"))
        .header(options.values_of("header").and_then(|vals| vals.last()))
        .header_lines(
            options
                .values_of("header-lines")
                .and_then(|vals| vals.last())
                .map(|s| s.parse::<usize>().unwrap_or(0))
                .unwrap_or(0),
        )
        // "algorithm" has a default value, so unwrap() cannot fail here.
        .algorithm(FuzzyAlgorithm::of(
            options.values_of("algorithm").and_then(|vals| vals.last()).unwrap(),
        ))
        .case(match options.values_of("case").and_then(|vals| vals.last()) {
            Some("smart") => CaseMatching::Smart,
            Some("ignore") => CaseMatching::Ignore,
            _ => CaseMatching::Respect,
        })
        .keep_right(options.is_present("keep-right"))
        .skip_to_pattern(
            options
                .values_of("skip-to-pattern")
                .and_then(|vals| vals.last())
                .unwrap_or(""),
        )
        .select1(options.is_present("select-1"))
        .exit0(options.is_present("exit-0"))
        .sync(options.is_present("sync"))
        .no_clear_if_empty(options.is_present("no-clear-if-empty"))
        .build()
        .unwrap()
}
/// Read `filename` and return its lines in order.
///
/// Any I/O failure — opening the file or reading a line — is propagated
/// to the caller; the collected result is logged at debug level first.
fn read_file_lines(filename: &str) -> Result<Vec<String>, std::io::Error> {
    let reader = BufReader::new(File::open(filename)?);
    let lines: Result<Vec<String>, std::io::Error> = reader.lines().collect();
    debug!("file content: {:?}", lines);
    lines
}
/// Append `latest` to `orig_history` and rewrite the history file,
/// keeping at most `limit` entries.
///
/// Nothing is written when `latest` merely repeats the newest stored
/// entry. A whitespace-only `latest` does not count toward `limit`
/// (though it is still appended, matching the existing behaviour).
fn write_history_to_file(
    orig_history: &[String],
    latest: &str,
    limit: usize,
    filename: &str,
) -> Result<(), std::io::Error> {
    // Avoid stacking the same command repeatedly at the end of the file.
    if orig_history.last().map(String::as_str) == Some(latest) {
        return Ok(());
    }
    // Blank commands are appended but never counted against the limit.
    let counted_new = if latest.trim().is_empty() { 0 } else { 1 };
    // Drop the oldest entries so the total stays within `limit`.
    let start = (orig_history.len() + counted_new).saturating_sub(limit);
    let mut entries = orig_history[start..].to_vec();
    entries.push(latest.to_string());
    let mut writer = BufWriter::new(File::create(filename)?);
    writer.write_all(entries.join("\n").as_bytes())?;
    Ok(())
}
/// Options controlling the non-interactive (batch) filter mode.
#[derive(Builder)]
pub struct BinOptions<'a> {
    /// Query to match items against; `None` is treated as an empty query.
    filter: Option<&'a str>,
    /// String appended after each emitted record (query, command, or match).
    output_ending: &'a str,
    /// Echo the query itself before the results.
    print_query: bool,
    /// Echo the candidate-producing command before the results.
    print_cmd: bool,
}
/// Non-interactive "filter mode": match every candidate item against the
/// query once and print all matches to stdout, instead of opening the
/// interactive selector.
///
/// Returns `Ok(1)` when nothing matched and `Ok(0)` otherwise (grep-style
/// exit semantics); `Err` only on stdout write failures.
pub fn filter(
    bin_option: &BinOptions,
    options: &SkimOptions,
    source: Option<SkimItemReceiver>,
) -> Result<i32, std::io::Error> {
    let mut stdout = std::io::stdout();
    // Candidate-producing command: $SKIM_DEFAULT_COMMAND, falling back to
    // `find .` when the variable is unset or empty.
    let default_command = match env::var("SKIM_DEFAULT_COMMAND").as_ref().map(String::as_ref) {
        Ok("") | Err(_) => "find .".to_owned(),
        Ok(val) => val.to_owned(),
    };
    let query = bin_option.filter.unwrap_or("");
    let cmd = options.cmd.unwrap_or(&default_command);
    // Optionally echo the query and/or the command before the matches.
    if bin_option.print_query {
        write!(stdout, "{}{}", query, bin_option.output_ending)?;
    }
    if bin_option.print_cmd {
        write!(stdout, "{}{}", cmd, bin_option.output_ending)?;
    }
    //------------------------------------------------------------------------------
    // matcher
    // Regex mode uses a plain regex engine; otherwise an exact-or-fuzzy
    // engine is wrapped with AND/OR query composition.
    let engine_factory: Box<dyn MatchEngineFactory> = if options.regex {
        Box::new(RegexEngineFactory::builder())
    } else {
        let fuzzy_engine_factory = ExactOrFuzzyEngineFactory::builder()
            .fuzzy_algorithm(options.algorithm)
            .exact_mode(options.exact)
            .build();
        Box::new(AndOrEngineFactory::new(fuzzy_engine_factory))
    };
    let engine = engine_factory.create_engine_with_case(query, options.case);
    //------------------------------------------------------------------------------
    // start
    let components_to_stop = Arc::new(AtomicUsize::new(0));
    // Use the caller-provided item stream, or spawn `cmd` and collect its
    // output as the item source.
    let stream_of_item = source.unwrap_or_else(|| {
        let cmd_collector = options.cmd_collector.clone();
        let (ret, _control) = cmd_collector.borrow_mut().invoke(cmd, components_to_stop);
        ret
    });
    let mut num_matched = 0;
    // Print each matching item; the first stdout error aborts the loop.
    stream_of_item
        .into_iter()
        .filter_map(|item| engine.match_item(item.clone()).map(|result| (item, result)))
        .try_for_each(|(item, _match_result)| {
            num_matched += 1;
            write!(stdout, "{}{}", item.output(), bin_option.output_ending)
        })?;
    Ok(if num_matched == 0 { 1 } else { 0 })
}
|
#![no_main]
#![no_std]
extern crate cortex_m;
extern crate cortex_m_rt;
extern crate embedded_hal;
extern crate panic_halt;
extern crate stm32f042_hal as hal;
extern crate numtoa;
use hal::i2c::*;
use hal::prelude::*;
use hal::stm32;
use embedded_hal::blocking::i2c::Write;
use numtoa::NumToA;
use cortex_m_rt::entry;
// SSD1306 OLED controller constants; values per the SSD1306 datasheet.
// Control bytes that prefix every I2C payload:
const SSD1306_BYTE_CMD: u8 = 0x00; // following bytes are a command stream
const SSD1306_BYTE_DATA: u8 = 0x40; // following bytes are display data
const SSD1306_BYTE_CMD_SINGLE: u8 = 0x80; // a single command byte follows
// Display commands:
const SSD1306_DISPLAY_RAM: u8 = 0xA4; // show RAM contents
const SSD1306_DISPLAY_NORMAL: u8 = 0xA6; // non-inverted output
const SSD1306_DISPLAY_OFF: u8 = 0xAE;
const SSD1306_DISPLAY_ON: u8 = 0xAF;
const SSD1306_MEMORY_ADDR_MODE: u8 = 0x20; // addressing mode selector
const SSD1306_COLUMN_RANGE: u8 = 0x21; // set column start/end
const SSD1306_PAGE_RANGE: u8 = 0x22; // set page start/end
const SSD1306_DISPLAY_START_LINE: u8 = 0x40;
const SSD1306_SCAN_MODE_NORMAL: u8 = 0xC0;
const SSD1306_DISPLAY_OFFSET: u8 = 0xD3;
const SSD1306_PIN_MAP: u8 = 0xDA; // COM pins hardware configuration
const SSD1306_DISPLAY_CLK_DIV: u8 = 0xD5;
const SSD1306_CHARGE_PUMP: u8 = 0x8D;
/// Firmware entry point: bring up I2C1 on PF0/PF1, initialise an SSD1306
/// OLED at 0x3C, then poll a device at I2C address 0x40 and display the
/// readings.
///
/// NOTE(review): the 0x40 device is read at registers 0x00/0x02/0x01 with
/// a 1250 scale factor and "uV"/"uA" labels — presumably an INA226-style
/// power monitor; confirm against the hardware schematic.
#[entry]
fn main() -> ! {
    if let Some(p) = stm32::Peripherals::take() {
        let gpiof = p.GPIOF.split();
        let mut rcc = p.RCC.constrain();
        let _ = rcc.cfgr.freeze();
        /* PF1 as SCL: alternate function 1, open-drain with internal pull-up */
        let scl = gpiof
            .pf1
            .into_alternate_af1()
            .internal_pull_up(true)
            .set_open_drain();
        /* PF0 as SDA: same electrical configuration */
        let sda = gpiof
            .pf0
            .into_alternate_af1()
            .internal_pull_up(true)
            .set_open_drain();
        /* Setup I2C1 at 10 kHz */
        let mut i2c = I2c::i2c1(p.I2C1, (scl, sda), 10.khz());
        /* Initialise SSD1306 display (errors ignored: nowhere to report them) */
        let _ = ssd1306_init(&mut i2c);
        /* Park the cursor at the top-left of the display */
        let _ = ssd1306_pos(&mut i2c, 0, 0);
        /* Endless loop */
        loop {
            /* Line 0: raw contents of register 0x00 (big-endian u16) */
            let _ = ssd1306_pos(&mut i2c, 0, 0);
            let mut data = [0; 2];
            let _ = i2c.write_read(0x40, &[0x00], &mut data);
            let config = (u16::from(data[0]) << 8) | u16::from(data[1]);
            let mut buffer = [0u8; 10];
            let _ = ssd1306_print_bytes(&mut i2c, config.numtoa(10, &mut buffer));
            /* Line 1: register 0x02 scaled by 1250, labelled "uV" */
            let _ = ssd1306_pos(&mut i2c, 0, 1);
            let mut data = [0; 2];
            let _ = i2c.write_read(0x40, &[0x02], &mut data);
            let mut voltage = ((u32::from(data[0]) << 8) | u32::from(data[1])) * 1250;
            let _ = ssd1306_print_bytes(&mut i2c, voltage.numtoa(10, &mut buffer));
            let _ = ssd1306_print_bytes(&mut i2c, b"uV ");
            /* Line 2: register 0x01 scaled by 1250, labelled "uA" */
            let _ = ssd1306_pos(&mut i2c, 0, 2);
            let mut data = [0; 2];
            let _ = i2c.write_read(0x40, &[0x01], &mut data);
            voltage = ((u32::from(data[0]) << 8) | u32::from(data[1])) * 1250;
            let _ = ssd1306_print_bytes(&mut i2c, voltage.numtoa(10, &mut buffer));
            let _ = ssd1306_print_bytes(&mut i2c, b"uA ");
        }
    }
    /* Peripherals unavailable: spin forever (no panic facility in no_std) */
    loop {
        continue;
    }
}
/// Print characters on the display with the embedded 7x7 font
///
/// Each input byte selects one glyph from `FONT_7X7`: the table starts at
/// ASCII 0x20 (space), seven column-bytes per glyph, and every glyph is
/// sent as a single 9-byte transfer (data control byte + 7 columns + one
/// blank spacer column).
///
/// NOTE(review): `*c as usize - 0x20` underflows for input bytes below
/// 0x20 (panic in debug, wild index in release) — callers must pass
/// printable ASCII only; confirm this contract is acceptable.
fn ssd1306_print_bytes<I2C, E>(i2c: &mut I2C, bytes: &[u8]) -> Result<(), E>
where
    I2C: Write<Error = E>,
{
    /* A 7x7 font shamelessly borrowed from https://github.com/techninja/MarioChron/ */
    const FONT_7X7: [u8; 672] = [
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // (space)
        0x00, 0x00, 0x5F, 0x00, 0x00, 0x00, 0x00, // !
        0x00, 0x07, 0x00, 0x07, 0x00, 0x00, 0x00, // "
        0x14, 0x7F, 0x14, 0x7F, 0x14, 0x00, 0x00, // #
        0x24, 0x2A, 0x7F, 0x2A, 0x12, 0x00, 0x00, // $
        0x23, 0x13, 0x08, 0x64, 0x62, 0x00, 0x00, // %
        0x36, 0x49, 0x55, 0x22, 0x50, 0x00, 0x00, // &
        0x00, 0x05, 0x03, 0x00, 0x00, 0x00, 0x00, // '
        0x00, 0x1C, 0x22, 0x41, 0x00, 0x00, 0x00, // (
        0x00, 0x41, 0x22, 0x1C, 0x00, 0x00, 0x00, // )
        0x08, 0x2A, 0x1C, 0x2A, 0x08, 0x00, 0x00, // *
        0x08, 0x08, 0x3E, 0x08, 0x08, 0x00, 0x00, // +
        0x00, 0x50, 0x30, 0x00, 0x00, 0x00, 0x00, // ,
        0x00, 0x18, 0x18, 0x18, 0x18, 0x18, 0x00, // -
        0x00, 0x60, 0x60, 0x00, 0x00, 0x00, 0x00, // .
        0x20, 0x10, 0x08, 0x04, 0x02, 0x00, 0x00, // /
        0x1C, 0x3E, 0x61, 0x41, 0x43, 0x3E, 0x1C, // 0
        0x40, 0x42, 0x7F, 0x7F, 0x40, 0x40, 0x00, // 1
        0x62, 0x73, 0x79, 0x59, 0x5D, 0x4F, 0x46, // 2
        0x20, 0x61, 0x49, 0x4D, 0x4F, 0x7B, 0x31, // 3
        0x18, 0x1C, 0x16, 0x13, 0x7F, 0x7F, 0x10, // 4
        0x27, 0x67, 0x45, 0x45, 0x45, 0x7D, 0x38, // 5
        0x3C, 0x7E, 0x4B, 0x49, 0x49, 0x79, 0x30, // 6
        0x03, 0x03, 0x71, 0x79, 0x0D, 0x07, 0x03, // 7
        0x36, 0x7F, 0x49, 0x49, 0x49, 0x7F, 0x36, // 8
        0x06, 0x4F, 0x49, 0x49, 0x69, 0x3F, 0x1E, // 9
        0x00, 0x36, 0x36, 0x00, 0x00, 0x00, 0x00, // :
        0x00, 0x56, 0x36, 0x00, 0x00, 0x00, 0x00, // ;
        0x00, 0x08, 0x14, 0x22, 0x41, 0x00, 0x00, // <
        0x14, 0x14, 0x14, 0x14, 0x14, 0x00, 0x00, // =
        0x41, 0x22, 0x14, 0x08, 0x00, 0x00, 0x00, // >
        0x02, 0x01, 0x51, 0x09, 0x06, 0x00, 0x00, // ?
        0x32, 0x49, 0x79, 0x41, 0x3E, 0x00, 0x00, // @
        0x7E, 0x11, 0x11, 0x11, 0x7E, 0x00, 0x00, // A
        0x7F, 0x49, 0x49, 0x49, 0x36, 0x00, 0x00, // B
        0x3E, 0x41, 0x41, 0x41, 0x22, 0x00, 0x00, // C
        0x7F, 0x7F, 0x41, 0x41, 0x63, 0x3E, 0x1C, // D
        0x7F, 0x49, 0x49, 0x49, 0x41, 0x00, 0x00, // E
        0x7F, 0x09, 0x09, 0x01, 0x01, 0x00, 0x00, // F
        0x3E, 0x41, 0x41, 0x51, 0x32, 0x00, 0x00, // G
        0x7F, 0x08, 0x08, 0x08, 0x7F, 0x00, 0x00, // H
        0x00, 0x41, 0x7F, 0x41, 0x00, 0x00, 0x00, // I
        0x20, 0x40, 0x41, 0x3F, 0x01, 0x00, 0x00, // J
        0x7F, 0x08, 0x14, 0x22, 0x41, 0x00, 0x00, // K
        0x7F, 0x7F, 0x40, 0x40, 0x40, 0x40, 0x00, // L
        0x7F, 0x02, 0x04, 0x02, 0x7F, 0x00, 0x00, // M
        0x7F, 0x04, 0x08, 0x10, 0x7F, 0x00, 0x00, // N
        0x3E, 0x7F, 0x41, 0x41, 0x41, 0x7F, 0x3E, // O
        0x7F, 0x09, 0x09, 0x09, 0x06, 0x00, 0x00, // P
        0x3E, 0x41, 0x51, 0x21, 0x5E, 0x00, 0x00, // Q
        0x7F, 0x7F, 0x11, 0x31, 0x79, 0x6F, 0x4E, // R
        0x46, 0x49, 0x49, 0x49, 0x31, 0x00, 0x00, // S
        0x01, 0x01, 0x7F, 0x01, 0x01, 0x00, 0x00, // T
        0x3F, 0x40, 0x40, 0x40, 0x3F, 0x00, 0x00, // U
        0x1F, 0x20, 0x40, 0x20, 0x1F, 0x00, 0x00, // V
        0x7F, 0x7F, 0x38, 0x1C, 0x38, 0x7F, 0x7F, // W
        0x63, 0x14, 0x08, 0x14, 0x63, 0x00, 0x00, // X
        0x03, 0x04, 0x78, 0x04, 0x03, 0x00, 0x00, // Y
        0x61, 0x51, 0x49, 0x45, 0x43, 0x00, 0x00, // Z
        0x00, 0x00, 0x7F, 0x41, 0x41, 0x00, 0x00, // [
        0x02, 0x04, 0x08, 0x10, 0x20, 0x00, 0x00, // "\"
        0x41, 0x41, 0x7F, 0x00, 0x00, 0x00, 0x00, // ]
        0x04, 0x02, 0x01, 0x02, 0x04, 0x00, 0x00, // ^
        0x40, 0x40, 0x40, 0x40, 0x40, 0x00, 0x00, // _
        0x00, 0x01, 0x02, 0x04, 0x00, 0x00, 0x00, // `
        0x20, 0x54, 0x54, 0x54, 0x78, 0x00, 0x00, // a
        0x7F, 0x48, 0x44, 0x44, 0x38, 0x00, 0x00, // b
        0x38, 0x44, 0x44, 0x44, 0x20, 0x00, 0x00, // c
        0x38, 0x44, 0x44, 0x48, 0x7F, 0x00, 0x00, // d
        0x38, 0x54, 0x54, 0x54, 0x18, 0x00, 0x00, // e
        0x08, 0x7E, 0x09, 0x01, 0x02, 0x00, 0x00, // f
        0x08, 0x14, 0x54, 0x54, 0x3C, 0x00, 0x00, // g
        0x7F, 0x08, 0x04, 0x04, 0x78, 0x00, 0x00, // h
        0x00, 0x44, 0x7D, 0x40, 0x00, 0x00, 0x00, // i
        0x20, 0x40, 0x44, 0x3D, 0x00, 0x00, 0x00, // j
        0x00, 0x7F, 0x10, 0x28, 0x44, 0x00, 0x00, // k
        0x00, 0x41, 0x7F, 0x40, 0x00, 0x00, 0x00, // l
        0x7C, 0x04, 0x18, 0x04, 0x78, 0x00, 0x00, // m
        0x7C, 0x08, 0x04, 0x04, 0x78, 0x00, 0x00, // n
        0x38, 0x44, 0x44, 0x44, 0x38, 0x00, 0x00, // o
        0x7C, 0x14, 0x14, 0x14, 0x08, 0x00, 0x00, // p
        0x08, 0x14, 0x14, 0x18, 0x7C, 0x00, 0x00, // q
        0x7C, 0x08, 0x04, 0x04, 0x08, 0x00, 0x00, // r
        0x48, 0x54, 0x54, 0x54, 0x20, 0x00, 0x00, // s
        0x04, 0x3F, 0x44, 0x40, 0x20, 0x00, 0x00, // t
        0x3C, 0x40, 0x40, 0x20, 0x7C, 0x00, 0x00, // u
        0x1C, 0x20, 0x40, 0x20, 0x1C, 0x00, 0x00, // v
        0x3C, 0x40, 0x30, 0x40, 0x3C, 0x00, 0x00, // w
        0x00, 0x44, 0x28, 0x10, 0x28, 0x44, 0x00, // x
        0x0C, 0x50, 0x50, 0x50, 0x3C, 0x00, 0x00, // y
        0x44, 0x64, 0x54, 0x4C, 0x44, 0x00, 0x00, // z
        0x00, 0x08, 0x36, 0x41, 0x00, 0x00, 0x00, // {
        0x00, 0x00, 0x7F, 0x00, 0x00, 0x00, 0x00, // |
        0x00, 0x41, 0x36, 0x08, 0x00, 0x00, 0x00, // }
        0x08, 0x08, 0x2A, 0x1C, 0x08, 0x00, 0x00, // ->
        0x08, 0x1C, 0x2A, 0x08, 0x08, 0x00, 0x00, // <-
    ];
    for c in bytes {
        /* Create an array with our I2C instruction and a blank column at the end */
        let mut data: [u8; 9] = [SSD1306_BYTE_DATA, 0, 0, 0, 0, 0, 0, 0, 0];
        /* Calculate our index into the character table above */
        let index = (*c as usize - 0x20) * 7;
        /* Populate the middle of the array with the data from the character array at the right
         * index */
        data[1..8].copy_from_slice(&FONT_7X7[index..index + 7]);
        /* Write it out to the I2C bus */
        i2c.write(0x3C, &data)?
    }
    Ok(())
}
/// Initialise display with some useful values
///
/// Sends the SSD1306 power-up command sequence (display off, clock,
/// scan mode, offsets, addressing mode, charge pump, pin map, display
/// on) and then zeroes the whole frame buffer.
fn ssd1306_init<I2C, E>(i2c: &mut I2C) -> Result<(), E>
where
    I2C: Write<Error = E>,
{
    // One I2C transaction per slice, in the exact order required by the
    // controller's power-up procedure.
    let init_sequence: [&[u8]; 11] = [
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_DISPLAY_OFF],
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_DISPLAY_CLK_DIV, 0x80],
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_SCAN_MODE_NORMAL],
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_DISPLAY_OFFSET, 0x00, 0x00],
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_MEMORY_ADDR_MODE, 0x00],
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_DISPLAY_START_LINE, 0x00],
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_CHARGE_PUMP, 0x14],
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_PIN_MAP, 0x12],
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_DISPLAY_RAM],
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_DISPLAY_NORMAL],
        &[SSD1306_BYTE_CMD_SINGLE, SSD1306_DISPLAY_ON],
    ];
    for step in &init_sequence {
        i2c.write(0x3C, step)?;
    }
    // Clear the screen: 128 data transfers of eight zeroed columns each.
    let blank_chunk = [
        SSD1306_BYTE_DATA,
        0x00,
        0x00,
        0x00,
        0x00,
        0x00,
        0x00,
        0x00,
        0x00,
    ];
    for _ in 0..128 {
        i2c.write(0x3C, &blank_chunk)?;
    }
    Ok(())
}
/// Position cursor at specified x, y block coordinate (multiple of 8)
///
/// Issues a single command transfer that sets the column window to
/// `[x * 8, 0x7F]` and the page window to `[y, 7]`; subsequent data
/// writes then start at that block coordinate.
fn ssd1306_pos<I2C, E>(i2c: &mut I2C, x: u8, y: u8) -> Result<(), E>
where
    I2C: Write<Error = E>,
{
    i2c.write(
        0x3C,
        &[
            SSD1306_BYTE_CMD,
            SSD1306_COLUMN_RANGE,
            x * 8,
            0x7F,
            SSD1306_PAGE_RANGE,
            y,
            0x07,
        ],
    )
}
|
// svd2rust-generated reader/writer API for the IWDG EWCR (early wakeup)
// register. The long `#[doc]` strings below are taken verbatim from the
// vendor SVD file — do not edit them by hand.
#[doc = "Register `EWCR` reader"]
pub type R = crate::R<EWCR_SPEC>;
#[doc = "Register `EWCR` writer"]
pub type W = crate::W<EWCR_SPEC>;
#[doc = "Field `EWIT` reader - Watchdog counter window value These bits are write access protected (see ). They are written by software to define at which position of the IWDCNT down-counter the early wakeup interrupt must be generated. The early interrupt is generated when the IWDCNT is lower or equal to EWIT\\[11:0\\]
- 1. EWIT\\[11:0\\]
must be bigger than 1. An interrupt is generated only if EWIE = 1. The EWU bit in the must be reset to be able to change the reload value. Note: Reading this register returns the Early wakeup comparator value and the Interrupt enable bit from the VDD voltage domain. This value may not be up to date/valid if a write operation to this register is ongoing, hence the value read from this register is valid only when the EWU bit in the is reset."]
pub type EWIT_R = crate::FieldReader<u16>;
#[doc = "Field `EWIT` writer - Watchdog counter window value These bits are write access protected (see ). They are written by software to define at which position of the IWDCNT down-counter the early wakeup interrupt must be generated. The early interrupt is generated when the IWDCNT is lower or equal to EWIT\\[11:0\\]
- 1. EWIT\\[11:0\\]
must be bigger than 1. An interrupt is generated only if EWIE = 1. The EWU bit in the must be reset to be able to change the reload value. Note: Reading this register returns the Early wakeup comparator value and the Interrupt enable bit from the VDD voltage domain. This value may not be up to date/valid if a write operation to this register is ongoing, hence the value read from this register is valid only when the EWU bit in the is reset."]
pub type EWIT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 12, O, u16>;
// EWIC is write-only: no matching `EWIC_R` reader type is generated.
#[doc = "Field `EWIC` writer - Watchdog early interrupt acknowledge The software must write a 1 into this bit in order to acknowledge the early wakeup interrupt and to clear the EWIF flag. Writing 0 has not effect, reading this flag returns a 0."]
pub type EWIC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EWIE` reader - Watchdog early interrupt enable Set and reset by software. The EWU bit in the must be reset to be able to change the value of this bit."]
pub type EWIE_R = crate::BitReader;
#[doc = "Field `EWIE` writer - Watchdog early interrupt enable Set and reset by software. The EWU bit in the must be reset to be able to change the value of this bit."]
pub type EWIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Field accessors for a captured read of EWCR.
impl R {
    #[doc = "Bits 0:11 - Watchdog counter window value These bits are write access protected (see ). They are written by software to define at which position of the IWDCNT down-counter the early wakeup interrupt must be generated. The early interrupt is generated when the IWDCNT is lower or equal to EWIT\\[11:0\\]
- 1. EWIT\\[11:0\\]
must be bigger than 1. An interrupt is generated only if EWIE = 1. The EWU bit in the must be reset to be able to change the reload value. Note: Reading this register returns the Early wakeup comparator value and the Interrupt enable bit from the VDD voltage domain. This value may not be up to date/valid if a write operation to this register is ongoing, hence the value read from this register is valid only when the EWU bit in the is reset."]
    #[inline(always)]
    pub fn ewit(&self) -> EWIT_R {
        // Low 12 bits of the register value.
        EWIT_R::new((self.bits & 0x0fff) as u16)
    }
    #[doc = "Bit 15 - Watchdog early interrupt enable Set and reset by software. The EWU bit in the must be reset to be able to change the value of this bit."]
    #[inline(always)]
    pub fn ewie(&self) -> EWIE_R {
        // Single flag at bit 15.
        EWIE_R::new(((self.bits >> 15) & 1) != 0)
    }
}
// Field writer proxies for building a write to EWCR.
impl W {
    #[doc = "Bits 0:11 - Watchdog counter window value These bits are write access protected (see ). They are written by software to define at which position of the IWDCNT down-counter the early wakeup interrupt must be generated. The early interrupt is generated when the IWDCNT is lower or equal to EWIT\\[11:0\\]
- 1. EWIT\\[11:0\\]
must be bigger than 1. An interrupt is generated only if EWIE = 1. The EWU bit in the must be reset to be able to change the reload value. Note: Reading this register returns the Early wakeup comparator value and the Interrupt enable bit from the VDD voltage domain. This value may not be up to date/valid if a write operation to this register is ongoing, hence the value read from this register is valid only when the EWU bit in the is reset."]
    #[inline(always)]
    #[must_use]
    pub fn ewit(&mut self) -> EWIT_W<EWCR_SPEC, 0> {
        EWIT_W::new(self)
    }
    #[doc = "Bit 14 - Watchdog early interrupt acknowledge The software must write a 1 into this bit in order to acknowledge the early wakeup interrupt and to clear the EWIF flag. Writing 0 has not effect, reading this flag returns a 0."]
    #[inline(always)]
    #[must_use]
    pub fn ewic(&mut self) -> EWIC_W<EWCR_SPEC, 14> {
        EWIC_W::new(self)
    }
    #[doc = "Bit 15 - Watchdog early interrupt enable Set and reset by software. The EWU bit in the must be reset to be able to change the value of this bit."]
    #[inline(always)]
    #[must_use]
    pub fn ewie(&mut self) -> EWIE_W<EWCR_SPEC, 15> {
        EWIE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "IWDG early wakeup interrupt register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ewcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ewcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct EWCR_SPEC;
impl crate::RegisterSpec for EWCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ewcr::R`](R) reader structure"]
impl crate::Readable for EWCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ewcr::W`](W) writer structure"]
impl crate::Writable for EWCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets EWCR to value 0"]
impl crate::Resettable for EWCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// An Azure Blockchain member: a tracked ARM resource plus optional
/// member properties and SKU. (AutoRust-generated model.)
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlockchainMember {
    #[serde(flatten)]
    pub tracked_resource: TrackedResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BlockchainMemberProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
}
/// Protocol, credentials, consortium membership and firewall settings
/// of a blockchain member. All fields are optional on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlockchainMemberProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub protocol: Option<blockchain_member_properties::Protocol>,
    #[serde(rename = "validatorNodesSku", default, skip_serializing_if = "Option::is_none")]
    pub validator_nodes_sku: Option<BlockchainMemberNodesSku>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<blockchain_member_properties::ProvisioningState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dns: Option<String>,
    #[serde(rename = "userName", default, skip_serializing_if = "Option::is_none")]
    pub user_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub password: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub consortium: Option<String>,
    #[serde(rename = "consortiumManagementAccountAddress", default, skip_serializing_if = "Option::is_none")]
    pub consortium_management_account_address: Option<String>,
    #[serde(rename = "consortiumManagementAccountPassword", default, skip_serializing_if = "Option::is_none")]
    pub consortium_management_account_password: Option<String>,
    #[serde(rename = "consortiumRole", default, skip_serializing_if = "Option::is_none")]
    pub consortium_role: Option<String>,
    #[serde(rename = "consortiumMemberDisplayName", default, skip_serializing_if = "Option::is_none")]
    pub consortium_member_display_name: Option<String>,
    #[serde(rename = "rootContractAddress", default, skip_serializing_if = "Option::is_none")]
    pub root_contract_address: Option<String>,
    #[serde(rename = "publicKey", default, skip_serializing_if = "Option::is_none")]
    pub public_key: Option<String>,
    #[serde(rename = "firewallRules", default, skip_serializing_if = "Vec::is_empty")]
    pub firewall_rules: Vec<FirewallRule>,
}
/// Enumerations scoped to `BlockchainMemberProperties`.
pub mod blockchain_member_properties {
    use super::*;
    /// Ledger protocol run by the member.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Protocol {
        NotSpecified,
        Parity,
        Quorum,
        Corda,
    }
    /// Provisioning lifecycle state reported by the service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        NotSpecified,
        Updating,
        Deleting,
        Succeeded,
        Failed,
        Stale,
    }
}
/// Patch body for updating a blockchain member (tags and mutable properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlockchainMemberUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BlockchainMemberPropertiesUpdate>,
}
/// Mutable member properties: the transaction-node update fields plus the
/// consortium management account password.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlockchainMemberPropertiesUpdate {
    #[serde(flatten)]
    pub transaction_node_properties_update: TransactionNodePropertiesUpdate,
    #[serde(rename = "consortiumManagementAccountPassword", default, skip_serializing_if = "Option::is_none")]
    pub consortium_management_account_password: Option<String>,
}
/// ARM SKU reference (name and tier).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
}
/// Capacity (node count) of the member's validator nodes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlockchainMemberNodesSku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub capacity: Option<i32>,
}
/// A named IP-range firewall rule.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FirewallRule {
    #[serde(rename = "ruleName", default, skip_serializing_if = "Option::is_none")]
    pub rule_name: Option<String>,
    #[serde(rename = "startIpAddress", default, skip_serializing_if = "Option::is_none")]
    pub start_ip_address: Option<String>,
    #[serde(rename = "endIpAddress", default, skip_serializing_if = "Option::is_none")]
    pub end_ip_address: Option<String>,
}
/// One page of blockchain members; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlockchainMemberCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<BlockchainMember>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// One page of consortium members; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConsortiumMemberCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ConsortiumMember>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A member of a consortium: identity, role, status and membership dates.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConsortiumMember {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub role: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(rename = "joinDate", default, skip_serializing_if = "Option::is_none")]
    pub join_date: Option<String>,
    #[serde(rename = "dateModified", default, skip_serializing_if = "Option::is_none")]
    pub date_modified: Option<String>,
}
/// A list of API keys (not paginated).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiKeyCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub keys: Vec<ApiKey>,
}
/// A single named API key and its value.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiKey {
    #[serde(rename = "keyName", default, skip_serializing_if = "Option::is_none")]
    pub key_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}
/// Status of a long-running operation (name plus start/end timestamps).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
}
/// Request body asking whether a resource name is available.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NameAvailabilityRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the `type_` field name + rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// Response for a name-availability check, with an optional reason when
/// the name is unavailable.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NameAvailability {
    #[serde(rename = "nameAvailable", default, skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<name_availability::Reason>,
}
/// Enumerations scoped to `NameAvailability`.
pub mod name_availability {
    use super::*;
    /// Why a requested name is unavailable.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Reason {
        NotSpecified,
        AlreadyExists,
        Invalid,
    }
}
/// A list of consortia (not paginated).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConsortiumCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Consortium>,
}
/// A consortium: its name and the ledger protocol it runs.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Consortium {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub protocol: Option<consortium::Protocol>,
}
/// Enumerations scoped to `Consortium`.
pub mod consortium {
    use super::*;
    /// Ledger protocol run by the consortium.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Protocol {
        NotSpecified,
        Parity,
        Quorum,
        Corda,
    }
}
/// One page of resource-provider operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceProviderOperationCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ResourceProviderOperation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// One operation exposed by the resource provider, with display metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceProviderOperation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<ResourceProviderOperationDisplay>,
}
/// Human-readable description of a resource-provider operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceProviderOperationDisplay {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// SKUs available per resource type (not paginated).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceTypeSkuCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ResourceTypeSku>,
}
/// The SKUs supported by one resource type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceTypeSku {
    #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub skus: Vec<SkuSetting>,
}
/// A SKU definition: name, tier, and where/when it is available.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SkuSetting {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub locations: Vec<String>,
    #[serde(rename = "requiredFeatures", default, skip_serializing_if = "Vec::is_empty")]
    pub required_features: Vec<String>,
}
/// A transaction node: a base ARM resource with a location and optional
/// node properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransactionNode {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<TransactionNodeProperties>,
}
/// State, endpoint, credentials and firewall rules of a transaction node.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransactionNodeProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<transaction_node_properties::ProvisioningState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dns: Option<String>,
    #[serde(rename = "publicKey", default, skip_serializing_if = "Option::is_none")]
    pub public_key: Option<String>,
    #[serde(rename = "userName", default, skip_serializing_if = "Option::is_none")]
    pub user_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub password: Option<String>,
    #[serde(rename = "firewallRules", default, skip_serializing_if = "Vec::is_empty")]
    pub firewall_rules: Vec<FirewallRule>,
}
/// Enumerations scoped to `TransactionNodeProperties`.
pub mod transaction_node_properties {
    use super::*;
    /// Provisioning lifecycle state (no `Stale`, unlike the member enum).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        NotSpecified,
        Updating,
        Deleting,
        Succeeded,
        Failed,
    }
}
/// Patch body for updating a transaction node.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransactionNodeUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<TransactionNodePropertiesUpdate>,
}
/// Mutable transaction-node properties: password and firewall rules.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransactionNodePropertiesUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub password: Option<String>,
    #[serde(rename = "firewallRules", default, skip_serializing_if = "Vec::is_empty")]
    pub firewall_rules: Vec<FirewallRule>,
}
/// One page of transaction nodes; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransactionNodeCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<TransactionNode>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// An ARM tracked resource: base resource plus location and tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Base ARM resource identity: id, name and type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the `type_` field name + rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
|
#[macro_use]
extern crate approx;
use itertools::enumerate;
use ndarray::linalg;
use ndarray::{LinalgScalar,ArrayView,ArrayViewMut,Array, Ix2};
use rand::Rng;
use std::fs::File;
use std::io::Write;
use matrix_mult::naive_sequential;
use matrix_mult::faster_vec;
use std::ops::AddAssign;
use matrix_mult::vectorisation;
use matrix_mult::matrix_adaptive;
use rayon_adaptive::prelude::*;
use rayon_adaptive::Policy;
/// Number of timed repetitions per input size.
const ITERS: usize = 5;

/// Arithmetic mean of the collected timings.
///
/// `ITERS` is already a `usize`, so the redundant `as usize` cast in the
/// array length has been dropped (clippy: `unnecessary_cast`).
fn average(numbers: [f64; ITERS]) -> f64 {
    numbers.iter().sum::<f64>() / numbers.len() as f64
}
/// Benchmark entry point: run the adaptive matrix product over input
/// sizes 250, 500, …, 10000 and record averaged timings to a data file.
///
/// The `Result` returned by `do_benchmark` was previously discarded, so
/// failures to create or write the output file went unnoticed; it is now
/// propagated with `?`.
fn main() -> std::io::Result<()> {
    let input_size: Vec<usize> = (1..41).map(|i| i * 250).collect();
    do_benchmark(
        "openblas_join_adaptive.data",
        |a, b, mut c| linalg::general_mat_mul(1.0, &a, &b, 1.0, &mut c),
        input_size,
    )?;
    Ok(())
}
// Benchmark driver: for every `size` in `input_size`, build two pseudo-random
// `size x size` f32 matrices, multiply them ITERS times under a Join policy
// whose leaves run with an Adaptive sub-policy, verify the product against
// ndarray's general_mat_mul, and append "size<TAB>average_ns" to `filename`.
// `resolution` performs the actual leaf-level multiply into the destination view.
fn do_benchmark<F>(filename: &str,resolution : F, input_size: Vec<usize>) -> std::io::Result<()>
where
F: Fn(ArrayView<f32, Ix2>, ArrayView<f32, Ix2>, ArrayViewMut<f32,Ix2>) + Copy + Sync
{
let mut file = File::create(filename)?;
for (_j, size) in enumerate(input_size) {
// One timing sample (ns) per iteration.
let mut time= [0f64; ITERS as usize];
println!("SIZE : {:?}", size);
for i in 0..ITERS {
let height = size as usize;
let width = size as usize;
let mut rng = rand::thread_rng();
// Random offset so the inputs differ between iterations.
let random = rng.gen_range(0.0, 1.0);
let an = Array::from_shape_fn((height, width), |(i, j)| {
((j + i * width) % 3) as f32 + random
});
let bn = Array::from_shape_fn((width, height), |(i, j)| {
((j + i * height) % 3) as f32 - random
});
let mut dest = Array::zeros((height, height));
let (ddim1,ddim2)= dest.dim();
let matrix_half = matrix_adaptive::Matrix {
a: an.view(),
b: bn.view(),
d: dest.view_mut(),
asize: an.dim(),
bsize: bn.dim(),
dsize: (ddim1,ddim2),
};
let start_time = time::precise_time_ns();
// NOTE(review): outer Join policy presumably splits the work into
// height*height/64-sized blocks — confirm against rayon_adaptive docs.
matrix_half
.with_policy(Policy::Join(height*height/64))
.for_each(|mut e| {
// Refresh the cached sub-block dimensions after the Join split.
let (ra,ca) = e.a.dim();
let (rb,cb) = e.b.dim();
let (rd,cd) = e.d.dim();
e.asize = (ra,ca);
e.bsize = (rb,cb);
e.dsize = (rd,cd);
// Inner adaptive policy: block sizes derived from the sub-matrix area.
e.with_policy(Policy::Adaptive((8.0*f32::log2((rd*cd) as f32)) as usize ,
((16.0*f32::sqrt((rd*cd) as f32) as f32) as usize ))).for_each( |e| {
resolution(e.a,e.b,e.d);
})
});
let end_time = time::precise_time_ns();
// Correctness check: recompute the product sequentially and compare
// element-wise within a loose epsilon (timing is excluded from this).
let mut verif = Array::zeros((height, height));
linalg::general_mat_mul(1f32, &an.view(), &bn.view(), 1f32, &mut verif.view_mut());
assert_abs_diff_eq!(
dest.as_slice().unwrap(),
verif.as_slice().unwrap(),
epsilon = 1e-1f32
);
time[i] = (end_time - start_time) as f64;
}
file.write_all(format!("{}\t{}\n", size, average(time)).as_bytes())?;
}
Ok(())
}
use crate::parser::parser_defs;
use crate::parser::svg_util;
/// Placeholder for computing elliptical-arc segments (SVG path parsing).
/// Currently a no-op stub. NOTE(review): "eliptical" is a typo for
/// "elliptical", but the name is public, so renaming would break callers.
pub fn calculate_eliptical_arcs() {
}
/// Placeholder for computing a point on an ellipse; always yields the origin
/// for now. (Note: "elipsis" appears to be a typo for "ellipse".)
fn calc_point_on_elipsis() -> (f32, f32) {
    let origin: (f32, f32) = (0.0, 0.0);
    origin
}
extern crate game_of_life;
use game_of_life::parsers::rle::*;
// A well-formed RLE pattern (the 13x13 Pulsar from conwaylife.com) must parse.
#[test]
fn test_rle_correct_file() {
let input = "#N Pulsar
#O John Conway
#C A period 3 oscillator. Despite its size, this is the fourth most common oscillator (and by
#C far the most common of period greater than 2).
#C www.conwaylife.com/wiki/index.php?title=Pulsar
x = 13, y = 13, rule = B3/S23
2b3o3b3o2b2$o4bobo4bo$o4bobo4bo$o4bobo4bo$2b3o3b3o2b2$2b3o3b3o2b$o4bob
o4bo$o4bobo4bo$o4bobo4bo2$2b3o3b3o!";
assert!(parse_rle_file(&input).is_ok())
}
// Same pattern with an invalid token ("wrong") injected into the run-length
// data; the parser must reject it.
#[test]
fn test_rle_incorrect_file() {
let input = "#N Pulsar
#O John Conway
#C A period 3 oscillator. Despite its size, this is the fourth most common oscillator (and by
#C far the most common of period greater than 2).
#C www.conwaylife.com/wiki/index.php?title=Pulsar
x = 13, y = 13, rule = B3/S23
2b3o3b3o2b2$o4bobo4bo$o4wrong4bo$o4bobo4bo$2b3o3b3o2b2$2b3o3b3o2b$o4bob
o4bo$o4bobo4bo$o4bobo4bo2$2b3o3b3o!";
assert!(parse_rle_file(&input).is_err())
}
|
//! This is the model object for a QueryPlan trimmed down to only contain _owned_ fields
//! that are required for executing a QueryPlan as implemented in the existing Apollo Gateway.
//!
//! The [SelectionSet] in the `requires` field of a [FetchNode] is trimmed to only be a list of
//! either a [Field] or an [InlineFragment], since those are the only potential values needed to
//! execute a query plan. Furthermore, within a [Field] or [InlineFragment], we only need
//! names, aliases, type conditions, and recursive sub-[SelectionSet]s.
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
// Round-trips through the QueryPlanSerde shim so the JSON form carries the
// `"kind": "QueryPlan"` wrapper used by the JS gateway.
#[serde(from = "QueryPlanSerde", into = "QueryPlanSerde")]
pub struct QueryPlan {
// Root of the plan tree; `None` represents an empty plan.
pub node: Option<PlanNode>,
}
impl From<QueryPlanSerde> for QueryPlan {
    /// Unwrap the serde shim's single variant back into the public shape.
    fn from(qps: QueryPlanSerde) -> Self {
        match qps {
            QueryPlanSerde::QueryPlan { node } => Self { node },
        }
    }
}
/// Implement `From` rather than a hand-written `Into`: the std blanket impl
/// (`impl<T, U: From<T>> Into<U> for T`) still provides the `Into` conversion
/// that `#[serde(into = "QueryPlanSerde")]` and any callers rely on.
impl From<QueryPlan> for QueryPlanSerde {
    fn from(qp: QueryPlan) -> Self {
        QueryPlanSerde::QueryPlan { node: qp.node }
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
// Internally-tagged on "kind", matching the gateway JSON (e.g. {"kind":"Fetch",...}).
#[serde(rename_all = "PascalCase", tag = "kind")]
pub enum PlanNode {
// Children executed one after another.
Sequence { nodes: Vec<PlanNode> },
// Children executed concurrently.
Parallel { nodes: Vec<PlanNode> },
Fetch(FetchNode),
Flatten(FlattenNode),
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
// A single subgraph fetch: the service to call and the operation to send it.
pub struct FetchNode {
pub service_name: String,
// Selections that must already be resolved before this fetch can run
// (used to build `_entities` representations — see the test fixtures below).
#[serde(skip_serializing_if = "Option::is_none")]
pub requires: Option<SelectionSet>,
// Names of operation variables this fetch actually uses.
pub variable_usages: Vec<String>,
pub operation: GraphQLDocument,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
// Runs `node` at a path inside the response (e.g. ["topProducts", "@"]).
pub struct FlattenNode {
pub path: ResponsePath,
pub node: Box<PlanNode>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
// Trimmed selection kinds: only fields and inline fragments are needed for
// execution (see module docs).
#[serde(rename_all = "PascalCase", tag = "kind")]
pub enum Selection {
Field(Field),
InlineFragment(InlineFragment),
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
// A field selection: optional alias, field name, and optional nested selections.
pub struct Field {
#[serde(skip_serializing_if = "Option::is_none")]
pub alias: Option<String>,
pub name: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub selections: Option<SelectionSet>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
// An inline fragment: optional type condition plus its selections.
pub struct InlineFragment {
#[serde(skip_serializing_if = "Option::is_none")]
pub type_condition: Option<String>,
pub selections: SelectionSet,
}
// A list of trimmed selections (fields / inline fragments).
pub type SelectionSet = Vec<Selection>;
// A raw GraphQL operation, kept as an unparsed string.
pub type GraphQLDocument = String;
// Path segments addressing a location in a response, e.g. ["topProducts", "@"].
pub type ResponsePath = Vec<String>;
/// Hacking Json Serde to match JS.
// A single-variant, internally-tagged enum makes serde emit/expect the
// top-level `"kind": "QueryPlan"` wrapper produced by the JS query planner.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase", tag = "kind")]
enum QueryPlanSerde {
QueryPlan { node: Option<PlanNode> },
}
#[cfg(test)]
mod tests {
use super::*;
use crate::consts::TYPENAME_FIELD_NAME;
use serde_json::Value;
// Canonical query-plan JSON, as produced by the JS gateway's planner.
// Both round-trip tests below compare against this fixture, so the content
// must not be altered.
fn qp_json_string() -> &'static str {
r#"
{
"kind": "QueryPlan",
"node": {
"kind": "Sequence",
"nodes": [
{
"kind": "Fetch",
"serviceName": "product",
"variableUsages": [],
"operation": "{topProducts{__typename ...on Book{__typename isbn}...on Furniture{name}}product(upc:\"1\"){__typename ...on Book{__typename isbn}...on Furniture{name}}}"
},
{
"kind": "Parallel",
"nodes": [
{
"kind": "Sequence",
"nodes": [
{
"kind": "Flatten",
"path": ["topProducts", "@"],
"node": {
"kind": "Fetch",
"serviceName": "books",
"requires": [
{
"kind": "InlineFragment",
"typeCondition": "Book",
"selections": [
{ "kind": "Field", "name": "__typename" },
{ "kind": "Field", "name": "isbn" }
]
}
],
"variableUsages": [],
"operation": "query($representations:[_Any!]!){_entities(representations:$representations){...on Book{__typename isbn title year}}}"
}
},
{
"kind": "Flatten",
"path": ["topProducts", "@"],
"node": {
"kind": "Fetch",
"serviceName": "product",
"requires": [
{
"kind": "InlineFragment",
"typeCondition": "Book",
"selections": [
{ "kind": "Field", "name": "__typename" },
{ "kind": "Field", "name": "isbn" },
{ "kind": "Field", "name": "title" },
{ "kind": "Field", "name": "year" }
]
}
],
"variableUsages": [],
"operation": "query($representations:[_Any!]!){_entities(representations:$representations){...on Book{name}}}"
}
}
]
},
{
"kind": "Sequence",
"nodes": [
{
"kind": "Flatten",
"path": ["product"],
"node": {
"kind": "Fetch",
"serviceName": "books",
"requires": [
{
"kind": "InlineFragment",
"typeCondition": "Book",
"selections": [
{ "kind": "Field", "name": "__typename" },
{ "kind": "Field", "name": "isbn" }
]
}
],
"variableUsages": [],
"operation": "query($representations:[_Any!]!){_entities(representations:$representations){...on Book{__typename isbn title year}}}"
}
},
{
"kind": "Flatten",
"path": ["product"],
"node": {
"kind": "Fetch",
"serviceName": "product",
"requires": [
{
"kind": "InlineFragment",
"typeCondition": "Book",
"selections": [
{ "kind": "Field", "name": "__typename" },
{ "kind": "Field", "name": "isbn" },
{ "kind": "Field", "name": "title" },
{ "kind": "Field", "name": "year" }
]
}
],
"variableUsages": [],
"operation": "query($representations:[_Any!]!){_entities(representations:$representations){...on Book{name}}}"
}
}
]
}
]
}
]
}
}"#
}
// Hand-built QueryPlan mirroring qp_json_string() exactly; the round-trip
// tests assert equality in both directions, so structure and strings here
// must stay in lockstep with the JSON fixture.
fn query_plan() -> QueryPlan {
QueryPlan {
node: Some(PlanNode::Sequence {
nodes: vec![
PlanNode::Fetch(FetchNode {
service_name: "product".to_owned(),
variable_usages: vec![],
requires: None,
operation: "{topProducts{__typename ...on Book{__typename isbn}...on Furniture{name}}product(upc:\"1\"){__typename ...on Book{__typename isbn}...on Furniture{name}}}".to_owned(),
}),
PlanNode::Parallel {
nodes: vec![
PlanNode::Sequence {
nodes: vec![
PlanNode::Flatten(FlattenNode {
path: vec![
"topProducts".to_owned(), "@".to_owned()],
node: Box::new(PlanNode::Fetch(FetchNode {
service_name: "books".to_owned(),
variable_usages: vec![],
requires: Some(vec![
Selection::InlineFragment(InlineFragment {
type_condition: Some("Book".to_owned()),
selections: vec![
Selection::Field(Field {
alias: None,
name: TYPENAME_FIELD_NAME.to_owned(),
selections: None,
}),
Selection::Field(Field {
alias: None,
name: "isbn".to_owned(),
selections: None,
})],
})]),
operation: "query($representations:[_Any!]!){_entities(representations:$representations){...on Book{__typename isbn title year}}}".to_owned(),
})),
}),
PlanNode::Flatten(FlattenNode {
path: vec![
"topProducts".to_owned(),
"@".to_owned()],
node: Box::new(PlanNode::Fetch(FetchNode {
service_name: "product".to_owned(),
variable_usages: vec![],
requires: Some(vec![
Selection::InlineFragment(InlineFragment {
type_condition: Some("Book".to_owned()),
selections: vec![
Selection::Field(Field {
alias: None,
name: TYPENAME_FIELD_NAME.to_owned(),
selections: None,
}),
Selection::Field(Field {
alias: None,
name: "isbn".to_owned(),
selections: None,
}),
Selection::Field(Field {
alias: None,
name: "title".to_owned(),
selections: None,
}),
Selection::Field(Field {
alias: None,
name: "year".to_owned(),
selections: None,
})],
})]),
operation: "query($representations:[_Any!]!){_entities(representations:$representations){...on Book{name}}}".to_owned(),
})),
})]
},
PlanNode::Sequence {
nodes: vec![
PlanNode::Flatten(FlattenNode {
path: vec!["product".to_owned()],
node: Box::new(PlanNode::Fetch(FetchNode {
service_name: "books".to_owned(),
variable_usages: vec![],
requires: Some(vec![
Selection::InlineFragment(InlineFragment {
type_condition: Some("Book".to_owned()),
selections: vec![
Selection::Field(Field {
alias: None,
name: TYPENAME_FIELD_NAME.to_owned(),
selections: None,
}),
Selection::Field(Field {
alias: None,
name: "isbn".to_owned(),
selections: None,
})],
})]),
operation: "query($representations:[_Any!]!){_entities(representations:$representations){...on Book{__typename isbn title year}}}".to_owned(),
})),
}),
PlanNode::Flatten(FlattenNode {
path: vec!["product".to_owned()],
node: Box::new(PlanNode::Fetch(FetchNode {
service_name: "product".to_owned(),
variable_usages: vec![],
requires: Some(vec![
Selection::InlineFragment(InlineFragment {
type_condition: Some("Book".to_owned()),
selections: vec![
Selection::Field(Field {
alias: None,
name: TYPENAME_FIELD_NAME.to_owned(),
selections: None,
}),
Selection::Field(Field {
alias: None,
name: "isbn".to_owned(),
selections: None,
}),
Selection::Field(Field {
alias: None,
name: "title".to_owned(),
selections: None,
}),
Selection::Field(Field {
alias: None,
name: "year".to_owned(),
selections: None,
})],
})]),
operation: "query($representations:[_Any!]!){_entities(representations:$representations){...on Book{name}}}".to_owned(),
})),
})]
}]
}]
})
}
}
// Deserializing the canonical JSON must reproduce the hand-built plan.
#[test]
fn query_plan_from_json() {
assert_eq!(
serde_json::from_str::<QueryPlan>(qp_json_string()).unwrap(),
query_plan()
);
}
// Serializing the hand-built plan must reproduce the canonical JSON
// (compared as serde_json::Value, so formatting differences don't matter).
#[test]
fn query_plan_into_json() {
assert_eq!(
serde_json::to_value(query_plan()).unwrap(),
serde_json::from_str::<Value>(qp_json_string()).unwrap()
);
}
}
|
extern crate rayon;
use itertools::enumerate;
use matrix_mult::benchmark;
use ndarray::LinalgScalar;
use ndarray::{Array, ArrayView, Ix2,linalg};
use rayon_adaptive::Policy;
use std::fs::File;
use std::io::Write;
use matrix_mult::vectorisation_packed_simd;
use rand::Rng;
/// Number of timed repetitions per matrix size.
const ITERS: usize = 5;
/// Arithmetic mean of one variant's timing samples (nanoseconds).
fn average(numbers: [f64; ITERS]) -> f64 {
    // ITERS is already a `usize`; the previous `ITERS as usize` cast was redundant.
    numbers.iter().sum::<f64>() / numbers.len() as f64
}
/// Benchmark adaptive block-size variants of OpenBLAS matrix multiplication.
fn main() {
    // Surface benchmark I/O failures instead of silently discarding the
    // `#[must_use]` Result that do_benchmark returns (the old code dropped it).
    do_benchmark("OPENBLAS_new_cut_adaptive_size_test.data", |s, a, b, p| {
        benchmark::benchmark_adaptive_generic(s, a, b, p, |a, b, mut c| {
            linalg::general_mat_mul(1.0, &a, &b, 1.0, &mut c)
        })
    })
    .expect("benchmark run failed");
}
/// Run the adaptive-policy benchmark over matrix sizes 250, 500, ..., 10000 and
/// write one tab-separated line per size to `filename`:
/// `size<TAB>avg16<TAB>avg32<TAB>...<TAB>avg128`.
///
/// Eight variants are timed per size; variant v (1-based) uses an Adaptive
/// policy with a max block size of `v * 16 * sqrt(height * height)`, exactly
/// as the previous hand-unrolled version did with its eight separate arrays.
/// `f` performs one multiplication under the given `Policy` and returns the
/// elapsed time in nanoseconds.
pub fn do_benchmark<
    F: Fn(usize, ArrayView<f32, Ix2>, ArrayView<f32, Ix2>, Policy) -> u64,
>(
    filename: &str,
    f: F,
) -> std::io::Result<()> {
    // Number of block-size variants (16, 32, ..., 128).
    const VARIANTS: usize = 8;
    let mut file = File::create(filename)?;
    let input_size: Vec<usize> = (1..41).map(|i| i * 250).collect();
    for size in input_size {
        // times[v][i] = nanoseconds for variant v at iteration i; replaces the
        // eight copy-pasted `adaptive16..adaptive128` arrays.
        let mut times = [[0f64; ITERS]; VARIANTS];
        eprintln!("SIZE {:?}", size);
        for i in 0..ITERS {
            let height = size;
            let width = size;
            let mut rng = rand::thread_rng();
            // Random offset so repeated iterations don't multiply identical inputs.
            let random = rng.gen_range(0.0, 1.0);
            let an = Array::from_shape_fn((height, width), |(r, c)| {
                ((c + r * width) % 3) as f32 + random
            });
            let bn = Array::from_shape_fn((width, height), |(r, c)| {
                ((c + r * height) % 3) as f32 - random
            });
            // Policy parameters shared by every variant at this size.
            let cells = (height * height) as f32;
            let min_block = f32::log2(cells) as usize;
            for (v, slot) in times.iter_mut().enumerate() {
                let factor = (v + 1) as f32 * 16.0;
                slot[i] = f(
                    height,
                    an.view(),
                    bn.view(),
                    Policy::Adaptive(min_block, (factor * f32::sqrt(cells)) as usize),
                ) as f64;
            }
        }
        // One output line: size followed by the eight per-variant averages,
        // matching the original "{}\t{}...\n" format exactly.
        let mut line = size.to_string();
        for samples in &times {
            line.push_str(&format!("\t{}", average(*samples)));
        }
        line.push('\n');
        file.write_all(line.as_bytes())?;
    }
    Ok(())
}
|
// Marker type: its impl below carries the Adam optimizer routines.
struct AdamOptimizer {
}
// Hyper-parameter bundle. Appears unused in this file — the impl that would
// consume it is commented out below.
struct CheckConfig {
// When true, fall back to the default hyper-parameters.
default: bool,
alpha:f64,
beta1:f64,
beta2:f64,
epsilon:f64,
epoch:f64
}
// Marker type: its impl below carries the cost/gradient math.
struct GradientDescent {
}
//impl CheckConfig {
// fn is_default(&self) {
// if self.default == true {
// let (alpha, beta1, beta2, epsilon, epoch) = (0.001, 0.9, 0.999, 0.1, 10);
// println!("Using default configs. alpha: {}, beta1: {}, beta2: {}, epsilon: {}", alpha, beta1, beta2, epsilon, epoch);
// } else {
// let (alpha, beta1, beta2, epsilon, epoch) = (self.alpha, self.beta1, self.beta2, self.epsilon);
// println!("Using custom configs. alpha: {}, beta1: {}, beta2: {}, epsilon: {}", alpha, beta1, beta2, epsilon, epoch);
// }
// }
//}
impl AdamOptimizer {
    /// Report which hyper-parameter set is in effect.
    ///
    /// With `default == true` the canonical defaults
    /// (alpha=0.001, beta1=0.9, beta2=0.999, epsilon=0.1, epoch=10) are
    /// printed; otherwise the caller-supplied values are echoed back.
    fn is_default(&self, default: bool, alpha: f64, beta1: f64, beta2: f64, epsilon: f64, epoch: i64) {
        if default {
            // Shadow the arguments with the default configuration.
            let (alpha, beta1, beta2, epsilon, epoch) = (0.001, 0.9, 0.999, 0.1, 10);
            println!("Using default configs. alpha: {}, beta1: {}, beta2: {}, epsilon: {}, epoch: {}", alpha, beta1, beta2, epsilon, epoch)
        } else {
            // The old no-op re-binding of the arguments to themselves is removed.
            println!("Using custom configs. alpha: {}, beta1: {}, beta2: {}, epsilon: {}, epoch: {}", alpha, beta1, beta2, epsilon, epoch)
        }
    }
    /// One Adam update step for a two-parameter model.
    ///
    /// `gradient` and `theta` each hold two components; returns the updated
    /// parameter vector. The moment estimates are re-initialized to zero on
    /// every call (stateless — a stateful optimizer would carry m/v/t across
    /// steps; `epoch` is accepted for interface compatibility but unused here).
    fn backwards_propagation(&self, alpha: f64, beta1: f64, beta2: f64, epsilon: f64, epoch: i64, gradient: Vec<f64>, theta: Vec<f64>) -> std::vec::Vec<f64> {
        let _ = epoch;
        let m = 0.0;
        let v = 0.0;
        let t: i32 = 0;
        let t_bp: i32 = t + 1;
        // Biased first/second moment estimates (Adam, Kingma & Ba 2015).
        let m_bp = vec![beta1 * m + (1.0 - beta1) * gradient[0], beta1 * m + (1.0 - beta1) * gradient[1]];
        let v_bp = vec![beta2 * v + (1.0 - beta2) * gradient[0].powi(2), beta2 * v + (1.0 - beta2) * gradient[1].powi(2)];
        // Bias-corrected estimates.
        let m_hat = vec![m_bp[0] / (1.0 - beta1.powi(t_bp)), m_bp[1] / (1.0 - beta1.powi(t_bp))];
        let v_hat = vec![v_bp[0] / (1.0 - beta2.powi(t_bp)), v_bp[1] / (1.0 - beta2.powi(t_bp))];
        // Update rule: theta - alpha * m_hat / (sqrt(v_hat) + epsilon).
        // Fixes three defects in the previous version: theta_bp[1] read
        // m_hat[0] instead of m_hat[1]; epsilon was subtracted from the
        // quotient instead of added to the denominator (per the Adam paper);
        // and `theta_bp as Vec<f64>` was an invalid non-primitive cast.
        vec![
            theta[0] - alpha * (m_hat[0] / (v_hat[0].sqrt() + epsilon)),
            theta[1] - alpha * (m_hat[1] / (v_hat[1].sqrt() + epsilon)),
        ]
    }
}
impl GradientDescent {
    /// Mean squared error cost for the linear model h(x) = theta_0 + theta_1 * x.
    ///
    /// Fixes a defect: the previous version compared the prediction against the
    /// input value `i` (from `x`) instead of the target in `y`, leaving `y`
    /// entirely unused. Note: an empty `x` still yields NaN (0/0), as before.
    fn cost_function(&self, x: &Vec<f64>, y: &Vec<f64>, theta_0: f64, theta_1: f64) -> f64 {
        let m = x.len() as f64;
        let mut v: f64 = 0.0;
        for (xi, yi) in x.iter().zip(y.iter()) {
            let h = theta_0 + theta_1 * xi;
            v += (h - yi).powi(2);
        }
        v / (2.0 * m)
    }
    /// Gradient of the squared error at a single sample (x, y):
    /// [d/d theta_0, d/d theta_1] = [h - y, (h - y) * x].
    fn gradient_function(&self, x: &f64, y: &f64, theta_0: &f64, theta_1: &f64) -> Vec<f64> {
        let h = theta_0 + theta_1 * x;
        vec![h - y, (h - y) * x]
    }
}
// Toy training loop wiring AdamOptimizer and GradientDescent together on a
// small identical x/y dataset.
fn main() {
let x = vec![1.0, 2.1, 3.9, 4.2, 5.1];
let y = vec![1.0, 2.1, 3.9, 4.2, 5.1];
// NOTE(review): with epoch = 10 the `(j+1) % print_interval == 0` branch
// below can never fire; only the `j == 0` branch prints.
let print_interval = 100;
let initial_theta = vec![0.0, 0.0];
let m = x.len().clone();
let default = true;
let (alpha, beta1, beta2, epsilon, epoch) = (0.001, 0.9, 0.999, 0.1, 10);
//let batch_size = // some vector
let theta = vec![0.0, 0.0];
//let lambda_h = |theta_0, theta_1, x| theta_0+theta_1*x; Old closure
//let weights = theta;
let runner = AdamOptimizer{};
runner.is_default(default, alpha, beta1, beta2, epsilon, epoch);
let gd = GradientDescent{};
let initial_cost = gd.cost_function(&x.clone(), &y.clone(), initial_theta[0], initial_theta[1]);
let initial_cost_vector = vec![initial_cost];
let mut history = Vec::new();
history.extend([theta.clone(), initial_cost_vector].iter().cloned());
for j in 0..epoch {
for i in 0..m {
let gradient = gd.gradient_function(&x[i], &y[i], &initial_theta[0], &initial_theta[1]);
// NOTE(review): this inner `theta` shadows the outer binding and is dropped
// at the end of each iteration, and `initial_theta` is never updated — so
// the optimizer's output is never fed back and the model does not learn.
// The updated parameters likely need to be written back to a mutable theta.
let theta = runner.backwards_propagation(alpha, beta1, beta2, epsilon, epoch, gradient, theta.clone());
if (j+1)%print_interval == 0 {
let cost = gd.cost_function(&x, &y, initial_theta[0], initial_theta[1]);
let cost_vector = vec![cost];
history.extend([theta, cost_vector].iter().cloned());
println!("{:?}", history)
} else if j==0 {
let cost = gd.cost_function(&x, &y, initial_theta[0], initial_theta[1]);
let cost_vector = vec![cost];
history.extend([theta, cost_vector].iter().cloned());
println!("{:?}", history)
}
}
}
}
|
// The wasm-pack uses wasm-bindgen to build and generate JavaScript binding file.
// Import the wasm-bindgen crate.
use wasm_bindgen::prelude::*;
// Our Add function
// wasm-pack requires "exported" functions
// to include #[wasm_bindgen]
/// Add two 32-bit integers; exported to JavaScript via wasm-bindgen.
#[wasm_bindgen]
pub fn add(a: i32, b: i32) -> i32 {
    // The trailing expression is the return value; an explicit `return`
    // at the end of a function is unidiomatic Rust.
    a + b
}
|
pub use super::blob::{
BlobBlockType, BlockList, BlockListRequired, BlockListSupport, BlockListType,
BlockListTypeRequired, BlockListTypeSupport,
};
pub use super::container::{
PublicAccess, PublicAccessRequired, PublicAccessSupport, StoredAccessPolicyListOption,
StoredAccessPolicyListSupport,
};
pub use super::Blob as BlobTrait;
pub use super::Container as ContainerTrait;
pub use crate::{RehydratePriority, RehydratePriorityOption, RehydratePrioritySupport};
|
use crate::error::*;
use crate::*;
use std::io::Write;
use tempfile::Builder;
// Marker type selecting the Windows MSI installation strategy.
pub struct Msi;
// Installer specialization: installs a Unity module from an `.msi` by running
// an external command (see the InstallHandler impl below).
pub type ModuleMsiInstaller = Installer<UnityModule, Msi, InstallerWithCommand>;
impl InstallHandler for ModuleMsiInstaller {
// Install the module by generating a temporary .cmd helper script that runs
// the configured command with `/i "<installer path>"` substituted in, then
// executing and deleting the script.
fn install_handler(&self) -> Result<()> {
let installer = self.installer();
debug!("install unity module from installer msi");
// 20 random bytes in the name avoid collisions between concurrent installs.
let mut install_helper = Builder::new().suffix(".cmd").rand_bytes(20).tempfile()?;
info!(
"create install helper script {}",
install_helper.path().display()
);
{
// Scope the mutable borrow so the file handle is released before
// the tempfile is converted into a path below.
let script = install_helper.as_file_mut();
// Quote the installer path so paths with spaces survive the shell.
let install_command = self
.cmd()
.replace("/i", &format!(r#"/i "{}""#, installer.display()));
trace!("install helper script content:");
writeln!(script, "ECHO OFF")?;
trace!("{}", &install_command);
writeln!(script, "{}", install_command)?;
}
info!("install {}", installer.display());
let installer_script = install_helper.into_temp_path();
self.install_from_temp_command(&installer_script)?;
// Remove the helper script explicitly rather than relying on Drop.
installer_script.close()?;
Ok(())
}
// Optionally move the installed module into place when a rename is configured.
fn after_install(&self) -> Result<()> {
if let Some((from, to)) = &self.rename() {
uvm_move_dir::move_dir(from, to).chain_err(|| "failed to rename installed module")?;
}
Ok(())
}
}
|
/// Minimal monad interface: `flatmap` feeds the contained `A` through `f`
/// to produce a new monadic value `C`.
/// NOTE(review): `f: fn(A) -> C` only accepts plain function pointers;
/// a generic `F: Fn(A) -> C` bound would also accept closures.
trait Monad<A> {
fn flatmap<B, C: Monad<B>>(&self, f: fn(A) -> C) -> C;
}
/// Smoke-test entry point: prints a fixed confirmation message.
fn main() {
    let message = "It compiled!";
    println!("{}", message);
}
|
//! Allows you to flip the outward face of a `Hittable`.
use crate::{
aabb::AABB,
hittable::{HitRecord, Hittable},
ray::Ray,
};
/// A "holder" that does nothing but hold a `Hittable` and flip its face.
#[derive(Debug, Clone)]
// Newtype wrapper over a boxed trait object; see the Hittable impl below.
pub struct FlipFace(Box<dyn Hittable>);
impl FlipFace {
/// Create a new holder for flipping a `Hittable`'s face.
pub fn new(p: Box<dyn Hittable>) -> Self {
Self(p)
}
}
impl Hittable for FlipFace {
    /// Delegate the hit test to the wrapped object and, on a hit, invert
    /// `front_face` so the outward side is reported flipped.
    fn hit(&self, ray: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord> {
        // `Option::map` replaces the manual `if let ... else { None }` pattern.
        self.0.hit(ray, t_min, t_max).map(|mut rec| {
            rec.front_face = !rec.front_face;
            rec
        })
    }
    /// Flipping faces does not change the geometry's bounds.
    fn bounding_box(&self, t0: f32, t1: f32) -> Option<AABB> {
        self.0.bounding_box(t0, t1)
    }
    /// Clone through the trait object.
    fn box_clone(&self) -> Box<dyn Hittable> {
        Box::new(self.clone())
    }
}
|
#![no_main]
#![no_std]
extern crate stm32f411e_disco;
use stm32f411e_disco::led;
#[no_mangle]
// Bare-metal entry point: cycles the four discovery-board LEDs forever
// (red -> blue -> green -> orange), each held on for a busy-wait delay.
pub fn main() -> ! {
// Hardware must be initialized before any LED is toggled.
unsafe { led::init(); }
let red_led = led::Colour::Red;
let blue_led = led::Colour::Blue;
let green_led = led::Colour::Green;
let orange_led = led::Colour::Orange;
loop {
red_led.on();
// NOTE(review): an empty counting loop may be optimized away entirely in
// release builds — confirm, or use a nop/volatile-based delay instead.
for _ in 0..10000 {}
red_led.off();
blue_led.on();
for _ in 0..10000 {}
blue_led.off();
green_led.on();
for _ in 0..10000 {}
green_led.off();
orange_led.on();
for _ in 0..10000 {}
orange_led.off();
}
}
|
/// Fixed-point LPC-style prediction demo: each sample from index 12 onward is
/// predicted as a coefficient-weighted sum of the preceding 12 samples
/// (scaled down by `qlp_shift`), then corrected in place by the stored delta.
/// Prints each prediction.
fn main() {
    // Initialize at declaration instead of the old declare-then-assign pattern.
    let coefficients: [i64; 12] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12];
    let qlp_shift: i16 = 3;
    // Samples 0..40, identical to the original push loop.
    let mut buffer: Vec<i32> = (0..40).collect();
    for i in 12..buffer.len() {
        // Dot product of the coefficients with the 12 preceding samples.
        let prediction = coefficients
            .iter()
            .zip(&buffer[i - 12..i])
            .map(|(&c, &s)| c * i64::from(s))
            .sum::<i64>()
            >> qlp_shift;
        let delta = buffer[i];
        // NOTE: `prediction as i32` truncates; values here stay well in range.
        buffer[i] = prediction as i32 + delta;
        println!("{}", prediction)
    }
}
|
use amethyst::{
core::transform::components::{Parent, Transform},
ecs::prelude::Entity,
prelude::*,
renderer::{Camera, Projection},
};
/// Spawn an orthographic camera entity parented to `parent`, centred on the
/// origin and spanning `view_dim` (width, height) pixels, placed at z = 100.
pub fn init_camera(world: &mut World, view_dim: (u32, u32), parent: Entity) {
    let half_width = view_dim.0 as f32 / 2.0;
    let half_height = view_dim.1 as f32 / 2.0;
    // left, right, top, bottom — symmetric about the origin.
    let projection = Projection::orthographic(
        -half_width,
        half_width,
        half_height,
        -half_height,
    );
    let mut transform = Transform::default();
    transform.translate_z(100.0);
    world
        .create_entity()
        .with(Camera::from(projection))
        .with(transform)
        .with(Parent { entity: parent })
        .build();
}
|
#[doc = "Reader of register IC_INTR_STAT"]
// Register-level reader for IC_INTR_STAT; the R_*_R types below read its fields.
pub type R = crate::R<u32, super::IC_INTR_STAT>;
// Generated-style reader support for the R_MASTER_ON_HOLD bit (0 = inactive, 1 = active).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_MASTER_ON_HOLD bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_MASTER_ON_HOLD_A {
#[doc = "0: R_MASTER_ON_HOLD interrupt is inactive"]
INACTIVE = 0,
#[doc = "1: R_MASTER_ON_HOLD interrupt is active"]
ACTIVE = 1,
}
impl From<R_MASTER_ON_HOLD_A> for bool {
#[inline(always)]
fn from(variant: R_MASTER_ON_HOLD_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `R_MASTER_ON_HOLD`"]
pub type R_MASTER_ON_HOLD_R = crate::R<bool, R_MASTER_ON_HOLD_A>;
impl R_MASTER_ON_HOLD_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> R_MASTER_ON_HOLD_A {
match self.bits {
false => R_MASTER_ON_HOLD_A::INACTIVE,
true => R_MASTER_ON_HOLD_A::ACTIVE,
}
}
#[doc = "Checks if the value of the field is `INACTIVE`"]
#[inline(always)]
pub fn is_inactive(&self) -> bool {
*self == R_MASTER_ON_HOLD_A::INACTIVE
}
#[doc = "Checks if the value of the field is `ACTIVE`"]
#[inline(always)]
pub fn is_active(&self) -> bool {
*self == R_MASTER_ON_HOLD_A::ACTIVE
}
}
// Generated-style reader support for the R_RESTART_DET bit (0 = inactive, 1 = active).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_RESTART_DET bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_RESTART_DET_A {
#[doc = "0: R_RESTART_DET interrupt is inactive"]
INACTIVE = 0,
#[doc = "1: R_RESTART_DET interrupt is active"]
ACTIVE = 1,
}
impl From<R_RESTART_DET_A> for bool {
#[inline(always)]
fn from(variant: R_RESTART_DET_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `R_RESTART_DET`"]
pub type R_RESTART_DET_R = crate::R<bool, R_RESTART_DET_A>;
impl R_RESTART_DET_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> R_RESTART_DET_A {
match self.bits {
false => R_RESTART_DET_A::INACTIVE,
true => R_RESTART_DET_A::ACTIVE,
}
}
#[doc = "Checks if the value of the field is `INACTIVE`"]
#[inline(always)]
pub fn is_inactive(&self) -> bool {
*self == R_RESTART_DET_A::INACTIVE
}
#[doc = "Checks if the value of the field is `ACTIVE`"]
#[inline(always)]
pub fn is_active(&self) -> bool {
*self == R_RESTART_DET_A::ACTIVE
}
}
// Generated-style reader support for the R_GEN_CALL bit (0 = inactive, 1 = active).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_GEN_CALL bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_GEN_CALL_A {
#[doc = "0: R_GEN_CALL interrupt is inactive"]
INACTIVE = 0,
#[doc = "1: R_GEN_CALL interrupt is active"]
ACTIVE = 1,
}
impl From<R_GEN_CALL_A> for bool {
#[inline(always)]
fn from(variant: R_GEN_CALL_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `R_GEN_CALL`"]
pub type R_GEN_CALL_R = crate::R<bool, R_GEN_CALL_A>;
impl R_GEN_CALL_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> R_GEN_CALL_A {
match self.bits {
false => R_GEN_CALL_A::INACTIVE,
true => R_GEN_CALL_A::ACTIVE,
}
}
#[doc = "Checks if the value of the field is `INACTIVE`"]
#[inline(always)]
pub fn is_inactive(&self) -> bool {
*self == R_GEN_CALL_A::INACTIVE
}
#[doc = "Checks if the value of the field is `ACTIVE`"]
#[inline(always)]
pub fn is_active(&self) -> bool {
*self == R_GEN_CALL_A::ACTIVE
}
}
// Generated-style reader support for the R_START_DET bit (0 = inactive, 1 = active).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_START_DET bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_START_DET_A {
#[doc = "0: R_START_DET interrupt is inactive"]
INACTIVE = 0,
#[doc = "1: R_START_DET interrupt is active"]
ACTIVE = 1,
}
impl From<R_START_DET_A> for bool {
#[inline(always)]
fn from(variant: R_START_DET_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `R_START_DET`"]
pub type R_START_DET_R = crate::R<bool, R_START_DET_A>;
impl R_START_DET_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> R_START_DET_A {
match self.bits {
false => R_START_DET_A::INACTIVE,
true => R_START_DET_A::ACTIVE,
}
}
#[doc = "Checks if the value of the field is `INACTIVE`"]
#[inline(always)]
pub fn is_inactive(&self) -> bool {
*self == R_START_DET_A::INACTIVE
}
#[doc = "Checks if the value of the field is `ACTIVE`"]
#[inline(always)]
pub fn is_active(&self) -> bool {
*self == R_START_DET_A::ACTIVE
}
}
// Generated-style reader support for the R_STOP_DET bit (0 = inactive, 1 = active).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_STOP_DET bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_STOP_DET_A {
#[doc = "0: R_STOP_DET interrupt is inactive"]
INACTIVE = 0,
#[doc = "1: R_STOP_DET interrupt is active"]
ACTIVE = 1,
}
impl From<R_STOP_DET_A> for bool {
#[inline(always)]
fn from(variant: R_STOP_DET_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `R_STOP_DET`"]
pub type R_STOP_DET_R = crate::R<bool, R_STOP_DET_A>;
impl R_STOP_DET_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> R_STOP_DET_A {
match self.bits {
false => R_STOP_DET_A::INACTIVE,
true => R_STOP_DET_A::ACTIVE,
}
}
#[doc = "Checks if the value of the field is `INACTIVE`"]
#[inline(always)]
pub fn is_inactive(&self) -> bool {
*self == R_STOP_DET_A::INACTIVE
}
#[doc = "Checks if the value of the field is `ACTIVE`"]
#[inline(always)]
pub fn is_active(&self) -> bool {
*self == R_STOP_DET_A::ACTIVE
}
}
// Generated-style reader support for the R_ACTIVITY bit (0 = inactive, 1 = active).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_ACTIVITY bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_ACTIVITY_A {
#[doc = "0: R_ACTIVITY interrupt is inactive"]
INACTIVE = 0,
#[doc = "1: R_ACTIVITY interrupt is active"]
ACTIVE = 1,
}
impl From<R_ACTIVITY_A> for bool {
#[inline(always)]
fn from(variant: R_ACTIVITY_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `R_ACTIVITY`"]
pub type R_ACTIVITY_R = crate::R<bool, R_ACTIVITY_A>;
impl R_ACTIVITY_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> R_ACTIVITY_A {
match self.bits {
false => R_ACTIVITY_A::INACTIVE,
true => R_ACTIVITY_A::ACTIVE,
}
}
#[doc = "Checks if the value of the field is `INACTIVE`"]
#[inline(always)]
pub fn is_inactive(&self) -> bool {
*self == R_ACTIVITY_A::INACTIVE
}
#[doc = "Checks if the value of the field is `ACTIVE`"]
#[inline(always)]
pub fn is_active(&self) -> bool {
*self == R_ACTIVITY_A::ACTIVE
}
}
// Generated-style reader support for the R_RX_DONE bit (0 = inactive, 1 = active).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_RX_DONE bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_RX_DONE_A {
#[doc = "0: R_RX_DONE interrupt is inactive"]
INACTIVE = 0,
#[doc = "1: R_RX_DONE interrupt is active"]
ACTIVE = 1,
}
impl From<R_RX_DONE_A> for bool {
#[inline(always)]
fn from(variant: R_RX_DONE_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `R_RX_DONE`"]
pub type R_RX_DONE_R = crate::R<bool, R_RX_DONE_A>;
impl R_RX_DONE_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> R_RX_DONE_A {
match self.bits {
false => R_RX_DONE_A::INACTIVE,
true => R_RX_DONE_A::ACTIVE,
}
}
#[doc = "Checks if the value of the field is `INACTIVE`"]
#[inline(always)]
pub fn is_inactive(&self) -> bool {
*self == R_RX_DONE_A::INACTIVE
}
#[doc = "Checks if the value of the field is `ACTIVE`"]
#[inline(always)]
pub fn is_active(&self) -> bool {
*self == R_RX_DONE_A::ACTIVE
}
}
// Generated-style reader support for the R_TX_ABRT bit (0 = inactive, 1 = active).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_TX_ABRT bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_TX_ABRT_A {
#[doc = "0: R_TX_ABRT interrupt is inactive"]
INACTIVE = 0,
#[doc = "1: R_TX_ABRT interrupt is active"]
ACTIVE = 1,
}
impl From<R_TX_ABRT_A> for bool {
#[inline(always)]
fn from(variant: R_TX_ABRT_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `R_TX_ABRT`"]
pub type R_TX_ABRT_R = crate::R<bool, R_TX_ABRT_A>;
impl R_TX_ABRT_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> R_TX_ABRT_A {
match self.bits {
false => R_TX_ABRT_A::INACTIVE,
true => R_TX_ABRT_A::ACTIVE,
}
}
#[doc = "Checks if the value of the field is `INACTIVE`"]
#[inline(always)]
pub fn is_inactive(&self) -> bool {
*self == R_TX_ABRT_A::INACTIVE
}
#[doc = "Checks if the value of the field is `ACTIVE`"]
#[inline(always)]
pub fn is_active(&self) -> bool {
*self == R_TX_ABRT_A::ACTIVE
}
}
// Generated-style reader support for the R_RD_REQ bit (0 = inactive, 1 = active).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_RD_REQ bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_RD_REQ_A {
#[doc = "0: R_RD_REQ interrupt is inactive"]
INACTIVE = 0,
#[doc = "1: R_RD_REQ interrupt is active"]
ACTIVE = 1,
}
impl From<R_RD_REQ_A> for bool {
#[inline(always)]
fn from(variant: R_RD_REQ_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `R_RD_REQ`"]
pub type R_RD_REQ_R = crate::R<bool, R_RD_REQ_A>;
impl R_RD_REQ_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> R_RD_REQ_A {
match self.bits {
false => R_RD_REQ_A::INACTIVE,
true => R_RD_REQ_A::ACTIVE,
}
}
#[doc = "Checks if the value of the field is `INACTIVE`"]
#[inline(always)]
pub fn is_inactive(&self) -> bool {
*self == R_RD_REQ_A::INACTIVE
}
#[doc = "Checks if the value of the field is `ACTIVE`"]
#[inline(always)]
pub fn is_active(&self) -> bool {
*self == R_RD_REQ_A::ACTIVE
}
}
// Generated field-reader plumbing for R_TX_EMPTY / R_TX_OVER / R_RX_FULL
// (same enum + From + reader-alias + accessor pattern as the fields above).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_TX_EMPTY bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_TX_EMPTY_A {
    #[doc = "0: R_TX_EMPTY interrupt is inactive"]
    INACTIVE = 0,
    #[doc = "1: R_TX_EMPTY interrupt is active"]
    ACTIVE = 1,
}
impl From<R_TX_EMPTY_A> for bool {
    #[inline(always)]
    fn from(variant: R_TX_EMPTY_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `R_TX_EMPTY`"]
pub type R_TX_EMPTY_R = crate::R<bool, R_TX_EMPTY_A>;
impl R_TX_EMPTY_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> R_TX_EMPTY_A {
        match self.bits {
            false => R_TX_EMPTY_A::INACTIVE,
            true => R_TX_EMPTY_A::ACTIVE,
        }
    }
    #[doc = "Checks if the value of the field is `INACTIVE`"]
    #[inline(always)]
    pub fn is_inactive(&self) -> bool {
        *self == R_TX_EMPTY_A::INACTIVE
    }
    #[doc = "Checks if the value of the field is `ACTIVE`"]
    #[inline(always)]
    pub fn is_active(&self) -> bool {
        *self == R_TX_EMPTY_A::ACTIVE
    }
}
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_TX_OVER bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_TX_OVER_A {
    #[doc = "0: R_TX_OVER interrupt is inactive"]
    INACTIVE = 0,
    #[doc = "1: R_TX_OVER interrupt is active"]
    ACTIVE = 1,
}
impl From<R_TX_OVER_A> for bool {
    #[inline(always)]
    fn from(variant: R_TX_OVER_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `R_TX_OVER`"]
pub type R_TX_OVER_R = crate::R<bool, R_TX_OVER_A>;
impl R_TX_OVER_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> R_TX_OVER_A {
        match self.bits {
            false => R_TX_OVER_A::INACTIVE,
            true => R_TX_OVER_A::ACTIVE,
        }
    }
    #[doc = "Checks if the value of the field is `INACTIVE`"]
    #[inline(always)]
    pub fn is_inactive(&self) -> bool {
        *self == R_TX_OVER_A::INACTIVE
    }
    #[doc = "Checks if the value of the field is `ACTIVE`"]
    #[inline(always)]
    pub fn is_active(&self) -> bool {
        *self == R_TX_OVER_A::ACTIVE
    }
}
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_RX_FULL bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_RX_FULL_A {
    #[doc = "0: R_RX_FULL interrupt is inactive"]
    INACTIVE = 0,
    #[doc = "1: R_RX_FULL interrupt is active"]
    ACTIVE = 1,
}
impl From<R_RX_FULL_A> for bool {
    #[inline(always)]
    fn from(variant: R_RX_FULL_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `R_RX_FULL`"]
pub type R_RX_FULL_R = crate::R<bool, R_RX_FULL_A>;
impl R_RX_FULL_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> R_RX_FULL_A {
        match self.bits {
            false => R_RX_FULL_A::INACTIVE,
            true => R_RX_FULL_A::ACTIVE,
        }
    }
    #[doc = "Checks if the value of the field is `INACTIVE`"]
    #[inline(always)]
    pub fn is_inactive(&self) -> bool {
        *self == R_RX_FULL_A::INACTIVE
    }
    #[doc = "Checks if the value of the field is `ACTIVE`"]
    #[inline(always)]
    pub fn is_active(&self) -> bool {
        *self == R_RX_FULL_A::ACTIVE
    }
}
// Generated field-reader plumbing for R_RX_OVER / R_RX_UNDER (same pattern
// as the fields above).
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_RX_OVER bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_RX_OVER_A {
    #[doc = "0: R_RX_OVER interrupt is inactive"]
    INACTIVE = 0,
    #[doc = "1: R_RX_OVER interrupt is active"]
    ACTIVE = 1,
}
impl From<R_RX_OVER_A> for bool {
    #[inline(always)]
    fn from(variant: R_RX_OVER_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `R_RX_OVER`"]
pub type R_RX_OVER_R = crate::R<bool, R_RX_OVER_A>;
impl R_RX_OVER_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> R_RX_OVER_A {
        match self.bits {
            false => R_RX_OVER_A::INACTIVE,
            true => R_RX_OVER_A::ACTIVE,
        }
    }
    #[doc = "Checks if the value of the field is `INACTIVE`"]
    #[inline(always)]
    pub fn is_inactive(&self) -> bool {
        *self == R_RX_OVER_A::INACTIVE
    }
    #[doc = "Checks if the value of the field is `ACTIVE`"]
    #[inline(always)]
    pub fn is_active(&self) -> bool {
        *self == R_RX_OVER_A::ACTIVE
    }
}
#[doc = "See IC_RAW_INTR_STAT for a detailed description of R_RX_UNDER bit.\\n\\n Reset value: 0x0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum R_RX_UNDER_A {
    #[doc = "0: RX_UNDER interrupt is inactive"]
    INACTIVE = 0,
    #[doc = "1: RX_UNDER interrupt is active"]
    ACTIVE = 1,
}
impl From<R_RX_UNDER_A> for bool {
    #[inline(always)]
    fn from(variant: R_RX_UNDER_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `R_RX_UNDER`"]
pub type R_RX_UNDER_R = crate::R<bool, R_RX_UNDER_A>;
impl R_RX_UNDER_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> R_RX_UNDER_A {
        match self.bits {
            false => R_RX_UNDER_A::INACTIVE,
            true => R_RX_UNDER_A::ACTIVE,
        }
    }
    #[doc = "Checks if the value of the field is `INACTIVE`"]
    #[inline(always)]
    pub fn is_inactive(&self) -> bool {
        *self == R_RX_UNDER_A::INACTIVE
    }
    #[doc = "Checks if the value of the field is `ACTIVE`"]
    #[inline(always)]
    pub fn is_active(&self) -> bool {
        *self == R_RX_UNDER_A::ACTIVE
    }
}
// Whole-register reader: one accessor per interrupt-status bit. Each getter
// shifts the raw register word right by the field's bit position and masks
// the low bit, wrapping the result in that field's typed reader.
impl R {
    #[doc = "Bit 13 - See IC_RAW_INTR_STAT for a detailed description of R_MASTER_ON_HOLD bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_master_on_hold(&self) -> R_MASTER_ON_HOLD_R {
        R_MASTER_ON_HOLD_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bit 12 - See IC_RAW_INTR_STAT for a detailed description of R_RESTART_DET bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_restart_det(&self) -> R_RESTART_DET_R {
        R_RESTART_DET_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 11 - See IC_RAW_INTR_STAT for a detailed description of R_GEN_CALL bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_gen_call(&self) -> R_GEN_CALL_R {
        R_GEN_CALL_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 10 - See IC_RAW_INTR_STAT for a detailed description of R_START_DET bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_start_det(&self) -> R_START_DET_R {
        R_START_DET_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 9 - See IC_RAW_INTR_STAT for a detailed description of R_STOP_DET bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_stop_det(&self) -> R_STOP_DET_R {
        R_STOP_DET_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 8 - See IC_RAW_INTR_STAT for a detailed description of R_ACTIVITY bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_activity(&self) -> R_ACTIVITY_R {
        R_ACTIVITY_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 7 - See IC_RAW_INTR_STAT for a detailed description of R_RX_DONE bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_rx_done(&self) -> R_RX_DONE_R {
        R_RX_DONE_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 6 - See IC_RAW_INTR_STAT for a detailed description of R_TX_ABRT bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_tx_abrt(&self) -> R_TX_ABRT_R {
        R_TX_ABRT_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 5 - See IC_RAW_INTR_STAT for a detailed description of R_RD_REQ bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_rd_req(&self) -> R_RD_REQ_R {
        R_RD_REQ_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 4 - See IC_RAW_INTR_STAT for a detailed description of R_TX_EMPTY bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_tx_empty(&self) -> R_TX_EMPTY_R {
        R_TX_EMPTY_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 3 - See IC_RAW_INTR_STAT for a detailed description of R_TX_OVER bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_tx_over(&self) -> R_TX_OVER_R {
        R_TX_OVER_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 2 - See IC_RAW_INTR_STAT for a detailed description of R_RX_FULL bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_rx_full(&self) -> R_RX_FULL_R {
        R_RX_FULL_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 1 - See IC_RAW_INTR_STAT for a detailed description of R_RX_OVER bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_rx_over(&self) -> R_RX_OVER_R {
        R_RX_OVER_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 0 - See IC_RAW_INTR_STAT for a detailed description of R_RX_UNDER bit.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn r_rx_under(&self) -> R_RX_UNDER_R {
        R_RX_UNDER_R::new((self.bits & 0x01) != 0)
    }
}
|
#[macro_use]
extern crate log;
pub mod storage_manager;
|
use serde::{Deserialize, Serialize};
/// Provenance ("source") metadata exchanged over the wire in a compact JSON
/// form (short field names via serde renames). All fields are optional.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Source {
    // NOTE(review): exact semantics of `tid` are not visible here — presumably
    // a trace/transaction identifier; confirm against the producer.
    pub tid: Option<String>,
    // Supply-chain start time in milliseconds; wire name `ts`. Using the
    // single-form `rename = "…"` since serialize and deserialize names match.
    #[serde(rename = "ts")]
    pub supply_chain_start_timestamp_ms: Option<i64>,
    // Digital signature; wire name `ds`.
    #[serde(rename = "ds")]
    pub digital_signature: Option<String>,
    pub dsmap: Option<String>,
    pub cert: Option<String>,
    pub digest: Option<String>,
    pub pchain: Option<String>,
    // Extension point for forward-compatible additions.
    pub ext: Option<SourceExt>,
}
/// Extension container for `Source`; currently empty, reserved for
/// forward-compatible additions to the wire format.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct SourceExt {}
|
use crate::{cmd::*, keypair::Keypair, service::api, Error, PublicKey, Result, Settings};
use helium_crypto::Sign;
use helium_proto::{BlockchainTxn, BlockchainTxnAddGatewayV1, Message, Txn};
use serde_derive::Deserialize;
use serde_json::json;
use std::{fmt, str::FromStr};
use structopt::StructOpt;
/// Construct an add gateway transaction for this gateway.
// NOTE: the `///` comments on the fields below double as structopt CLI help
// text — changing them changes the command's --help output.
#[derive(Debug, StructOpt)]
pub struct Cmd {
    /// The target owner account of this gateway
    #[structopt(long)]
    owner: PublicKey,
    /// The account that will pay account for this addition
    #[structopt(long)]
    payer: PublicKey,
    /// The staking mode for adding the light gateway
    #[structopt(long, default_value = "dataonly")]
    mode: StakingMode,
}
// Byte size of the placeholder written into each signature field when sizing
// a txn for fee calculation. NOTE(review): assumes all three signatures are
// 64 bytes — confirm against the signature scheme in use.
const TXN_FEE_SIGNATURE_SIZE: usize = 64;
impl Cmd {
    /// Builds, fee-sizes, gateway-signs, and prints an add-gateway txn.
    ///
    /// Flow: fetch chain fee config -> build txn with zero fee -> compute the
    /// fee from the serialized envelope size (with placeholder signatures) ->
    /// sign with this gateway's keypair -> print as JSON.
    pub async fn run(&self, settings: Settings) -> Result {
        let public_key = &settings.keypair.public_key();
        // Chain vars (fee multiplier, staking fees) come from the blockchain API.
        let config = TxnFeeConfig::for_address(public_key).await?;
        let mut txn = BlockchainTxnAddGatewayV1 {
            gateway: public_key.to_vec(),
            owner: self.owner.to_vec(),
            payer: self.payer.to_vec(),
            fee: 0,
            staking_fee: config.get_staking_fee(&self.mode),
            owner_signature: vec![],
            gateway_signature: vec![],
            payer_signature: vec![],
        };
        // Fee depends on serialized size; txn_fee sizes it with fixed-width
        // placeholder signatures so the fee is stable once signed.
        txn.fee = txn_fee(&config, &txn)?;
        // Only the gateway signs here; owner/payer signatures are added elsewhere.
        txn.gateway_signature = txn_sign(&settings.keypair, &txn)?;
        print_txn(&self.mode, &txn)
    }
}
/// Computes the DC fee for `txn` from its serialized envelope size, with all
/// three signature fields replaced by fixed-size placeholders so the fee does
/// not change once real signatures are attached.
fn txn_fee(config: &TxnFeeConfig, txn: &BlockchainTxnAddGatewayV1) -> Result<u64> {
    let placeholder = vec![0u8; TXN_FEE_SIGNATURE_SIZE];
    let mut sized = txn.clone();
    sized.owner_signature = placeholder.clone();
    sized.payer_signature = placeholder.clone();
    sized.gateway_signature = placeholder;
    let envelope_len = to_envelope_vec(&sized)?.len();
    Ok(config.get_txn_fee(envelope_len))
}
/// Signs the transaction's canonical bytes with `keypair`. All signature
/// fields are cleared first so the signed payload is independent of any
/// signatures already present.
fn txn_sign(keypair: &Keypair, txn: &BlockchainTxnAddGatewayV1) -> Result<Vec<u8>> {
    let mut unsigned = txn.clone();
    unsigned.owner_signature = Vec::new();
    unsigned.payer_signature = Vec::new();
    unsigned.gateway_signature = Vec::new();
    let payload = to_vec(&unsigned)?;
    Ok(keypair.sign(&payload)?)
}
/// Serializes `txn` wrapped in a `BlockchainTxn` envelope to protobuf bytes.
fn to_envelope_vec(txn: &BlockchainTxnAddGatewayV1) -> Result<Vec<u8>> {
    let wrapped = BlockchainTxn {
        txn: Some(Txn::AddGateway(txn.clone())),
    };
    let mut bytes = Vec::new();
    wrapped.encode(&mut bytes)?;
    Ok(bytes)
}
/// Serializes the bare (un-enveloped) transaction to protobuf bytes.
fn to_vec(txn: &BlockchainTxnAddGatewayV1) -> Result<Vec<u8>> {
    let mut bytes = Vec::new();
    txn.encode(&mut bytes)?;
    Ok(bytes)
}
/// Gateway staking mode; selects which on-chain staking fee applies.
#[derive(Debug)]
enum StakingMode {
    DataOnly,
    Light,
    Full,
}
impl fmt::Display for StakingMode {
    /// Renders the lowercase name used on the wire and on the CLI.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            Self::DataOnly => "dataonly",
            Self::Full => "full",
            Self::Light => "light",
        };
        f.write_str(name)
    }
}
impl FromStr for StakingMode {
    type Err = Error;
    /// Parses a mode name case-insensitively: "light", "full", or "dataonly".
    fn from_str(v: &str) -> Result<Self> {
        let lowered = v.to_lowercase();
        match lowered.as_str() {
            "dataonly" => Ok(Self::DataOnly),
            "full" => Ok(Self::Full),
            "light" => Ok(Self::Light),
            _ => Err(Error::custom(format!("invalid staking mode {}", v))),
        }
    }
}
/// Subset of chain variables relevant to fee calculation, fetched from the
/// blockchain API's `/vars` endpoint. Staking fees missing from the response
/// fall back to the `default_*_staking_fee` constants below.
#[derive(Clone, Deserialize, Debug)]
pub struct TxnFeeConfig {
    // whether transaction fees are active
    txn_fees: bool,
    // a multiplier which will be applied to the txn fee of all txns, in order
    // to make their DC costs meaningful
    txn_fee_multiplier: u64,
    // the staking fee in DC for adding a gateway
    #[serde(default = "TxnFeeConfig::default_full_staking_fee")]
    staking_fee_txn_add_gateway_v1: u64,
    // the staking fee in DC for adding a light gateway
    #[serde(default = "TxnFeeConfig::default_light_staking_fee")]
    staking_fee_txn_add_light_gateway_v1: u64,
    // the staking fee in DC for adding a data only gateway
    #[serde(default = "TxnFeeConfig::default_dataonly_staking_fee")]
    staking_fee_txn_add_dataonly_gateway_v1: u64,
}
impl TxnFeeConfig {
    // Fallback staking fees (in DC) used when the chain vars omit them.
    fn default_full_staking_fee() -> u64 {
        4000000
    }
    fn default_dataonly_staking_fee() -> u64 {
        1000000
    }
    fn default_light_staking_fee() -> u64 {
        4000000
    }
    /// Fetches the current chain vars for the network `address` belongs to.
    async fn for_address(address: &PublicKey) -> Result<Self> {
        let client = api::Service::blockchain(address.network);
        let config: Self = client.get("/vars").await?;
        Ok(config)
    }
    /// Returns the staking fee (DC) for the chosen staking mode.
    fn get_staking_fee(&self, staking_mode: &StakingMode) -> u64 {
        match staking_mode {
            StakingMode::Full => self.staking_fee_txn_add_gateway_v1,
            StakingMode::DataOnly => self.staking_fee_txn_add_dataonly_gateway_v1,
            StakingMode::Light => self.staking_fee_txn_add_light_gateway_v1,
        }
    }
    /// Fee for a serialized payload of `payload_size` bytes:
    /// ceil(payload_size / dc_payload_size) * multiplier, minimum one unit.
    fn get_txn_fee(&self, payload_size: usize) -> u64 {
        // 24 bytes per DC when fees are enabled; effectively per-byte when
        // disabled. NOTE(review): the 24-byte constant should mirror the
        // chain's dc_payload_size var — confirm.
        let dc_payload_size = if self.txn_fees { 24 } else { 1 };
        let fee = if payload_size <= dc_payload_size {
            1
        } else {
            // integer div/ceil from: https://stackoverflow.com/a/2745086
            ((payload_size + dc_payload_size - 1) / dc_payload_size) as u64
        };
        fee * self.txn_fee_multiplier
    }
}
/// Prints the transaction as JSON: decoded key fields plus the base64-encoded
/// protobuf envelope ready for submission.
fn print_txn(mode: &StakingMode, txn: &BlockchainTxnAddGatewayV1) -> Result {
    let table = json!({
        "mode": mode.to_string(),
        "address": PublicKey::from_bytes(&txn.gateway)?.to_string(),
        "payer": PublicKey::from_bytes(&txn.payer)?.to_string(),
        "owner": PublicKey::from_bytes(&txn.owner)?.to_string(),
        "fee": txn.fee,
        "staking fee": txn.staking_fee,
        "txn": base64::encode_config(&to_envelope_vec(txn)?, base64::STANDARD),
    });
    print_json(&table)
}
|
use lazy_static::lazy_static;
use lox_proc_macros::U8Enum;
#[cfg(feature = "debug_print_code")]
use crate::debug::disassemble_chunk;
use crate::{chunk::{Chunk, OpCode}, error::{CompileError, ErrorInfo}, scanner::{Scanner, Token, TokenKind}, value::{Objects, Value}};
/// Single-pass Lox bytecode compiler: drives the Pratt parser and writes
/// opcodes and constants into `chunk`.
pub struct Compiler<'source, 'objects> {
    chunk: Chunk,
    parser: Parser<'source>,
    // Interned heap objects (e.g. strings) shared with the VM.
    objects: &'objects Objects,
}
impl<'source, 'objects> Compiler<'source, 'objects> {
    /// Compiles `source` (currently a single expression) into a chunk that
    /// ends with a Return opcode.
    pub fn compile(source: String, objects: &'objects Objects) -> Result<Chunk, CompileError> {
        let scanner = Scanner::new(&source);
        let mut compiler = Compiler {
            chunk: Chunk::new(),
            parser: Parser::new(&scanner),
            objects,
        };
        compiler.expression();
        compiler
            .parser
            .consume(TokenKind::Eof, "Expect end of expression.");
        compiler.end();
        Ok(compiler.chunk)
    }
    // Writes one byte, tagged with the line of the last-consumed token so the
    // VM can report runtime errors with source lines.
    fn emit_byte(&mut self, byte: u8) {
        self.chunk.write(byte, self.parser.previous.line);
    }
    fn emit_bytes(&mut self, byte1: u8, byte2: u8) {
        self.emit_byte(byte1);
        self.emit_byte(byte2);
    }
    // Emits OP_CONSTANT followed by the constant-table index of `value`.
    fn emit_constant(&mut self, value: Value) {
        let constant = self.make_constant(value);
        self.emit_bytes(OpCode::Constant.as_u8(), constant);
    }
    // Adds `value` to the constant table. Indices are single bytes, so more
    // than u8::MAX constants in one chunk is currently unhandled (todo!).
    fn make_constant(&mut self, value: Value) -> u8 {
        let constant = self.chunk.add_constant(value);
        if constant > u8::MAX as usize {
            todo!("Too many constants in one chunk.");
            // return 0;
        }
        constant as u8
    }
    fn expression(&mut self) {
        self.parse_precedence(Precedence::Assignment);
    }
    // Core Pratt loop: run the prefix rule for the just-consumed token, then
    // keep consuming infix operators while their precedence >= `precedence`.
    fn parse_precedence(&mut self, precedence: Precedence) {
        self.parser.advance();
        let prefix_rule = get_rule(self.parser.previous.kind).prefix;
        if let Some(prefix_rule) = prefix_rule {
            prefix_rule(self);
        } else {
            todo!("Expect expression.");
            // return;
        }
        while precedence.as_u8() <= get_rule(self.parser.current.kind).precedence.as_u8() {
            self.parser.advance();
            let infix_rule = get_rule(self.parser.previous.kind).infix;
            // An infix rule must exist here: only tokens registered with an
            // infix entry have a precedence above None.
            (infix_rule.unwrap())(self);
        }
    }
    // Finishes the chunk with Return and optionally disassembles it.
    fn end(&mut self) {
        self.emit_byte(OpCode::Return.as_u8());
        #[cfg(feature = "debug_print_code")]
        {
            if
            /* !self.parser.had_error */
            true {
                disassemble_chunk(&self.chunk, "code");
            }
        }
    }
}
/// Compiles a parenthesized expression; the '(' has already been consumed,
/// so compile the inner expression and require the closing ')'.
fn grouping(compiler: &mut Compiler) {
    compiler.expression();
    let parser = &mut compiler.parser;
    parser.consume(TokenKind::RightParen, "Expect ')' after expression.");
}
/// Emits the opcode for a literal keyword token (`false`, `true`, `nil`).
fn literal(compiler: &mut Compiler) {
    let kind = compiler.parser.previous.kind;
    let opcode = match kind {
        TokenKind::Nil => OpCode::Nil,
        TokenKind::True => OpCode::True,
        TokenKind::False => OpCode::False,
        _ => unreachable!(
            "Literal will always be false, true, or nil: {:?}",
            compiler.parser.previous
        ),
    };
    compiler.emit_byte(opcode.as_u8());
}
/// Parses the previous token's lexeme as an f64 and emits it as a constant.
fn number(compiler: &mut Compiler) {
    let lexeme = compiler.parser.previous.lexeme;
    let value: f64 = lexeme.parse().expect("number expects a valid number token");
    compiler.emit_constant(Value::Number(value));
}
/// Emits a string constant: trims the surrounding quote characters from the
/// lexeme and interns the contents in the object store.
fn string(compiler: &mut Compiler) {
    let s = compiler.parser.previous.lexeme;
    // Byte-index slice strips the leading and trailing '"' (one byte each).
    let obj = compiler.objects.string(&s[1..s.len() - 1]);
    compiler.emit_constant(Value::Obj(obj));
}
/// Compiles a unary operator: the operand is compiled first (at Unary
/// precedence), then the operator's opcode is emitted.
fn unary(compiler: &mut Compiler) {
    let op_kind = compiler.parser.previous.kind;
    // Compile the operand before emitting the operator.
    compiler.parse_precedence(Precedence::Unary);
    let opcode = match op_kind {
        TokenKind::Bang => OpCode::Not,
        TokenKind::Minus => OpCode::Negate,
        any => unreachable!("Can't parse operator kind '{:?}' as unary.", any),
    };
    compiler.emit_byte(opcode.as_u8());
}
/// Compiles the right operand of a binary operator, then emits its opcode.
/// The right side is parsed one precedence level higher, which makes every
/// binary operator left-associative.
fn binary(compiler: &mut Compiler) {
    let operator_kind = compiler.parser.previous.kind;
    let rule = get_rule(operator_kind);
    // +1: bind the right operand tighter so `a - b - c` is `(a - b) - c`.
    compiler.parse_precedence(Precedence::from_u8(rule.precedence.as_u8() + 1).unwrap());
    match operator_kind {
        TokenKind::BangEqual => compiler.emit_byte(OpCode::NotEqual.as_u8()),
        TokenKind::EqualEqual => compiler.emit_byte(OpCode::Equal.as_u8()),
        TokenKind::Greater => compiler.emit_byte(OpCode::Greater.as_u8()),
        TokenKind::GreaterEqual => compiler.emit_byte(OpCode::GreaterEqual.as_u8()),
        TokenKind::Less => compiler.emit_byte(OpCode::Less.as_u8()),
        TokenKind::LessEqual => compiler.emit_byte(OpCode::LessEqual.as_u8()),
        TokenKind::Plus => compiler.emit_byte(OpCode::Add.as_u8()),
        TokenKind::Minus => compiler.emit_byte(OpCode::Subtract.as_u8()),
        TokenKind::Star => compiler.emit_byte(OpCode::Multiply.as_u8()),
        TokenKind::Slash => compiler.emit_byte(OpCode::Divide.as_u8()),
        any => unreachable!("Can't parse operator kind '{:?}' as binary.", any),
    }
}
/// Token cursor over the scanner: `previous` is the token just consumed,
/// `current` is the one-token lookahead; `panic_mode` is set after an error
/// has been reported.
struct Parser<'source> {
    scanner: &'source Scanner<'source>,
    current: Token<'source>,
    previous: Token<'source>,
    panic_mode: bool,
}
impl<'source> Parser<'source> {
    /// Creates a parser primed with the first scanned token; both `current`
    /// and `previous` start out as that same token.
    pub fn new(scanner: &'source Scanner<'source>) -> Self {
        let token = scanner.scan();
        Self {
            scanner,
            current: token.clone(),
            previous: token,
            panic_mode: false,
        }
    }
    /// Shifts `current` into `previous` and scans the next non-error token,
    /// reporting (to stderr) and skipping any error tokens encountered.
    pub fn advance(&mut self) {
        self.previous = self.current.clone();
        loop {
            self.current = self.scanner.scan();
            if self.current.kind != TokenKind::Error {
                break;
            }
            self.panic_mode = true;
            eprintln!(
                "{}",
                CompileError::ScanError(ErrorInfo::error(&self.current, ""))
            )
        }
    }
    /// Consumes the current token if it matches `kind`; otherwise reports a
    /// parse error (without consuming) and enters panic mode.
    pub fn consume(&mut self, kind: TokenKind, message: &str) {
        if self.current.kind == kind {
            self.advance();
            return;
        }
        self.panic_mode = true;
        eprintln!(
            "{}",
            CompileError::ParseError(ErrorInfo::error(&self.current, message))
        )
    }
}
/// Pratt-parser binding power, ordered lowest to highest; `U8Enum` provides
/// `as_u8`/`from_u8` over the discriminants.
#[derive(Debug, Copy, Clone, PartialEq, Eq, U8Enum)]
enum Precedence {
    None,
    Assignment, // =
    Or,         // or
    And,        // and
    Equality,   // == !=
    Comparison, // < > <= >=
    Term,       // + -
    Factor,     // * /
    Unary,      // ! -
    Call,       // . ()
    Primary,
}
/// One row of the Pratt rule table: optional prefix/infix compile functions
/// for a token kind plus that token's infix precedence.
#[derive(Clone)]
struct ParseRule {
    prefix: Option<fn(&mut Compiler)>,
    infix: Option<fn(&mut Compiler)>,
    precedence: Precedence,
}
lazy_static! {
    // Pratt rule table, indexed by TokenKind discriminant. Every token kind
    // must be registered exactly once or the `unwrap` below panics on first
    // access.
    static ref RULES: &'static [ParseRule] = {
        let mut rules = vec![None; TokenKind::COUNT];
        macro_rules! rule {
            ($kind:ident, $prefix:expr, $infix:expr, $precedence:ident) => {
                rules[TokenKind::$kind.as_u8() as usize] = Some(ParseRule {
                    prefix: $prefix,
                    infix: $infix,
                    precedence: Precedence::$precedence,
                });
            };
        }
        rule!(LeftParen, Some(grouping), None, None);
        rule!(RightParen, None, None, None);
        rule!(LeftBrace, None, None, None);
        rule!(RightBrace, None, None, None);
        rule!(Comma, None, None, None);
        rule!(Dot, None, None, None);
        rule!(Minus, Some(unary), Some(binary), Term);
        rule!(Plus, None, Some(binary), Term);
        rule!(Semicolon, None, None, None);
        rule!(Slash, None, Some(binary), Factor);
        rule!(Star, None, Some(binary), Factor);
        rule!(Bang, Some(unary), None, None);
        rule!(BangEqual, None, Some(binary), Equality);
        rule!(Equal, None, None, None);
        rule!(EqualEqual, None, Some(binary), Equality);
        // Fixed: comparison operators bind tighter than equality. They were
        // previously registered at Equality precedence, so `a == b < c`
        // parsed as `(a == b) < c` instead of `a == (b < c)`.
        rule!(Greater, None, Some(binary), Comparison);
        rule!(GreaterEqual, None, Some(binary), Comparison);
        rule!(Less, None, Some(binary), Comparison);
        rule!(LessEqual, None, Some(binary), Comparison);
        rule!(Identifier, None, None, None);
        rule!(String, Some(string), None, None);
        rule!(Number, Some(number), None, None);
        rule!(And, None, None, None);
        rule!(Class, None, None, None);
        rule!(Else, None, None, None);
        rule!(False, Some(literal), None, None);
        rule!(For, None, None, None);
        rule!(Fun, None, None, None);
        rule!(If, None, None, None);
        rule!(Nil, Some(literal), None, None);
        rule!(Or, None, None, None);
        rule!(Print, None, None, None);
        rule!(Return, None, None, None);
        // (removed a duplicate `rule!(LeftBrace, ...)` that re-registered the
        // same slot a second time between Return and Super)
        rule!(Super, None, None, None);
        rule!(This, None, None, None);
        rule!(True, Some(literal), None, None);
        rule!(Var, None, None, None);
        rule!(While, None, None, None);
        rule!(Error, None, None, None);
        rule!(Eof, None, None, None);
        rules
            .into_iter()
            .map(Option::unwrap)
            .collect::<Vec<_>>()
            .leak()
    };
}
/// Looks up the parse rule registered for `kind` in the global rule table.
fn get_rule(kind: TokenKind) -> &'static ParseRule {
    let index = kind.as_u8() as usize;
    &RULES[index]
}
// TODO! To really understand the parser, you need to see how execution threads
// through the interesting parsing functions—parsePrecedence() and the parser
// functions stored in the table. Take this (strange) expression:
//
// (-1 + 2) * 3 - -4
//
// Write a trace of how those functions are called. Show the order they are
// called, which calls which, and the arguments passed to them.
|
use itertools::Itertools;
use itertools::FoldWhile::{Continue, Done};
//use modular::*;
use std::collections::{HashMap, HashSet};
use std::error;
use std::io;
use std::io::Read;
use crate::day;
use std::fmt;
use std::ops::{Add, Mul, Sub};
/// Conversion and congruence operations for integers under a modulus.
pub trait Modular {
    /// Wraps `self` into its `Modulo` representation with the given `modulus`.
    fn to_modulo(self, modulus: u64) -> Modulo;
    /// Tests whether `self` and `with` are congruent modulo `modulus`, i.e.
    /// whether `self - with` is an exact multiple of `modulus`.
    fn is_congruent(self, with: impl Into<i64>, modulus: u64) -> bool;
}
/// A number in modulo representation: a `remainder` paired with its `modulus`.
///
/// Note: `to_modulo` keeps the sign of Rust's `%` operator, so converting a
/// negative integer yields a negative remainder.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct Modulo {
    remainder: i64,
    modulus: u64,
}
impl Modulo {
    /// The remainder component.
    pub fn remainder(self) -> i64 {
        self.remainder
    }
    /// The modulus (dividend) component.
    pub fn modulus(self) -> u64 {
        self.modulus
    }
}
impl Modular for i64 {
    fn to_modulo(self, modulus: u64) -> Modulo {
        let remainder = self % modulus as i64;
        Modulo { remainder, modulus }
    }
    fn is_congruent(self, with: impl Into<i64>, modulus: u64) -> bool {
        let difference = self - with.into();
        difference % modulus as i64 == 0
    }
}
impl Add for Modulo {
    type Output = Self;
    /// Adds two `Modulo` numbers.
    ///
    /// # Panics
    /// Panics if the operands have different modulus values.
    fn add(self, rhs: Self) -> Self {
        if self.modulus() != rhs.modulus() {
            panic!("Addition is only valid for modulo numbers with the same dividend")
        }
        let sum = self.remainder() + rhs.remainder();
        sum.to_modulo(self.modulus())
    }
}
impl Sub for Modulo {
    type Output = Self;
    /// Subtracts two `Modulo` numbers, normalizing a would-be-negative result
    /// by adding the modulus back.
    ///
    /// # Panics
    /// Panics if the operands have different modulus values.
    fn sub(self, rhs: Self) -> Self {
        if self.modulus() != rhs.modulus() {
            panic!("Subtraction is only valid for modulo numbers with the same dividend")
        }
        let difference = self.remainder() - rhs.remainder();
        let normalized = if difference < 0 {
            difference + self.modulus() as i64
        } else {
            difference
        };
        normalized.to_modulo(self.modulus())
    }
}
impl Mul for Modulo {
    type Output = Self;
    /// Multiplies two `Modulo` numbers; the product is computed in `i128` to
    /// avoid intermediate overflow before reduction.
    ///
    /// # Panics
    /// Panics if the operands have different modulus values.
    fn mul(self, rhs: Self) -> Self {
        if self.modulus() != rhs.modulus() {
            panic!("Multiplication is only valid for modulo numbers with the same dividend")
        }
        let wide = self.remainder() as i128 * rhs.remainder() as i128;
        let reduced = (wide % self.modulus() as i128) as i64;
        reduced.to_modulo(self.modulus())
    }
}
impl fmt::Display for Modulo {
    /// Formats as "<remainder> mod <modulus>".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?} mod {:?}", self.remainder, self.modulus)
    }
}
//#[cfg(test)]
//mod tests {
// use super::*;
//
// #[test]
// fn create_using_trait() {
// assert!(27.to_modulo(5) == modulo!(2, 5));
// }
//
// #[test]
// fn create_using_macro() {
// assert!(modulo!(99, 4) == 99.to_modulo(4));
// }
//
// #[test]
// fn get_remainder() {
// assert_eq!(modulo!(26, 11).remainder(), 4);
// }
//
// #[test]
// fn get_modulus() {
// assert_eq!(modulo!(121, 17).modulus(), 17);
// }
//
// #[test]
// fn add_successfully() {
// assert!(modulo!(23, 4) + modulo!(11, 4) == modulo!(2, 4));
// }
//
// #[test]
// #[should_panic]
// fn add_panics_with_different_moduli() {
// assert!(modulo!(23, 5) + modulo!(11, 6) == modulo!(2, 5));
// }
//
// #[test]
// fn subtract_successfully() {
// assert!(modulo!(24, 4) - modulo!(13, 4) == modulo!(1, 4));
// }
//
// #[test]
// #[should_panic]
// fn subtract_panics_with_different_moduli() {
// assert!(modulo!(47, 43) - modulo!(5, 27) == modulo!(12, 13));
// }
//
// #[test]
// fn multiply_successfully() {
// assert!(modulo!(2, 4) * modulo!(19, 4) == modulo!(2, 4));
// }
//
// #[test]
// #[should_panic]
// fn multiply_panics_with_different_moduli() {
// assert!(modulo!(91, 92) - modulo!(8, 9) == modulo!(12, 47));
// }
//
// #[test]
// fn string_representation() {
// let mod_new = modulo!(6, 7u64);
// assert_eq!(format!("{}", mod_new), "6 mod 7");
// }
//}
/// Result alias using a boxed dynamic error.
pub type BoxResult<T> = Result<T, Box<dyn error::Error>>;
/// Advent of Code 2019, day 24 ("Planet of Discord").
pub struct Day24 {}
impl day::Day for Day24 {
    fn tag(&self) -> &str { "24" }
    /// Part 1: prints the biodiversity rating of the first repeated layout.
    fn part1(&self, input: &dyn Fn() -> Box<dyn io::Read>) {
        println!("{:?}", self.part1_impl(&mut *input()));
    }
    /// Part 2: prints the bug count after 200 minutes of recursive simulation.
    fn part2(&self, input: &dyn Fn() -> Box<dyn io::Read>) {
        println!("{:?}", self.part2_impl(&mut *input(), 200));
    }
}
impl Day24 {
    /// Part 1: simulates the 5x5 bug automaton until a layout repeats, then
    /// returns its biodiversity rating (sum of 2^i over occupied tiles).
    fn part1_impl(self: &Self, input: &mut dyn io::Read) -> BoxResult<usize> {
        let reader = io::BufReader::new(input);
        // Flatten the grid to 25 cells: '.' -> 0, '#' -> 1; every other byte
        // (newlines, whitespace) maps to 2 and is filtered out.
        let init: Vec<_> = reader.bytes()
            .map(|b| match b.unwrap() as char { '.' => 0, '#' => 1, _ => 2, })
            .filter(|&b| b != 2).collect();
        let (s, _) = (0..)
            .fold_while((init, HashSet::new()), |(s, seen), _| {
                // Stop at the first layout seen before.
                if seen.contains(&s) { Done((s, seen)) }
                else {
                    let mut seen = seen;
                    seen.insert(s.clone());
                    // Count the 4-neighbourhood: rows are 5 wide, so +-5 is
                    // vertical and +-1 horizontal (guarded at row edges).
                    let ns = (0..s.len()).map(|i| {
                        let n = if i >= 5 { s[i - 5] } else { 0 }
                            + if i < 20 { s[i + 5] } else { 0 }
                            + if i % 5 >= 1 { s[i - 1] } else { 0 }
                            + if i % 5 < 4 { s[i + 1] } else { 0 };
                        // A bug survives only with exactly 1 neighbour; an
                        // empty tile is infested with 1 or 2 neighbours.
                        if s[i] == 1 && n != 1 { 0 }
                        else if s[i] == 0 && (n == 1 || n == 2) { 1 }
                        else { s[i] }
                    }).collect();
                    Continue((ns, seen))
                }
            }).into_inner();
        // Biodiversity: tile i contributes 2^i when occupied.
        Ok(s.into_iter().enumerate().map(|(i, b)| b << i).sum())
    }
    /// Part 2: recursive grids. The map goes depth -> 25-cell layout; tile 12
    /// is the portal to the inner grid. Runs `n` minutes, returns total bugs.
    fn part2_impl(self: &Self, input: &mut dyn io::Read, n: usize) -> BoxResult<usize> {
        let reader = io::BufReader::new(input);
        let mut init = HashMap::new();
        init.insert(0, reader.bytes()
            .map(|b| match b.unwrap() as char { '.' => 0, '#' => 1, _ => 2, })
            .filter(|&b| b != 2).collect::<Vec<_>>());
        let len = 25;
        // All-empty level used for depths not yet populated.
        let z = vec![0; len];
        let (s, _) = (0..n).fold((init, 0), |(sp, dim), _| {
            let mut nsp = HashMap::new();
            // Process one level beyond the populated range in each direction
            // so bugs can spread outward and inward.
            for d in -(dim + 1)..=(dim + 1) {
                let s = sp.get(&d).unwrap_or(&z);
                // so = enclosing (outer) level, si = nested (inner) level.
                let so = sp.get(&(d - 1)).unwrap_or(&z);
                let si = sp.get(&(d + 1)).unwrap_or(&z);
                let ns = (0..len).map(|i| {
                    // Neighbour count: tiles adjacent to the centre (7, 11,
                    // 13, 17) also see a whole edge of the inner grid, and
                    // border tiles see single tiles of the outer grid.
                    let n = if i >= 5 {
                        if i != 17 { s[i - 5] } else { (&si[20..len]).iter().sum() }
                    } else { so[7] }
                        + if i < 20 {
                            if i != 7 { s[i + 5] } else { (&si[0..5]).iter().sum() }
                        } else { so[17] }
                        + if i % 5 >= 1 {
                            if i != 13 { s[i - 1] } else { (4..len).step_by(5).map(|i| si[i]).sum() }
                        } else { so[11] }
                        + if i % 5 < 4 {
                            if i != 11 { s[i + 1] } else { (0..len).step_by(5).map(|i| si[i]).sum() }
                        } else { so[13] };
                    // The centre tile (12) is never occupied: it "is" the
                    // inner grid.
                    if i == 12 || s[i] == 1 && n != 1 { 0 } else if s[i] == 0 && (n == 1 || n == 2) { 1 } else { s[i] }
                }).collect();
                nsp.insert(d, ns);
            }
            let mut dim = dim;
            // eprintln!("dim {} nsp {:?}", dim, nsp);
            // Grow the populated depth range when either boundary level
            // became non-empty this minute.
            if nsp.get(&-(dim + 1)) != Some(&z)
                || nsp.get(&(dim + 1)) != Some(&z) {
                dim += 1;
            };
            (nsp, dim)
        });
        // eprintln!("{:?}", s);
        // Total bugs across all levels.
        let v = s.iter();
        let m: Vec<i32> = v.map(|(d, v)| {
            let i = v.iter();
            let s = i.sum();
            // eprintln!("{} {:?} {}", d, v, s);
            s
        }).collect();
        let s2: i32 = m.iter().sum();
        Ok(s2 as usize)
        // Ok(s.values().map(|&v| v.iter().sum()).sum() as usize)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Helper: assert part 1 on a literal grid. Any leading whitespace inside
    // the multi-line literals is harmless — the parser filters every byte
    // that is not '.' or '#'.
    fn test1(s: &str, x: usize) {
        assert_eq!(
            Day24 {}.part1_impl(&mut s.as_bytes()).unwrap(),
            x);
    }
    #[test]
    fn part1() {
        test1("....#
                    #..#.
                    #..##
                    ..#..
                    #....", 2129920);
    }
    // Helper: assert part 2 after 10 minutes of recursive simulation.
    fn test2(s: &str, x: usize) {
        assert_eq!(
            Day24 {}.part2_impl(&mut s.as_bytes(), 10).unwrap(),
            x);
    }
    #[test]
    fn part2() {
        test2("....#
                    #..#.
                    #..##
                    ..#..
                    #....", 99);
    }
}
|
#![allow(dead_code)]
use crate::prime::{sieve_of_eratosthenes};
/// Project Euler #10: sum of all primes up to `roof`
/// (e.g. `problem(10) == 17`, i.e. 2 + 3 + 5 + 7 — see the tests below).
fn problem(roof: u64) -> u64 {
    // @TODO replace for Sieve of Sundaram
    let primes = sieve_of_eratosthenes(roof);
    // Idiomatic `Iterator::sum` instead of a manual fold.
    primes.into_iter().sum()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Expected values from Project Euler problem 10.
    #[ignore] // this one unfortunately takes 4 seconds on my machine :'(
    #[test]
    fn problem_test() {
        assert_eq!(problem(10), 17);
        assert_eq!(problem(2_000_000), 142913828922);
    }
}
|
use std::{future::Future, marker::PhantomData, pin::Pin};
/// Minimal axum/actix-style demo app: registers handlers of differing arities
/// and dispatches a request string to all of them.
struct App {
    // handlers: Vec<Box<dyn Handler>>,
    services: Vec<Box<dyn Service>>,
}
impl App {
    pub fn new() -> Self {
        Self { services: vec![] }
    }
    /// Registers a handler; `ServiceWrapper` erases its concrete parameter
    /// tuple `T` and future `R` behind the object-safe `Service` trait.
    pub fn handler<F, T, R>(mut self, f: F) -> Self
    where
        F: Handler<T, R>,
        T: FromRequest + 'static,
        R: Future<Output = ()> + 'static,
    {
        self.services.push(Box::new(ServiceWrapper::new(f)));
        self
    }
    /// Invokes every registered handler with `req`, sequentially.
    pub async fn dispatch(&self, req: String) {
        for service in self.services.iter() {
            service.handle_request(&req).await;
        }
    }
}
/// Requires `T` to be parseable from a request string (extractor trait).
trait FromRequest {
    fn from_request(req: &str) -> Self;
}
// Extractors for the leaf types a handler may accept.
impl FromRequest for () {
    fn from_request(req: &str) -> Self {
        ()
    }
}
impl FromRequest for String {
    fn from_request(req: &str) -> Self {
        req.to_string()
    }
}
impl FromRequest for u32 {
    // NOTE(review): panics when the request is not a valid u32 — acceptable
    // for a demo, but a fallible extractor would be safer.
    fn from_request(req: &str) -> Self {
        req.parse().unwrap()
    }
}
impl FromRequest for u64 {
    // Same panic caveat as the u32 extractor above.
    fn from_request(req: &str) -> Self {
        req.parse().unwrap()
    }
}
// Tuple extractors: each element is extracted independently from the same
// request string.
impl<T1> FromRequest for (T1,)
where
    T1: FromRequest,
{
    fn from_request(req: &str) -> Self {
        (T1::from_request(req),)
    }
}
impl<T1, T2> FromRequest for (T1, T2)
where
    T1: FromRequest,
    T2: FromRequest,
{
    fn from_request(req: &str) -> Self {
        (T1::from_request(req), T2::from_request(req))
    }
}
/// A callable accepting parameter tuple `T` and returning future `R`
/// (i.e. `T` is the type the handler accepts).
trait Handler<T, R>: Clone + 'static
where
    R: Future<Output = ()>,
{
    fn call(&self, param: T) -> R;
}
impl<F, R> Handler<(), R> for F
where
    F: Fn() -> R + Clone + 'static,
    R: Future<Output = ()>,
{
    /// Zero-parameter handlers discard the (empty) parameter tuple.
    fn call(&self, _param: ()) -> R {
        (self)()
    }
}
impl<F, T, R> Handler<(T,), R> for F
where
    F: Fn(T) -> R + Clone + 'static,
    T: FromRequest,
    R: Future<Output = ()>,
{
    /// Unpacks the single-element tuple and forwards it to the closure.
    fn call(&self, param: (T,)) -> R {
        let (arg,) = param;
        (self)(arg)
    }
}
impl<F, T1, T2, R> Handler<(T1, T2), R> for F
where
    F: Fn(T1, T2) -> R + Clone + 'static,
    T1: FromRequest,
    T2: FromRequest,
    R: Future<Output = ()>,
{
    /// Unpacks the pair and forwards both elements to the closure.
    fn call(&self, param: (T1, T2)) -> R {
        let (a, b) = param;
        (self)(a, b)
    }
}
/// Object-safe, type-erased entry point used by `App::dispatch`.
trait Service {
    fn handle_request(&self, req: &str) -> Pin<Box<dyn Future<Output = ()>>>;
}
/// Adapts a typed `Handler` into the object-safe `Service` trait.
struct ServiceWrapper<F, T, R> {
    f: F,
    // Marks (T, R) as logically used by the wrapper; stores nothing.
    _t: PhantomData<(T, R)>,
}
impl<F, T, R> ServiceWrapper<F, T, R> {
    /// Wraps a handler. The bounds live on `new` (not the struct) so they
    /// are checked once, at registration time.
    pub fn new(f: F) -> Self
    where
        F: Handler<T, R>,
        T: FromRequest,
        R: Future<Output = ()>,
    {
        Self { f, _t: PhantomData }
    }
}
impl<F, T, R> Service for ServiceWrapper<F, T, R>
where
    F: Handler<T, R>,
    T: FromRequest + 'static,
    R: Future<Output = ()>,
{
    /// Parses the request into `T`, then boxes the handler's future so
    /// callers can await it without naming `R`.
    fn handle_request(&self, req: &str) -> Pin<Box<dyn Future<Output = ()>>> {
        let params = T::from_request(req);
        // Clone the handler so the returned future owns its callee.
        let f = self.f.clone();
        Box::pin(async move { f.call(params).await })
    }
}
#[tokio::test]
async fn test_add_handlers() {
    // Handlers with zero, one, and two extracted parameters.
    async fn none() {
        eprintln!("print from none");
    }
    async fn one(s: String) {
        eprintln!("print from one: s = {}", s);
    }
    async fn two(n1: u32, n2: u64) {
        eprintln!("print from two: n1 = {}, n2 = {}", n1, n2);
    }
    // Every handler runs for the same request; `two` parses it as
    // numbers, so the request string must be numeric.
    let app = App::new().handler(none).handler(one).handler(two);
    app.dispatch("1234".to_string()).await;
}
|
use super::obj::{GTwzobj, Twzobj};
use std::sync::atomic::{AtomicU32, Ordering};
/// A transaction over one leader object plus any follower objects.
pub struct Transaction {
    leader: GTwzobj,
    followers: Vec<GTwzobj>,
}
/// Why a transaction could not complete.
pub enum TransactionErr<E> {
    /// The transaction log ran out of space.
    LogFull,
    /// The user aborted with a custom error value.
    Abort(E),
}
/// On-log record header followed by a typed payload.
// Packed so records can be stored back to back in the byte log with no
// compiler-inserted padding.
#[repr(C, packed)]
struct RecordEntry<T> {
    ty: u8, // record type tag, e.g. RECORD_ALLOC
    fl: u8, // flags
    len: u16,
    pad: u32,
    data: T,
}
/// Record type tag for an allocation record.
const RECORD_ALLOC: u8 = 1;
/// Payload of an allocation record.
struct RecordAlloc {
    owned: u64,
}
/// Typed view over a record's payload.
enum Record<'a> {
    Alloc(&'a RecordAlloc),
}
impl<T> RecordEntry<T> {
    /// Reinterprets the payload according to the `ty` tag.
    ///
    /// # Panics
    /// Panics on an unknown record type.
    // NOTE(review): `&self.data` takes a reference into a #[repr(packed)]
    // struct — undefined behavior if the field is misaligned, and newer
    // compilers reject it (E0793). Consider raw-pointer access; confirm
    // all records are 16-byte aligned in the log (see TxLog::reserve).
    fn data<'a>(&'a self) -> Record<'a> {
        match self.ty {
            RECORD_ALLOC => Record::Alloc(unsafe { std::mem::transmute::<&T, &RecordAlloc>(&self.data) }),
            _ => panic!("unknown transaction record type"),
        }
    }
}
/// Magic tag identifying the transaction metaext on an object.
pub(super) const TX_METAEXT: u64 = 0xabbaabba44556655;
/// Fixed-size, append-only transaction log (4 KiB payload).
#[repr(C)]
struct TxLog {
    len: u32,       // capacity of `log` in bytes (set to 4096 in Default)
    top: AtomicU32, // next free offset into `log`
    pad2: u64,
    log: [u8; 4096],
}
impl TxLog {
    /// Reserves space for a `T` at the top of the log and returns it
    /// (contents are whatever bytes were there — zeroes initially).
    ///
    /// The size is rounded up to a multiple of 16 so successive records
    /// stay 16-byte aligned relative to the log start.
    // NOTE(review): there is no bounds check against `len`/the array —
    // a full log will panic (or worse) on the index below; confirm
    // callers guarantee capacity. Also note the &mut u8 -> &mut T
    // transmute assumes the slot is suitably aligned for T.
    fn reserve<T>(&mut self) -> &mut T {
        let len = std::mem::size_of::<T>();
        let len = (len + 15usize) & (!15usize);
        let top = self.top.fetch_add(len as u32, Ordering::SeqCst);
        unsafe { std::mem::transmute::<&mut u8, &mut T>(&mut self.log[top as usize]) }
    }
}
impl Default for TxLog {
fn default() -> Self {
Self {
len: 4096,
top: AtomicU32::new(0),
pad2: 0,
log: [0; 4096],
}
}
}
/// Per-object transaction state, stored as a metaext on the object.
#[repr(C)]
#[derive(Default)]
pub(super) struct TransactionManager {
    log: TxLog,
}
impl<T> Twzobj<T> {
    /// Attaches a fresh transaction-manager metaext to this object.
    pub(super) fn init_tx(&self) {
        unsafe {
            self.alloc_metaext_unchecked::<TransactionManager>(TX_METAEXT);
        }
    }
}
impl Transaction {
    /// Looks up the leader's transaction log metaext, if present.
    // NOTE(review): hands out `&mut TxLog` from `&self` via the project's
    // `find_metaext_mut`; presumably sound through unsafe machinery in
    // that method — confirm.
    fn get_log(&self) -> Option<&mut TxLog> {
        self.leader.find_metaext_mut(TX_METAEXT)
    }
    /// Starts a transaction led by `leader`, with no followers yet.
    pub(super) fn new(leader: GTwzobj) -> Transaction {
        Transaction {
            leader,
            followers: vec![],
        }
    }
    /// Reserves an allocation record in the log and returns its `owned`
    /// slot, to be filled with the allocation to free on failure.
    pub(super) fn prep_alloc_free_on_fail<'b, T>(&'b self, _obj: &Twzobj<T>) -> &'b mut u64 {
        let log = self.get_log().unwrap(); //TODO: handle a missing log gracefully
        let entry = log.reserve::<RecordEntry<RecordAlloc>>();
        // NOTE(review): borrows a field of a #[repr(C, packed)] struct;
        // relies on the record's 16-byte slot alignment — confirm.
        unsafe { &mut entry.data.owned }
    }
    pub(super) fn record_base<T>(&self, _obj: &Twzobj<T>) {}
}
|
// Auto-generated register reader types (svd2rust style); keep edits
// mechanical and consistent across fields.
#[doc = "Register `C1ISR` reader"]
pub type R = crate::R<C1ISR_SPEC>;
#[doc = "Field `ISF0` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub type ISF0_R = crate::BitReader<ISF0_A>;
#[doc = "Interrupt(N) semaphore n status bit before enable (mask)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ISF0_A {
    #[doc = "0: No interrupt pending"]
    NotPending = 0,
    #[doc = "1: Interrupt pending"]
    Pending = 1,
}
impl From<ISF0_A> for bool {
#[inline(always)]
fn from(variant: ISF0_A) -> Self {
variant as u8 != 0
}
}
impl ISF0_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ISF0_A {
        // A set bit means the interrupt is pending.
        match self.bits {
            false => ISF0_A::NotPending,
            true => ISF0_A::Pending,
        }
    }
    #[doc = "No interrupt pending"]
    #[inline(always)]
    pub fn is_not_pending(&self) -> bool {
        *self == ISF0_A::NotPending
    }
    #[doc = "Interrupt pending"]
    #[inline(always)]
    pub fn is_pending(&self) -> bool {
        *self == ISF0_A::Pending
    }
}
// ISF1..ISF15 share ISF0's reader type; these aliases only rename it.
#[doc = "Field `ISF1` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF1_R;
#[doc = "Field `ISF2` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF2_R;
#[doc = "Field `ISF3` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF3_R;
#[doc = "Field `ISF4` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF4_R;
#[doc = "Field `ISF5` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF5_R;
#[doc = "Field `ISF6` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF6_R;
#[doc = "Field `ISF7` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF7_R;
#[doc = "Field `ISF8` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF8_R;
#[doc = "Field `ISF9` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF9_R;
#[doc = "Field `ISF10` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF10_R;
#[doc = "Field `ISF11` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF11_R;
#[doc = "Field `ISF12` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF12_R;
#[doc = "Field `ISF13` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF13_R;
#[doc = "Field `ISF14` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF14_R;
#[doc = "Field `ISF15` reader - Interrupt(N) semaphore n status bit before enable (mask)"]
pub use ISF0_R as ISF15_R;
// Generated field accessors: `isfN()` extracts bit N of the register.
impl R {
    #[doc = "Bit 0 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf0(&self) -> ISF0_R {
        ISF0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf1(&self) -> ISF1_R {
        ISF1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf2(&self) -> ISF2_R {
        ISF2_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf3(&self) -> ISF3_R {
        ISF3_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf4(&self) -> ISF4_R {
        ISF4_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf5(&self) -> ISF5_R {
        ISF5_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf6(&self) -> ISF6_R {
        ISF6_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf7(&self) -> ISF7_R {
        ISF7_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf8(&self) -> ISF8_R {
        ISF8_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf9(&self) -> ISF9_R {
        ISF9_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf10(&self) -> ISF10_R {
        ISF10_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf11(&self) -> ISF11_R {
        ISF11_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf12(&self) -> ISF12_R {
        ISF12_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf13(&self) -> ISF13_R {
        ISF13_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf14(&self) -> ISF14_R {
        ISF14_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - Interrupt(N) semaphore n status bit before enable (mask)"]
    #[inline(always)]
    pub fn isf15(&self) -> ISF15_R {
        ISF15_R::new(((self.bits >> 15) & 1) != 0)
    }
}
#[doc = "HSEM Interrupt status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c1isr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct C1ISR_SPEC;
impl crate::RegisterSpec for C1ISR_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`c1isr::R`](R) reader structure"]
impl crate::Readable for C1ISR_SPEC {}
#[doc = "`reset()` method sets C1ISR to value 0"]
impl crate::Resettable for C1ISR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
mod auth;
mod error;
use std::{fmt, net::SocketAddr, sync::Arc};
use eyre::Report;
use handlebars::Handlebars;
use hyper::{
client::{connect::dns::GaiResolver, HttpConnector},
header::{AUTHORIZATION, CONTENT_TYPE, LOCATION},
server::Server,
Body, Client as HyperClient, Request, Response, StatusCode,
};
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
use prometheus::{Encoder, TextEncoder};
use rosu_v2::Osu;
use routerify::{ext::RequestExt, RouteError, Router, RouterService};
use serde_json::json;
use tokio::{fs::File, io::AsyncReadExt, sync::oneshot};
use crate::{
custom_client::{CustomClientError, ErrorKind, TwitchDataList, TwitchOAuthToken, TwitchUser},
util::constants::{GENERAL_ISSUE, TWITCH_OAUTH, TWITCH_USERS_ENDPOINT},
Context, CONFIG,
};
pub use self::{
auth::{AuthenticationStandby, AuthenticationStandbyError, WaitForOsuAuth, WaitForTwitchAuth},
error::ServerError,
};
/// Starts the HTTP server and blocks until it errors or `shutdown_rx`
/// fires (graceful shutdown). Debug builds skip the server entirely.
pub async fn run_server(ctx: Arc<Context>, shutdown_rx: oneshot::Receiver<()>) {
    if cfg!(debug_assertions) {
        info!("Skip server on debug");
        return;
    }
    // CONFIG is presumably initialized at startup — the unwraps rely on it.
    let ip = CONFIG.get().unwrap().server.internal_ip;
    let port = CONFIG.get().unwrap().server.internal_port;
    let addr = SocketAddr::from((ip, port));
    let router = router(ctx);
    let service = RouterService::new(router).expect("failed to create RouterService");
    let server = Server::bind(&addr)
        .serve(service)
        .with_graceful_shutdown(async {
            let _ = shutdown_rx.await;
        });
    info!("Running server...");
    if let Err(why) = server.await {
        error!("{:?}", Report::new(why).wrap_err("server failed"));
    }
}
// Newtype wrappers so routerify's per-type data map can distinguish
// values that would otherwise share a type (several Strings, etc.).
struct Client(HyperClient<HttpsConnector<HttpConnector<GaiResolver>>, Body>);
struct Context_(Arc<Context>);
struct Handlebars_(Handlebars<'static>);
struct OsuClientId(u64);
struct OsuClientSecret(String);
struct OsuRedirect(String);
struct TwitchClientId(String);
struct TwitchClientSecret(String);
struct TwitchRedirect(String);
/// Builds the router: shared data (HTTP client, credentials, templates)
/// plus the metrics, auth, and osu!direct routes.
fn router(ctx: Arc<Context>) -> Router<Body, ServerError> {
    // HTTPS-capable client, used for the Twitch token exchange.
    let connector = HttpsConnectorBuilder::new()
        .with_webpki_roots()
        .https_or_http()
        .enable_http1()
        .build();
    let client = HyperClient::builder().build(connector);
    let config = CONFIG.get().unwrap();
    let osu_client_id = config.tokens.osu_client_id;
    let osu_client_secret = config.tokens.osu_client_secret.to_owned();
    let twitch_client_id = config.tokens.twitch_client_id.to_owned();
    let twitch_client_secret = config.tokens.twitch_token.to_owned();
    // The redirect URLs must match the ones registered with osu!/Twitch.
    let url = &config.server.external_url;
    let osu_redirect = format!("{url}/auth/osu");
    let twitch_redirect = format!("{url}/auth/twitch");
    let mut handlebars = Handlebars::new();
    let mut path = config.paths.website.to_owned();
    path.push("auth.hbs");
    handlebars
        .register_template_file("auth", path)
        .expect("failed to register auth template to handlebars")
        ;
    Router::builder()
        .data(Client(client))
        .data(Context_(ctx))
        .data(Handlebars_(handlebars))
        .data(OsuClientId(osu_client_id))
        .data(OsuClientSecret(osu_client_secret))
        .data(OsuRedirect(osu_redirect))
        .data(TwitchClientId(twitch_client_id))
        .data(TwitchClientSecret(twitch_client_secret))
        .data(TwitchRedirect(twitch_redirect))
        .get("/metrics", metrics_handler)
        .get("/auth/osu", auth_osu_handler)
        .get("/auth/twitch", auth_twitch_handler)
        .get("/auth/auth.css", auth_css_handler)
        .get("/auth/icon.svg", auth_icon_handler)
        .get("/osudirect/:mapset_id", osudirect_handler)
        .any(handle_404)
        .err_handler(error_handler)
        .build()
        .expect("failed to build router")
}
// Required to pass RouteError (a boxed dyn error) to eyre::Report, which
// needs a concrete std::error::Error implementor.
#[derive(Debug)]
struct ErrorWrapper(RouteError);
impl std::error::Error for ErrorWrapper {
    /// Delegates to the wrapped error's source chain.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        self.0.source()
    }
}
impl fmt::Display for ErrorWrapper {
    /// Forwards to the wrapped route error's own `Display`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}
/// Router-level error handler: logs the failure and answers a generic 500.
async fn error_handler(err: RouteError) -> Response<Body> {
    let report = Report::new(ErrorWrapper(err)).wrap_err("error while handling server request");
    error!("{report:?}");
    Response::builder()
        .status(StatusCode::INTERNAL_SERVER_ERROR)
        .body(Body::from(GENERAL_ISSUE))
        // Infallible: status and body are both statically valid.
        .unwrap()
}
/// Result alias shared by all route handlers below.
type HandlerResult = Result<Response<Body>, ServerError>;

/// Catch-all route: anything unmatched gets a plain 404.
async fn handle_404(_req: Request<Body>) -> HandlerResult {
    Ok(Response::builder()
        .status(StatusCode::NOT_FOUND)
        .body(Body::from("404 Not Found"))?)
}
/// Prometheus scrape endpoint: encodes all registered metrics as text.
async fn metrics_handler(req: Request<Body>) -> HandlerResult {
    let mut buf = Vec::new();
    let encoder = TextEncoder::new();
    // The context is always registered on the router, hence the unwrap.
    let Context_(ctx) = req.data().unwrap();
    let metric_families = ctx.stats.registry.gather();
    encoder.encode(&metric_families, &mut buf).unwrap();
    Ok(Response::new(Body::from(buf)))
}
/// osu! OAuth callback; on failure logs and renders the error page as 500.
async fn auth_osu_handler(req: Request<Body>) -> HandlerResult {
    match auth_osu_handler_(&req).await {
        Ok(response) => Ok(response),
        Err(why) => {
            warn!("{:?}", Report::new(why).wrap_err("osu! auth failed"));
            let render_data = json!({
                "body_id": "error",
                "error": GENERAL_ISSUE,
            });
            let Handlebars_(handlebars) = req.data().unwrap();
            let page = handlebars.render("auth", &render_data)?;
            let response = Response::builder()
                .status(StatusCode::INTERNAL_SERVER_ERROR)
                .body(Body::from(page))?;
            Ok(response)
        }
    }
}
/// Completes the osu! OAuth flow: exchanges the `code` query parameter
/// for a user and notifies the waiting bot command identified by `state`.
///
/// # Errors
/// Fails when the osu! client cannot be built or the user request fails;
/// malformed queries render a 400 page instead of erroring.
async fn auth_osu_handler_(req: &Request<Body>) -> HandlerResult {
    let query = req.uri().query();

    // Extract the value of a `key=value` pair from the raw query string.
    // `strip_prefix` replaces the previous magic-number slicing.
    fn param<'q>(query: Option<&'q str>, key: &str) -> Option<&'q str> {
        query.and_then(|q| q.split('&').find_map(|p| p.strip_prefix(key)))
    }

    let code = match param(query, "code=") {
        Some(code) => code,
        None => return invalid_auth_query(req),
    };
    // `state` carries the id the bot handed out when it began waiting.
    let id = match param(query, "state=").map(|s| s.parse()) {
        Some(Ok(state)) => state,
        None | Some(Err(_)) => return invalid_auth_query(req),
    };
    let Context_(ctx) = req.data().unwrap();
    // Nobody is waiting for an osu! auth => the request is unsolicited.
    if ctx.auth_standby.is_osu_empty() {
        return unexpected_auth(req);
    }
    let OsuClientId(client_id) = req.data().unwrap();
    let OsuClientSecret(client_secret) = req.data().unwrap();
    let OsuRedirect(redirect) = req.data().unwrap();
    let osu = Osu::builder()
        .client_id(*client_id)
        .client_secret(client_secret)
        .with_authorization(code, redirect)
        .build()
        .await?;
    let user = osu.own_data().await?;
    let render_data = json!({
        "body_id": "success",
        "kind": "osu!",
        "name": user.username,
    });
    let Handlebars_(handlebars) = req.data().unwrap();
    let page = handlebars.render("auth", &render_data)?;
    info!("Successful osu! authorization for `{}`", user.username);
    ctx.auth_standby.process_osu(user, id);
    Ok(Response::new(Body::from(page)))
}
/// Twitch OAuth callback; on failure logs and renders the error page as 500.
async fn auth_twitch_handler(req: Request<Body>) -> HandlerResult {
    match auth_twitch_handler_(&req).await {
        Ok(response) => Ok(response),
        Err(why) => {
            warn!("{:?}", Report::new(why).wrap_err("twitch auth failed"));
            let render_data = json!({
                "body_id": "error",
                "error": GENERAL_ISSUE,
            });
            let Handlebars_(handlebars) = req.data().unwrap();
            let page = handlebars.render("auth", &render_data)?;
            let response = Response::builder()
                .status(StatusCode::INTERNAL_SERVER_ERROR)
                .body(Body::from(page))?;
            Ok(response)
        }
    }
}
/// Completes the Twitch OAuth flow: trades `code` for a bearer token,
/// fetches the authorizing user, and notifies the waiting bot command
/// identified by `state`.
///
/// # Errors
/// Fails on token/user request errors or when Twitch returns no user;
/// malformed queries render a 400 page instead of erroring.
async fn auth_twitch_handler_(req: &Request<Body>) -> HandlerResult {
    let query = req.uri().query();

    // Extract the value of a `key=value` pair from the raw query string.
    // `strip_prefix` replaces the previous magic-number slicing.
    fn param<'q>(query: Option<&'q str>, key: &str) -> Option<&'q str> {
        query.and_then(|q| q.split('&').find_map(|p| p.strip_prefix(key)))
    }

    let code = match param(query, "code=") {
        Some(code) => code,
        None => return invalid_auth_query(req),
    };
    let id = match param(query, "state=").map(|s| s.parse()) {
        Some(Ok(state)) => state,
        None | Some(Err(_)) => return invalid_auth_query(req),
    };
    let Context_(ctx) = req.data().unwrap();
    // Nobody is waiting for a twitch auth => the request is unsolicited.
    if ctx.auth_standby.is_twitch_empty() {
        return unexpected_auth(req);
    }
    let TwitchClientId(client_id) = req.data().unwrap();
    let TwitchClientSecret(client_secret) = req.data().unwrap();
    let TwitchRedirect(redirect) = req.data().unwrap();
    let Client(client) = req.data().unwrap();
    let req_uri = format!(
        "{TWITCH_OAUTH}?client_id={client_id}&client_secret={client_secret}\
        &code={code}&grant_type=authorization_code&redirect_uri={redirect}"
    );
    let token_req = Request::post(req_uri).body(Body::empty())?;
    let response = client.request(token_req).await?;
    let bytes = hyper::body::to_bytes(response.into_body()).await?;
    let token = serde_json::from_slice::<TwitchOAuthToken>(&bytes)
        .map_err(|e| CustomClientError::parsing(e, &bytes, ErrorKind::TwitchToken))
        .map(|token| format!("Bearer {token}"))?;
    let user_req = Request::get(TWITCH_USERS_ENDPOINT)
        .header(AUTHORIZATION, token)
        .header("Client-ID", client_id)
        .body(Body::empty())?;
    let response = client.request(user_req).await?;
    let bytes = hyper::body::to_bytes(response.into_body()).await?;
    let user = serde_json::from_slice::<TwitchDataList<TwitchUser>>(&bytes)
        .map_err(|e| CustomClientError::parsing(e, &bytes, ErrorKind::TwitchUserId))
        .map(|mut data| data.data.pop())?
        .ok_or(ServerError::NoTwitchUser)?;
    let render_data = json!({
        "body_id": "success",
        "kind": "twitch",
        "name": user.display_name,
    });
    let Handlebars_(handlebars) = req.data().unwrap();
    let page = handlebars.render("auth", &render_data)?;
    info!(
        "Successful twitch authorization for `{}`",
        user.display_name
    );
    ctx.auth_standby.process_twitch(user, id);
    Ok(Response::new(Body::from(page)))
}
/// Serves the stylesheet for the auth pages straight from disk.
async fn auth_css_handler(_: Request<Body>) -> HandlerResult {
    let path = {
        let mut p = CONFIG.get().unwrap().paths.website.to_owned();
        p.push("auth.css");
        p
    };
    let mut contents = Vec::with_capacity(1824);
    File::open(path).await?.read_to_end(&mut contents).await?;
    Ok(Response::new(Body::from(contents)))
}
/// Serves the icon with an explicit SVG content type.
async fn auth_icon_handler(_: Request<Body>) -> HandlerResult {
    let path = {
        let mut p = CONFIG.get().unwrap().paths.website.to_owned();
        p.push("icon.svg");
        p
    };
    let mut contents = Vec::with_capacity(11_198);
    File::open(path).await?.read_to_end(&mut contents).await?;
    Ok(Response::builder()
        .header(CONTENT_TYPE, "image/svg+xml")
        .body(Body::from(contents))?)
}
/// Redirects `/osudirect/<mapset_id>` to the `osu://dl/` protocol link.
async fn osudirect_handler(req: Request<Body>) -> HandlerResult {
    // The path segment must be a numeric mapset id.
    let parsed = req.param("mapset_id").map(|id| id.parse::<u32>());
    let mapset_id = match parsed {
        Some(Ok(id)) => id,
        Some(Err(_)) | None => {
            let content = "The path following '/osudirect/' must be a numeric mapset id";
            return Ok(Response::builder()
                .status(StatusCode::BAD_REQUEST)
                .body(Body::from(content))?);
        }
    };
    Ok(Response::builder()
        .status(StatusCode::PERMANENT_REDIRECT)
        .header(LOCATION, format!("osu://dl/{mapset_id}"))
        .body(Body::empty())?)
}
/// Renders the auth error page for a malformed query (HTTP 400).
fn invalid_auth_query(req: &Request<Body>) -> HandlerResult {
    let Handlebars_(handlebars) = req.data().unwrap();
    let page = handlebars.render(
        "auth",
        &json!({
            "body_id": "error",
            "error": "Invalid query",
        }),
    )?;
    Ok(Response::builder()
        .status(StatusCode::BAD_REQUEST)
        .body(Body::from(page))?)
}
/// Renders the auth error page for an authorization nobody asked for
/// (HTTP 412).
fn unexpected_auth(req: &Request<Body>) -> HandlerResult {
    let Handlebars_(handlebars) = req.data().unwrap();
    let page = handlebars.render(
        "auth",
        &json!({
            "body_id": "error",
            "error": "Did not expect authentication. Be sure you use the bot command first.",
        }),
    )?;
    Ok(Response::builder()
        .status(StatusCode::PRECONDITION_FAILED)
        .body(Body::from(page))?)
}
|
use crate::common::{self, *};
use std::{thread, time, any::TypeId};
/// Test-backend application state; `root` points back at the owning
/// `AApplication` wrapper (captured in `with_uninit_params`).
pub struct TestableApplication {
    pub(crate) root: *mut Application,
    name: String,
    // Frame-loop sleep in milliseconds (0 = no sleeping between frames).
    sleep: u32,
}
pub type Application = AApplication<TestableApplication>;
impl<O: controls::Application> NewApplicationInner<O> for TestableApplication {
    /// Builds the inner state while the outer wrapper is still
    /// uninitialized: only the *address* of `u` is recorded here; it is
    /// not dereferenced until after `with_name` writes the wrapper.
    fn with_uninit_params(u: &mut mem::MaybeUninit<O>, name: &str) -> Self {
        TestableApplication {
            root: u as *mut _ as *mut Application,
            name: name.into(),
            sleep: 0,
        }
    }
}
impl TestableApplication {
    /// Borrows the owning application wrapper.
    // SAFETY relies on `root` pointing at the heap allocation made in
    // `with_name` for the application's whole lifetime — TODO confirm
    // the box is never moved/reallocated.
    pub fn get(&self) -> &Application {
        unsafe { &*self.root }
    }
    /// Mutable twin of `get`.
    pub fn get_mut(&mut self) -> &mut Application {
        unsafe { &mut *self.root }
    }
}
impl ApplicationInner for TestableApplication {
    /// Creates the boxed application: allocates it uninitialized so the
    /// inner state can capture the final heap address, then writes the
    /// wrapper into place.
    fn with_name<S: AsRef<str>>(name: S) -> Box<dyn controls::Application> {
        let mut b: Box<mem::MaybeUninit<Application>> = Box::new_uninit();
        let ab = AApplication::with_inner(
            <Self as NewApplicationInner<Application>>::with_uninit_params(b.as_mut(), name.as_ref()),
        );
        unsafe {
            b.as_mut_ptr().write(ab);
            b.assume_init()
        }
    }
    /// Stores a new root member; only `Window` and `Tray` are supported.
    /// Returns a borrow of the member as just stored.
    fn add_root(&mut self, m: Box<dyn controls::Closeable>) -> &mut dyn controls::Member {
        let base = &mut self.get_mut().base;
        let is_window = m.as_any().type_id() == TypeId::of::<crate::window::Window>();
        let is_tray = m.as_any().type_id() == TypeId::of::<crate::tray::Tray>();
        if is_window {
            let i = base.windows.len();
            base.windows.push(m.into_any().downcast::<crate::window::Window>().unwrap());
            return base.windows[i].as_mut().as_member_mut();
        }
        if is_tray {
            let i = base.trays.len();
            base.trays.push(m.into_any().downcast::<crate::tray::Tray>().unwrap());
            return base.trays[i].as_mut().as_member_mut();
        }
        panic!("Unsupported Closeable: {:?}", m.as_any().type_id());
    }
    /// Closes and removes the first window or tray matching `arg`;
    /// returns whether anything was closed.
    // `find` short-circuits on the first `true`, so at most one member is
    // removed per call and the index shift from `remove` is harmless.
    fn close_root(&mut self, arg: types::FindBy, skip_callbacks: bool) -> bool {
        let base = &mut self.get_mut().base;
        match arg {
            types::FindBy::Id(id) => {
                (0..base.windows.len()).into_iter().find(|i| if base.windows[*i].id() == id
                    && base.windows[*i].as_any_mut().downcast_mut::<crate::window::Window>().unwrap().inner_mut().inner_mut().inner_mut().inner_mut().close(skip_callbacks) {
                    base.windows.remove(*i);
                    true
                } else {
                    false
                }).is_some()
                ||
                (0..base.trays.len()).into_iter().find(|i| if base.trays[*i].id() == id
                    && base.trays[*i].as_any_mut().downcast_mut::<crate::tray::Tray>().unwrap().inner_mut().close(skip_callbacks) {
                    base.trays.remove(*i);
                    true
                } else {
                    false
                }).is_some()
            }
            types::FindBy::Tag(ref tag) => {
                (0..base.windows.len()).into_iter().find(|i| if base.windows[*i].tag().is_some() && base.windows[*i].tag().unwrap() == Cow::Borrowed(tag.into())
                    && base.windows[*i].as_any_mut().downcast_mut::<crate::window::Window>().unwrap().inner_mut().inner_mut().inner_mut().inner_mut().close(skip_callbacks) {
                    base.windows.remove(*i);
                    true
                } else {
                    false
                }).is_some()
                ||
                (0..base.trays.len()).into_iter().find(|i| if base.trays[*i].tag().is_some() && base.trays[*i].tag().unwrap() == Cow::Borrowed(tag.into())
                    && base.trays[*i].as_any_mut().downcast_mut::<crate::tray::Tray>().unwrap().inner_mut().close(skip_callbacks) {
                    base.trays.remove(*i);
                    true
                } else {
                    false
                }).is_some()
            }
        }
    }
    fn name<'a>(&'a self) -> Cow<'a, str> {
        Cow::Borrowed(self.name.as_str())
    }
    /// Milliseconds slept per frame-loop iteration (0 = no sleep).
    fn frame_sleep(&self) -> u32 {
        self.sleep
    }
    fn set_frame_sleep(&mut self, value: u32) {
        self.sleep = value;
    }
    /// Main loop: draws all windows once, then drains up to
    /// MAX_FRAME_CALLBACKS queued callbacks per frame until no windows or
    /// trays remain.
    fn start(&mut self) {
        {
            let base = &mut self.get_mut().base;
            for window in base.windows.as_mut_slice() {
                window.as_any_mut().downcast_mut::<crate::window::Window>().unwrap().inner_mut().inner_mut().inner_mut().inner_mut().inner_mut().draw();
            }
        }
        loop {
            let mut frame_callbacks = 0;
            let w = &mut unsafe {&mut *self.root}.base;
            while frame_callbacks < defaults::MAX_FRAME_CALLBACKS {
                match w.queue().try_recv() {
                    Ok(mut cmd) => {
                        // A callback returning true re-queues itself for
                        // the next frame.
                        if (cmd.as_mut())(unsafe { &mut *self.root } ) {
                            let _ = w.sender().send(cmd);
                        }
                        frame_callbacks += 1;
                    }
                    Err(e) => match e {
                        mpsc::TryRecvError::Empty => break,
                        mpsc::TryRecvError::Disconnected => unreachable!(),
                    },
                }
            }
            if self.sleep > 0 {
                thread::sleep(time::Duration::from_millis(self.sleep as u64));
            }
            let base = &mut self.get_mut().base;
            if base.windows.len() < 1 && base.trays.len() < 1 {
                break;
            }
        }
    }
    /// Finds a member by id or tag: windows first (including their child
    /// controls), then trays.
    fn find_member_mut<'a>(&'a mut self, arg: &'a types::FindBy) -> Option<&'a mut dyn controls::Member> {
        let base = &mut self.get_mut().base;
        for window in base.windows.as_mut_slice() {
            match arg {
                types::FindBy::Id(id) => {
                    if window.id() == *id {
                        return Some(window.as_member_mut());
                    }
                }
                types::FindBy::Tag(ref tag) => {
                    if let Some(mytag) = window.tag() {
                        if tag.as_str() == mytag {
                            return Some(window.as_member_mut());
                        }
                    }
                }
            }
            let found = controls::Container::find_control_mut(window.as_mut(), arg).map(|control| control.as_member_mut());
            if found.is_some() {
                return found;
            }
        }
        for tray in base.trays.as_mut_slice() {
            match arg {
                types::FindBy::Id(ref id) => {
                    if tray.id() == *id {
                        return Some(tray.as_member_mut());
                    }
                }
                types::FindBy::Tag(ref tag) => {
                    if let Some(mytag) = tray.tag() {
                        if tag.as_str() == mytag {
                            return Some(tray.as_member_mut());
                        }
                    }
                }
            }
        }
        None
    }
    /// Immutable twin of `find_member_mut`.
    fn find_member<'a>(&'a self, arg: &'a types::FindBy) -> Option<&'a dyn controls::Member> {
        let base = &self.get().base;
        for window in base.windows.as_slice() {
            match arg {
                types::FindBy::Id(id) => {
                    if window.id() == *id {
                        return Some(window.as_member());
                    }
                }
                types::FindBy::Tag(ref tag) => {
                    if let Some(mytag) = window.tag() {
                        if tag.as_str() == mytag {
                            return Some(window.as_member());
                        }
                    }
                }
            }
            let found = controls::Container::find_control(window.as_ref(), arg).map(|control| control.as_member());
            if found.is_some() {
                return found;
            }
        }
        for tray in base.trays.as_slice() {
            match arg {
                types::FindBy::Id(ref id) => {
                    if tray.id() == *id {
                        return Some(tray.as_member());
                    }
                }
                types::FindBy::Tag(ref tag) => {
                    if let Some(mytag) = tray.tag() {
                        if tag.as_str() == mytag {
                            return Some(tray.as_member());
                        }
                    }
                }
            }
        }
        None
    }
    /// Closes every window and tray, passing `true` for skip_callbacks.
    fn exit(&mut self) {
        let base = &mut self.get_mut().base;
        for mut window in base.windows.drain(..) {
            window.as_any_mut().downcast_mut::<crate::window::Window>().unwrap().inner_mut().inner_mut().inner_mut().inner_mut().close(true);
        }
        for mut tray in base.trays.drain(..) {
            tray.as_any_mut().downcast_mut::<crate::tray::Tray>().unwrap().inner_mut().close(true);
        }
    }
    fn roots<'a>(&'a self) -> Box<dyn Iterator<Item = &'a (dyn controls::Member)> + 'a> {
        self.get().roots()
    }
    fn roots_mut<'a>(&'a mut self) -> Box<dyn Iterator<Item = &'a mut (dyn controls::Member)> + 'a> {
        self.get_mut().roots_mut()
    }
}
impl HasNativeIdInner for TestableApplication {
    type Id = common::TestableId;
    /// Derives the native id from the application pointer.
    // NOTE(review): casts `*mut Application` to `*mut MemberBase` —
    // presumably the member base is the wrapper's first field; confirm
    // the layout guarantee.
    fn native_id(&self) -> Self::Id {
        (self.root as *mut MemberBase).into()
    }
}
|
pub mod ast;
pub mod printer;
#[macro_use]
extern crate lalrpop_util;
lalrpop_mod!(pub grammar); // synthesized by LALRPOP
/// Parses `input` with the LALRPOP-generated parser, stringifying any
/// parse error for the caller.
pub fn parse(input: &str) -> Result<ast::Query, String> {
    grammar::QUERYParser::new()
        .parse(input)
        .map_err(|e| format!("{:?}", e))
}
#[cfg(test)]
mod tests {
use crate::parse;
use crate::ast::*;
use crate::printer::Source;
    /// Wraps a single feature into a minimal query.
    fn one_feature(feature: Feature) -> Query {
        one_expression(Expression::from_feature(feature))
    }
    /// Wraps a single expression into a minimal query.
    fn one_expression(expression: Expression) -> Query {
        Query::simple(Expressions::from_expression(expression))
    }
    /// Builds `feature <operator> number` as a one-expression query.
    fn one_comparison(operator: RelationalOperator, feature: Feature, number: i64) -> Query {
        let left = Operand::from_feature(feature);
        let right = Operand::from_number(number);
        let expression = Expression::new(operator, left, right);
        Query::simple(Expressions::from_expression(expression))
    }
fn feature_conjunction(features: Vec<Feature>) -> Query {
assert!(features.len() > 1);
expression_conjunction(features.into_iter().map(|f| Expression::from_feature(f)).collect())
}
    /// Conjunction of pre-built expressions (requires >= 2).
    fn expression_conjunction(expressions: Vec<Expression>) -> Query {
        assert!(expressions.len() > 1);
        Query::simple(Expressions::from_expressions(Connective::Conjunction, expressions).unwrap())
    }
    /// Conjunction of `feature <op> number` comparisons (requires >= 2).
    fn comparison_conjunction(comparisons: Vec<(Feature, RelationalOperator, i64)>) -> Query {
        assert!(comparisons.len() > 1);
        let expressions = comparisons.into_iter().map(|(f, o, n)| {
            Expression::new(o, Operand::from_feature(f), Operand::from_number(n))
        }).collect();
        Query::simple(Expressions::from_expressions(Connective::Conjunction, expressions).unwrap())
    }
    /// Asserts that `input` parses to exactly `expected`.
    fn parse_ok(input: &str, expected: Query) {
        assert_eq!(parse(input), Ok(expected));
    }
    /// Asserts that printing `input` yields the `expected` source text.
    fn print_ok(input: Query, expected: &str) {
        assert_eq!(input.to_source(), expected.to_string());
    }
#[test] fn test_commits() {
let input = "commits";
let expected = one_feature(Feature::commits_simple());
parse_ok(input,expected);
}
#[test] fn test_commits_equal_something() {
let input = "commits == 42";
let expected = one_comparison(RelationalOperator::Equal,
Feature::commits_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_commits_different_something() {
let input = "commits != 42";
let expected = one_comparison(RelationalOperator::Different,
Feature::commits_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_commits_less_something() {
let input = "commits < 42";
let expected = one_comparison(RelationalOperator::Less,
Feature::commits_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_commits_less_equal_something() {
let input = "commits <= 42";
let expected = one_comparison(RelationalOperator::LessEqual,
Feature::commits_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_commits_greater_something() {
let input = "commits > 42";
let expected = one_comparison(RelationalOperator::Greater,
Feature::commits_simple(), 42);
parse_ok(input,expected);
}
// --- Parsing: `commits` feature -------------------------------------------
// Each test feeds a DSL string to `parse_ok(input, expected)` and compares
// against an AST built from the `Feature`/`Parameter`/`Property` test
// constructors. Covers the relational operators, `()` call forms, `path`
// filters, a trailing-comma tolerance case, and the `.elapsedTime` property.
#[test] fn test_commits_greater_equal_something() {
let input = "commits >= 42";
let expected = one_comparison(RelationalOperator::GreaterEqual,
Feature::commits_simple(), 42);
parse_ok(input,expected);
}
// Empty parentheses parse the same as the bare feature name.
#[test] fn test_commits_with_empty_parens() {
let input = "commits()";
let expected = one_feature(Feature::commits_simple());
parse_ok(input,expected);
}
#[test] fn test_commits_with_equal_path_filter() {
let input = r#"commits(path == "test/*")"#;
let expected = one_feature(Feature::commits_with_parameter(Parameter::path_equal_str("test/*")));
parse_ok(input, expected);
}
#[test] fn test_commits_with_different_path_filter() {
let input = r#"commits(path != "test/*")"#;
let expected = one_feature(Feature::commits_with_parameter(Parameter::path_different_str("test/*")));
parse_ok(input, expected);
}
// Filter plus each relational operator on the right-hand side.
#[test] fn test_commits_with_filter_equal_something() {
let input = r#"commits(path == "test/*") == 42"#;
let expected = one_comparison(RelationalOperator::Equal,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_commits_with_filter_different_something() {
let input = r#"commits(path == "test/*") != 42"#;
let expected = one_comparison(RelationalOperator::Different,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_commits_with_filter_less_something() {
let input = r#"commits(path == "test/*") < 42"#;
let expected = one_comparison(RelationalOperator::Less,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_commits_with_filter_less_equal_something() {
let input = r#"commits(path == "test/*") <= 42"#;
let expected = one_comparison(RelationalOperator::LessEqual,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_commits_with_filter_greater_something() {
let input = r#"commits(path == "test/*") > 42"#;
let expected = one_comparison(RelationalOperator::Greater,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_commits_with_filter_greater_equal_something() {
let input = r#"commits(path == "test/*") >= 42"#;
let expected = one_comparison(RelationalOperator::GreaterEqual,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
// A trailing comma inside the parameter list is accepted and ignored.
#[test] fn test_commits_with_extra_comma() {
let input = r#"commits(path != "test/*",)"#;
let expected = one_feature(Feature::commits_with_parameter(Parameter::path_different_str("test/*")));
parse_ok(input, expected);
}
#[test] fn test_commits_with_elapsed_time() {
let input = "commits.elapsedTime";
let expected = one_feature(Feature::commits_with_property(Property::ElapsedTime));
parse_ok(input, expected);
}
// Both the feature and the property may carry empty call parentheses.
#[test] fn test_commits_with_elapsed_time_and_empty_parens_() {
let input = "commits().elapsedTime()";
let expected = one_feature(Feature::commits_with_property(Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_commits_with_equal_path_filter_and_elapsed_time() {
let parameter = Parameter::path_equal_str("test/*");
let input = r#"commits(path == "test/*").elapsedTime"#;
let expected = one_feature(Feature::commits(vec![parameter], Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_commits_with_different_path_filter_and_elapsed_time() {
let parameter = Parameter::path_different_str("test/*");
let input = r#"commits(path != "test/*").elapsedTime"#;
let expected = one_feature(Feature::commits(vec![parameter], Property::ElapsedTime));
parse_ok(input, expected);
}
// --- Parsing: `additions` feature -----------------------------------------
// Mirrors the `commits` parsing suite for the `additions` feature: bare name,
// relational operators, path filters, trailing comma, and `.elapsedTime`.
#[test] fn test_additions() {
let input = "additions";
let expected = one_feature(Feature::additions_simple());
parse_ok(input, expected);
}
#[test] fn test_additions_equal_something() {
let input = "additions == 42";
let expected = one_comparison(RelationalOperator::Equal,
Feature::additions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_additions_different_something() {
let input = "additions != 42";
let expected = one_comparison(RelationalOperator::Different,
Feature::additions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_additions_less_something() {
let input = "additions < 42";
let expected = one_comparison(RelationalOperator::Less,
Feature::additions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_additions_less_equal_something() {
let input = "additions <= 42";
let expected = one_comparison(RelationalOperator::LessEqual,
Feature::additions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_additions_greater_something() {
let input = "additions > 42";
let expected = one_comparison(RelationalOperator::Greater,
Feature::additions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_additions_greater_equal_something() {
let input = "additions >= 42";
let expected = one_comparison(RelationalOperator::GreaterEqual,
Feature::additions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_additions_with_empty_parens() {
let input = "additions()";
let expected = one_feature(Feature::additions_simple());
parse_ok(input, expected);
}
#[test] fn test_additions_with_equal_path_filter() {
let input = r#"additions(path == "test/*")"#;
let expected = one_feature(Feature::additions_with_parameter(Parameter::path_equal_str("test/*")));
parse_ok(input, expected);
}
#[test] fn test_additions_with_different_path_filter() {
let input = r#"additions(path != "test/*")"#;
let expected = one_feature(Feature::additions_with_parameter(Parameter::path_different_str("test/*")));
parse_ok(input, expected);
}
#[test] fn test_additions_with_filter_equal_something() {
let input = r#"additions(path == "test/*") == 42"#;
let expected = one_comparison(RelationalOperator::Equal,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_additions_with_filter_different_something() {
let input = r#"additions(path == "test/*") != 42"#;
let expected = one_comparison(RelationalOperator::Different,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_additions_with_filter_less_something() {
let input = r#"additions(path == "test/*") < 42"#;
let expected = one_comparison(RelationalOperator::Less,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_additions_with_filter_less_equal_something() {
let input = r#"additions(path == "test/*") <= 42"#;
let expected = one_comparison(RelationalOperator::LessEqual,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_additions_with_filter_greater_something() {
let input = r#"additions(path == "test/*") > 42"#;
let expected = one_comparison(RelationalOperator::Greater,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_additions_with_filter_greater_equal_something() {
let input = r#"additions(path == "test/*") >= 42"#;
let expected = one_comparison(RelationalOperator::GreaterEqual,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_additions_with_extra_comma() {
let input = r#"additions(path != "test/*",)"#;
let expected = one_feature(Feature::additions_with_parameter(Parameter::path_different_str("test/*")));
parse_ok(input, expected);
}
// NOTE(review): the commits/deletions/changes suites build this expectation
// with `*_with_property(Property::ElapsedTime)`, while additions uses the
// general constructor with an empty parameter vec — presumably equivalent;
// confirm an `additions_with_property` helper exists before normalizing.
#[test] fn test_additions_with_elapsed_time() {
let input = "additions.elapsedTime";
let expected = one_feature(Feature::additions(vec![], Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_additions_with_elapsed_time_and_empty_parens_() {
let input = "additions().elapsedTime()";
let expected = one_feature(Feature::additions(vec![], Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_additions_with_equal_path_filter_and_elapsed_time() {
let parameter = Parameter::path_equal_str("test/*");
let input = r#"additions(path == "test/*").elapsedTime"#;
let expected = one_feature(Feature::additions(vec![parameter], Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_additions_with_different_path_filter_and_elapsed_time() {
let input = r#"additions(path != "test/*").elapsedTime"#;
let expected = one_feature(Feature::additions(vec![Parameter::path_different_str("test/*")],
Property::ElapsedTime));
parse_ok(input, expected);
}
// --- Parsing: `deletions` feature -----------------------------------------
// Mirrors the `commits` parsing suite for the `deletions` feature.
#[test] fn test_deletions() {
let input = "deletions";
let expected = one_feature(Feature::deletions_simple());
parse_ok(input, expected);
}
#[test] fn test_deletions_equal_something() {
let input = "deletions == 42";
let expected = one_comparison(RelationalOperator::Equal,
Feature::deletions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_deletions_different_something() {
let input = "deletions != 42";
let expected = one_comparison(RelationalOperator::Different,
Feature::deletions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_deletions_less_something() {
let input = "deletions < 42";
let expected = one_comparison(RelationalOperator::Less,
Feature::deletions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_deletions_less_equal_something() {
let input = "deletions <= 42";
let expected = one_comparison(RelationalOperator::LessEqual,
Feature::deletions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_deletions_greater_something() {
let input = "deletions > 42";
let expected = one_comparison(RelationalOperator::Greater,
Feature::deletions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_deletions_greater_equal_something() {
let input = "deletions >= 42";
let expected = one_comparison(RelationalOperator::GreaterEqual,
Feature::deletions_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_deletions_with_empty_parens() {
let input = "deletions()";
let expected = one_feature(Feature::deletions_simple());
parse_ok(input, expected);
}
#[test] fn test_deletions_with_equal_path_filter() {
let input = r#"deletions(path == "test/*")"#;
let expected = one_feature(Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")));
parse_ok(input, expected);
}
#[test] fn test_deletions_with_different_path_filter() {
let input = r#"deletions(path != "test/*")"#;
let expected = one_feature(Feature::deletions_with_parameter(Parameter::path_different_str("test/*")));
parse_ok(input, expected);
}
#[test] fn test_deletions_with_filter_equal_something() {
let input = r#"deletions(path == "test/*") == 42"#;
let expected = one_comparison(RelationalOperator::Equal,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_deletions_with_filter_different_something() {
let input = r#"deletions(path == "test/*") != 42"#;
let expected = one_comparison(RelationalOperator::Different,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_deletions_with_filter_less_something() {
let input = r#"deletions(path == "test/*") < 42"#;
let expected = one_comparison(RelationalOperator::Less,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_deletions_with_filter_less_equal_something() {
let input = r#"deletions(path == "test/*") <= 42"#;
let expected = one_comparison(RelationalOperator::LessEqual,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_deletions_with_filter_greater_something() {
let input = r#"deletions(path == "test/*") > 42"#;
let expected = one_comparison(RelationalOperator::Greater,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_deletions_with_filter_greater_equal_something() {
let input = r#"deletions(path == "test/*") >= 42"#;
let expected = one_comparison(RelationalOperator::GreaterEqual,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
// Trailing comma inside the parameter list is accepted and ignored.
#[test] fn test_deletions_with_extra_comma() {
let input = r#"deletions(path != "test/*",)"#;
let expected = one_feature(Feature::deletions_with_parameter(Parameter::path_different_str("test/*")));
parse_ok(input, expected);
}
#[test] fn test_deletions_with_elapsed_time() {
let input = "deletions.elapsedTime";
let expected = one_feature(Feature::deletions_with_property(Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_deletions_with_elapsed_time_and_empty_parens_() {
let input = "deletions().elapsedTime()";
let expected = one_feature(Feature::deletions_with_property(Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_deletions_with_equal_path_filter_and_elapsed_time() {
let input = r#"deletions(path == "test/*").elapsedTime"#;
let expected = one_feature(Feature::deletions(vec![Parameter::path_equal_str("test/*")],
Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_deletions_with_different_path_filter_and_elapsed_time() {
let input = r#"deletions(path != "test/*").elapsedTime"#;
let expected = one_feature(Feature::deletions(vec![Parameter::path_different_str("test/*")],
Property::ElapsedTime));
parse_ok(input, expected);
}
// --- Parsing: `changes` feature -------------------------------------------
// Mirrors the `commits` parsing suite for the `changes` feature.
#[test] fn test_changes() {
let input = "changes";
let expected = one_feature(Feature::changes_simple());
parse_ok(input, expected);
}
#[test] fn test_changes_equal_something() {
let input = "changes == 42";
let expected = one_comparison(RelationalOperator::Equal,
Feature::changes_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_changes_different_something() {
let input = "changes != 42";
let expected = one_comparison(RelationalOperator::Different,
Feature::changes_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_changes_less_something() {
let input = "changes < 42";
let expected = one_comparison(RelationalOperator::Less,
Feature::changes_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_changes_less_equal_something() {
let input = "changes <= 42";
let expected = one_comparison(RelationalOperator::LessEqual,
Feature::changes_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_changes_greater_something() {
let input = "changes > 42";
let expected = one_comparison(RelationalOperator::Greater,
Feature::changes_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_changes_greater_equal_something() {
let input = "changes >= 42";
let expected = one_comparison(RelationalOperator::GreaterEqual,
Feature::changes_simple(), 42);
parse_ok(input,expected);
}
#[test] fn test_changes_with_empty_parens() {
let input = "changes()";
let expected = one_feature(Feature::changes_simple());
parse_ok(input, expected);
}
#[test] fn test_changes_with_equal_path_filter() {
let input = r#"changes(path == "test/*")"#;
let expected = one_feature(Feature::changes_with_parameter(Parameter::path_equal_str("test/*")));
parse_ok(input, expected);
}
#[test] fn test_changes_with_different_path_filter() {
let input = r#"changes(path != "test/*")"#;
let expected = one_feature(Feature::changes_with_parameter(Parameter::path_different_str("test/*")));
parse_ok(input, expected);
}
#[test] fn test_changes_with_filter_equal_something() {
let input = r#"changes(path == "test/*") == 42"#;
let expected = one_comparison(RelationalOperator::Equal,
Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_changes_with_filter_different_something() {
let input = r#"changes(path == "test/*") != 42"#;
let expected = one_comparison(RelationalOperator::Different,
Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_changes_with_filter_less_something() {
let input = r#"changes(path == "test/*") < 42"#;
let expected = one_comparison(RelationalOperator::Less,
Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_changes_with_filter_less_equal_something() {
let input = r#"changes(path == "test/*") <= 42"#;
let expected = one_comparison(RelationalOperator::LessEqual,
Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_changes_with_filter_greater_something() {
let input = r#"changes(path == "test/*") > 42"#;
let expected = one_comparison(RelationalOperator::Greater,
Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
#[test] fn test_changes_with_filter_greater_equal_something() {
let input = r#"changes(path == "test/*") >= 42"#;
let expected = one_comparison(RelationalOperator::GreaterEqual,
Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
42);
parse_ok(input,expected);
}
// Trailing comma inside the parameter list is accepted and ignored.
#[test] fn test_changes_with_extra_comma() {
let input = r#"changes(path != "test/*",)"#;
let expected = one_feature(Feature::changes_with_parameter(Parameter::path_different_str("test/*")));
parse_ok(input, expected);
}
#[test] fn test_changes_with_elapsed_time() {
let input = "changes.elapsedTime";
let expected = one_feature(Feature::changes_with_property(Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_changes_with_elapsed_time_and_empty_parens_() {
let input = "changes().elapsedTime()";
let expected = one_feature(Feature::changes_with_property(Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_changes_with_equal_path_filter_and_elapsed_time() {
let input = r#"changes(path == "test/*").elapsedTime"#;
let expected = one_feature(Feature::changes(vec![Parameter::path_equal_str("test/*")],
Property::ElapsedTime));
parse_ok(input, expected);
}
#[test] fn test_changes_with_different_path_filter_and_elapsed_time() {
let input = r#"changes(path != "test/*").elapsedTime"#;
let expected = one_feature(Feature::changes(vec![Parameter::path_different_str("test/*")],
Property::ElapsedTime));
parse_ok(input, expected);
}
// --- Parsing: `and` connector ---------------------------------------------
// Conjunctions of bare features and of comparisons, with two and three
// operands, built via the `feature_conjunction`/`comparison_conjunction`
// test helpers.
#[test] fn test_and_connector_2() {
let input = r#"commits and changes"#;
let expected = feature_conjunction(vec![Feature::commits_simple(),
Feature::changes_simple()]);
parse_ok(input, expected);
}
#[test] fn test_and_connector_3() {
let input = r#"commits and changes and additions"#;
let expected = feature_conjunction(vec![Feature::commits_simple(),
Feature::changes_simple(),
Feature::additions_simple()]);
parse_ok(input, expected);
}
#[test] fn test_and_connector_comparisons_2() {
let input = r#"commits == 42 and changes != 42"#;
let expected = comparison_conjunction(
vec![(Feature::commits_simple(), RelationalOperator::Equal, 42),
(Feature::changes_simple(), RelationalOperator::Different, 42)]);
parse_ok(input, expected);
}
#[test] fn test_and_connector_comparisons_3() {
let input = r#"commits == 42 and changes != 42 and additions < 42"#;
let expected = comparison_conjunction(
vec![(Feature::commits_simple(), RelationalOperator::Equal, 42),
(Feature::changes_simple(), RelationalOperator::Different, 42),
(Feature::additions_simple(), RelationalOperator::Less, 42)]);
parse_ok(input, expected);
}
//------------------------------------------------------------------------
// Printing
//------------------------------------------------------------------------
// --- Printing: `commits` feature ------------------------------------------
// Inverse direction of the parsing suite: build the AST with the test
// constructors and check `print_ok(input, expected)` renders the canonical
// DSL string (note `input`/`expected` roles are swapped versus parsing).
#[test] fn print_commits() {
let expected = "commits";
let input = one_feature(Feature::commits_simple());
print_ok(input,expected);
}
#[test] fn print_commits_equal_something() {
let expected = "commits == 42";
let input = one_comparison(RelationalOperator::Equal,
Feature::commits_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_commits_different_something() {
let expected = "commits != 42";
let input = one_comparison(RelationalOperator::Different,
Feature::commits_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_commits_less_something() {
let expected = "commits < 42";
let input = one_comparison(RelationalOperator::Less,
Feature::commits_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_commits_less_equal_something() {
let expected = "commits <= 42";
let input = one_comparison(RelationalOperator::LessEqual,
Feature::commits_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_commits_greater_something() {
let expected = "commits > 42";
let input = one_comparison(RelationalOperator::Greater,
Feature::commits_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_commits_greater_equal_something() {
let expected = "commits >= 42";
let input = one_comparison(RelationalOperator::GreaterEqual,
Feature::commits_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_commits_with_equal_path_filter() {
let expected = r#"commits(path == "test/*")"#;
let input = one_feature(Feature::commits_with_parameter(Parameter::path_equal_str("test/*")));
print_ok(input, expected);
}
#[test] fn print_commits_with_different_path_filter() {
let expected = r#"commits(path != "test/*")"#;
let input = one_feature(Feature::commits_with_parameter(Parameter::path_different_str("test/*")));
print_ok(input, expected);
}
#[test] fn print_commits_with_filter_equal_something() {
let expected = r#"commits(path == "test/*") == 42"#;
let input = one_comparison(RelationalOperator::Equal,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_commits_with_filter_different_something() {
let expected = r#"commits(path == "test/*") != 42"#;
let input = one_comparison(RelationalOperator::Different,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_commits_with_filter_less_something() {
let expected = r#"commits(path == "test/*") < 42"#;
let input = one_comparison(RelationalOperator::Less,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_commits_with_filter_less_equal_something() {
let expected = r#"commits(path == "test/*") <= 42"#;
let input = one_comparison(RelationalOperator::LessEqual,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_commits_with_filter_greater_something() {
let expected = r#"commits(path == "test/*") > 42"#;
let input = one_comparison(RelationalOperator::Greater,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_commits_with_filter_greater_equal_something() {
let expected = r#"commits(path == "test/*") >= 42"#;
let input = one_comparison(RelationalOperator::GreaterEqual,
Feature::commits_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_commits_with_elapsed_time() {
let expected = "commits.elapsedTime";
let input = one_feature(Feature::commits_with_property(Property::ElapsedTime));
print_ok(input, expected);
}
#[test] fn print_commits_with_equal_path_filter_and_elapsed_time() {
let parameter = Parameter::path_equal_str("test/*");
let expected = r#"commits(path == "test/*").elapsedTime"#;
let input = one_feature(Feature::commits(vec![parameter], Property::ElapsedTime));
print_ok(input, expected);
}
#[test] fn print_commits_with_different_path_filter_and_elapsed_time() {
let parameter = Parameter::path_different_str("test/*");
let expected = r#"commits(path != "test/*").elapsedTime"#;
let input = one_feature(Feature::commits(vec![parameter], Property::ElapsedTime));
print_ok(input, expected);
}
// --- Printing: `additions` feature ----------------------------------------
// Mirrors the `commits` printing suite for the `additions` feature.
#[test] fn print_additions() {
let expected = "additions";
let input = one_feature(Feature::additions_simple());
print_ok(input, expected);
}
#[test] fn print_additions_equal_something() {
let expected = "additions == 42";
let input = one_comparison(RelationalOperator::Equal,
Feature::additions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_additions_different_something() {
let expected = "additions != 42";
let input = one_comparison(RelationalOperator::Different,
Feature::additions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_additions_less_something() {
let expected = "additions < 42";
let input = one_comparison(RelationalOperator::Less,
Feature::additions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_additions_less_equal_something() {
let expected = "additions <= 42";
let input = one_comparison(RelationalOperator::LessEqual,
Feature::additions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_additions_greater_something() {
let expected = "additions > 42";
let input = one_comparison(RelationalOperator::Greater,
Feature::additions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_additions_greater_equal_something() {
let expected = "additions >= 42";
let input = one_comparison(RelationalOperator::GreaterEqual,
Feature::additions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_additions_with_equal_path_filter() {
let expected = r#"additions(path == "test/*")"#;
let input = one_feature(Feature::additions_with_parameter(Parameter::path_equal_str("test/*")));
print_ok(input, expected);
}
#[test] fn print_additions_with_different_path_filter() {
let expected = r#"additions(path != "test/*")"#;
let input = one_feature(Feature::additions_with_parameter(Parameter::path_different_str("test/*")));
print_ok(input, expected);
}
#[test] fn print_additions_with_filter_equal_something() {
let expected = r#"additions(path == "test/*") == 42"#;
let input = one_comparison(RelationalOperator::Equal,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_additions_with_filter_different_something() {
let expected = r#"additions(path == "test/*") != 42"#;
let input = one_comparison(RelationalOperator::Different,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_additions_with_filter_less_something() {
let expected = r#"additions(path == "test/*") < 42"#;
let input = one_comparison(RelationalOperator::Less,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_additions_with_filter_less_equal_something() {
let expected = r#"additions(path == "test/*") <= 42"#;
let input = one_comparison(RelationalOperator::LessEqual,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_additions_with_filter_greater_something() {
let expected = r#"additions(path == "test/*") > 42"#;
let input = one_comparison(RelationalOperator::Greater,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_additions_with_filter_greater_equal_something() {
let expected = r#"additions(path == "test/*") >= 42"#;
let input = one_comparison(RelationalOperator::GreaterEqual,
Feature::additions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
// NOTE(review): same asymmetry as the parsing side — additions builds the
// elapsed-time feature via the general constructor, not `*_with_property`.
#[test] fn print_additions_with_elapsed_time() {
let expected = "additions.elapsedTime";
let input = one_feature(Feature::additions(vec![], Property::ElapsedTime));
print_ok(input, expected);
}
#[test] fn print_additions_with_equal_path_filter_and_elapsed_time() {
let parameter = Parameter::path_equal_str("test/*");
let expected = r#"additions(path == "test/*").elapsedTime"#;
let input = one_feature(Feature::additions(vec![parameter], Property::ElapsedTime));
print_ok(input, expected);
}
#[test] fn print_additions_with_different_path_filter_and_elapsed_time() {
let expected = r#"additions(path != "test/*").elapsedTime"#;
let input = one_feature(Feature::additions(vec![Parameter::path_different_str("test/*")],
Property::ElapsedTime));
print_ok(input, expected);
}
// --- Printing: `deletions` feature ----------------------------------------
// Mirrors the `commits` printing suite for the `deletions` feature.
#[test] fn print_deletions() {
let expected = "deletions";
let input = one_feature(Feature::deletions_simple());
print_ok(input, expected);
}
#[test] fn print_deletions_equal_something() {
let expected = "deletions == 42";
let input = one_comparison(RelationalOperator::Equal,
Feature::deletions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_deletions_different_something() {
let expected = "deletions != 42";
let input = one_comparison(RelationalOperator::Different,
Feature::deletions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_deletions_less_something() {
let expected = "deletions < 42";
let input = one_comparison(RelationalOperator::Less,
Feature::deletions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_deletions_less_equal_something() {
let expected = "deletions <= 42";
let input = one_comparison(RelationalOperator::LessEqual,
Feature::deletions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_deletions_greater_something() {
let expected = "deletions > 42";
let input = one_comparison(RelationalOperator::Greater,
Feature::deletions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_deletions_greater_equal_something() {
let expected = "deletions >= 42";
let input = one_comparison(RelationalOperator::GreaterEqual,
Feature::deletions_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_deletions_with_equal_path_filter() {
let expected = r#"deletions(path == "test/*")"#;
let input = one_feature(Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")));
print_ok(input, expected);
}
#[test] fn print_deletions_with_different_path_filter() {
let expected = r#"deletions(path != "test/*")"#;
let input = one_feature(Feature::deletions_with_parameter(Parameter::path_different_str("test/*")));
print_ok(input, expected);
}
#[test] fn print_deletions_with_filter_equal_something() {
let expected = r#"deletions(path == "test/*") == 42"#;
let input = one_comparison(RelationalOperator::Equal,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_deletions_with_filter_different_something() {
let expected = r#"deletions(path == "test/*") != 42"#;
let input = one_comparison(RelationalOperator::Different,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_deletions_with_filter_less_something() {
let expected = r#"deletions(path == "test/*") < 42"#;
let input = one_comparison(RelationalOperator::Less,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_deletions_with_filter_less_equal_something() {
let expected = r#"deletions(path == "test/*") <= 42"#;
let input = one_comparison(RelationalOperator::LessEqual,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_deletions_with_filter_greater_something() {
let expected = r#"deletions(path == "test/*") > 42"#;
let input = one_comparison(RelationalOperator::Greater,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_deletions_with_filter_greater_equal_something() {
let expected = r#"deletions(path == "test/*") >= 42"#;
let input = one_comparison(RelationalOperator::GreaterEqual,
Feature::deletions_with_parameter(Parameter::path_equal_str("test/*")),
42);
print_ok(input,expected);
}
#[test] fn print_deletions_with_elapsed_time() {
let expected = "deletions.elapsedTime";
let input = one_feature(Feature::deletions_with_property(Property::ElapsedTime));
print_ok(input, expected);
}
#[test] fn print_deletions_with_equal_path_filter_and_elapsed_time() {
let expected = r#"deletions(path == "test/*").elapsedTime"#;
let input = one_feature(Feature::deletions(vec![Parameter::path_equal_str("test/*")],
Property::ElapsedTime));
print_ok(input, expected);
}
#[test] fn print_deletions_with_different_path_filter_and_elapsed_time() {
let expected = r#"deletions(path != "test/*").elapsedTime"#;
let input = one_feature(Feature::deletions(vec![Parameter::path_different_str("test/*")],
Property::ElapsedTime));
print_ok(input, expected);
}
#[test] fn print_changes() {
let expected = "changes";
let input = one_feature(Feature::changes_simple());
print_ok(input, expected);
}
#[test] fn print_changes_equal_something() {
let expected = "changes == 42";
let input = one_comparison(RelationalOperator::Equal,
Feature::changes_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_changes_different_something() {
let expected = "changes != 42";
let input = one_comparison(RelationalOperator::Different,
Feature::changes_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_changes_less_something() {
let expected = "changes < 42";
let input = one_comparison(RelationalOperator::Less,
Feature::changes_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_changes_less_equal_something() {
let expected = "changes <= 42";
let input = one_comparison(RelationalOperator::LessEqual,
Feature::changes_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_changes_greater_something() {
let expected = "changes > 42";
let input = one_comparison(RelationalOperator::Greater,
Feature::changes_simple(), 42);
print_ok(input,expected);
}
#[test] fn print_changes_greater_equal_something() {
let expected = "changes >= 42";
let input = one_comparison(RelationalOperator::GreaterEqual,
Feature::changes_simple(), 42);
print_ok(input,expected);
}
// Printing with a path-filter parameter: the filter renders in parentheses
// directly after the feature name, before any comparison operator.
#[test] fn print_changes_with_equal_path_filter() {
    print_ok(
        one_feature(Feature::changes_with_parameter(Parameter::path_equal_str("test/*"))),
        r#"changes(path == "test/*")"#,
    );
}
#[test] fn print_changes_with_different_path_filter() {
    print_ok(
        one_feature(Feature::changes_with_parameter(Parameter::path_different_str("test/*"))),
        r#"changes(path != "test/*")"#,
    );
}
#[test] fn print_changes_with_filter_equal_something() {
    print_ok(
        one_comparison(RelationalOperator::Equal,
                       Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
                       42),
        r#"changes(path == "test/*") == 42"#,
    );
}
#[test] fn print_changes_with_filter_different_something() {
    print_ok(
        one_comparison(RelationalOperator::Different,
                       Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
                       42),
        r#"changes(path == "test/*") != 42"#,
    );
}
#[test] fn print_changes_with_filter_less_something() {
    print_ok(
        one_comparison(RelationalOperator::Less,
                       Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
                       42),
        r#"changes(path == "test/*") < 42"#,
    );
}
#[test] fn print_changes_with_filter_less_equal_something() {
    print_ok(
        one_comparison(RelationalOperator::LessEqual,
                       Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
                       42),
        r#"changes(path == "test/*") <= 42"#,
    );
}
#[test] fn print_changes_with_filter_greater_something() {
    print_ok(
        one_comparison(RelationalOperator::Greater,
                       Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
                       42),
        r#"changes(path == "test/*") > 42"#,
    );
}
#[test] fn print_changes_with_filter_greater_equal_something() {
    print_ok(
        one_comparison(RelationalOperator::GreaterEqual,
                       Feature::changes_with_parameter(Parameter::path_equal_str("test/*")),
                       42),
        r#"changes(path == "test/*") >= 42"#,
    );
}
// A property access renders with dot syntax after the feature name.
#[test] fn print_changes_with_elapsed_time() {
    print_ok(
        one_feature(Feature::changes_with_property(Property::ElapsedTime)),
        "changes.elapsedTime",
    );
}
// Combined filter + property rendering, then `and`-connected conjunctions.
#[test] fn print_changes_with_equal_path_filter_and_elapsed_time() {
    print_ok(
        one_feature(Feature::changes(vec![Parameter::path_equal_str("test/*")],
                                     Property::ElapsedTime)),
        r#"changes(path == "test/*").elapsedTime"#,
    );
}
#[test] fn print_changes_with_different_path_filter_and_elapsed_time() {
    print_ok(
        one_feature(Feature::changes(vec![Parameter::path_different_str("test/*")],
                                     Property::ElapsedTime)),
        r#"changes(path != "test/*").elapsedTime"#,
    );
}
// Conjunctions of bare features print with the infix `and` keyword.
#[test] fn print_and_connector_2() {
    print_ok(
        feature_conjunction(vec![Feature::commits_simple(), Feature::changes_simple()]),
        r#"commits and changes"#,
    );
}
#[test] fn print_and_connector_3() {
    print_ok(
        feature_conjunction(vec![Feature::commits_simple(),
                                 Feature::changes_simple(),
                                 Feature::additions_simple()]),
        r#"commits and changes and additions"#,
    );
}
// Conjunctions of comparisons keep each operand's own operator and literal.
#[test] fn print_and_connector_comparisons_2() {
    print_ok(
        comparison_conjunction(vec![
            (Feature::commits_simple(), RelationalOperator::Equal, 42),
            (Feature::changes_simple(), RelationalOperator::Different, 42),
        ]),
        r#"commits == 42 and changes != 42"#,
    );
}
#[test] fn print_and_connector_comparisons_3() {
    print_ok(
        comparison_conjunction(vec![
            (Feature::commits_simple(), RelationalOperator::Equal, 42),
            (Feature::changes_simple(), RelationalOperator::Different, 42),
            (Feature::additions_simple(), RelationalOperator::Less, 42),
        ]),
        r#"commits == 42 and changes != 42 and additions < 42"#,
    );
}
} |
use crate::common::*;
use crate::parse::Rule;
use codespan::{FileId, Span};
use codespan_reporting::diagnostic::{Diagnostic, Label};
use codespan_reporting::term::emit;
use pest::error::InputLocation;
/// Convenience alias: every fallible operation in this module yields `Error`.
pub type Result<T> = std::result::Result<T, Error>;
/// A compiler-style error: a thin wrapper over a `codespan_reporting`
/// `Diagnostic` so it can be rendered with labeled source spans.
#[derive(Debug, Clone)]
pub struct Error(Diagnostic);
impl Error {
/// Builds an error diagnostic with a primary message and a single labeled
/// span in `file`; `secondary` is the label text shown at the span.
pub fn new(
file: FileId,
primary: impl Into<String>,
span: impl Into<Span>,
secondary: impl Into<String>,
) -> Self {
let d = Diagnostic::new_error(primary, Label::new(file, span, secondary));
Error(d)
}
/// Translates a pest parse error into a span-labeled diagnostic for `file`.
pub fn from_pest(pest_err: pest::error::Error<Rule>, file: FileId) -> Self {
match &pest_err {
// A genuine parsing failure: report which rules were expected and/or
// which were forbidden at the failure location.
pest::error::Error {
variant:
pest::error::ErrorVariant::ParsingError {
positives,
negatives,
},
location,
..
} => {
// Build a comma-separated list of expected rules with "or" before the
// final item.
// NOTE(review): with exactly two rules this renders "a, or b" (comma
// before "or") — confirm that is the intended style.
let mut expected = String::new();
let mut unexpected = String::new();
let p_len = positives.len();
for (n, i) in positives.iter().enumerate() {
if !expected.is_empty() {
expected.push_str(", ");
if n == p_len - 1 {
expected.push_str("or ");
}
}
expected.push_str(i.format());
}
// Same list-building for the rules that must NOT match here.
let n_len = negatives.len();
for (n, i) in negatives.iter().enumerate() {
if !unexpected.is_empty() {
unexpected.push_str(", ");
if n == n_len - 1 {
unexpected.push_str("or ");
}
}
unexpected.push_str(i.format());
}
// Pest reports either a span or a single position; normalize to a pair.
let (start, end) = match location {
InputLocation::Span(t) => *t,
InputLocation::Pos(u) => (*u, *u),
};
// Compose "unexpected …, expected …" using whichever lists are non-empty.
let message = if !unexpected.is_empty() && !expected.is_empty() {
format!("unexpected {}, expected {}", unexpected, expected)
} else if !unexpected.is_empty() {
format!("unexpected {}", unexpected)
} else {
format!("expected {}", expected)
};
let span = Span::new(start as u32, end as u32);
Error::new(file, format!("Parse error: {}", message), span, message)
}
// Any other pest error variant: fall back to pest's own rendering,
// labeled at the reported location.
pest::error::Error { location, .. } => {
let (start, end) = match location {
InputLocation::Span(t) => *t,
InputLocation::Pos(u) => (*u, *u),
};
let span = Span::new(start as u32, end as u32);
Error::new(file, format!("Pest error:\n{}", pest_err), span, "Here")
}
}
}
/// Emits the diagnostic through the globally configured `WRITER`, using the
/// shared `CONFIG` and `FILES` registries.
pub fn write(&self) -> std::io::Result<()> {
emit(
&mut *WRITER.write().unwrap(),
&CONFIG,
&FILES.read().unwrap(),
&self.0,
)
}
}
|
use crate::parser::parser_defs;
use crate::parser::svg_util;
use crate::regex_defs;
use crate::parser::svg_commands::bezier;
use crate::parser::svg_commands::line;
use crate::parser::path_defs;
use regex::*;
const BEZIER_RESOLUTION: f32 = 0.01;
/// Parses an SVG `<path>` element into an `SVGShape` polyline.
///
/// Reads the `d` attribute for geometry and `stroke` for color; all other
/// attributes are ignored. Returns `Err` when the path data is malformed or
/// when no usable `d` attribute is present (previously both cases panicked).
pub fn parse_svg_path(element: &quick_xml::events::BytesStart) -> Result<parser_defs::SVGShape, String> {
    let attribute_list = element.attributes();
    let mut color: parser_defs::SVGColor = parser_defs::SVGColor {r: 0, g: 0, b: 0, a: 0};
    let mut position: Option<parser_defs::SVGPoint> = None;
    let mut points: Vec<parser_defs::SVGPoint> = Vec::new();
    for attribute in attribute_list {
        let att = attribute.unwrap();
        match att.key {
            b"d" => {
                let data = &String::from_utf8(att.value.to_vec()).unwrap();
                // TODO: Lazy Statics so this doesnt get compiled everytime this is called. - Austin Haskell
                let regex = Regex::new(regex_defs::SVG_COMMAND_GROUPING).unwrap();
                let mut parsed_items: Vec<&str> = Vec::new();
                for i in regex.find_iter(data) {
                    parsed_items.push(&data[i.start()..i.end()]);
                }
                // Guard: indexing parsed_items[0] used to panic on an empty `d` attribute.
                if parsed_items.is_empty() {
                    return Err(String::from("Error: No position data found (M), svg file is considered malformed. "));
                }
                let start = parse_position(parsed_items[0])?;
                position = Some(start);
                points = parse_points(&parsed_items, start)?;
                // TODO: Functions like transform, scale, etc that show up after data points. - Austin Haskell
                // Note: I think some refactoring is going to be needed to do the above todo.
            },
            b"stroke" => color = parser_defs::map_color(&String::from_utf8(att.value.to_vec()).unwrap()),
            // Other attributes are ignored.
            _ => {}
        }
    }
    // Guard: without a `d` attribute there is no start position; this was an
    // unconditional unwrap (panic) before.
    let position = position.ok_or_else(|| {
        String::from("Error: No position data found (M), svg file is considered malformed. ")
    })?;
    Ok(parser_defs::SVGShape {
        points,
        position,
        shape_type: String::from("polyline"),
        color,
    })
}
// Note: It is very inefficient to search through this twice; this could be parsed at the same
// time as the point data to avoid the extra loop. - Austin Haskell
// Extracts the starting position from the first path command string (the
// leading `M`/`m`); slicing starts at 1 to skip the command letter.
// NOTE(review): each delimiter match pushes the text *before* it, so the token
// after the last delimiter is only captured by the `items.len() < 2` fallback.
// Whether anything is dropped depends on regex_defs::COMMA_OR_SPACE (not
// visible here) — confirm against real `M x y` inputs.
fn parse_position(data: &str) -> Result<parser_defs::SVGPoint, String> {
let delimiter = Regex::new(regex_defs::COMMA_OR_SPACE).unwrap();
let mut items: Vec<&str> = Vec::new();
let mut pos = 1;
for i in delimiter.find_iter(data) {
items.push(&data[pos..i.end()-1]);
pos = i.start() + 1;
}
if items.len() < 2 {
// Maybe need to go to eol and the line doesnt have a space or letter at the end.
items.push(&data[pos..]);
}
if items.len() == 2 {
let x: f32 = svg_util::parse_possible_float(items[0]);
let y: f32 = svg_util::parse_possible_float(items[1]);
return Ok(parser_defs::SVGPoint {
x: x,
y: y
})
}
Err(String::from("Error: No position data found (M), svg file is considered malformed. "))
}
// Flattens the parsed SVG path command strings into a polyline point list,
// starting from `position`. Curve commands are approximated by sampling the
// bezier parameter t in steps of BEZIER_RESOLUTION; commands without an
// implementation are logged to stdout and skipped.
// NOTE(review): `last_point` is only advanced by the line-command results at
// the bottom of the loop, while the move/bezier arms update
// `current_position` instead — confirm that mixing line and curve commands
// anchors relative lines where intended.
fn parse_points(data: &Vec<&str>, position: parser_defs::SVGPoint) -> Result<Vec<parser_defs::SVGPoint>, String> {
let mut point_list: Vec<parser_defs::SVGPoint> = Vec::new();
let regex = Regex::new(regex_defs::SEPERATE_TWO_NUMBERS).unwrap();
// The path implicitly starts at the `M`/`m` position.
let mut last_point: parser_defs::SVGPoint = position;
point_list.push(last_point);
let mut current_position = position;
// Tracked so the shorthand cubic commands (S/s) can reflect it.
let mut last_control_point: Option<(f32, f32)> = None;
let mut close_path: bool = false;
for item in data {
// TODO: Clean this up a bit - Austin Haskell
// Tokenize this command's arguments into numeric substrings...
let mut str_coordanates: Vec<&str> = Vec::new();
for i in regex.find_iter(item) {
let item_data: &str = &item[i.start()..i.end()];
if item_data == "" {
continue;
}
str_coordanates.push(item_data);
}
// ...then parse them all to f32.
let mut coordanates: Vec<f32> = Vec::new();
for coord in str_coordanates {
coordanates.push(svg_util::parse_possible_float(coord));
}
// Line-style commands produce a single `point` that is pushed after the
// match; move and bezier commands mutate state / push points directly.
let mut point: Option<parser_defs::SVGPoint> = None;
match item.chars().nth(0).unwrap() {
path_defs::MOVE_ABSOLUTE => {
let x: f32 = coordanates[0];
let y: f32 = coordanates[1];
current_position.x = x;
current_position.y = y;
},
path_defs::MOVE_RELATIVE => {
let x: f32 = coordanates[0];
let y: f32 = coordanates[1];
current_position.x += x;
current_position.y += y;
}
path_defs::LINE_RELATIVE => {
let x: f32 = coordanates[0];
let y: f32 = coordanates[1];
point = Some(line::line_relative((x, y), (last_point.x, last_point.y)));
},
path_defs::LINE_ABSOLUTE => {
let x: f32 = coordanates[0];
let y: f32 = coordanates[1];
point = Some(line::line_absolute((x, y)));
},
path_defs::HORIZONTAL_RELATIVE => {
let x: f32 = coordanates[0];
point = Some(line::horizontal_line_relative(x, (last_point.x, last_point.y)));
},
path_defs::HORIZONTAL_ABSOLUTE => {
let x: f32 = coordanates[0];
point = Some(line::horizontal_line_absolute((x, last_point.y)));
},
path_defs::VERTICAL_RELATIVE => {
let y: f32 = coordanates[0];
point = Some(line::vertical_line_relative(y, (last_point.x, last_point.y)));
},
path_defs::VERTICAL_ABSOLUTE => {
let y: f32 = coordanates[0];
point = Some(line::vertical_line_absolute((last_point.x, y)));
}
// TODO: Refactor the bezier functions -Austin Haskell
// C: one or more groups of three absolute (control, control, end) points;
// each group is sampled into segments, then its end point is pushed.
path_defs::CUBIC_BEZIER_ABSOLUTE => {
if coordanates.len() % 2 != 0 {
return Err(String::from("Error: Absolute Bezier curve has an insufficiant point count"));
}
let points = svg_util::create_xy_point_list(&coordanates);
let repeat_command_groups: i32 = (points.len() as i32) / path_defs::CUBIC_POINTS_PER_GROUP;
for group in 1..=repeat_command_groups {
// Index of this group's first point within `points`.
let group_offset: usize = ((path_defs::CUBIC_POINTS_PER_GROUP * group) - path_defs::CUBIC_POINTS_PER_GROUP) as usize;
let mut t: f32 = 0.0;
while t < 1.0 {
point_list.push(
bezier::calculate_cubic_bezier(
(current_position.x, current_position.y),
points[0 + group_offset],
points[1 + group_offset],
points[2 + group_offset], t));
t += BEZIER_RESOLUTION;
}
last_control_point = Some(points[1 + group_offset]);
current_position.x = points[2 + group_offset].0;
current_position.y = points[2 + group_offset].1;
point_list.push(parser_defs::SVGPoint {
x: current_position.x,
y: current_position.y
}
);
}
},
// c: same as C, but every point is an offset from the current position.
path_defs::CUBIC_BEZIER_RELATIVE => {
if coordanates.len() % 2 != 0 {
return Err(String::from("Error: Relative Bezier curve has an insufficiant point count"));
}
let points = svg_util::create_xy_point_list(&coordanates);
let repeat_command_groups: i32 = (points.len() as i32) / path_defs::CUBIC_POINTS_PER_GROUP;
for group in 1..=repeat_command_groups {
let group_offset: usize = ((path_defs::CUBIC_POINTS_PER_GROUP * group) - path_defs::CUBIC_POINTS_PER_GROUP) as usize;
let mut t: f32 = 0.0;
while t < 1.0 {
point_list.push(
bezier::calculate_cubic_bezier(
(current_position.x, current_position.y),
(points[0 + group_offset].0 + current_position.x, points[0 + group_offset].1 + current_position.y),
(points[1 + group_offset].0 + current_position.x, points[1 + group_offset].1 + current_position.y),
(points[2 + group_offset].0 + current_position.x, points[2 + group_offset].1 + current_position.y), t));
t += BEZIER_RESOLUTION;
}
// NOTE(review): unlike the absolute arm above, `current_position` is
// advanced *before* `last_control_point` is derived from it — confirm
// the control point reflected by a following S/s is the intended one.
current_position.x += points[2 + group_offset].0;
current_position.y += points[2 + group_offset].1;
last_control_point = Some((points[1 + group_offset].0 + current_position.x, points[1 + group_offset].1 + current_position.y));
point_list.push(parser_defs::SVGPoint {
x: current_position.x,
y: current_position.y
});
}
},
// S: shorthand cubic — the first control point is the reflection of the
// previous curve's control point about the current position.
path_defs::SHORTHAND_CUBIC_BEZIER_ABSOLUTE => {
if last_control_point.is_none() {
return Err(String::from("Error: Shorthand abolute bezier curve was used before a previous control point could be established. "));
}
if coordanates.len() % 2 != 0 {
return Err(String::from("Error: Shorthand absolute Bezier curve has an insufficiant point count"));
}
let points = svg_util::create_xy_point_list(&coordanates);
let repeat_command_groups: i32 = (points.len() as i32) / path_defs::SHORTHAND_CUBIC_POINTS_PER_GROUP as i32;
for group in 1..=repeat_command_groups {
let group_offset: usize = ((path_defs::SHORTHAND_CUBIC_POINTS_PER_GROUP * group) - path_defs::SHORTHAND_CUBIC_POINTS_PER_GROUP) as usize;
let mut t: f32 = 0.0;
while t < 1.0 {
point_list.push(
bezier::calculate_cubic_bezier(
(current_position.x, current_position.y),
bezier::calculate_reflected_control_point(last_control_point.unwrap(), (current_position.x, current_position.y)),
(points[0 + group_offset].0, points[0 + group_offset].1),
(points[1 + group_offset].0, points[1 + group_offset].1), t));
t += BEZIER_RESOLUTION;
}
current_position.x = points[1 + group_offset].0;
current_position.y = points[1 + group_offset].1;
last_control_point = Some(points[0 + group_offset]);
point_list.push(parser_defs::SVGPoint {
x: current_position.x,
y: current_position.y
});
}
},
// s: relative variant of the shorthand cubic above.
path_defs::SHORTHAND_CUBIC_BEZIER_RELATIVE => {
if last_control_point.is_none() {
return Err(String::from("Error: Shorthand reelative bezier curve was used before a previous control point could be established. "));
}
if coordanates.len() % 2 != 0 {
return Err(String::from("Error: Shorthand relative Bezier curve has an insufficiant point count"));
}
let points = svg_util::create_xy_point_list(&coordanates);
let repeat_command_groups: i32 = (points.len() as i32) / path_defs::SHORTHAND_CUBIC_POINTS_PER_GROUP as i32;
for group in 1..=repeat_command_groups {
let group_offset: usize = ((path_defs::SHORTHAND_CUBIC_POINTS_PER_GROUP * group) - path_defs::SHORTHAND_CUBIC_POINTS_PER_GROUP) as usize;
let mut t: f32 = 0.0;
while t < 1.0 {
point_list.push(
bezier::calculate_cubic_bezier(
(current_position.x, current_position.y),
bezier::calculate_reflected_control_point(last_control_point.unwrap(), (current_position.x, current_position.y)),
(points[0 + group_offset].0 + current_position.x, points[0 + group_offset].1 + current_position.y),
(points[1 + group_offset].0 + current_position.x, points[1 + group_offset].1 + current_position.y), t));
t += BEZIER_RESOLUTION;
}
last_control_point = Some((points[0 + group_offset].0 + current_position.x, points[0 + group_offset].1 + current_position.y));
current_position.x += points[1 + group_offset].0;
current_position.y += points[1 + group_offset].1;
point_list.push(parser_defs::SVGPoint {
x: current_position.x,
y: current_position.y
});
}
},
// Q/q, T/t, A/a: not implemented yet — logged and skipped.
path_defs::QUADRATIC_BEZIER_ABSOLUTE => {
println!("No implementation currently exists for quadratic bezier (Q)");
},
path_defs::QUADRATIC_BEZIER_RELATIVE => {
println!("No implementation currently exists for quadratic bezier (q)");
/*
if coordanates.len() % 2 != 0 {
return Err(String::from("Error: Absolute Bezier curve has an insufficiant point count"));
}
let points = svg_util::create_xy_point_list(&coordanates);
let repeat_command_groups: i32 = (points.len() as i32) / path_defs::QUADRATIC_POINTS_PER_GROUP as i32;
for group in 1..=repeat_command_groups {
let group_offset: usize = ((path_defs::QUADRATIC_POINTS_PER_GROUP * group) - path_defs::QUADRATIC_POINTS_PER_GROUP) as usize;
let mut t: f32 = 0.0;
while t < 1.0 {
point_list.push(
bezier::calculate_quadratic_bezier(
(current_position.x, current_position.y),
(points[0 + group_offset].0 + current_position.x, points[0 + group_offset].1 + current_position.y),
(points[1 + group_offset].0 + current_position.x, points[1 + group_offset].1 + current_position.y),
t));
t += BEZIER_RESOLUTION;
}
current_position.x = points[1 + group_offset].0 + current_position.x;
current_position.y = points[1 + group_offset].1 + current_position.y;
last_control_point = Some((points[0 + group_offset].0 + current_position.x, points[0 + group_offset].1 + current_position.y));
point_list.push(parser_defs::SVGPoint {
x: current_position.x,
y: current_position.y
}
);
}*/
},
path_defs::SHORTHAND_QUADRATIC_BEZIER_ABSOLUTE => {
println!("No implementation currently exists for shorthand quadratic bezier (T)");
},
path_defs::SHORTHAND_QUADRATIC_BEZIER_RELATIVE => {
println!("No implementation currently exists for shorthand quadratic bezier (t)");
},
path_defs::ELIPTICAL_ARC_ABSOLUTE => {
println!("No implementation currently exists for eliptical arcs (A)");
},
path_defs::ELIPTICAL_ARC_RELATIVE => {
println!("No implementation currently exists for eliptical arcs (a)");
if coordanates.len() % 5 != 0 {
return Err(String::from("Error: Eliptical Arc relative has an insufficiant point count. Needed 5 data points"));
}
},
// Z/z: remember to close the polyline back to the start point at the end.
path_defs::FINISH_PATH_LOWER => close_path = true,
path_defs::FINISH_PATH_UPPER => close_path = true,
_ => println!("")
};
// Line-style commands land here: record the point and advance `last_point`.
if point.is_some() {
point_list.push(point.unwrap());
last_point = point.unwrap();
}
}
if close_path {
point_list.push(position);
}
// NOTE(review): `point_list` always contains the start point pushed at the
// top, so this branch looks unreachable — confirm whether the check should
// instead be `point_list.len() <= 1`.
if point_list.is_empty() {
return Err(String::from("Error: No point data found (l), svg file is considered malformed. "));
}
Ok(point_list)
}
|
#![allow(missing_docs)]
use std::fmt::Debug;
use std::num::ParseIntError;
use num::traits::{ PrimInt, Num };
/// A simple two-variant sum type (left/right) with no success/failure bias.
pub enum Either<Left, Right> { Left(Left), Right(Right) }
/// An unsigned integer that can be split into two halves of a smaller
/// integer type and rebuilt from them (e.g. `u32` <-> two `u16`s).
pub trait Halveable {
type HalfSize: ExtInt;
// Returns the (high, low) halves of `self`.
fn split(self) -> (Self::HalfSize, Self::HalfSize);
// Reassembles a value from its (high, low) halves.
fn join(h1: Self::HalfSize, h2: Self::HalfSize) -> Self;
}
/// Lossless widening conversion to `u64`.
pub trait To64 { fn to64(self) -> u64; }
/// Conversion from any `To64` value; may truncate when the target is narrower.
pub trait From { fn from<T: To64>(n: T) -> Self; }
/// Bit width of the implementing integer type.
pub trait BitLength { fn bits(self) -> usize; }
/// Umbrella trait gathering everything required of the supported integers.
pub trait ExtInt:
PrimInt + Num<FromStrRadixErr=ParseIntError>
+ To64 + From + BitLength + Debug { }
// `usize` halves depend on the target: u16 on 32-bit, u32 on 64-bit.
// Splitting divides by 2^(half bits); joining multiplies the high half back.
#[cfg(target_pointer_width = "32")]
impl Halveable for usize {
type HalfSize = u16;
fn split(self) -> (u16, u16) { ((self / 0x1_00_00usize) as u16, self as u16) }
fn join(h1: u16, h2: u16) -> usize { ((h1 as usize) * 0x1_00_00usize) + (h2 as usize) }
}
#[cfg(target_pointer_width = "64")]
impl Halveable for usize {
type HalfSize = u32;
fn split(self) -> (u32, u32) { ((self / 0x1_00_00_00_00usize) as u32, self as u32) }
fn join(h1: u32, h2: u32) -> usize { ((h1 as usize) * 0x1_00_00_00_00usize) + (h2 as usize) }
}
// u64 <-> two u32 halves.
impl Halveable for u64 {
type HalfSize = u32;
fn split(self) -> (u32, u32) { ((self / 0x1_00_00_00_00u64) as u32, self as u32) }
fn join(h1: u32, h2: u32) -> u64 { ((h1 as u64) * 0x1_00_00_00_00u64) + (h2 as u64) }
}
// u32 <-> two u16 halves.
impl Halveable for u32 {
type HalfSize = u16;
fn split(self) -> (u16, u16) { ((self / 0x1_00_00u32) as u16, self as u16) }
fn join(h1: u16, h2: u16) -> u32 { ((h1 as u32) * 0x1_00_00u32) + (h2 as u32) }
}
// u16 <-> two u8 halves.
impl Halveable for u16 {
type HalfSize = u8;
fn split(self) -> (u8, u8) { ((self / 0x1_00u16) as u8, self as u8) }
fn join(h1: u8, h2: u8) -> u16 { ((h1 as u16) * 0x1_00u16) + (h2 as u16) }
}
// Lossless widening to u64 for every supported unsigned type.
impl To64 for usize { #[inline] fn to64(self) -> u64 { self as u64 } }
impl To64 for u64 { #[inline] fn to64(self) -> u64 { self } }
impl To64 for u32 { #[inline] fn to64(self) -> u64 { self as u64 } }
impl To64 for u16 { #[inline] fn to64(self) -> u64 { self as u64 } }
impl To64 for u8 { #[inline] fn to64(self) -> u64 { self as u64 } }
// Conversions from any To64 value; the `as` casts truncate high bits when the
// destination is narrower than the source value.
impl From for usize { #[inline] fn from<T: To64>(n: T) -> Self { n.to64() as usize } }
impl From for u64 { #[inline] fn from<T: To64>(n: T) -> Self { n.to64() } }
impl From for u32 { #[inline] fn from<T: To64>(n: T) -> Self { n.to64() as u32 } }
impl From for u16 { #[inline] fn from<T: To64>(n: T) -> Self { n.to64() as u16 } }
impl From for u8 { #[inline] fn from<T: To64>(n: T) -> Self { n.to64() as u8 } }
// Bit widths; `usize` follows the target pointer width.
#[cfg(target_pointer_width = "32")]
impl BitLength for usize { #[inline] fn bits(self) -> usize { 32usize } }
#[cfg(target_pointer_width = "64")]
impl BitLength for usize { #[inline] fn bits(self) -> usize { 64usize } }
impl BitLength for u64 { #[inline] fn bits(self) -> usize { 64usize } }
impl BitLength for u32 { #[inline] fn bits(self) -> usize { 32usize } }
impl BitLength for u16 { #[inline] fn bits(self) -> usize { 16usize } }
impl BitLength for u8 { #[inline] fn bits(self) -> usize { 8usize } }
// Blanket markers: these are the types the crate treats as ExtInt.
impl ExtInt for usize { }
impl ExtInt for u64 { }
impl ExtInt for u32 { }
impl ExtInt for u16 { }
impl ExtInt for u8 { }
/// Converts between any two supported integer types by widening through
/// `u64`; truncates when the destination type is narrower than the value.
pub fn cast<T: To64, U: From>(n: T) -> U {
From::from(n)
}
#[cfg(test)]
mod test {
use super::{ Halveable };
// Edge-case checks for `split`: zero, one, values just below/above the
// half-type boundary, and the maximum — the result is (high, low).
#[test]
fn test_split_u16() {
assert_eq!((0x00u8, 0x00u8), Halveable::split(0x0000u16));
assert_eq!((0x00u8, 0x01u8), Halveable::split(0x0001u16));
assert_eq!((0x00u8, 0xFEu8), Halveable::split(0x00FEu16));
assert_eq!((0x00u8, 0xFFu8), Halveable::split(0x00FFu16));
assert_eq!((0x01u8, 0x00u8), Halveable::split(0x0100u16));
assert_eq!((0x01u8, 0x01u8), Halveable::split(0x0101u16));
assert_eq!((0xFFu8, 0xFEu8), Halveable::split(0xFFFEu16));
assert_eq!((0xFFu8, 0xFFu8), Halveable::split(0xFFFFu16));
}
#[test]
fn test_split_u32() {
assert_eq!((0x0000u16, 0x0000u16), Halveable::split(0x00000000u32));
assert_eq!((0x0000u16, 0x0001u16), Halveable::split(0x00000001u32));
assert_eq!((0x0000u16, 0xFFFEu16), Halveable::split(0x0000FFFEu32));
assert_eq!((0x0000u16, 0xFFFFu16), Halveable::split(0x0000FFFFu32));
assert_eq!((0x0001u16, 0x0000u16), Halveable::split(0x00010000u32));
assert_eq!((0x0001u16, 0x0001u16), Halveable::split(0x00010001u32));
assert_eq!((0xFFFFu16, 0xFFFEu16), Halveable::split(0xFFFFFFFEu32));
assert_eq!((0xFFFFu16, 0xFFFFu16), Halveable::split(0xFFFFFFFFu32));
}
#[test]
fn test_split_u64() {
assert_eq!((0x00000000u32, 0x00000000u32), Halveable::split(0x0000000000000000u64));
assert_eq!((0x00000000u32, 0x00000001u32), Halveable::split(0x0000000000000001u64));
assert_eq!((0x00000000u32, 0xFFFFFFFEu32), Halveable::split(0x00000000FFFFFFFEu64));
assert_eq!((0x00000000u32, 0xFFFFFFFFu32), Halveable::split(0x00000000FFFFFFFFu64));
assert_eq!((0x00000001u32, 0x00000000u32), Halveable::split(0x0000000100000000u64));
assert_eq!((0x00000001u32, 0x00000001u32), Halveable::split(0x0000000100000001u64));
assert_eq!((0xFFFFFFFFu32, 0xFFFFFFFEu32), Halveable::split(0xFFFFFFFFFFFFFFFEu64));
assert_eq!((0xFFFFFFFFu32, 0xFFFFFFFFu32), Halveable::split(0xFFFFFFFFFFFFFFFFu64));
}
// `usize` variants mirror the u32/u64 suites for the matching pointer width.
#[test]
#[cfg(target_pointer_width = "32")]
fn test_split_usize() {
assert_eq!((0x0000u16, 0x0000u16), Halveable::split(0x00000000usize));
assert_eq!((0x0000u16, 0x0001u16), Halveable::split(0x00000001usize));
assert_eq!((0x0000u16, 0xFFFEu16), Halveable::split(0x0000FFFEusize));
assert_eq!((0x0000u16, 0xFFFFu16), Halveable::split(0x0000FFFFusize));
assert_eq!((0x0001u16, 0x0000u16), Halveable::split(0x00010000usize));
assert_eq!((0x0001u16, 0x0001u16), Halveable::split(0x00010001usize));
assert_eq!((0xFFFFu16, 0xFFFEu16), Halveable::split(0xFFFFFFFEusize));
assert_eq!((0xFFFFu16, 0xFFFFu16), Halveable::split(0xFFFFFFFFusize));
}
#[test]
#[cfg(target_pointer_width = "64")]
fn test_split_usize() {
assert_eq!((0x00000000u32, 0x00000000u32), Halveable::split(0x0000000000000000usize));
assert_eq!((0x00000000u32, 0x00000001u32), Halveable::split(0x0000000000000001usize));
assert_eq!((0x00000000u32, 0xFFFFFFFEu32), Halveable::split(0x00000000FFFFFFFEusize));
assert_eq!((0x00000000u32, 0xFFFFFFFFu32), Halveable::split(0x00000000FFFFFFFFusize));
assert_eq!((0x00000001u32, 0x00000000u32), Halveable::split(0x0000000100000000usize));
assert_eq!((0x00000001u32, 0x00000001u32), Halveable::split(0x0000000100000001usize));
assert_eq!((0xFFFFFFFFu32, 0xFFFFFFFEu32), Halveable::split(0xFFFFFFFFFFFFFFFEusize));
assert_eq!((0xFFFFFFFFu32, 0xFFFFFFFFu32), Halveable::split(0xFFFFFFFFFFFFFFFFusize));
}
// `join` checks mirror the `split` cases: join(high, low) rebuilds the value.
#[test]
fn test_join_u16() {
assert_eq!(0x0000u16, Halveable::join(0x00u8, 0x00u8));
assert_eq!(0x0001u16, Halveable::join(0x00u8, 0x01u8));
assert_eq!(0x00FEu16, Halveable::join(0x00u8, 0xFEu8));
assert_eq!(0x00FFu16, Halveable::join(0x00u8, 0xFFu8));
assert_eq!(0x0100u16, Halveable::join(0x01u8, 0x00u8));
assert_eq!(0x0101u16, Halveable::join(0x01u8, 0x01u8));
assert_eq!(0xFFFEu16, Halveable::join(0xFFu8, 0xFEu8));
assert_eq!(0xFFFFu16, Halveable::join(0xFFu8, 0xFFu8));
}
#[test]
fn test_join_u32() {
assert_eq!(0x00000000u32, Halveable::join(0x0000u16, 0x0000u16));
assert_eq!(0x00000001u32, Halveable::join(0x0000u16, 0x0001u16));
assert_eq!(0x0000FFFEu32, Halveable::join(0x0000u16, 0xFFFEu16));
assert_eq!(0x0000FFFFu32, Halveable::join(0x0000u16, 0xFFFFu16));
assert_eq!(0x00010000u32, Halveable::join(0x0001u16, 0x0000u16));
assert_eq!(0x00010001u32, Halveable::join(0x0001u16, 0x0001u16));
assert_eq!(0xFFFFFFFEu32, Halveable::join(0xFFFFu16, 0xFFFEu16));
assert_eq!(0xFFFFFFFFu32, Halveable::join(0xFFFFu16, 0xFFFFu16));
}
#[test]
fn test_join_u64() {
assert_eq!(0x0000000000000000u64, Halveable::join(0x00000000u32, 0x00000000u32));
assert_eq!(0x0000000000000001u64, Halveable::join(0x00000000u32, 0x00000001u32));
assert_eq!(0x00000000FFFFFFFEu64, Halveable::join(0x00000000u32, 0xFFFFFFFEu32));
assert_eq!(0x00000000FFFFFFFFu64, Halveable::join(0x00000000u32, 0xFFFFFFFFu32));
assert_eq!(0x0000000100000000u64, Halveable::join(0x00000001u32, 0x00000000u32));
assert_eq!(0x0000000100000001u64, Halveable::join(0x00000001u32, 0x00000001u32));
assert_eq!(0xFFFFFFFFFFFFFFFEu64, Halveable::join(0xFFFFFFFFu32, 0xFFFFFFFEu32));
assert_eq!(0xFFFFFFFFFFFFFFFFu64, Halveable::join(0xFFFFFFFFu32, 0xFFFFFFFFu32));
}
#[test]
#[cfg(target_pointer_width = "32")]
fn test_join_usize() {
assert_eq!(0x00000000usize, Halveable::join(0x0000u16, 0x0000u16));
assert_eq!(0x00000001usize, Halveable::join(0x0000u16, 0x0001u16));
assert_eq!(0x0000FFFEusize, Halveable::join(0x0000u16, 0xFFFEu16));
assert_eq!(0x0000FFFFusize, Halveable::join(0x0000u16, 0xFFFFu16));
assert_eq!(0x00010000usize, Halveable::join(0x0001u16, 0x0000u16));
assert_eq!(0x00010001usize, Halveable::join(0x0001u16, 0x0001u16));
assert_eq!(0xFFFFFFFEusize, Halveable::join(0xFFFFu16, 0xFFFEu16));
assert_eq!(0xFFFFFFFFusize, Halveable::join(0xFFFFu16, 0xFFFFu16));
}
#[test]
#[cfg(target_pointer_width = "64")]
fn test_join_usize() {
assert_eq!(0x0000000000000000usize, Halveable::join(0x00000000u32, 0x00000000u32));
assert_eq!(0x0000000000000001usize, Halveable::join(0x00000000u32, 0x00000001u32));
assert_eq!(0x00000000FFFFFFFEusize, Halveable::join(0x00000000u32, 0xFFFFFFFEu32));
assert_eq!(0x00000000FFFFFFFFusize, Halveable::join(0x00000000u32, 0xFFFFFFFFu32));
assert_eq!(0x0000000100000000usize, Halveable::join(0x00000001u32, 0x00000000u32));
assert_eq!(0x0000000100000001usize, Halveable::join(0x00000001u32, 0x00000001u32));
assert_eq!(0xFFFFFFFFFFFFFFFEusize, Halveable::join(0xFFFFFFFFu32, 0xFFFFFFFEu32));
assert_eq!(0xFFFFFFFFFFFFFFFFusize, Halveable::join(0xFFFFFFFFu32, 0xFFFFFFFFu32));
}
}
|
use bevy_ecs::{reflect::ReflectComponent, system::Query};
use bevy_math::{vec3, Vec2};
use bevy_reflect::{Reflect, TypeUuid};
use bevy_render::camera::Camera;
use bevy_transform::components::{GlobalTransform, Transform};
/// Component for sprites that should render according to a parallax relative to the camera.
/// Note that [`parallax_transform_system`] will overwrite the `Transform` component,
/// so if you want to modify the transform of an entity that has a `Parallax` component you should
/// modify the `transform` field of `Parallax` instead of modifying the `Transform` component directly.
#[derive(Debug, Default, Clone, TypeUuid, Reflect)]
#[reflect(Component)]
#[uuid = "0e436fcb-7b34-420c-92df-6fda230332d8"]
pub struct Parallax {
/// Parallax factor per axis (x/y only; z is unaffected by the system).
/// Factors below 1.0 will make entities appear further away from the camera,
/// while factors above 1.0 will make them appear closer.
/// You can think of the camera as being on factor 1.0.
pub factor: Vec2,
/// The source transform to use when performing parallax transformation.
pub transform: Transform,
}
/// System that rewrites the `Transform` of every `Parallax` entity from its
/// stored source transform, offset by the first camera's position scaled by
/// the per-axis parallax factor.
pub fn parallax_transform_system(
    cameras: Query<(&GlobalTransform, &Camera)>,
    mut parallax: Query<(&mut Transform, &Parallax)>,
) {
    // Only the first camera (if any) drives the parallax effect.
    for (camera_transform, _camera) in cameras.iter().take(1) {
        let cam_pos = camera_transform.translation;
        for (mut transform, par) in parallax.iter_mut() {
            // Start from the parallax source transform, follow the camera on
            // x/y, then pull back by the factor-scaled camera position; z is
            // multiplied by zero on both terms, so depth is left untouched.
            transform.translation = par.transform.translation
                + cam_pos * vec3(1.0, 1.0, 0.0)
                - cam_pos * par.factor.extend(0.0);
            transform.rotation = par.transform.rotation;
            transform.scale = par.transform.scale;
        }
    }
}
impl Parallax {
/// Construct a new `Parallax`.
pub fn new(factor: Vec2, transform: Transform) -> Self {
Self { factor, transform }
}
}
|
// Copyright (c) 2018, ilammy
//
// Licensed under the Apache License, Version 2.0 (see LICENSE in the
// root directory). This file may be copied, distributed, and modified
// only in accordance with the terms specified by the license.
use exonum::{
api::ServiceApiBuilder, blockchain::{Service, Transaction, TransactionSet}, crypto::Hash,
encoding, messages::RawTransaction, storage::Snapshot,
};
use api::LegalApi;
use transactions::LegalTransactions;
/// Service identifier reported by `Service::service_id`.
pub const SERVICE_ID: u16 = 9;
/// Our service marker. It does not need any state.
pub struct LegalService;
impl Service for LegalService {
// Human-readable service name.
fn service_name(&self) -> &'static str {
"legislation"
}
// Numeric identifier; see `SERVICE_ID` above.
fn service_id(&self) -> u16 {
SERVICE_ID
}
// Implement a method to deserialize transactions coming to the node.
fn tx_from_raw(&self, raw: RawTransaction) -> Result<Box<dyn Transaction>, encoding::Error> {
let tx = LegalTransactions::tx_from_raw(raw)?;
Ok(tx.into())
}
// Hashes for the service tables that will be included into the state hash.
// To simplify things, we don't have [Merkelized tables][merkle] in the service storage
// for now, so we return an empty vector.
//
// [merkle]: https://exonum.com/doc/architecture/storage/#merklized-indices
fn state_hash(&self, _: &dyn Snapshot) -> Vec<Hash> {
vec![]
}
// Links the service api implementation to the Exonum.
fn wire_api(&self, builder: &mut ServiceApiBuilder) {
LegalApi::wire(builder);
}
}
|
use crate::aoc_utils::read_input;
use regex::Regex;
/// Day entry point: load the puzzle input once, then solve both parts.
pub fn run(input_filename: &str) {
    let contents = read_input(input_filename);
    part1(&contents);
    part2(&contents);
}
/// One parsed containment relation from the puzzle input:
/// bags of `parent_color` contain `number` bags of `color`.
// The previous hand-written `Debug` impl reproduced exactly what
// `#[derive(Debug)]` generates (same struct name, field order, and
// formatting), so the derive replaces it.
#[derive(Debug)]
struct Bag {
    color: String,
    number: u32,
    parent_color: String,
}
/// Parses one input line (e.g. `"light red bags contain 1 bright white bag,
/// 2 muted yellow bags."`) into one `Bag` per contained color. Lines whose
/// contents match nothing (e.g. "no other bags") yield an empty vector.
fn read_bag(line: &str) -> Vec<Bag> {
    let mut result_bags: Vec<Bag> = vec![];
    // Outer capture: the containing bag's two-word color plus the raw tail.
    // (Fixes mojibake: the argument previously read `®ex_string`, a garbled
    // `&regex_string`, which does not compile.)
    let regex_string =
        r"(?P<start_prefix>[a-z]+) (?P<start_color>[a-z]+) bags contain (?P<contents>.*)+";
    let regex = Regex::new(regex_string).unwrap();
    let capture_result = regex.captures(line).expect("Invalid bag line");
    // Inner capture: each "<n> <prefix> <color> bag(s)" entry in the tail.
    let regex_string_contents =
        r"(?P<number>\d+) (?P<prefix>[a-z]+) (?P<color>[a-z]+) bags?,?\.?\s?";
    let regex_contents = Regex::new(regex_string_contents).unwrap();
    // The parent color is the same for every entry on this line; format once.
    let parent_color = format!(
        "{} {}",
        &capture_result["start_prefix"], &capture_result["start_color"]
    );
    for cap in regex_contents.captures_iter(&capture_result["contents"]) {
        result_bags.push(Bag {
            color: format!("{} {}", &cap["prefix"], &cap["color"]),
            number: cap["number"].parse::<u32>().unwrap_or(0),
            parent_color: parent_color.clone(),
        })
    }
    result_bags
}
/// Returns every bag color that can (transitively) contain `needle`,
/// accumulated on top of `provided_counted_bags`. Colors already collected
/// are not revisited, which also guards against cycles in the input.
///
/// Parameters are slices (`&[_]`) instead of `&Vec<_>`; existing callers that
/// pass `&Vec` still work via deref coercion.
fn count_bags(bags: &[Bag], needle: &str, provided_counted_bags: &[String]) -> Vec<String> {
    let mut counted_bags: Vec<String> = provided_counted_bags.to_vec();
    for bag in bags {
        // A relation whose child matches the needle means the parent can
        // (eventually) contain it — record it and recurse upward once.
        if bag.color == needle && !counted_bags.contains(&bag.parent_color) {
            counted_bags.push(bag.parent_color.clone());
            counted_bags = count_bags(bags, &bag.parent_color, &counted_bags);
        }
    }
    counted_bags
}
fn part1(input: &String) {
let mut bags: Vec<Bag> = vec![];
for line in input.lines() {
bags.extend(read_bag(line));
}
let counted: Vec<String> = vec![];
let counted_bags = count_bags(&bags, "shiny gold", &counted);
println!("Part 1: {}", counted_bags.len());
}
/// Recursively counts how many bags a single bag of color `needle` must
/// contain in total (direct children plus everything nested inside them).
///
/// Takes `&[Bag]` instead of `&Vec<Bag>`; callers passing `&Vec<Bag>`
/// still work via deref coercion.
fn count_inside(bags: &[Bag], needle: &str) -> u32 {
    let mut count: u32 = 0;
    for bag in bags {
        if bag.parent_color == needle {
            // Each of the `number` children counts once, plus its own
            // contents: n * (1 + inner) == n * inner + n (as the original).
            count += bag.number * (1 + count_inside(bags, &bag.color));
        }
    }
    count
}
fn part2(input: &String) {
let mut bags: Vec<Bag> = vec![];
for line in input.lines() {
bags.extend(read_bag(line));
}
let count = count_inside(&bags, "shiny gold");
println!("Part 2: {}", count);
}
|
use std::prelude::v1::*;
use std::{i16, f64};
use super::super::*;
use flt2dec::*;
use flt2dec::bignum::Big32x36 as Big;
use flt2dec::strategy::dragon::*;
#[test]
fn test_mul_pow10() {
    // Cross-checks `mul_pow10(x, i)` against repeated multiplication:
    // each 10^i must equal 10^(i-1) * 10 computed via `mul_small`.
    let mut prevpow10 = Big::from_small(1);
    for i in 1..340 {
        let mut curpow10 = Big::from_small(1);
        mul_pow10(&mut curpow10, i);
        assert_eq!(curpow10, *prevpow10.clone().mul_small(10));
        prevpow10 = curpow10;
    }
}
#[test]
fn shortest_sanity_test() {
    // Runs the shared shortest-mode sanity suites against Dragon4's
    // `format_shortest`, for f64, f32, and the extra edge-case set.
    f64_shortest_sanity_test(format_shortest);
    f32_shortest_sanity_test(format_shortest);
    more_shortest_sanity_test(format_shortest);
}
#[test]
fn exact_sanity_test() {
    // Runs the shared exact-mode sanity suites against Dragon4's
    // `format_exact`, for both f64 and f32.
    f64_exact_sanity_test(format_exact);
    f32_exact_sanity_test(format_exact);
}
#[bench]
fn bench_small_shortest(b: &mut Bencher) {
    // Shortest-mode formatting of a small, "typical" double.
    let sample = decode_finite(3.141592f64);
    let mut digits = [0; MAX_SIG_DIGITS];
    b.iter(|| format_shortest(&sample, &mut digits));
}
#[bench]
fn bench_big_shortest(b: &mut Bencher) {
    // Shortest-mode formatting of the largest finite double.
    let sample = decode_finite(f64::MAX);
    let mut digits = [0; MAX_SIG_DIGITS];
    b.iter(|| format_shortest(&sample, &mut digits));
}
#[bench]
fn bench_small_exact_3(b: &mut Bencher) {
    // Exact-mode formatting of a small double into 3 digits.
    let sample = decode_finite(3.141592f64);
    let mut digits = [0; 3];
    b.iter(|| format_exact(&sample, &mut digits, i16::MIN));
}
#[bench]
fn bench_big_exact_3(b: &mut Bencher) {
    // Exact-mode formatting of the largest finite double into 3 digits.
    let sample = decode_finite(f64::MAX);
    let mut digits = [0; 3];
    b.iter(|| format_exact(&sample, &mut digits, i16::MIN));
}
#[bench]
fn bench_small_exact_12(b: &mut Bencher) {
    // Exact-mode formatting of a small double into 12 digits.
    let sample = decode_finite(3.141592f64);
    let mut digits = [0; 12];
    b.iter(|| format_exact(&sample, &mut digits, i16::MIN));
}
#[bench]
fn bench_big_exact_12(b: &mut Bencher) {
    // Exact-mode formatting of the largest finite double into 12 digits.
    let sample = decode_finite(f64::MAX);
    let mut digits = [0; 12];
    b.iter(|| format_exact(&sample, &mut digits, i16::MIN));
}
#[bench]
fn bench_small_exact_inf(b: &mut Bencher) {
    // Exact-mode formatting of a small double into a very large buffer.
    let sample = decode_finite(3.141592f64);
    let mut digits = [0; 1024];
    b.iter(|| format_exact(&sample, &mut digits, i16::MIN));
}
#[bench]
fn bench_big_exact_inf(b: &mut Bencher) {
    // Exact-mode formatting of the largest finite double into a very
    // large buffer.
    let sample = decode_finite(f64::MAX);
    let mut digits = [0; 1024];
    b.iter(|| format_exact(&sample, &mut digits, i16::MIN));
}
#[test]
fn test_to_shortest_str() {
    // Shared `to_shortest_str` suite, driven by Dragon4's shortest mode.
    to_shortest_str_test(format_shortest);
}
#[test]
fn test_to_shortest_exp_str() {
    // Shared `to_shortest_exp_str` suite, driven by Dragon4's shortest mode.
    to_shortest_exp_str_test(format_shortest);
}
#[test]
fn test_to_exact_exp_str() {
    // Shared `to_exact_exp_str` suite, driven by Dragon4's exact mode.
    to_exact_exp_str_test(format_exact);
}
#[test]
fn test_to_exact_fixed_str() {
    // Shared `to_exact_fixed_str` suite, driven by Dragon4's exact mode.
    to_exact_fixed_str_test(format_exact);
}
|
pub mod cornell_box;
pub mod cornell_smoke;
pub mod earth;
pub mod last;
pub mod random;
pub mod simple_light;
pub mod two_perlin_spheres;
pub mod two_spheres;
|
/// Find all prime numbers strictly less than `n`.
/// For example, `sieve(7)` returns `[2, 3, 5]`.
///
/// Fixes two defects in the original:
/// - the loop ran over `2..n+1`, so `n` itself was (wrongly) included
///   when prime, contradicting the documented contract;
/// - it used O(n^2) trial division; this is a sieve of Eratosthenes.
pub fn sieve(n: u32) -> Vec<u32> {
    if n <= 2 {
        // No primes below 2 (handles 0, 1, and 2; also avoids overflow).
        return Vec::new();
    }
    let limit = n as usize;
    // is_composite[i] == true once some prime divides i.
    let mut is_composite = vec![false; limit];
    let mut primes = Vec::new();
    for i in 2..limit {
        if !is_composite[i] {
            primes.push(i as u32);
            // Start at i*i: smaller multiples were marked by smaller primes.
            let mut multiple = i * i;
            while multiple < limit {
                is_composite[multiple] = true;
                multiple += i;
            }
        }
    }
    primes
}
|
// Copyright (c) 2018-2020 Jeron Aldaron Lau
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0>, the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, or the ZLib
// license <LICENSE-ZLIB or https://www.zlib.net/zlib_license.html> at
// your option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Mono speaker configuration and types.
use crate::{
chan::{Ch16, Ch32, Ch64, Ch8},
sample::Sample1,
Config,
};
/// 1 speaker/channel arrangement (front center).
///
/// Zero-sized marker type used as the `Config` parameter of `Sample1`.
#[derive(Default, Debug, Copy, Clone, PartialEq)]
pub struct Mono;
impl Config for Mono {
    // Mono carries exactly one channel (the front center speaker).
    const CHANNEL_COUNT: usize = 1;
}
/// [Mono](struct.Mono.html) [8-bit PCM](../chan/struct.Ch8.html) format.
pub type Mono8 = Sample1<Ch8, Mono>;
/// [Mono](struct.Mono.html) [16-bit PCM](../chan/struct.Ch16.html) format.
pub type Mono16 = Sample1<Ch16, Mono>;
/// [Mono](struct.Mono.html) [32-bit Floating Point](../chan/struct.Ch32.html) format.
pub type Mono32 = Sample1<Ch32, Mono>;
/// [Mono](struct.Mono.html) [64-bit Floating Point](../chan/struct.Ch64.html) format.
pub type Mono64 = Sample1<Ch64, Mono>;
|
// Copyright 2020 IOTA Stiftung
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and limitations under the License.
//! Module that provides the [`Tangle`] struct.
use crate::{
milestone::MilestoneIndex,
vertex::{TransactionRef, Vertex},
};
use bee_bundle::{Hash, Transaction};
use std::{
collections::HashSet,
sync::atomic::{AtomicU32, Ordering},
};
use async_std::{
sync::{Arc, Barrier},
task::block_on,
};
use dashmap::{mapref::entry::Entry, DashMap, DashSet};
use flume::Sender;
/// A datastructure based on a directed acyclic graph (DAG).
pub struct Tangle {
    /// A map between each vertex and the hash of the transaction the respective vertex represents.
    pub(crate) vertices: DashMap<Hash, Vertex>,
    /// A map between the hash of a transaction and the hashes of its approvers.
    pub(crate) approvers: DashMap<Hash, Vec<Hash>>,
    /// A map between the milestone index and hash of the milestone transaction.
    milestones: DashMap<MilestoneIndex, Hash>,
    /// A set of hashes representing transactions deemed solid entry points.
    solid_entry_points: DashSet<Hash>,
    /// The sender side of a channel between the Tangle and the (gossip) solidifier.
    solidifier_send: Sender<Option<Hash>>,
    /// Index of the latest solid milestone (updated atomically).
    solid_milestone_index: AtomicU32,
    /// Index of the milestone of the latest snapshot (updated atomically).
    snapshot_milestone_index: AtomicU32,
    /// Index of the last milestone seen (updated atomically).
    last_milestone_index: AtomicU32,
    /// Barrier awaited in `shutdown` so the solidifier can finish first.
    drop_barrier: Arc<Barrier>,
}
impl Tangle {
    /// Creates a new `Tangle` with empty maps and all milestone indexes at 0.
    pub(crate) fn new(solidifier_send: Sender<Option<Hash>>, drop_barrier: Arc<Barrier>) -> Self {
        Self {
            vertices: DashMap::new(),
            approvers: DashMap::new(),
            solidifier_send,
            solid_entry_points: DashSet::new(),
            milestones: DashMap::new(),
            solid_milestone_index: AtomicU32::new(0),
            snapshot_milestone_index: AtomicU32::new(0),
            last_milestone_index: AtomicU32::new(0),
            drop_barrier,
        }
    }
    /// Inserts a transaction.
    ///
    /// Note: The method assumes that `hash` -> `transaction` is injective, otherwise unexpected behavior could
    /// occur.
    pub async fn insert_transaction(&'static self, transaction: Transaction, hash: Hash) -> Option<TransactionRef> {
        // Register `hash` as an approver of its trunk transaction.
        match self.approvers.entry(*transaction.trunk()) {
            Entry::Occupied(mut entry) => {
                let values = entry.get_mut();
                values.push(hash);
            }
            Entry::Vacant(entry) => {
                entry.insert(vec![hash]);
            }
        }
        // Register it for the branch too, unless trunk and branch coincide
        // (which would record the same approver twice).
        if transaction.trunk() != transaction.branch() {
            match self.approvers.entry(*transaction.branch()) {
                Entry::Occupied(mut entry) => {
                    let values = entry.get_mut();
                    values.push(hash);
                }
                Entry::Vacant(entry) => {
                    entry.insert(vec![hash]);
                }
            }
        }
        let vertex = Vertex::from(transaction, hash);
        let tx_ref = vertex.get_ref_to_inner();
        // TODO: not sure if we want replacement of vertices
        if self.vertices.insert(hash, vertex).is_none() {
            // First insertion for this hash: notify the solidifier.
            match self.solidifier_send.send(Some(hash)) {
                Ok(()) => (),
                // NOTE(review): `todo!` panics at runtime if the send fails;
                // confirm whether a logged warning was intended instead.
                Err(e) => todo!("log warning"),
            }
            Some(tx_ref)
        } else {
            // The hash was already present: the old vertex was replaced and
            // no reference is handed back to the caller.
            None
        }
    }
    /// Shuts the Tangle down: stops the solidifier worker and waits for it.
    pub(crate) fn shutdown(&self) {
        // `None` will cause the worker to finish
        self.solidifier_send.send(None).expect("error sending shutdown signal");
        // Wait until the solidifier reaches the barrier before returning.
        block_on(self.drop_barrier.wait());
    }
    /// Returns a reference to a transaction, if it's available in the local Tangle.
    pub fn get_transaction(&'static self, hash: &Hash) -> Option<TransactionRef> {
        self.vertices.get(hash).map(|v| v.get_ref_to_inner())
    }
    /// Returns whether the transaction is stored in the Tangle.
    pub fn contains_transaction(&'static self, hash: &Hash) -> bool {
        self.vertices.contains_key(hash)
    }
    /// Returns whether the transaction associated with `hash` is solid.
    ///
    /// Note: This function is _eventually consistent_ - if `true` is returned, solidification has
    /// definitely occurred. If `false` is returned, then solidification has probably not occurred,
    /// or solidification information has not yet been fully propagated.
    pub fn is_solid_transaction(&'static self, hash: &Hash) -> bool {
        if self.is_solid_entry_point(hash) {
            // Solid entry points are solid by definition.
            true
        } else {
            // Unknown transactions are treated as not solid.
            self.vertices.get(hash).map(|r| r.value().is_solid()).unwrap_or(false)
        }
    }
    /// Adds the `hash` of a milestone identified by its milestone `index`.
    pub fn add_milestone(&'static self, index: MilestoneIndex, hash: Hash) {
        self.milestones.insert(index, hash);
        // Flag the vertex as a milestone if the transaction is already known.
        if let Some(mut vertex) = self.vertices.get_mut(&hash) {
            vertex.set_milestone();
        }
    }
    /// Removes the hash of a milestone.
    pub fn remove_milestone(&'static self, index: MilestoneIndex) {
        self.milestones.remove(&index);
    }
    /// Returns the milestone transaction corresponding to the given milestone `index`.
    pub fn get_milestone(&'static self, index: MilestoneIndex) -> Option<TransactionRef> {
        match self.get_milestone_hash(index) {
            None => None,
            Some(hash) => self.get_transaction(&hash),
        }
    }
    /// Returns a [`VertexRef`] linked to the specified milestone, if it's available in the local Tangle.
    pub fn get_latest_milestone(&'static self) -> Option<TransactionRef> {
        todo!("get the last milestone index, get the transaction hash from it, and query the Tangle for it")
    }
    /// Returns the hash of a milestone.
    pub fn get_milestone_hash(&'static self, index: MilestoneIndex) -> Option<Hash> {
        match self.milestones.get(&index) {
            None => None,
            Some(v) => Some(*v),
        }
    }
    /// Returns whether the milestone index maps to a known milestone hash.
    pub fn contains_milestone(&'static self, index: MilestoneIndex) -> bool {
        self.milestones.contains_key(&index)
    }
    /// Retrieves the solid milestone index.
    pub fn get_solid_milestone_index(&'static self) -> MilestoneIndex {
        self.solid_milestone_index.load(Ordering::Relaxed).into()
    }
    /// Updates the solid milestone index to `new_index`.
    pub fn update_solid_milestone_index(&'static self, new_index: MilestoneIndex) {
        self.solid_milestone_index.store(*new_index, Ordering::Relaxed);
    }
    /// Retrieves the snapshot milestone index.
    pub fn get_snapshot_milestone_index(&'static self) -> MilestoneIndex {
        self.snapshot_milestone_index.load(Ordering::Relaxed).into()
    }
    /// Updates the snapshot milestone index to `new_index`.
    pub fn update_snapshot_milestone_index(&'static self, new_index: MilestoneIndex) {
        self.snapshot_milestone_index.store(*new_index, Ordering::Relaxed);
    }
    /// Retrieves the last milestone index.
    pub fn get_last_milestone_index(&'static self) -> MilestoneIndex {
        self.last_milestone_index.load(Ordering::Relaxed).into()
    }
    /// Updates the last milestone index to `new_index`.
    pub fn update_last_milestone_index(&'static self, new_index: MilestoneIndex) {
        self.last_milestone_index.store(*new_index, Ordering::Relaxed);
    }
    /// Adds `hash` to the set of solid entry points.
    pub fn add_solid_entry_point(&'static self, hash: Hash) {
        self.solid_entry_points.insert(hash);
    }
    /// Removes `hash` from the set of solid entry points.
    pub fn remove_solid_entry_point(&'static self, hash: Hash) {
        self.solid_entry_points.remove(&hash);
    }
    /// Returns whether the transaction associated with `hash` is a solid entry point.
    pub fn is_solid_entry_point(&'static self, hash: &Hash) -> bool {
        self.solid_entry_points.contains(hash)
    }
    /// Checks if the tangle is synced or not
    pub fn is_synced(&'static self) -> bool {
        // Synced means the latest solid milestone has caught up with the
        // latest milestone seen.
        self.get_solid_milestone_index() == self.get_last_milestone_index()
    }
    /// Returns the current size of the Tangle.
    pub fn size(&'static self) -> usize {
        self.vertices.len()
    }
    /// Starts a walk beginning at a `start` vertex identified by its associated transaction hash
    /// traversing its children/approvers for as long as those satisfy a given `filter`.
    ///
    /// Returns a list of descendants of `start`. It is ensured, that all elements of that list
    /// are connected through the trunk.
    pub fn trunk_walk_approvers<F>(&'static self, start: Hash, filter: F) -> Vec<(TransactionRef, Hash)>
    where
        F: Fn(&TransactionRef) -> bool,
    {
        let mut approvees = vec![];
        let mut collected = vec![];
        // The walk only begins if `start` itself exists and passes the filter.
        if let Some(approvee_ref) = self.vertices.get(&start) {
            let approvee_vtx = approvee_ref.value();
            let approvee = approvee_vtx.get_ref_to_inner();
            if filter(&approvee) {
                approvees.push(start);
                collected.push((approvee, approvee_vtx.get_id()));
                while let Some(approvee_hash) = approvees.pop() {
                    if let Some(approvers_ref) = self.approvers.get(&approvee_hash) {
                        for approver_hash in approvers_ref.value() {
                            if let Some(approver_ref) = self.vertices.get(approver_hash) {
                                let approver = approver_ref.value().get_ref_to_inner();
                                // Only follow approvers connected via trunk.
                                if *approver.trunk() == approvee_hash && filter(&approver) {
                                    approvees.push(*approver_hash);
                                    collected.push((approver, approver_ref.value().get_id()));
                                    // NOTE: For simplicity reasons we break here, and assume, that there can't be
                                    // a second approver that passes the filter
                                    break;
                                }
                            }
                        }
                    }
                }
            }
        }
        collected
    }
    /// Starts a walk beginning at a `start` vertex identified by its associated transaction hash
    /// traversing its ancestors/approvees for as long as those satisfy a given `filter`.
    ///
    /// Returns a list of ancestors of `start`. It is ensured, that all elements of that list
    /// are connected through the trunk.
    pub fn trunk_walk_approvees<F>(&'static self, start: Hash, filter: F) -> Vec<(TransactionRef, Hash)>
    where
        F: Fn(&TransactionRef) -> bool,
    {
        let mut approvers = vec![start];
        let mut collected = vec![];
        while let Some(approver_hash) = approvers.pop() {
            if let Some(approver_ref) = self.vertices.get(&approver_hash) {
                let approver_vtx = approver_ref.value();
                let approver = approver_vtx.get_ref_to_inner();
                if !filter(&approver) {
                    // Stop the walk at the first transaction failing the filter.
                    break;
                } else {
                    // Follow the trunk towards the ancestors.
                    approvers.push(approver.trunk().clone());
                    collected.push((approver, approver_vtx.get_id()));
                }
            }
        }
        collected
    }
    /// Walks all approvees given a starting hash `root`.
    ///
    /// NOTE(review): despite "approvers" naming conventions elsewhere, this
    /// follows `trunk`/`branch`, i.e. the transactions `root` approves.
    pub fn walk_approvees_depth_first<Mapping, Follow, Missing>(
        &'static self,
        root: Hash,
        mut map: Mapping,
        should_follow: Follow,
        mut on_missing: Missing,
    ) where
        Mapping: FnMut(&TransactionRef),
        Follow: Fn(&Vertex) -> bool,
        Missing: FnMut(&Hash),
    {
        let mut non_analyzed_hashes = Vec::new();
        let mut analyzed_hashes = HashSet::new();
        non_analyzed_hashes.push(root);
        while let Some(hash) = non_analyzed_hashes.pop() {
            if !analyzed_hashes.contains(&hash) {
                match self.vertices.get(&hash) {
                    Some(vertex) => {
                        let vertex = vertex.value();
                        let transaction = vertex.get_ref_to_inner();
                        map(&transaction);
                        if should_follow(vertex) {
                            // Branch is pushed first so the trunk is visited first.
                            non_analyzed_hashes.push(*transaction.branch());
                            non_analyzed_hashes.push(*transaction.trunk());
                        }
                    }
                    None => {
                        // Solid entry points are intentionally not reported missing.
                        if !self.is_solid_entry_point(&hash) {
                            on_missing(&hash);
                        }
                    }
                }
                analyzed_hashes.insert(hash);
            }
        }
    }
    /// Walks all approvers in a post order DFS way through trunk then branch.
    ///
    /// NOTE(review): like `walk_approvees_depth_first`, this traverses
    /// `trunk`/`branch` (the transactions `root` approves) — confirm the
    /// "approvers" in the name is intended.
    pub fn walk_approvers_post_order_dfs<Mapping, Follow, Missing>(
        &'static self,
        root: Hash,
        mut map: Mapping,
        should_follow: Follow,
        mut on_missing: Missing,
    ) where
        Mapping: FnMut(&Hash, &TransactionRef),
        Follow: Fn(&Vertex) -> bool,
        Missing: FnMut(&Hash),
    {
        let mut non_analyzed_hashes = Vec::new();
        let mut analyzed_hashes = HashSet::new();
        non_analyzed_hashes.push(root);
        while let Some(hash) = non_analyzed_hashes.last() {
            match self.vertices.get(hash) {
                Some(vertex) => {
                    let vertex = vertex.value();
                    let transaction = vertex.get_ref_to_inner();
                    // TODO add follow
                    // Post-order: only visit once both parents were analyzed.
                    if analyzed_hashes.contains(transaction.trunk()) && analyzed_hashes.contains(transaction.branch()) {
                        map(hash, &transaction);
                        analyzed_hashes.insert(hash.clone());
                        non_analyzed_hashes.pop();
                        // TODO add follow
                    } else if !analyzed_hashes.contains(transaction.trunk()) {
                        non_analyzed_hashes.push(*transaction.trunk());
                        // TODO add follow
                    } else if !analyzed_hashes.contains(transaction.branch()) {
                        non_analyzed_hashes.push(*transaction.branch());
                    }
                }
                None => {
                    if !self.is_solid_entry_point(hash) {
                        on_missing(hash);
                    }
                    analyzed_hashes.insert(hash.clone());
                    non_analyzed_hashes.pop();
                }
            }
        }
    }
    // Test-only helper: number of recorded approvers for `hash`.
    #[cfg(test)]
    fn num_approvers(&'static self, hash: &Hash) -> usize {
        self.approvers.get(hash).map_or(0, |r| r.value().len())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::*;
    use bee_bundle::{TransactionField, Value};
    use bee_test::{
        field::rand_trits_field,
        transaction::{create_random_attached_tx, create_random_tx},
    };
    use async_std::{sync::channel, task::block_on};
    use serial_test::serial;
    // NOTE(review): `init()`, `tangle()` and `drop()` below appear to be
    // crate-level test helpers (via `use crate::*`) managing a global Tangle;
    // `drop()` takes no argument, so it is not `std::mem::drop` — confirm.
    #[test]
    #[serial]
    fn insert_and_contains() {
        init();
        let tangle = tangle();
        let (hash, transaction) = create_random_tx();
        // First insertion returns a reference and makes the tx queryable.
        assert!(block_on(tangle.insert_transaction(transaction, hash)).is_some());
        assert_eq!(1, tangle.size());
        assert!(tangle.contains_transaction(&hash));
        drop();
    }
    #[test]
    #[serial]
    fn update_and_get_snapshot_milestone_index() {
        init();
        let tangle = tangle();
        tangle.update_snapshot_milestone_index(1_368_160.into());
        assert_eq!(1368160, *tangle.get_snapshot_milestone_index());
        drop();
    }
    #[test]
    #[serial]
    fn update_and_get_solid_milestone_index() {
        init();
        let tangle = tangle();
        tangle.update_solid_milestone_index(1_368_167.into());
        assert_eq!(1_368_167, *tangle.get_solid_milestone_index());
        drop();
    }
    #[test]
    #[serial]
    fn update_and_get_last_milestone_index() {
        init();
        let tangle = tangle();
        tangle.update_last_milestone_index(1_368_168.into());
        assert_eq!(1_368_168, *tangle.get_last_milestone_index());
        drop();
    }
    #[test]
    #[serial]
    fn walk_trunk_approvers() {
        init();
        let (Transactions { a, d, e, .. }, Hashes { a_hash, .. }) = create_test_tangle();
        // Expect the trunk-connected chain a -> d -> e (see diagram below).
        let txs = tangle().trunk_walk_approvers(a_hash, |tx| true);
        assert_eq!(3, txs.len());
        assert_eq!(a.address(), txs[0].0.address());
        assert_eq!(d.address(), txs[1].0.address());
        assert_eq!(e.address(), txs[2].0.address());
        drop();
    }
    #[test]
    #[serial]
    fn walk_trunk_approvees() {
        init();
        let (Transactions { a, d, e, .. }, Hashes { e_hash, .. }) = create_test_tangle();
        // Walking backwards from 'e' yields e -> d -> a along the trunk.
        let txs = tangle().trunk_walk_approvees(e_hash, |tx| true);
        assert_eq!(3, txs.len());
        assert_eq!(e.address(), txs[0].0.address());
        assert_eq!(d.address(), txs[1].0.address());
        assert_eq!(a.address(), txs[2].0.address());
        drop();
    }
    #[test]
    #[serial]
    fn walk_approvees() {
        // NOTE(review): this test builds the tangle but asserts nothing yet —
        // it looks unfinished; confirm whether it should be completed or removed.
        init();
        let (Transactions { a, d, e, .. }, Hashes { e_hash, .. }) = create_test_tangle();
        drop();
    }
    #[test]
    #[serial]
    fn walk_approvees_depth_first() {
        init();
        let (Transactions { a, b, c, d, e, .. }, Hashes { e_hash, .. }) = create_test_tangle();
        let mut addresses = vec![];
        tangle().walk_approvees_depth_first(
            e_hash,
            |tx_ref| addresses.push(tx_ref.address().clone()),
            |tx_ref| true,
            |tx_hash| (),
        );
        // Depth-first trunk-before-branch order starting at 'e'.
        assert_eq!(*e.address(), addresses[0]);
        assert_eq!(*d.address(), addresses[1]);
        assert_eq!(*a.address(), addresses[2]);
        assert_eq!(*c.address(), addresses[3]);
        assert_eq!(*b.address(), addresses[4]);
        drop();
    }
    // The five transactions of the small test tangle, by name.
    struct Transactions {
        pub a: Transaction,
        pub b: Transaction,
        pub c: Transaction,
        pub d: Transaction,
        pub e: Transaction,
    }
    // The matching transaction hashes.
    struct Hashes {
        pub a_hash: Hash,
        pub b_hash: Hash,
        pub c_hash: Hash,
        pub d_hash: Hash,
        pub e_hash: Hash,
    }
    #[allow(clippy::many_single_char_names)]
    fn create_test_tangle() -> (Transactions, Hashes) {
        // a   b
        // |\ /
        // | c
        // |/|
        // d |
        //  \|
        //   e
        //
        // Trunk path from 'e':
        // e --(trunk)-> d --(trunk)-> a
        let tangle = tangle();
        let (a_hash, a) = create_random_tx();
        let (b_hash, b) = create_random_tx();
        let (c_hash, c) = create_random_attached_tx(a_hash, b_hash); // branch, trunk
        let (d_hash, d) = create_random_attached_tx(c_hash, a_hash);
        let (e_hash, e) = create_random_attached_tx(c_hash, d_hash);
        block_on(async {
            tangle.insert_transaction(a.clone(), a_hash).await;
            tangle.insert_transaction(b.clone(), b_hash).await;
            tangle.insert_transaction(c.clone(), c_hash).await;
            tangle.insert_transaction(d.clone(), d_hash).await;
            tangle.insert_transaction(e.clone(), e_hash).await;
        });
        // Sanity-check the approver counts implied by the diagram above.
        assert_eq!(5, tangle.size());
        assert_eq!(2, tangle.num_approvers(&a_hash));
        assert_eq!(1, tangle.num_approvers(&b_hash));
        assert_eq!(2, tangle.num_approvers(&c_hash));
        assert_eq!(1, tangle.num_approvers(&d_hash));
        assert_eq!(0, tangle.num_approvers(&e_hash));
        (
            Transactions { a, b, c, d, e },
            Hashes {
                a_hash,
                b_hash,
                c_hash,
                d_hash,
                e_hash,
            },
        )
    }
    #[test]
    #[serial]
    fn walk_approvers_post_order_dfs() {
        // Example from https://github.com/iotaledger/protocol-rfcs/blob/master/text/0005-white-flag/0005-white-flag.md
        init();
        let tangle = tangle();
        // Creates solid entry points
        let sep1 = rand_trits_field::<Hash>();
        let sep2 = rand_trits_field::<Hash>();
        let sep3 = rand_trits_field::<Hash>();
        let sep4 = rand_trits_field::<Hash>();
        let sep5 = rand_trits_field::<Hash>();
        let sep6 = rand_trits_field::<Hash>();
        tangle.add_solid_entry_point(sep1);
        tangle.add_solid_entry_point(sep2);
        tangle.add_solid_entry_point(sep3);
        tangle.add_solid_entry_point(sep4);
        tangle.add_solid_entry_point(sep5);
        tangle.add_solid_entry_point(sep6);
        // Links transactions
        let (a_hash, a) = create_random_attached_tx(sep1, sep2);
        let (b_hash, b) = create_random_attached_tx(sep3, sep4);
        let (c_hash, c) = create_random_attached_tx(sep5, sep6);
        let (d_hash, d) = create_random_attached_tx(b_hash, a_hash);
        let (e_hash, e) = create_random_attached_tx(b_hash, a_hash);
        let (f_hash, f) = create_random_attached_tx(c_hash, b_hash);
        let (g_hash, g) = create_random_attached_tx(e_hash, d_hash);
        let (h_hash, h) = create_random_attached_tx(f_hash, e_hash);
        let (i_hash, i) = create_random_attached_tx(c_hash, f_hash);
        let (j_hash, j) = create_random_attached_tx(h_hash, g_hash);
        let (k_hash, k) = create_random_attached_tx(i_hash, h_hash);
        let (l_hash, l) = create_random_attached_tx(j_hash, g_hash);
        let (m_hash, m) = create_random_attached_tx(h_hash, j_hash);
        let (n_hash, n) = create_random_attached_tx(k_hash, h_hash);
        let (o_hash, o) = create_random_attached_tx(i_hash, k_hash);
        let (p_hash, p) = create_random_attached_tx(i_hash, k_hash);
        let (q_hash, q) = create_random_attached_tx(m_hash, l_hash);
        let (r_hash, r) = create_random_attached_tx(m_hash, l_hash);
        let (s_hash, s) = create_random_attached_tx(o_hash, n_hash);
        let (t_hash, t) = create_random_attached_tx(p_hash, o_hash);
        let (u_hash, u) = create_random_attached_tx(r_hash, q_hash);
        let (v_hash, v) = create_random_attached_tx(s_hash, r_hash);
        let (w_hash, w) = create_random_attached_tx(t_hash, s_hash);
        let (x_hash, x) = create_random_attached_tx(u_hash, q_hash);
        let (y_hash, y) = create_random_attached_tx(v_hash, u_hash);
        let (z_hash, z) = create_random_attached_tx(s_hash, v_hash);
        // Confirms transactions
        // TODO uncomment when confirmation index
        // tangle.confirm_transaction(a_hash, 1);
        // tangle.confirm_transaction(b_hash, 1);
        // tangle.confirm_transaction(c_hash, 1);
        // tangle.confirm_transaction(d_hash, 2);
        // tangle.confirm_transaction(e_hash, 1);
        // tangle.confirm_transaction(f_hash, 1);
        // tangle.confirm_transaction(g_hash, 2);
        // tangle.confirm_transaction(h_hash, 1);
        // tangle.confirm_transaction(i_hash, 2);
        // tangle.confirm_transaction(j_hash, 2);
        // tangle.confirm_transaction(k_hash, 2);
        // tangle.confirm_transaction(l_hash, 2);
        // tangle.confirm_transaction(m_hash, 2);
        // tangle.confirm_transaction(n_hash, 2);
        // tangle.confirm_transaction(o_hash, 2);
        // tangle.confirm_transaction(p_hash, 3);
        // tangle.confirm_transaction(q_hash, 3);
        // tangle.confirm_transaction(r_hash, 2);
        // tangle.confirm_transaction(s_hash, 2);
        // tangle.confirm_transaction(t_hash, 3);
        // tangle.confirm_transaction(u_hash, 3);
        // tangle.confirm_transaction(v_hash, 2);
        // tangle.confirm_transaction(w_hash, 3);
        // tangle.confirm_transaction(x_hash, 3);
        // tangle.confirm_transaction(y_hash, 3);
        // tangle.confirm_transaction(z_hash, 3);
        // Constructs the graph
        block_on(async {
            tangle.insert_transaction(a, a_hash).await;
            tangle.insert_transaction(b, b_hash).await;
            tangle.insert_transaction(c, c_hash).await;
            tangle.insert_transaction(d, d_hash).await;
            tangle.insert_transaction(e, e_hash).await;
            tangle.insert_transaction(f, f_hash).await;
            tangle.insert_transaction(g, g_hash).await;
            tangle.insert_transaction(h, h_hash).await;
            tangle.insert_transaction(i, i_hash).await;
            tangle.insert_transaction(j, j_hash).await;
            tangle.insert_transaction(k, k_hash).await;
            tangle.insert_transaction(l, l_hash).await;
            tangle.insert_transaction(m, m_hash).await;
            tangle.insert_transaction(n, n_hash).await;
            tangle.insert_transaction(o, o_hash).await;
            tangle.insert_transaction(p, p_hash).await;
            tangle.insert_transaction(q, q_hash).await;
            tangle.insert_transaction(r, r_hash).await;
            tangle.insert_transaction(s, s_hash).await;
            tangle.insert_transaction(t, t_hash).await;
            tangle.insert_transaction(u, u_hash).await;
            tangle.insert_transaction(v, v_hash).await;
            tangle.insert_transaction(w, w_hash).await;
            tangle.insert_transaction(x, x_hash).await;
            tangle.insert_transaction(y, y_hash).await;
            tangle.insert_transaction(z, z_hash).await;
        });
        let mut hashes = Vec::new();
        tangle.walk_approvers_post_order_dfs(
            v_hash,
            |hash, _transaction| {
                hashes.push(*hash);
            },
            |_| true,
            |_| (),
        );
        // Expected post-order (trunk before branch) of the white-flag example.
        // TODO Remove when we have confirmation index
        assert_eq!(hashes.len(), 18);
        assert_eq!(hashes[0], a_hash);
        assert_eq!(hashes[1], b_hash);
        assert_eq!(hashes[2], d_hash);
        assert_eq!(hashes[3], e_hash);
        assert_eq!(hashes[4], g_hash);
        assert_eq!(hashes[5], c_hash);
        assert_eq!(hashes[6], f_hash);
        assert_eq!(hashes[7], h_hash);
        assert_eq!(hashes[8], j_hash);
        assert_eq!(hashes[9], l_hash);
        assert_eq!(hashes[10], m_hash);
        assert_eq!(hashes[11], r_hash);
        assert_eq!(hashes[12], i_hash);
        assert_eq!(hashes[13], k_hash);
        assert_eq!(hashes[14], n_hash);
        assert_eq!(hashes[15], o_hash);
        assert_eq!(hashes[16], s_hash);
        assert_eq!(hashes[17], v_hash);
        // TODO uncomment when we have confirmation index
        // assert_eq!(hashes.len(), 12);
        // assert_eq!(hashes[0], d_hash);
        // assert_eq!(hashes[1], g_hash);
        // assert_eq!(hashes[2], j_hash);
        // assert_eq!(hashes[3], l_hash);
        // assert_eq!(hashes[4], m_hash);
        // assert_eq!(hashes[5], r_hash);
        // assert_eq!(hashes[6], i_hash);
        // assert_eq!(hashes[7], k_hash);
        // assert_eq!(hashes[8], n_hash);
        // assert_eq!(hashes[9], o_hash);
        // assert_eq!(hashes[10], s_hash);
        // assert_eq!(hashes[11], v_hash);
        drop();
    }
}
|
use crate::cli::Args;
use color_eyre::eyre;
use serenity::{
builder::CreateInteraction,
http::Http,
model::{
id::GuildId,
interactions::{ApplicationCommandOptionType, Interaction},
},
prelude::*,
};
mod event_handler;
/// The Discord bot: holds the parsed CLI arguments and wires up
/// slash-command registration plus the gateway client.
pub struct Bot {
    // Parsed command-line options (client id, client secret, optional guild id).
    args: Args,
}
impl Bot {
    /// Builds a bot from the command-line arguments.
    pub fn new() -> Self {
        let args = Args::from_args();
        Self { args }
    }
    /// Registers the slash commands (guild-scoped when a guild id was given
    /// on the command line, otherwise globally), then starts the gateway
    /// client and blocks until it stops or errors.
    #[tokio::main]
    pub async fn run(&self) -> eyre::Result<()> {
        if let Some(guild_id) = self.args.guild_id {
            self.register_slash_commands_guild(GuildId(guild_id))
                .await?;
        } else {
            self.register_slash_commands_global().await?;
        }
        let mut client = Client::builder(self.token())
            .event_handler(self.handler())
            .await?;
        client.start().await?;
        Ok(())
    }
    // The bot token is the "client secret" CLI argument.
    fn token(&self) -> &str {
        &self.args.client_secret
    }
    // The handler is a stateless unit struct; a fresh value is returned each call.
    fn handler(&self) -> event_handler::Handler {
        event_handler::Handler
    }
    // The application id is the "client id" CLI argument.
    fn application_id(&self) -> u64 {
        self.args.client_id
    }
    // Commands registered here are handled in the `event_handlers` module
    async fn register_slash_commands_guild(&self, guild_id: GuildId) -> eyre::Result<()> {
        let http = Http::new_with_token(self.token());
        Interaction::create_guild_application_command(
            http,
            guild_id,
            self.application_id(),
            Self::uwuify_command,
        )
        .await?;
        Ok(())
    }
    // Global registration counterpart of `register_slash_commands_guild`.
    async fn register_slash_commands_global(&self) -> eyre::Result<()> {
        let http = Http::new_with_token(self.token());
        Interaction::create_global_application_command(
            http,
            self.application_id(),
            Self::uwuify_command,
        )
        .await?;
        Ok(())
    }
    // Builds the `/uwuify` command definition: one required string
    // option named "text".
    fn uwuify_command(interaction: &mut CreateInteraction) -> &mut CreateInteraction {
        interaction
            .name("uwuify")
            .description("uwuify an impowtant m-message")
            .create_interaction_option(|option| {
                option
                    .name("text")
                    .description("text to be uwuified")
                    .kind(ApplicationCommandOptionType::String)
                    .required(true)
            })
    }
}
impl Default for Bot {
fn default() -> Self {
Self::new()
}
}
|
use scene::Scene;
use regex::Regex;
use std::result;
use std::num::ParseIntError;
use std::str::FromStr;
use vector::Vector3;
use light::Light;
use triangle::Triangle;
use sphere::Sphere;
use material::Material;
// Working state of the parser: the token stream (stored reversed by
// `parse` so `pop` yields the next token) plus the scene being built.
struct ParsedScene<'a> {
    tokens: Vec<&'a str>,
    scene: &'a mut Scene,
}
/// Builds a `Scene` from a scene-definition string.
pub fn parse_scene(scenedef: String) -> result::Result<Scene, String> {
    // cleanup: ditch all comments and commas
    let cleanup_re = Regex::new(r"(,|#[^\n]*\n)").unwrap();
    let cleaned = cleanup_re.replace_all(&scenedef, " ");
    // Start from the default 128x128 scene; section parsers mutate it.
    let mut scene = Scene::new(128, 128);
    {
        // Scope the mutable borrow of `scene` to the parsing pass.
        let mut parser = ParsedScene {
            tokens: cleaned.split_whitespace().collect(),
            scene: &mut scene,
        };
        parser.parse();
    }
    Ok(scene)
}
// Error type for scene parsing; currently a single catch-all variant.
#[derive(Debug)]
enum ParseSceneError {
    GenericError,
}
// Folds integer-parse failures into the parser's generic error.
impl From<ParseIntError> for ParseSceneError {
    fn from(_: ParseIntError) -> ParseSceneError {
        ParseSceneError::GenericError
    }
}
// Lets `parse_scene` surface any parse failure as a plain string message.
impl From<ParseSceneError> for String {
    fn from(_: ParseSceneError) -> String {
        "Parse error".to_string()
    }
}
/// Parses `v` into `T`, mapping any failure onto the parser's generic error.
fn _parse<T>(v: &str) -> result::Result<T, ParseSceneError> where T : FromStr {
    match v.parse() {
        Ok(value) => Ok(value),
        Err(_) => Err(ParseSceneError::GenericError),
    }
}
// Result of parsing one directive: only success/failure matters.
type ParseResult = result::Result<(), ParseSceneError>;
impl<'a> ParsedScene<'a> {
    // Top-level parse loop: consumes section keywords and dispatches to the
    // matching section parser until the token stream is exhausted.
    fn parse(&'a mut self) {
        // Tokens are consumed with `pop`, so reverse once to read left-to-right.
        self.tokens.reverse();
        while self.tokens.len() > 0 {
            // Errors (including unknown section names) are discarded and the
            // loop keeps going with the next token.
            let _ = self.next_token()
                .and_then(|section| {
                    match section.as_ref() {
                        "options" => self.parse_options(),
                        "camera" => self.parse_camera(),
                        "pointlight" => self.parse_pointlight(),
                        "triangle" => self.parse_triangle(),
                        "sphere" => self.parse_sphere(),
                        "material" => self.parse_material(),
                        _ => Err(ParseSceneError::GenericError),
                    }
                });
        }
    }
    // Consumes and returns the next token; errors when the stream is empty.
    fn next_token(&mut self) -> result::Result<String, ParseSceneError> {
        self.tokens.pop().ok_or(ParseSceneError::GenericError).map(|s| s.to_string())
    }
    // Consumes the next token and errors unless it equals `token`.
    fn require(&mut self, token: &str) -> ParseResult {
        self.next_token().and_then(|nt| {
            if nt == token {
                Ok(())
            } else {
                Err(ParseSceneError::GenericError)
            }
        })
    }
    // Parses a double-quoted string token (returned without the quotes),
    // consuming the token only when it actually is a quoted string.
    fn parse_string(&mut self) -> result::Result<String, ParseSceneError> {
        self.tokens.last()
            .ok_or(ParseSceneError::GenericError)
            .and_then(|s| {
                let q = '"';
                // Require at least `"x"`: more than two chars, quote-delimited.
                if s.chars().count() > 2 && s.chars().nth(0).unwrap() == q && s.chars().last().unwrap() == q {
                    let chars_to_trim: &[char] = &[q];
                    Ok(s.trim_matches(chars_to_trim).to_string())
                }
                else {
                    Err(ParseSceneError::GenericError)
                }
                // only if the above succeeds should we consume the token
            }).map(|s| { let _ = self.next_token(); s })
    }
    // Consumes the next token and parses it as a number of type `T`.
    fn parse_num<T>(&mut self) -> result::Result<T, ParseSceneError> where T : FromStr {
        self.next_token().and_then(|t| _parse::<T>(&t))
    }
    // Consumes three consecutive float tokens and builds a Vector3 (x, y, z).
    fn parse_vector3(&mut self) -> result::Result<Vector3<f64>, ParseSceneError> {
        let mut result = Vector3::init(0f64);
        self.next_token()
            .and_then(|t| _parse::<f64>(&t)).and_then(|x| { result.x = x; self.next_token() })
            .and_then(|t| _parse::<f64>(&t)).and_then(|y| { result.y = y; self.next_token() })
            .and_then(|t| _parse::<f64>(&t)).map(|z| { result.z = z; result })
    }
    // Parses `["name"] { directive ... }`, feeding each directive keyword to
    // `directive_parser` until the closing brace. Returns the section's
    // (possibly empty) name.
    fn parse_section<F>(&mut self, mut directive_parser: F) -> result::Result<String, ParseSceneError>
        where F : FnMut(&mut ParsedScene, String) -> ParseResult {
        self.parse_string()
            // The quoted name is optional; fall back to an empty name.
            .or(Ok("".to_string()))
            .and_then(|s| {
                self.require("{")
                    .and_then(|_| {
                        loop {
                            match self.next_token() {
                                Ok(t) => {
                                    if t == "}".to_string() {
                                        return Ok(s)
                                    }
                                    // Any directive failure aborts the section.
                                    let _ = match directive_parser(self, t) {
                                        Ok(_) => continue,
                                        Err(_) => return Err(ParseSceneError::GenericError),
                                    };
                                },
                                _ => return Err(ParseSceneError::GenericError),
                            }
                        }
                    })
            })
    }
    // Parses the `options { ... }` section and resizes the scene accordingly.
    // `bspdepth`/`bspleafobjs` are accepted but ignored.
    fn parse_options(&mut self) -> ParseResult {
        let mut width = 0u32;
        let mut height = 0u32;
        self.parse_section(|p, t| {
            println!("options directive: {}", t);
            match t.as_ref() {
                "width" => p.parse_num().map(|i| width = i),
                "height" => p.parse_num().map(|i| height = i),
                "bgcolor" => p.parse_vector3().map(|v| p.scene.bgcolor = v),
                "bspdepth" => p.parse_num::<i32>().map(|_| ()),
                "bspleafobjs" => p.parse_num::<i32>().map(|_| ()),
                _ => Err(ParseSceneError::GenericError),
            }
        }).map(|_| self.scene.resize(width, height))
    }
/// Parse the `camera` section and rebuild the viewport afterwards.
fn parse_camera(&mut self) -> ParseResult {
self.parse_section(|p, t| {
println!("camera directive: {}", t);
match t.as_ref() {
"lookat" => p.parse_vector3().map(|v| p.scene.camera.lookat = v),
"pos" => p.parse_vector3().map(|v| p.scene.camera.eye = v),
"up" => p.parse_vector3().map(|v| p.scene.camera.up = v),
// fov comes in as degrees; stored as the half-angle in radians.
"fov" => p.parse_num::<f64>().map(|f| p.scene.camera.fov = f.to_radians()/2f64),
_ => Err(ParseSceneError::GenericError),
}
}).map(|_| self.scene.camera.setup_viewport())
}
/// Parse a `pointlight` section and append the light to the scene.
fn parse_pointlight(&mut self) -> ParseResult {
// Defaults used for any directive the section omits.
let mut light = Light {
position: Vector3::init(0.0),
color: Vector3::init(1.0),
wattage: 100f64,
};
self.parse_section(|p, t| {
println!("pointlight directive: {}", t);
match t.as_ref() {
"pos" => p.parse_vector3().map(|v| light.position = v),
"color" => p.parse_vector3().map(|v| light.color = v),
"wattage" => p.parse_num().map(|f| light.wattage = f),
_ => Err(ParseSceneError::GenericError),
}
}).map(|_| self.scene.lights.push(light))
}
/// Parse a `triangle` section and append it to the scene's object list.
fn parse_triangle(&mut self) -> ParseResult {
// Defaults: a unit right triangle in the z = 0 plane, normals facing +z,
// with the "white" material.
let mut tri = Triangle {
v1: Vector3::init(0.0),
v2: Vector3::new(1.0, 0.0, 0.0),
v3: Vector3::new(0.0, 1.0, 0.0),
n1: Vector3::new(0.0, 0.0, 1.0),
n2: Vector3::new(0.0, 0.0, 1.0),
n3: Vector3::new(0.0, 0.0, 1.0),
material: "white".to_string(),
};
self.parse_section(|p, t| {
println!("triangle directive: {}", t);
match t.as_ref() {
"v1" => p.parse_vector3().map(|v| tri.v1 = v),
"v2" => p.parse_vector3().map(|v| tri.v2 = v),
"v3" => p.parse_vector3().map(|v| tri.v3 = v),
"n1" => p.parse_vector3().map(|v| tri.n1 = v),
"n2" => p.parse_vector3().map(|v| tri.n2 = v),
"n3" => p.parse_vector3().map(|v| tri.n3 = v),
"material" => p.parse_string().map(|s| tri.material = s),
_ => Err(ParseSceneError::GenericError),
}
}).map(|_| self.scene.objects.push(Box::new(tri)))
}
/// Parse a `sphere` section and append it to the scene's object list.
fn parse_sphere(&mut self) -> ParseResult {
    // Defaults used for any directive the section omits.
    let mut sphere = Sphere {
        center: Vector3::init(0.0),
        radius: 0.0,
        material: "white".to_string(),
    };
    self.parse_section(|p, t| {
        // FIX: this log line previously said "triangle directive",
        // copy-pasted from parse_triangle.
        println!("sphere directive: {}", t);
        match t.as_ref() {
            "center" => p.parse_vector3().map(|v| sphere.center = v),
            "radius" => p.parse_num().map(|f| sphere.radius = f),
            "material" => p.parse_string().map(|s| sphere.material = s),
            _ => Err(ParseSceneError::GenericError),
        }
    }).map(|_| self.scene.objects.push(Box::new(sphere)))
}
/// Parse a named `material` section; the section's quoted name becomes
/// the material's lookup key.
fn parse_material(&mut self) -> ParseResult {
let mut mat = Material::white();
self.parse_section(|p, t| {
println!("material directive: {}", t);
match t.as_ref() {
"color" => p.parse_vector3().map(|v| mat.color = v),
"diffuse" => p.parse_num().map(|f| mat.diffuse = f),
"specular" => p.parse_num().map(|f| mat.specular = f),
_ => Err(ParseSceneError::GenericError),
}
}).map(|n| {
// parse_section returns the section's leading name string.
println!("material name: {}", n);
mat.name = n;
self.scene.materials.push(mat)
})
}
}
|
pub fn get_conf() -> Result<ApiConfig, config::ConfigError>{
let mut file_path = dirs::config_dir().expect("Error locating config folder");
file_path.push("whothis_config.yaml");
let mut default_conf = config::Config::default();
match default_conf.merge(config::File::from(file_path)){
Ok(conf) => conf.clone().try_into(),
Err(why) => Err(why),
}
}
// API keys deserialized from the user's YAML configuration file.
#[derive(serde::Deserialize, Debug)]
pub struct ApiConfig{
// Key for the WhoisXML API service (used to build the whois URL).
pub who_xml_api_key: String,
// Key for the VirusTotal API.
pub virustotal_api_key: String,
// NOTE(review): presumably a Hybrid Analysis API key — no accessor for it
// is visible here; confirm it is consumed elsewhere.
pub hybrid_analysis: String,
}
impl ApiConfig {
    /// Build the WhoisXML query URL with the API key embedded and a
    /// trailing `domainName=` parameter for the caller to complete.
    pub fn get_whois_url(&self) -> String {
        format!(
            "https://www.whoisxmlapi.com/whoisserver/WhoisService?apiKey={key}&domainName=",
            key = self.who_xml_api_key
        )
    }

    /// Return an owned copy of the VirusTotal API key.
    pub fn get_virustotal_api_key(&self) -> String {
        self.virustotal_api_key.clone()
    }
}
|
extern crate rand;
extern crate time;
extern crate timely;
extern crate differential_dataflow;
use timely::dataflow::*;
use timely::dataflow::operators::*;
use timely::progress::timestamp::RootTimestamp;
use rand::{Rng, SeedableRng, StdRng};
use differential_dataflow::operators::*;
use differential_dataflow::collection::LeastUpperBound;
type Node = u32;
type Edge = (Node, Node);
/// Demo driver: builds a differential-dataflow BFS over a random graph,
/// loads 100M edges, then churns edges forever while timing each wave.
fn main() {
// define a new computational scope, in which to run BFS
timely::execute_from_args(std::env::args(), |computation| {
let start = time::precise_time_s();
// define BFS dataflow; return handles to roots and edges inputs
let (mut roots, mut graph, probe) = computation.scoped(|scope| {
let (edge_input, graph) = scope.new_input();
let (node_input, roots) = scope.new_input();
let dists = bfs(&graph, &roots); // determine distances to each graph node
let probe = dists
// .map(|((_,s),w)| (s,w)) // keep only the distances, not node ids
// .consolidate_by(|&x| x) // aggregate into one record per distance
// .inspect_batch(move |t, x| { // print up something neat for each update
// // println!("observed at {:?}:", t);
// println!("elapsed: {}s", time::precise_time_s() - (start + t.inner as f64));
// // for y in x {
// // println!("\t{:?}", y);
// // }
// })
.probe().0;
(node_input, edge_input, probe)
});
let nodes = 50_000_000u32; // the u32 helps type inference understand what nodes are
let edges = 100_000_000;
// Two RNGs from the same seed: rng1 generates edge additions; rng2 later
// replays the identical sequence so additions can be retracted exactly.
let seed: &[_] = &[1, 2, 3, 4];
let mut rng1: StdRng = SeedableRng::from_seed(seed); // rng for edge additions
let mut rng2: StdRng = SeedableRng::from_seed(seed); // rng for edge deletions
println!("performing BFS on {} nodes, {} edges:", nodes, edges);
// Only worker 0 feeds the graph; data is shuffled to the others.
if computation.index() == 0 {
// trickle edges in to dataflow
for _ in 0..(edges/1000) {
for _ in 0..1000 {
graph.send(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), 1));
}
computation.step();
}
}
// start the root set out with roots 0, 1, and 2
// roots.advance_to(0);
computation.step();
computation.step();
computation.step();
println!("loaded; elapsed: {}s", time::precise_time_s() - start);
roots.send((0, 1));
roots.send((1, 1));
roots.send((2, 1));
roots.advance_to(1);
roots.close();
graph.advance_to(1);
// Run the workers until the initial BFS (epoch 0) is fully computed.
while probe.le(&RootTimestamp::new(0)) {
computation.step();
}
// NOTE(review): `for wave in 0..` never terminates — the process churns
// edges until killed. Confirm this is the intended benchmark behaviour.
let mut changes = Vec::new();
for wave in 0.. {
// Pre-generate 1000 paired (addition, deletion) updates per wave.
for _ in 0..1000 {
changes.push(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), 1));
changes.push(((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes)),-1));
}
let start = time::precise_time_s();
for _ in 0..1000 {
// One epoch per pair of updates; step until its effects settle.
let round = *graph.epoch();
graph.send(changes.pop().unwrap());
graph.send(changes.pop().unwrap());
graph.advance_to(round + 1);
while probe.le(&RootTimestamp::new(round)) {
computation.step();
}
}
println!("wave {}: avg {}", wave, (time::precise_time_s() - start) / 1000.0f64);
}
// // repeatedly change edges
// let mut round = 0 as u32;
// while computation.step() {
// // once each full second ticks, change an edge
// if time::precise_time_s() - start >= round as f64 {
// // add edges using prior rng; remove edges using fresh rng with the same seed
// graph.send(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), 1));
// graph.send(((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes)),-1));
// graph.advance_to(round + 1);
// round += 1;
// }
// }
});
}
// returns pairs (n, s) indicating node n can be reached from a root in s steps.
fn bfs<G: Scope>(edges: &Stream<G, (Edge, i32)>, roots: &Stream<G, (Node, i32)>)
-> Stream<G, ((Node, u32), i32)>
where G::Timestamp: LeastUpperBound {
// initialize roots as reaching themselves at distance 0
let nodes = roots.map(|(x,w)| ((x, 0), w));
// let edges = edges.map_in_place(|x| x.0 = ((x.0).1, (x.0).0))
// .concat(&edges);
// repeatedly update minimal distances each node can be reached from each root
nodes.iterate(|inner| {
// Bring the static edge and root collections into the loop's scope.
let edges = inner.scope().enter(&edges);
let nodes = inner.scope().enter(&nodes);
// Propagate each known distance across an edge (distance + 1), merge
// with the roots, and keep the minimum distance per node (the group
// operator pushes only the first — smallest — value per key).
inner.join_map_u(&edges, |_k,l,d| (*d, l+1))
.concat(&nodes)
.group_u(|_, s, t| t.push((*s.peek().unwrap().0, 1)))
})
}
|
#[macro_use]
extern crate log;
extern crate pretty_env_logger;
#[macro_use]
extern crate failure;
#[macro_use]
extern crate structopt;
extern crate glob;
extern crate pnet;
use std::fmt;
use std::fs::File;
use std::io::{self, BufWriter, Read, Write};
use std::path::PathBuf;
use failure::Error;
use glob::glob;
use structopt::StructOpt;
// Command-line options for the pcap dump tool.
// NOTE: structopt turns `///` field comments into CLI help text, so the
// new annotations below use plain `//` to leave --help output unchanged.
#[derive(Debug, StructOpt)]
#[structopt(about = "dump pcap/pcapng file.")]
struct Opt {
/// When parsing and printing, produce (slightly more) verbose output.
#[structopt(short = "v")]
verbose: bool,
/// Read packets from file
#[structopt(short = "r", parse(from_os_str))]
files: Vec<PathBuf>,
// Output destination; omitted or "-" means stdout (see main()).
#[structopt(short = "o", parse(from_os_str))]
output: Option<PathBuf>,
}
/// Dump every file matched by `patterns` to `w`.
///
/// An empty pattern list, or the single pattern `-`, reads from stdin.
/// Each pattern is expanded as a glob; unreadable glob entries are logged
/// and skipped, while open/dump failures abort with an error.
fn dump_files<W: Write>(w: &mut W, patterns: Vec<PathBuf>) -> Result<(), Error> {
    if patterns.is_empty() || patterns == vec![PathBuf::from("-")] {
        debug!("reading from stdin");
        let stdin = io::stdin();
        let mut handle = stdin.lock();
        dump_file(w, &mut handle)?;
    } else {
        for pattern in patterns {
            // FIX: `pattern.to_str().unwrap()` panicked on non-UTF-8 paths;
            // a lossy conversion handles them gracefully.
            let pattern = pattern.to_string_lossy();
            for entry in glob(&pattern).expect("Failed to read glob pattern") {
                match entry {
                    Ok(path) => {
                        debug!("reading file {:?}", path);
                        let mut f = File::open(path)?;
                        dump_file(w, &mut f)?;
                    }
                    Err(err) => warn!("skip entry, {}", err),
                }
            }
        }
    }
    Ok(())
}
/// Dump a single capture stream from `r` to `w`.
///
/// NOTE(review): currently a stub — it reads nothing, writes nothing and
/// always returns `Ok(())`. Parameters are underscore-prefixed to silence
/// unused-variable warnings until the pcap parsing is implemented.
fn dump_file<W: Write, R: Read>(_w: &mut W, _r: &mut R) -> fmt::Result {
    Ok(())
}
fn main() {
pretty_env_logger::init();
let opt = Opt::from_args();
debug!("parsed options: {:#?}", opt);
match opt.output {
Some(ref path) if *path != PathBuf::from("-") => {
debug!("dump to file {:?}", path);
let f = File::create(path).unwrap();
let mut w = BufWriter::new(f);
dump_files(&mut w, opt.files).expect("dump to file");
}
_ => {
debug!("dump to stdout");
let stdout = io::stdout();
let mut handle = stdout.lock();
let mut w = BufWriter::new(handle);
dump_files(&mut w, opt.files).expect("dump to stdout");
}
}
}
|
use log::*;
use serde_json::Value;
use tantivy::tokenizer::StopWordFilter;
/// Builds tantivy `StopWordFilter`s from a JSON configuration of the form
/// `{"words": ["a", "b", ...]}`.
#[derive(Clone)]
pub struct StopWordFilterFactory {}
impl StopWordFilterFactory {
    pub fn new() -> Self {
        StopWordFilterFactory {}
    }

    /// Create a `StopWordFilter` from `json`.
    ///
    /// Falls back to the default stop-word list when the JSON is invalid,
    /// `words` is missing, or the resulting list is empty.
    pub fn create(self, json: &str) -> StopWordFilter {
        let v: Value = match serde_json::from_str(json) {
            Result::Ok(val) => val,
            Result::Err(err) => {
                warn!("failed to parse JSON: {}", err.to_string());
                serde_json::Value::Null
            }
        };
        match v["words"].as_array() {
            Some(w) => {
                // FIX: a non-string array entry previously panicked via
                // `as_str().unwrap()`; such entries are now skipped.
                let words: Vec<String> = w
                    .iter()
                    .filter_map(|s| s.as_str().map(|s| s.to_string()))
                    .collect();
                if !words.is_empty() {
                    StopWordFilter::remove(words)
                } else {
                    warn!("words are empty. set default words");
                    StopWordFilter::default()
                }
            }
            _ => {
                warn!("words are missing. set default words");
                StopWordFilter::default()
            }
        }
    }
}
#[cfg(test)]
mod tests {
use tantivy::tokenizer::{SimpleTokenizer, TextAnalyzer};
use crate::tokenizer::stop_word_filter_factory::StopWordFilterFactory;
// Tokenize `text` through a stop-word filter configured to drop
// "a", "b" and "c", returning the surviving tokens.
fn helper(text: &str) -> Vec<String> {
let json = r#"
{
"words": [
"a",
"b",
"c"
]
}
"#;
let factory = StopWordFilterFactory::new();
let filter = factory.create(json);
let mut tokens = vec![];
let mut token_stream = TextAnalyzer::from(SimpleTokenizer)
.filter(filter)
.token_stream(text);
while token_stream.advance() {
let token_text = token_stream.token().text.clone();
tokens.push(token_text);
}
tokens
}
// The configured stop words (a, b, c) are removed; everything else remains.
#[test]
fn test_stemming_filter() {
assert_eq!(
vec![
"d".to_string(),
"e".to_string(),
"f".to_string(),
"g".to_string(),
],
helper("a b c d e f g")
);
}
}
|
use std::{fs, io};
use std::sync::Arc;
use io::{BufReader, ErrorKind};
use fs::File;
use tokio_rustls::rustls::internal::pemfile;
use tokio_rustls::rustls::{Certificate, NoClientAuth, PrivateKey, ServerConfig};
fn load_certs(filename: &str) -> io::Result<Vec<Certificate>> {
let cert_file = File::open(filename)?;
let mut reader = BufReader::new(cert_file);
pemfile::certs(&mut reader)
.map_err(|_| io::Error::new(ErrorKind::InvalidInput, "Couldn't parse certificates"))
}
/// Read exactly one RSA private key from the PEM file `filename`.
///
/// Errors with `InvalidInput` when the file cannot be parsed or contains
/// anything other than a single key.
///
/// NOTE(review): only PEM `RSA PRIVATE KEY` blocks are recognised here;
/// PKCS#8 keys would need `pemfile::pkcs8_private_keys` — confirm RSA-only
/// is acceptable.
fn load_private_key(filename: &str) -> io::Result<PrivateKey> {
    let key_file = fs::File::open(filename)?;
    let mut reader = io::BufReader::new(key_file);
    // Load and return a single private key.
    let mut keys = pemfile::rsa_private_keys(&mut reader)
        .map_err(|_| io::Error::new(ErrorKind::InvalidInput, "Couldn't parse key"))?;
    if keys.len() != 1 {
        return Err(io::Error::new(
            ErrorKind::InvalidInput,
            "Expected just one key",
        ));
    }
    // FIX: take ownership of the single key instead of cloning it.
    Ok(keys.remove(0))
}
// Build TLS configuration.
pub fn get_configuration(crt_file: &str, key_file: &str) -> io::Result<Arc<ServerConfig>> {
let certs = load_certs(crt_file)?;
let key = load_private_key(key_file)?;
// Do not use client certificate authentication.
let mut cfg = ServerConfig::new(NoClientAuth::new());
cfg.set_single_cert(certs, key).map_err(|e| {
io::Error::new(
ErrorKind::InvalidInput,
format!("Certs and key don't match. {:?}", e),
)
})?;
Ok(Arc::new(cfg))
}
|
use aoc::read_data;
use std::convert::Infallible;
use std::error::Error;
use std::str::FromStr;
/// top-left (0,0)
// Position(column, row) on the toboggan map.
struct Position(usize, usize);
impl Position {
// Return the map object at this position; the row handles horizontal
// wrap-around, but the row index (self.1) must itself be in bounds.
fn walk(&self, data: &[Iline]) -> Obj {
data[self.1].get(self.0)
}
// walk_by (right, down)
fn walk_by(&mut self, right: usize, down: usize) {
self.0 += right;
self.1 += down;
}
}
/// A single cell of the map: a tree (`#`) or open ground (`.`).
#[derive(PartialEq, Clone)]
enum Obj {
    Tree,
    Empty,
}
impl Obj {
    /// Decode a map character; panics on anything other than `.` or `#`.
    fn from_char(s: char) -> Self {
        match s {
            '.' => Obj::Empty,
            '#' => Obj::Tree,
            _ => panic!("I do not know how to read"),
        }
    }

    /// True when this cell is a tree.
    fn is_tree(&self) -> bool {
        *self == Obj::Tree
    }
}
/// One row of the map; the pattern repeats infinitely to the right.
struct Iline {
    line: Vec<Obj>,
}
impl Iline {
    /// Get the cell at column `idx`, wrapping around the row.
    ///
    /// FIX: replaces a float-division + repeated-subtraction loop with the
    /// equivalent (and O(1)) modulo operation.
    fn get(&self, idx: usize) -> Obj {
        self.line[idx % self.line.len()].clone()
    }
}
impl FromStr for Iline {
    type Err = Infallible;

    /// Build a row by decoding each character of `s`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Self {
            line: s.chars().map(Obj::from_char).collect(),
        })
    }
}
/// Advance `position` by one (`right`, `down`) hop and report whether it
/// landed on a tree.
fn step(data: &[Iline], position: &mut Position, right: usize, down: usize) -> bool {
    position.walk_by(right, down);
    position.walk(data).is_tree()
}
/// Starting at the top-left corner of your map and following a slope of right 3 and down 1, how many trees would you encounter?
///
/// Counts the trees hit while descending `data` with slope
/// (`right`, `down`) from the top-left corner.
fn find_trees(data: &[Iline], right: usize, down: usize) -> usize {
    let mut position = Position(0, 0);
    let mut trees = 0;
    // FIX: guard the subtraction — `data.len() - 1` underflowed (panicked)
    // on an empty map; now an empty map simply yields 0 trees.
    let last_row = data.len().saturating_sub(1);
    while position.1 < last_row {
        if step(data, &mut position, right, down) {
            trees += 1
        }
    }
    trees
}
/// Part 1: trees encountered on the slope right 3, down 1.
fn p1(data: &[Iline]) -> usize {
find_trees(data, 3, 1)
}
/// Part 2: product of the trees encountered on each of the five listed
/// slopes (right, down).
fn p2(data: &[Iline]) -> usize {
    let slopes = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)];
    slopes
        .iter()
        .map(|&(right, down)| find_trees(data, right, down))
        .product()
}
/// Read the day-3 input file and print the answers for both parts.
fn main() -> Result<(), Box<dyn Error>> {
println!("Hello, Advent Of Code 2020!");
// part 1
let data: Vec<Iline> = read_data("./data/data3").unwrap();
println!("Starting at the top-left corner of your map and following a slope of right 3 and down 1, how many trees would you encounter?: {}", p1(&data));
//part 2
println!("What do you get if you multiply together the number of trees encountered on each of the listed slopes?: {}", p2(&data));
Ok(())
}
// Smoke test: the input file exists and can be read as lines of String.
#[test]
fn data_read() {
println!("{:?}", read_data::<String>("./data/data3").unwrap());
}
// Walks the AoC day-3 example map step by step, then checks both parts'
// published answers (7 trees for part 1, product 336 for part 2).
#[test]
fn calc() {
let data = vec![
"..##.......",
"#...#...#..",
".#....#..#.",
"..#.#...#.#",
".#...##..#.",
"..#.##.....",
".#.#.#....#",
".#........#",
"#.##...#...",
"#...##....#",
".#..#...#.#",
];
let mut v: Vec<Iline> = Vec::new();
for d in data {
v.push(d.parse().unwrap())
}
// Each step below follows the right-3/down-1 slope one hop at a time.
let mut position = Position(0, 0);
assert_eq!(step(&v, &mut position, 3, 1), false);
assert_eq!(step(&v, &mut position, 3, 1), true);
assert_eq!(step(&v, &mut position, 3, 1), false);
assert_eq!(step(&v, &mut position, 3, 1), true);
assert_eq!(step(&v, &mut position, 3, 1), true);
assert_eq!(step(&v, &mut position, 3, 1), false);
assert_eq!(step(&v, &mut position, 3, 1), true);
assert_eq!(step(&v, &mut position, 3, 1), true);
assert_eq!(step(&v, &mut position, 3, 1), true);
assert_eq!(step(&v, &mut position, 3, 1), true);
assert_eq!(p1(&v), 7);
// part 2
assert_eq!(find_trees(&v, 1, 1), 2);
assert_eq!(find_trees(&v, 3, 1), 7);
assert_eq!(find_trees(&v, 5, 1), 3);
assert_eq!(find_trees(&v, 7, 1), 4);
assert_eq!(find_trees(&v, 1, 2), 2);
assert_eq!(p2(&v), 336);
}
|
use std::f64::{INFINITY, NAN};
use wasm_bindgen::JsValue;
use wasm_bindgen_test::*;
use js_sys::*;
// Number.isFinite: true only for actual finite JS numbers; no string
// coercion, and NaN/Infinity are rejected.
#[wasm_bindgen_test]
fn is_finite() {
assert!(Number::is_finite(&42.into()));
assert!(Number::is_finite(&42.1.into()));
assert!(!Number::is_finite(&"42".into()));
assert!(!Number::is_finite(&NAN.into()));
assert!(!Number::is_finite(&INFINITY.into()));
}
// Number.isInteger: whole numbers pass, fractional values do not.
#[wasm_bindgen_test]
fn is_integer() {
assert!(Number::is_integer(&42.into()));
assert!(!Number::is_integer(&42.1.into()));
}
// Number.isNaN: true only for the NaN value itself — unlike global isNaN,
// no coercion of non-numbers is performed.
#[wasm_bindgen_test]
fn is_nan() {
assert!(Number::is_nan(&NAN.into()));
assert!(!Number::is_nan(&JsValue::TRUE));
assert!(!Number::is_nan(&JsValue::NULL));
assert!(!Number::is_nan(&37.into()));
assert!(!Number::is_nan(&"37".into()));
assert!(!Number::is_nan(&"37.37".into()));
assert!(!Number::is_nan(&"".into()));
assert!(!Number::is_nan(&" ".into()));
// These would all return true with the global isNaN()
assert!(!Number::is_nan(&"NaN".into()));
assert!(!Number::is_nan(&JsValue::UNDEFINED));
assert!(!Number::is_nan(&"blabla".into()));
}
// Number.isSafeInteger: integers below 2^53 in magnitude are "safe";
// 2^53 itself, strings, fractions, NaN and Infinity are not.
// FIX: use assert!/assert!(!…) instead of assert_eq!(…, true/false)
// (clippy::bool_assert_comparison).
#[wasm_bindgen_test]
fn is_safe_integer() {
    assert!(Number::is_safe_integer(&42.into()));
    assert!(Number::is_safe_integer(&(Math::pow(2., 53.) - 1.).into()));
    assert!(!Number::is_safe_integer(&Math::pow(2., 53.).into()));
    assert!(!Number::is_safe_integer(&"42".into()));
    assert!(!Number::is_safe_integer(&42.1.into()));
    assert!(!Number::is_safe_integer(&NAN.into()));
    assert!(!Number::is_safe_integer(&INFINITY.into()));
}
// `new Number(42)` creates a wrapper *object* whose valueOf() yields the
// primitive 42.
#[wasm_bindgen_test]
fn new() {
let n = Number::new(JsValue::from(42));
let v = JsValue::from(n);
assert!(v.is_object());
assert_eq!(Number::from(v).value_of(), 42.);
}
// parseInt honours the radix argument; unparsable input yields NaN.
#[wasm_bindgen_test]
fn parse_int_float() {
assert_eq!(Number::parse_int("42", 10).value_of(), 42.);
assert_eq!(Number::parse_int("42", 16).value_of(), 66.); // 0x42 == 66
assert!(Number::parse_int("invalid int", 10).value_of().is_nan());
assert_eq!(Number::parse_float("123456.789").value_of(), 123456.789);
assert!(Number::parse_float("invalid float").value_of().is_nan());
}
// toLocaleString with an en-US locale inserts thousands separators.
#[wasm_bindgen_test]
fn to_locale_string() {
let number = Number::new(1234.45.into());
assert_eq!(number.to_locale_string("en-US"), "1,234.45");
// TODO: these tests seems to be system dependent, disable for now
// assert_eq!(wasm.to_locale_string(number, "de-DE"), "1,234.45");
// assert_eq!(wasm.to_locale_string(number, "zh-Hans-CN-u-nu-hanidec"), "1,234.45");
}
// toPrecision formats to significant digits; an out-of-range precision
// (here 101) is surfaced as Err on the Rust side.
#[wasm_bindgen_test]
fn to_precision() {
assert_eq!(Number::new(0.1.into()).to_precision(3).unwrap(), "0.100");
assert!(Number::new(10.into()).to_precision(101).is_err());
}
// toString takes a radix; an out-of-range radix (here 100) yields Err.
#[wasm_bindgen_test]
fn to_string() {
assert_eq!(Number::new(42.into()).to_string(10).unwrap(), "42");
assert_eq!(Number::new(233.into()).to_string(16).unwrap(), "e9");
assert!(Number::new(100.into()).to_string(100).is_err());
}
// valueOf unwraps the Number object back to its f64 primitive.
#[wasm_bindgen_test]
fn value_of() {
assert_eq!(Number::new(42.into()).value_of(), 42.);
}
// toFixed rounds to the given decimal places; out-of-range digit counts
// (here 101) yield Err.
#[wasm_bindgen_test]
fn to_fixed() {
assert_eq!(Number::new(123.456.into()).to_fixed(2).unwrap(), "123.46");
assert!(Number::new(10.into()).to_fixed(101).is_err());
}
// toExponential renders scientific notation; out-of-range fraction digit
// counts (here 101) yield Err.
#[wasm_bindgen_test]
fn to_exponential() {
assert_eq!(Number::new(123456.into()).to_exponential(2).unwrap(), "1.23e+5");
assert!(Number::new(10.into()).to_exponential(101).is_err());
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.