text stringlengths 8 4.13M |
|---|
//! numerical trait constraints
use ndarray_linalg::lapack::Lapack;
/// Scalar constraint used by this crate: a floating-point type
/// (`num_traits::Float`) that also has LAPACK bindings (`Lapack`).
/// Only `f32` and `f64` implement it.
pub trait Float: Sized + num_traits::Float + Lapack {}
impl Float for f32 {}
impl Float for f64 {}
|
use actix_web::{web, App, HttpServer};
use dotenv::dotenv;
use drogue_cloud_api_key_service::{
endpoints::WebData as KeycloakWebData, service::KeycloakApiKeyService,
};
use drogue_cloud_service_common::{
config::ConfigFromEnv, health::HealthServer, openid::Authenticator, openid_auth,
};
use drogue_cloud_user_auth_service::{endpoints, service, Config, WebData};
use futures::TryFutureExt;
#[actix_web::main]
async fn main() -> anyhow::Result<()> {
env_logger::init();
// Load a `.env` file if present; ignore the error when there is none.
dotenv().ok();
// Initialize config from environment variables
let config = Config::from_env()?;
let max_json_payload_size = config.max_json_payload_size;
let enable_auth = config.enable_auth;
// Only construct the OpenID authenticator when authentication is enabled;
// `None` here disables auth in the middleware below.
let authenticator = if enable_auth {
Some(Authenticator::new().await?)
} else {
None
};
// Shared state for the user-auth endpoints (Postgres-backed service).
let data = web::Data::new(WebData {
authenticator,
service: service::PostgresAuthorizationService::new(config.service)?,
});
// Shared state for the API-key endpoints (Keycloak-backed service).
let api_key = web::Data::new(KeycloakWebData {
service: KeycloakApiKeyService::new(config.keycloak)?,
});
// health server
let health = HealthServer::new(config.health, vec![Box::new(data.service.clone())]);
// main server
let main = HttpServer::new(move || {
// Pull the authenticator back out of the app data for the auth
// middleware; yields None (auth disabled) when it was not configured.
let auth = openid_auth!(req -> {
req
.app_data::<web::Data<WebData<service::PostgresAuthorizationService>>>()
.as_ref()
.and_then(|data|data.authenticator.as_ref())
});
drogue_cloud_user_auth_service::app!(
data,
KeycloakApiKeyService,
api_key,
max_json_payload_size,
enable_auth,
auth
)
})
.bind(config.bind_addr)?
.run();
// Run both servers concurrently; fail fast if either errors out.
futures::try_join!(health.run(), main.err_into())?;
// exiting
Ok(())
}
|
//! Rust interface for Objective-C's `@throw` and `@try`/`@catch` statements.
extern crate libc;
use std::mem;
use std::ptr;
use libc::{c_int, c_void};
// Link against the Objective-C runtime; no symbols are declared in this block.
#[link(name = "objc", kind = "dylib")]
extern { }
// C shim functions — presumably compiled from an accompanying ObjC source
// file (confirm against the build script). `RustObjCExceptionTryCatch` runs
// `try(context)` inside @try/@catch, writes any caught exception to `error`,
// and returns 0 on success / non-zero when an exception was caught (see the
// check in `try_no_ret` below).
extern {
fn RustObjCExceptionThrow(exception: *mut c_void);
fn RustObjCExceptionTryCatch(try: extern fn(*mut c_void),
context: *mut c_void, error: *mut *mut c_void) -> c_int;
}
/// Throws an Objective-C exception.
/// The argument must be a pointer to an Objective-C object.
///
/// # Safety
///
/// Unsafe because this unwinds from Objective-C.
pub unsafe fn throw(exception: *mut c_void) -> ! {
RustObjCExceptionThrow(exception);
// The shim never returns; this only satisfies the `-> !` signature.
unreachable!();
}
/// Runs `closure` inside an Objective-C `@try`/`@catch` via the C shim,
/// discarding the closure's return value. Returns `Err` with the caught
/// exception pointer (possibly null — see the test below) if one was thrown.
unsafe fn try_no_ret<F>(closure: F) -> Result<(), *mut c_void>
where F: FnOnce() {
// Monomorphized trampoline with C ABI that the shim can call back into.
extern fn try_objc_execute_closure<F>(closure: &mut Option<F>)
where F: FnOnce() {
// This is always passed Some, so it's safe to unwrap
let closure = closure.take().unwrap();
closure();
}
// Wrap the closure in an Option so it can be taken
let mut closure = Some(closure);
// Erase the `&mut Option<F>` parameter type so the trampoline can be
// passed through the C interface as a plain `extern fn(*mut c_void)`.
let f = mem::transmute(try_objc_execute_closure::<F>);
let context = &mut closure as *mut _ as *mut c_void;
let mut exception = ptr::null_mut();
let success = RustObjCExceptionTryCatch(f, context, &mut exception);
// The shim returns 0 on success, non-zero when an exception was caught.
if success == 0 {
Ok(())
} else {
Err(exception)
}
}
/// Tries to execute the given closure and catches an Objective-C exception
/// if one is thrown.
///
/// Returns a `Result` that is either `Ok` if the closure succeeded without an
/// exception being thrown, or an `Err` with a pointer to an exception if one
/// was thrown. The exception is retained and so must be released.
///
/// Unsafe because this encourages unwinding through the closure from
/// Objective-C, which is not safe.
pub unsafe fn try<F, R>(closure: F) -> Result<R, *mut c_void>
where F: FnOnce() -> R {
// The FFI-driven inner call cannot return a value directly, so capture
// the closure's result through a borrowed Option instead.
let mut value = None;
let result = {
let value_ref = &mut value;
try_no_ret(move || {
*value_ref = Some(closure());
})
};
// If the try succeeded, this was set so it's safe to unwrap
result.map(|_| value.unwrap())
}
#[cfg(test)]
mod tests {
use std::ptr;
use super::{throw, try};
#[test]
fn test_try() {
unsafe {
// Throwing inside the closure surfaces as Err carrying the thrown
// object (a null pointer is a legal "object" here).
let s = "Hello".to_string();
let result = try(move || {
if s.len() > 0 {
throw(ptr::null_mut());
}
s.len()
});
assert!(result.unwrap_err() == ptr::null_mut());
// A closure that does not throw passes its value through as Ok.
let mut s = "Hello".to_string();
let result = try(move || {
s.push_str(", World!");
s
});
assert!(result.unwrap() == "Hello, World!");
}
}
}
|
use super::{
AnnotatedFunctionMap, BasicBlock, Compiler, Function, LLVMInstruction, Object, Scope, Type,
};
/// Mutable view of everything needed while compiling one function: the
/// annotated function table, the compiler itself, the function being built,
/// its scope, and the index of the basic block instructions are appended to.
pub struct Context<'a, 'b: 'a> {
pub function_map: &'a AnnotatedFunctionMap,
pub compiler: &'a mut Compiler<'b>,
pub function: &'a mut Function,
pub scope: &'a mut Scope<'b>,
/// this should be the current block that
/// the builder is building against. This allows
/// one to get back to it when switching context,
/// for example building a child function.
/// TODO: move current block to function
pub block: usize,
}
impl<'a, 'b> Context<'a, 'b> {
    /// Bundles the per-function compilation state into a `Context`.
    pub fn new(
        function_map: &'a AnnotatedFunctionMap,
        compiler: &'a mut Compiler<'b>,
        function: &'a mut Function,
        scope: &'a mut Scope<'b>,
        block: usize,
    ) -> Context<'a, 'b> {
        Context {
            function_map,
            compiler,
            function,
            scope,
            block,
        }
    }

    /// Allocates a new object of `object_type` in the current function.
    pub fn allocate(&mut self, object_type: Type) -> Object {
        self.function.allocate(object_type)
    }

    /// Appends `instruction` to the basic block currently being built.
    pub fn add_instruction(&mut self, instruction: LLVMInstruction) {
        self.function.basic_blocks[self.block].add_instruction(instruction)
    }

    /// Reserves an object slot without assigning it a type yet.
    pub fn allocate_without_type(&mut self) -> usize {
        self.function.allocate_object()
    }

    /// Emits an integer constant and returns the object holding it.
    pub fn const_int(&mut self, value: i64) -> Object {
        let object = self.allocate(Type::Int);
        self.add_instruction(LLVMInstruction::ConstInt {
            value,
            target: object.index,
        });
        object
    }

    /// LLVM GetElementPtr calls must use i32 values to
    /// specify indices. Thus exposing that option.
    /// const_int should be used when authoring code for
    /// disp itself.
    pub fn const_i32(&mut self, value: i32) -> Object {
        let object = self.allocate(Type::Int);
        self.add_instruction(LLVMInstruction::ConstI32 {
            value,
            target: object.index,
        });
        object
    }

    /// Adds a basic block (a pointer to a section of code for LLVM) and
    /// returns its index.
    pub fn create_block(&mut self, name: String) -> usize {
        self.function.create_block(name)
    }

    /// Returns the basic block currently being built against.
    pub fn current_block(&self) -> &BasicBlock {
        &self.function.basic_blocks[self.block]
    }

    /// Resolves `name` with matching `arg_types`, checking the local scope
    /// first and falling back to the compiler's global scope.
    pub fn get_function(&self, name: &str, arg_types: &[Type]) -> Option<String> {
        self.scope
            .get_function(name, arg_types)
            .or_else(|| self.compiler.scope.get_function(name, arg_types))
    }
}
|
use std::{str::from_utf8};
/// Lexical token. Spanned variants carry `(start, end)` byte offsets into
/// the input buffer (end exclusive); see how `rust_specific_pass` and
/// `RustSpecific::to_string` slice with them.
#[derive(Debug, Clone, Copy)]
pub enum Token {
OpenParen,
CloseParen,
OpenCurly,
CloseCurly,
OpenBracket,
CloseBracket,
SemiColon,
Colon,
Comma,
NewLine,
// This is mixing ideas here
// I am making this rust specific
// But I'm not too worried about that right now.
Comment((usize, usize)),
Spaces((usize, usize)),
String((usize, usize)),
Integer((usize, usize)),
Float((usize, usize)),
Atom((usize, usize)),
}
/// Cursor state for the tokenizer. Only the byte offset is stored; the
/// input buffer is passed into every method rather than owned here.
#[derive(Debug, Clone)]
pub struct Tokenizer {
pub position: usize,
}
/// A token after the Rust-specific pass: either a plain `Token`, or a span
/// that was recognized as a Rust keyword. The commented-out variants list
/// the keywords matched by `rust_specific_pass`.
#[derive(Debug)]
pub enum RustSpecific {
// As,
// Break,
// Const,
// Continue,
// Crate,
// Else,
// Enum,
// Extern,
// False,
// Fn,
// For,
// If,
// Impl,
// In,
// Let,
// Loop,
// Match,
// Mod,
// Move,
// Mut,
// Pub,
// Ref,
// Return,
// SelfValue,
// SelfType,
// Static,
// Struct,
// Super,
// Trait,
// True,
// Type,
// Unsafe,
// Use,
// Where,
// While,
Token(Token),
Keyword((usize, usize)),
}
impl RustSpecific {
pub fn to_string<'a>(&self, chars: &'a Vec<u8>) -> &'a str {
from_utf8(match self {
RustSpecific::Keyword((s, e)) => &chars[*s..*e],
RustSpecific::Token(t) => {
match t {
Token::OpenParen => &[b'('],
Token::CloseParen => &[b')'],
Token::OpenCurly => &[b'{'],
Token::CloseCurly => &[b'}'],
Token::OpenBracket => &[b'['],
Token::CloseBracket => &[b']'],
Token::SemiColon => &[b';'],
Token::Colon => &[b':'],
Token::Comma => &[b','],
Token::NewLine => &[],
Token::Comment((s, e))
| Token::Spaces((s, e))
| Token::String((s, e))
| Token::Integer((s, e))
| Token::Float((s, e))
| Token::Atom((s, e)) => &chars[*s..*e],
}
}
}).unwrap()
}
pub fn get_token_start(&self, default_start: usize) -> usize {
match self {
RustSpecific::Keyword((s, _e)) => *s,
RustSpecific::Token(t) => {
match t {
Token::OpenParen |
Token::CloseParen |
Token::OpenCurly |
Token::CloseCurly |
Token::OpenBracket |
Token::CloseBracket |
Token::SemiColon |
Token::Colon |
Token::Comma |
Token::NewLine => default_start,
Token::Comment((s, _e))
| Token::Spaces((s, _e))
| Token::String((s, _e))
| Token::Integer((s, _e))
| Token::Float((s, _e))
| Token::Atom((s, _e)) => *s
}
}
}
}
pub fn get_token_end(&self, start: usize) -> usize {
match self {
RustSpecific::Keyword((_s, e)) => *e,
RustSpecific::Token(t) => {
match t {
Token::OpenParen |
Token::CloseParen |
Token::OpenCurly |
Token::CloseCurly |
Token::OpenBracket |
Token::CloseBracket |
Token::SemiColon |
Token::Colon |
Token::Comma |
Token::NewLine => start + 1,
Token::Comment((_s, e))
| Token::Spaces((_s, e))
| Token::String((_s, e))
| Token::Integer((_s, e))
| Token::Float((_s, e))
| Token::Atom((_s, e)) => *e
}
}
}
}
}
// Byte constants used by the tokenizer. `const` (not `static`) is the
// idiomatic choice for small Copy values that never need a fixed address.
const ZERO: u8 = b'0';
const NINE: u8 = b'9';
const SPACE: u8 = b' ';
const NEW_LINE: u8 = b'\n';
const DOUBLE_QUOTE: u8 = b'"';
const OPEN_PAREN: u8 = b'(';
const CLOSE_PAREN: u8 = b')';
const PERIOD: u8 = b'.';
impl<'a> Tokenizer {
pub fn new() -> Tokenizer {
Tokenizer {
position: 0,
}
}
fn peek(&self, input_bytes: &[u8]) -> Option<u8> {
if self.position + 1 < input_bytes.len() {
Some(input_bytes[self.position + 1])
} else {
None
}
}
fn is_comment_start(&self, input_bytes: &[u8]) -> bool {
input_bytes[self.position] == b'/' && self.peek(input_bytes) == Some(b'/')
}
fn parse_comment(&mut self, input_bytes: &[u8]) -> Token {
let start = self.position;
while !self.at_end(input_bytes) && !self.is_newline(input_bytes) {
self.consume();
}
// self.consume();
Token::Comment((start, self.position))
}
pub fn consume(&mut self) {
self.position += 1;
}
pub fn current_byte(&self, input_bytes: &[u8]) -> u8 {
input_bytes[self.position]
}
pub fn is_space(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == SPACE
}
pub fn at_end(&self, input_bytes: &[u8]) -> bool {
self.position >= input_bytes.len()
}
pub fn is_quote(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == DOUBLE_QUOTE
}
pub fn parse_string(&mut self, input_bytes: &[u8]) -> Token {
let start = self.position;
self.consume();
while !self.at_end(input_bytes) && !self.is_quote(input_bytes) {
self.consume();
}
// TODO: Deal with escapes
if !self.at_end(input_bytes) {
self.consume();
}
Token::String((start, self.position))
}
pub fn is_open_paren(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == OPEN_PAREN
}
pub fn is_close_paren(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == CLOSE_PAREN
}
pub fn is_open_curly(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == b'{'
}
pub fn is_close_curly(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == b'}'
}
pub fn is_open_bracket(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == b'['
}
pub fn is_close_bracket(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == b']'
}
pub fn parse_spaces(&mut self, input_bytes: &[u8]) -> Token {
let start = self.position;
while !self.at_end(input_bytes) && self.is_space(input_bytes) {
self.consume();
}
Token::Spaces((start, self.position))
}
pub fn is_valid_number_char(&mut self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) >= ZERO && self.current_byte(input_bytes) <= NINE
}
pub fn parse_number(&mut self, input_bytes: &[u8]) -> Token {
let mut is_float = false;
let start = self.position;
while !self.at_end(input_bytes) && (self.is_valid_number_char(input_bytes) || self.current_byte(input_bytes) == PERIOD) {
// Need to handle making sure there is only one "."
if self.current_byte(input_bytes) == PERIOD {
is_float = true;
}
self.consume();
}
if is_float {
Token::Float((start,self.position))
} else {
Token::Integer((start, self.position))
}
}
pub fn parse_identifier(&mut self, input_bytes: &[u8]) -> Token {
let start = self.position;
while !self.at_end(input_bytes)
&& !self.is_space(input_bytes)
&& !self.is_open_paren(input_bytes)
&& !self.is_close_paren(input_bytes)
&& !self.is_open_curly(input_bytes)
&& !self.is_close_curly(input_bytes)
&& !self.is_open_bracket(input_bytes)
&& !self.is_close_bracket(input_bytes)
&& !self.is_semi_colon(input_bytes)
&& !self.is_colon(input_bytes)
&& !self.is_comma(input_bytes)
&& !self.is_newline(input_bytes)
&& !self.is_quote(input_bytes) {
self.consume();
}
Token::Atom((start, self.position))
}
pub fn parse_single(&mut self, input_bytes: &[u8]) -> Option<Token> {
if self.at_end(input_bytes) {
return None
}
let result = if self.is_space(input_bytes) {
self.parse_spaces(input_bytes)
} else if self.is_newline(input_bytes) {
self.consume();
Token::NewLine
} else if self.is_comment_start(input_bytes) {
self.parse_comment(input_bytes)
} else if self.is_open_paren(input_bytes) {
self.consume();
Token::OpenParen
} else if self.is_close_paren(input_bytes) {
self.consume();
Token::CloseParen
} else if self.is_valid_number_char(input_bytes) {
self.parse_number(input_bytes)
} else if self.is_quote(input_bytes) {
self.parse_string(input_bytes)
} else if self.is_semi_colon(input_bytes) {
self.consume();
Token::SemiColon
} else if self.is_comma(input_bytes) {
self.consume();
Token::Comma
} else if self.is_colon(input_bytes) {
self.consume();
Token::Colon
} else if self.is_open_curly(input_bytes) {
self.consume();
Token::OpenCurly
} else if self.is_close_curly(input_bytes) {
self.consume();
Token::CloseCurly
} else if self.is_open_bracket(input_bytes) {
self.consume();
Token::OpenBracket
} else if self.is_close_bracket(input_bytes) {
self.consume();
Token::CloseBracket
} else {
// println!("identifier");
self.parse_identifier(input_bytes)
};
Some(result)
}
pub fn is_semi_colon(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == b';'
}
pub fn is_colon(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == b':'
}
pub fn is_newline(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == NEW_LINE
}
pub fn is_comma(&self, input_bytes: &[u8]) -> bool {
self.current_byte(input_bytes) == b','
}
pub fn get_line(&mut self, input_bytes: &[u8]) -> Vec<Token> {
let mut result = Vec::new();
while !self.at_end(input_bytes) && !self.is_newline(input_bytes) {
if let Some(token) = self.parse_single(input_bytes) {
result.push(token);
}
}
result
}
pub fn _skip_lines(&mut self, n: usize, input_bytes: &[u8]) -> &mut Self {
for _ in 0..n {
while !self.at_end(input_bytes) && !self.is_newline(input_bytes) {
self.consume();
}
if !self.at_end(input_bytes) {
self.consume();
}
}
self
}
// The downside of this approach is that I will parse very large buffers
// all the way at once.
pub fn parse_all(&mut self, input_bytes: &[u8]) -> Vec<Token> {
let mut result = Vec::new();
while !self.at_end(input_bytes) {
if let Some(token) = self.parse_single(input_bytes) {
result.push(token);
}
}
self.position = 0;
result
}
}
pub fn rust_specific_pass(token: Token, input_bytes: &[u8]) -> RustSpecific {
match token {
Token::Atom((s, e)) => {
let text = from_utf8(&input_bytes[s..e]).unwrap();
match text {
"as" => RustSpecific::Keyword((s, e)),
"break" => RustSpecific::Keyword((s, e)),
"const" => RustSpecific::Keyword((s, e)),
"continue" => RustSpecific::Keyword((s, e)),
"crate" => RustSpecific::Keyword((s, e)),
"else" => RustSpecific::Keyword((s, e)),
"enum" => RustSpecific::Keyword((s, e)),
"extern" => RustSpecific::Keyword((s, e)),
"false" => RustSpecific::Keyword((s, e)),
"fn" => RustSpecific::Keyword((s, e)),
"for" => RustSpecific::Keyword((s, e)),
"if" => RustSpecific::Keyword((s, e)),
"impl" => RustSpecific::Keyword((s, e)),
"in" => RustSpecific::Keyword((s, e)),
"let" => RustSpecific::Keyword((s, e)),
"loop" => RustSpecific::Keyword((s, e)),
"match" => RustSpecific::Keyword((s, e)),
"mod" => RustSpecific::Keyword((s, e)),
"move" => RustSpecific::Keyword((s, e)),
"mut" => RustSpecific::Keyword((s, e)),
"pub" => RustSpecific::Keyword((s, e)),
"ref" => RustSpecific::Keyword((s, e)),
"return" => RustSpecific::Keyword((s, e)),
"self" => RustSpecific::Keyword((s, e)),
"Self" => RustSpecific::Keyword((s, e)),
"static" => RustSpecific::Keyword((s, e)),
"struct" => RustSpecific::Keyword((s, e)),
"super" => RustSpecific::Keyword((s, e)),
"trait" => RustSpecific::Keyword((s, e)),
"true" => RustSpecific::Keyword((s, e)),
"type" => RustSpecific::Keyword((s, e)),
"unsafe" => RustSpecific::Keyword((s, e)),
"use" => RustSpecific::Keyword((s, e)),
"where" => RustSpecific::Keyword((s, e)),
"while" => RustSpecific::Keyword((s, e)),
_ => RustSpecific::Token(token)
}
}
t => RustSpecific::Token(t)
}
} |
use std::io;
use std::net::SocketAddr;
use std::sync::atomic::Ordering;
#[cfg(feature = "io_timeout")]
use std::time::Duration;
use super::super::{co_io_result, IoData};
#[cfg(feature = "io_cancel")]
use crate::coroutine_impl::co_cancel_data;
use crate::coroutine_impl::{is_coroutine, CoroutineImpl, EventSource};
use crate::io::AsIoData;
use crate::net::UdpSocket;
use crate::yield_now::yield_with_io;
/// An in-flight `recv_from` operation on a coroutine-aware UDP socket;
/// borrows the socket's io data, the destination buffer, and the raw
/// std socket that actually performs the read.
pub struct UdpRecvFrom<'a> {
io_data: &'a IoData,
buf: &'a mut [u8],
socket: &'a std::net::UdpSocket,
#[cfg(feature = "io_timeout")]
timeout: Option<Duration>,
// Whether the caller runs inside a coroutine (affects error reporting
// and how we yield while waiting for readiness).
pub(crate) is_coroutine: bool,
}
impl<'a> UdpRecvFrom<'a> {
/// Prepares a recv_from operation over `socket` into `buf`.
pub fn new(socket: &'a UdpSocket, buf: &'a mut [u8]) -> Self {
UdpRecvFrom {
io_data: socket.as_io_data(),
buf,
socket: socket.inner(),
#[cfg(feature = "io_timeout")]
timeout: socket.read_timeout().unwrap(),
is_coroutine: is_coroutine(),
}
}
/// Drives the receive to completion: retry the non-blocking recv_from,
/// yielding to the scheduler whenever it would block, until data arrives
/// or a non-WouldBlock error occurs.
pub fn done(&mut self) -> io::Result<(usize, SocketAddr)> {
loop {
// Surface any cancellation/timeout recorded for this coroutine.
co_io_result(self.is_coroutine)?;
// clear the io_flag
self.io_data.io_flag.store(false, Ordering::Relaxed);
match self.socket.recv_from(self.buf) {
Ok(n) => return Ok(n),
Err(e) => {
// raw_os_error is faster than kind
let raw_err = e.raw_os_error();
if raw_err == Some(libc::EAGAIN) || raw_err == Some(libc::EWOULDBLOCK) {
// would block: fall through and wait for readiness
} else {
return Err(e);
}
}
}
// A readiness event arrived while we were reading; retry at once.
if self.io_data.io_flag.load(Ordering::Relaxed) {
continue;
}
// the result is still WouldBlock, need to try again
yield_with_io(self, self.is_coroutine);
}
}
}
impl<'a> EventSource for UdpRecvFrom<'a> {
/// Called when the coroutine parks: registers the coroutine handle (plus
/// optional timeout and cancel bookkeeping) so the selector can wake it
/// when the socket becomes readable.
fn subscribe(&mut self, co: CoroutineImpl) {
#[cfg(feature = "io_cancel")]
let cancel = co_cancel_data(&co);
let io_data = self.io_data;
#[cfg(feature = "io_timeout")]
if let Some(dur) = self.timeout {
crate::scheduler::get_scheduler()
.get_selector()
.add_io_timer(self.io_data, dur);
}
// Publish the coroutine handle for the selector to schedule.
unsafe { io_data.co.unsync_store(co) };
// there is event, re-run the coroutine
if io_data.io_flag.load(Ordering::Acquire) {
#[allow(clippy::needless_return)]
return io_data.fast_schedule();
}
#[cfg(feature = "io_cancel")]
{
// register the cancel io data
cancel.set_io((*io_data).clone());
// re-check the cancel status
if cancel.is_canceled() {
unsafe { cancel.cancel() };
}
}
}
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Flat (non-paged) list of strings.
pub type ArrayOfStrings = Vec<String>;
/// One page of string results; `next_link` holds the next page's URL, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PageableListOfStrings {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<String>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Request body for importing an update: the manifest plus its payload files.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportUpdateInput {
#[serde(rename = "importManifest")]
pub import_manifest: ImportManifestMetadata,
pub files: Vec<FileImportMetadata>,
}
/// Location, size and content hashes of an import manifest.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportManifestMetadata {
pub url: String,
#[serde(rename = "sizeInBytes")]
pub size_in_bytes: i64,
// Free-form JSON object — presumably algorithm-name -> digest; the schema
// is not pinned down by the generated code.
pub hashes: serde_json::Value,
}
/// Name and download URL of one file referenced by an import.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FileImportMetadata {
pub filename: String,
pub url: String,
}
/// An imported update: identity, type, install criteria, compatibility
/// entries and service timestamps.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Update {
#[serde(rename = "updateId")]
pub update_id: UpdateId,
#[serde(rename = "updateType")]
pub update_type: String,
#[serde(rename = "installedCriteria")]
pub installed_criteria: String,
pub compatibility: Vec<Compatibility>,
#[serde(rename = "manifestVersion")]
pub manifest_version: String,
#[serde(rename = "importedDateTime")]
pub imported_date_time: String,
#[serde(rename = "createdDateTime")]
pub created_date_time: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub etag: Option<String>,
}
/// Three-part identity of an update: provider, name and version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateId {
pub provider: String,
pub name: String,
pub version: String,
}
/// One page of update identities.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PageableListOfUpdateIds {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<UpdateId>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Manufacturer/model pair an update is compatible with.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Compatibility {
#[serde(rename = "deviceManufacturer")]
pub device_manufacturer: String,
#[serde(rename = "deviceModel")]
pub device_model: String,
}
/// A stored update payload file: identity, name, size and content hashes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct File {
#[serde(rename = "fileId")]
pub file_id: String,
#[serde(rename = "fileName")]
pub file_name: String,
#[serde(rename = "sizeInBytes")]
pub size_in_bytes: i64,
pub hashes: serde_json::Value,
#[serde(rename = "mimeType", default, skip_serializing_if = "Option::is_none")]
pub mime_type: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub etag: Option<String>,
}
/// A long-running service operation and its current status/metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
#[serde(rename = "operationId")]
pub operation_id: String,
pub status: OperationStatus,
#[serde(rename = "updateId", default, skip_serializing_if = "Option::is_none")]
pub update_id: Option<UpdateId>,
#[serde(rename = "resourceLocation", default, skip_serializing_if = "Option::is_none")]
pub resource_location: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<Error>,
#[serde(rename = "traceId", default, skip_serializing_if = "Option::is_none")]
pub trace_id: Option<String>,
#[serde(rename = "lastActionDateTime")]
pub last_action_date_time: String,
#[serde(rename = "createdDateTime")]
pub created_date_time: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub etag: Option<String>,
}
/// Lifecycle states of an `Operation`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OperationStatus {
Undefined,
NotStarted,
Running,
Succeeded,
Failed,
}
/// Service error payload: code, message, optional target/details/cause.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
pub code: String,
pub message: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub target: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub details: Vec<Error>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub innererror: Option<InnerError>,
#[serde(rename = "occurredDateTime", default, skip_serializing_if = "Option::is_none")]
pub occurred_date_time: Option<String>,
}
/// Nested error information; may chain deeper causes via `inner_error`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InnerError {
pub code: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[serde(rename = "errorDetail", default, skip_serializing_if = "Option::is_none")]
pub error_detail: Option<String>,
// NOTE(review): `Option<Box<InnerError>>` would be the more conventional
// shape (free niche); kept as generated to preserve the public type.
#[serde(rename = "innerError", default, skip_serializing_if = "Option::is_none")]
pub inner_error: Box<Option<InnerError>>,
}
/// One page of operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PageableListOfOperations {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Operation>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// A class of devices (manufacturer/model) and its best compatible update.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeviceClass {
#[serde(rename = "deviceClassId")]
pub device_class_id: String,
pub manufacturer: String,
pub model: String,
#[serde(rename = "bestCompatibleUpdateId")]
pub best_compatible_update_id: UpdateId,
}
/// One page of device classes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PageableListOfDeviceClasses {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<DeviceClass>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// A managed device together with its update and deployment state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Device {
#[serde(rename = "deviceId")]
pub device_id: String,
#[serde(rename = "deviceClassId")]
pub device_class_id: String,
pub manufacturer: String,
pub model: String,
#[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
pub group_id: Option<String>,
#[serde(rename = "lastAttemptedUpdateId", default, skip_serializing_if = "Option::is_none")]
pub last_attempted_update_id: Option<UpdateId>,
#[serde(rename = "deploymentStatus", default, skip_serializing_if = "Option::is_none")]
pub deployment_status: Option<DeviceDeploymentState>,
#[serde(rename = "installedUpdateId", default, skip_serializing_if = "Option::is_none")]
pub installed_update_id: Option<UpdateId>,
#[serde(rename = "onLatestUpdate")]
pub on_latest_update: bool,
#[serde(rename = "lastDeploymentId", default, skip_serializing_if = "Option::is_none")]
pub last_deployment_id: Option<String>,
}
/// Deployment outcome states reported for a single device.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DeviceDeploymentState {
Succeeded,
InProgress,
Failed,
Canceled,
Incompatible,
}
/// Filter for device queries (by group).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeviceFilter {
#[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
pub group_id: Option<String>,
}
/// One page of devices.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PageableListOfDevices {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Device>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Aggregate update-compliance counts across the device fleet.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateCompliance {
#[serde(rename = "totalDeviceCount")]
pub total_device_count: i64,
#[serde(rename = "onLatestUpdateDeviceCount")]
pub on_latest_update_device_count: i64,
#[serde(rename = "newUpdatesAvailableDeviceCount")]
pub new_updates_available_device_count: i64,
#[serde(rename = "updatesInProgressDeviceCount")]
pub updates_in_progress_device_count: i64,
}
/// An update and the number of devices it can be deployed to.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdatableDevices {
#[serde(rename = "updateId")]
pub update_id: UpdateId,
#[serde(rename = "deviceCount")]
pub device_count: i64,
}
/// One page of updatable-device counts.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PageableListOfUpdatableDevices {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<UpdatableDevices>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// A device tag and how many devices carry it.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeviceTag {
#[serde(rename = "tagName")]
pub tag_name: String,
#[serde(rename = "deviceCount")]
pub device_count: i64,
}
/// One page of device tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PageableListOfDeviceTags {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<DeviceTag>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// A device group: identity, type, member tags and creation time.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Group {
#[serde(rename = "groupId")]
pub group_id: String,
#[serde(rename = "groupType")]
pub group_type: GroupType,
pub tags: Vec<String>,
#[serde(rename = "createdDateTime")]
pub created_date_time: String,
#[serde(rename = "deviceCount", default, skip_serializing_if = "Option::is_none")]
pub device_count: Option<i64>,
}
/// How a group is defined (currently only IoT Hub tags).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum GroupType {
IoTHubTag,
}
/// One page of groups.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PageableListOfGroups {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Group>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// A deployment of one update to a set of target devices.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Deployment {
#[serde(rename = "deploymentId")]
pub deployment_id: String,
#[serde(rename = "deploymentType")]
pub deployment_type: DeploymentType,
#[serde(rename = "deviceClassId", default, skip_serializing_if = "Option::is_none")]
pub device_class_id: Option<String>,
#[serde(rename = "startDateTime")]
pub start_date_time: String,
#[serde(rename = "deviceGroupType")]
pub device_group_type: DeviceGroupType,
#[serde(rename = "deviceGroupDefinition")]
pub device_group_definition: Vec<String>,
#[serde(rename = "updateId")]
pub update_id: UpdateId,
#[serde(rename = "isCanceled", default, skip_serializing_if = "Option::is_none")]
pub is_canceled: Option<bool>,
#[serde(rename = "isRetried", default, skip_serializing_if = "Option::is_none")]
pub is_retried: Option<bool>,
#[serde(rename = "isCompleted", default, skip_serializing_if = "Option::is_none")]
pub is_completed: Option<bool>,
}
/// Deployment kind (Complete / Download / Install).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DeploymentType {
Complete,
Download,
Install,
}
/// How the target device set of a deployment is specified.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DeviceGroupType {
All,
Devices,
DeviceGroupDefinitions,
}
/// One page of deployments.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PageableListOfDeployments {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Deployment>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Aggregate per-deployment progress counters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentStatus {
#[serde(rename = "deploymentState")]
pub deployment_state: DeploymentState,
#[serde(rename = "totalDevices", default, skip_serializing_if = "Option::is_none")]
pub total_devices: Option<i32>,
#[serde(rename = "devicesIncompatibleCount", default, skip_serializing_if = "Option::is_none")]
pub devices_incompatible_count: Option<i32>,
#[serde(rename = "devicesInProgressCount", default, skip_serializing_if = "Option::is_none")]
pub devices_in_progress_count: Option<i32>,
#[serde(rename = "devicesCompletedFailedCount", default, skip_serializing_if = "Option::is_none")]
pub devices_completed_failed_count: Option<i32>,
#[serde(rename = "devicesCompletedSucceededCount", default, skip_serializing_if = "Option::is_none")]
pub devices_completed_succeeded_count: Option<i32>,
#[serde(rename = "devicesCanceledCount", default, skip_serializing_if = "Option::is_none")]
pub devices_canceled_count: Option<i32>,
}
/// Lifecycle states of a deployment.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DeploymentState {
Active,
Superseded,
Canceled,
}
/// Per-device state within a deployment.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentDeviceState {
#[serde(rename = "deviceId")]
pub device_id: String,
#[serde(rename = "retryCount")]
pub retry_count: i32,
#[serde(rename = "movedOnToNewDeployment")]
pub moved_on_to_new_deployment: bool,
#[serde(rename = "deviceState")]
pub device_state: DeviceDeploymentState,
}
/// One page of per-device deployment states.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PageableListOfDeploymentDeviceStates {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<DeploymentDeviceState>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Status values accepted by `OperationFilter`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OperationFilterStatus {
Running,
NotStarted,
}
/// Filter for operation queries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationFilter {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub status: Option<OperationFilterStatus>,
}
/// Filter for deployment queries (by update provider/name/version).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentFilter {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub provider: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub version: Option<String>,
}
/// Filter for group best-update queries (by update provider/name/version).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GroupBestUpdatesFilter {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub provider: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub version: Option<String>,
}
/// Filter for per-device deployment state queries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentDeviceStatesFilter {
#[serde(rename = "deviceId", default, skip_serializing_if = "Option::is_none")]
pub device_id: Option<String>,
#[serde(rename = "deviceState", default, skip_serializing_if = "Option::is_none")]
pub device_state: Option<DeviceState>,
}
/// Per-device progress states used in deployment filters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DeviceState {
NotStarted,
Incompatible,
AlreadyInDeployment,
Canceled,
InProgress,
Failed,
Succeeded,
}
|
// Crate submodules (definitions live in config.rs / stream.rs).
pub mod config;
pub mod stream;
|
/// Connection string for the local PostgreSQL instance backing the app.
/// (`&'static str` was redundant here — `'static` is implied for `const`.)
pub const POSTGRESQL_URL: &str = "postgresql://admin@localhost:5432/youtube";
/// Query fetching the id/serial pairs for all tracked channels.
pub const QUERY: &str = "SELECT id, serial FROM youtube.stats.channels";
//! TensorFlow Ops
use std::collections::HashMap;
use std::collections::VecDeque;
use std::fmt::Debug;
use std::mem;
use std::ops::{Index, IndexMut};
#[cfg(feature = "serialize")]
use std::result::Result as StdResult;
use std::sync::Arc;
use analyser::interface::{Solver, TensorsProxy};
use analyser::prelude::*;
use ops::nn::local_patch::{DataFormat, Padding};
use {DataType, Result, Tensor};
use downcast_rs::Downcast;
use objekt;
#[cfg(feature = "serialize")]
use serde::ser::{Serialize, Serializer};
#[macro_use]
mod macros;
mod array;
mod cast;
// BUG FIX: the attribute previously read `#[cfg(features = "image_ops")]`.
// `features` is not a recognized cfg key, so the condition was always false
// and this module was silently never compiled in, even with the feature on.
#[cfg(feature = "image_ops")]
pub mod image;
pub mod konst;
mod math;
pub mod nn;
/// Convenience re-exports for implementing and using operators.
pub mod prelude {
    pub use super::{Attr, InferenceRulesOp, Op, OpRegister};
    pub use super::{OpBuffer, QueuesBuffer, TensorView};
    pub use std::collections::HashMap;
    pub use std::marker::PhantomData;
    pub use tensor::{DataType, Datum, Tensor};
    pub use Result;
}
/// A tensor value that is either uniquely owned or shared behind an `Arc`.
#[derive(Debug, Clone)]
pub enum TensorView {
    /// Uniquely owned tensor.
    Owned(Tensor),
    /// Reference-counted shared tensor; cloning is cheap.
    Shared(Arc<Tensor>),
}
impl TensorView {
    /// Creates a shared TensorView from any TensorView.
    pub fn into_shared(self) -> TensorView {
        match self {
            TensorView::Owned(m) => TensorView::Shared(Arc::new(m)),
            TensorView::Shared(_) => self,
        }
    }
    /// Creates a Tensor from a TensorView, cloning the shared data if needed.
    pub fn into_tensor(self) -> Tensor {
        match self {
            TensorView::Owned(m) => m,
            TensorView::Shared(m) => m.as_ref().clone(),
        }
    }
    /// Returns a reference to the Tensor wrapped inside a TensorView.
    // Modernized to match-ergonomics patterns (the old `&X(ref m) => &m`
    // form produced a needless double reference).
    pub fn as_tensor(&self) -> &Tensor {
        match self {
            TensorView::Owned(m) => m,
            TensorView::Shared(m) => m.as_ref(),
        }
    }
    /// Returns a shared copy of the TensorView, turning the one passed
    /// as argument into a TensorView::Shared if necessary.
    pub fn share(&mut self) -> TensorView {
        // We cannot move the owned tensor out of `&mut self` directly, so we
        // swap in a cheap dummy with `mem::replace`, wrap the extracted
        // tensor in an Arc, and write the shared variant back.
        if let TensorView::Owned(_) = self {
            let dummy = TensorView::Owned(Tensor::i32s(&[], &[0]).unwrap());
            let shared = match mem::replace(self, dummy) {
                TensorView::Owned(m) => TensorView::Shared(Arc::new(m)),
                // The `if let` above guarantees the Owned variant here.
                _ => unreachable!(),
            };
            *self = shared;
        }
        self.clone()
    }
}
/// Builds an owned view from anything convertible into a `Tensor`.
impl<M> From<M> for TensorView
where
    Tensor: From<M>,
{
    fn from(m: M) -> TensorView {
        TensorView::Owned(m.into())
    }
}
/// Wraps an already-shared tensor without copying it.
impl From<Arc<Tensor>> for TensorView {
    fn from(m: Arc<Tensor>) -> TensorView {
        TensorView::Shared(m)
    }
}
impl ::std::ops::Deref for TensorView {
type Target = Tensor;
fn deref(&self) -> &Tensor {
match self {
&TensorView::Owned(ref m) => &m,
&TensorView::Shared(ref m) => m.as_ref(),
}
}
}
/// Two views are equal when the tensors they wrap are equal, regardless of
/// whether either side is owned or shared.
impl PartialEq for TensorView {
    fn eq(&self, other: &TensorView) -> bool {
        self.as_tensor() == other.as_tensor()
    }
}
// TODO(liautaud): Find a more generic way to do this.
/// A dynamically-typed operation attribute value.
#[cfg_attr(feature = "serialize", derive(Serialize))]
#[derive(Debug, Clone)]
pub enum Attr {
    I64(i64),
    Usize(usize),
    DataType(DataType),
    DataFormat(DataFormat),
    Padding(Padding),
    Tensor(Tensor),
    UsizeVec(Vec<usize>),
    IsizeVec(Vec<isize>),
}
/// A Tensorflow operation.
pub trait Op: Debug + objekt::Clone + Send + Sync + 'static + InferenceOp {
    /// Returns the attributes of the operation and their values.
    fn get_attributes(&self) -> HashMap<&'static str, Attr>;
    /// Evaluates the operation given the input tensors.
    fn eval(&self, inputs: Vec<TensorView>) -> Result<Vec<TensorView>>;
    /// Returns a new streaming buffer for the operation.
    // Default: an empty buffer, suitable for ops that keep no state.
    fn new_buffer(&self) -> Box<OpBuffer> {
        Box::new(EmptyBuffer {})
    }
    /// Evaluates one step of the operation on the given input tensors.
    /// This is only implemented for operators which support streaming.
    ///
    /// The input tensors are annotated with an Option<usize>:
    /// - None if the tensor doesn't have a streaming dimension.
    /// - Option(d) if the tensor is being streamed on dimension d.
    ///
    /// If an input tensor has a streaming dimension, the corresponding
    /// TensorView will only contain a _chunk_ of input of size 1 along
    /// that dimension. Note that each chunk will only be passed once
    /// to the step function, so it should use the provided buffer to
    /// store whichever chunks it needs for future computations.
    ///
    /// The function should return Some(chunks) when it has computed
    /// new chunks, and None if it has computed an intermediary result
    /// successfully but doesn't have new output chunks ready yet.
    ///
    /// For operators like Concat, multiple input tensors might have a
    /// streaming dimension. In that case, at each call to step, only
    /// one of the streaming inputs will receive new chunk while the
    /// others will receive None.
    fn step(
        &self,
        _inputs: Vec<(Option<usize>, Option<TensorView>)>,
        _buffer: &mut Box<OpBuffer>,
    ) -> Result<Option<Vec<TensorView>>> {
        bail!("Streaming is not available for operator {:?}", self)
    }
    /// Infers properties about the input and output tensors.
    ///
    /// The `inputs` and `outputs` arguments correspond to properties about
    /// the input and output tensors that are already known.
    ///
    /// Returns Err in case of an unrecoverable error during the inference,
    /// and the refined properties about the inputs and outputs otherwise.
    fn infer_and_propagate(
        &self,
        inputs: Vec<TensorFact>,
        outputs: Vec<TensorFact>,
    ) -> Result<(Vec<TensorFact>, Vec<TensorFact>)> {
        let (infered_inputs, infered_outputs) = self.infer(inputs, outputs)?;
        // Constant folding: when every input value is fully known, run the
        // op eagerly and turn the concrete result into an output fact.
        if infered_inputs.iter().all(|i| i.value.is_concrete()) {
            let input_values = infered_inputs
                .iter()
                .map(|i| i.value.concretize().unwrap().clone().into())
                .collect(); // checked
            // NOTE(review): only the *last* output of `eval` is folded here —
            // presumably ops reaching this path are single-output; confirm
            // for multi-output operators.
            let output_value = self.eval(input_values)?.pop().unwrap();
            Ok((
                infered_inputs,
                vec![::analyser::helpers::tensor_to_fact(
                    output_value.into_tensor(),
                )],
            ))
        } else {
            Ok((infered_inputs, infered_outputs))
        }
    }
    /// Returns the constant value of this op, if it is a constant.
    fn const_value(&self) -> Option<Tensor> {
        None
    }
}
/// Low-level shape/type inference: refines known facts about inputs and
/// outputs into more precise facts.
pub trait InferenceOp {
    fn infer(
        &self,
        inputs: Vec<TensorFact>,
        outputs: Vec<TensorFact>,
    ) -> Result<(Vec<TensorFact>, Vec<TensorFact>)>;
}
/// Rule-based inference: operators describe their constraints declaratively
/// and the solver derives the facts (see the blanket `InferenceOp` impl).
pub trait InferenceRulesOp {
    /// Registers the inference rules of the operator.
    fn rules<'r, 'p: 'r, 's: 'r>(
        &'s self,
        solver: &mut Solver<'r>,
        inputs: &'p TensorsProxy,
        outputs: &'p TensorsProxy,
    );
}
/// Every rule-based operator automatically gets solver-driven inference.
impl<O: InferenceRulesOp> InferenceOp for O {
    fn infer(
        &self,
        inputs: Vec<TensorFact>,
        outputs: Vec<TensorFact>,
    ) -> Result<(Vec<TensorFact>, Vec<TensorFact>)> {
        // Path prefixes 0 and 1 distinguish input facts from output facts
        // inside the solver.
        let inputs_proxy = TensorsProxy::new(vec![0].into());
        let outputs_proxy = TensorsProxy::new(vec![1].into());
        let mut solver = Solver::default();
        self.rules(&mut solver, &inputs_proxy, &outputs_proxy);
        solver.infer((inputs, outputs))
    }
}
// Makes `Box<Op>` cloneable through the trait object.
clone_trait_object!(Op);
// An operator serializes as its attribute map.
#[cfg(feature = "serialize")]
impl Serialize for Op {
    fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error>
    where
        S: Serializer,
    {
        self.get_attributes().serialize(serializer)
    }
}
/// Registry mapping TensorFlow op names to constructor functions.
pub type OpRegister = HashMap<&'static str, fn(&::tfpb::node_def::NodeDef) -> Result<Box<Op>>>;
/// Builds operator instances from protobuf node definitions.
pub struct OpBuilder(OpRegister);
impl OpBuilder {
    /// Creates a builder with every known operator family registered.
    pub fn new() -> OpBuilder {
        let mut registry = OpRegister::new();
        array::register_all_ops(&mut registry);
        cast::register_all_ops(&mut registry);
        konst::register_all_ops(&mut registry);
        math::register_all_ops(&mut registry);
        nn::register_all_ops(&mut registry);
        OpBuilder(registry)
    }
    /// Instantiates the operator named in `pb`, falling back to an
    /// `UnimplementedOp` placeholder for unknown op names.
    pub fn build(&self, pb: &::tfpb::node_def::NodeDef) -> Result<Box<Op>> {
        if let Some(builder) = self.0.get(pb.get_op()) {
            return builder(pb);
        }
        Ok(Box::new(UnimplementedOp(
            pb.get_op().to_string(),
            pb.to_owned(),
        )))
    }
}
/// Placeholder for an op name we do not support: keeps the original name and
/// node definition so the graph can still be loaded and inspected.
#[derive(Debug, Clone)]
pub struct UnimplementedOp(String, ::tfpb::node_def::NodeDef);
impl Op for UnimplementedOp {
/// Evaluates the operation given the input tensors.
fn eval(&self, _inputs: Vec<TensorView>) -> Result<Vec<TensorView>> {
Err(format!("unimplemented operation: {}", self.0))?
}
/// Returns the attributes of the operation and their values.
fn get_attributes(&self) -> HashMap<&'static str, Attr> {
hashmap!{} // FIXME
}
}
/// No inference rules: an unimplemented op constrains nothing.
impl InferenceRulesOp for UnimplementedOp {
    fn rules<'r, 'p: 'r, 's: 'r>(
        &'s self,
        _: &mut Solver<'r>,
        _: &'p TensorsProxy,
        _: &'p TensorsProxy,
    ) {
    }
}
/// A streaming buffer for a Tensorflow operation.
///
/// This is used during streaming evaluation of models. Each node is given
/// a mutable reference to a buffer which it can use to store intermediary
/// results between evaluation steps. Every operation must provide its own
/// buffer type (or use one of the general ones defined below), which must
/// implement the OpBuffer trait. It should return a new instance of it in
/// the `Op::new_buffer` method, and downcast it from OpBuffer in `step`.
pub trait OpBuffer: Downcast + Debug + objekt::Clone + Send + 'static {}
// Enable cloning and downcasting through the trait object.
clone_trait_object!(OpBuffer);
impl_downcast!(OpBuffer);
/// An empty buffer for operations which don't need one.
#[derive(Debug, Clone)]
pub struct EmptyBuffer {}
impl OpBuffer for EmptyBuffer {}
/// A buffer with a variable number of TensorView queues.
// One FIFO queue per input; used by streaming ops that must hold chunks
// until enough data is available.
#[derive(Debug, Clone)]
pub struct QueuesBuffer(Vec<VecDeque<TensorView>>);
impl OpBuffer for QueuesBuffer {}
impl QueuesBuffer {
    /// Creates a new buffer with a given number of queues.
    pub fn new(size: usize) -> QueuesBuffer {
        QueuesBuffer(vec![VecDeque::new(); size])
    }
    /// Appends each incoming chunk (if any) to its corresponding queue,
    /// taking the chunk out of the caller's slot (it is left as `None`).
    pub fn append(&mut self, views: &mut [(Option<usize>, Option<TensorView>)]) -> Result<()> {
        if views.len() > self.0.len() {
            bail!("There are more input TensorViews than queues in the buffer.");
        }
        for (i, view) in views.iter_mut().enumerate() {
            // Idiomatic `if let … take()` instead of `is_some()` + `unwrap()`.
            if let Some(chunk) = view.1.take() {
                self.0[i].push_back(chunk);
            }
        }
        Ok(())
    }
    /// Returns an iterator over all the queues in the buffer.
    // Fixed over-restrictive borrow: read-only iteration only needs `&self`
    // (callers holding `&mut self` still work via reborrowing).
    pub fn iter<'a>(&'a self) -> impl Iterator<Item = &'a VecDeque<TensorView>> {
        self.0.iter()
    }
    /// Returns a mutable iterator over all the queues in the buffer.
    pub fn iter_mut<'a>(&'a mut self) -> impl Iterator<Item = &'a mut VecDeque<TensorView>> {
        self.0.iter_mut()
    }
}
/// Direct (panicking) access to the queue at `index`.
impl Index<usize> for QueuesBuffer {
    type Output = VecDeque<TensorView>;
    fn index(&self, index: usize) -> &VecDeque<TensorView> {
        &self.0[index]
    }
}
impl IndexMut<usize> for QueuesBuffer {
    fn index_mut(&mut self, index: usize) -> &mut VecDeque<TensorView> {
        &mut self.0[index]
    }
}
|
/// Touches one symbol from each dependency so the crates are actually
/// linked, then prints them.
fn main() {
    // Materialize one value per dependency first…
    let sdk_dep = solana_sdk::signature::Signature::default();
    let memo_dep = safe_memo::id();
    let token_dep = safe_token::id();
    let token_swap_dep = spl_token_id_swap::id();
    // …then report them in the same order.
    println!("Yes have some sdk_dep {:?}", sdk_dep);
    println!("Yes have some memo_dep {:?}", memo_dep);
    println!("Yes have some token_dep {:?}", token_dep);
    println!("Yes have some token_swap_dep {:?}", token_swap_dep);
}
|
use rspg::display::DisplayWith;
use rspg::grammar;
use rspg::lr1::generator::Generator;
use rspg::lr1::parser::Parser;
use rspg::set::FirstSets;
use rspg::set::FollowSets;
use rspg::token;
use std::marker::PhantomData;
/// Grammar terminal symbols: a literal sign character or "a number".
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Copy, Clone)]
enum Terminal {
    Sign(char),
    Number,
}
/// Lexed tokens; numbers carry their value.
#[derive(Debug, Copy, Clone)]
enum Token {
    Sign(char),
    Number(f64),
}
/// Maps each concrete token to its grammar terminal (numbers collapse to
/// the single `Number` terminal).
impl token::Token<Terminal> for Token {
    fn terminal(&self) -> Terminal {
        match self {
            Token::Sign(c) => Terminal::Sign(*c),
            Token::Number(_) => Terminal::Number,
        }
    }
}
/// Builds an LR(1) parser for a small arithmetic grammar and evaluates one
/// expression with it, printing every intermediate artifact.
fn main() {
    // Classic expression grammar: E handles +/-, T handles */÷, F is an atom.
    let grammar = grammar! {
        start E;
        rule E -> E, Terminal::Sign('+'), T;
        rule E -> E, Terminal::Sign('-'), T;
        rule E -> T;
        rule T -> T, Terminal::Sign('*'), F;
        rule T -> T, Terminal::Sign('/'), F;
        rule T -> F;
        rule F -> Terminal::Sign('('), E, Terminal::Sign(')');
        rule F -> Terminal::Number;
    };
    println!("{grammar}\n");
    let first_sets = FirstSets::of_grammar(&grammar);
    println!("first sets:\n{}\n", first_sets.display_with(&grammar));
    let follow_sets = FollowSets::of_grammar(&grammar, &first_sets);
    println!("follow sets:\n{}\n", follow_sets.display_with(&grammar));
    println!("LR(1) canonical collection:");
    // "E'" is the fresh start symbol used to augment the grammar.
    let generator = Generator::construct(&grammar, &first_sets, "E'");
    for (i, item_set) in generator.canonical_collection().iter().enumerate() {
        println!(
            "I_{} = {}",
            i,
            item_set.display_with(generator.extended_grammar())
        );
    }
    println!();
    let table = generator.generate(&grammar).unwrap();
    println!("LR(1) table:");
    let pretty_table = table.pretty_table(&grammar, false);
    pretty_table.printstd();
    println!();
    // Token stream for: 20 / 10 - 2 * (3 + 6)
    let input = vec![
        Token::Number(20.),
        Token::Sign('/'),
        Token::Number(10.),
        Token::Sign('-'),
        Token::Number(2.),
        Token::Sign('*'),
        Token::Sign('('),
        Token::Number(3.),
        Token::Sign('+'),
        Token::Number(6.),
        Token::Sign(')'),
    ];
    println!("input:\n{input:?}\n");
    print!("input pretty printed:");
    for token in &input {
        print!(" ");
        match token {
            Token::Number(n) => print!("{n}"),
            Token::Sign(s) => print!("{s}"),
        }
    }
    println!("\n");
    let parser = {
        Parser {
            grammar: &grammar,
            table: &table,
            // The reducer evaluates the expression on the fly: for each
            // reduction it pops the right-hand-side values from `r.from`
            // front-to-back and combines them.
            reducer: |mut r| {
                // Rule indices follow the declaration order in `grammar!`.
                let mut rule_indices = grammar.rule_indices();
                let rule_plus = rule_indices.next().unwrap();
                let rule_sub = rule_indices.next().unwrap();
                let rule_et = rule_indices.next().unwrap();
                let rule_mul = rule_indices.next().unwrap();
                let rule_div = rule_indices.next().unwrap();
                let rule_tf = rule_indices.next().unwrap();
                let rule_bracket = rule_indices.next().unwrap();
                let rule_number = rule_indices.next().unwrap();
                println!("use rule {} reduce: {}", r.rule, r.display_with(&grammar));
                if r.rule == rule_plus {
                    let e = r.from.pop_front().unwrap();
                    let _ = r.from.pop_front().unwrap();
                    let t = r.from.pop_front().unwrap();
                    Ok(e.parsed().unwrap() + t.parsed().unwrap())
                } else if r.rule == rule_sub {
                    let e = r.from.pop_front().unwrap();
                    let _ = r.from.pop_front().unwrap();
                    let t = r.from.pop_front().unwrap();
                    Ok(e.parsed().unwrap() - t.parsed().unwrap())
                } else if r.rule == rule_et {
                    let t = r.from.pop_front().unwrap();
                    Ok(t.parsed().unwrap())
                } else if r.rule == rule_mul {
                    let t = r.from.pop_front().unwrap();
                    let _ = r.from.pop_front().unwrap();
                    let f = r.from.pop_front().unwrap();
                    Ok(t.parsed().unwrap() * f.parsed().unwrap())
                } else if r.rule == rule_div {
                    let t = r.from.pop_front().unwrap();
                    let _ = r.from.pop_front().unwrap();
                    let f = r.from.pop_front().unwrap();
                    Ok(t.parsed().unwrap() / f.parsed().unwrap())
                } else if r.rule == rule_tf {
                    let f = r.from.pop_front().unwrap();
                    Ok(f.parsed().unwrap())
                } else if r.rule == rule_bracket {
                    // Discard '(' and ')', keep the inner value.
                    let _ = r.from.pop_front().unwrap();
                    let n = r.from.pop_front().unwrap();
                    let _ = r.from.pop_front().unwrap();
                    Ok(n.parsed().unwrap())
                } else if r.rule == rule_number {
                    let n = r.from.pop_front().unwrap();
                    match n.token().unwrap() {
                        Token::Number(n) => Ok(n),
                        _ => Err("not a number token"),
                    }
                } else {
                    unimplemented!()
                }
            },
            phantom: PhantomData,
        }
    };
    println!("events:");
    match parser.parse(input.into_iter()) {
        Ok(p) => println!("accepted: {p:?}"),
        Err(e) => println!("error: {e:?}"),
    }
}
|
use log::*;
use super::*;
use crate::system::{ BusHandle, SystemModules };
use tokio::sync::mpsc::Sender;
use crate::bus::ModuleMsgEnum;
use crate::agent::AgentMessage;
/// A convergence-layer adapter that simply loops bundles back to the local
/// application agent over the message bus.
pub struct LoopbackCLA {
    // Adapter settings (name, etc.) from the system configuration.
    config: AdapterConfiguration,
    // Handle used to send messages to other system modules.
    bus_handle: BusHandle,
}
impl LoopbackCLA {
    /// Creates a loopback adapter from its configuration and a bus handle.
    pub fn new(config: super::AdapterConfiguration, bus_handle: BusHandle) -> LoopbackCLA {
        Self {
            // Field-init shorthand (was the redundant `bus_handle: bus_handle`).
            bus_handle,
            config,
        }
    }
}
impl ClaTrait for LoopbackCLA {
    /// Starts the adapter. The loopback CLA has no transport to drive, so
    /// this is a no-op besides logging.
    fn start(&mut self, _tx: Sender<ClaBundleStatus>) {
        debug!("Loopback Started");
        // do nothing really. Would loop on a real CLA
    }
    /// "Sends" a bundle by delivering it straight to the local app agent
    /// over the bus.
    fn send(&mut self, mbun: MetaBundle) {
        debug!("Loopback {} received a bundle", self.config.name );
        println!("Bundle from: {}", mbun.bundle.primary.source);
        // if let Some(payload) = mbun.bundle.payload() {
        //     println!("{}", String::from_utf8(payload.to_vec()).unwrap());
        // }
        // NOTE(review): `block_on` inside a sync trait method blocks the
        // calling thread until the bus accepts the message — confirm this is
        // never called from an async context.
        futures::executor::block_on(self.bus_handle.send(SystemModules::AppAgent,
            ModuleMsgEnum::MsgAppAgent(AgentMessage::DeliverBundle(mbun)))).unwrap();
        // TODO Send bundle to the local agent
    }
    /// Stopping the loopback adapter is not implemented.
    fn stop(&mut self) {
        unimplemented!();
    }
}
|
use crate::common::myerror::MyError;
use crate::inven::warehouse::{InvenRes, Inventory, Price};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use serde_json::{Result, Value};
use std::collections::HashMap;
use std::collections::HashSet;
/// Parses the raw inventory JSON and returns the records sorted by date
/// (ascending).
fn get_raw(_raw: &str) -> std::result::Result<Vec<Inventory>, MyError> {
    let mut raw: Vec<Inventory> = serde_json::from_str(_raw)?;
    // Dates are strings and have a total order; `cmp` avoids the needless
    // `partial_cmp(..).unwrap()`.
    raw.sort_by(|a, b| a.date.cmp(&b.date));
    Ok(raw)
}
/// Parses the day-list JSON and returns the days sorted ascending.
fn get_days(_days: &str) -> std::result::Result<Vec<String>, MyError> {
    let mut parsed: Vec<String> = serde_json::from_str(_days)?;
    parsed.sort();
    Ok(parsed)
}
/// Parses the price JSON and returns the records sorted by date (ascending).
fn get_price(_price: &str) -> std::result::Result<Vec<Price>, MyError> {
    let mut price: Vec<Price> = serde_json::from_str(_price)?;
    // String dates have a total order; `cmp` avoids `partial_cmp().unwrap()`.
    price.sort_by(|a, b| a.date.cmp(&b.date));
    Ok(price)
}
/// Collects the distinct product names appearing in the inventory records.
fn all_product(l: &Vec<Inventory>) -> HashSet<String> {
    // Idiomatic: build the set directly from an iterator instead of a
    // manual loop with `insert`.
    l.iter().map(|inven| inven.product.clone()).collect()
}
/// Collects the distinct product names appearing in the price records.
fn all_product_by_price(l: &Vec<Price>) -> HashSet<String> {
    // Idiomatic: collect directly instead of looping with `insert`.
    l.iter().map(|p| p.product.clone()).collect()
}
/// Computes the accumulated inventory for every product on every day,
/// fanning out across products in parallel.
fn inven_all_product_all_date(days: &Vec<String>, raw: &Vec<Inventory>) -> Vec<InvenRes> {
    all_product(raw)
        .par_iter()
        .flat_map(|product| inven_single_product_all_date(product, days, raw))
        .collect()
}
/// Computes the accumulated inventory for one product on each requested day,
/// in parallel over the days.
fn inven_single_product_all_date(
    product: &str,
    days: &Vec<String>,
    raw: &Vec<Inventory>,
) -> Vec<InvenRes> {
    days.par_iter()
        .map(|date| inven_single_product_by_date(product, date, raw))
        .collect()
}
/// Accumulated quantity of `product` over all records dated on or before
/// `date`. `raw` must be sorted by date ascending (see `get_raw`).
fn inven_single_product_by_date(product: &str, date: &str, raw: &Vec<Inventory>) -> InvenRes {
    let mut total = 0.0;
    for d in raw.iter() {
        // `raw` is date-sorted, so stop at the first record past `date`.
        // Compare as `&str` to avoid allocating a String per record
        // (previously `d.date.clone()` and `date.to_string()` each pass).
        if d.date.as_str() > date {
            break;
        }
        if d.product != product {
            continue;
        }
        total += d.qty;
    }
    let mut res = InvenRes::default();
    res.date = date.to_string();
    res.product = product.to_string();
    res.accumulate = total;
    res
}
/// Parses both inputs and computes accumulated inventory per product per day.
fn _inven_everyday(_raw: &str, _days: &str) -> std::result::Result<Vec<InvenRes>, MyError> {
    let inventory = get_raw(_raw)?;
    let day_list = get_days(_days)?;
    Ok(inven_all_product_all_date(&day_list, &inventory))
}
/// Serializes a result list to JSON, or returns the error text on failure.
fn wrap_vec_inveres(_res: std::result::Result<Vec<InvenRes>, MyError>) -> String {
    // Return the match expression directly instead of initializing a local
    // to "" and reassigning it in both arms (dead initial value).
    match _res {
        Ok(d) => serde_json::to_string(&d).unwrap(),
        Err(e) => e.to_string(),
    }
}
/// Public entry point: accumulated inventory per product per day, as JSON
/// (or the error text).
pub fn inven_everyday(_raw: &str, _days: &str) -> String {
    wrap_vec_inveres(_inven_everyday(_raw, _days))
}
//********************************************* everything above this line does not involve prices
/// Most recent price of `product` dated on or before `date`; 0.0 when no
/// price is known yet.
fn latest_price(price: Vec<Price>, product: &str, date: &str) -> f32 {
    let mut sub_price: Vec<Price> = price
        .into_par_iter()
        // Compare as `&str`: the old `date.to_string()` allocated per item.
        .filter(|p| p.product == product && p.date.as_str() <= date)
        .collect();
    // String dates have a total order; `cmp` avoids `partial_cmp().unwrap()`.
    sub_price.sort_by(|a, b| a.date.cmp(&b.date));
    // After sorting ascending, the last entry is the latest applicable price.
    sub_price.last().map_or(0.0, |p| p.price)
}
/// Total inventory value on `date`: sum over products of
/// accumulated quantity × latest known price.
fn value_one_day(
    ivens: Vec<InvenRes>,
    price: &Vec<Price>,
    date: &str,
) -> InvenRes {
    let mut res = InvenRes::default();
    res.date = date.to_string();
    let sub_ivens: Vec<InvenRes> = ivens
        .into_par_iter()
        // Compare String == &str directly; no per-item `to_string()`.
        .filter(|p| p.date == date)
        .collect();
    if sub_ivens.is_empty() {
        return res;
    }
    let mut total = 0.0;
    for iv in sub_ivens.iter() {
        // NOTE(review): `latest_price` takes the price list by value, forcing
        // a full clone per product — a borrowed parameter would avoid this.
        let p = latest_price(price.to_vec(), &iv.product, date);
        total += p * iv.accumulate;
    }
    res.value = total;
    res
}
/// Computes the total inventory value for every requested day, in parallel.
fn _value_everyday(
    _raw: &str,
    _days: &str,
    _price: &str,
) -> std::result::Result<Vec<InvenRes>, MyError> {
    let inven = _inven_everyday(_raw, _days)?;
    let days = get_days(_days)?;
    let price = get_price(_price)?;
    let res: Vec<InvenRes> = days
        .par_iter()
        .map(|d| value_one_day(inven.clone(), &price, d))
        .collect();
    Ok(res)
}
/// Public entry point: per-day total inventory value as JSON
/// (or the error text).
pub fn value_everyday(
    _raw: &str,
    _days: &str,
    _price: &str,
) -> String {
    wrap_vec_inveres(_value_everyday(_raw, _days, _price))
}
/// For every (product, day) pair, reports the latest known price in the
/// `value` field (quantity is left at 0.0).
fn _price_everyday(_days: &str, _price: &str) -> std::result::Result<Vec<InvenRes>, MyError> {
    let days = get_days(_days)?;
    let price = get_price(_price)?;
    let mut res = vec![];
    for product in all_product_by_price(&price).iter() {
        for day in days.iter() {
            res.push(InvenRes {
                product: product.to_string(),
                date: day.to_string(),
                accumulate: 0.0,
                value: latest_price(price.clone(), product, day),
            });
        }
    }
    Ok(res)
}
/// Public entry point: per-(product, day) latest prices as JSON
/// (or the error text).
pub fn price_everyday(
    _days: &str,
    _price: &str,
) -> String {
    wrap_vec_inveres(_price_everyday(_days, _price))
}
/// Smoke test exercising the three public entry points on inline JSON.
pub fn test() -> std::result::Result<(), MyError> {
    // Records may carry extra fields; the last entry of a JSON object must
    // not have a trailing comma.
    let s = r#"[
        {
            "product": "b",
            "qty":10,
            "date":"2019-01-01",
            "price":3,
            "xx":"xx"
        },
        {
            "product": "a",
            "qty":10,
            "date":"2010-01-01",
            "price":1,
            "xx":"xx"
        },
        {
            "product": "a",
            "qty":-1,
            "date":"2019-01-03",
            "price":2,
            "xx":"xx"
        }
    ]"#;
    let days = r#"[
        "2019-01-02",
        "2019-01-02",
        "2019-01-03"
    ]"#;
    // Underscore-prefix the results we only run for their side effects,
    // silencing unused-variable warnings while keeping the code paths hot.
    let _res1 = inven_everyday(s, days);
    let _res2 = value_everyday(s, days, s);
    let res3 = price_everyday(days, s);
    dbg!(res3);
    Ok(())
}
|
// Copyright 2021 Red Hat, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
mod bridge;
mod conn;
mod ip;
pub use crate::connection::bridge::NmSettingBridge;
pub use crate::connection::conn::{NmConnection, NmSettingConnection};
pub use crate::connection::ip::{NmSettingIp, NmSettingIpMethod};
pub(crate) use crate::connection::conn::{
NmConnectionDbusOwnedValue, NmConnectionDbusValue,
};
|
mod day01;
mod day02;
mod day03;
mod day04;
mod day05;
mod day06;
/// Runs both parts of each Advent of Code day against its baked-in input.
fn main() {
    // Each day's puzzle input is embedded at compile time; the `input`
    // binding is simply shadowed from day to day.
    let input = include_str!("../inputs/input1.txt");
    println!("Day 1 - part 1: {}", day01::part1(input).unwrap());
    println!("Day 1 - part 2: {}", day01::part2(input).unwrap());
    let input = include_str!("../inputs/input2.txt");
    println!("Day 2 - part 1: {}", day02::part1(input).unwrap());
    println!("Day 2 - part 2: {}", day02::part2(input).unwrap());
    let input = include_str!("../inputs/input3.txt");
    println!("Day 3 - part 1: {}", day03::part1(input).unwrap());
    println!("Day 3 - part 2: {}", day03::part2(input).unwrap());
    let input = include_str!("../inputs/input4.txt");
    println!("Day 4 - part 1: {}", day04::part1(input).unwrap());
    println!("Day 4 - part 2: {}", day04::part2(input).unwrap());
    let input = include_str!("../inputs/input5.txt");
    println!("Day 5 - part 1: {}", day05::part1(input).unwrap());
    println!("Day 5 - part 2: {}", day05::part2(input).unwrap());
    let input = include_str!("../inputs/input6.txt");
    println!("Day 6 - part 1: {}", day06::part1(input).unwrap());
    println!("Day 6 - part 2: {}", day06::part2(input).unwrap());
}
|
// Generated (svd2rust-style) register accessors — keep in sync with the SVD.
#[doc = "Reader of register DDRCTRL_PCTRL_1"]
pub type R = crate::R<u32, super::DDRCTRL_PCTRL_1>;
#[doc = "Writer for register DDRCTRL_PCTRL_1"]
pub type W = crate::W<u32, super::DDRCTRL_PCTRL_1>;
#[doc = "Register DDRCTRL_PCTRL_1 `reset()`'s with value 0"]
impl crate::ResetValue for super::DDRCTRL_PCTRL_1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `PORT_EN`"]
pub type PORT_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PORT_EN`"]
pub struct PORT_EN_W<'a> {
    // Borrow of the register writer being modified.
    w: &'a mut W,
}
impl<'a> PORT_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 0 only.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
impl R {
    #[doc = "Bit 0 - PORT_EN"]
    #[inline(always)]
    pub fn port_en(&self) -> PORT_EN_R {
        // Extract bit 0 as a bool.
        PORT_EN_R::new((self.bits & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - PORT_EN"]
    #[inline(always)]
    pub fn port_en(&mut self) -> PORT_EN_W {
        PORT_EN_W { w: self }
    }
}
|
use crate::specs::dao::dao_verifier::DAOVerifier;
use crate::{Net, Spec};
use ckb_chain_spec::ChainSpec;
/// Integration spec that replays several epochs of blocks and then checks
/// every DAO-related consensus field.
pub struct DAOVerify;
impl Spec for DAOVerify {
    crate::name!("dao_verify");
    /// Shortens the genesis epoch so five epochs fit in a quick test run.
    fn modify_chain_spec(&self) -> Box<dyn Fn(&mut ChainSpec) -> ()> {
        Box::new(|spec_config| {
            spec_config.params.genesis_epoch_length = 20;
        })
    }
    fn run(&self, net: &mut Net) {
        let node = &net.nodes[0];
        let genesis_epoch_length = node.consensus().genesis_epoch_ext().length();
        // Mine five epochs' worth of blocks, then verify DAO accounting.
        node.generate_blocks(genesis_epoch_length as usize * 5);
        DAOVerifier::init(node).verify();
    }
}
|
use multiversion::multiversion;
// Empty functions exercising each multiversion dispatcher strategy.
#[multiversion(
    targets("x86_64+avx", "x86+avx", "x86+sse", "aarch64+neon"),
    dispatcher = "default"
)]
fn default_dispatch() {}
#[multiversion(
    targets("x86_64+avx", "x86+avx", "x86+sse", "aarch64+neon"),
    dispatcher = "static"
)]
fn static_dispatch() {}
// The direct/indirect dispatchers need runtime feature detection, which
// requires std.
#[cfg(feature = "std")]
#[multiversion(
    targets("x86_64+avx", "x86+avx", "x86+sse", "aarch64+neon"),
    dispatcher = "direct"
)]
fn direct_dispatch() {}
#[cfg(feature = "std")]
#[multiversion(
    targets("x86_64+avx", "x86+avx", "x86+sse", "aarch64+neon"),
    dispatcher = "indirect"
)]
fn indirect_dispatch() {}
// Since x86_64 always has sse, this should never result in runtime dispatch
#[multiversion(targets("x86_64+sse"), dispatcher = "default")]
fn skip_dispatch() {}
// Since aarch64 always has neon, this should never result in runtime dispatch
#[multiversion(targets("aarch64+neon"), dispatcher = "default")]
fn skip_dispatch_2() {}
// Smoke test: every dispatcher variant must be callable without panicking.
#[test]
fn dispatchers() {
    default_dispatch();
    static_dispatch();
    #[cfg(feature = "std")]
    direct_dispatch();
    #[cfg(feature = "std")]
    indirect_dispatch();
    skip_dispatch();
    skip_dispatch_2();
}
|
use jsl::{Config, Schema, SerdeSchema, Validator};
use serde::Deserialize;
use serde_json::Value;
use std::fs;
/// One spec-file test suite: a schema plus the instances to validate
/// against it.
#[derive(Deserialize)]
struct TestSuite {
    name: String,
    schema: SerdeSchema,
    #[serde(rename = "strictInstance")]
    strict_instance: bool,
    instances: Vec<TestCase>,
}
/// A single instance and the validation errors it is expected to produce.
#[derive(Deserialize)]
struct TestCase {
    instance: Value,
    errors: Vec<TestCaseError>,
}
/// An expected validation error, identified by its instance/schema paths.
#[derive(Debug, Deserialize, PartialEq)]
struct TestCaseError {
    #[serde(rename = "instancePath")]
    instance_path: String,
    #[serde(rename = "schemaPath")]
    schema_path: String,
}
// Runs every JSON test suite under `spec/tests` against the validator.
#[test]
fn spec() -> Result<(), std::io::Error> {
    // Sort paths so the suites run in a deterministic order.
    let mut test_files: Vec<_> = fs::read_dir("spec/tests")?
        .map(|entry| entry.expect("error getting dir entry").path())
        .collect();
    test_files.sort();
    for path in test_files {
        println!("{:?}", &path);
        let file = fs::read(path)?;
        let suites: Vec<TestSuite> = serde_json::from_slice(&file)?;
        for (i, suite) in suites.into_iter().enumerate() {
            println!("{}: {}", i, suite.name);
            let schema = Schema::from_serde(suite.schema).expect("error parsing schema");
            let mut config = Config::new();
            config.strict_instance_semantics(suite.strict_instance);
            let validator = Validator::new_with_config(config);
            for (j, mut test_case) in suite.instances.into_iter().enumerate() {
                println!("{}/{}", i, j);
                let mut actual_errors: Vec<_> = validator
                    .validate(&schema, &test_case.instance)
                    .expect("error validating instance")
                    .into_iter()
                    .map(|error| TestCaseError {
                        instance_path: error.instance_path().to_string(),
                        schema_path: error.schema_path().to_string(),
                    })
                    .collect();
                // Error ordering is unspecified: sort both sides by the same
                // composite key before comparing.
                actual_errors
                    .sort_by_key(|err| format!("{},{}", err.schema_path, err.instance_path));
                test_case
                    .errors
                    .sort_by_key(|err| format!("{},{}", err.schema_path, err.instance_path));
                assert_eq!(actual_errors, test_case.errors);
            }
        }
    }
    Ok(())
}
|
use std::fmt::{Display, Formatter};
use std::str::FromStr;
use http::header::{HeaderName, HeaderValue as HeaderValueInner, InvalidHeaderValue};
use http::Method;
use serde::de::Error;
use serde::ser::SerializeSeq;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use crate::config::default_server_origin;
use crate::types::TaggedTypeAll;
/// The maximum default amount of time a CORS request can be cached for in seconds.
/// (86400 seconds = 24 hours.)
const CORS_MAX_AGE: usize = 86400;
/// Tagged allow headers for cors config, either Mirror or Any.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub enum TaggedAllowTypes {
    /// Echo the request's own value back in the CORS response.
    #[serde(alias = "mirror", alias = "MIRROR")]
    Mirror,
    /// Allow any value (wildcard).
    #[serde(alias = "all", alias = "ALL")]
    All,
}
/// Allowed type for cors config which is used to configure cors behaviour.
// Untagged: deserializes either a tagged keyword ("All"/"Mirror") or an
// explicit list of values parsed via `FromStr`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(untagged)]
pub enum AllowType<T, Tagged = TaggedTypeAll> {
    Tagged(Tagged),
    #[serde(bound(serialize = "T: Display", deserialize = "T: FromStr, T::Err: Display"))]
    #[serde(
        serialize_with = "serialize_allow_types",
        deserialize_with = "deserialize_allow_types"
    )]
    List(Vec<T>),
}
impl<T, Tagged> AllowType<T, Tagged> {
    /// Apply a function to the builder when the type is a List.
    pub fn apply_list<F, U>(&self, func: F, builder: U) -> U
    where
        F: FnOnce(U, &Vec<T>) -> U,
    {
        match self {
            Self::List(list) => func(builder, list),
            _ => builder,
        }
    }
    /// Apply a function to the builder when the type is the given tagged
    /// variant; otherwise return the builder untouched.
    pub fn apply_tagged<F, U>(&self, func: F, builder: U, tagged_type: &Tagged) -> U
    where
        F: FnOnce(U) -> U,
        Tagged: Eq,
    {
        match self {
            Self::Tagged(tagged) if tagged == tagged_type => func(builder),
            _ => builder,
        }
    }
}
impl<T> AllowType<T, TaggedAllowTypes> {
    /// Apply a function to the builder when the type is Mirror.
    pub fn apply_mirror<F, U>(&self, func: F, builder: U) -> U
    where
        F: FnOnce(U) -> U,
    {
        self.apply_tagged(func, builder, &TaggedAllowTypes::Mirror)
    }
    /// Apply a function to the builder when the type is Any.
    pub fn apply_any<F, U>(&self, func: F, builder: U) -> U
    where
        F: FnOnce(U) -> U,
    {
        self.apply_tagged(func, builder, &TaggedAllowTypes::All)
    }
}
impl<T> AllowType<T, TaggedTypeAll> {
    /// Apply a function to the builder when the type is Any.
    pub fn apply_any<F, U>(&self, func: F, builder: U) -> U
    where
        F: FnOnce(U) -> U,
    {
        self.apply_tagged(func, builder, &TaggedTypeAll::All)
    }
}
/// Serializes the list by writing each element's `Display` form.
fn serialize_allow_types<S, T>(names: &Vec<T>, serializer: S) -> Result<S::Ok, S::Error>
where
    T: Display,
    S: Serializer,
{
    // `collect_seq` is serde's idiomatic shorthand for
    // serialize_seq + serialize_element + end, and reports the length itself.
    serializer.collect_seq(names.iter().map(|name| name.to_string()))
}
fn deserialize_allow_types<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
where
T: FromStr,
T::Err: Display,
D: Deserializer<'de>,
{
let names: Vec<String> = Deserialize::deserialize(deserializer)?;
names
.into_iter()
.map(|name| T::from_str(&name).map_err(Error::custom))
.collect()
}
/// A wrapper around a http HeaderValue which is used to implement FromStr and Display.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct HeaderValue(HeaderValueInner);
impl HeaderValue {
    /// Unwraps into the underlying `http::HeaderValue`.
    pub fn into_inner(self) -> HeaderValueInner {
        self.0
    }
}
impl FromStr for HeaderValue {
type Err = InvalidHeaderValue;
fn from_str(header: &str) -> Result<Self, Self::Err> {
Ok(HeaderValue(HeaderValueInner::from_str(header)?))
}
}
impl Display for HeaderValue {
    /// Displays the header bytes, lossily replacing non-UTF-8 sequences.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(&String::from_utf8_lossy(self.0.as_ref()))
    }
}
/// Cors configuration for the htsget server.
// `serde(default)` means any missing field falls back to `Default::default()`.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(default)]
pub struct CorsConfig {
    allow_credentials: bool,
    allow_origins: AllowType<HeaderValue, TaggedAllowTypes>,
    allow_headers: AllowType<HeaderName>,
    allow_methods: AllowType<Method>,
    max_age: usize,
    expose_headers: AllowType<HeaderName>,
}
impl CorsConfig {
    /// Create new cors config.
    // Plain constructor; see `Default` for the out-of-the-box values.
    pub fn new(
        allow_credentials: bool,
        allow_origins: AllowType<HeaderValue, TaggedAllowTypes>,
        allow_headers: AllowType<HeaderName>,
        allow_methods: AllowType<Method>,
        max_age: usize,
        expose_headers: AllowType<HeaderName>,
    ) -> Self {
        Self {
            allow_credentials,
            allow_origins,
            allow_headers,
            allow_methods,
            max_age,
            expose_headers,
        }
    }
    /// Get allow credentials.
    pub fn allow_credentials(&self) -> bool {
        self.allow_credentials
    }
    /// Get allow origins.
    pub fn allow_origins(&self) -> &AllowType<HeaderValue, TaggedAllowTypes> {
        &self.allow_origins
    }
    /// Get allow headers.
    pub fn allow_headers(&self) -> &AllowType<HeaderName> {
        &self.allow_headers
    }
    /// Get allow methods.
    pub fn allow_methods(&self) -> &AllowType<Method> {
        &self.allow_methods
    }
    /// Get max age.
    pub fn max_age(&self) -> usize {
        self.max_age
    }
    /// Get expose headers.
    pub fn expose_headers(&self) -> &AllowType<HeaderName> {
        &self.expose_headers
    }
}
impl Default for CorsConfig {
    /// Default configuration: no credentials, only the default server origin,
    /// all headers and methods allowed, no exposed headers.
    fn default() -> Self {
        Self::new(
            false,
            AllowType::List(vec![HeaderValue(HeaderValueInner::from_static(
                default_server_origin(),
            ))]),
            AllowType::Tagged(TaggedTypeAll::All),
            AllowType::Tagged(TaggedTypeAll::All),
            CORS_MAX_AGE,
            AllowType::List(vec![]),
        )
    }
}
#[cfg(test)]
mod tests {
    use std::fmt::Debug;
    use http::Method;
    use toml::de::Error;
    use super::*;
    /// Parses `input`, checks the selected field, then round-trips the config
    /// through toml and checks the field again.
    fn test_cors_config<T, F>(input: &str, expected: &T, get_result: F)
    where
        F: Fn(&CorsConfig) -> &T,
        T: Debug + Eq,
    {
        let parsed: CorsConfig = toml::from_str(input).unwrap();
        assert_eq!(expected, get_result(&parsed));
        let round_tripped: CorsConfig =
            toml::from_str(&toml::to_string(&parsed).unwrap()).unwrap();
        assert_eq!(expected, get_result(&round_tripped));
    }
    #[test]
    fn unit_variant_any_allow_type() {
        test_cors_config(
            r#"allow_methods = "All""#,
            &AllowType::Tagged(TaggedTypeAll::All),
            |config| config.allow_methods(),
        );
    }
    #[test]
    fn unit_variant_mirror_allow_type() {
        test_cors_config(
            r#"allow_origins = "Mirror""#,
            &AllowType::Tagged(TaggedAllowTypes::Mirror),
            |config| config.allow_origins(),
        );
    }
    #[test]
    fn list_allow_type() {
        test_cors_config(
            r#"allow_methods = ["GET"]"#,
            &AllowType::List(vec![Method::GET]),
            |config| config.allow_methods(),
        );
    }
    #[test]
    fn tagged_any_allow_type() {
        test_cors_config(
            r#"expose_headers = "All""#,
            &AllowType::Tagged(TaggedTypeAll::All),
            |config| config.expose_headers(),
        );
    }
    #[test]
    fn tagged_any_allow_type_err_on_mirror() {
        // `Mirror` is only meaningful for origins, not exposed headers.
        let config: Result<CorsConfig, Error> = toml::from_str(r#"expose_headers = "Mirror""#);
        assert!(config.is_err());
    }
}
|
use crate::error::{Error, Result};
use crate::field::{DbField, DbFieldType, Field};
use crate::input::DbValue;
/// A dedicated type for id field of INT AUTOINCREMENT PRIMARY KEY NOT NULL
pub struct IdField {
    // Column description shared by all Field implementations.
    pub db_field: DbField,
}
impl IdField {
    /// Builds the canonical "id" column: INT AUTOINCREMENT PRIMARY KEY NOT NULL.
    pub fn id() -> Box<Self> {
        let db_field = DbField {
            name: "id".into(),
            field_type: DbFieldType::Int,
            primary_key: true,
            auto_increment: true,
            null: false,
        };
        Box::new(IdField { db_field })
    }
}
impl Field for IdField {
fn db_field(&self) -> &DbField {
&self.db_field
}
fn validate_value(&self, value: &DbValue) -> Result<()> {
match value {
DbValue::Int(_) => Ok(()),
_ => Err(Error::invalid_value("int", self, value)),
}
}
fn render_html(&self, b: &mut String, row: &rusqlite::Row, i: usize) -> Result<()> {
let v: i32 = row.get(i)?;
b.push_str(&v.to_string());
Ok(())
}
fn json_to_input(&self, value: &serde_json::Value) -> Result<DbValue> {
match value {
serde_json::Value::Number(v) => match v.as_i64() {
Some(int) => Ok(DbValue::Int(int)),
None => Err(Error::invalid_json("i64", self, value)),
},
_ => Err(Error::invalid_json("number", self, value)),
}
}
}
|
// q0069_sqrtx
struct Solution;
impl Solution {
    /// Integer square root: the largest `r` with `r * r <= x`.
    ///
    /// Binary-searches in `[1, 46340]`; 46340 is the floor of
    /// `sqrt(i32::MAX)`, so `mid * mid` can never overflow.
    pub fn my_sqrt(x: i32) -> i32 {
        if x <= 1 {
            return x;
        }
        const MAX_ROOT: i32 = 46340; // sqrt(i32::max_value())
        if x >= MAX_ROOT * MAX_ROOT {
            return MAX_ROOT;
        }
        // Invariant: lo*lo <= x < hi*hi.
        let (mut lo, mut hi) = (1, MAX_ROOT);
        while lo + 1 != hi {
            let mid = lo + (hi - lo) / 2;
            let square = mid * mid;
            if square == x {
                return mid;
            }
            if square < x {
                lo = mid;
            } else {
                hi = mid;
            }
        }
        lo
    }
}
#[cfg(test)]
mod tests {
    use super::Solution;
    #[test]
    fn it_works() {
        // 46340^2 == 2147395600 is the largest perfect square in an i32.
        assert_eq!(Solution::my_sqrt(2147395600), 46340);
        // Check every small input against a brute-force floor square root.
        for x in 0..=26 {
            let mut expected = 0;
            while (expected + 1) * (expected + 1) <= x {
                expected += 1;
            }
            assert_eq!(Solution::my_sqrt(x), expected);
        }
    }
}
|
use std::collections::HashSet;
const WINDOW_SIZE: usize = 25;
/// Finds two entries of `haystack` (at distinct positions) that add up to
/// `sum`, returning their values.
///
/// Fix: the previous HashSet-only version lost multiplicity, so a single
/// occurrence of `sum / 2` was wrongly reported as the pair
/// `(sum / 2, sum / 2)`. Equal halves now require two occurrences.
fn find_sum_in_window(haystack: &[u64], sum: u64) -> Option<(u64, u64)> {
    // Values > sum can never participate (u64 subtraction would underflow).
    let candidates: Vec<u64> = haystack.iter().copied().filter(|&e| e <= sum).collect();
    let set: HashSet<u64> = candidates.iter().copied().collect();
    for &left in &candidates {
        let right = sum - left;
        if left == right {
            // The pair uses the same value twice: demand two occurrences.
            if candidates.iter().filter(|&&e| e == left).count() >= 2 {
                return Some((left, right));
            }
        } else if set.contains(&right) {
            return Some((left, right));
        }
    }
    None
}
/// Returns the first value that is not the sum of any two entries among the
/// preceding `WINDOW_SIZE` values.
pub fn find_invalid_window(input: &[u64]) -> Option<u64> {
    // Potential optimization: maintain a hash set of values, replacing the old value with the new one as the window shifts
    input.windows(WINDOW_SIZE + 1).find_map(|window| {
        let candidate = window[WINDOW_SIZE];
        if find_sum_in_window(&window[..WINDOW_SIZE], candidate).is_none() {
            Some(candidate)
        } else {
            None
        }
    })
}
/// Parses one number per line and reports the first XMAS-invalid value.
pub fn part1(input: &str) -> Option<u64> {
    let numbers = input
        .lines()
        .map(|line| line.parse().unwrap())
        .collect::<Vec<u64>>();
    assert!(numbers.len() > WINDOW_SIZE);
    find_invalid_window(&numbers)
}
/// Returns the first contiguous run of exactly `size` entries whose total is `sum`.
pub fn find_contigious_range_of_size_n(input: &[u64], size: usize, sum: u64) -> Option<&[u64]> {
    for window in input.windows(size) {
        if window.iter().sum::<u64>() == sum {
            return Some(window);
        }
    }
    None
}
/// Finds a contiguous range summing to the part-1 invalid number and returns
/// the sum of that range's smallest and largest entries.
pub fn part2(input: &str) -> Option<u64> {
    let numbers: Vec<u64> = input.lines().map(|line| line.parse().unwrap()).collect();
    let target = find_invalid_window(&numbers).unwrap();
    // Try ever-larger range sizes until one matches.
    (2..).find_map(|size| {
        find_contigious_range_of_size_n(&numbers, size, target)
            .map(|range| range.iter().min().unwrap() + range.iter().max().unwrap())
    })
}
|
use std::io::{Result, Write};
use pulldown_cmark::{Tag, Event, Alignment};
use crate::gen::{State, States, Generator, Document};
#[derive(Debug)]
pub struct Table;
impl<'a> State<'a> for Table {
    /// Opens a `tabular` environment with one column per table alignment.
    fn new(tag: Tag<'a>, gen: &mut Generator<'a, impl Document<'a>, impl Write>) -> Result<Self> {
        let alignments = match tag {
            Tag::Table(alignments) => alignments,
            _ => unreachable!(),
        };
        // TODO: in-cell linebreaks
        // TODO: merging columns
        // TODO: merging rows
        // TODO: easier custom formatting
        let out = gen.get_out();
        // Column spec: `l`/`c`/`r` per column, `|` for vertical rules.
        write!(out, "\\begin{{tabular}}{{|")?;
        for alignment in alignments {
            let column = match alignment {
                Alignment::Center => " c |",
                Alignment::Right => " r |",
                Alignment::None | Alignment::Left => " l |",
            };
            write!(out, "{}", column)?;
        }
        writeln!(out, "}}")?;
        writeln!(out, "\\hline")?;
        Ok(Table)
    }
    fn finish(self, gen: &mut Generator<'a, impl Document<'a>, impl Write>, peek: Option<&Event<'a>>) -> Result<()> {
        writeln!(gen.get_out(), "\\end{{tabular}}")?;
        Ok(())
    }
}
// Emits nothing on entry; closes the header row with a thick rule.
#[derive(Debug)]
pub struct TableHead;
impl<'a> State<'a> for TableHead {
    fn new(tag: Tag<'a>, gen: &mut Generator<'a, impl Document<'a>, impl Write>) -> Result<Self> {
        Ok(TableHead)
    }
    // `\\` ends the row; `\thickhline` separates header from body — this is a
    // custom macro, presumably defined in the generated preamble (confirm).
    fn finish(self, gen: &mut Generator<'a, impl Document<'a>, impl Write>, peek: Option<&Event<'a>>) -> Result<()> {
        writeln!(gen.get_out(), "\\\\ \\thickhline")?;
        Ok(())
    }
}
// Emits nothing on entry; closes a body row with a normal rule.
#[derive(Debug)]
pub struct TableRow;
impl<'a> State<'a> for TableRow {
    fn new(tag: Tag<'a>, gen: &mut Generator<'a, impl Document<'a>, impl Write>) -> Result<Self> {
        Ok(TableRow)
    }
    // `\\` ends the row; `\hline` draws the horizontal rule beneath it.
    fn finish(self, gen: &mut Generator<'a, impl Document<'a>, impl Write>, peek: Option<&Event<'a>>) -> Result<()> {
        writeln!(gen.get_out(), "\\\\ \\hline")?;
        Ok(())
    }
}
#[derive(Debug)]
pub struct TableCell;
impl<'a> State<'a> for TableCell {
    fn new(tag: Tag<'a>, gen: &mut Generator<'a, impl Document<'a>, impl Write>) -> Result<Self> {
        Ok(TableCell)
    }
    /// Emits the `&` column separator when another cell follows in this row.
    fn finish(self, gen: &mut Generator<'a, impl Document<'a>, impl Write>, peek: Option<&Event<'a>>) -> Result<()> {
        // Fix: the previous `peek.unwrap()` panicked if a cell was the final
        // event in the stream; matching the Option handles end-of-input.
        if let Some(Event::Start(Tag::TableCell)) = peek {
            write!(gen.get_out(), "&")?;
        }
        Ok(())
    }
}
|
use map::Dungeon;
use entity::Entity;
// Game update functionality
/// Attempts to move the player by (dx, dy); returns whether the move happened.
/// The move is rejected when it leaves the dungeon bounds or hits a wall.
pub fn move_player(player: &mut Entity, dungeon: &Dungeon, dx: i32, dy: i32) -> bool {
    let new_x = player.location_x + dx; // TODO: collision ray in case dx > 1
    let new_y = player.location_y + dy; // TODO: collision ray in case dy > 1
    let in_bounds = new_x >= 0
        && new_y >= 0
        && new_x < dungeon.get_width() as i32
        && new_y < dungeon.get_height() as i32;
    if !in_bounds {
        return false;
    }
    // hack for now: '#' marks a wall tile
    if dungeon.get_at(new_x as usize, new_y as usize) == '#' {
        return false;
    }
    player.location_x = new_x;
    player.location_y = new_y;
    true
}
|
use shared::*;
use std::collections::HashMap;
/// Advent of Code 2015 day 3: count distinct houses visited by Santa
/// (part 1) and by Santa + Robo-Santa taking alternating moves (part 2).
fn main() {
    let input: Vec<Direction> = include_str!("input.txt")
        .trim()
        .chars()
        .map(|c| match c {
            '<' => Direction::West,
            '>' => Direction::East,
            '^' => Direction::North,
            'v' => Direction::South,
            // Fix: `panic!(format!(...))` is deprecated and a hard error in
            // the 2021 edition; pass the format arguments directly.
            x => panic!("Unknown direction {}", x),
        })
        .collect();
    {
        let mut houses: HashMap<Position, usize> = HashMap::new();
        let mut pos = Position { x: 0, y: 0 };
        houses.insert(pos.clone(), 1);
        for dir in &input {
            *houses.entry(pos.clone()).or_insert(0) += 1;
            pos = move_in_direction(&pos, dir);
        }
        println!("Part 1: {}", houses.len());
    }
    {
        let mut houses: HashMap<Position, usize> = HashMap::new();
        let mut pos1 = Position { x: 0, y: 0 };
        let mut pos2 = pos1.clone();
        houses.insert(pos1.clone(), 2);
        for (i, dir) in input.iter().enumerate() {
            // Even-indexed moves drive Santa, odd-indexed drive Robo-Santa.
            if i % 2 == 0 {
                *houses.entry(pos1.clone()).or_insert(0) += 1;
                pos1 = move_in_direction(&pos1, dir);
            } else {
                *houses.entry(pos2.clone()).or_insert(0) += 1;
                pos2 = move_in_direction(&pos2, dir);
            }
        }
        println!("Part 2: {}", houses.len());
    }
}
|
/// Prints the FizzBuzz sequence for 1..=100.
///
/// Fixes two things in the pre-1.0 original: the labels were swapped
/// (multiples of three must print "Fizz" and multiples of five "Buzz"),
/// and the dead pre-1.0 syntax (`range(1i, 101)`, `int`, `to_str`,
/// `{:s}`) is modernized to current Rust.
fn main() {
    for number in 1..=100 {
        let output = if div_fifteen(number) {
            "FizzBuzz".to_string()
        } else if div_three(number) {
            "Fizz".to_string()
        } else if div_five(number) {
            "Buzz".to_string()
        } else {
            number.to_string()
        };
        println!("{}", output);
    }
}
/// True when `number` is a multiple of 15 (i.e. of both 3 and 5).
fn div_fifteen(number: i32) -> bool {
    number % 15 == 0
}
/// True when `number` is a multiple of 5.
fn div_five(number: i32) -> bool {
    number % 5 == 0
}
/// True when `number` is a multiple of 3.
fn div_three(number: i32) -> bool {
    number % 3 == 0
}
use core::iter;
use std::collections::VecDeque;
use std::marker::PhantomData;
use std::thread;
use enum_iterator::IntoEnumIterator;
use crate::BenchStr;
/// Runs the benchmark either on the current thread (`number_of_threads == 1`)
/// or on that many freshly spawned threads, joining them all.
pub fn run_benchmark<TBenchStr: BenchStr>(
    number_of_threads: usize,
    test_set: &'static [&'static str],
) {
    if number_of_threads == 1 {
        run_benchmark_single_threaded::<TBenchStr>(test_set);
        return;
    }
    let handles: Vec<_> = (0..number_of_threads)
        .map(|_| thread::spawn(move || run_benchmark_single_threaded::<TBenchStr>(test_set)))
        .collect();
    for handle in handles {
        handle.join().unwrap();
    }
}
// State for one single-threaded benchmark run over a fixed test set.
pub struct Benchmark<'a, TBenchStr> {
    test_set: &'a [&'static str],
    // we keep some references around to make sure allocator has to do more than just work like a stack.
    collector: Collector<TBenchStr>,
    // NOTE(review): `collector` already mentions TBenchStr, so this marker
    // looks redundant — removing it would require touching `new` as well.
    _phantom: PhantomData<TBenchStr>,
}
/// Builds a benchmark over `test_set` and runs it to completion on the
/// current thread.
pub fn run_benchmark_single_threaded<TBenchStr: BenchStr>(test_set: &[&'static str]) {
    Benchmark::<TBenchStr>::new(test_set).perform();
}
impl<'a, TBenchStr: BenchStr> Benchmark<'a, TBenchStr> {
    /// Creates a benchmark over `test_set` with a collector bounded to
    /// 8 MiB / 300 retained strings.
    fn new(test_set: &'a [&'static str]) -> Self {
        Self {
            test_set,
            collector: Collector::new(1024 * 1024 * 8, 300),
            _phantom: Default::default(),
        }
    }
    /// Runs the whole workload repeatedly so the collector reaches a filled,
    /// steady state.
    fn perform(&mut self) {
        // small items are the usual case, so we over-represent them
        let smallest = self.collect_smallest(self.test_set.len() / 4);
        // perform a few times to make sure the collector gets filled.
        for _ in 0..50 {
            self.perform_with_test_set(&smallest);
            self.perform_with_test_set(self.test_set);
            self.perform_with_test_set(&smallest);
            self.perform_with_test_set(&smallest);
        }
    }
    /// Exercises join/clone/slice per item, then one multi-item join.
    fn perform_with_test_set(&mut self, items: &[&'static str]) {
        if items.is_empty() {
            return;
        }
        for item in items {
            self.join_single(item);
            self.do_clone(item);
            self.slice(item);
        }
        self.join(items);
    }
    /// Joins all items into one string, once per construction function.
    fn join(&mut self, items: &[&'static str]) {
        for from_fn in FromFunction::into_enum_iter() {
            let mut strings: Vec<TBenchStr> = Vec::with_capacity(items.len());
            for item in items {
                strings.push(Self::bench_str_from(from_fn, item));
            }
            let string = TBenchStr::from_multiple(strings.into_iter());
            let cloned = string.clone();
            self.collector.keep(string);
            self.collector.keep(cloned);
        }
    }
    /// Joins a one-element iterator, verifying the round-trip.
    fn join_single(&mut self, item: &'static str) {
        // make sure join is also efficient if there's just one single item
        for from_fn in FromFunction::into_enum_iter() {
            let string = TBenchStr::from_multiple(iter::once(Self::bench_str_from(from_fn, item)));
            assert_eq!(string.as_slice(), item);
            self.collector.keep(string);
        }
    }
    /// Clones a freshly built string and verifies the clone's contents.
    fn do_clone(&mut self, item: &'static str) {
        for from_fn in FromFunction::into_enum_iter() {
            let string = Self::bench_str_from(from_fn, item);
            assert_eq!(string.as_slice(), string.clone().as_slice());
            self.collector.keep(string);
        }
    }
    /// Takes several representative slices (full, prefix, interior, larger
    /// interior); out-of-range slices yield `None` and are skipped.
    fn slice(&mut self, item: &'static str) {
        for from_fn in FromFunction::into_enum_iter() {
            let string = Self::bench_str_from(from_fn, item);
            let len = string.as_slice().len();
            // first slice full.
            let slice = string.slice(0, len);
            if let Some(slice) = slice {
                self.collector.keep(slice);
            }
            // from start / small
            let slice = string.slice(0, 3);
            if let Some(slice) = slice {
                self.collector.keep(slice);
            }
            // not from start (still small)
            let slice = string.slice(25, 29);
            if let Some(slice) = slice {
                self.collector.keep(slice);
            }
            // not from start bigger
            let slice = string.slice(14, 350);
            if let Some(slice) = slice {
                self.collector.keep(slice);
            }
            self.collector.keep(string);
        }
    }
    /// Builds a `TBenchStr` from `item` using the requested constructor.
    fn bench_str_from(from_fn: FromFunction, item: &'static str) -> TBenchStr {
        match from_fn {
            FromFunction::Static => TBenchStr::from_static(item),
            FromFunction::BinIter => {
                TBenchStr::from_bin_iter(item.as_bytes().iter().cloned()).unwrap()
            }
            FromFunction::Str => TBenchStr::from_str(item),
        }
    }
    /// Returns at most `max` of the shortest items; the stable sort keeps the
    /// input order for equal lengths, matching the previous `sort_by`.
    fn collect_smallest(&self, max: usize) -> Vec<&'static str> {
        let mut vec: Vec<&'static str> = self.test_set.to_owned();
        // `sort_by_key` replaces the hand-written length comparator.
        vec.sort_by_key(|s| s.len());
        // `truncate` replaces a loop of repeated `remove(len - 1)` calls and
        // is a no-op when there are already at most `max` items.
        vec.truncate(max);
        vec
    }
}
// The three ways a benchmark string can be constructed; iterated via
// `into_enum_iter()` so every code path is exercised.
#[derive(Copy, Clone, IntoEnumIterator)]
enum FromFunction {
    // TBenchStr::from_static — from a &'static str.
    Static,
    // TBenchStr::from_bin_iter — from a byte iterator.
    BinIter,
    // TBenchStr::from_str — from a &str.
    Str,
}
// Retains a bounded sample of benchmark strings across three queues so the
// allocator sees long-lived allocations, not pure stack-like churn.
struct Collector<TBenchStr> {
    queue_0: CollectorQueue<TBenchStr>,
    queue_1: CollectorQueue<TBenchStr>,
    queue_2: CollectorQueue<TBenchStr>,
    // Total number of `keep` calls; used to spread items over the queues.
    counter: usize,
}
impl<TBenchStr: BenchStr> Collector<TBenchStr> {
    /// Splits the byte/item budgets evenly over the three queues.
    fn new(max_number_of_bytes: usize, max_number_of_items: usize) -> Self {
        let bytes_per_queue = max_number_of_bytes / 3;
        let items_per_queue = max_number_of_items / 3;
        Self {
            queue_0: CollectorQueue::new(bytes_per_queue, items_per_queue),
            queue_1: CollectorQueue::new(bytes_per_queue, items_per_queue),
            queue_2: CollectorQueue::new(bytes_per_queue, items_per_queue),
            counter: 0,
        }
    }
    /// Round-robins `string` over the queues; every fourth item is dropped
    /// immediately to mix allocation lifetimes.
    fn keep(&mut self, string: TBenchStr) {
        self.counter += 1;
        match self.counter % 4 {
            0 => self.queue_0.push(string),
            1 => self.queue_1.push(string),
            2 => self.queue_2.push(string),
            // remainder 3: drop right away
            _ => {}
        }
    }
}
// we keep some references around to make sure allocator has to do more than just work like a stack.
// FIFO of retained strings, bounded both by total byte size and item count.
struct CollectorQueue<TBenchStr> {
    queue: VecDeque<TBenchStr>,
    // Running total of `as_slice().len()` over everything currently queued.
    number_of_bytes: usize,
    max_number_of_bytes: usize,
    max_number_of_items: usize,
}
impl<TBenchStr: BenchStr> CollectorQueue<TBenchStr> {
    /// Creates an empty queue with the given byte/item budgets.
    pub fn new(max_number_of_bytes: usize, max_number_of_items: usize) -> Self {
        Self {
            queue: Default::default(),
            number_of_bytes: 0,
            max_number_of_bytes,
            max_number_of_items,
        }
    }
    /// Appends `str`, then evicts from the front until both budgets hold.
    fn push(&mut self, str: TBenchStr) {
        self.number_of_bytes += str.as_slice().len();
        self.queue.push_back(str);
        loop {
            let over_bytes = self.number_of_bytes > self.max_number_of_bytes;
            let over_items = self.queue.len() > self.max_number_of_items;
            if !over_bytes && !over_items {
                break;
            }
            self.remove_front();
        }
    }
    /// Drops the oldest entry, crediting its size back to the byte total.
    fn remove_front(&mut self) {
        if let Some(removed) = self.queue.pop_front() {
            self.number_of_bytes -= removed.as_slice().len();
        }
    }
}
|
#![allow(unused)]
use std::sync::atomic;
use std::sync::{self, Mutex, Arc};
use std::sync::mpsc::{Sender, Receiver, self, channel};
use std::collections::HashMap;
use clipboard_win::{get_clipboard_string, set_clipboard_string};
use gdk::Window;
// Callback invoked with the hotkey id when a registered hotkey fires.
pub type BindHandler = Arc<dyn Fn(i32) + Send + Sync + 'static>;
// Callback invoked with the new clipboard text when the clipboard changes.
pub type ClipboardHandler = Arc<dyn Fn(String) + Send + Sync + 'static>;
// Requests that can be posted to the win32 worker thread.
#[allow(unused)]
pub enum WindowsApiEvent {
    HotkeyRegister { id : i32, modifiers : u32, vk : u32, handler : BindHandler},
    SetClipboard { text : String },
    AddClipboardListener { handler : ClipboardHandler},
}
// Singleton state for the worker thread (stored in HOTKEY_DAYA).
pub struct HotkeyData {
    // Join handle of the spawned worker thread; kept so the thread stays owned.
    thread_handle : Option<::std::thread::JoinHandle<()>>,
    // Win32 thread id, used with PostThreadMessageA to wake the message loop.
    thread_id : usize,
    // Sending half of the request channel the worker drains.
    tx : Option<Sender<WindowsApiEvent>>,
}
// Cheap per-call handle for posting events to the worker thread.
pub struct HotkeyProxy {
    thread_id : usize,
    tx : Sender<WindowsApiEvent>,
}
impl HotkeyProxy {
    /// Queues `event` for the worker thread, then posts message 30000 so its
    /// blocking `GetMessageA` loop wakes up and drains the channel.
    pub fn post_event(&self, event : WindowsApiEvent) {
        self.tx.send(event).expect("post event failure");
        unsafe {
            winapi::um::winuser::PostThreadMessageA(self.thread_id as u32, 30000, 0, 0);
        }
    }
}
// Interesting outcomes of pumping one Win32 message.
pub enum ReceivedMessage {
    // A registered hotkey fired; `id` is the id given to RegisterHotKey.
    Hotkey { id: i32},
    // The clipboard contents changed (WM_CLIPBOARDUPDATE).
    ClipboardUpdate,
    // Any other message, including the internal 30000 wake-up.
    Nothing,
}
/// Blocks for one message on this thread's Win32 queue and classifies it;
/// all messages are still translated and dispatched afterwards.
fn get_single_message() -> ReceivedMessage {
    use winapi::um::winuser::{LPMSG, GetMessageA, MSG, *, TranslateMessage, DispatchMessageA};
    use std::default::Default;
    let mut msg = Default::default();
    let mut result = ReceivedMessage::Nothing;
    // GetMessageA returns 0 only for WM_QUIT; every other message lands here.
    if unsafe { GetMessageA(&mut msg, 0 as winapi::shared::windef::HWND, 0, 0) != 0 } {
        match msg.message {
            WM_HOTKEY => {
                // wParam carries the id passed to RegisterHotKey.
                result = ReceivedMessage::Hotkey {id : msg.wParam as i32 };
            }
            WM_CLIPBOARDUPDATE => {
                result = ReceivedMessage::ClipboardUpdate;
            }
            30000 => {
                // Internal wake-up posted by HotkeyProxy::post_event; no payload.
            }
            _ => {
            }
        }
        unsafe {
            TranslateMessage(&msg);
            DispatchMessageA(&msg);
        }
    }
    result
}
/// Converts `str` to a NUL-terminated UTF-16 buffer for Win32 W-APIs.
fn to_wstring(str: &str) -> Vec<u16> {
    use std::os::windows::ffi::OsStrExt;
    let mut wide: Vec<u16> = ::std::ffi::OsStr::new(str).encode_wide().collect();
    wide.push(0);
    wide
}
impl HotkeyData {
    /// Posts `hotkey` to the worker thread, starting the thread on first use.
    pub fn do_it(hotkey : WindowsApiEvent) {
        Self::init().post_event(hotkey);
    }
    /// Sets the clipboard text (performed asynchronously on the worker thread).
    pub fn set_clipboard(text: &str) {
        Self::do_it(WindowsApiEvent::SetClipboard { text : text.to_owned()});
    }
    /// Reads the clipboard text directly; `None` when it cannot be read.
    pub fn get_clipboard() -> Option<String> {
        get_clipboard_string().ok()
    }
    /// Registers a global hotkey; `handler` is invoked with `id` on each press.
    pub fn register_hotkey( id : i32, key : Key, modifiers : Modifier, handler : BindHandler) {
        Self::do_it(WindowsApiEvent::HotkeyRegister { id, handler, vk : key.v(), modifiers : modifiers.v()} )
    }
    /// Returns a proxy to the singleton worker thread, spawning it on the
    /// first call. The worker owns a hidden window and a Win32 message loop,
    /// and services hotkey/clipboard requests sent through the channel.
    fn init() -> HotkeyProxy {
        let context = &mut (*HOTKEY_DAYA.lock().unwrap());
        if context.is_none() {
            let ( tx_tid, rx_tid) = mpsc::channel();
            let (tx, rx) = mpsc::channel();
            let thread_handle = Some(::std::thread::spawn(move || {
                let win_thread_id = unsafe { winapi::um::processthreadsapi::GetCurrentThreadId() } as usize;
                if win_thread_id == 0 {
                    panic!("win_thread_id == 0?");
                }
                // Hand the Win32 thread id back so the parent (and proxies)
                // can post wake-up messages at this thread.
                tx_tid.send(win_thread_id).expect("tid send failure");
                let mut handlers : HashMap<i32, BindHandler> = HashMap::new();
                let mut clipboard_handlers : Vec<ClipboardHandler> = vec![];
                // Text we last wrote ourselves — used to avoid notifying
                // listeners about our own clipboard writes.
                let mut last_set_clipboard = String::new();
                // A (hidden) window is needed to receive WM_CLIPBOARDUPDATE
                // via AddClipboardFormatListener below.
                let hwnd = unsafe {
                    let class_name = to_wstring("meh_window");
                    let wnd_class = winapi::um::winuser::WNDCLASSW {
                        style : winapi::um::winuser::CS_OWNDC, // Style
                        lpfnWndProc : Some(winapi::um::winuser::DefWindowProcW),
                        hInstance : winapi::um::libloaderapi::GetModuleHandleW( ::std::ptr::null_mut() ),
                        lpszClassName : class_name.as_ptr(),
                        cbClsExtra : 0,
                        cbWndExtra : 0,
                        hIcon: ::std::ptr::null_mut(),
                        hCursor: ::std::ptr::null_mut(),
                        hbrBackground: ::std::ptr::null_mut(),
                        lpszMenuName: ::std::ptr::null_mut(),
                    };
                    // We have to register this class for Windows to use
                    winapi::um::winuser::RegisterClassW( &wnd_class );
                    let window_name = to_wstring("pusz temporary workaround to receive clipboard.");
                    let hwnd = winapi::um::winuser::CreateWindowExW(
                        0,
                        class_name.as_ptr(),
                        window_name.as_ptr(),
                        winapi::um::winuser::WS_VISIBLE,
                        0,
                        0,
                        0,
                        0,
                        ::std::ptr::null_mut(),
                        ::std::ptr::null_mut(),
                        ::std::ptr::null_mut(),
                        ::std::ptr::null_mut());
                    winapi::um::winuser::ShowWindow(hwnd, winapi::um::winuser::SW_HIDE);
                    hwnd
                };
                // NOTE(review): win_pid appears unused below.
                let win_pid = unsafe { winapi::um::processthreadsapi::GetCurrentProcessId() } ;
                loop {
                    // Block on the message queue; proxies post message 30000
                    // (handled as Nothing) purely to break out of this wait.
                    match get_single_message() {
                        ReceivedMessage::Hotkey { id } => {
                            if let Some(handler) = handlers.get(&id) {
                                handler(id);
                            }
                        },
                        ReceivedMessage::Nothing => {},
                        ReceivedMessage::ClipboardUpdate => {
                            if let Ok(text) = get_clipboard_string() {
                                // Skip notifications caused by our own writes.
                                if text != last_set_clipboard {
                                    for listener in &clipboard_handlers {
                                        listener(text.clone());
                                    }
                                }
                            }
                            // NOTE(review): the guard is cleared after every
                            // update, so it only suppresses the first event
                            // following our own write — confirm intended.
                            last_set_clipboard.clear();
                        }
                    }
                    // Drain at most one pending request per processed message.
                    if let Ok(request) = rx.try_recv() {
                        match request {
                            WindowsApiEvent::HotkeyRegister { id, modifiers, vk, handler } => {
                                handlers.insert(id, handler);
                                unsafe {
                                    winapi::um::winuser::RegisterHotKey(
                                        0 as winapi::shared::windef::HWND,
                                        id,
                                        modifiers, vk
                                    );
                                }
                            },
                            WindowsApiEvent::AddClipboardListener { handler } => {
                                // Subscribe to clipboard updates lazily, on
                                // the first listener only.
                                if clipboard_handlers.is_empty() {
                                    last_set_clipboard = get_clipboard_string().unwrap_or_default();
                                    unsafe { winapi::um::winuser::AddClipboardFormatListener(hwnd) };
                                }
                                clipboard_handlers.push(handler);
                            }
                            WindowsApiEvent::SetClipboard { text } => {
                                last_set_clipboard = text.clone();
                                let _ = set_clipboard_string(&text);
                            }
                        }
                    }
                }
            }));
            let thread_id = rx_tid.recv().expect("failed to recv thread_handle");
            *context = Some(HotkeyData {
                thread_id,
                thread_handle,
                tx : Some(tx),
            });
        }
        let context = context.as_ref().unwrap();
        HotkeyProxy {
            thread_id : context.thread_id,
            tx : context.tx.clone().unwrap(),
        }
    }
}
// Hotkey modifier bit flags; the values are passed straight to
// RegisterHotKey as its fsModifiers argument (see init above).
#[derive(Clone, Copy, PartialEq, Debug)]
#[allow(unused)]
pub enum Modifier {
    None = 0,
    Alt = 1,
    Ctrl = 2,
    Shift = 4,
    Win = 8,
}
impl Modifier {
    // Raw flag value for the Win32 call.
    pub fn v(self) -> u32 {
        self as u32
    }
}
// Keys usable as hotkeys; discriminants are Win32 virtual-key codes
// (VK_* constants, or the ASCII uppercase letter codes for A-Z), passed to
// RegisterHotKey as the vk argument.
#[derive(Clone, Copy, PartialEq, Debug)]
#[allow(unused)]
pub enum Key {
    Return = winapi::um::winuser::VK_RETURN as isize,
    Control = winapi::um::winuser::VK_CONTROL as isize,
    Alt = winapi::um::winuser::VK_MENU as isize,
    Shift = winapi::um::winuser::VK_SHIFT as isize,
    F1 = winapi::um::winuser::VK_F1 as isize,
    A = 'A' as isize,
    B = 'B' as isize,
    C = 'C' as isize,
    D = 'D' as isize,
    E = 'E' as isize,
    F = 'F' as isize,
    G = 'G' as isize,
    H = 'H' as isize,
    I = 'I' as isize,
    J = 'J' as isize,
    K = 'K' as isize,
    L = 'L' as isize,
    M = 'M' as isize,
    N = 'N' as isize,
    O = 'O' as isize,
    P = 'P' as isize,
    Q = 'Q' as isize,
    R = 'R' as isize,
    S = 'S' as isize,
    T = 'T' as isize,
    U = 'U' as isize,
    V = 'V' as isize,
    W = 'W' as isize,
    X = 'X' as isize,
    Y = 'Y' as isize,
    Z = 'Z' as isize,
}
impl Key {
    // Raw virtual-key code for the Win32 call.
    pub fn v(self) -> u32 {
        self as u32
    }
}
lazy_static! {
    // Global singleton guarding the worker-thread state; stays `None` until
    // `HotkeyData::init` spawns the thread. NOTE(review): the name looks like
    // a typo of HOTKEY_DATA, but renaming it touches every user.
    static ref HOTKEY_DAYA: Mutex<Option<HotkeyData>> = {
        Mutex::new(None)
    };
}
use std::result;
// Errors produced while building or evaluating expressions.
#[derive(Debug)]
pub enum Error {
    // Wrapped serde_json error (see the From impl below).
    JsonError(serde_json::Error),
    // An expression node was expected to be a JSON array but was not.
    ExprIsNotArrayError,
    // The operator position of an expression was not a string.
    ExprOpIsNotStringError,
    ExprBuildError,
    // The named operator does not exist.
    NoSuchOpError,
    // The evaluation context must be a JSON object/dict.
    ContextNotDictError,
    // A variable referenced by the expression is missing from the context.
    ContextNoSuchVarError,
    ExprVarArgNotStringError,
    // The overall expression must evaluate to a boolean.
    FinalResultNotBoolError,
    // MatchError,
}
// Convenience alias used throughout this module.
pub type Result<T> = result::Result<T, Error>;
impl From<serde_json::Error> for Error {
    /// Enables `?` on serde_json results by wrapping the error.
    fn from(err: serde_json::Error) -> Error {
        Self::JsonError(err)
    }
}
|
use crate::QueueStoredAccessPolicy;
use azure_core::headers::CommonStorageResponseHeaders;
use azure_core::PermissionError;
use azure_storage::StoredAccessPolicyList;
use bytes::Bytes;
use http::response::Response;
use std::convert::TryInto;
// Parsed response of the get-queue-ACL operation.
#[derive(Debug, Clone)]
pub struct GetQueueACLResponse {
    // Common headers extracted from the storage response.
    pub common_storage_response_headers: CommonStorageResponseHeaders,
    // Stored access policies parsed from the XML response body.
    pub stored_access_policies: Vec<QueueStoredAccessPolicy>,
}
impl std::convert::TryFrom<&Response<Bytes>> for GetQueueACLResponse {
    type Error = crate::Error;
    /// Parses the XML body into stored access policies and converts the
    /// response headers; either step can fail.
    fn try_from(response: &Response<Bytes>) -> Result<Self, Self::Error> {
        debug!("headers == {:?}", response.headers());
        let stored_access_policies = StoredAccessPolicyList::from_xml(response.body())?
            .stored_access
            .into_iter()
            .map(|sap| sap.try_into())
            .collect::<Result<Vec<QueueStoredAccessPolicy>, PermissionError>>()?;
        Ok(GetQueueACLResponse {
            common_storage_response_headers: response.headers().try_into()?,
            stored_access_policies,
        })
    }
}
|
use failure::Error;
use futures::future;
use futures::prelude::*;
use reqwest::header::{Authorization, Headers};
use reqwest::unstable::async::Client;
use slog::Logger;
use std::env;
use std::time::Duration;
use url::Url;
use Config;
use github::PullRequest;
// HTTP client bound to a Todoist API base URL.
#[derive(Clone)]
pub struct TodoistClient {
    http: Client,
    logger: Logger,
    // Base URL the task endpoint path is joined onto.
    host: Url,
}
// Request body for the Todoist "create task" endpoint.
#[derive(Serialize)]
struct NewTask {
    content: String,
    due_string: String,
}
impl TodoistClient {
    /// Builds a client authenticated via the TODOIST_TOKEN environment
    /// variable, with a 30 second request timeout.
    pub fn new(config: &Config) -> Result<TodoistClient, Error> {
        let todoist_token = env::var("TODOIST_TOKEN")?;
        let http = Client::builder()
            .default_headers(default_headers(todoist_token))
            .timeout(Duration::from_secs(30))
            .build(&config.core.handle())?;
        Ok(TodoistClient {
            http,
            host: config.todoist_base.clone(),
            logger: config.logger.clone(),
        })
    }
    /// Creates a Todoist task pointing at the given pull request; non-success
    /// responses and transport failures are logged and surfaced as errors.
    pub fn create_task_for_pr(&self, pr: &PullRequest) -> impl Future<Item = (), Error = Error> {
        let task = NewTask::for_pull_request(pr);
        let task_url = self.host.join("API/v8/tasks").unwrap();
        let logger = self.logger.clone();
        self.http
            .post(task_url)
            .json(&task)
            .send()
            .then(move |response| match response {
                Ok(ok_response) if ok_response.status().is_success() => future::ok(()),
                Ok(ok_response) => {
                    error!(logger, "Error while creating todoist task"; "response" => ?ok_response);
                    future::err(format_err!(
                        "Error while creating todoist task. response: {:?}",
                        ok_response
                    ))
                }
                Err(err) => {
                    let err = Error::from(err);
                    error!(logger, "Error while creating todoist task"; "error" => %err);
                    future::err(err)
                }
            })
    }
}
impl NewTask {
    /// Builds a task due "today" whose content links the PR and names it.
    fn for_pull_request(pr: &PullRequest) -> NewTask {
        NewTask {
            content: format!(
                "{url} ({project}#{number}: {title})",
                url = pr.html_url,
                project = pr.repo(),
                number = pr.number,
                title = pr.title
            ),
            due_string: "today".to_string(),
        }
    }
}
/// Builds the default header set: a bearer-token Authorization header.
fn default_headers(todoist_token: String) -> Headers {
    let auth_header = Authorization(format!("Bearer {}", todoist_token));
    let mut headers = Headers::new();
    headers.set(auth_header);
    headers
}
|
// NOTE(review): svd2rust-style generated register accessors — normally
// regenerated from the SVD file rather than edited by hand.
#[doc = "Reader of register CH3_CTR"]
pub type R = crate::R<u32, super::CH3_CTR>;
#[doc = "Writer for register CH3_CTR"]
pub type W = crate::W<u32, super::CH3_CTR>;
#[doc = "Register CH3_CTR `reset()`'s with value 0"]
impl crate::ResetValue for super::CH3_CTR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `CH3_CTR`"]
pub type CH3_CTR_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `CH3_CTR`"]
pub struct CH3_CTR_W<'a> {
    w: &'a mut W,
}
impl<'a> CH3_CTR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear the low 16 bits, then insert the new field value.
        self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:15"]
    #[inline(always)]
    pub fn ch3_ctr(&self) -> CH3_CTR_R {
        CH3_CTR_R::new((self.bits & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15"]
    #[inline(always)]
    pub fn ch3_ctr(&mut self) -> CH3_CTR_W {
        CH3_CTR_W { w: self }
    }
}
|
use url;
use hyper;
use uritemplate::UriTemplate;
use super::dtos::enums;
use errors::*;
/// Base URL of the Bungie platform API.
fn root() -> Result<url::Url> {
    let base: url::Url = "https://www.bungie.net/Platform/".parse()?;
    Ok(base)
}
/// Joins `path` onto the API root and converts the result to a hyper Uri.
fn build_url(path: &str) -> Result<hyper::Uri> {
    let absolute = root()?.join(path)?;
    Ok(absolute.as_str().parse()?)
}
/// URL of the Destiny 2 manifest endpoint.
pub fn get_manifest() -> Result<hyper::Uri> {
    build_url("./Destiny2/Manifest/")
}
/// URL of the memberships endpoint for the currently authenticated user.
pub fn get_membership_data_for_current_user() -> Result<hyper::Uri> {
    build_url("./User/GetMembershipsForCurrentUser/")
}
/// URL of the profile endpoint for a membership, expanded to the requested
/// component types via the URI template.
pub fn get_profile(m_type: super::dtos::enums::BungieMemberType,
                   dmid: i64,
                   components: &[enums::ComponentType])
                   -> Result<hyper::Uri> {
    let path =
        UriTemplate::new("./Destiny2/{membershipType}/Profile/{destinyMembershipId}/{?components}")
            .set("membershipType", m_type)
            .set("destinyMembershipId", dmid.to_string())
            .set("components", enums::component_list(components))
            .build();
    build_url(&path)
}
/// URL of a single item instance on a profile, expanded to the requested
/// component types via the URI template.
pub fn get_item(m_type: super::dtos::enums::BungieMemberType,
                dmid: &str,
                instance_id: &str,
                components: &[enums::ComponentType])
                -> Result<hyper::Uri> {
    let path =
        UriTemplate::new("./Destiny2/{membershipType}/Profile/{destinyMembershipId}/Item/{itemInstanceId}/{?components}")
            .set("membershipType", m_type)
            .set("destinyMembershipId", dmid)
            .set("itemInstanceId", instance_id)
            .set("components", enums::component_list(components))
            .build();
    build_url(&path)
}
|
use std::io::{stdout, Write};
use azuki_opt::{
branching_simplify::BranchingSimplify, const_folding::ConstFolding,
dead_code_eliminator::DeadCodeEliminator,
};
use azuki_syntax::{lexer::lexer, parse};
use azuki_tac::optimizer::sanity_checker::SanityChecker;
use azuki_tacvm::Vm;
use clap::Clap;
use opt::Action;
use tracing::{info, trace, warn};
use tracing_subscriber::fmt::format::FmtSpan;
mod opt;
/// Azuki driver: lex, parse, compile to IR, run optimization passes, then
/// either print the IR or execute it in the VM, per the chosen action.
fn main() {
    let opt = opt::Opt::parse();
    tracing_subscriber::FmtSubscriber::builder()
        .with_target(true)
        .with_max_level(opt.log_level)
        .with_writer(std::io::stderr)
        .without_time()
        .init();
    let file = opt.file;
    let input = std::fs::read_to_string(file).expect("Unable to read input file");
    // Fix: `create(true)` so a missing output file is created instead of
    // failing to open, and `truncate(true)` so a shorter run doesn't leave
    // stale bytes from a previous, longer output.
    let mut output: Box<dyn Write> = match opt.out_file {
        Some(file) => Box::new(
            std::fs::OpenOptions::new()
                .write(true)
                .create(true)
                .truncate(true)
                .open(file)
                .expect("Failed to open output file"),
        ),
        None => Box::new(stdout()),
    };
    if opt.action == Action::Lex {
        // lex file
        let lexer = lexer(&input);
        lexer.for_each(|token| {
            writeln!(output, "{}", token).expect("Failed to write to output file")
        });
        return;
    }
    let program = match parse(&input) {
        Ok(p) => p,
        Err(e) => {
            // TODO: Error display
            println!("{:?}", e);
            return;
        }
    };
    if opt.action == Action::Parse {
        // TODO: output parse result
        return;
    }
    info!("Generating IR");
    let mut program = match azuki_tacgen::compile(&program) {
        Ok(p) => p,
        Err(e) => {
            eprintln!("{:?}", e);
            return;
        }
    };
    // Register all known passes; which ones actually run is decided below.
    let mut pipeline = azuki_tac::optimizer::Pipeline::new();
    pipeline.add_func_optimizer(SanityChecker::default());
    pipeline.add_func_optimizer(DeadCodeEliminator::default());
    pipeline.add_func_optimizer(BranchingSimplify);
    pipeline.add_func_optimizer(ConstFolding::new());
    let optimizations = opt
        .optimization
        .clone()
        .unwrap_or_else(|| default_opts().iter().map(|x| x.to_string()).collect());
    for optimization in optimizations {
        info!("Running pass `{}`", optimization);
        if !pipeline.run_pass(&mut program, &optimization) {
            warn!("Cannot find pass `{}`", optimization);
        }
    }
    if opt.action == Action::Compile {
        info!("Writing IR into desired output");
        let func_list = program
            .functions
            .drain()
            .map(|(_, x)| x)
            .collect::<Vec<_>>();
        func_list.iter().for_each(|function| {
            writeln!(output, "{}", function).expect("Failed to write to output file");
            writeln!(output).unwrap();
        });
    } else if opt.action == Action::Run {
        info!("Running program in VM");
        let mut vm = Vm::new(&program);
        let entry = opt.entry_point.as_deref().unwrap_or("main");
        let params = opt.params.clone();
        let res = vm.run_func(entry, params);
        if let Some(res) = res {
            println!("{}", res);
        }
    }
}
/// Default optimization pipeline; simplify/DCE run twice because each pass
/// can expose new opportunities for the other.
fn default_opts() -> &'static [&'static str] {
    const PASSES: &[&str] = &[
        "sanity-check",
        "const-folding",
        "branching-simplify",
        "dead-code-eliminator",
        "branching-simplify",
        "dead-code-eliminator",
    ];
    PASSES
}
|
use std::io::Read;
use std::net::Ipv4Addr;
use byteorder::{BigEndian, ReadBytesExt};
use bytes::BufMut;
use snafu::ResultExt;
use crate::error::*;
// Serialize a value into its kbin byte representation.
pub trait IntoKbinBytes {
    fn write_kbin_bytes<B: BufMut>(self, buf: &mut B);
}
// Deserialize a value from a kbin byte stream.
pub trait FromKbinBytes: Sized {
    fn from_kbin_bytes<R: Read>(input: &mut R) -> Result<Self>;
}
// Single-byte types map directly onto one buffer byte.
impl IntoKbinBytes for i8 {
    fn write_kbin_bytes<B: BufMut>(self, buf: &mut B) {
        buf.put_i8(self);
    }
}
// Read failures are surfaced via the DataConvertSnafu context.
impl FromKbinBytes for i8 {
    fn from_kbin_bytes<R: Read>(input: &mut R) -> Result<Self> {
        input.read_i8().context(DataConvertSnafu)
    }
}
impl IntoKbinBytes for u8 {
    fn write_kbin_bytes<B: BufMut>(self, buf: &mut B) {
        buf.put_u8(self);
    }
}
impl FromKbinBytes for u8 {
    fn from_kbin_bytes<R: Read>(input: &mut R) -> Result<Self> {
        input.read_u8().context(DataConvertSnafu)
    }
}
impl IntoKbinBytes for bool {
    /// Encodes `true` as `0x01` and `false` as `0x00`.
    fn write_kbin_bytes<B: BufMut>(self, buf: &mut B) {
        // `u8::from(bool)` yields exactly 1 / 0, matching the explicit
        // `if self { 0x01 } else { 0x00 }` this replaces.
        buf.put_u8(u8::from(self))
    }
}
impl FromKbinBytes for bool {
    fn from_kbin_bytes<R: Read>(input: &mut R) -> Result<Self> {
        // Only 0x00/0x01 are valid boolean encodings; any other byte is
        // rejected rather than coerced, preserving strict round-tripping.
        match u8::from_kbin_bytes(input)? {
            0x00 => Ok(false),
            0x01 => Ok(true),
            input => Err(KbinError::InvalidBooleanInput { input }),
        }
    }
}
// Raw byte slices are copied through verbatim (no length prefix is written;
// the caller is responsible for framing).
impl<'a> IntoKbinBytes for &'a [u8] {
    fn write_kbin_bytes<B: BufMut>(self, buf: &mut B) {
        buf.put(self);
    }
}
impl IntoKbinBytes for Ipv4Addr {
    /// Writes the address as its four octets in network order.
    fn write_kbin_bytes<B: BufMut>(self, buf: &mut B) {
        // `octets()` already yields the bytes in the order we emit them.
        buf.put(&self.octets()[..])
    }
}
impl FromKbinBytes for Ipv4Addr {
    fn from_kbin_bytes<R: Read>(input: &mut R) -> Result<Self> {
        // Four octets, read in network order, mirror `write_kbin_bytes`.
        let mut octets = [0; 4];
        input.read_exact(&mut octets).context(DataConvertSnafu)?;
        Ok(Ipv4Addr::from(octets))
    }
}
// Generates symmetric read/write impls for multi-byte numeric types.
// The read side is explicitly `BigEndian`; the `BufMut::put_*` methods on
// the write side are big-endian as well, so the two stay in sync.
macro_rules! multibyte_impl {
    (
        $(($type:ty, $write_method:ident, $read_method:ident)),*$(,)?
    ) => {
        $(
            impl IntoKbinBytes for $type {
                fn write_kbin_bytes<B: BufMut>(self, buf: &mut B) {
                    buf.$write_method(self);
                }
            }
            impl FromKbinBytes for $type {
                fn from_kbin_bytes<R: Read>(input: &mut R) -> Result<Self> {
                    input.$read_method::<BigEndian>().context(DataConvertSnafu)
                }
            }
        )*
    };
}
// Generates fixed-length array ("tuple") impls.
// - i8/u8/bool arrays are written element-by-element (single-byte values);
//   u8 arrays get a fast-path bulk copy on both read and write.
// - `multi` entries read/write big-endian multi-byte elements, using
//   byteorder's bulk `read_*_into` helpers on the read side.
macro_rules! tuple_impl {
    (
        i8: [$($i8_count:expr),*],
        u8: [$($u8_count:expr),*],
        bool: [$($bool_count:expr),*],
        multi: [
            $([$type:ty ; $($count:expr),*] => ($write_method:ident, $read_method:ident)),*$(,)?
        ]
    ) => {
        $(
            impl<'a> IntoKbinBytes for &'a [i8; $i8_count] {
                fn write_kbin_bytes<B: BufMut>(self, buf: &mut B) {
                    for value in self.into_iter() {
                        buf.put_i8(*value);
                    }
                }
            }
            impl FromKbinBytes for [i8; $i8_count] {
                fn from_kbin_bytes<R: Read>(input: &mut R) -> Result<Self> {
                    let mut values = Self::default();
                    input.read_i8_into(&mut values).context(DataConvertSnafu)?;
                    Ok(values)
                }
            }
        )*
        $(
            impl<'a> IntoKbinBytes for &'a [u8; $u8_count] {
                fn write_kbin_bytes<B: BufMut>(self, buf: &mut B) {
                    // Bulk copy; no per-element handling needed for bytes.
                    buf.put_slice(&self[..]);
                }
            }
            impl FromKbinBytes for [u8; $u8_count] {
                fn from_kbin_bytes<R: Read>(input: &mut R) -> Result<Self> {
                    let mut values = Self::default();
                    input.read_exact(&mut values).context(DataConvertSnafu)?;
                    Ok(values)
                }
            }
        )*
        $(
            impl<'a> IntoKbinBytes for &'a [bool; $bool_count] {
                fn write_kbin_bytes<B: BufMut>(self, buf: &mut B) {
                    for value in self.into_iter() {
                        // Delegates so the 0x00/0x01 encoding stays in one place.
                        value.write_kbin_bytes(buf);
                    }
                }
            }
            impl FromKbinBytes for [bool; $bool_count] {
                fn from_kbin_bytes<R: Read>(input: &mut R) -> Result<Self> {
                    let mut values = Self::default();
                    for i in 0..$bool_count {
                        values[i] = bool::from_kbin_bytes(input)?;
                    }
                    Ok(values)
                }
            }
        )*
        $(
            $(
                impl<'a> IntoKbinBytes for &'a [$type; $count] {
                    fn write_kbin_bytes<B: BufMut>(self, buf: &mut B) {
                        for value in self.into_iter() {
                            buf.$write_method(*value);
                        }
                    }
                }
                impl FromKbinBytes for [$type; $count] {
                    fn from_kbin_bytes<R: Read>(input: &mut R) -> Result<Self> {
                        let mut values = Self::default();
                        input.$read_method::<BigEndian>(&mut values).context(DataConvertSnafu)?;
                        Ok(values)
                    }
                }
            )*
        )*
    };
}
// Big-endian wire impls for every fixed-width numeric scalar.
multibyte_impl! {
    (i16, put_i16, read_i16),
    (u16, put_u16, read_u16),
    (i32, put_i32, read_i32),
    (u32, put_u32, read_u32),
    (i64, put_i64, read_i64),
    (u64, put_u64, read_u64),
    (f32, put_f32, read_f32),
    (f64, put_f64, read_f64),
}
// Array lengths supported by the kbin format for each element type.
tuple_impl! {
    i8: [2, 3, 4, 16],
    u8: [2, 3, 4, 16],
    bool: [2, 3, 4, 16],
    multi: [
        [i16; 2, 3, 4, 8] => (put_i16, read_i16_into),
        [u16; 2, 3, 4, 8] => (put_u16, read_u16_into),
        [i32; 2, 3, 4] => (put_i32, read_i32_into),
        [u32; 2, 3, 4] => (put_u32, read_u32_into),
        [i64; 2, 3, 4] => (put_i64, read_i64_into),
        [u64; 2, 3, 4] => (put_u64, read_u64_into),
        [f32; 2, 3, 4] => (put_f32, read_f32_into),
        [f64; 2, 3, 4] => (put_f64, read_f64_into),
    ]
}
|
// svd2rust-generated reader/writer type aliases for the CAN receive-FIFO
// register RF%sR. Exact form is generator output; avoid manual reshaping.
#[doc = "Register `RF%sR` reader"]
pub type R = crate::R<RFR_SPEC>;
#[doc = "Register `RF%sR` writer"]
pub type W = crate::W<RFR_SPEC>;
#[doc = "Field `FMP` reader - FMP0"]
pub type FMP_R = crate::FieldReader;
#[doc = "Field `FULL` reader - FULL0"]
pub type FULL_R = crate::BitReader<FULL0R_A>;
// Read-side enumeration for the FULL bit (FIFO-full flag).
#[doc = "FULL0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FULL0R_A {
    #[doc = "0: FIFO x is not full"]
    NotFull = 0,
    #[doc = "1: FIFO x is full"]
    Full = 1,
}
// Lets the enum collapse back to the raw bit value.
impl From<FULL0R_A> for bool {
    #[inline(always)]
    fn from(variant: FULL0R_A) -> Self {
        variant as u8 != 0
    }
}
// Reader convenience methods for the FULL bit.
impl FULL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> FULL0R_A {
        match self.bits {
            false => FULL0R_A::NotFull,
            true => FULL0R_A::Full,
        }
    }
    #[doc = "FIFO x is not full"]
    #[inline(always)]
    pub fn is_not_full(&self) -> bool {
        *self == FULL0R_A::NotFull
    }
    #[doc = "FIFO x is full"]
    #[inline(always)]
    pub fn is_full(&self) -> bool {
        *self == FULL0R_A::Full
    }
}
// Write-side enumeration for FULL: per the doc string, writing 1 clears
// the flag (write-one-to-clear semantics).
#[doc = "FULL0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FULL0W_AW {
    #[doc = "1: Clear flag"]
    Clear = 1,
}
impl From<FULL0W_AW> for bool {
    #[inline(always)]
    fn from(variant: FULL0W_AW) -> Self {
        variant as u8 != 0
    }
}
// Writer proxy for the FULL bit at bit offset `O`.
#[doc = "Field `FULL` writer - FULL0"]
pub type FULL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, FULL0W_AW>;
impl<'a, REG, const O: u8> FULL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(FULL0W_AW::Clear)
    }
}
// Read-side types for the FOVR bit (FIFO-overrun flag).
#[doc = "Field `FOVR` reader - FOVR0"]
pub type FOVR_R = crate::BitReader<FOVR0R_A>;
#[doc = "FOVR0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FOVR0R_A {
    #[doc = "0: No FIFO x overrun"]
    NoOverrun = 0,
    #[doc = "1: FIFO x overrun"]
    Overrun = 1,
}
impl From<FOVR0R_A> for bool {
    #[inline(always)]
    fn from(variant: FOVR0R_A) -> Self {
        variant as u8 != 0
    }
}
// Reader convenience methods for the FOVR bit.
impl FOVR_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> FOVR0R_A {
        match self.bits {
            false => FOVR0R_A::NoOverrun,
            true => FOVR0R_A::Overrun,
        }
    }
    #[doc = "No FIFO x overrun"]
    #[inline(always)]
    pub fn is_no_overrun(&self) -> bool {
        *self == FOVR0R_A::NoOverrun
    }
    #[doc = "FIFO x overrun"]
    #[inline(always)]
    pub fn is_overrun(&self) -> bool {
        *self == FOVR0R_A::Overrun
    }
}
// Write-side enumeration for FOVR: writing 1 clears the flag.
#[doc = "FOVR0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FOVR0W_AW {
    #[doc = "1: Clear flag"]
    Clear = 1,
}
impl From<FOVR0W_AW> for bool {
    #[inline(always)]
    fn from(variant: FOVR0W_AW) -> Self {
        variant as u8 != 0
    }
}
// Writer proxy for the FOVR bit at bit offset `O`.
#[doc = "Field `FOVR` writer - FOVR0"]
pub type FOVR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, FOVR0W_AW>;
impl<'a, REG, const O: u8> FOVR_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(FOVR0W_AW::Clear)
    }
}
// RFOM (release output mailbox) shares one enum for read and write:
// only the "Release" action (value 1) is meaningful.
#[doc = "Field `RFOM` reader - RFOM0"]
pub type RFOM_R = crate::BitReader<RFOM0W_A>;
#[doc = "RFOM0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RFOM0W_A {
    #[doc = "1: Set by software to release the output mailbox of the FIFO"]
    Release = 1,
}
impl From<RFOM0W_A> for bool {
    #[inline(always)]
    fn from(variant: RFOM0W_A) -> Self {
        variant as u8 != 0
    }
}
// Reader convenience methods; `variant` is `Option` because a read of 0
// has no corresponding enum value.
impl RFOM_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<RFOM0W_A> {
        match self.bits {
            true => Some(RFOM0W_A::Release),
            _ => None,
        }
    }
    #[doc = "Set by software to release the output mailbox of the FIFO"]
    #[inline(always)]
    pub fn is_release(&self) -> bool {
        *self == RFOM0W_A::Release
    }
}
// Writer proxy for the RFOM bit at bit offset `O`.
#[doc = "Field `RFOM` writer - RFOM0"]
pub type RFOM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RFOM0W_A>;
impl<'a, REG, const O: u8> RFOM_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Set by software to release the output mailbox of the FIFO"]
    #[inline(always)]
    pub fn release(self) -> &'a mut crate::W<REG> {
        self.variant(RFOM0W_A::Release)
    }
}
// Field accessors for reads of RF%sR (bit positions per the doc strings).
impl R {
    #[doc = "Bits 0:1 - FMP0"]
    #[inline(always)]
    pub fn fmp(&self) -> FMP_R {
        FMP_R::new((self.bits & 3) as u8)
    }
    #[doc = "Bit 3 - FULL0"]
    #[inline(always)]
    pub fn full(&self) -> FULL_R {
        FULL_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - FOVR0"]
    #[inline(always)]
    pub fn fovr(&self) -> FOVR_R {
        FOVR_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - RFOM0"]
    #[inline(always)]
    pub fn rfom(&self) -> RFOM_R {
        RFOM_R::new(((self.bits >> 5) & 1) != 0)
    }
}
// Field accessors for writes of RF%sR; FMP has no writer (read-only field).
impl W {
    #[doc = "Bit 3 - FULL0"]
    #[inline(always)]
    #[must_use]
    pub fn full(&mut self) -> FULL_W<RFR_SPEC, 3> {
        FULL_W::new(self)
    }
    #[doc = "Bit 4 - FOVR0"]
    #[inline(always)]
    #[must_use]
    pub fn fovr(&mut self) -> FOVR_W<RFR_SPEC, 4> {
        FOVR_W::new(self)
    }
    #[doc = "Bit 5 - RFOM0"]
    #[inline(always)]
    #[must_use]
    pub fn rfom(&mut self) -> RFOM_W<RFR_SPEC, 5> {
        RFOM_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Marker type describing the register: 32-bit, readable, writable, resets to 0.
#[doc = "CAN_RF%sR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rfr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rfr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RFR_SPEC;
impl crate::RegisterSpec for RFR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rfr::R`](R) reader structure"]
impl crate::Readable for RFR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rfr::W`](W) writer structure"]
impl crate::Writable for RFR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RF%sR to value 0"]
impl crate::Resettable for RFR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use itertools::Itertools;
// Entry point wrapped by the `aoc` attribute macro for day 10; the macro
// supplies the puzzle input string.
#[aoc::main(10)]
pub fn main(input: &str) -> (i32, String) {
    solve(input)
}
// Sample-input harness wrapped by the `aoc` attribute macro; part 1 is
// stringified so both halves compare as text against expected answers.
#[aoc::test(10)]
pub fn test(input: &str) -> (String, String) {
    let res = solve(input);
    (res.0.to_string(), res.1)
}
/// Runs both parts on the same input and pairs up the results.
fn solve(input: &str) -> (i32, String) {
    (part1(input), part2(input))
}
/// Sums the "signal strength" (clock * register) sampled at clock cycles
/// 20, 60, 100, ... (every 40 cycles starting at 20).
///
/// CPU model: `noop` takes 1 cycle; `addx V` takes 2 cycles and adds `V`
/// to the register at the *end* of its second cycle.
fn part1(input: &str) -> i32 {
    let mut register = 1;
    let mut clock = 1;
    let mut cycles_spent_processing = 0;
    let mut cursor = 0;
    let mut signal_strength = 0;
    // Pre-split every instruction line into whitespace-separated tokens
    // (stdlib collect replaces the previous itertools `collect_vec`).
    let instructions: Vec<Vec<&str>> = input
        .lines()
        .map(|line| line.split_whitespace().collect())
        .collect();
    while cursor < instructions.len() {
        // Borrow instead of cloning the token vector every cycle.
        let instruction = &instructions[cursor];
        // Sample during cycles 20, 60, 100, ... (negative clocks before 20
        // give a non-zero remainder, so no early sample is taken).
        if (clock - 20) % 40 == 0 {
            signal_strength += clock * register;
        }
        match instruction[0] {
            "noop" => cursor += 1,
            "addx" => {
                if cycles_spent_processing == 0 {
                    // First of the two `addx` cycles: just wait.
                    cycles_spent_processing += 1;
                } else {
                    // Second cycle: apply the operand and advance.
                    cycles_spent_processing = 0;
                    register += instruction[1].parse::<i32>().unwrap();
                    cursor += 1;
                }
            }
            _ => panic!("Unhandled operation"),
        }
        clock += 1;
    }
    signal_strength
}
/// Renders the CRT image described by the instruction stream, printing each
/// completed 40-pixel row to stdout, and returns the puzzle answer.
///
/// NOTE(review): the return value is hard-coded — the author read the
/// letters off the printed CRT rows and pinned the answer here; the drawing
/// loop exists only for its printed output.
fn part2(input: &str) -> String {
    let mut register: i32 = 1;
    let mut cycles_spent_processing = 0;
    let mut cursor = 0;
    let mut crt = String::new();
    // Stdlib collect replaces the previous itertools `collect_vec`.
    let instructions: Vec<Vec<&str>> = input
        .lines()
        .map(|line| line.split_whitespace().collect())
        .collect();
    while cursor < instructions.len() {
        // Borrow instead of cloning the token vector every cycle.
        let instruction = &instructions[cursor];
        // A pixel is lit when the 3-pixel-wide sprite (centred on the
        // register value) overlaps the column currently being drawn.
        if register.abs_diff(crt.chars().count() as i32) <= 1 {
            crt += "#";
        } else {
            crt += ".";
        }
        // Flush a full 40-column row and start the next one.
        if crt.chars().count() == 40 {
            println!("{}", crt);
            crt.clear();
        }
        match instruction[0] {
            "noop" => cursor += 1,
            "addx" => {
                if cycles_spent_processing == 0 {
                    cycles_spent_processing += 1;
                } else {
                    cycles_spent_processing = 0;
                    register += instruction[1].parse::<i32>().unwrap();
                    cursor += 1;
                }
            }
            _ => panic!("Unhandled operation"),
        }
    }
    "BZPAJELK".to_string()
}
|
pub mod kinds;
use kinds::{Token, TokenKind};
// Hand-written lexer state. `#[allow(dead_code)]` covers fields that are
// declared ahead of features still being built (e.g. `digit_char`).
#[allow(dead_code)]
pub struct Lexer {
    // source code, pre-split into chars for O(1) positional indexing
    code: Vec<char>,
    // current index into `code`
    position: usize,
    // NOTE(review): `column` is only written by `revert`, never advanced
    // during normal lexing — confirm whether column tracking is unfinished.
    column: usize,
    // current line number (starts at 1)
    line: usize,
    // supported space characters
    space_char: String,
    // supported integer literal characters
    digit_char: String,
    // characters that may appear in an identifier
    identifier_char: String,
    // supported separators
    separator_char: String,
    // NOTE(review): a "supported keywords" field was stubbed here by comment
    // but never added.
}
impl Lexer {
    /// Creates a lexer over `code`, positioned at the start of line 1.
    pub fn new(code: String) -> Self {
        Lexer {
            code: code.chars().collect(),
            position: 0,
            column: 0,
            line: 1,
            space_char: String::from("\t "),
            digit_char: String::from("0123456789"),
            identifier_char: String::from(
                "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-0123456789@.",
            ),
            separator_char: String::from(","),
        }
    }
    // True while the lexer position is still inside the source.
    fn is_bound(&self) -> bool {
        self.position < self.code.len()
    }
    // Returns the character at the current position without consuming it.
    fn peek_char(&self) -> Option<char> {
        if self.is_bound() {
            return Some(self.code[self.position]);
        }
        None
    }
    // Rewinds the lexer to `position`, e.g. after a failed lex attempt.
    // NOTE(review): `column = position` is only correct on the first line of
    // input — confirm the intended column semantics.
    fn revert(&mut self, position: usize) {
        self.position = position;
        self.column = position;
    }
    // Consumes and returns the character at the current position.
    // Callers must check `is_bound` first; indexing panics past the end.
    fn eat_char(&mut self) -> char {
        self.position += 1;
        self.code[self.position - 1]
    }
    // Lexes a run of identifier characters into an Identifier token.
    // Produces an Unknown token (consuming nothing) when the current
    // character is not a valid identifier character.
    fn valid_identifier(&mut self) -> Token {
        let mut kind = TokenKind::Identifier;
        let mut identifier = String::new();
        let mut character = self.peek_char();
        let start_position = self.position;
        while self.is_bound() && self.identifier_char.contains(character.unwrap()) {
            identifier.push(self.eat_char());
            character = self.peek_char();
        }
        if identifier.is_empty() {
            kind = TokenKind::Unknown;
        }
        let end_position = self.position;
        Token::new(kind, start_position, end_position, identifier)
    }
    // Lexes a single- or double-quoted identifier, quotes included.
    // Bug fix: an unterminated quote now consumes to end of input instead of
    // panicking (the old code `unwrap()`ed a `None` peek at EOF, and the
    // single-quote path could fall through into the double-quote check).
    fn quoted_identifier(&mut self) -> Token {
        let mut kind = TokenKind::Identifier;
        let mut identifier = String::new();
        let start_position = self.position;
        if let Some(quote) = self.peek_char() {
            if quote == '\'' || quote == '\"' {
                // opening quote
                identifier.push(self.eat_char());
                // consume until the matching quote or end of input
                while self.is_bound() {
                    let ch = self.eat_char();
                    identifier.push(ch);
                    if ch == quote {
                        break;
                    }
                }
            }
        }
        if identifier.is_empty() {
            kind = TokenKind::Unknown;
        }
        let end_position = self.position;
        Token::new(kind, start_position, end_position, identifier)
    }
    // Lexes a single separator character (see `separator_char`).
    fn valid_separator(&mut self) -> Token {
        let mut separator = String::new();
        let character = self.peek_char();
        let mut kind = TokenKind::Separator;
        let start_position = self.position;
        if self.is_bound() && self.separator_char.contains(character.unwrap()) {
            separator.push(self.eat_char());
        }
        if separator.is_empty() {
            kind = TokenKind::Unknown;
        }
        let end_position = self.position;
        Token::new(kind, start_position, end_position, separator)
    }
    // Lexes a newline token and advances the line counter.
    fn valid_newline(&mut self) -> Token {
        let mut newline = String::new();
        let character = self.peek_char();
        let mut kind = TokenKind::Newline;
        let start_position = self.position;
        if self.is_bound() && character.unwrap() == '\n' {
            newline.push(self.eat_char());
            // Bug fix: the line counter used to be incremented in the
            // *failure* branch below; count the line only when a newline
            // was actually consumed.
            self.line += 1;
        }
        if newline.is_empty() {
            kind = TokenKind::Unknown;
        }
        let end_position = self.position;
        Token::new(kind, start_position, end_position, newline)
    }
    // Lexes a run of whitespace characters (see `space_char`).
    fn valid_space(&mut self) -> Token {
        let mut space = String::new();
        let mut character = self.peek_char();
        let mut kind = TokenKind::Space;
        let start_position = self.position;
        while self.is_bound() && self.space_char.contains(character.unwrap()) {
            space.push(self.eat_char());
            character = self.peek_char();
        }
        if space.is_empty() {
            kind = TokenKind::Unknown;
        }
        let end_position = self.position;
        Token::new(kind, start_position, end_position, space)
    }
    // Tries each sub-lexer in order; if none matches, consumes one character
    // and returns it as an `Err` Unknown token so lexing can continue.
    fn lex_next(&mut self) -> Result<Token, Token> {
        let identifier = self.valid_identifier();
        if identifier.kind != TokenKind::Unknown {
            return Ok(identifier);
        }
        let separator = self.valid_separator();
        if separator.kind != TokenKind::Unknown {
            return Ok(separator);
        }
        let space = self.valid_space();
        if space.kind != TokenKind::Unknown {
            return Ok(space);
        }
        let quoted = self.quoted_identifier();
        if quoted.kind != TokenKind::Unknown {
            return Ok(quoted);
        }
        let newline = self.valid_newline();
        if newline.kind != TokenKind::Unknown {
            return Ok(newline);
        }
        Err(Token::new(
            TokenKind::Unknown,
            self.position,
            self.position + 1,
            self.eat_char().to_string(),
        ))
    }
    /// Lexes the whole input, collecting each token (`Err` wraps characters
    /// that matched no token class).
    pub fn lex(&mut self) -> Vec<Result<Token, Token>> {
        let mut tokens = vec![];
        while self.is_bound() {
            tokens.push(self.lex_next());
        }
        tokens
    }
}
|
use super::{Vars, VarsError};
use std::io::{self, Write};
mod expr;
use expr::{translate_expr};
pub use expr::{ExprError, ExprInternalError};
/// Errors that can occur while translating a template stream.
pub enum TranslateError {
    // reading a character from the input failed
    Input(io::Error),
    // writing to the output failed
    Output(io::Error),
    // an embedded `${…}` expression was invalid
    Expr(ExprError),
    // variable lookup/processing failed
    Vars(VarsError),
}
impl From<VarsError> for TranslateError {
    /// Wraps a variable-lookup failure for propagation with `?`.
    fn from(err: VarsError) -> TranslateError {
        TranslateError::Vars(err)
    }
}
impl From<ExprError> for TranslateError {
    /// Wraps an expression-translation failure for propagation with `?`.
    fn from(err: ExprError) -> TranslateError {
        TranslateError::Expr(err)
    }
}
/// Streams `input_chars` to `output`, expanding `${…}` expressions through
/// `vars` and honouring `\$` / `\\` escapes.
///
/// Escape rules: `\$` emits a literal `$`; `\\` emits an escaped `\\`.
/// NOTE(review): a backslash followed by any other character drops both
/// characters, and a `$` not followed by `{` drops both — confirm that this
/// lossy behaviour is intended.
pub fn translate<R: Iterator<Item = io::Result<char>>, W: Write>(
    input_chars: &mut R,
    output: &mut W,
    vars: &Box<dyn Vars>,
) -> Result<(), TranslateError> {
    let mut slash = false;
    let mut dollar = false;
    while let Some(rch) = input_chars.next() {
        match rch {
            Err(e) => return Err(TranslateError::Input(e)),
            Ok(ch) => {
                if slash {
                    if ch == '$' {
                        // Bug fix: `write_all` instead of `write`, whose
                        // partial writes could silently drop bytes.
                        output.write_all(b"$").map_err(TranslateError::Output)?;
                    } else if ch == '\\' {
                        output.write_all(b"\\\\").map_err(TranslateError::Output)?;
                    }
                    slash = false;
                } else if dollar {
                    if ch == '{' {
                        // Hand the iterator to the expression translator; it
                        // consumes up to the closing brace.
                        translate_expr(input_chars, output, vars)?;
                    }
                    dollar = false
                } else if ch == '\\' {
                    slash = true;
                } else if ch == '$' {
                    dollar = true;
                } else {
                    // A char encodes to at most 4 UTF-8 bytes.
                    let mut buf = [0u8; 4];
                    let s = ch.encode_utf8(&mut buf);
                    output.write_all(s.as_bytes()).map_err(TranslateError::Output)?;
                }
            }
        }
    }
    // Emit a trailing, unconsumed escape/expansion marker verbatim.
    if slash {
        output.write_all(b"\\").map_err(TranslateError::Output)?;
    } else if dollar {
        output.write_all(b"$").map_err(TranslateError::Output)?;
    }
    Ok(())
}
|
use state::*;
/// Trait that tells QDF how to simulate states of space.
pub trait Simulate<S>
where
    S: State,
{
    /// Performs simulation of a state based on its neighbor states,
    /// producing the next state value.
    ///
    /// # Arguments
    /// * `state` - current state.
    /// * `neighbor_states` - current neighbor states.
    fn simulate(state: &S, neighbor_states: &[&S]) -> S;
}
// No-op simulation: `()` can be used as the simulator type when states
// should simply persist unchanged (neighbors are ignored).
impl<S> Simulate<S> for ()
where
    S: State,
{
    fn simulate(state: &S, _: &[&S]) -> S {
        state.clone()
    }
}
|
use amethyst::{
core::Transform,
prelude::*,
renderer::{Flipped, SpriteRender, SpriteSheetHandle},
};
use crate::components::player::Player;
/// Creates the player entity.
///
/// # args:
///
/// * world: world to load the entity into
/// * x: initial x position
/// * y: initial y position
/// * sprite_sheet_handle: handle for the sprite sheet to render from
///
pub fn init_player(world: &mut World, x: f32, y: f32, sprite_sheet_handle: SpriteSheetHandle) {
    // Set position
    let mut transform = Transform::default();
    transform.set_x(x);
    transform.set_y(y);
    // Set sprite (index 1 on the sheet). The handle and the render component
    // are moved in directly — the previous `.clone()` calls were redundant
    // because these were their last uses.
    let sprite_render = SpriteRender {
        sprite_sheet: sprite_sheet_handle,
        sprite_number: 1,
    };
    // Build entity
    world
        .create_entity()
        .with(transform)
        .with(Player::new())
        .with(Flipped::None)
        .with(sprite_render)
        .build();
}
|
#[path="../src/configuration.rs"]
mod configuration;
#[path="../src/file.rs"]
mod file;
#[test]
fn test_returns_configuration_with_empty_list_of_packages() {
    // A `packages:` key with no entries must yield an empty package list.
    let yaml_file = "
default:
  packages:
";
    let configuration = configuration::from_yaml(yaml_file.to_string());
    // Idiom fix: `assert!` instead of `assert_eq!(…, true)`.
    assert!(configuration.packages.is_empty());
}
#[test]
fn test_returns_configuration_with_list_of_packages() {
    // Three packages listed under the default host key.
    let yaml = "
default:
  packages:
    - vim
    - git
    - build-essential
";
    let config = configuration::from_yaml(yaml.to_string());
    assert_eq!(config.packages, vec!["vim", "git", "build-essential"]);
}
#[test]
fn test_returns_configuration_with_a_file_resource() {
    // A single file entry should parse into exactly one resource.
    let yaml = "
default:
  files:
    -
      path: '/home/Jane/hello.txt'
      content: 'Hi from John'
";
    let config = configuration::from_yaml(yaml.to_string());
    assert_eq!(config.files.len(), 1);
}
#[test]
fn test_returns_configuration_with_a_file_resource_has_all_properties() {
    // Both `path` and `content` must survive the YAML round-trip.
    let yaml = "
default:
  files:
    -
      path: '/home/Jane/hello.txt'
      content: 'Hi from John'
";
    let config = configuration::from_yaml(yaml.to_string());
    let file = config.files.first().unwrap();
    assert_eq!(file.path, "/home/Jane/hello.txt");
    assert_eq!(file.content, "Hi from John");
}
#[test]
fn test_valid_configuration() {
    // A file resource with both path and content is valid.
    let configuration = configuration::Configuration {
        packages: vec![],
        files: vec![file::FileResource {
            path: "/home/john/hello.txt".to_string(),
            content: "hello".to_string(),
        }],
        hostname: "".into(),
    };
    // Idiom fix: `assert!` instead of `assert_eq!(…, true)`.
    assert!(configuration.is_valid());
}
#[test]
fn test_invalid_configuration() {
    // An empty `content` makes the file resource — and the config — invalid.
    let configuration = configuration::Configuration {
        packages: vec![],
        files: vec![file::FileResource {
            path: "/home/john/hello.txt".to_string(),
            content: "".to_string(),
        }],
        hostname: "".into(),
    };
    // Idiom fix: `assert!(!…)` instead of `assert_eq!(…, false)`.
    assert!(!configuration.is_valid());
}
#[test]
fn test_configuration_valid_with_empty_file_list() {
    // No files at all is a valid configuration.
    let configuration = configuration::Configuration {
        packages: vec![],
        files: vec![],
        hostname: "".into(),
    };
    // Idiom fix: `assert!` instead of `assert_eq!(…, true)`.
    assert!(configuration.is_valid());
}
#[test]
fn test_return_no_error_messages_if_configuration_is_valid() {
    // A valid configuration must report zero error messages.
    let configuration = configuration::Configuration {
        packages: vec![],
        files: vec![],
        hostname: "".into(),
    };
    // Idiom fix: `assert!` instead of `assert_eq!(…, true)`.
    assert!(configuration.error_messages().is_empty());
}
#[test]
fn test_returns_error_message_for_invalid_file_objects() {
    // Empty path and empty content each produce their own error message.
    let config = configuration::Configuration {
        packages: vec![],
        files: vec![file::FileResource {
            path: "".into(),
            content: "".into(),
        }],
        hostname: "".into(),
    };
    assert_eq!(config.error_messages().len(), 2);
}
#[test]
fn test_config_has_default_hostname() {
    // The top-level YAML key becomes the hostname.
    let yaml = "
default:
  packages:
    - vim
    - git
";
    let config = configuration::from_yaml(yaml.to_string());
    assert_eq!(config.hostname, "default".to_string());
}
#[test]
fn test_config_has_custom_hostname() {
    // A non-"default" top-level key is used as the hostname verbatim.
    let yaml = "
srv01:
  packages:
    - vim
    - git
";
    let config = configuration::from_yaml(yaml.to_string());
    assert_eq!(config.hostname, "srv01".to_string());
}
|
use piston_window::*;
use rayon_logs::visualisation;
use rayon_logs::RunLog;
/// Draws one scene segment as a thin black line.
/// `s` is `((x1, y1), (x2, y2))`; `scale` is the view transform matrix.
fn draw_segment(s: &((f64, f64), (f64, f64)), c: &Context, g: &mut G2d, scale: [[f64; 3]; 2]) {
    Line::new([0.0, 0.0, 0.0, 1.0], 0.1).draw(
        [(s.0).0, (s.0).1, (s.1).0, (s.1).1],
        &c.draw_state,
        scale,
        g,
    );
}
/// Draws one task rectangle: a black backdrop plus a colored overlay whose
/// size reflects the task's animation progress at `current_time`.
fn draw_rectangle(
    r: &rayon_logs::Rectangle,
    c: &Context,
    g: &mut G2d,
    scale: [[f64; 3]; 2],
    current_time: u64,
) {
    // Black backdrop covering the rectangle's full extent.
    Rectangle::new([0.0, 0.0, 0.0, 1.0]).draw(
        [r.x, r.y, r.width, r.height],
        &c.draw_state,
        scale,
        g,
    );
    // Progress in [0, 1]: 0 before `start`, 1 from `end` onwards, linear in
    // between. Rectangles without an animation pair are drawn fully.
    let time_scale = r
        .animation
        .map(|(start, end)| {
            if current_time < start {
                0.0
            } else if current_time >= end {
                1.0
            } else {
                (current_time - start) as f64 / (end - start) as f64
            }
        })
        .unwrap_or(1.0);
    // Colored overlay grows with progress.
    Rectangle::new([r.color[0], r.color[1], r.color[2], 1.0]).draw(
        [r.x, r.y, r.width * time_scale, r.height * time_scale],
        &c.draw_state,
        scale,
        g,
    );
}
/// Loads a rayon-logs run, computes the scene bounds, and renders it in a
/// Piston window scaled to the current window size.
fn main() {
    let log = RunLog::load("max.json").expect("loading log failed");
    let scene = visualisation(&log, None);
    // Bounding box of the scene in drawing coordinates.
    let xmax = scene
        .rectangles
        .iter()
        .map(|r| r.width + r.x)
        .max_by(|a, b| a.partial_cmp(b).unwrap())
        .unwrap();
    let ymax = scene
        .rectangles
        .iter()
        .map(|r| r.height + r.y)
        .max_by(|a, b| a.partial_cmp(b).unwrap())
        .unwrap();
    let xmin = scene
        .rectangles
        .iter()
        .map(|r| r.x)
        .min_by(|a, b| a.partial_cmp(b).unwrap())
        .unwrap();
    let ymin = scene
        .rectangles
        .iter()
        .map(|r| r.y)
        .min_by(|a, b| a.partial_cmp(b).unwrap())
        .unwrap();
    // Animation time range. Bug fix: skip rectangles without an animation
    // pair instead of unwrapping them — `draw_rectangle` already treats
    // `None` as "always fully drawn", so panicking here was inconsistent.
    let min_time = scene
        .rectangles
        .iter()
        .filter_map(|r| r.animation)
        .map(|(start, _)| start)
        .min()
        .expect("no animated rectangle in scene");
    let max_time = scene
        .rectangles
        .iter()
        .filter_map(|r| r.animation)
        .map(|(_, end)| end)
        .max()
        .expect("no animated rectangle in scene");
    let mut window: PistonWindow = WindowSettings::new("rayon logs viewer", [600, 600])
        .exit_on_esc(true)
        .samples(4)
        .build()
        .unwrap();
    window.set_lazy(true);
    // NOTE(review): `current_time` is never updated after initialisation, so
    // the animation is frozen at the midpoint — confirm whether time
    // scrubbing was intended.
    let mut current_time = (min_time + max_time) / 2;
    while let Some(e) = window.next() {
        let size = window.size();
        let width = size.width;
        let height = size.height;
        window.draw_2d(&e, |c, g| {
            clear([1.0, 1.0, 1.0, 1.0], g);
            g.clear_stencil(0);
            // Map scene coordinates onto the current window size.
            let scale = c
                .transform
                .trans(-xmin, -ymin)
                .scale(width / (xmax - xmin), height / (ymax - ymin));
            for s in &scene.segments {
                draw_segment(s, &c, g, scale);
            }
            for r in &scene.rectangles {
                draw_rectangle(r, &c, g, scale, current_time);
            }
        });
    }
}
|
// svd2rust-generated reader type aliases for the ADC_RES register and its
// fields. Exact form is generator output; avoid manual reshaping.
#[doc = "Reader of register ADC_RES"]
pub type R = crate::R<u32, super::ADC_RES>;
#[doc = "Reader of field `VIN_CNT`"]
pub type VIN_CNT_R = crate::R<u16, u16>;
#[doc = "Reader of field `HSCMP_POL`"]
pub type HSCMP_POL_R = crate::R<bool, bool>;
#[doc = "Reader of field `ADC_OVERFLOW`"]
pub type ADC_OVERFLOW_R = crate::R<bool, bool>;
#[doc = "Reader of field `ADC_ABORT`"]
pub type ADC_ABORT_R = crate::R<bool, bool>;
// Field accessors for reads of ADC_RES (bit positions per the doc strings).
impl R {
    #[doc = "Bits 0:15 - Count to source/sink Cref1 + Cref2 from Vin to Vrefhi."]
    #[inline(always)]
    pub fn vin_cnt(&self) -> VIN_CNT_R {
        VIN_CNT_R::new((self.bits & 0xffff) as u16)
    }
    #[doc = "Bit 16 - Polarity used for IDACB for this last ADC result, 0= source, 1= sink"]
    #[inline(always)]
    pub fn hscmp_pol(&self) -> HSCMP_POL_R {
        HSCMP_POL_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 30 - This flag is set when the ADC counter overflows. This is an indication to the firmware that the IDACB current level is too low."]
    #[inline(always)]
    pub fn adc_overflow(&self) -> ADC_OVERFLOW_R {
        ADC_OVERFLOW_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 31 - This flag is set when the ADC sequencer was aborted before tripping HSCMP."]
    #[inline(always)]
    pub fn adc_abort(&self) -> ADC_ABORT_R {
        ADC_ABORT_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
|
use std::{fs, env, error::Error};
/// Parsed command-line configuration for the search.
pub struct Config {
    // search pattern
    query: String,
    // file to search in
    filename: String,
    // when true, matching ignores letter case
    case_insensitive: bool,
}
impl Config {
    /// Builds a `Config` from process arguments: `<prog> <query> <filename>`.
    ///
    /// Case-insensitive matching is enabled by the mere *presence* of the
    /// `CASE_INSENSITIVE` environment variable (its value is ignored).
    ///
    /// Returns a static error message when a required argument is missing.
    pub fn new(mut args: env::Args) -> Result<Self, &'static str> {
        // Skip the program name.
        args.next();
        let query = match args.next() {
            Some(q) => q,
            None => return Err("Error: query string not specified."),
        };
        let filename = match args.next() {
            Some(f) => f,
            None => return Err("Error: file not specified."),
        };
        // Idiom fix: `is_ok()` instead of `!….is_err()`.
        let case_insensitive = env::var("CASE_INSENSITIVE").is_ok();
        println!("The input query string is: {}", query);
        println!("The file to be searched is: {}\n", filename);
        Ok(Self {
            query,
            filename,
            case_insensitive,
        })
    }
}
/// Reads the configured file and prints every line that matches the query,
/// honouring the configured case sensitivity.
pub fn run(config: Config) -> Result<(), Box<dyn Error>> {
    let content = fs::read_to_string(config.filename)?;
    let results = if config.case_insensitive {
        case_insensitive_search(&config.query, &content)
    } else {
        search(&config.query, &content)
    };
    for line in results {
        println!("{}", line);
    }
    Ok(())
}
/// Case-sensitive search: returns every (trimmed) line of `content` that
/// contains `query` as a substring.
fn search<'a>(query: &str, content: &'a str) -> Vec<&'a str> {
    let mut matched = Vec::new();
    for line in content.lines() {
        let line = line.trim();
        if line.contains(query) {
            matched.push(line);
        }
    }
    matched
}
/// Case-insensitive search: returns every (trimmed) line whose lowercased
/// text contains the lowercased `query`.
fn case_insensitive_search<'a>(query: &str, content: &'a str) -> Vec<&'a str> {
    // Hoist the loop-invariant lowercase of the query out of the filter
    // (previously recomputed for every line).
    let needle = query.to_lowercase();
    content
        .lines()
        .map(str::trim)
        .filter(|line| line.to_lowercase().contains(&needle))
        .collect()
}
// Unit tests for `search` / `case_insensitive_search`. Both functions trim
// each line before matching, so the expected values are trimmed too.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_search() {
        // "who" matches the "whom"/"who" lines only (substring match).
        let test_query = "who";
        let test_content = "Little owlet in the glen
I'm ashamed of you;
You are ungrammatical
In speaking as you do,
You should say, \"To whom! To whom!\"
Not, \"To who! To who!\"";
        let expected_result = vec!["You should say, \"To whom! To whom!\"",
            "Not, \"To who! To who!\""];
        assert_eq!(expected_result, search(test_query, test_content));
    }
    #[test]
    fn test_case_insensitive_search() {
        // Uppercase query must match lowercase and mixed-case lines.
        let query = "YOU";
        let content = "Little owlet in the glen
I'm ashamed of you;
You are ungrammatical
In speaking as you do,
You should say, \"To whom! To whom!\"
Not, \"To who! To who!\"";
        let expected_result = vec!["I'm ashamed of you;",
            "You are ungrammatical",
            "In speaking as you do,",
            "You should say, \"To whom! To whom!\""];
        assert_eq!(expected_result, case_insensitive_search(query, content));
    }
    #[test]
    fn test_case_sensitive_search() {
        // "You" (capitalised) must not match the lowercase "you" lines.
        let query = "You";
        let content = "Little owlet in the glen
I'm ashamed of you;
You are ungrammatical
In speaking as you do,
You should say, \"To whom! To whom!\"
Not, \"To who! To who!\"";
        let expected_result = vec!["You are ungrammatical",
            "You should say, \"To whom! To whom!\""];
        assert_eq!(expected_result, search(query, content));
    }
}
#![ feature( optin_builtin_traits ) ]
//
// Tested:
//
// - ✔ Spawn mailbox for actor that is !Send and !Sync
// - ✔ Spawn mailbox for actor that is Send and Sync but using the local methods
// - ✔ Manually spawn mailbox for actor that is !Send and !Sync
// - ✔ Manually spawn mailbox for actor that is Send and Sync but using the local methods
//
mod common;
use
{
thespis :: { * } ,
thespis_impl :: { *, } ,
common :: { actors::{ Sum, SumNoSend, Add, Show } } ,
async_executors :: { LocalPool } ,
futures :: { task::LocalSpawnExt } ,
};
#[test]
// Spawn a mailbox for an actor that is !Send/!Sync via the local
// convenience constructor, then verify send + call round-trips.
fn test_not_send_actor() {
    let mut exec = LocalPool::default();
    let exec2 = exec.clone();
    let program = async move {
        // If we inline this in the next statement, it actually compiles with
        // rt::spawn( program ) instead of spawn_local.
        let actor = SumNoSend(5);
        let mut addr = Addr::try_from_local(actor, &exec2).expect("spawn actor mailbox");
        addr.send(Add(10)).await.expect("Send failed");
        let result = addr.call(Show {}).await.expect("Call failed");
        assert_eq!(15, result);
    };
    exec.spawn_local(program).expect("spawn program");
    exec.run();
}
#[test]
// Spawn a mailbox for a Send + Sync actor, still using the local
// constructor, then verify send + call round-trips.
fn test_send_actor() {
    let mut exec = LocalPool::default();
    let exec2 = exec.clone();
    let program = async move {
        // If we inline this in the next statement, it actually compiles with
        // rt::spawn( program ) instead of spawn_local.
        let actor = Sum(5);
        let mut addr = Addr::try_from_local(actor, &exec2).expect("spawn actor mailbox");
        addr.send(Add(10)).await.expect("Send failed");
        let result = addr.call(Show {}).await.expect("Call failed");
        assert_eq!(15, result);
    };
    exec.spawn_local(program).expect("spawn program");
    exec.run();
}
#[test]
// Manually wire the inbox/address pair for a !Send/!Sync actor instead of
// using the convenience constructor, then verify send + call round-trips.
fn test_manually_not_send_actor() {
    let mut exec = LocalPool::default();
    let exec2 = exec.clone();
    let program = async move {
        // If we inline this in the next statement, it actually compiles with
        // rt::spawn( program ) instead of spawn_local.
        let actor = SumNoSend(5);
        let mb = Inbox::new(Some("SumNoSend".into()));
        let mut addr = Addr::new(mb.sender());
        exec2.spawn_local(mb.start_fut_local(actor)).expect("spawn actor mailbox");
        addr.send(Add(10)).await.expect("Send failed");
        let result = addr.call(Show {}).await.expect("Call failed");
        assert_eq!(15, result);
    };
    exec.spawn_local(program).expect("spawn program");
    exec.run();
}
#[test]
// Manually wire the inbox/address pair for a Send + Sync actor using the
// local spawn path, then verify send + call round-trips.
fn test_manually_send_actor() {
    let mut exec = LocalPool::default();
    let exec2 = exec.clone();
    let program = async move {
        // If we inline this in the next statement, it actually compiles with
        // rt::spawn( program ) instead of spawn_local.
        let actor = Sum(5);
        let mb = Inbox::new(Some("Sum".into()));
        let mut addr = Addr::new(mb.sender());
        exec2.spawn_local(mb.start_fut_local(actor)).expect("spawn actor mailbox");
        addr.send(Add(10)).await.expect("Send failed");
        let result = addr.call(Show {}).await.expect("Call failed");
        assert_eq!(15, result);
    };
    exec.spawn_local(program).expect("spawn program");
    exec.run();
}
|
use std::{
io::{stderr, Write},
process::exit,
};
use crate::laze_parser::parser::LazeParser;
/// Operating mode of the compiler, selected on the command line
/// (`--compile` / `--convert`).
///
/// Derives added for ergonomics: the enum is a trivially copyable tag, so
/// `Debug`/`Clone`/`Copy`/`PartialEq`/`Eq` are free and backward compatible.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CompilerMode {
    /// Run in compile mode (`--compile`).
    Compile,
    /// Run in convert mode (`--convert`).
    Convert,
}
/// Partially-populated compiler configuration accumulated while parsing CLI
/// arguments; validated into a `CompilerInfo` by `CompilerInfo::from_option`.
pub struct OptionCompilerInfo {
    // Compile/convert mode, if `--compile` or `--convert` was given.
    pub mode: Option<CompilerMode>,
    // Parser built from the file given via `--parser [PATH]`, if any.
    pub parser: Option<LazeParser>,
    // Input source path, if given.
    pub program_file_path: Option<String>,
    // Output path; optional (later defaults to "").
    pub dist_file_path: Option<String>,
}
impl OptionCompilerInfo {
    /// Create an info struct with no options set.
    pub fn new() -> Self {
        OptionCompilerInfo {
            mode: None,
            parser: None,
            program_file_path: None,
            dist_file_path: None,
        }
    }
}

// A `new()` that takes no arguments should be mirrored by `Default`
// (clippy::new_without_default), so the type composes with generic code and
// struct-update syntax.
impl Default for OptionCompilerInfo {
    fn default() -> Self {
        Self::new()
    }
}
/// Fully-validated compiler configuration: every required option is present.
pub struct CompilerInfo {
    // Selected mode (--compile / --convert).
    pub mode: CompilerMode,
    // Parser loaded from the --parser grammar file.
    pub parser: LazeParser,
    // Input source path.
    pub program_file_path: String,
    // Output path; "" when not specified.
    pub dist_file_path: String,
}
impl CompilerInfo {
    /// Validate the partially-parsed CLI options into a complete
    /// configuration.
    ///
    /// Prints a usage hint to stderr and exits the process with status 1
    /// when a required option is missing; `dist_file_path` defaults to "".
    pub fn from_option(info: OptionCompilerInfo) -> Self {
        CompilerInfo {
            mode: require(
                info.mode,
                "Please select a mode with the option: --compile / --convert",
            ),
            parser: require(
                info.parser,
                "Please give a parser file as a parameter with: --parser [PATH]",
            ),
            program_file_path: require(
                info.program_file_path,
                "Please specify a file path to convert / compile: lazec [FILEPATH]",
            ),
            dist_file_path: info.dist_file_path.unwrap_or_default(),
        }
    }
}

/// Unwrap a required CLI option, or print `msg` to stderr and `exit(1)`.
/// (Extracted: the original repeated this match/exit boilerplate three times.)
fn require<T>(opt: Option<T>, msg: &str) -> T {
    match opt {
        Some(value) => value,
        None => {
            // Best-effort: if stderr itself fails there is nothing left to do.
            let _ = writeln!(stderr(), "{}", msg);
            exit(1);
        }
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// ARM resource envelope for WebTest payloads (id, name, type, location, tags).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebtestsResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // Serialized as "type" — `type` is a Rust keyword, hence the trailing underscore.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Tags-only wrapper (a resource body carrying nothing but `tags`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TagsResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// One page of `WebTest` resources plus an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebTestListResult {
    pub value: Vec<WebTest>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A WebTest resource: the ARM envelope flattened in, plus kind and properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebTest {
    #[serde(flatten)]
    pub webtests_resource: WebtestsResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<web_test::Kind>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<WebTestProperties>,
}
/// Enum types scoped to [`WebTest`].
pub mod web_test {
    use super::*;
    /// Kind of web test on the resource level ("ping" or "multistep").
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Kind {
        #[serde(rename = "ping")]
        Ping,
        #[serde(rename = "multistep")]
        Multistep,
    }
}
/// Metadata describing a web test: identity, schedule, locations, request and
/// validation rules. Note the wire names are PascalCase, hence the renames.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebTestProperties {
    #[serde(rename = "SyntheticMonitorId")]
    pub synthetic_monitor_id: String,
    #[serde(rename = "Name")]
    pub name: String,
    #[serde(rename = "Description", default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "Enabled", default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
    #[serde(rename = "Frequency", default, skip_serializing_if = "Option::is_none")]
    pub frequency: Option<i32>,
    #[serde(rename = "Timeout", default, skip_serializing_if = "Option::is_none")]
    pub timeout: Option<i32>,
    #[serde(rename = "Kind")]
    pub kind: web_test_properties::Kind,
    #[serde(rename = "RetryEnabled", default, skip_serializing_if = "Option::is_none")]
    pub retry_enabled: Option<bool>,
    #[serde(rename = "Locations")]
    pub locations: Vec<WebTestGeolocation>,
    #[serde(rename = "Configuration", default, skip_serializing_if = "Option::is_none")]
    pub configuration: Option<web_test_properties::Configuration>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "Request", default, skip_serializing_if = "Option::is_none")]
    pub request: Option<web_test_properties::Request>,
    #[serde(rename = "ValidationRules", default, skip_serializing_if = "Option::is_none")]
    pub validation_rules: Option<web_test_properties::ValidationRules>,
}
/// Nested types scoped to [`WebTestProperties`].
pub mod web_test_properties {
    use super::*;
    /// Test kind at the properties level; a superset of the resource-level kind.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Kind {
        #[serde(rename = "ping")]
        Ping,
        #[serde(rename = "multistep")]
        Multistep,
        #[serde(rename = "basic")]
        Basic,
        #[serde(rename = "standard")]
        Standard,
    }
    /// Raw web-test configuration payload.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Configuration {
        #[serde(rename = "WebTest", default, skip_serializing_if = "Option::is_none")]
        pub web_test: Option<String>,
    }
    /// HTTP request the test performs.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Request {
        #[serde(rename = "RequestUrl", default, skip_serializing_if = "Option::is_none")]
        pub request_url: Option<String>,
        #[serde(rename = "Headers", default, skip_serializing_if = "Vec::is_empty")]
        pub headers: Vec<HeaderField>,
        #[serde(rename = "HttpVerb", default, skip_serializing_if = "Option::is_none")]
        pub http_verb: Option<String>,
        #[serde(rename = "RequestBody", default, skip_serializing_if = "Option::is_none")]
        pub request_body: Option<String>,
        #[serde(rename = "ParseDependentRequests", default, skip_serializing_if = "Option::is_none")]
        pub parse_dependent_requests: Option<bool>,
        #[serde(rename = "FollowRedirects", default, skip_serializing_if = "Option::is_none")]
        pub follow_redirects: Option<bool>,
    }
    /// Checks applied to the test's response.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct ValidationRules {
        #[serde(rename = "ContentValidation", default, skip_serializing_if = "Option::is_none")]
        pub content_validation: Option<validation_rules::ContentValidation>,
        #[serde(rename = "SSLCheck", default, skip_serializing_if = "Option::is_none")]
        pub ssl_check: Option<bool>,
        #[serde(rename = "SSLCertRemainingLifetimeCheck", default, skip_serializing_if = "Option::is_none")]
        pub ssl_cert_remaining_lifetime_check: Option<i32>,
        #[serde(rename = "ExpectedHttpStatusCode", default, skip_serializing_if = "Option::is_none")]
        pub expected_http_status_code: Option<i32>,
        #[serde(rename = "IgnoreHttpsStatusCode", default, skip_serializing_if = "Option::is_none")]
        pub ignore_https_status_code: Option<bool>,
    }
    /// Nested types scoped to [`ValidationRules`].
    pub mod validation_rules {
        use super::*;
        /// Response-body content matching rule.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub struct ContentValidation {
            #[serde(rename = "ContentMatch", default, skip_serializing_if = "Option::is_none")]
            pub content_match: Option<String>,
            #[serde(rename = "IgnoreCase", default, skip_serializing_if = "Option::is_none")]
            pub ignore_case: Option<bool>,
            #[serde(rename = "PassIfTextFound", default, skip_serializing_if = "Option::is_none")]
            pub pass_if_text_found: Option<bool>,
        }
    }
}
/// A geographic location (by id) from which the test runs.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebTestGeolocation {
    #[serde(rename = "Id", default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// A single HTTP header key/value pair.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HeaderField {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub key: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}
/// One page of available API operations plus an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A REST API operation descriptor.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationInfo>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}
/// Human-readable display information for an [`Operation`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationInfo {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
|
/// Extension trait for iterators: find the element matching a predicate only
/// if it is the *only* matching element.
pub trait Unique<T> {
    /// Returns `Some(item)` when exactly one item satisfies `pred`;
    /// `None` when zero or more than one item matches.
    fn unique<P>(&mut self, pred: P) -> Option<T>
    where
        P: FnMut(&T) -> bool;
}

impl<T, I> Unique<T> for I
where
    I: Iterator<Item = T>,
{
    fn unique<P>(&mut self, mut pred: P) -> Option<T>
    where
        P: FnMut(&T) -> bool,
    {
        // Pull at most two matches: a second match disqualifies the first.
        let mut matches = self.filter(|item| pred(item));
        let first = matches.next();
        if matches.next().is_some() {
            None
        } else {
            first
        }
    }
}
|
/// Recipe-scoreboard state (Advent-of-Code-style "chocolate charts" puzzle).
struct State {
    // Digit scores of every recipe created so far.
    recipes: Vec<u8>,
    // Current indices of the two elves within `recipes`.
    elves: [usize; 2],
}

impl State {
    /// Start with the initial scoreboard `[3, 7]` and elves on indices 0 and 1.
    fn new() -> Self {
        State {
            recipes: vec![3, 7],
            elves: [0, 1],
        }
    }

    /// One round: append the digit(s) of the elves' combined score, then move
    /// each elf forward by 1 + its current recipe's score (wrapping).
    fn run(&mut self) {
        let combined = self.recipes[self.elves[0]] + self.recipes[self.elves[1]];
        // Two single-digit scores sum to at most 18, so the tens digit,
        // when present, is always 1.
        if combined >= 10 {
            self.recipes.push(1);
        }
        self.recipes.push(combined % 10);
        for position in self.elves.iter_mut() {
            *position = (*position + 1 + self.recipes[*position] as usize) % self.recipes.len();
        }
    }
}
/// Part 1: print the scores of the ten recipes that appear immediately after
/// the first 793031 recipes.
fn part1() {
    let input = 793031;
    let mut state = State::new();
    while state.recipes.len() < input + 10 {
        state.run();
    }
    // Each score is a single digit, so concatenating the Display forms
    // reproduces the original digit-by-digit output.
    let answer: String = state.recipes[input..input + 10]
        .iter()
        .map(u8::to_string)
        .collect();
    println!("{}", answer);
}
/// Part 2: print how many recipes precede the first occurrence of the digit
/// sequence `793031` on the scoreboard.
fn part2() {
    let mut state = State::new();
    let input = [7, 9, 3, 0, 3, 1];
    let pos = loop {
        state.run();
        // Each round appends one or two recipes, so only the last two
        // 6-wide windows can be newly-created matches; `.rev().take(2)`
        // restricts the scan to exactly those instead of re-scanning
        // the whole scoreboard every round.
        if let Some((i, _)) = state
            .recipes
            .windows(6)
            .enumerate()
            .rev()
            .take(2)
            .find(|&(_, w)| w == &input[..])
        {
            // `i` is the number of recipes before the matching window.
            break i;
        }
    };
    println!("{}", pos);
}
/// Run both puzzle parts in order.
fn main() {
    part1();
    part2();
}
|
#![allow(dead_code)]
use std::usize;
use crate::cpu::CpuInterface;
use crate::cpu::Interface;
use crate::savable::Savable;
/// Location of the 6502 reset vector (little-endian entry-point address).
const RESET_VECTOR: u16 = 0xFFFC;

// Bus only used with the snake game.
pub struct SnakeBus {
    // Full 64 KiB address space. A `u16` address can reach 0x0000..=0xFFFF,
    // so the array needs 0x10000 entries — the previous `[u8; 0xFFFF]`
    // panicked with an out-of-bounds index on `read(0xFFFF)`/`write(0xFFFF)`.
    memory: [u8; 0x10000],
}

impl Interface for SnakeBus {
    /// Read one byte from flat memory (no mapping / mirroring).
    fn read(&mut self, addr: u16) -> u8 {
        self.memory[addr as usize]
    }

    /// Write one byte to flat memory.
    fn write(&mut self, addr: u16, data: u8) {
        self.memory[addr as usize] = data
    }
}

impl CpuInterface for SnakeBus {}

impl SnakeBus {
    /// Create a bus with all memory zeroed.
    pub fn new() -> Self {
        Self {
            memory: [0; 0x10000],
        }
    }

    /// Copy `program` to 0x0600 and point the reset vector at it
    /// (little-endian: low byte 0x00, high byte 0x06).
    pub fn load(&mut self, program: Vec<u8>) {
        self.memory[0x600..(0x600 + program.len())].copy_from_slice(&program[..]);
        self.write(RESET_VECTOR, 0x00);
        self.write(RESET_VECTOR + 1, 0x06);
    }
}

impl Savable for SnakeBus {}
|
extern crate iron;
extern crate logger;
extern crate router;
extern crate hyper;
extern crate serde;
#[macro_use] extern crate serde_derive;
#[macro_use] extern crate serde_json;
#[macro_use] extern crate log;
extern crate env_logger;
use std::str::FromStr;
use std::io::Read;
use iron::prelude::*;
use router::Router;
use hyper::client::{Client, Body};
use logger::Logger;
/// Replays an incoming HTTP request against a fixed upstream host.
/// The upstream's response is not returned to the caller (see `forward`).
struct Forwarder {
    // hyper client used to re-issue the request.
    client: Client,
    // Upstream scheme, e.g. "http".
    protocol: String,
    // Upstream host (and optional port), e.g. "localhost:6668".
    domain: String,
}
impl Forwarder {
fn forward(&self, req: &mut Request) {
let mut url_base = String::new();
url_base.push_str(&self.protocol);
url_base.push_str("://");
url_base.push_str(&self.domain);
url_base.push_str("/");
url_base.push_str(&req.url.path().join("/"));
if let Some(ref q) = req.url.query() {
url_base.push_str("?");
url_base.push_str(q);
}
let mut body = Vec::new();
req.body.read_to_end(&mut body);
self.client.request(req.method.clone(), &url_base)
.headers(req.headers.clone())
.body(Body::BufBody(body.as_slice(), body.len()))
.send()
.unwrap();
}
fn new(protocol: &str, domain: &str) -> Forwarder {
Forwarder { client: Client::new(),
protocol: String::from_str(protocol).unwrap(),
domain: String::from_str(domain).unwrap()}
}
}
/// Iron handler: proxy the request to the hard-coded upstream, then answer
/// the original caller with a fixed body regardless of the upstream outcome.
fn forward(req: &mut Request) -> IronResult<Response> {
    Forwarder::new("http", "localhost:6668").forward(req);
    Ok(Response::with((iron::status::Ok, "Hello world")))
}
/// JSON payload served by the `/stat` admin endpoint.
#[derive(Serialize)]
struct Stats {
    requests_forwarded: u64,
    target_requests_per_second: f64,
    average_requests_per_second: f64,
    max_requests_per_second: f64,
    buffer_size_in_bytes: usize,
}
/// GET /stat — serve a stats snapshot as JSON.
/// The numbers are currently hard-coded placeholders.
fn stat_handler(_req: &mut Request) -> IronResult<Response> {
    let stats = Stats {
        requests_forwarded: 345242,
        target_requests_per_second: 250.,
        average_requests_per_second: 261.,
        max_requests_per_second: 342.,
        buffer_size_in_bytes: 5098231,
    };
    let body = serde_json::to_string(&stats).unwrap();
    Ok(Response::with((iron::status::Ok, body)))
}
// PUT /rate — placeholder; returns a fixed body, no rate logic yet.
fn rate_handler(req: &mut Request) -> IronResult<Response> {
    Ok(Response::with((iron::status::Ok, "Hello admin")))
}
// DELETE /buffer — placeholder; returns a fixed body, no buffer logic yet.
fn buffer_handler(req: &mut Request) -> IronResult<Response> {
    Ok(Response::with((iron::status::Ok, "Hello admin")))
}
// GET /target — placeholder; returns a fixed body, target not reported yet.
fn get_target(req: &mut Request) -> IronResult<Response> {
    Ok(Response::with((iron::status::Ok, "Hello admin")))
}
// PUT /target — placeholder; returns a fixed body, target not changeable yet.
fn set_target(req: &mut Request) -> IronResult<Response> {
    Ok(Response::with((iron::status::Ok, "Hello admin")))
}
/// Start two Iron servers: a logging forward proxy on :6666 and an admin
/// router on :6667, then verify both listeners bound successfully.
fn main() {
    env_logger::init().unwrap();
    let (logger_before, logger_after) = Logger::new(None);

    // Forwarding server: every request is logged and proxied upstream.
    let mut forward_chain = Chain::new(forward);
    forward_chain.link_before(logger_before);
    forward_chain.link_after(logger_after);
    let forward_server = Iron::new(forward_chain).http("localhost:6666");

    // Admin server: management endpoints on a separate port.
    let mut router = Router::new();
    router.get("/stat", stat_handler, "stat");
    router.put("/rate", rate_handler, "rate");
    router.delete("/buffer", buffer_handler, "buffer");
    router.get("/target", get_target, "get_target");
    router.put("/target", set_target, "set_target");
    let admin_server = Iron::new(router).http("localhost:6667");

    debug!("debug logging on");
    println!("Ready");
    // Fail fast if either listener could not bind. Previously the admin
    // server's bind Result was silently discarded, so an occupied port 6667
    // went unnoticed.
    admin_server.expect("failed to start admin server on localhost:6667");
    forward_server.expect("failed to start forward server on localhost:6666");
}
|
use self::Direction::*;
use std::slice::Iter;
/// A relative movement direction.
///
/// NOTE(review): the lower-case variant names violate Rust's UpperCamelCase
/// convention (the compiler warns), but renaming them would break existing
/// code matching on these variants, so they are kept as-is.
#[derive(Debug)]
pub enum Direction {
    forward,
    left,
    backward,
    right,
}

impl Direction {
    /// Iterate over all four directions in a fixed order.
    pub fn iterator() -> Iter<'static, Direction> {
        static DIRECTIONS: [Direction; 4] = [
            Direction::forward,
            Direction::left,
            Direction::backward,
            Direction::right,
        ];
        DIRECTIONS.iter()
    }
}
fn main() {
for dir in Direction::iterator() {
println!("{:?}", dir);
}
} |
use crate::error::{Result, Error};
use crate::visitor::Visitor;
use syn::{Expr, ExprUnary, ExprBinary, UnOp, BinOp};
use syn::spanned::Spanned;
use super::tools::*;
/// Visitor that rewrites arithmetic expressions into an expanded
/// sum-of-products form: products are distributed over sums and negation is
/// pushed down onto individual terms (see `match_sum`).
#[derive(Debug)]
pub struct Expand {
}
// Get a series of additions/subtractions.
/// Recursively flatten `expr` into `sum`, a list of terms whose addition
/// reproduces the expression. `is_negated` tracks whether the current
/// subtree sits under an odd number of negations/subtractions.
fn match_sum(expr: &Expr, sum: &mut Vec<Expr>, is_negated: bool) -> Result<()> {
    // Parentheses are transparent for expansion.
    let expr = deparen(expr);
    match expr {
        Expr::Binary(ExprBinary { left, op, right, ..}) => {
            match op {
                BinOp::Add(_) => {
                    // a + b: both sides keep the current sign.
                    match_sum(left, sum, is_negated)?;
                    match_sum(right, sum, is_negated)?;
                    return Ok(());
                }
                BinOp::Sub(_) => {
                    // a - b: the right side's sign flips.
                    match_sum(left, sum, is_negated)?;
                    match_sum(right, sum, !is_negated)?;
                    return Ok(());
                }
                BinOp::Mul(_) => {
                    // a * b: distribute. Each side is expanded into its own
                    // term list and the cross product of terms is emitted.
                    // The sign is carried only on the left factors (right
                    // side expands with `false`) so negation applies exactly
                    // once per product.
                    let mut left_sum = Vec::new();
                    match_sum(left, &mut left_sum, is_negated)?;
                    let mut right_sum = Vec::new();
                    match_sum(right, &mut right_sum, false)?;
                    for lhs in left_sum.iter() {
                        for rhs in right_sum.iter() {
                            sum.push(make_binary(lhs.clone(), op.clone(), rhs.clone()));
                        }
                    }
                    return Ok(());
                }
                _ => {
                }
            }
        }
        _ => {
        }
    }
    // Leaf (or unhandled operator): emit the expression itself as a single
    // term, negated when required.
    if is_negated {
        sum.push(negate(&expr));
    } else {
        sum.push(expr.clone());
    }
    Ok(())
}
impl Visitor for Expand {
    /// Expand a unary expression. Dereference and logical-not have no
    /// arithmetic expansion and are rejected; negation is pushed into the
    /// operand's flattened terms.
    fn visit_unary(&self, exprunary: &ExprUnary) -> Result<Expr> {
        match exprunary.op {
            UnOp::Deref(_) => Err(Error::UnsupportedExpr(exprunary.span())),
            UnOp::Not(_) => Err(Error::UnsupportedExpr(exprunary.span())),
            UnOp::Neg(_) => {
                // `-(a + b)` expands to `-a - b`: flatten with is_negated=true.
                let mut sum = Vec::new();
                match_sum(deparen(&exprunary.expr), &mut sum, true)?;
                Ok(make_sum(&*sum))
            },
        }
    }
    /// Expand a binary expression by flattening it into terms and rebuilding
    /// the expanded sum.
    fn visit_binary(&self, exprbinary: &ExprBinary) -> Result<Expr> {
        let mut sum = Vec::new();
        let expr : Expr = exprbinary.clone().into();
        match_sum(&expr, &mut sum, false)?;
        Ok(make_sum(&*sum))
    }
}
// Exercises `Expand` end-to-end via the `expr!` macro: each case compares
// the expanded form against the hand-written expected expression.
#[test]
fn expand() -> Result<()> {
    use crate::expr;
    // Unary
    assert_eq!(expr!(- - x).expand()?, expr!(x));
    assert_eq!(expr!(-(x+1)).expand()?, expr!(-x-1));
    // Binary add/sub
    assert_eq!(expr!((x+1)+(x+1)).expand()?, expr!(x + 1 + x + 1));
    assert_eq!(expr!((x+1)+((x+1)+(x+1))).expand()?, expr!(x + 1 + x + 1 + x + 1));
    assert_eq!(expr!((x+1)-(x+1)).expand()?, expr!(x + 1 - x - 1));
    assert_eq!(expr!((x+1)-((x+1)-(x+1))).expand()?, expr!(x + 1 - x - 1 + x + 1));
    assert_eq!(expr!((x+1)-((x+1)-(-x+1))).expand()?, expr!(x + 1 - x - 1 - x + 1));
    // Binary mul — products distribute but are not simplified (1 * x stays).
    assert_eq!(expr!(x*x).expand()?, expr!(x * x));
    assert_eq!(expr!(x*(x+1)).expand()?, expr!(x * x + x * 1));
    assert_eq!(expr!((x+1)*x).expand()?, expr!(x * x + 1 * x));
    assert_eq!(expr!((x+1)*(x+1)).expand()?, expr!(x * x + x * 1 + 1 * x + 1 * 1));
    assert_eq!(expr!((x+1)*(x+1)*(x+1)).expand()?, expr!(x * x * x + x * x * 1 + x * 1 * x + x * 1 * 1 + 1 * x * x + 1 * x * 1 + 1 * 1 * x + 1 * 1 * 1));
    Ok(())
}
|
use super::formatting::source_lines;
use super::kb::*;
use super::rules::*;
use super::terms::*;
use super::visitor::{walk_rule, walk_term, Visitor};
use std::collections::{hash_map::Entry, HashMap};
/// Map a commonly-written (but wrong for this language) type name to the
/// canonical Polar type it most likely means; `None` when `t` is not a
/// recognized misspelling.
fn common_misspellings(t: &str) -> Option<String> {
    let canonical = match t {
        "integer" | "int" | "i32" | "i64" | "u32" | "u64" | "usize" | "size_t" => "Integer",
        "float" | "f32" | "f64" | "double" => "Float",
        "char" | "str" | "string" => "String",
        "list" | "array" | "Array" => "List",
        "dict" | "Dict" | "dictionary" | "hash" | "Hash" | "map" | "Map" | "HashMap"
        | "hashmap" | "hash_map" => "Dictionary",
        _ => return None,
    };
    Some(canonical.to_owned())
}
/// Record singleton variables and unknown specializers in a rule.
struct SingletonVisitor<'kb> {
    kb: &'kb KnowledgeBase,
    // Candidate variables: `Some(term)` = seen exactly once so far
    // (potential singleton); `None` = seen more than once (not reported).
    singletons: HashMap<Symbol, Option<Term>>,
}
impl<'kb> SingletonVisitor<'kb> {
    /// Create a visitor with no recorded variables.
    fn new(kb: &'kb KnowledgeBase) -> Self {
        Self {
            kb,
            singletons: HashMap::new(),
        }
    }

    /// Drain the recorded occurrences and render one warning per variable or
    /// specializer that was seen exactly once, ordered by source offset.
    fn warnings(&mut self) -> Vec<String> {
        let mut singletons = self
            .singletons
            .drain()
            // `sym` is owned after `drain()`, so it can be moved straight
            // into the pair — the previous `sym.clone()` was a redundant
            // allocation per singleton.
            .filter_map(|(sym, singleton)| singleton.map(|term| (sym, term)))
            .collect::<Vec<(Symbol, Term)>>();
        singletons.sort_by_key(|(_sym, term)| term.offset());
        singletons
            .iter()
            .map(|(sym, term)| {
                // A pattern occurrence means an unknown specializer; anything
                // else is an unused/undefined variable.
                let mut msg = if let Value::Pattern(..) = term.value() {
                    let mut msg = format!("Unknown specializer {}", sym);
                    if let Some(t) = common_misspellings(&sym.0) {
                        msg.push_str(&format!(", did you mean {}?", t));
                    }
                    msg
                } else {
                    format!(
                        "Singleton variable {} is unused or undefined, \
                         see <https://docs.osohq.com/using/polar-syntax.html#variables>",
                        sym
                    )
                };
                // Append the offending source line when it is available.
                if let Some(ref source) = term
                    .get_source_id()
                    .and_then(|id| self.kb.sources.get_source(id))
                {
                    msg.push('\n');
                    msg.push_str(&source_lines(source, term.offset(), 0));
                }
                msg
            })
            .collect::<Vec<String>>()
    }
}
impl<'kb> Visitor for SingletonVisitor<'kb> {
    /// Track every candidate variable / specializer occurrence: the first
    /// sighting stores its term, any later sighting overwrites the entry
    /// with `None` so it is no longer reported as a singleton.
    fn visit_term(&mut self, t: &Term) {
        match t.value() {
            Value::Variable(v)
            | Value::RestVariable(v)
            | Value::Pattern(Pattern::Instance(InstanceLiteral { tag: v, .. }))
                // Temporaries, namespaced vars and known constants are exempt.
                if !v.is_temporary_var() && !v.is_namespaced_var() && !self.kb.is_constant(v) =>
            {
                match self.singletons.entry(v.clone()) {
                    Entry::Occupied(mut o) => {
                        o.insert(None);
                    }
                    Entry::Vacant(v) => {
                        v.insert(Some(t.clone()));
                    }
                }
            }
            // Non-variable terms: keep walking into subterms.
            _ => walk_term(self, t),
        }
    }
}
/// Run the singleton-variable / unknown-specializer lint over `rule` and
/// return its warning messages.
pub fn check_singletons(rule: &Rule, kb: &KnowledgeBase) -> Vec<String> {
    let mut lint = SingletonVisitor::new(kb);
    walk_rule(&mut lint, rule);
    lint.warnings()
}
|
use crate::bgp::{BgpEvent, BgpRoute};
use crate::event::{Event, EventQueue};
use crate::{AsId, DeviceError, NetworkDevice, Prefix, RouterId};
use std::collections::HashSet;
/// Router representing an external BGP peer: it can advertise and withdraw
/// routes towards its neighbors, and ignores incoming events
/// (see `handle_event`).
#[derive(Debug, Clone)]
pub struct ExternalRouter {
    name: &'static str,
    router_id: RouterId,
    as_id: AsId,
    // Peers that receive this router's BGP UPDATE/WITHDRAW messages.
    pub neighbors: HashSet<RouterId>,
}
impl NetworkDevice for ExternalRouter {
    /// Create a new NetworkDevice instance with no neighbors.
    fn new(name: &'static str, router_id: RouterId, as_id: AsId) -> Self {
        Self {
            name,
            router_id,
            as_id,
            neighbors: HashSet::new(),
        }
    }
    /// Handle an `Event` and produce the necessary result.
    /// Deliberately a no-op: this router type accepts events without
    /// reacting to them and never enqueues follow-up events.
    fn handle_event(&mut self, _event: Event, _queue: &mut EventQueue) -> Result<(), DeviceError> {
        Ok(())
    }
    /// Return the ID of the network device
    fn router_id(&self) -> RouterId {
        self.router_id
    }
    /// return the AS of the network device
    fn as_id(&self) -> AsId {
        self.as_id
    }
    /// Return the name of the network device
    fn name(&self) -> &'static str {
        self.name
    }
}
impl ExternalRouter {
    /// Send a BGP UPDATE to all neighbors advertising `prefix` reachable via
    /// `as_path` with the optional MED. External routes carry no local-pref.
    pub fn advertise_prefix(
        &self,
        prefix: Prefix,
        as_path: Vec<AsId>,
        med: Option<u32>,
        queue: &mut EventQueue,
    ) {
        let route = BgpRoute {
            prefix,
            as_path,
            next_hop: self.router_id,
            local_pref: None,
            med,
        };
        let bgp_event = BgpEvent::Update(route);
        for neighbor in self.neighbors.iter() {
            queue.push_back(Event::Bgp(self.router_id, *neighbor, bgp_event.clone()));
        }
    }

    /// Send a BGP WITHDRAW to all neighbors for the given prefix.
    pub fn withdraw_prefix(&self, prefix: Prefix, queue: &mut EventQueue) {
        for neighbor in self.neighbors.iter() {
            queue.push_back(Event::Bgp(
                self.router_id,
                *neighbor,
                BgpEvent::Withdraw(prefix),
            ));
        }
    }

    /// Misspelled alias of [`Self::withdraw_prefix`], kept so existing
    /// callers keep compiling; prefer the correctly-spelled method.
    pub fn widthdraw_prefix(&self, prefix: Prefix, queue: &mut EventQueue) {
        self.withdraw_prefix(prefix, queue)
    }
}
|
#[macro_use]
extern crate csv;
use std::io;
use std::io::prelude::*;
use std::path::Path;
use std::vec::Vec;
use std::fs::File;
/// Read a PCA-reduced feature CSV and write an edge list connecting every
/// pair of rows whose squared Euclidean distance is below a cutoff.
fn main() {
    // Read the file contents.
    let filename = "/home/will/SideProjects/hackcambridge2017/DataFiles/train_PCA_25_whole_set.csv";
    let path = Path::new(&filename);
    let mut rdr = csv::Reader::from_file(path).unwrap().has_headers(true);

    // One Vec<f32> per CSV row; rows containing any empty field are skipped
    // entirely via the labelled `continue`.
    let mut records: Vec<Vec<f32>> = Vec::new();
    'inner: for row in rdr.records() {
        let row = row.unwrap();
        let mut record: Vec<f32> = Vec::new();
        for item in row.iter() {
            if item.is_empty() {
                continue 'inner;
            }
            record.push(item.trim().parse::<f32>().unwrap());
        }
        records.push(record);
    }

    // Emit an undirected edge (1-based indices) for every pair closer than
    // `cutoff` in squared distance; only the lower triangle is scanned so
    // each pair is written once.
    let cutoff: f32 = 0.01;
    let output = format!("edgelist{}_PCA_wholeset.dat", cutoff);
    let output = Path::new(&output);
    let display = output.display();
    let f = match File::create(&output) {
        Ok(file) => file,
        Err(e) => panic!("couldn't create {}: {}", display, e),
    };
    let mut writer = io::BufWriter::new(&f);
    for (i, row) in records.iter().enumerate() {
        for (j, row2) in records[0..i].iter().enumerate() {
            if get_distance_squared(row, row2) < cutoff {
                // The original discarded the write Result, silently dropping
                // edges on I/O errors; fail loudly instead.
                writeln!(&mut writer, "{} {}", i + 1, j + 1).expect("failed to write edge");
            }
        }
    }
}
/// Squared Euclidean distance between two equal-length feature vectors.
///
/// Generalized from `&Vec<f32>` to `&[f32]`; existing `&Vec<f32>` call sites
/// keep working via deref coercion. Lengths are expected to match — checked
/// in debug builds.
fn get_distance_squared(row: &[f32], row2: &[f32]) -> f32 {
    debug_assert_eq!(row.len(), row2.len(), "feature vectors must be equal length");
    row.iter()
        .zip(row2.iter())
        .map(|(a, b)| (a - b).powi(2))
        .sum()
}
|
#![feature(core)]
/// Sum of the squares of the decimal digits of `n`.
fn sumsqd(n: i32) -> i32 {
    let mut remaining = n;
    let mut total = 0;
    while remaining > 0 {
        total += (remaining % 10).pow(2);
        remaining /= 10;
    }
    total
}
use std::num::Int;
/// Find a value inside the cycle reached by iterating `f` from `a`, using
/// Floyd's tortoise-and-hare algorithm: returns the first value at which the
/// slow and the double-speed iterate meet.
///
/// The bound is relaxed from the long-removed `std::num::Int` trait to the
/// minimal requirements `Copy + PartialEq`, which also makes the helper work
/// for any copyable, comparable type — not just integers.
fn cycle<T: Copy + PartialEq>(a: T, f: fn(T) -> T) -> T {
    let mut tortoise = a;
    let mut hare = f(a);
    while tortoise != hare {
        tortoise = f(tortoise);
        hare = f(f(hare));
    }
    tortoise
}
/// A number is "happy" when repeatedly summing its squared digits eventually
/// reaches 1. The squared-digit iteration always falls into a cycle, so `n`
/// is happy iff that cycle is the fixed point 1.
fn ishappy(n: i32) -> bool {
    cycle(n, sumsqd) == 1
}
fn main() {
let happy = std::iter::count(1, 1)
.filter(|&n| ishappy(n))
.take(8)
.collect::<Vec<i32>>();
println!("{:?}", happy)
}
|
mod register;
mod command;
mod respawn;
pub use self::register::register;
pub use self::command::CommandHandler;
|
use super::*;
use crate::helpers::models::domain::get_customer_ids_from_routes;
use crate::helpers::solver::{create_default_refinement_ctx, generate_matrix_routes_with_defaults};
use crate::helpers::utils::create_test_environment_with_random;
use crate::helpers::utils::random::FakeRandom;
use crate::models::common::IdDimension;
use std::sync::Arc;
/// Assert that `left` matches `right`, treating the id `"cX"` on either side
/// as a wildcard for a single position. Any mismatch (shape or value)
/// triggers a full `assert_eq!(left, right)` so the failure output shows
/// both complete structures.
fn compare_ids_with_ignore(left: Vec<Vec<String>>, right: Vec<Vec<&str>>) {
    if left.len() != right.len() {
        assert_eq!(left, right);
    }
    for (l_route, r_route) in left.iter().zip(right.iter()) {
        if l_route.len() != r_route.len() {
            assert_eq!(left, right);
        }
        for (l_id, r_id) in l_route.iter().zip(r_route.iter()) {
            // "cX" on either side accepts any id at this position.
            if l_id != "cX" && *r_id != "cX" && l_id != r_id {
                assert_eq!(left, right);
            }
        }
    }
}
/// Add the jobs whose ids appear in `job_ids` to the solution's `locked` set
/// and return the modified context.
fn extend_with_locked(mut ctx: InsertionContext, job_ids: &[&str]) -> InsertionContext {
    let ids = ctx.problem.jobs.all().filter(|job| job_ids.contains(&job.dimens().get_id().unwrap().as_str()));
    ctx.solution.locked.extend(ids);
    ctx
}
// Parameterized test: each case picks a seed route/job for the fake random,
// optionally locks some jobs, and states the expected route ids afterwards
// ("cX" entries are wildcards — see `compare_ids_with_ignore`).
parameterized_test! {can_use_exchange_inter_route_best_operator, (seed_route, seed_job, locked_ids, expected_ids), {
can_use_exchange_inter_route_best_operator_impl(seed_route, seed_job, locked_ids, expected_ids);
}}
// Cases 05-07 verify that locked jobs stay in their routes.
can_use_exchange_inter_route_best_operator! {
case_01: (0, 2, &[], vec![vec!["c0", "c2", "c3"], vec!["c1", "c4", "c5"], vec!["c6", "c7", "c8"]]),
case_02: (2, 3, &[], vec![vec!["c0", "c1", "c2"], vec!["cX", "cX", "c8"], vec!["cX", "c6", "c7"]]),
case_03: (1, 2, &[], vec![vec!["c0", "c1", "c2"], vec!["c3", "c5", "c6"], vec!["c4", "c7", "c8"]]),
case_04: (1, 3, &[], vec![vec!["c0", "c1", "c2"], vec!["c3", "c4", "c6"], vec!["c5", "c7", "c8"]]),
case_05: (2, 3, &["c3", "c4"], vec![vec!["c0", "c1", "c2"], vec!["c3", "c4", "c8"], vec!["c5", "c6", "c7"]]),
case_06: (2, 1, &["c4", "c5"], vec![vec!["c0", "c1", "c2"], vec!["c4", "c5", "c6"], vec!["c3", "c7", "c8"]]),
case_07: (1, 1, &["c0", "c1", "c2"], vec![vec!["c0", "c1", "c2"], vec!["c4", "c5", "c6"], vec!["c3", "c7", "c8"]]),
}
/// Drive `ExchangeInterRouteBest` over a generated 3x3 matrix problem.
/// `seed_route`/`seed_job` feed the fake random's integer sequence (choosing
/// the seed), `locked_ids` marks jobs that must not move, and the resulting
/// customer ids per route are compared against `expected_ids`.
fn can_use_exchange_inter_route_best_operator_impl(
    seed_route: i32,
    seed_job: i32,
    locked_ids: &[&str],
    expected_ids: Vec<Vec<&str>>,
) {
    let matrix = (3, 3);
    // Ints drive the fake random's int draws; reals are a pool of 1s large
    // enough for the whole exploration.
    let ints = vec![seed_route, seed_job];
    let reals = vec![1.; 128];
    let (problem, solution) = generate_matrix_routes_with_defaults(matrix.0, matrix.1, true);
    let insertion_ctx = extend_with_locked(
        InsertionContext::new_from_solution(
            Arc::new(problem),
            (solution, None),
            create_test_environment_with_random(Arc::new(FakeRandom::new(ints, reals))),
        ),
        locked_ids,
    );
    let new_insertion_ctx = ExchangeInterRouteBest::default()
        .explore(&create_default_refinement_ctx(insertion_ctx.problem.clone()), &insertion_ctx)
        .expect("cannot find new solution");
    compare_ids_with_ignore(get_customer_ids_from_routes(&new_insertion_ctx), expected_ids);
}
|
use html_extractor::HtmlExtractor;
// End-to-end check of the `html_extractor!`-generated `TestData`: extract
// every field kind (nested elem, text, attribute, collections, regex
// captures, optionals, inner_html, custom parsers, presence) from one HTML
// document and compare against the fully expected struct.
#[test]
fn test() {
    // The fixture HTML exercises each extractor declaration below; the
    // `%%%...%%%` markers are consumed by the `capture with` regexes.
    let data = TestData::extract_from_str(
        r#"
<div id="data1">
<div class="data1-1">1</div>
</div>
<div id="data2">2</div>
<div id="data3" data-3="3"></div>
<div id="data4">
<div>
<div class="data1-1">1</div>
</div>
<div>
<div class="data1-1">2</div>
</div>
<div>
<div class="data1-1">3</div>
</div>
<div>
<div class="data1-1">4</div>
</div>
</div>
<div id="data5">
<div>1</div>
<div>2</div>
<div>3</div>
<div>4</div>
</div>
<div id="data6">
<div data-6="1"></div>
<div data-6="2"></div>
<div data-6="3"></div>
<div data-6="4"></div>
</div>
<div id="data7">%%%7%%%</div>
<div id="data8" data-8="%%%8%%%"></div>
<div id="data9">
<div>ignore<br />%%%1%%%5%%%</div>
<div>ignore<br />%%%2%%%6%%%</div>
<div>ignore<br />%%%3%%%7%%%</div>
<div>ignore<br />%%%4%%%8%%%</div>
</div>
<div id="data10">
<div data-10="%%%1%%%5%%%"></div>
<div data-10="%%%2%%%6%%%"></div>
<div data-10="%%%3%%%7%%%"></div>
<div data-10="%%%4%%%8%%%"></div>
</div>
<div id="data11">ignore<br />ignore<br />%%%7%%%27%%%</div>
<div id="data12" data-12="%%%8%%%18%%%46%%%"></div>
<div id="data13">
<div>%%%1%%%5%%%9%%%13%%%</div>
<div>%%%2%%%6%%%10%%%14%%%</div>
<div>%%%3%%%7%%%11%%%15%%%</div>
<div>%%%4%%%8%%%12%%%16%%%</div>
</div>
<div id="data14">
<div data-14="%%%1%%%5%%%9%%%13%%%17%%%"></div>
<div data-14="%%%2%%%6%%%10%%%14%%%18%%%"></div>
<div data-14="%%%3%%%7%%%11%%%15%%%19%%%"></div>
<div data-14="%%%4%%%8%%%12%%%16%%%20%%%"></div>
</div>
<div id="data15">
inner<br>html
</div>
<div id="data16"><</div>
"#,
    )
    .unwrap();
    // Every field must come out exactly as declared; the `none*` fields have
    // no matching element and must extract as `None`.
    assert_eq!(
        data,
        TestData {
            data1: InnerData { data1_1: 1 },
            data2: 2,
            data3: 3,
            data4: vec![
                InnerData { data1_1: 1 },
                InnerData { data1_1: 2 },
                InnerData { data1_1: 3 },
                InnerData { data1_1: 4 }
            ],
            data5: vec![1, 2, 3, 4],
            data6: vec![1, 2, 3, 4],
            data7: 7,
            data8: 8,
            data9: vec![(1, 5), (2, 6), (3, 7), (4, 8)],
            data10: vec![(1, 5), (2, 6), (3, 7), (4, 8)],
            data11_1: 7,
            data11_2: 27,
            data12_1: 8,
            data12_2: 18,
            data12_3: 46,
            data13: vec![
                (1, 5, 9, 13),
                (2, 6, 10, 14),
                (3, 7, 11, 15),
                (4, 8, 12, 16)
            ],
            data14: vec![
                (1, 5, 9, 13, 17),
                (2, 6, 10, 14, 18),
                (3, 7, 11, 15, 19),
                (4, 8, 12, 16, 20)
            ],
            optional_data1: Some(InnerData { data1_1: 1 }),
            optional_data2: Some(2),
            optional_data3: Some(3),
            optional_data7: Some((7,)),
            optional_data8: Some((8,)),
            optional_data11: Some((7, 27)),
            optional_data12: Some((8, 18, 46)),
            none1: None,
            none2: None,
            none3: None,
            none4: None,
            none5: None,
            none6: None,
            none7: None,
            data15: "inner<br>html".to_owned(),
            data16_1: std::cmp::Ordering::Less,
            data16_2: std::cmp::Ordering::Less,
            presence_of_data16: true,
        }
    );
}
// Declarative extractor definitions consumed by the `html_extractor!` macro.
// Each line maps a struct field to a CSS selector plus an extraction mode
// (elem / text / attr / capture-with-regex / optional / collect / presence).
// Comments are kept outside the macro invocation so its custom parsing is
// untouched.
html_extractor::html_extractor! {
#[derive(Debug, PartialEq)]
pub TestData {
pub(crate) data1: InnerData = (elem of "#data1"),
pub(super) data2: usize = (text of "#data2"),
pub data3: usize = (attr["data-3"] of "#data3"),
data4: Vec<InnerData> = (elem of "#data4 > div", collect),
data5: Vec<usize> = (text of "#data5 > div", collect),
data6: Vec<usize> = (attr["data-6"] of "#data6 > div", collect),
(data7: usize,) = (text of "#data7", capture with "%%%(.*)%%%"),
(data8: usize,) = (attr["data-8"] of "#data8", capture with "%%%(.*)%%%"),
data9: Vec<(usize, usize)> = (text[1] of "#data9 > div", capture with "%%%(.*)%%%(.*)%%%", collect),
data10: Vec<(usize, usize)> = (attr ["data-10"] of "#data10 > div", capture with "%%%(.*)%%%(.*)%%%", collect),
(data11_1: usize, data11_2: usize) = (text[2] of "#data11", capture with "%%%(.*)%%%(.*)%%%"),
(data12_1: usize, data12_2: usize, data12_3: usize) = (attr["data-12"] of "#data12", capture with "%%%(.*)%%%(.*)%%%(.*)%%%"),
data13: Vec<(usize, usize, usize, usize)> = (text of "#data13 > div", capture with "%%%(.*)%%%(.*)%%%(.*)%%%(.*)%%%", collect),
data14: Vec<(usize, usize, usize, usize, usize)> = (attr["data-14"] of "#data14 > div", capture with "%%%(.*)%%%(.*)%%%(.*)%%%(.*)%%%(.*)%%%", collect),
optional_data1: Option<InnerData> = (elem of "#data1", optional),
optional_data2: Option<usize> = (text of "#data2", optional),
optional_data3: Option<usize> = (attr["data-3"] of "#data3", optional),
optional_data7: Option<(usize,)> = (text of "#data7", capture with "%%%(.*)%%%", optional),
optional_data8: Option<(usize,)> = (attr["data-8"] of "#data8", capture with "%%%(.*)%%%", optional),
optional_data11: Option<(usize, usize)> = (text[2] of "#data11", capture with "%%%(.*)%%%(.*)%%%", optional),
optional_data12: Option<(usize, usize, usize)> = (attr["data-12"] of "#data12", capture with "%%%(.*)%%%(.*)%%%", optional),
none1: Option<usize> = (text of "#none", optional),
none2: Option<usize> = (attr["none"] of "#none", optional),
none3: Option<InnerData> = (elem of "#none", optional),
none4: Option<(usize,)> = (text of "#none", capture with "(none)", optional),
none5: Option<(usize,)> = (attr["none"] of "#none", capture with "(none)", optional),
none6: Option<usize> = (text[3] of "#none", optional),
none7: Option<(usize,)> = (text[3] of "#none", capture with "(none)", optional),
data15: String = (inner_html of "#data15"),
data16_1: std::cmp::Ordering = (text of "#data16", parse with custom_parser),
data16_2: std::cmp::Ordering = (text of "#data16", parse with |input| match input {
">" => Ok(std::cmp::Ordering::Greater),
"<" => Ok(std::cmp::Ordering::Less),
"=" => Ok(std::cmp::Ordering::Equal),
_ => Err(())
}),
presence_of_data16: bool = (presence of "#data16"),
}
#[derive(Debug, PartialEq)]
pub(crate) InnerData {
data1_1: usize = (text of ".data1-1")
}
}
/// Maps a comparison glyph to the matching `std::cmp::Ordering`:
/// `">"` → `Greater`, `"<"` → `Less`, `"="` → `Equal`.
/// Any other input is rejected with `Err(())`.
fn custom_parser(input: &str) -> Result<std::cmp::Ordering, ()> {
    use std::cmp::Ordering;
    match input {
        ">" => Ok(Ordering::Greater),
        "<" => Ok(Ordering::Less),
        "=" => Ok(Ordering::Equal),
        _ => Err(()),
    }
}
|
/*
* B-tree set test (Rust)
*
* Copyright (c) 2020 Project Nayuki. (MIT License)
* https://www.nayuki.io/page/btree-set
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
* - The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* - The Software is provided "as is", without warranty of any kind, express or
* implied, including but not limited to the warranties of merchantability,
* fitness for a particular purpose and noninfringement. In no event shall the
* authors or copyright holders be liable for any claim, damages or other
* liability, whether in an action of contract, tort or otherwise, arising from,
* out of or in connection with the Software or the use or other dealings in the
* Software.
*/
use std::collections::HashSet;
extern crate rand;
use rand::Rng;
use rand::distributions::IndependentSample;
use rand::distributions::range::Range;
mod btreeset;
use btreeset::BTreeSet;
/// Runs every randomized comparison test in sequence; each test panics
/// on the first mismatch, so reaching the final line means all passed.
fn main() {
    test_small_randomly();
    test_insert_randomly();
    test_large_randomly();
    test_remove_all_randomly();
    test_iterator_randomly();
    println!("Test passed");
}
/// Many short trials over a small value range: random insert/remove/clear
/// operations are mirrored into a `HashSet` (reference) and the `BTreeSet`
/// under test; tree invariants are validated after every operation and
/// membership is compared across the whole value range.
fn test_small_randomly() {
    let trials = 1000;
    let operations = 100;
    let range = 1000;
    let rng = &mut rand::thread_rng();
    // Randomize the B-tree degree per trial to cover several node widths.
    let degreedist = Range::new(2usize, 7);
    let valuedist = Range::new(0i32, range);
    for _ in 0 .. trials {
        let mut set0 = HashSet::<i32>::new();
        let mut set1 = BTreeSet::<i32>::new(degreedist.ind_sample(rng));
        for _ in 0 .. operations {
            // Add/remove a random value
            let val: i32 = valuedist.ind_sample(rng);
            if rng.next_f64() < 0.001 {
                // Rarely wipe both sets to exercise the empty state.
                set0.clear();
                set1.clear();
            } else if rng.next_f64() < 0.5 {
                // Both inserts must agree on whether the value was new.
                assert_eq!(set0.insert(val), set1.insert(val));
            } else {
                assert_eq!(set0.remove(&val), set1.remove(&val));
            }
            set1.check_structure();
            // Check size and check element membership over entire range
            assert_eq!(set0.is_empty(), set1.is_empty());
            assert_eq!(set0.len(), set1.len());
            // Probe slightly past both ends of the value range as well.
            for k in -4 .. range + 4 {
                assert_eq!(set0.contains(&k), set1.contains(&k));
            }
        }
    }
}
/// Insert-only trials against a degree-2 B-tree: every random insertion is
/// mirrored into a reference `HashSet`; sizes must match after each step
/// and membership is spot-checked with random probes. Structure validation
/// is sampled (~0.3%) because it is expensive.
fn test_insert_randomly() {
    let trials = 100;
    let operations = 10_000;
    let range = 100_000;
    let checks = 10;
    let rng = &mut rand::thread_rng();
    let valuedist = Range::new(0i32, range);
    for _ in 0 .. trials {
        let mut reference = HashSet::<i32>::new();
        let mut subject = BTreeSet::<i32>::new(2);
        for _ in 0 .. operations {
            // Insert the same random value into both; results must agree.
            let val: i32 = valuedist.ind_sample(rng);
            assert_eq!(reference.insert(val), subject.insert(val));
            if rng.next_f64() < 0.003 {
                subject.check_structure();
            }
            // Sizes must match; spot-check membership of random values.
            assert_eq!(reference.len(), subject.len());
            for _ in 0 .. checks {
                let probe: i32 = valuedist.ind_sample(rng);
                assert_eq!(reference.contains(&probe), subject.contains(&probe));
            }
        }
    }
}
/// Long mixed insert/remove trials over a large value range. Structure
/// validation is sampled (~0.1%) because it is expensive at this size;
/// sizes and random membership probes are checked after every operation.
fn test_large_randomly() {
    let trials = 100;
    let operations = 30_000;
    let range = 100_000;
    let checks = 10;
    let rng = &mut rand::thread_rng();
    // Randomize the B-tree degree per trial to cover several node widths.
    let degreedist = Range::new(2usize, 7);
    let valuedist = Range::new(0i32, range);
    for _ in 0 .. trials {
        let mut set0 = HashSet::<i32>::new();
        let mut set1 = BTreeSet::<i32>::new(degreedist.ind_sample(rng));
        for _ in 0 .. operations {
            // Add/remove a random value
            let val: i32 = valuedist.ind_sample(rng);
            if rng.next_f64() < 0.5 {
                assert_eq!(set0.insert(val), set1.insert(val));
            } else {
                assert_eq!(set0.remove(&val), set1.remove(&val));
            }
            if rng.next_f64() < 0.001 {
                set1.check_structure();
            }
            // Check size and random element membership
            assert_eq!(set0.len(), set1.len());
            for _ in 0 .. checks {
                let val: i32 = valuedist.ind_sample(rng);
                assert_eq!(set0.contains(&val), set1.contains(&val));
            }
        }
    }
}
/// Fills both sets with random values, then removes every element in a
/// shuffled order, validating structure, size, and membership as the
/// tree shrinks down to empty.
fn test_remove_all_randomly() {
    let trials = 100;
    let limit = 10_000;
    let range = 100_000;
    let checks = 10;
    let rng = &mut rand::thread_rng();
    let degreedist = Range::new(2usize, 7);
    let valuedist = Range::new(0i32, range);
    for _ in 0 .. trials {
        // Create sets and add all values
        let mut set0 = HashSet::<i32>::new();
        let mut set1 = BTreeSet::<i32>::new(degreedist.ind_sample(rng));
        for _ in 0 .. limit {
            let val: i32 = valuedist.ind_sample(rng);
            assert_eq!(set0.insert(val), set1.insert(val));
        }
        set1.check_structure();
        // Remove each value in random order
        let mut list: Vec<i32> = set0.iter().cloned().collect();
        rng.shuffle(&mut list);
        for val in list {
            assert_eq!(set0.remove(&val), set1.remove(&val));
            // Validate more often as the set shrinks. When it is empty,
            // 1.0/0.0 is +inf, so the final state is always validated.
            if rng.next_f64() < (1.0 / (set1.len() as f64)).max(0.001) {
                set1.check_structure();
            }
            assert_eq!(set0.len(), set1.len());
            for _ in 0 .. checks {
                let val: i32 = valuedist.ind_sample(rng);
                assert_eq!(set0.contains(&val), set1.contains(&val));
            }
        }
    }
}
/// Exercises the set's iterator: after random insertions (and again after
/// removing a random subset) the iterated elements must equal the
/// reference `HashSet` exactly.
fn test_iterator_randomly() {
    let trials = 10_000;
    let operations = 1000;
    let range = 10_000;
    let rng = &mut rand::thread_rng();
    let degreedist = Range::new(2usize, 7);
    let operdist = Range::new(0usize, operations);
    let valuedist = Range::new(0i32, range);
    for _ in 0 .. trials {
        // Create sets and add all values
        let mut set0 = HashSet::<i32>::new();
        let mut set1 = BTreeSet::<i32>::new(degreedist.ind_sample(rng));
        let numinsert = operdist.ind_sample(rng);
        for _ in 0 .. numinsert {
            let val: i32 = valuedist.ind_sample(rng);
            assert_eq!(set0.insert(val), set1.insert(val));
        }
        // NOTE(review): despite the name, this `into_iter` appears to be a
        // borrowing iterator over `&i32` (note `.cloned()` and the
        // continued use of `set1` afterwards) — confirm in btreeset.rs.
        assert_eq!(set0, set1.into_iter().cloned().collect::<HashSet<i32>>());
        // Remove a random subset
        let mut list: Vec<i32> = set1.into_iter().cloned().collect();
        rng.shuffle(&mut list);
        let numremove = Range::new(0usize, list.len() + 1).ind_sample(rng);
        for val in &list[ .. numremove] {
            assert_eq!(set0.remove(val), set1.remove(val));
        }
        assert_eq!(set0, set1.into_iter().cloned().collect::<HashSet<i32>>());
    }
}
|
use std::convert::TryInto;
use meilidb_core::DocumentId;
use meilidb_schema::SchemaAttr;
use rocksdb::DBVector;
use crate::database::raw_index::InnerRawIndex;
use crate::document_attr_key::DocumentAttrKey;
/// View over the raw-index column that stores each document's attribute
/// values, keyed by a big-endian `(document id, attribute)` encoding so
/// that lexicographic key order groups a document's fields together.
#[derive(Clone)]
pub struct DocumentsIndex(pub(crate) InnerRawIndex);
impl DocumentsIndex {
    /// Fetches the raw bytes stored for one `(document, attribute)` pair,
    /// or `Ok(None)` if that field is absent.
    pub fn document_field(&self, id: DocumentId, attr: SchemaAttr) -> Result<Option<DBVector>, rocksdb::Error> {
        let key = DocumentAttrKey::new(id, attr).to_be_bytes();
        self.0.get(key)
    }

    /// Stores `value` under the `(document, attribute)` key, overwriting
    /// any previous value.
    pub fn set_document_field(&self, id: DocumentId, attr: SchemaAttr, value: Vec<u8>) -> Result<(), rocksdb::Error> {
        let key = DocumentAttrKey::new(id, attr).to_be_bytes();
        self.0.set(key, value)?;
        Ok(())
    }

    /// Removes a single field of a document.
    pub fn del_document_field(&self, id: DocumentId, attr: SchemaAttr) -> Result<(), rocksdb::Error> {
        let key = DocumentAttrKey::new(id, attr).to_be_bytes();
        self.0.delete(key)?;
        Ok(())
    }

    /// Removes all stored fields of document `id` with one range delete,
    /// from `SchemaAttr::min()` to `SchemaAttr::max()`.
    ///
    /// NOTE(review): rocksdb range deletes are typically end-exclusive,
    /// which would leave an entry at exactly `SchemaAttr::max()` in
    /// place — confirm the semantics of `InnerRawIndex::delete_range`.
    pub fn del_all_document_fields(&self, id: DocumentId) -> Result<(), rocksdb::Error> {
        let start = DocumentAttrKey::new(id, SchemaAttr::min()).to_be_bytes();
        let end = DocumentAttrKey::new(id, SchemaAttr::max()).to_be_bytes();
        self.0.delete_range(start, end)?;
        Ok(())
    }

    /// Iterates over all `(attribute, value)` pairs of document `id` in
    /// key order; the returned iterator stops itself at the `end` key.
    pub fn document_fields(&self, id: DocumentId) -> DocumentFieldsIter {
        let start = DocumentAttrKey::new(id, SchemaAttr::min()).to_be_bytes();
        let end = DocumentAttrKey::new(id, SchemaAttr::max()).to_be_bytes();
        let from = rocksdb::IteratorMode::From(&start[..], rocksdb::Direction::Forward);
        // NOTE(review): iterator creation errors panic here rather than
        // being surfaced to the caller — confirm that is intended.
        let iter = self.0.iterator(from).unwrap();
        DocumentFieldsIter(iter, end.to_vec())
    }

    /// Counts distinct documents by scanning every key and counting runs
    /// of equal document ids (keys are sorted, and the id is the leading,
    /// big-endian part of the key). O(total number of stored fields).
    pub fn len(&self) -> Result<usize, rocksdb::Error> {
        let mut last_document_id = None;
        let mut count = 0;
        let from = rocksdb::IteratorMode::Start;
        let iterator = self.0.iterator(from)?;
        for (key, _) in iterator {
            // Keys in this column have a fixed byte length, so the
            // conversion to a fixed-size array is expected to succeed.
            let slice = key.as_ref().try_into().unwrap();
            let document_id = DocumentAttrKey::from_be_bytes(slice).document_id;
            if Some(document_id) != last_document_id {
                last_document_id = Some(document_id);
                count += 1;
            }
        }
        Ok(count)
    }
}
/// Iterator over one document's `(attribute, value)` entries. Field 0 is
/// the underlying rocksdb iterator; field 1 is the big-endian upper-bound
/// key (inclusive) past which iteration stops.
pub struct DocumentFieldsIter<'a>(rocksdb::DBIterator<'a>, Vec<u8>);
impl<'a> Iterator for DocumentFieldsIter<'a> {
    type Item = Result<(SchemaAttr, Box<[u8]>), rocksdb::Error>;

    /// Yields the next `(attribute, value)` pair, or `None` once the
    /// underlying iterator is exhausted or a key beyond the stored
    /// (inclusive) upper bound is reached.
    fn next(&mut self) -> Option<Self::Item> {
        let (key, value) = self.0.next()?;
        // Keys are ordered, so anything past the bound belongs to a
        // different document.
        if key.as_ref() > self.1.as_slice() {
            return None;
        }
        let array = key.as_ref().try_into().unwrap();
        let attr_key = DocumentAttrKey::from_be_bytes(array);
        Some(Ok((attr_key.attribute, value)))
    }
}
|
use crate::AppendToUrlQuery;
/// Borrowed value for the `delimiter` query-string parameter of list
/// operations.
#[derive(Debug, Clone)]
pub struct Delimiter<'a>(&'a str);
impl<'a> Delimiter<'a> {
pub fn new(delimiter: &'a str) -> Self {
Self(delimiter)
}
}
impl<'a> AppendToUrlQuery for Delimiter<'a> {
    // Serializes as `?delimiter=<value>` on the request URL; the value is
    // percent-encoded by `query_pairs_mut`.
    fn append_to_url_query(&self, url: &mut url::Url) {
        url.query_pairs_mut().append_pair("delimiter", self.0);
    }
}
impl<'a> From<&'a str> for Delimiter<'a> {
fn from(delimiter: &'a str) -> Self {
Self(delimiter)
}
}
|
#![cfg(all(test, feature = "test_e2e"))]
use azure_core::Context;
use azure_cosmos::prelude::{CreateDocumentOptions, DeleteDatabaseOptions, GetDocumentOptions};
use serde::{Deserialize, Serialize};
mod setup;
use azure_core::prelude::*;
use azure_cosmos::prelude::*;
use collection::*;
/// Minimal document payload used by the e2e tests in this file.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct MyDocument {
    // Doubles as the partition key (the collections are created with
    // partition key path "/id").
    id: String,
    hello: u32,
}
/// The test collections use partition key path "/id", so the document's
/// own id is returned as its partition key.
impl<'a> azure_cosmos::CosmosEntity<'a> for MyDocument {
    type Entity = &'a str;
    fn partition_key(&'a self) -> Self::Entity {
        self.id.as_ref()
    }
}
/// End-to-end test against a live Cosmos account: create a database and
/// collection, insert one document, read it back, delete it (verifying
/// the collection is empty again), then drop the test database.
#[tokio::test]
async fn create_and_delete_document() {
    const DATABASE_NAME: &str = "test-cosmos-db-create-and-delete-document";
    const COLLECTION_NAME: &str = "test-collection-create-and-delete-document";
    const DOCUMENT_NAME: &str = "test-document-name-create-and-delete-document";
    let client = setup::initialize().unwrap();
    client
        .create_database(
            azure_core::Context::new(),
            DATABASE_NAME,
            CreateDatabaseOptions::new(),
        )
        .await
        .unwrap();
    let database_client = client.into_database_client(DATABASE_NAME);
    // create a new collection
    let indexing_policy = IndexingPolicy {
        automatic: true,
        indexing_mode: IndexingMode::Consistent,
        included_paths: vec![],
        excluded_paths: vec![],
    };
    // Partition key "/id" matches MyDocument's CosmosEntity impl.
    let options = CreateCollectionOptions::new("/id")
        .offer(Offer::Throughput(400))
        .indexing_policy(indexing_policy);
    database_client
        .create_collection(Context::new(), COLLECTION_NAME, options)
        .await
        .unwrap();
    let collection_client = database_client
        .clone()
        .into_collection_client(COLLECTION_NAME);
    // create a new document
    let document_data = MyDocument {
        id: DOCUMENT_NAME.to_owned(),
        hello: 42,
    };
    collection_client
        .create_document(Context::new(), &document_data, CreateDocumentOptions::new())
        .await
        .unwrap();
    // Listing must now show exactly the one document we created.
    let documents = collection_client
        .list_documents()
        .execute::<MyDocument>()
        .await
        .unwrap()
        .documents;
    assert!(documents.len() == 1);
    // try to get the contents of the previously created document
    let document_client = collection_client
        .clone()
        .into_document_client(DOCUMENT_NAME, &DOCUMENT_NAME)
        .unwrap();
    let document_after_get = document_client
        .get_document::<MyDocument>(Context::new(), GetDocumentOptions::new())
        .await
        .unwrap();
    if let GetDocumentResponse::Found(document) = document_after_get {
        assert_eq!(document.document.document, document_data);
    } else {
        panic!("document not found");
    }
    // delete document
    document_client
        .delete_document(Context::new(), DeleteDocumentOptions::new())
        .await
        .unwrap();
    // The collection must be empty again after the delete.
    let documents = collection_client
        .list_documents()
        .execute::<MyDocument>()
        .await
        .unwrap()
        .documents;
    assert!(documents.len() == 0);
    // Clean up: dropping the database removes the collection as well.
    database_client
        .delete_database(Context::new(), DeleteDatabaseOptions::new())
        .await
        .unwrap();
}
/// End-to-end test: insert one document, then verify that a
/// cross-partition `SELECT * FROM c` query returns exactly that document
/// (same rid and same deserialized payload).
#[tokio::test]
async fn query_documents() {
    const DATABASE_NAME: &str = "test-cosmos-db-query-documents";
    const COLLECTION_NAME: &str = "test-collection-query-documents";
    const DOCUMENT_NAME: &str = "test-document-name-query-documents";
    let client = setup::initialize().unwrap();
    client
        .create_database(
            azure_core::Context::new(),
            DATABASE_NAME,
            CreateDatabaseOptions::new(),
        )
        .await
        .unwrap();
    let database_client = client.into_database_client(DATABASE_NAME);
    // create a new collection
    let indexing_policy = IndexingPolicy {
        automatic: true,
        indexing_mode: IndexingMode::Consistent,
        included_paths: vec![],
        excluded_paths: vec![],
    };
    let options = CreateCollectionOptions::new("/id")
        .indexing_policy(indexing_policy)
        .offer(Offer::S2);
    database_client
        .create_collection(Context::new(), COLLECTION_NAME, options)
        .await
        .unwrap();
    let collection_client = database_client
        .clone()
        .into_collection_client(COLLECTION_NAME);
    // create a new document
    let document_data = MyDocument {
        id: DOCUMENT_NAME.to_owned(),
        hello: 42,
    };
    collection_client
        .create_document(Context::new(), &document_data, CreateDocumentOptions::new())
        .await
        .unwrap();
    let documents = collection_client
        .list_documents()
        .execute::<MyDocument>()
        .await
        .unwrap()
        .documents;
    assert!(documents.len() == 1);
    // now query all documents and see if we get the correct result
    let query_result = collection_client
        .query_documents()
        .query_cross_partition(true)
        .execute::<MyDocument, _>("SELECT * FROM c")
        .await
        .unwrap()
        .into_documents()
        .unwrap()
        .results;
    // The query result must line up with the listing: one document, same
    // resource id, same payload.
    assert!(query_result.len() == 1);
    assert!(query_result[0].document_attributes.rid() == documents[0].document_attributes.rid());
    assert_eq!(query_result[0].result, document_data);
    // Clean up: dropping the database removes the collection as well.
    database_client
        .delete_database(Context::new(), DeleteDatabaseOptions::new())
        .await
        .unwrap();
}
/// End-to-end test: create one document, replace it using optimistic
/// concurrency (etag `If-Match`) and session consistency, then read it
/// back and verify the updated field value.
#[tokio::test]
async fn replace_document() {
    const DATABASE_NAME: &str = "test-cosmos-db-replace-documents";
    const COLLECTION_NAME: &str = "test-collection-replace-documents";
    const DOCUMENT_NAME: &str = "test-document-name-replace-documents";
    let client = setup::initialize().unwrap();
    client
        .create_database(
            azure_core::Context::new(),
            DATABASE_NAME,
            CreateDatabaseOptions::new(),
        )
        .await
        .unwrap();
    let database_client = client.into_database_client(DATABASE_NAME);
    // create a new collection
    let indexing_policy = IndexingPolicy {
        automatic: true,
        indexing_mode: IndexingMode::Consistent,
        included_paths: vec![],
        excluded_paths: vec![],
    };
    let options = CreateCollectionOptions::new("/id")
        .indexing_policy(indexing_policy)
        .offer(Offer::S2);
    database_client
        .create_collection(Context::new(), COLLECTION_NAME, options)
        .await
        .unwrap();
    let collection_client = database_client
        .clone()
        .into_collection_client(COLLECTION_NAME);
    // create a new document
    let mut document_data = MyDocument {
        id: DOCUMENT_NAME.to_owned(),
        hello: 42,
    };
    collection_client
        .create_document(Context::new(), &document_data, CreateDocumentOptions::new())
        .await
        .unwrap();
    // The whole response is kept (not just `.documents`) because it also
    // supplies the session token and the etag used for the replace below.
    let documents = collection_client
        .list_documents()
        .execute::<MyDocument>()
        .await
        .unwrap();
    assert!(documents.documents.len() == 1);
    // replace document with optimistic concurrency and session token
    document_data.hello = 190;
    collection_client
        .clone()
        .into_document_client(document_data.id.clone(), &document_data.id)
        .unwrap()
        .replace_document(
            Context::new(),
            &document_data,
            ReplaceDocumentOptions::new()
                .consistency_level(ConsistencyLevel::from(&documents))
                .if_match_condition(IfMatchCondition::Match(
                    &documents.documents[0].document_attributes.etag(),
                )),
        )
        .await
        .unwrap();
    // now get the replaced document
    let document_client = collection_client
        .into_document_client(DOCUMENT_NAME, &DOCUMENT_NAME)
        .unwrap();
    let document_after_get = document_client
        .get_document::<MyDocument>(Context::new(), GetDocumentOptions::new())
        .await
        .unwrap();
    if let GetDocumentResponse::Found(document) = document_after_get {
        assert!(document.document.document.hello == 190);
    } else {
        panic!("document not found");
    }
    // Clean up: dropping the database removes the collection as well.
    database_client
        .delete_database(Context::new(), DeleteDatabaseOptions::new())
        .await
        .unwrap();
}
|
use std::sync::{Arc, RwLock};
use iced::{Button, Checkbox, Clipboard, Column, Command, Container, Length, PickList, Row, Slider, Text, button, pick_list, slider};
use crate::styling::Theme;
/// Startup data handed to `Gui::new`: the settings store shared with the
/// rest of the application.
pub struct Flags {
    // Shared, mutable application settings; persisted after every change.
    pub settings: Arc<RwLock<crate::settings::Settings>>
}
/// UI events produced by the settings widgets; each maps to exactly one
/// settings field (see `Gui::update`).
#[derive(Debug, Clone)]
pub enum Message {
    ThemeChanged(Theme),
    ShowAllCoinsToggled(bool),
    ShowAllCurrenciesToggled(bool),
    // Graph-color channel sliders report 0..=255; the stored color uses
    // 0.0..=1.0 floats, converted in `update`.
    RedChanged(u8),
    GreenChanged(u8),
    BlueChanged(u8),
    AlphaChanged(u8),
}
/// State of the settings screen: the shared settings handle plus the
/// iced widget state for each interactive control.
#[derive(Default)]
pub struct Gui {
    settings: Arc<RwLock<crate::settings::Settings>>,
    theme_pick_list: pick_list::State<Theme>,
    red_slider: slider::State,
    green_slider: slider::State,
    blue_slider: slider::State,
    alpha_slider: slider::State,
}
impl Gui {
    /// Builds the settings screen from the shared settings handle; all
    /// widget states start at their defaults. No startup command is run.
    pub fn new(flags: Flags) -> (Self, Command<Message>) {
        (Self {
            settings: flags.settings,
            theme_pick_list: Default::default(),
            red_slider: Default::default(),
            green_slider: Default::default(),
            blue_slider: Default::default(),
            alpha_slider: Default::default(),
        }, Command::none())
    }

    /// Applies a UI event to the shared settings and persists them
    /// immediately. Each arm takes the write lock, mutates one field (the
    /// guard is dropped at the end of that statement), then re-locks for
    /// reading to call `save()`. Lock poisoning or a failed save panics.
    pub fn update(&mut self, message: Message, _clipboard: &mut Clipboard) -> Command<Message> {
        match message {
            Message::ThemeChanged(theme) => {
                self.settings.write().unwrap().theme = theme;
                self.settings.read().unwrap().save().unwrap();
            }
            Message::ShowAllCoinsToggled(b) => {
                self.settings.write().unwrap().show_all_coins = b;
                self.settings.read().unwrap().save().unwrap();
            }
            Message::ShowAllCurrenciesToggled(b) => {
                self.settings.write().unwrap().show_all_currencies = b;
                self.settings.read().unwrap().save().unwrap();
            }
            // Slider values arrive as 0..=255 and are stored normalized
            // to 0.0..=1.0.
            Message::RedChanged(red) => {
                self.settings.write().unwrap().graph_color.r = red as f32 / 255.0;
                self.settings.read().unwrap().save().unwrap();
            }
            Message::GreenChanged(green) => {
                self.settings.write().unwrap().graph_color.g = green as f32 / 255.0;
                self.settings.read().unwrap().save().unwrap();
            }
            Message::BlueChanged(blue) => {
                self.settings.write().unwrap().graph_color.b = blue as f32 / 255.0;
                self.settings.read().unwrap().save().unwrap();
            }
            Message::AlphaChanged(alpha) => {
                self.settings.write().unwrap().graph_color.a = alpha as f32 / 255.0;
                self.settings.read().unwrap().save().unwrap();
            }
        }
        Command::none()
    }

    /// Renders the settings column: theme picker, two checkboxes, a
    /// color preview label, and four RGBA sliders for the graph color.
    /// Holds the settings read lock for the whole build.
    pub fn view(&mut self) -> iced::Element<'_, Message> {
        let lock = self.settings.read().unwrap();
        let theme = lock.theme;
        let mut column = Column::new()
            .padding(5)
            .width(Length::Fill)
            .height(Length::Fill);
        column = column.push(PickList::new(&mut self.theme_pick_list, Theme::ALL.iter().cloned().collect::<Vec<_>>(), Some(theme.clone()), Message::ThemeChanged).style(theme));
        let mut show_all_coins_row = Row::new()
            .padding(5)
            .width(Length::Fill)
            .height(Length::Shrink);
        show_all_coins_row = show_all_coins_row.push(Checkbox::new(lock.show_all_coins, "show all coins", Message::ShowAllCoinsToggled).style(theme));
        column = column.push(show_all_coins_row);
        let mut show_all_currencies_row = Row::new()
            .padding(5)
            .width(Length::Fill)
            .height(Length::Shrink);
        show_all_currencies_row = show_all_currencies_row.push(Checkbox::new(lock.show_all_currencies, "show all currencies", Message::ShowAllCurrenciesToggled).style(theme));
        column = column.push(show_all_currencies_row);
        let graph_color = lock.graph_color;
        let red = graph_color.r;
        let green = graph_color.g;
        let blue = graph_color.b;
        let alpha = graph_color.a;
        // Label row: shows the numeric channel values, tinted with the
        // color itself as a live preview.
        let mut graph_color_label_row = Row::new()
            .padding(5)
            .spacing(5)
            .width(Length::Fill)
            .height(Length::Shrink);
        graph_color_label_row = graph_color_label_row.push(Text::new("Graph color"));
        graph_color_label_row = graph_color_label_row.push(Text::new(format!("{{ r: {:.2}, g: {:.2}, b: {:.2}, a: {:.2}}}", red, green, blue, alpha)).color(graph_color));
        column = column.push(graph_color_label_row);
        let mut graph_color_red_row = Row::new()
            .padding(5)
            .width(Length::Fill)
            .height(Length::Shrink);
        let mut graph_color_green_row = Row::new()
            .padding(5)
            .width(Length::Fill)
            .height(Length::Shrink);
        let mut graph_color_blue_row = Row::new()
            .padding(5)
            .width(Length::Fill)
            .height(Length::Shrink);
        let mut graph_color_alpha_row = Row::new()
            .padding(5)
            .width(Length::Fill)
            .height(Length::Shrink);
        // Sliders operate on 0..=255; the stored floats are scaled up for
        // display and scaled back down in `update`.
        graph_color_red_row = graph_color_red_row.push(Text::new("Red").width(Length::Units(100)));
        graph_color_red_row = graph_color_red_row.push(Slider::new(&mut self.red_slider, 0..=255, (red * 255.0) as u8, Message::RedChanged).width(Length::Units(256)).style(theme));
        graph_color_green_row = graph_color_green_row.push(Text::new("Green").width(Length::Units(100)));
        graph_color_green_row = graph_color_green_row.push(Slider::new(&mut self.green_slider, 0..=255, (green * 255.0) as u8, Message::GreenChanged).width(Length::Units(256)).style(theme));
        graph_color_blue_row = graph_color_blue_row.push(Text::new("Blue").width(Length::Units(100)));
        graph_color_blue_row = graph_color_blue_row.push(Slider::new(&mut self.blue_slider, 0..=255, (blue * 255.0) as u8, Message::BlueChanged).width(Length::Units(256)).style(theme));
        graph_color_alpha_row = graph_color_alpha_row.push(Text::new("Alpha").width(Length::Units(100)));
        graph_color_alpha_row = graph_color_alpha_row.push(Slider::new(&mut self.alpha_slider, 0..=255, (alpha * 255.0) as u8, Message::AlphaChanged).width(Length::Units(256)).style(theme));
        column = column.push(graph_color_red_row);
        column = column.push(graph_color_green_row);
        column = column.push(graph_color_blue_row);
        column = column.push(graph_color_alpha_row);
        Container::new(column)
            .width(Length::Fill)
            .height(Length::Fill)
            .center_x()
            .center_y()
            .style(theme)
            .into()
    }
}
pub mod decal;
pub mod particle;
pub mod render;
pub mod world;
|
use super::InternedGrammar;
use crate::generate::grammars::{InputGrammar, Variable, VariableType};
use crate::generate::rules::{Rule, Symbol};
use anyhow::{anyhow, Result};
/// Replaces all name-based symbol references in `grammar` with numeric
/// `Symbol`s, producing the `InternedGrammar` used by later passes.
///
/// # Errors
/// * the start rule (first variable) is hidden (underscore-prefixed)
/// * any rule, supertype, expected conflict, or word token references an
///   undefined name
pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar> {
    let interner = Interner { grammar };
    // The first variable is the grammar's start rule.
    if variable_type_for_name(&grammar.variables[0].name) == VariableType::Hidden {
        return Err(anyhow!("A grammar's start rule must be visible."));
    }
    // Intern the right-hand side of every variable.
    let mut variables = Vec::with_capacity(grammar.variables.len());
    for variable in grammar.variables.iter() {
        variables.push(Variable {
            name: variable.name.clone(),
            kind: variable_type_for_name(&variable.name),
            rule: interner.intern_rule(&variable.rule)?,
        });
    }
    // External tokens: named ones keep their name and naming-derived
    // kind; anything else becomes an anonymous token with an empty name.
    let mut external_tokens = Vec::with_capacity(grammar.external_tokens.len());
    for external_token in grammar.external_tokens.iter() {
        let rule = interner.intern_rule(&external_token)?;
        let (name, kind) = if let Rule::NamedSymbol(name) = external_token {
            (name.clone(), variable_type_for_name(&name))
        } else {
            (String::new(), VariableType::Anonymous)
        };
        external_tokens.push(Variable { name, kind, rule });
    }
    // Extra rules are interned as-is.
    let mut extra_symbols = Vec::with_capacity(grammar.extra_symbols.len());
    for extra_token in grammar.extra_symbols.iter() {
        extra_symbols.push(interner.intern_rule(extra_token)?);
    }
    // Supertypes are plain name lookups; unknown names are errors.
    let mut supertype_symbols = Vec::with_capacity(grammar.supertype_symbols.len());
    for supertype_symbol_name in grammar.supertype_symbols.iter() {
        supertype_symbols.push(
            interner
                .intern_name(supertype_symbol_name)
                .ok_or_else(|| anyhow!("Undefined symbol `{}`", supertype_symbol_name))?,
        );
    }
    let mut expected_conflicts = Vec::new();
    for conflict in grammar.expected_conflicts.iter() {
        let mut interned_conflict = Vec::with_capacity(conflict.len());
        for name in conflict {
            interned_conflict.push(
                interner
                    .intern_name(&name)
                    .ok_or_else(|| anyhow!("Undefined symbol `{}`", name))?,
            );
        }
        expected_conflicts.push(interned_conflict);
    }
    let mut variables_to_inline = Vec::new();
    for name in grammar.variables_to_inline.iter() {
        // NOTE(review): unknown inline names are silently skipped rather
        // than reported, unlike the lookups above — confirm intended.
        if let Some(symbol) = interner.intern_name(&name) {
            variables_to_inline.push(symbol);
        }
    }
    let mut word_token = None;
    if let Some(name) = grammar.word_token.as_ref() {
        word_token = Some(
            interner
                .intern_name(&name)
                .ok_or_else(|| anyhow!("Undefined symbol `{}`", &name))?,
        );
    }
    // Variables declared as supertypes are forced hidden in the output.
    for (i, variable) in variables.iter_mut().enumerate() {
        if supertype_symbols.contains(&Symbol::non_terminal(i)) {
            variable.kind = VariableType::Hidden;
        }
    }
    Ok(InternedGrammar {
        variables,
        external_tokens,
        extra_symbols,
        expected_conflicts,
        variables_to_inline,
        supertype_symbols,
        word_token,
        precedence_orderings: grammar.precedence_orderings.clone(),
    })
}
/// Name-to-`Symbol` resolver borrowing the grammar being interned.
struct Interner<'a> {
    grammar: &'a InputGrammar,
}
impl<'a> Interner<'a> {
fn intern_rule(&self, rule: &Rule) -> Result<Rule> {
match rule {
Rule::Choice(elements) => {
let mut result = Vec::with_capacity(elements.len());
for element in elements {
result.push(self.intern_rule(element)?);
}
Ok(Rule::Choice(result))
}
Rule::Seq(elements) => {
let mut result = Vec::with_capacity(elements.len());
for element in elements {
result.push(self.intern_rule(element)?);
}
Ok(Rule::Seq(result))
}
Rule::Repeat(content) => Ok(Rule::Repeat(Box::new(self.intern_rule(content)?))),
Rule::Metadata { rule, params } => Ok(Rule::Metadata {
rule: Box::new(self.intern_rule(rule)?),
params: params.clone(),
}),
Rule::NamedSymbol(name) => {
if let Some(symbol) = self.intern_name(&name) {
Ok(Rule::Symbol(symbol))
} else {
Err(anyhow!("Undefined symbol `{}`", name))
}
}
_ => Ok(rule.clone()),
}
}
fn intern_name(&self, symbol: &str) -> Option<Symbol> {
for (i, variable) in self.grammar.variables.iter().enumerate() {
if variable.name == symbol {
return Some(Symbol::non_terminal(i));
}
}
for (i, external_token) in self.grammar.external_tokens.iter().enumerate() {
if let Rule::NamedSymbol(name) = external_token {
if name == symbol {
return Some(Symbol::external(i));
}
}
}
return None;
}
}
/// A leading underscore marks a rule as hidden (it produces no named node
/// in the syntax tree); every other rule is a regular named variable.
fn variable_type_for_name(name: &str) -> VariableType {
    // Char pattern avoids the needless str allocation-free-but-slower
    // comparison path of `starts_with("_")` (clippy: single_char_pattern).
    if name.starts_with('_') {
        VariableType::Hidden
    } else {
        VariableType::Named
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Interning replaces named references with non-terminal indices and
    /// marks underscore-prefixed variables hidden.
    /// NOTE(review): the test name mentions repeat expansion, but only
    /// symbol interning is exercised here.
    #[test]
    fn test_basic_repeat_expansion() {
        let grammar = intern_symbols(&build_grammar(vec![
            Variable::named("x", Rule::choice(vec![Rule::named("y"), Rule::named("_z")])),
            Variable::named("y", Rule::named("_z")),
            Variable::named("_z", Rule::string("a")),
        ]))
        .unwrap();
        assert_eq!(
            grammar.variables,
            vec![
                Variable::named(
                    "x",
                    Rule::choice(vec![Rule::non_terminal(1), Rule::non_terminal(2),])
                ),
                Variable::named("y", Rule::non_terminal(2)),
                Variable::hidden("_z", Rule::string("a")),
            ]
        );
    }

    /// Names defined both internally and externally resolve to the
    /// internal (non-terminal) symbol; external-only names resolve to
    /// external symbols.
    #[test]
    fn test_interning_external_token_names() {
        // Variable `y` is both an internal and an external token.
        // Variable `z` is just an external token.
        let mut input_grammar = build_grammar(vec![
            Variable::named(
                "w",
                Rule::choice(vec![Rule::named("x"), Rule::named("y"), Rule::named("z")]),
            ),
            Variable::named("x", Rule::string("a")),
            Variable::named("y", Rule::string("b")),
        ]);
        input_grammar
            .external_tokens
            .extend(vec![Rule::named("y"), Rule::named("z")]);
        let grammar = intern_symbols(&input_grammar).unwrap();
        // Variable `y` is referred to by its internal index.
        // Variable `z` is referred to by its external index.
        assert_eq!(
            grammar.variables,
            vec![
                Variable::named(
                    "w",
                    Rule::choice(vec![
                        Rule::non_terminal(1),
                        Rule::non_terminal(2),
                        Rule::external(1),
                    ])
                ),
                Variable::named("x", Rule::string("a")),
                Variable::named("y", Rule::string("b")),
            ]
        );
        // The external token for `y` refers back to its internal index.
        assert_eq!(
            grammar.external_tokens,
            vec![
                Variable::named("y", Rule::non_terminal(2)),
                Variable::named("z", Rule::external(1)),
            ]
        );
    }

    /// Referencing an unknown symbol must fail with a precise message.
    #[test]
    fn test_grammar_with_undefined_symbols() {
        let result = intern_symbols(&build_grammar(vec![Variable::named("x", Rule::named("y"))]));
        match result {
            Err(e) => assert_eq!(e.to_string(), "Undefined symbol `y`"),
            _ => panic!("Expected an error but got none"),
        }
    }

    /// Helper: a grammar with the given variables and defaults elsewhere.
    fn build_grammar(variables: Vec<Variable>) -> InputGrammar {
        InputGrammar {
            variables,
            name: "the_language".to_string(),
            ..Default::default()
        }
    }
}
|
/// Value with a `'static` lifetime, baked into the binary.
static NUM: i32 = 120;
/// Returns a reference to the static `NUM`, coerced from `'static` down
/// to the caller-chosen lifetime `'a`; the argument exists only to pin
/// what `'a` is.
fn coerce_static<'a>(_: &'a i32) -> &'a i32 {
    &NUM
}
fn execute(){
{
println!("hello world {}", NUM);
}
{
let sample = 12;
let checking = coerce_static(&sample);
println!("checking: {}", checking)
}
} |
//! Create a cabinet file.
extern crate clap;
extern crate makecab;
use clap::App;
use std::borrow::Cow;
use std::env;
use std::ffi::OsString;
use std::path::{Path,PathBuf};
use std::process;
/// Command-line entry point: parses a makecab-compatible argument set,
/// rejects the options this port does not support, derives the
/// destination path, and writes the cabinet file. Exits with status 1 on
/// any error.
fn main() {
    let matches = App::new("makecab")
        .version(env!("CARGO_PKG_VERSION"))
        .author("Ted Mielczarek <ted@mielczarek.org>")
        .about("Cabinet Maker (less-fully-featured Rust port)")
        // Fixed: the -V help text was missing its closing quote, which
        // corrupted the usage specification from this line onward.
        .args_from_usage(
            "-F [directives] 'Not supported'
             -D [var=value] 'Defines variable with specified value.'
             -L [dir] 'Location to place destination (default is current directory)'
             -V[n] 'Verbosity level'
             <source> 'File to compress.'
             [destination] 'File name to give compressed file. If omitted, the last character of the source file name is replaced with an underscore (_) and used as the destination.'"
        )
        .get_matches();
    // Check for unsupported options.
    if matches.is_present("F") {
        println!("Error: directive files are not supported");
        process::exit(1);
    }
    // Only the MSZIP compression define is accepted; any other -D is fatal.
    if matches.values_of("D").map(|mut vals| vals.any(|v| v != "CompressionType=MSZIP")).unwrap_or(false) {
        println!("Error: only '-D CompressionType=MSZIP' is supported.");
        process::exit(1);
    }
    let source = matches.value_of_os("source").unwrap();
    // Default destination name: source file name with its last character
    // replaced by '_' (e.g. `foo.dll` -> `foo.dl_`).
    // NOTE(review): `s.len()` counts bytes while `take` counts chars, so
    // non-ASCII source names may not be truncated as intended — confirm.
    let dest_name = matches.value_of_os("destination").map(|p| Cow::Borrowed(p)).unwrap_or_else(|| {
        let s = Path::new(source).file_name().unwrap().to_str().unwrap();
        Cow::Owned(OsString::from(s.chars().take(s.len()-1).chain("_".chars()).collect::<String>()))
    });
    // -L selects the output directory; default is the current directory.
    let dest = matches.value_of_os("L").map(PathBuf::from).unwrap_or_else(|| env::current_dir().unwrap()).join(dest_name);
    println!("{:?} -> {:?}", source, dest);
    match makecab::make_cab(dest, source) {
        Ok(()) => {},
        Err(e) => {
            println!("Failed to write cab file: {}", e);
            // Consistency: use the imported `process` path like the other
            // exit calls above.
            process::exit(1);
        }
    }
}
|
#![feature(test)]
extern crate test;
use rand::prelude::*;
const N: usize = 100;
const LEN: usize = 1_000_000;
fn create_random_vec() -> Vec<i32> {
let mut rng = SmallRng::seed_from_u64(0);
let mut v: Vec<_> = (0..LEN as i32).collect();
v.shuffle(&mut rng);
v
}
/// Benchmarks for the slice-based `out` API. Each iteration clones the
/// shared shuffled input so every run starts from identical data; the
/// clone cost is included in all variants equally.
mod slice {
    use crate::{create_random_vec, N};

    /// `out::slice::max` — selects `N` elements in place (presumably the
    /// N largest; confirm against the `out` crate docs).
    #[bench]
    fn max(b: &mut test::Bencher) {
        let v = create_random_vec();
        b.iter(|| {
            let mut v = v.clone();
            test::black_box(out::slice::max(&mut v, N));
        });
    }

    /// Unstable (non-allocating) variant of `max`.
    #[bench]
    fn max_unstable(b: &mut test::Bencher) {
        let v = create_random_vec();
        b.iter(|| {
            let mut v = v.clone();
            test::black_box(out::slice::max_unstable(&mut v, N));
        });
    }

    /// Cached-key variant; the identity key makes the caching overhead
    /// itself measurable.
    #[bench]
    fn max_by_cached_key(b: &mut test::Bencher) {
        let v = create_random_vec();
        b.iter(|| {
            let mut v = v.clone();
            test::black_box(out::slice::max_by_cached_key(&mut v, N, |&a| a));
        });
    }
}
/// Benchmarks for the iterator-consuming `out` API; the input vector is
/// cloned and handed over by value each iteration.
mod iter {
    use crate::{create_random_vec, N};

    /// `out::iter::max` — consumes the iterator and selects `N` elements.
    #[bench]
    fn max(b: &mut test::Bencher) {
        let v = create_random_vec();
        b.iter(|| {
            let v = v.clone();
            test::black_box(out::iter::max(v, N));
        });
    }

    /// Unstable variant of the iterator-based selection.
    #[bench]
    fn max_unstable(b: &mut test::Bencher) {
        let v = create_random_vec();
        b.iter(|| {
            let v = v.clone();
            test::black_box(out::iter::max_unstable(v, N));
        });
    }
}
/// Standard-library baselines for the same "select N of LEN" task.
mod std {
    use crate::{create_random_vec, N};
    use std::collections::BinaryHeap;

    /// Baseline: full stable sort, then take the first `N` (smallest).
    #[bench]
    fn sort(b: &mut test::Bencher) {
        let v = create_random_vec();
        b.iter(|| {
            let mut v = v.clone();
            v.sort();
            test::black_box(&v[..N]);
        });
    }

    /// Same as `sort`, with the faster, non-allocating unstable sort.
    #[bench]
    fn sort_unstable(b: &mut test::Bencher) {
        let v = create_random_vec();
        b.iter(|| {
            let mut v = v.clone();
            v.sort_unstable();
            test::black_box(&v[..N]);
        });
    }

    /// Stable sort computing each key exactly once (identity key here).
    #[bench]
    fn sort_by_cached_key(b: &mut test::Bencher) {
        let v = create_random_vec();
        b.iter(|| {
            let mut v = v.clone();
            v.sort_by_cached_key(|&a| a);
            test::black_box(&v[..N]);
        });
    }

    /// Heap-based selection: build a max-heap, then pop `N` times to get
    /// the `N` largest values in descending order.
    ///
    /// Fixed: the previous version used `heap.into_iter().take(N)`, but
    /// `BinaryHeap::into_iter` yields elements in arbitrary order, so it
    /// returned `N` arbitrary elements instead of performing a selection.
    #[bench]
    fn binary_heap(b: &mut test::Bencher) {
        let v = create_random_vec();
        b.iter(|| {
            let mut heap = BinaryHeap::from(v.clone());
            let mut top = Vec::with_capacity(N);
            for _ in 0..N {
                if let Some(x) = heap.pop() {
                    top.push(x);
                }
            }
            test::black_box(top);
        });
    }
}
|
use std::vec::Vec;
//
/// Generates the Farey sequence of order `n` as `(numerator, denominator)`
/// pairs in increasing order, from `1/n` up to and including `1/1`.
/// (The seed term `0/1` is used to start the recurrence but not emitted.)
///
/// Uses the standard next-term recurrence: given consecutive terms `a/b`
/// and `c/d`, the following term is `(k*c - a) / (k*d - b)` with
/// `k = (n + b) / d`.
///
/// Returns an empty vector for `n == 0`.
pub fn farey(n: usize) -> Vec<(usize, usize)> {
    let mut elems = Vec::new();
    // (a, b) is the previous fraction a/b; (c, d) the current one c/d.
    let mut a = 0;
    let mut b = 1;
    let mut c = 1;
    let mut d = n;
    // For n == 0, d == 0 but the loop guard (1 <= 0) fails first, so the
    // division below is never reached.
    while c <= n {
        elems.push((c, d));
        let k = (n + b) / d;
        let (next_num, next_den) = (k * c - a, k * d - b);
        a = c;
        b = d;
        c = next_num;
        d = next_den;
    }
    elems
}
/// Sieve of Eratosthenes over odd numbers only: sieve index `i`
/// represents the odd number `2*i + 1`. Returns 2 followed by the odd
/// primes below `n`.
///
/// NOTE(review): behavior is preserved exactly, including edges — `n <= 2`
/// still yields `[2]`, and whether `n` itself can appear depends on the
/// `n/2` truncation. Confirm callers' expectations before tightening.
#[allow(dead_code)]
pub fn list_of_primes(n: usize) -> Vec<usize> {
    let mut sieve = vec![true; n / 2];
    let sqrt = (n as f64).sqrt() as usize;
    // Only odd candidates up to sqrt(n) need to cross anything off.
    for i in (3..=sqrt).step_by(2) {
        if sieve[i / 2] {
            // Mark odd multiples of i starting at i*i; a step of i in
            // index space is a step of 2*i in value space.
            for j in (i * i / 2..n / 2).step_by(i) {
                sieve[j] = false;
            }
        }
    }
    let mut answer = vec![2];
    for (i, &is_prime) in sieve.iter().enumerate() {
        // Skip index 0, which represents the number 1 (not prime).
        if is_prime && i >= 1 {
            answer.push(2 * i + 1);
        }
    }
    answer
}
/// Decomposes `n` into its base-10 digits, least significant first.
/// Returns an empty vector for `n == 0`.
#[allow(dead_code)]
fn digits(mut n: u32) -> Vec<u32> {
    let mut out = Vec::new();
    while n > 0 {
        out.push(n % 10);
        n /= 10;
    }
    out
}
#[allow(dead_code)]
fn bouncy(x: u32) -> bool {
let digs = digits(x);
let mut up = false;
let mut down = false;
for i in 1..digs.len() {
if digs.get(i-1) < digs.get(i) {
down = true;
} else if digs.get(i-1) > digs.get(i) {
up = true;
}
if up && down {
return true;
}
}
return false;
}
/// Scans upward from 1 and prints the first number at which the running
/// proportion of bouncy numbers reaches 99%, together with the bouncy
/// count at that point (Project-Euler-112-style search). Gives up after
/// 10 million candidates.
#[allow(dead_code)]
pub fn check_bouncy() {
    let mut bouncy_count: f64 = 0.0;
    for candidate in 1..10000000 {
        if bouncy(candidate) {
            bouncy_count += 1.0;
        }
        // Ratio of bouncy numbers among 1..=candidate.
        if bouncy_count / candidate as f64 >= 0.99 {
            print!("{:?} {:?}", candidate, bouncy_count);
            break;
        }
    }
}
|
#![no_std]
#![no_main]
#![feature(min_type_alias_impl_trait)]
#![feature(impl_trait_in_bindings)]
#![feature(type_alias_impl_trait)]
#![allow(incomplete_features)]
#[path = "../example_common.rs"]
mod example_common;
use defmt::{assert_eq, panic};
use embassy::executor::Spawner;
use embassy::traits::flash::Flash;
use embassy_nrf::Peripherals;
use embassy_nrf::{interrupt, qspi};
use example_common::*;
const PAGE_SIZE: usize = 4096;
// Workaround for alignment requirements.
// Nicer API will probably come in the future.
/// A 4-byte-aligned buffer holding exactly one flash page.
/// Consistency fix: sized by `PAGE_SIZE` instead of a duplicated
/// hard-coded 4096, so the two cannot drift apart.
#[repr(C, align(4))]
struct AlignedBuf([u8; PAGE_SIZE]);
#[embassy::main]
async fn main(_spawner: Spawner, p: Peripherals) {
    // Config for the MX25R64 present in the nRF52840 DK
    let mut config = qspi::Config::default();
    // Quad-I/O opcodes: 4-bit-wide reads and page programs, 256-byte pages.
    config.read_opcode = qspi::ReadOpcode::READ4IO;
    config.write_opcode = qspi::WriteOpcode::PP4IO;
    config.write_page_size = qspi::WritePageSize::_256BYTES;
    let irq = interrupt::take!(QSPI);
    // Pin assignment matches the DK's external flash wiring.
    let mut q = qspi::Qspi::new(
        p.QSPI, irq, p.P0_19, p.P0_17, p.P0_20, p.P0_21, p.P0_22, p.P0_23, config,
    )
    .await;
    // Read the 3-byte JEDEC ID (0x9F) to verify the flash responds.
    let mut id = [1; 3];
    q.custom_instruction(0x9F, &[], &mut id).await.unwrap();
    info!("id: {}", id);
    // Read status register
    let mut status = [4; 1];
    q.custom_instruction(0x05, &[], &mut status).await.unwrap();
    info!("status: {:?}", status[0]);
    // Set bit 0x40 in the status register if it is clear — presumably the
    // quad-enable (QE) bit. NOTE(review): confirm against the MX25R64
    // datasheet; on that part QE is usually bit 6 of status register 1.
    if status[0] & 0x40 == 0 {
        status[0] |= 0x40;
        q.custom_instruction(0x01, &status, &mut []).await.unwrap();
        info!("enabled quad in status");
    }
    let mut buf = AlignedBuf([0u8; PAGE_SIZE]);
    // Deterministic per-address byte pattern so read-back can be verified.
    let pattern = |a: u32| (a ^ (a >> 8) ^ (a >> 16) ^ (a >> 24)) as u8;
    // Erase and program the first 8 pages with the pattern.
    for i in 0..8 {
        info!("page {:?}: erasing... ", i);
        q.erase(i * PAGE_SIZE).await.unwrap();
        for j in 0..PAGE_SIZE {
            buf.0[j] = pattern((j + i * PAGE_SIZE) as u32);
        }
        info!("programming...");
        q.write(i * PAGE_SIZE, &buf.0).await.unwrap();
    }
    // Read everything back and check each byte against the pattern.
    for i in 0..8 {
        info!("page {:?}: reading... ", i);
        q.read(i * PAGE_SIZE, &mut buf.0).await.unwrap();
        info!("verifying...");
        for j in 0..PAGE_SIZE {
            assert_eq!(buf.0[j], pattern((j + i * PAGE_SIZE) as u32));
        }
    }
    info!("done!")
}
|
use rand::{thread_rng, Rng};
use std::collections::HashMap;
/// Fibonacci-valued tile denominations used on the board.
///
/// NOTE: the `ThrityFour` variant name contains a typo, but it is part of
/// the public API and is kept for compatibility.
#[derive(Copy, Clone, PartialEq)]
pub enum BlockNumber {
    One,
    Two,
    Three,
    Five,
    Eight,
    Thirteen,
    TwentyOne,
    ThrityFour,
    FiftyFive,
    EightyNine,
}

impl BlockNumber {
    /// The numeric (Fibonacci) value of this denomination.
    pub fn to_u32(self) -> u32 {
        use BlockNumber::*;
        match self {
            One => 1,
            Two => 2,
            Three => 3,
            Five => 5,
            Eight => 8,
            Thirteen => 13,
            TwentyOne => 21,
            ThrityFour => 34,
            FiftyFive => 55,
            EightyNine => 89,
        }
    }

    /// The next denomination in the chain, or `None` for the largest.
    pub fn next(self) -> Option<Self> {
        use BlockNumber::*;
        let successor = match self {
            One => Two,
            Two => Three,
            Three => Five,
            Five => Eight,
            Eight => Thirteen,
            Thirteen => TwentyOne,
            TwentyOne => ThrityFour,
            ThrityFour => FiftyFive,
            FiftyFive => EightyNine,
            EightyNine => return None,
        };
        Some(successor)
    }
}
/// A zero-indexed (column, row) coordinate on the board.
#[derive(Copy, Clone, PartialEq)]
pub struct Position {
    pub x: u32,
    pub y: u32,
}

impl Position {
    /// Builds a position from its column (`x`) and row (`y`).
    pub fn new(x: u32, y: u32) -> Self {
        Position { x, y }
    }
}
/// A single numbered tile together with its location on the board.
#[derive(Copy, Clone, PartialEq)]
pub struct Block {
    pub number: BlockNumber,
    pub position: Position,
}

impl Block {
    /// Creates a tile of denomination `number` at column `x`, row `y`.
    pub fn new(number: BlockNumber, x: u32, y: u32) -> Self {
        Block {
            position: Position::new(x, y),
            number,
        }
    }
}
/// A directed merge: `from` is removed and `to` is replaced by a block of
/// the next denomination at `to`'s position.
pub struct MergeableBlocks {
    pub from: Block,
    pub to: Block,
}
/// Player-driven movement directions for the falling block.
pub enum Direction {
    LEFT,
    RIGHT,
    DOWN,
}
/// Game state: board dimensions, the currently falling block, and every
/// block that has already landed.
pub struct Board {
    pub width: u32,
    pub height: u32,
    pub current_block: Block,
    pub blocks: Vec<Block>,
}
impl Board {
    /// Creates an empty board of the given dimensions with a freshly
    /// spawned falling block at the top.
    pub fn new(width: u32, height: u32) -> Self {
        Self {
            width,
            height,
            current_block: Board::default_current_block(width),
            blocks: vec![],
        }
    }

    /// Spawns a new falling block in the middle column at row 0, numbered
    /// One or Two with equal probability.
    fn default_current_block(width: u32) -> Block {
        // rand 0.x two-argument API: gen_range(0, 2) yields 0 or 1.
        let random_value = thread_rng().gen_range(0, 2);
        Block::new(
            if random_value < 1 {
                BlockNumber::One
            } else {
                BlockNumber::Two
            },
            (width - 1) / 2,
            0,
        )
    }

    /// Moves the falling block one cell in `direction` if legal; a blocked
    /// DOWN move lands the block and spawns a new falling block.
    pub fn move_current_block(&mut self, direction: Direction) {
        let prev_x = self.current_block.position.x;
        let prev_y = self.current_block.position.y;
        match direction {
            Direction::LEFT => {
                self.current_block.position.x = if self.movable_to_left() {
                    prev_x - 1
                } else {
                    prev_x
                };
            }
            Direction::RIGHT => {
                self.current_block.position.x = if self.movable_to_right() {
                    prev_x + 1
                } else {
                    prev_x
                };
            }
            Direction::DOWN => {
                if self.movable_to_down() {
                    self.current_block.position.y = prev_y + 1;
                } else {
                    // Landed: freeze the block and spawn the next one.
                    self.blocks.push(self.current_block);
                    self.current_block = Board::default_current_block(self.width);
                }
            }
        };
    }

    /// True if the falling block may descend one row: it must be above the
    /// cell just over the topmost landed block in its column, or above the
    /// bottom row when the column is empty.
    fn movable_to_down(&self) -> bool {
        let prev_x = self.current_block.position.x;
        let prev_y = self.current_block.position.y;
        // Topmost (smallest y) landed block in the same column, if any.
        // NOTE(review): this ignores whether that block is above or below
        // the falling block; after merges leave gaps the falling block can
        // be *below* the column's topmost block — confirm intended.
        let highest_block = self
            .blocks
            .iter()
            .filter(|block| block.position.x == prev_x)
            .min_by_key(|block| block.position.y);
        match highest_block {
            Some(block) => {
                prev_y
                    < if block.position.y > 0 {
                        block.position.y - 1
                    } else {
                        0
                    }
            }
            // Empty column: stop at the bottom row.
            None => prev_y < self.height - 1,
        }
    }

    /// True if no landed block occupies the cell immediately to the left
    /// and the falling block is not already in column 0.
    fn movable_to_left(&self) -> bool {
        let prev_x = self.current_block.position.x;
        let prev_y = self.current_block.position.y;
        let is_blocking = self
            .blocks
            .iter()
            .any(|block| block.position.y == prev_y && block.position.x + 1 == prev_x);
        (!is_blocking) && prev_x > 0
    }

    /// True if no landed block occupies the cell immediately to the right
    /// and the falling block is not already in the last column.
    fn movable_to_right(&self) -> bool {
        let prev_x = self.current_block.position.x;
        let prev_y = self.current_block.position.y;
        let is_blocking = self
            .blocks
            .iter()
            .any(|block| block.position.y == prev_y && block.position.x == prev_x + 1);
        (!is_blocking) && prev_x < self.width - 1
    }

    /// Repeatedly merges all currently mergeable pairs until none remain.
    /// NOTE(review): each batch is computed once and then applied; a merge
    /// earlier in the batch can invalidate (remove) a block referenced by
    /// a later entry in the same batch — verify this cannot duplicate or
    /// resurrect blocks.
    pub fn update(&mut self) {
        loop {
            let mergeable_blocks = self.mergeable_blocks();
            if mergeable_blocks.is_empty() {
                break;
            }
            for blocks in mergeable_blocks {
                self.merge_blocks(blocks);
            }
        }
    }

    /// Scans all horizontally and vertically adjacent pairs of landed
    /// blocks and returns every pair whose denominations are consecutive
    /// (`from.next() == to`), excluding merges that would exceed the
    /// largest denomination (`to.next()` must exist so the merged block
    /// has a value).
    pub fn mergeable_blocks(&self) -> Vec<MergeableBlocks> {
        let blocks_by_position = self.blocks_by_position();
        let mut mergeable_blocks = vec![];
        // Horizontal neighbours: (x, y) and (x + 1, y), both directions.
        for x in 0..self.width - 1 {
            for y in 0..self.height {
                if let Some(left_block) = blocks_by_position.get(&(x, y)) {
                    if let Some(right_block) = blocks_by_position.get(&(x + 1, y)) {
                        if left_block
                            .number
                            .next()
                            .map_or(false, |n| n == right_block.number)
                            && right_block.number.next().is_some()
                        {
                            mergeable_blocks.push(MergeableBlocks {
                                from: **left_block,
                                to: **right_block,
                            })
                        }
                        if right_block
                            .number
                            .next()
                            .map_or(false, |n| n == left_block.number)
                            && left_block.number.next().is_some()
                        {
                            mergeable_blocks.push(MergeableBlocks {
                                from: **right_block,
                                to: **left_block,
                            })
                        }
                    }
                }
            }
        }
        // Vertical neighbours: (x, y) and (x, y + 1), both directions.
        for x in 0..self.width {
            for y in 0..self.height - 1 {
                if let Some(bottom_block) = blocks_by_position.get(&(x, y)) {
                    if let Some(top_block) = blocks_by_position.get(&(x, y + 1)) {
                        if bottom_block
                            .number
                            .next()
                            .map_or(false, |n| n == top_block.number)
                            && top_block.number.next().is_some()
                        {
                            mergeable_blocks.push(MergeableBlocks {
                                from: **bottom_block,
                                to: **top_block,
                            })
                        }
                        if top_block
                            .number
                            .next()
                            .map_or(false, |n| n == bottom_block.number)
                            && bottom_block.number.next().is_some()
                        {
                            mergeable_blocks.push(MergeableBlocks {
                                from: **top_block,
                                to: **bottom_block,
                            })
                        }
                    }
                }
            }
        }
        mergeable_blocks
    }

    /// Index of landed blocks keyed by their (x, y) position.
    fn blocks_by_position(&self) -> HashMap<(u32, u32), &Block> {
        self.blocks
            .iter()
            .map(|block| ((block.position.x, block.position.y), block))
            .collect()
    }

    /// Removes both blocks of the pair and places a block of the next
    /// denomination at `to`'s position. The `unwrap` is guarded by
    /// `mergeable_blocks`, which only emits pairs where `to.number.next()`
    /// exists.
    pub fn merge_blocks(&mut self, mergeable_blocks: MergeableBlocks) {
        self.blocks
            .retain(|block| block != &mergeable_blocks.from && block != &mergeable_blocks.to);
        self.blocks.push(Block {
            position: mergeable_blocks.to.position,
            number: mergeable_blocks.to.number.next().unwrap(),
        })
    }
}
|
pub mod accounts;
pub mod categories;
pub mod cron;
pub mod dashboard;
pub mod products;
pub mod terminal;
pub mod transactions;
use actix_web::web;
/// Setup routes for admin ui.
///
/// Everything is mounted under the `/admin` scope; handlers live in the
/// sibling modules (`dashboard`, `accounts`, `products`, `categories`,
/// `transactions`, `terminal`, `cron`). Actix matches routes in
/// registration order, so the specific `/account/...`, `/product/...`,
/// etc. paths are registered before their `/{id}`-parameterized variants.
pub fn init(config: &mut web::ServiceConfig) {
    config.service(
        web::scope("/admin")
            // Dashboard landing page
            .service(web::resource("").route(web::get().to(dashboard::get_dashboard)))
            // Setup account management related routes
            .service(web::resource("/accounts").route(web::get().to(accounts::get_accounts)))
            .service(
                web::resource("/account/create")
                    .route(web::post().to(accounts::post_account_create))
                    .route(web::get().to(accounts::get_account_create)),
            )
            .service(
                web::resource("/account/delete/{account_id}")
                    .route(web::get().to(accounts::delete_get)),
            )
            .service(
                web::resource("/account/invite/{account_id}")
                    .route(web::get().to(accounts::invite_get)),
            )
            .service(
                web::resource("/account/revoke/{account_id}")
                    .route(web::get().to(accounts::revoke_get)),
            )
            .service(
                web::resource("/account/remove-nfc/{account_id}")
                    .route(web::get().to(accounts::remove_nfc_get)),
            )
            .service(
                web::resource("/account/remove-barcode/{account_id}")
                    .route(web::get().to(accounts::remove_barcode_get)),
            )
            .service(
                web::resource("/account/{account_id}")
                    .route(web::post().to(accounts::post_account_edit))
                    .route(web::get().to(accounts::get_account_edit)),
            )
            // Setup product management related routes
            .service(web::resource("/products").route(web::get().to(products::get_products)))
            .service(
                web::resource("/product/create")
                    .route(web::post().to(products::post_product_create))
                    .route(web::get().to(products::get_product_create)),
            )
            .service(
                web::resource("/product/delete/{product_id}")
                    .route(web::get().to(products::get_product_delete)),
            )
            .service(
                web::resource("/product/remove-image/{product_id}")
                    .route(web::get().to(products::get_product_remove_image)),
            )
            .service(
                web::resource("/product/upload-image/{product_id}")
                    .route(web::post().to(products::post_product_upload_image)),
            )
            .service(
                web::resource("/product/{product_id}")
                    .route(web::post().to(products::post_product_edit))
                    .route(web::get().to(products::get_product_edit)),
            )
            // Setup categories management related routes
            .service(web::resource("/categories").route(web::get().to(categories::get_categories)))
            .service(
                web::resource("/category/create")
                    .route(web::post().to(categories::post_category_create))
                    .route(web::get().to(categories::get_category_create)),
            )
            .service(
                web::resource("/category/delete/{category_id}")
                    .route(web::get().to(categories::get_category_delete)),
            )
            .service(
                web::resource("/category/{category_id}")
                    .route(web::post().to(categories::post_category_edit))
                    .route(web::get().to(categories::get_category_edit)),
            )
            // Setup transaction management related routes
            .service(
                web::resource("/transactions/generate/{account_id}")
                    .route(web::post().to(transactions::post_transaction_generate_random))
                    .route(web::get().to(transactions::get_transaction_generate_random)),
            )
            .service(
                web::resource("/transactions/validate")
                    .route(web::get().to(transactions::get_transactions_validate)),
            )
            .service(
                web::resource("/transactions/{account_id}")
                    .route(web::get().to(transactions::get_transactions)),
            )
            .service(
                web::resource("/transaction/execute/{account_id}")
                    .route(web::post().to(transactions::post_execute_transaction)),
            )
            .service(
                web::resource("/transaction/{account_id}/{transaction_id}")
                    .route(web::get().to(transactions::get_transaction_details)),
            )
            // Point-of-sale terminal page
            .service(web::resource("/terminal").route(web::get().to(terminal::get_terminal)))
            // Setup cronjob routes
            .service(web::resource("/cron/reports").route(web::get().to(cron::send_reports))),
    );
}
|
use std::{fmt::Display, str::FromStr};
use crate::{parser, KdlError, KdlNode, KdlValue};
/// Represents a KDL
/// [`Document`](https://github.com/kdl-org/kdl/blob/main/SPEC.md#document).
///
/// This type is also used to manage a [`KdlNode`]'s [`Children
/// Block`](https://github.com/kdl-org/kdl/blob/main/SPEC.md#children-block),
/// when present.
///
/// # Examples
///
/// The easiest way to create a `KdlDocument` is to parse it:
/// ```rust
/// # use kdl::KdlDocument;
/// let kdl: KdlDocument = "foo 1 2 3\nbar 4 5 6".parse().expect("parse failed");
/// ```
#[derive(Debug, Default, Clone, PartialEq)]
pub struct KdlDocument {
    // Whitespace/comments preceding the first node, preserved verbatim.
    pub(crate) leading: Option<String>,
    // The document's nodes, in source order.
    pub(crate) nodes: Vec<KdlNode>,
    // Whitespace/comments after the last node, preserved verbatim.
    pub(crate) trailing: Option<String>,
}
impl KdlDocument {
    /// Creates a new Document.
    pub fn new() -> Self {
        Default::default()
    }

    /// Gets the first child node with a matching name.
    pub fn get(&self, name: &str) -> Option<&KdlNode> {
        self.nodes.iter().find(move |n| n.name().value() == name)
    }

    /// Gets a mutable reference to the first child node with a matching
    /// name.
    pub fn get_mut(&mut self, name: &str) -> Option<&mut KdlNode> {
        self.nodes
            .iter_mut()
            .find(move |n| n.name().value() == name)
    }

    /// Gets the first argument (value) of the first child node with a
    /// matching name. This is a shorthand utility for cases where a document
    /// is being used as a key/value store.
    ///
    /// # Examples
    ///
    /// Given a document like this:
    /// ```kdl
    /// foo 1
    /// bar false
    /// ```
    ///
    /// You can fetch the value of `foo` in a single call like this:
    /// ```rust
    /// # use kdl::{KdlDocument, KdlValue};
    /// # let doc: KdlDocument = "foo 1\nbar false".parse().unwrap();
    /// assert_eq!(doc.get_arg("foo"), Some(&1.into()));
    /// ```
    pub fn get_arg(&self, name: &str) -> Option<&KdlValue> {
        self.get(name)
            .and_then(|node| node.get(0))
            .map(|e| e.value())
    }

    /// Gets all node arguments (values) of the first child node with a
    /// matching name. This is a shorthand utility for cases where a document
    /// is being used as a key/value store and the value is expected to be
    /// array-ish.
    ///
    /// If a node has no arguments, this will return an empty array.
    ///
    /// # Examples
    ///
    /// Given a document like this:
    /// ```kdl
    /// foo 1 2 3
    /// bar false
    /// ```
    ///
    /// You can fetch the arguments for `foo` in a single call like this:
    /// ```rust
    /// # use kdl::{KdlDocument, KdlValue};
    /// # let doc: KdlDocument = "foo 1 2 3\nbar false".parse().unwrap();
    /// assert_eq!(doc.get_args("foo"), vec![&1.into(), &2.into(), &3.into()]);
    /// ```
    pub fn get_args(&self, name: &str) -> Vec<&KdlValue> {
        self.get(name)
            .map(|n| n.entries())
            .unwrap_or_default()
            .iter()
            // Entries with a name are properties, not positional arguments.
            .filter(|e| e.name().is_none())
            .map(|e| e.value())
            .collect()
    }

    /// Gets a mutable reference to the first argument (value) of the first
    /// child node with a matching name. This is a shorthand utility for cases
    /// where a document is being used as a key/value store.
    pub fn get_arg_mut(&mut self, name: &str) -> Option<&mut KdlValue> {
        self.get_mut(name)
            .and_then(|node| node.get_mut(0))
            .map(|e| e.value_mut())
    }

    /// This utility makes it easy to interact with a KDL convention where
    /// child nodes named `-` are treated as array-ish values.
    ///
    /// # Examples
    ///
    /// Given a document like this:
    /// ```kdl
    /// foo {
    /// - 1
    /// - 2
    /// - false
    /// }
    /// ```
    ///
    /// You can fetch the dashed child values of `foo` in a single call like this:
    /// ```rust
    /// # use kdl::{KdlDocument, KdlValue};
    /// # let doc: KdlDocument = "foo {\n - 1\n - 2\n - false\n}".parse().unwrap();
    /// assert_eq!(doc.get_dash_vals("foo"), vec![&1.into(), &2.into(), &false.into()]);
    /// ```
    pub fn get_dash_vals(&self, name: &str) -> Vec<&KdlValue> {
        self.get(name)
            .and_then(|n| n.children())
            .map(|doc| doc.nodes())
            .unwrap_or_default()
            .iter()
            .filter(|e| e.name().value() == "-")
            // `-` nodes without any argument are silently skipped.
            .map(|e| e.get(0))
            .filter(|v| v.is_some())
            .map(|v| v.unwrap().value())
            .collect()
    }

    /// Returns a reference to this document's child nodes.
    pub fn nodes(&self) -> &[KdlNode] {
        &self.nodes
    }

    /// Returns a mutable reference to this document's child nodes.
    pub fn nodes_mut(&mut self) -> &mut Vec<KdlNode> {
        &mut self.nodes
    }

    /// Gets leading text (whitespace, comments) for this KdlDocument.
    pub fn leading(&self) -> Option<&str> {
        self.leading.as_deref()
    }

    /// Sets leading text (whitespace, comments) for this KdlDocument.
    pub fn set_leading(&mut self, leading: impl Into<String>) {
        self.leading = Some(leading.into());
    }

    /// Gets trailing text (whitespace, comments) for this KdlDocument.
    pub fn trailing(&self) -> Option<&str> {
        self.trailing.as_deref()
    }

    /// Sets trailing text (whitespace, comments) for this KdlDocument.
    pub fn set_trailing(&mut self, trailing: impl Into<String>) {
        self.trailing = Some(trailing.into());
    }

    /// Length of this document when rendered as a string.
    ///
    /// Note: this renders the whole document to a temporary string, so it
    /// is O(document size), not O(1).
    pub fn len(&self) -> usize {
        format!("{}", self).len()
    }

    /// Returns true if this document is completely empty (including whitespace)
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Clears leading and trailing text (whitespace, comments). `KdlNode`s in
    /// this document will be unaffected.
    ///
    /// If you need to clear the `KdlNode`s, use [`Self::clear_fmt_recursive`].
    pub fn clear_fmt(&mut self) {
        self.leading = None;
        self.trailing = None;
    }

    /// Clears leading and trailing text (whitespace, comments), also clearing
    /// all the `KdlNode`s in the document.
    pub fn clear_fmt_recursive(&mut self) {
        self.clear_fmt();
        for node in self.nodes.iter_mut() {
            node.clear_fmt_recursive();
        }
    }

    /// Auto-formats this Document, making everything nice while preserving
    /// comments.
    pub fn fmt(&mut self) {
        self.fmt_impl(0, false);
    }

    /// Formats the document and removes all comments from the document.
    pub fn fmt_no_comments(&mut self) {
        self.fmt_impl(0, true);
    }
}
impl Display for KdlDocument {
    // Rendering a document is stringifying it at indent level 0.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.stringify(f, 0)
    }
}
impl KdlDocument {
    // Shared implementation behind `fmt` and `fmt_no_comments`: normalize
    // leading text, recursively format each node, then normalize trailing
    // text.
    pub(crate) fn fmt_impl(&mut self, indent: usize, no_comments: bool) {
        if let Some(s) = self.leading.as_mut() {
            crate::fmt::fmt_leading(s, indent, no_comments);
        }
        // Track whether any node exists so an otherwise-empty document's
        // trailing text still ends with a newline.
        let mut has_nodes = false;
        for node in &mut self.nodes {
            has_nodes = true;
            node.fmt_impl(indent, no_comments);
        }
        if let Some(s) = self.trailing.as_mut() {
            crate::fmt::fmt_trailing(s, no_comments);
            if !has_nodes {
                s.push('\n');
            }
        }
    }

    // Writes the document at the given indent level. A node that carries
    // its own trailing text is emitted verbatim; otherwise a newline is
    // inserted after it.
    pub(crate) fn stringify(
        &self,
        f: &mut std::fmt::Formatter<'_>,
        indent: usize,
    ) -> std::fmt::Result {
        if let Some(leading) = &self.leading {
            write!(f, "{}", leading)?;
        }
        for node in &self.nodes {
            node.stringify(f, indent)?;
            if node.trailing.is_none() {
                writeln!(f)?;
            }
        }
        if let Some(trailing) = &self.trailing {
            write!(f, "{}", trailing)?;
        }
        Ok(())
    }
}
// Iterating a document by value yields its nodes in source order.
impl IntoIterator for KdlDocument {
    type Item = KdlNode;
    type IntoIter = std::vec::IntoIter<Self::Item>;
    fn into_iter(self) -> Self::IntoIter {
        self.nodes.into_iter()
    }
}
// Parsing entry point: `"...".parse::<KdlDocument>()`.
impl FromStr for KdlDocument {
    type Err = KdlError;
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        parser::parse(input, parser::document)
    }
}
#[cfg(test)]
mod test {
    use crate::{KdlEntry, KdlValue};
    use super::*;

    // Two differently-formatted documents become equal once formatting is
    // cleared recursively.
    #[test]
    fn canonical_clear_fmt() -> miette::Result<()> {
        let left_src = r#"
// There is a node here
first_node /*with cool comments, too */ param=1.03e2 /-"commented" "argument" {
// With nested nodes too
nested 1 2 3
nested_2 "hi" "world" // this one is cool
}
second_node param=153 { nested one=1 two=2; }"#;
        let right_src = r#"
first_node param=103.0 "argument" {
// Different indentation, because
// Why not
nested 1 2 3
nested_2 "hi" /* actually, "hello" */ "world"
}
// There is a node here
second_node /* This time, the comment is here */ param=153 {
nested one=1 two=2
}"#;
        let mut left_doc: KdlDocument = left_src.parse()?;
        let mut right_doc: KdlDocument = right_src.parse()?;
        assert_ne!(left_doc, right_doc);
        left_doc.clear_fmt_recursive();
        right_doc.clear_fmt_recursive();
        assert_eq!(left_doc, right_doc);
        Ok(())
    }

    // End-to-end parse exercising comments, slashdash, type annotations,
    // properties, semicolon separators, and exact round-tripping.
    #[test]
    fn parsing() -> miette::Result<()> {
        let src = "
// This is the first node
foo 1 2 \"three\" null true bar=\"baz\" {
- 1
- 2
- \"three\"
(mytype)something (\"name\")\"else\"\r
}
null_id null_prop=null
true_id true_prop=null
+false true
bar \"indented\" // trailing whitespace after this\t
/*
Some random comment
*/
a; b; c;
/-commented \"node\"
another /*foo*/ \"node\" /-1 /*bar*/ null;
final;";
        let mut doc: KdlDocument = src.parse()?;
        assert_eq!(doc.leading, Some("".into()));
        assert_eq!(doc.get_arg("foo"), Some(&1.into()));
        assert_eq!(
            doc.get_dash_vals("foo"),
            vec![&1.into(), &2.into(), &"three".into()]
        );
        let foo = doc.get("foo").expect("expected a foo node");
        assert_eq!(foo.leading, Some("\n// This is the first node\n".into()));
        assert_eq!(&foo[2], &"three".into());
        assert_eq!(&foo["bar"], &"baz".into());
        assert_eq!(
            foo.children().unwrap().get_arg("something"),
            Some(&"else".into())
        );
        assert_eq!(doc.get_arg("another"), Some(&"node".into()));
        let null = doc.get("null_id").expect("expected a null_id node");
        assert_eq!(&null["null_prop"], &KdlValue::Null);
        let tru = doc.get("true_id").expect("expected a true_id node");
        assert_eq!(&tru["true_prop"], &KdlValue::Null);
        let plusfalse = doc.get("+false").expect("expected a +false node");
        assert_eq!(&plusfalse[0], &KdlValue::Bool(true));
        let bar = doc.get("bar").expect("expected a bar node");
        assert_eq!(
            format!("{}", bar),
            "\n bar \"indented\" // trailing whitespace after this\t\n"
        );
        let a = doc.get("a").expect("expected a node");
        assert_eq!(
            format!("{}", a),
            "/*\nSome random comment\n */\n\na; ".to_string()
        );
        let b = doc.get("b").expect("expected a node");
        assert_eq!(format!("{}", b), "b; ".to_string());
        // Round-tripping works.
        assert_eq!(format!("{}", doc), src);
        // Programmatic manipulation works.
        let mut node: KdlNode = "new\n".parse()?;
        // Manual entry parsing preserves formatting/reprs.
        node.push("\"blah\"=0xDEADbeef".parse::<KdlEntry>()?);
        doc.nodes_mut().push(node);
        assert_eq!(
            format!("{}", doc),
            format!("{}new \"blah\"=0xDEADbeef\n", src)
        );
        Ok(())
    }

    // Building a document programmatically renders with default formatting.
    #[test]
    fn construction() {
        let mut doc = KdlDocument::new();
        doc.nodes_mut().push(KdlNode::new("foo"));
        let mut bar = KdlNode::new("bar");
        bar.insert("prop", "value");
        bar.push(1);
        bar.push(2);
        bar.push(false);
        bar.push(KdlValue::Null);
        let subdoc = bar.ensure_children();
        subdoc.nodes_mut().push(KdlNode::new("barchild"));
        doc.nodes_mut().push(bar);
        doc.nodes_mut().push(KdlNode::new("baz"));
        assert_eq!(
            r#"foo
bar prop="value" 1 2 false null {
barchild
}
baz
"#,
            format!("{}", doc)
        );
    }

    // Auto-formatting normalizes messy input (line continuations, odd
    // indentation, redundant semicolons) while preserving comments.
    #[test]
    fn fmt() -> miette::Result<()> {
        let mut doc: KdlDocument = r#"
/* x */ foo 1 "bar"=0xDEADbeef {
child1 1 ;
// child 2 comment
child2 2 // comment
child3 "
string\t" \
{
/*
multiline*/
inner1 \
r"value" \
;
inner2 \ //comment
{
inner3
}
}
}
// trailing comment here
"#
        .parse()?;
        KdlDocument::fmt(&mut doc);
        print!("{}", doc);
        assert_eq!(
            doc.to_string(),
            r#"/* x */
foo 1 bar=0xdeadbeef {
child1 1
// child 2 comment
child2 2 // comment
child3 "\n\n string\t" {
/*
multiline*/
inner1 r"value"
inner2 {
inner3
}
}
}
// trailing comment here"#
        );
        Ok(())
    }

    // The bundled example documents must all parse without error.
    #[test]
    fn parse_examples() -> miette::Result<()> {
        include_str!("../examples/kdl-schema.kdl").parse::<KdlDocument>()?;
        include_str!("../examples/Cargo.kdl").parse::<KdlDocument>()?;
        include_str!("../examples/ci.kdl").parse::<KdlDocument>()?;
        include_str!("../examples/nuget.kdl").parse::<KdlDocument>()?;
        Ok(())
    }
}
|
use std::path::Path;
use std::{borrow::Borrow, fs};
use std::{env, path::PathBuf};
use std::{
io::{stdin, stdout, Stdout, Write},
ops::Index,
};
use termion::*;
use termion::{
cursor::DetectCursorPos,
raw::{IntoRawMode, RawTerminal},
};
//use std::io::{self, BufRead, BufReader};
use dirs;
use termion::input::TermRead;
use super::super::parser::parser::CommandParse;
/// Holds the directory entries being browsed.
/// (Currently unused scaffolding.)
struct DirPathData {
    dirs: Vec<PathBuf>,
}

impl DirPathData {
    /// Creates an empty path list.
    pub fn new() -> Self {
        DirPathData { dirs: Vec::new() }
    }
}
/// Interactive directory navigator: lists subdirectories of the requested
/// path and lets the user walk the tree with the arrow keys.
///
/// Key bindings:
/// * Up/Down - move the selection cursor (wraps around)
/// * Right   - descend into the selected directory; if it has no
///             subdirectories, change into it and finish
/// * Left    - go up to the grandparent listing of the current selection
/// * Enter   - change the process working directory to the selection
/// * Ctrl-C  - abort without changing directory
///
/// Fixes over the original: `env::set_current_dir` failures are now
/// reported via `Err` instead of being silently ignored, and an empty
/// directory listing no longer panics (`dirs.len() - 1` underflow on
/// Up/Down, out-of-bounds indexing on Right/Left/Enter).
pub fn run_gcd(commands: &CommandParse) -> Result<(), String> {
    // An empty/whitespace path argument means "start from the cwd".
    let path = if commands.get_path().trim().is_empty() {
        "."
    } else {
        commands.get_path()
    };
    let mut dirs = map_dir(path);
    let mut index = 0;
    let stdin = stdin();
    let mut stdout = stdout().into_raw_mode().unwrap();
    let (x, y) = stdout.cursor_pos().unwrap();
    ls_dirs_with_index(&dirs, index, &mut stdout);
    for c in stdin.keys() {
        // Redraw the listing from the anchor position on every keypress.
        write!(stdout, "{}{}", cursor::Goto(x, y), clear::AfterCursor).unwrap();
        stdout.flush().unwrap();
        match c {
            Ok(event::Key::Up) => {
                // Wrap from the first entry to the last; guard the empty
                // listing so `dirs.len() - 1` cannot underflow.
                if !dirs.is_empty() {
                    index = if index == 0 { dirs.len() - 1 } else { index - 1 };
                }
                ls_dirs_with_index(&dirs, index, &mut stdout);
            }
            Ok(event::Key::Down) => {
                if !dirs.is_empty() {
                    index = if index + 1 >= dirs.len() { 0 } else { index + 1 };
                }
                ls_dirs_with_index(&dirs, index, &mut stdout);
            }
            Ok(event::Key::Right) => {
                if dirs.is_empty() {
                    continue;
                }
                let current_path = dirs[index].clone();
                dirs = map_dir(dirs[index].to_str().unwrap());
                // Leaf directory: enter it and finish.
                if dirs.is_empty() {
                    env::set_current_dir(&current_path).map_err(|e| e.to_string())?;
                    return Ok(());
                }
                index = 0;
                ls_dirs_with_index(&dirs, index, &mut stdout);
            }
            Ok(event::Key::Left) => {
                if dirs.is_empty() {
                    continue;
                }
                // Pop twice: once to leave the selected entry, once more to
                // reach the grandparent's listing.
                let mut parent_path = dirs[index].clone();
                parent_path.pop();
                parent_path.pop();
                dirs = map_dir(parent_path.to_str().unwrap());
                index = 0;
                ls_dirs_with_index(&dirs, index, &mut stdout);
            }
            // tab key: reserved, currently a no-op
            Ok(event::Key::Char('\t')) => {}
            // return key: commit the current selection as the new cwd
            Ok(event::Key::Char('\n')) => {
                if dirs.is_empty() {
                    continue;
                }
                env::set_current_dir(dirs.index(index)).map_err(|e| e.to_string())?;
                return Ok(());
            }
            Ok(event::Key::Ctrl('c')) => break,
            _ => {}
        }
    }
    Ok(())
}
/// Returns the paths of all immediate subdirectories of `path`.
///
/// Unreadable individual entries are skipped; panics (like the original)
/// if `path` itself cannot be read or an entry's metadata is unavailable.
fn map_dir(path: &str) -> Vec<PathBuf> {
    fs::read_dir(path)
        .unwrap()
        .flatten()
        .filter(|entry| entry.metadata().unwrap().is_dir())
        .map(|entry| entry.path())
        .collect()
}
/// Renders the directory list to the raw-mode terminal, prefixing the
/// entry at `index` with "> " to mark the current selection. Each line is
/// terminated with "\n\r" because the terminal is in raw mode.
fn ls_dirs_with_index(dirs: &Vec<PathBuf>, index: usize, stdout: &mut RawTerminal<Stdout>) {
    for (i, dir) in dirs.iter().enumerate() {
        let marker = if i == index { "> " } else { "" };
        write!(stdout, "{}{}\n\r", marker, dir.to_str().unwrap()).unwrap();
    }
    stdout.flush().unwrap();
}
|
//! Contains structures for the generation of random phrases/filenames.
use iron::prelude::*;
use persistent;
use rand;
use rand::Rng;
use iron::typemap::Key;
use std::iter::FromIterator;
/// Capitalises the first letter of an input string.
///
/// Works on `char` boundaries, so multi-byte UTF-8 input (e.g. "éclair")
/// no longer panics the way the previous byte slice `&input[0..1]` did.
/// Returns an empty string for empty input.
fn as_capital_case(input: &str) -> String {
    let mut chars = input.chars();
    match chars.next() {
        None => String::new(),
        Some(first) => {
            // `char::to_uppercase` may yield multiple chars (e.g. 'ß' -> "SS").
            let mut result: String = first.to_uppercase().collect();
            result.push_str(chars.as_str());
            result
        }
    }
}
/// Structure which generates phrases out of adjectives and nouns.
pub struct PhraseGenerator {
    // Word pools, one entry per line of the source word lists.
    adjectives: Vec<String>,
    nouns: Vec<String>,
}
impl PhraseGenerator {
    /// Generates a new string: one capitalised adjective followed by one
    /// capitalised noun, each picked uniformly at random.
    fn generate(&self) -> String {
        let mut rng = rand::thread_rng();
        let mut result = String::new();
        // TODO: Custom length
        for _ in 0..1 {
            let idx = rng.gen_range(0, self.adjectives.len());
            result.push_str(&as_capital_case(&self.adjectives[idx]));
        }
        let idx = rng.gen_range(0, self.nouns.len());
        result.push_str(&as_capital_case(&self.nouns[idx]));
        result
    }

    /// Creates a new PhraseGenerator from newline-separated word lists.
    pub fn new(adjectives: &str, nouns: &str) -> PhraseGenerator {
        PhraseGenerator {
            adjectives: adjectives.split('\n').map(String::from).collect(),
            nouns: nouns.split('\n').map(String::from).collect(),
        }
    }
}
/// Container used as middleware during execution of the webserver.
#[derive(Copy, Clone)]
pub struct PhraseGeneratorContainer;

// Lets a PhraseGenerator be stored in iron's typemap under this key.
impl Key for PhraseGeneratorContainer {
    type Value = PhraseGenerator;
}
/// Result of a phrase generation.
pub struct RandomFilename {
    pub filename: String,
}

impl RandomFilename {
    /// Creates a new RandomFilename, taking its context from a Request.
    ///
    /// Fetches the shared `PhraseGenerator` registered under
    /// `PhraseGeneratorContainer` in the request's persistent state and
    /// asks it for a fresh phrase.
    pub fn from(req: &mut Request) -> IronResult<RandomFilename> {
        let arc = req
            .get::<persistent::Read<PhraseGeneratorContainer>>()
            .unwrap();
        let phrases = arc.as_ref();
        Ok(RandomFilename {
            filename: phrases.generate(),
        })
    }
}
|
//! Abstract interpretation of the render commands to discover what symbolic
//! matrix is applied to each vertex.
use super::symbolic_matrix::{SMatrix, AMatrix};
use nitro::Model;
use nitro::render_cmds::SkinTerm;
// Index into `VertexRecord::matrices`; u16 caps a record at 65536 matrices.
type MatrixIdx = u16;
/// Records what symbolic matrix applies to each vertex.
pub struct VertexRecord {
    /// A list of all the symbolic matrices computed in the course of drawing
    /// the model.
    pub matrices: Vec<AMatrix>,
    /// For each vertex, which matrix in the above list is applied to it.
    pub vertices: Vec<MatrixIdx>,
}
impl VertexRecord {
    /// Replays the model's render-command list through an abstract
    /// interpreter (`Builder`), recording the symbolic matrix in effect
    /// for every vertex drawn.
    pub fn build_for_model(model: &Model) -> VertexRecord {
        let mut b = Builder::new(model);
        use nitro::render_cmds::Op;
        for op in &model.render_ops {
            match *op {
                Op::LoadMatrix { stack_pos } => b.load_matrix(stack_pos),
                Op::StoreMatrix { stack_pos } => b.store_matrix(stack_pos),
                Op::MulObject { object_idx } => b.mul_object(object_idx),
                Op::Skin { ref terms } => b.skin(&*terms),
                Op::ScaleUp => b.scale_up(),
                Op::ScaleDown => b.scale_down(),
                // Material binds don't affect matrices; nothing to record.
                Op::BindMaterial { .. } => (),
                Op::Draw { piece_idx } => b.draw(piece_idx),
            }
        }
        b.vr
    }
}
// Interpreter state while replaying render commands.
struct Builder<'a> {
    model: &'a Model,
    // The record being accumulated.
    vr: VertexRecord,
    // Index (into `vr.matrices`) of the currently active matrix.
    cur_matrix: MatrixIdx,
    // Symbolic GPU matrix stack; `None` marks slots never written.
    matrix_stack: Vec<Option<MatrixIdx>>,
}
impl<'a> Builder<'a> {
    fn new(model: &Model) -> Builder {
        Builder {
            model,
            vr: VertexRecord {
                // Slot 0 holds the identity; `cur_matrix` starts there.
                matrices: vec![AMatrix::one()],
                vertices: vec![],
            },
            cur_matrix: 0,
            matrix_stack: vec![None; 32],
        }
    }
    /// Add a new AMatrix to the record, returning its index.
    /// NOTE(review): the cast silently truncates past 65535 matrices —
    /// presumably unreachable for real models, but worth confirming.
    fn add_matrix(&mut self, mat: AMatrix) -> MatrixIdx {
        self.vr.matrices.push(mat);
        (self.vr.matrices.len() - 1) as MatrixIdx
    }
    fn fetch_from_stack(&mut self, stack_pos: u8) -> MatrixIdx {
        // If the slot is uninitialized, make a new Uninitialized SMatrix for
        // it.
        if self.matrix_stack[stack_pos as usize].is_none() {
            let uninit = SMatrix::Uninitialized { stack_pos }.into();
            let uninit_idx = self.add_matrix(uninit);
            self.matrix_stack[stack_pos as usize] = Some(uninit_idx);
        }
        self.matrix_stack[stack_pos as usize].unwrap()
    }
    // Make the matrix in the given stack slot current.
    fn load_matrix(&mut self, stack_pos: u8) {
        let idx = self.fetch_from_stack(stack_pos);
        self.cur_matrix = idx;
    }
    // Save the current matrix into the given stack slot.
    fn store_matrix(&mut self, stack_pos: u8) {
        self.matrix_stack[stack_pos as usize] = Some(self.cur_matrix);
    }
    // Right-multiply the current matrix by an object matrix, recording the
    // product as a new matrix.
    fn mul_object(&mut self, object_idx: u8) {
        let mut mat = self.vr.matrices[self.cur_matrix as usize].clone();
        mat *= SMatrix::Object { object_idx };
        self.cur_matrix = self.add_matrix(mat);
    }
    // Blend several stack matrices into a weighted sum (skinning).
    fn skin(&mut self, terms: &[SkinTerm]) {
        let mut acc = AMatrix::zero();
        for term in terms {
            // weight * stack[stack_pos] * inv_binds[inv_bind_idx]
            let mat_idx = self.fetch_from_stack(term.stack_pos);
            let mut mat = self.vr.matrices[mat_idx as usize].clone();
            mat *= SMatrix::InvBind { inv_bind_idx: term.inv_bind_idx };
            mat *= term.weight;
            acc += mat
        }
        let mat_idx = self.add_matrix(acc);
        self.cur_matrix = mat_idx;
    }
    // NOTE: Ignored for now, which is incorrect, but they IME don't end up
    // affecting the final skeleton (because they end up in the "longest suffix
    // of constant factors"; see joint_tree) so it doesn't matter much.
    fn scale_up(&mut self) { }
    fn scale_down(&mut self) { }
    // Interpret a piece's GPU command list, recording the current matrix
    // index for every vertex it emits.
    fn draw(&mut self, piece_idx: u8) {
        let piece = &self.model.pieces[piece_idx as usize];
        use nds::gpu_cmds::{CmdParser, GpuCmd};
        let interpreter = CmdParser::new(&piece.gpu_commands);
        for cmd_res in interpreter {
            // Stop at the first malformed command.
            if cmd_res.is_err() { break; }
            match cmd_res.unwrap() {
                GpuCmd::Restore { idx } => self.load_matrix(idx as u8),
                // Again, ignore scalings.
                GpuCmd::Scale { .. } => (),
                GpuCmd::Vertex { .. } => {
                    let cur_matrix = self.cur_matrix;
                    self.vr.vertices.push(cur_matrix)
                }
                _ => (),
            }
        }
    }
}
|
use std::fs;
use std::io::{self, Write};
// Intcode opcodes (Advent of Code day-5 subset).
const OP_EXIT: i32 = 99;
const OP_ADD: i32 = 1;
const OP_MULTIPLY: i32 = 2;
const OP_INPUT: i32 = 3;
const OP_OUTPUT: i32 = 4;

/// Executes an Intcode program (add, multiply, input, output, halt) and
/// returns the value left in cell 0.
///
/// * Opcode 3 prompts on stdout with "> " and reads an integer from stdin.
/// * Opcode 4 prints its parameter to stdout.
/// * Unknown opcodes are silently skipped (matching prior behavior).
fn run_program(mut program: Vec<i32>) -> i32 {
    // Low two digits of an instruction are the opcode.
    fn read_opcode(instr: i32) -> i32 {
        instr % 100
    }
    // Digit `index + 2` (counting from the right) is parameter `index`'s mode.
    fn read_param_mode(instr: i32, index: u32) -> i32 {
        instr % 10_i32.pow(index + 3) / 10_i32.pow(index + 2)
    }
    // Resolves parameter `index` of the instruction at `start - 1`:
    // mode 0 = position (dereference), anything else = immediate.
    fn read_param_value(start: usize, program: &Vec<i32>, index: u32) -> i32 {
        let mode = read_param_mode(program[start - 1], index);
        let param = program[start + index as usize];
        if mode == 0 {
            program[param as usize]
        } else {
            param
        }
    }

    // `pc` points just past the opcode while operands are decoded.
    let mut pc: usize = 0;
    while pc < program.len() {
        let instr = program[pc];
        let opcode = read_opcode(instr);
        pc += 1;
        match opcode {
            OP_EXIT => {
                println!("Program exited");
                break;
            }
            OP_ADD | OP_MULTIPLY => {
                let lhs = read_param_value(pc, &program, 0);
                let rhs = read_param_value(pc, &program, 1);
                // Writes are always positional.
                let dest = program[pc + 2] as usize;
                pc += 3;
                program[dest] = if opcode == OP_ADD { lhs + rhs } else { lhs * rhs };
            }
            OP_INPUT => {
                let dest = program[pc];
                pc += 1;
                let mut input_text = String::new();
                print!("> ");
                io::stdout().flush().unwrap();
                io::stdin().read_line(&mut input_text).unwrap();
                program[dest as usize] = input_text
                    .trim()
                    .parse::<i32>()
                    .expect("Input value is not an integer");
            }
            OP_OUTPUT => {
                let value = read_param_value(pc, &program, 0);
                pc += 1;
                println!("{}", value);
            }
            _ => {}
        }
    }
    program[0]
}
/// Entry point: loads a comma-separated Intcode program from `input.txt`
/// and executes it (the return value of `run_program` is ignored).
fn main() {
    let program = fs::read_to_string("input.txt")
        .expect("failed to read input.txt")
        .trim()
        .split(",")
        .map(|x| x.parse::<i32>().expect("program cell is not an integer"))
        .collect::<Vec<i32>>();
    // FIX: `run_program` takes ownership and `program` is never used again,
    // so the previous `program.clone()` was a redundant full copy.
    run_program(program);
}
|
use std::fs;
/// Runs the survey tally on both the sample and the real puzzle input.
fn main() {
    read_surveys("input/sample.input");
    read_surveys("input/actual.input");
}
/// Reads a survey file and prints the sum, over all groups, of the questions
/// that *every* member of the group answered.
///
/// Groups are separated by blank lines; each non-blank line is one person's
/// answers as letters.
fn read_surveys(input: &str) {
    let contents = fs::read_to_string(input)
        .expect("Something went wrong reading the file");
    // Per-question counters for the current group.
    let mut answers: [i32; 26] = [0; 26];
    let mut count = 0;
    let mut group_size = 0;
    for line in contents.lines() {
        if line != "" {
            group_size += 1;
            for letter in line.chars() {
                // assumes answers are lowercase ASCII 'a'..='z' — a stray
                // character would underflow/panic here; TODO confirm input.
                let index = letter as u32 - 'a' as u32;
                answers[index as usize] += 1;
            }
        } else {
            // Blank line ends a group: count unanimous answers, then reset.
            for response in answers.iter() {
                if *response == group_size {
                    count += 1
                }
            }
            answers = [0; 26];
            group_size = 0;
        }
    }
    // BUG FIX: the final group is usually not followed by a blank line, so it
    // was never tallied. Flush it here (guarded so an empty tail adds nothing).
    if group_size > 0 {
        for response in answers.iter() {
            if *response == group_size {
                count += 1
            }
        }
    }
    println!("sum : {}", count);
}
|
// Copyright 2018-2019 Parity Technologies (UK) Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::path::PathBuf;
use cargo_metadata::MetadataCommand;
use parity_wasm::elements::{
External,
MemoryType,
Module,
Section,
};
use crate::cmd::{
CommandError as Error,
Result,
};
/// This is the maximum number of pages available for a contract to allocate.
const MAX_MEMORY_PAGES: u32 = 16;

/// Relevant metadata obtained from Cargo.toml.
pub struct CrateMetadata {
    // Wasm artifact as produced by `cargo build`.
    original_wasm: PathBuf,
    // Where the post-processed wasm is written.
    dest_wasm: PathBuf,
}

impl CrateMetadata {
    /// Get the path of the wasm destination file.
    ///
    /// FIX: takes `&self` instead of consuming the value, so callers can keep
    /// using the metadata afterwards; the path is cheap to clone. Existing
    /// call sites compile unchanged (auto-ref on method call).
    pub fn dest_wasm(&self) -> PathBuf {
        self.dest_wasm.clone()
    }
}
/// Parses the contract manifest and returns relevant metadata.
///
/// Runs `cargo metadata` (optionally in `working_dir`) and derives both the
/// original and the destination wasm paths for the root package.
pub fn collect_crate_metadata(working_dir: Option<&PathBuf>) -> Result<CrateMetadata> {
    let mut cmd = MetadataCommand::new();
    if let Some(dir) = working_dir {
        cmd.current_dir(dir);
    }
    let metadata = cmd.exec()?;

    // The root package id comes from the dependency resolution graph.
    let root_package_id = metadata
        .resolve
        .and_then(|resolve| resolve.root)
        .ok_or_else(|| Error::Other("Cannot infer the root project id".to_string()))?;

    // Find the root package by id in the list of packages. It is logical error if the root
    // package is not found in the list.
    let root_package = metadata
        .packages
        .iter()
        .find(|package| package.id == root_package_id)
        .expect("The package is not found in the `cargo metadata` output");

    // Cargo replaces `-` with `_` in artifact file names.
    let package_name = root_package.name.replace("-", "_");

    // {target_dir}/wasm32-unknown-unknown/release/{package_name}.wasm
    let original_wasm = {
        let mut path = metadata.target_directory.clone();
        path.push("wasm32-unknown-unknown");
        path.push("release");
        path.push(&package_name);
        path.set_extension("wasm");
        path
    };

    // {target_dir}/{package_name}.wasm
    let dest_wasm = {
        let mut path = metadata.target_directory.clone();
        path.push(&package_name);
        path.set_extension("wasm");
        path
    };

    Ok(CrateMetadata {
        original_wasm,
        dest_wasm,
    })
}
/// Invokes `cargo build` in the specified directory, defaults to the current directory.
///
/// Currently it assumes that user wants to use `+nightly`.
fn build_cargo_project(working_dir: Option<&PathBuf>) -> Result<()> {
    // Delegates to the shared cargo runner; flags request a verbose release
    // build for the wasm target with default features disabled.
    super::exec_cargo(
        "build",
        &[
            "--no-default-features",
            "--release",
            "--target=wasm32-unknown-unknown",
            "--verbose",
        ],
        working_dir,
    )
}
/// Ensures the wasm memory import of a given module has the maximum number of pages.
///
/// Iterates over the import section, finds the memory import entry if any and adjusts the maximum
/// limit.
///
/// # Errors
///
/// Fails if the module has no memory import, or if it already requests more
/// than `maximum_allowed_pages` pages.
fn ensure_maximum_memory_pages(
    module: &mut Module,
    maximum_allowed_pages: u32,
) -> Result<()> {
    let mem_ty = module
        .import_section_mut()
        .and_then(|section| {
            section.entries_mut()
                .iter_mut()
                .find_map(|entry| {
                    match entry.external_mut() {
                        External::Memory(ref mut mem_ty) => Some(mem_ty),
                        _ => None,
                    }
                })
        })
        .ok_or_else(||
            Error::Other(
                "Memory import is not found. Is --import-memory specified in the linker args".to_string()
            )
        )?;
    if let Some(requested_maximum) = mem_ty.limits().maximum() {
        // The module already declares a maximum; bail out if it exceeds the limit.
        if requested_maximum > maximum_allowed_pages {
            return Err(
                Error::Other(
                    format!(
                        "The wasm module requires {} pages. The maximum allowed number of pages is {}",
                        requested_maximum,
                        maximum_allowed_pages,
                    )
                )
            );
        }
    } else {
        // BUG FIX: previously this wrote the global `MAX_MEMORY_PAGES`
        // constant, silently ignoring the `maximum_allowed_pages` parameter.
        let initial = mem_ty.limits().initial();
        *mem_ty = MemoryType::new(initial, Some(maximum_allowed_pages));
    }
    Ok(())
}
/// Strips all custom sections.
///
/// Presently all custom sections are not required so they can be stripped safely.
fn strip_custom_sections(module: &mut Module) {
    // Keep only the sections that are not custom/name/reloc.
    module.sections_mut().retain(|section| match section {
        Section::Custom(_) | Section::Name(_) | Section::Reloc(_) => false,
        _ => true,
    });
}
/// Performs required post-processing steps on the wasm artifact.
///
/// Reads `original_wasm`, optimizes it, caps the memory import, strips custom
/// sections and serializes the result to `dest_wasm`.
fn post_process_wasm(crate_metadata: &CrateMetadata) -> Result<()> {
    // Deserialize wasm module from a file.
    let mut module = parity_wasm::deserialize_file(&crate_metadata.original_wasm)?;
    // Perform optimization.
    //
    // In practice only tree-shaking is performed, i.e transitively removing all symbols that are
    // NOT used by the specified entrypoints.
    pwasm_utils::optimize(&mut module, ["call", "deploy"].to_vec())?;
    ensure_maximum_memory_pages(&mut module, MAX_MEMORY_PAGES)?;
    strip_custom_sections(&mut module);
    parity_wasm::serialize_to_file(&crate_metadata.dest_wasm, module)?;
    Ok(())
}
/// Executes build of the smart-contract which produces a wasm binary that is ready for deploying.
///
/// It does so by invoking build by cargo and then post processing the final binary.
///
/// Returns a human-readable message pointing at the produced wasm file.
pub(crate) fn execute_build(working_dir: Option<&PathBuf>) -> Result<String> {
    println!(" [1/3] Collecting crate metadata");
    let crate_metadata = collect_crate_metadata(working_dir)?;
    println!(" [2/3] Building cargo project");
    build_cargo_project(working_dir)?;
    println!(" [3/3] Post processing wasm file");
    post_process_wasm(&crate_metadata)?;
    Ok(format!(
        "Your contract is ready.\nYou can find it here:\n{}",
        crate_metadata.dest_wasm.display()
    ))
}
#[cfg(test)]
mod tests {
    use crate::{
        cmd::{
            execute_new,
            tests::with_tmp_dir,
        },
        AbstractionLayer,
    };
    /// End-to-end smoke test: scaffold a new project in a temp dir, then run
    /// the full build pipeline on it. Gated behind `test-ci-only` because it
    /// performs a real cargo build.
    #[cfg(feature = "test-ci-only")]
    #[test]
    fn build_template() {
        with_tmp_dir(|path| {
            execute_new(AbstractionLayer::Lang, "new_project", Some(path))
                .expect("new project creation failed");
            super::execute_build(Some(&path.join("new_project"))).expect("build failed");
        });
    }
}
|
// Placeholder type used to demonstrate method-call syntax in `main`.
struct Player {}
impl Player {
    // Prints a fixed marker so calls through a value and through a reference
    // can be observed to behave identically.
    fn location(&self) {
        println!("method()");
    }
}
/// Demo entry point exercising several Rust control-flow constructs.
fn main() {
    println!("Hello, world!");
    // `while let` loops as long as the pattern matches (here: until Ok).
    let mut i = 0;
    let mut v = Err("an error");
    while let Err(_) = v {
        i += 1;
        if i > 3 {
            v = Ok("success");
        }
    }
    println!("{:?}", v);
    // Labeled break: exits both nested loops at once.
    'search:
    for x in 0..3 {
        for y in 0..3 {
            println!("[{}][{}]", x, y);
            if x > y {
                break 'search;
            }
        }
    }
    // Method calls auto-reference: `p.location()` and `rp.location()` behave
    // the same whether invoked on the value or on a reference.
    let p = Player {};
    let rp = &p;
    p.location();
    rp.location();
    // Shadowing: the capacity-32 Vec<i32> is immediately replaced by a
    // Vec<f64> bound to the same name (the first binding is never used).
    let v = Vec::<i32>::with_capacity(32);
    let v = vec![1.0, 2.0, 3.0];
    let sum = sum(&v);
    println!("sum {}", sum);
}
/// Returns the sum of all elements in the slice (0.0 for an empty slice).
///
/// FIX: the parameter is now `&[f64]` instead of the `&Vec<f64>` anti-pattern;
/// existing callers passing `&vec` still compile via deref coercion, and the
/// function additionally accepts arrays and sub-slices.
fn sum(v: &[f64]) -> f64 {
    v.iter().sum()
}
|
extern crate bkchainsaw;
use std::env;
use std::error::Error;
use std::fs::File;
use std::io::Write as IOWrite;
use std::io::{BufRead, BufReader, BufWriter};
use std::io::{Seek, SeekFrom};
use std::path::PathBuf;
use structopt::StructOpt;
use bkchainsaw::bk;
use bkchainsaw::bkfile;
use bkchainsaw::bktree;
use bkchainsaw::bktree::BkTreeAdd;
use bkchainsaw::bktreemut;
use bkchainsaw::keys;
use bkchainsaw::HammingMetric;
#[macro_use]
extern crate structopt;
// Command-line arguments parsed by structopt. NOTE: regular `//` comments are
// used on purpose — `///` doc comments would be picked up by structopt as
// user-visible help text and change the CLI output.
#[derive(Debug, StructOpt)]
#[structopt(name = "bktree_from_ints", about = "Build an in-ram bktree")]
struct CommandLineArgs {
    // Input file; presumably one integer per line (see how main parses it).
    #[structopt(parse(from_os_str))]
    input_filename: PathBuf,
}
/// Builds an in-RAM BK-tree from a file of integers (one per line, parsed as
/// `u64`) and prints the resulting tree.
fn main() -> Result<(), Box<dyn Error + 'static>> {
    let opts = CommandLineArgs::from_args();
    let args: Vec<String> = env::args().collect();
    println!("args: {:?}", args);
    // 1: input numbers
    let mut tree: bk::BkInRamTree<
        '_,
        keys::U64Key,
        HammingMetric<u64>,
        bk::BkInRamAllocator<'_, u64>,
    > = bk::BkInRamTree::new(HammingMetric::default(), &bk::U64_ALLOC);
    // BUG FIX: the structopt-parsed `opts.input_filename` was previously
    // ignored in favor of raw `args[1]`, which bypasses argument validation
    // and panics with an index error when no argument is supplied.
    let numbers = BufReader::new(File::open(&opts.input_filename)?).lines();
    for numstr in numbers {
        let num: u64 = numstr?.parse()?;
        tree.add(num)?;
    }
    println!("{:?}", tree);
    Ok(())
}
|
//! A module containing utility functions which the grid relies on.
pub mod string;
|
use crate::{Address, TemplateAddr};
#[doc(hidden)]
#[derive(Debug, PartialEq, Clone)]
/// Errors raised while running a template function against an account.
/// Most variants carry the `target` account, the `template` involved and,
/// where applicable, the function name and a message.
pub enum RuntimeError {
    /// Ran out of gas ("OOG" — presumably Out-Of-Gas; TODO confirm).
    OOG,
    /// No template found at the given address.
    TemplateNotFound(TemplateAddr),
    /// No account found at the given address.
    AccountNotFound(Address),
    /// Compiling the template's code failed.
    CompilationFailed {
        target: Address,
        template: TemplateAddr,
        msg: String,
    },
    /// Instantiating the compiled template failed.
    InstantiationFailed {
        target: Address,
        template: TemplateAddr,
        msg: String,
    },
    /// The requested function does not exist on the template.
    FuncNotFound {
        target: Address,
        template: TemplateAddr,
        func: String,
    },
    /// The function was found but failed while executing.
    FuncFailed {
        target: Address,
        template: TemplateAddr,
        func: String,
        msg: String,
    },
    /// The function exists but may not be invoked in this context.
    FuncNotAllowed {
        target: Address,
        template: TemplateAddr,
        func: String,
        msg: String,
    },
    /// The function's signature does not match what the caller expected.
    FuncInvalidSignature {
        target: Address,
        template: TemplateAddr,
        func: String,
    },
}
|
use structopt::StructOpt;
mod config;
use crate::config::CorvusOpt;
/// Parses the command-line options and echoes them back for inspection.
fn main() {
    let opt = CorvusOpt::from_args();
    println!("opt: {:?}", opt);
}
|
use std::ffi::OsStr;
use std::io;
use std::io::Error;
use std::iter::once;
use std::mem;
use std::os::windows::ffi::OsStrExt;
use std::ptr::null_mut;
use winapi::shared::minwindef::{BOOL, FALSE};
use winapi::shared::ntdef::NULL;
use winapi::um::handleapi::CloseHandle;
use winapi::um::processthreadsapi::{GetCurrentProcess, OpenProcessToken};
use winapi::um::securitybaseapi::AdjustTokenPrivileges;
use winapi::um::winbase::LookupPrivilegeValueW;
use winapi::um::winnt::{
HANDLE, PLUID, SE_PRIVILEGE_ENABLED, TOKEN_ADJUST_PRIVILEGES, TOKEN_PRIVILEGES,
};
/// Represents a process token. The associated `HANDLE` is closed when
/// this object is dropped.
struct ProcessToken(HANDLE);
impl ProcessToken {
/// Obtains the process token with the given access for the current process
///
/// # Arguments
///
/// * `desired_access`: Token access level
pub fn open_current(desired_access: u32) -> io::Result<Self> {
Self::open(unsafe { GetCurrentProcess() }, desired_access)
}
/// Obtains the process token for the given `process`
///
/// # Arguments
///
/// * `process`: Process to get the token for
/// * `desired_access`: Token access level
pub fn open(process: HANDLE, desired_access: u32) -> io::Result<Self> {
let mut process_token: HANDLE = NULL;
let result =
unsafe { OpenProcessToken(process, desired_access, &mut process_token as *mut HANDLE) };
match result {
0 => Err(Error::last_os_error()),
_ => Ok(ProcessToken(process_token)),
}
}
}
impl Drop for ProcessToken {
fn drop(&mut self) {
unsafe { CloseHandle(self.0) };
}
}
/// Updates the privileges of the current thread to include SeSystemEnvironmentPrivilege, which is
/// required to read and write NVRAM variables.
///
/// # Errors
///
/// Any errors from the underlying winapi calls will be returned as `Err()`
pub fn update_privileges() -> io::Result<()> {
    // We need SeSystemEnvironmentPrivilege to do anything NVRAM-related
    // So we configure it for the current thread here
    // This means SystemManager is not Send
    let mut tp = mem::MaybeUninit::<TOKEN_PRIVILEGES>::uninit();
    // Lookup privilege value for SeSystemEnvironmentPrivilege
    // (wide, NUL-terminated string as required by the W-suffixed API).
    let se_system_environment_privilege: Vec<u16> = OsStr::new("SeSystemEnvironmentPrivilege")
        .encode_wide()
        .chain(once(0))
        .collect();
    let (mut tp, result) = unsafe {
        (*tp.as_mut_ptr()).PrivilegeCount = 1;
        let result = LookupPrivilegeValueW(
            null_mut(),
            se_system_environment_privilege.as_ptr(),
            &mut (*tp.as_mut_ptr()).Privileges[0].Luid as PLUID,
        );
        // NOTE(review): `assume_init` runs while `Privileges[0].Attributes`
        // is still uninitialized (it is only written below) — technically
        // unsound; consider zero-initializing the struct instead. Confirm.
        (tp.assume_init(), result)
    };
    if result == 0 {
        return Err(Error::last_os_error());
    }
    // Set privilege to enabled
    tp.Privileges[0].Attributes = SE_PRIVILEGE_ENABLED;
    // Get the current *process* token (adjusted with TOKEN_ADJUST_PRIVILEGES).
    let process_token = ProcessToken::open_current(TOKEN_ADJUST_PRIVILEGES)?;
    // Update current security token
    let result = unsafe {
        AdjustTokenPrivileges(
            process_token.0,
            FALSE as BOOL,
            &mut tp as *mut TOKEN_PRIVILEGES,
            0,
            null_mut(),
            null_mut(),
        )
    };
    // Check that the update is successful
    match result {
        0 => Err(Error::last_os_error()),
        _ => Ok(()),
    }
}
|
// Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
//! get_if_addrs
#![doc(
html_logo_url = "https://raw.githubusercontent.com/maidsafe/QA/master/Images/
maidsafe_logo.png",
html_favicon_url = "http://maidsafe.net/img/favicon.ico",
html_root_url = "http://maidsafe.github.io/get_if_addrs"
)]
// For explanation of lint checks, run `rustc -W help` or see
// https://github.com/maidsafe/QA/blob/master/Documentation/Rust%20Lint%20Checks.md
#![forbid(
exceeding_bitshifts, mutable_transmutes, no_mangle_const_items, unknown_crate_types, warnings
)]
#![deny(
bad_style, deprecated, improper_ctypes, missing_docs, non_shorthand_field_patterns,
overflowing_literals, plugin_as_library, private_no_mangle_fns, private_no_mangle_statics,
stable_features, unconditional_recursion, unknown_lints, unsafe_code, unused, unused_allocation,
unused_attributes, unused_comparisons, unused_features, unused_parens, while_true
)]
#![warn(
trivial_casts, trivial_numeric_casts, unused_extern_crates, unused_import_braces,
unused_qualifications, unused_results
)]
#![allow(
box_pointers, missing_copy_implementations, missing_debug_implementations,
variant_size_differences
)]
#![cfg_attr(
feature = "cargo-clippy",
deny(clippy, unicode_not_nfc, wrong_pub_self_convention, option_unwrap_used)
)]
#![cfg_attr(feature = "cargo-clippy", allow(use_debug, too_many_arguments))]
#[cfg(windows)]
extern crate winapi;
use std::io;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
#[cfg(test)]
#[macro_use]
extern crate unwrap;
extern crate c_linked_list;
#[cfg(target_os = "android")]
extern crate get_if_addrs_sys;
extern crate libc;
/// Details about an interface on this host
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct Interface {
    /// The name of the interface.
    pub name: String,
    /// The address details of the interface.
    pub addr: IfAddr,
}
/// Details about the address of an interface on this host
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub enum IfAddr {
    /// This is an Ipv4 interface.
    V4(Ifv4Addr),
    /// This is an Ipv6 interface.
    V6(Ifv6Addr),
}
/// Details about the ipv4 address of an interface on this host
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct Ifv4Addr {
    /// The IP address of the interface.
    pub ip: Ipv4Addr,
    /// The netmask of the interface.
    pub netmask: Ipv4Addr,
    /// The broadcast address of the interface.
    /// `None` when the platform does not report one for this interface.
    pub broadcast: Option<Ipv4Addr>,
}
/// Details about the ipv6 address of an interface on this host
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct Ifv6Addr {
    /// The IP address of the interface.
    pub ip: Ipv6Addr,
    /// The netmask of the interface.
    pub netmask: Ipv6Addr,
    /// The broadcast address of the interface.
    /// Always `None` on the Windows code path; POSIX may populate it.
    pub broadcast: Option<Ipv6Addr>,
}
impl Interface {
    /// Check whether this is a loopback interface.
    /// (Delegates to [`IfAddr::is_loopback`].)
    pub fn is_loopback(&self) -> bool {
        self.addr.is_loopback()
    }
    /// Get the IP address of this interface.
    /// (Delegates to [`IfAddr::ip`].)
    pub fn ip(&self) -> IpAddr {
        self.addr.ip()
    }
}
impl IfAddr {
    /// Check whether this is a loopback address.
    /// Dispatches to the version-specific check.
    pub fn is_loopback(&self) -> bool {
        match *self {
            IfAddr::V4(ref ifv4_addr) => ifv4_addr.is_loopback(),
            IfAddr::V6(ref ifv6_addr) => ifv6_addr.is_loopback(),
        }
    }
    /// Get the IP address of this interface address.
    /// Wraps the concrete address in the version-agnostic `IpAddr`.
    pub fn ip(&self) -> IpAddr {
        match *self {
            IfAddr::V4(ref ifv4_addr) => IpAddr::V4(ifv4_addr.ip),
            IfAddr::V6(ref ifv6_addr) => IpAddr::V6(ifv6_addr.ip),
        }
    }
}
impl Ifv4Addr {
    /// Check whether this is a loopback address.
    pub fn is_loopback(&self) -> bool {
        // FIX: delegate to the standard library's 127.0.0.0/8 check instead
        // of hand-testing the first octet (same semantics).
        self.ip.is_loopback()
    }
}
impl Ifv6Addr {
    /// Check whether this is a loopback address.
    pub fn is_loopback(&self) -> bool {
        // FIX: delegate to the standard library's `::1` check instead of
        // comparing the segments manually (same semantics).
        self.ip.is_loopback()
    }
}
#[cfg(not(windows))]
mod getifaddrs_posix {
use super::{IfAddr, Ifv4Addr, Ifv6Addr, Interface};
use c_linked_list::CLinkedListMut;
#[cfg(target_os = "android")]
use get_if_addrs_sys::freeifaddrs as posix_freeifaddrs;
#[cfg(target_os = "android")]
use get_if_addrs_sys::getifaddrs as posix_getifaddrs;
#[cfg(target_os = "android")]
use get_if_addrs_sys::ifaddrs as posix_ifaddrs;
#[cfg(not(target_os = "android"))]
use libc::freeifaddrs as posix_freeifaddrs;
#[cfg(not(target_os = "android"))]
use libc::getifaddrs as posix_getifaddrs;
#[cfg(not(target_os = "android"))]
use libc::ifaddrs as posix_ifaddrs;
use libc::sockaddr as posix_sockaddr;
use libc::sockaddr_in as posix_sockaddr_in;
use libc::sockaddr_in6 as posix_sockaddr_in6;
use libc::{AF_INET6, AF_INET};
use std::ffi::CStr;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use std::{io, mem};
/// Converts a raw `sockaddr` pointer into a Rust `IpAddr`.
///
/// Returns `None` for null pointers, unsupported address families and IPv6
/// link-local (`fe80::`) addresses, which are deliberately skipped.
#[allow(unsafe_code)]
fn sockaddr_to_ipaddr(sockaddr: *const posix_sockaddr) -> Option<IpAddr> {
    if sockaddr.is_null() {
        return None;
    }
    let sa_family = u32::from(unsafe { *sockaddr }.sa_family);
    if sa_family == AF_INET as u32 {
        #[cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
        let sa = &unsafe { *(sockaddr as *const posix_sockaddr_in) };
        // NOTE(review): `s_addr` is in network byte order; taking the lowest
        // byte as the first octet assumes a little-endian host — confirm.
        Some(IpAddr::V4(Ipv4Addr::new(
            ((sa.sin_addr.s_addr) & 255) as u8,
            ((sa.sin_addr.s_addr >> 8) & 255) as u8,
            ((sa.sin_addr.s_addr >> 16) & 255) as u8,
            ((sa.sin_addr.s_addr >> 24) & 255) as u8,
        )))
    } else if sa_family == AF_INET6 as u32 {
        #[cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
        let sa = &unsafe { *(sockaddr as *const posix_sockaddr_in6) };
        // Ignore all fe80:: addresses as these are link locals
        if sa.sin6_addr.s6_addr[0] == 0xfe && sa.sin6_addr.s6_addr[1] == 0x80 {
            return None;
        }
        Some(IpAddr::V6(Ipv6Addr::from(sa.sin6_addr.s6_addr)))
    } else {
        None
    }
}
// On Linux-likes the broadcast/destination address is exposed via the
// `ifa_ifu` field of `ifaddrs`.
#[cfg(any(target_os = "linux", target_os = "android", target_os = "nacl"))]
fn do_broadcast(ifaddr: &posix_ifaddrs) -> Option<IpAddr> {
    sockaddr_to_ipaddr(ifaddr.ifa_ifu)
}
// On BSD-likes the equivalent field is named `ifa_dstaddr`.
#[cfg(
    any(target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "openbsd")
)]
fn do_broadcast(ifaddr: &posix_ifaddrs) -> Option<IpAddr> {
    sockaddr_to_ipaddr(ifaddr.ifa_dstaddr)
}
/// Return a vector of IP details for all the valid interfaces on this host
///
/// # Errors
///
/// Returns the OS error if `getifaddrs` fails.
#[allow(unsafe_code)]
#[allow(trivial_casts)]
pub fn get_if_addrs() -> io::Result<Vec<Interface>> {
    let mut ret = Vec::<Interface>::new();
    // BUG FIX: `mem::uninitialized()` is deprecated (and undefined behaviour
    // to produce) and trips this crate's `#![forbid(warnings)]` /
    // `#![deny(deprecated)]` on modern compilers. A zeroed (null) raw pointer
    // is a well-defined placeholder that `getifaddrs` overwrites on success.
    let mut ifaddrs: *mut posix_ifaddrs = unsafe { mem::zeroed() };
    unsafe {
        if -1 == posix_getifaddrs(&mut ifaddrs) {
            return Err(io::Error::last_os_error());
        }
    }
    for ifaddr in unsafe { CLinkedListMut::from_ptr(ifaddrs, |a| a.ifa_next) }.iter() {
        if ifaddr.ifa_addr.is_null() {
            continue;
        }
        let name = unsafe { CStr::from_ptr(ifaddr.ifa_name as *const _) }
            .to_string_lossy()
            .into_owned();
        // Entries with an unsupported address family are skipped entirely.
        let addr = match sockaddr_to_ipaddr(ifaddr.ifa_addr) {
            None => continue,
            Some(IpAddr::V4(ipv4_addr)) => {
                let netmask = match sockaddr_to_ipaddr(ifaddr.ifa_netmask) {
                    Some(IpAddr::V4(netmask)) => netmask,
                    _ => Ipv4Addr::new(0, 0, 0, 0),
                };
                // NOTE(review): flag bit 2 is presumably IFF_BROADCAST — confirm.
                let broadcast = if (ifaddr.ifa_flags & 2) != 0 {
                    match do_broadcast(ifaddr) {
                        Some(IpAddr::V4(broadcast)) => Some(broadcast),
                        _ => None,
                    }
                } else {
                    None
                };
                IfAddr::V4(Ifv4Addr {
                    ip: ipv4_addr,
                    netmask,
                    broadcast,
                })
            }
            Some(IpAddr::V6(ipv6_addr)) => {
                let netmask = match sockaddr_to_ipaddr(ifaddr.ifa_netmask) {
                    Some(IpAddr::V6(netmask)) => netmask,
                    _ => Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0),
                };
                let broadcast = if (ifaddr.ifa_flags & 2) != 0 {
                    match do_broadcast(ifaddr) {
                        Some(IpAddr::V6(broadcast)) => Some(broadcast),
                        _ => None,
                    }
                } else {
                    None
                };
                IfAddr::V6(Ifv6Addr {
                    ip: ipv6_addr,
                    netmask,
                    broadcast,
                })
            }
        };
        ret.push(Interface { name, addr });
    }
    // Release the OS-allocated linked list.
    unsafe {
        posix_freeifaddrs(ifaddrs);
    }
    Ok(ret)
}
}
/// Get a list of all the network interfaces on this machine along with their IP info.
///
/// # Errors
///
/// Propagates any OS error reported while enumerating the interfaces.
#[cfg(not(windows))]
pub fn get_if_addrs() -> io::Result<Vec<Interface>> {
    getifaddrs_posix::get_if_addrs()
}
#[cfg(windows)]
mod getifaddrs_windows {
use super::{IfAddr, Ifv4Addr, Ifv6Addr, Interface};
use c_linked_list::CLinkedListConst;
use libc;
use libc::{c_char, c_int, c_ulong, c_void, size_t};
use std::ffi::CStr;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use std::{io, ptr};
use winapi::SOCKADDR as sockaddr;
use winapi::SOCKADDR_IN as sockaddr_in;
use winapi::{sockaddr_in6, AF_INET6, AF_INET, DWORD, ERROR_SUCCESS};
// FFI mirror of the win32 `SOCKET_ADDRESS` structure.
#[repr(C)]
struct SocketAddress {
    pub lp_socket_address: *const sockaddr,
    pub i_socket_address_length: c_int,
}
// Partial FFI mirror of `IP_ADAPTER_UNICAST_ADDRESS` as consumed by
// `GetAdaptersAddresses`.
#[repr(C)]
struct IpAdapterUnicastAddress {
    pub length: c_ulong,
    pub flags: DWORD,
    pub next: *const IpAdapterUnicastAddress,
    // Loads more follows, but I'm not bothering to map these for now
    pub address: SocketAddress,
}
// FFI mirror of `IP_ADAPTER_PREFIX`.
#[repr(C)]
struct IpAdapterPrefix {
    pub length: c_ulong,
    pub flags: DWORD,
    pub next: *const IpAdapterPrefix,
    pub address: SocketAddress,
    pub prefix_length: c_ulong,
}
// FFI mirror of `IP_ADAPTER_ADDRESSES`; only the fields up to `first_prefix`
// are named/used, the rest exist to keep the layout correct.
#[repr(C)]
struct IpAdapterAddresses {
    pub length: c_ulong,
    pub if_index: DWORD,
    pub next: *const IpAdapterAddresses,
    pub adapter_name: *const c_char,
    pub first_unicast_address: *const IpAdapterUnicastAddress,
    first_anycast_address: *const c_void,
    first_multicast_address: *const c_void,
    first_dns_server_address: *const c_void,
    dns_suffix: *const c_void,
    description: *const c_void,
    friendly_name: *const c_void,
    physical_address: [c_char; 8],
    physical_address_length: DWORD,
    flags: DWORD,
    mtu: DWORD,
    if_type: DWORD,
    oper_status: c_int,
    ipv6_if_index: DWORD,
    zone_indices: [DWORD; 16],
    // Loads more follows, but I'm not bothering to map these for now
    pub first_prefix: *const IpAdapterPrefix,
}
#[link(name = "Iphlpapi")]
extern "system" {
    /// get adapter's addresses
    ///
    /// Binding for win32 `GetAdaptersAddresses`: fills `addresses` with a
    /// linked list of adapters; `size` is in/out for the buffer size.
    fn GetAdaptersAddresses(
        family: c_ulong,
        flags: c_ulong,
        reserved: *const c_void,
        addresses: *const IpAdapterAddresses,
        size: *mut c_ulong,
    ) -> c_ulong;
}
/// Converts a raw win32 `sockaddr` pointer into a Rust `IpAddr`.
///
/// Returns `None` for null pointers, unsupported families, IPv4 link-local
/// (169.254.x.x) and IPv6 link-local (`fe80::`) addresses.
#[allow(unsafe_code)]
fn sockaddr_to_ipaddr(sockaddr: *const sockaddr) -> Option<IpAddr> {
    if sockaddr.is_null() {
        return None;
    }
    if unsafe { *sockaddr }.sa_family as u32 == AF_INET as u32 {
        let sa = &unsafe { *(sockaddr as *const sockaddr_in) };
        // Ignore all 169.254.x.x addresses as these are not active interfaces
        // (0xfea9 is 169.254 read from the low 16 bits of the network-order
        // word; NOTE(review): assumes a little-endian host — confirm).
        if sa.sin_addr.S_un & 65535 == 0xfea9 {
            return None;
        }
        Some(IpAddr::V4(Ipv4Addr::new(
            ((sa.sin_addr.S_un >> 0) & 255) as u8,
            ((sa.sin_addr.S_un >> 8) & 255) as u8,
            ((sa.sin_addr.S_un >> 16) & 255) as u8,
            ((sa.sin_addr.S_un >> 24) & 255) as u8,
        )))
    } else if unsafe { *sockaddr }.sa_family as u32 == AF_INET6 as u32 {
        let sa = &unsafe { *(sockaddr as *const sockaddr_in6) };
        // Ignore all fe80:: addresses as these are link locals
        if sa.sin6_addr.s6_addr[0] == 0xfe && sa.sin6_addr.s6_addr[1] == 0x80 {
            return None;
        }
        Some(IpAddr::V6(Ipv6Addr::from(sa.sin6_addr.s6_addr)))
    } else {
        None
    }
}
// trivial_numeric_casts lint may become allow by default.
// Refer: https://github.com/rust-lang/rfcs/issues/1020
/// Return a vector of IP details for all the valid interfaces on this host
///
/// Calls `GetAdaptersAddresses` into a growable malloc'd buffer, then walks
/// the adapter / unicast-address / prefix linked lists to reconstruct each
/// interface's address, netmask and (IPv4) broadcast address.
#[allow(unsafe_code, trivial_numeric_casts)]
pub fn get_if_addrs() -> io::Result<Vec<Interface>> {
    let mut ret = Vec::<Interface>::new();
    let mut ifaddrs: *const IpAdapterAddresses;
    let mut buffersize: c_ulong = 15000;
    loop {
        unsafe {
            ifaddrs = libc::malloc(buffersize as size_t) as *mut IpAdapterAddresses;
            if ifaddrs.is_null() {
                panic!("Failed to allocate buffer in get_if_addrs()");
            }
            let retcode = GetAdaptersAddresses(
                0,
                // GAA_FLAG_SKIP_ANYCAST |
                // GAA_FLAG_SKIP_MULTICAST |
                // GAA_FLAG_SKIP_DNS_SERVER |
                // GAA_FLAG_INCLUDE_PREFIX |
                // GAA_FLAG_SKIP_FRIENDLY_NAME
                0x3e,
                ptr::null(),
                ifaddrs,
                &mut buffersize,
            );
            match retcode {
                ERROR_SUCCESS => break,
                // NOTE(review): 111 is presumably ERROR_BUFFER_OVERFLOW; the
                // required size is written to `buffersize` by the API, yet
                // the code doubles it manually as well — confirm intent.
                111 => {
                    libc::free(ifaddrs as *mut c_void);
                    buffersize *= 2;
                    continue;
                }
                _ => return Err(io::Error::last_os_error()),
            }
        }
    }
    for ifaddr in unsafe { CLinkedListConst::from_ptr(ifaddrs, |a| a.next) }.iter() {
        for addr in unsafe {
            CLinkedListConst::from_ptr(ifaddr.first_unicast_address, |a| a.next)
        }.iter()
        {
            let name = unsafe { CStr::from_ptr(ifaddr.adapter_name) }
                .to_string_lossy()
                .into_owned();
            let addr = match sockaddr_to_ipaddr(addr.address.lp_socket_address) {
                None => continue,
                Some(IpAddr::V4(ipv4_addr)) => {
                    let mut item_netmask = Ipv4Addr::new(0, 0, 0, 0);
                    let mut item_broadcast = None;
                    // Search prefixes for a prefix matching addr
                    'prefixloopv4: for prefix in unsafe {
                        CLinkedListConst::from_ptr(ifaddr.first_prefix, |p| p.next)
                    }.iter()
                    {
                        let ipprefix = sockaddr_to_ipaddr(prefix.address.lp_socket_address);
                        match ipprefix {
                            Some(IpAddr::V4(ref a)) => {
                                // Build the netmask bit-by-bit; bail to the
                                // next prefix on the first mismatching bit.
                                let mut netmask: [u8; 4] = [0; 4];
                                for (n, netmask_elt) in netmask
                                    .iter_mut()
                                    .enumerate()
                                    .take((prefix.prefix_length as usize + 7) / 8)
                                {
                                    let x_byte = ipv4_addr.octets()[n];
                                    let y_byte = a.octets()[n];
                                    // Clippy 0.0.128 doesn't handle the label on the `continue`
                                    #[cfg_attr(
                                        feature = "cargo-clippy", allow(needless_continue)
                                    )]
                                    for m in 0..8 {
                                        if (n * 8) + m > prefix.prefix_length as usize {
                                            break;
                                        }
                                        let bit = 1 << m;
                                        if (x_byte & bit) == (y_byte & bit) {
                                            *netmask_elt |= bit;
                                        } else {
                                            continue 'prefixloopv4;
                                        }
                                    }
                                }
                                item_netmask = Ipv4Addr::new(
                                    netmask[0], netmask[1], netmask[2], netmask[3],
                                );
                                // Broadcast = address with all host bits set.
                                let mut broadcast: [u8; 4] = ipv4_addr.octets();
                                for n in 0..4 {
                                    broadcast[n] |= !netmask[n];
                                }
                                item_broadcast = Some(Ipv4Addr::new(
                                    broadcast[0],
                                    broadcast[1],
                                    broadcast[2],
                                    broadcast[3],
                                ));
                                break 'prefixloopv4;
                            }
                            _ => continue,
                        };
                    }
                    IfAddr::V4(Ifv4Addr {
                        ip: ipv4_addr,
                        netmask: item_netmask,
                        broadcast: item_broadcast,
                    })
                }
                Some(IpAddr::V6(ipv6_addr)) => {
                    let mut item_netmask = Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0);
                    // Search prefixes for a prefix matching addr
                    'prefixloopv6: for prefix in unsafe {
                        CLinkedListConst::from_ptr(ifaddr.first_prefix, |p| p.next)
                    }.iter()
                    {
                        let ipprefix = sockaddr_to_ipaddr(prefix.address.lp_socket_address);
                        match ipprefix {
                            Some(IpAddr::V6(ref a)) => {
                                // Iterate the bits in the prefix, if they all match this prefix
                                // is the right one, else try the next prefix
                                let mut netmask: [u16; 8] = [0; 8];
                                for (n, netmask_elt) in netmask
                                    .iter_mut()
                                    .enumerate()
                                    .take((prefix.prefix_length as usize + 15) / 16)
                                {
                                    let x_word = ipv6_addr.segments()[n];
                                    let y_word = a.segments()[n];
                                    // Clippy 0.0.128 doesn't handle the label on the `continue`
                                    #[cfg_attr(
                                        feature = "cargo-clippy", allow(needless_continue)
                                    )]
                                    for m in 0..16 {
                                        if (n * 16) + m > prefix.prefix_length as usize {
                                            break;
                                        }
                                        let bit = 1 << m;
                                        if (x_word & bit) == (y_word & bit) {
                                            *netmask_elt |= bit;
                                        } else {
                                            continue 'prefixloopv6;
                                        }
                                    }
                                }
                                item_netmask = Ipv6Addr::new(
                                    netmask[0], netmask[1], netmask[2], netmask[3], netmask[4],
                                    netmask[5], netmask[6], netmask[7],
                                );
                                break 'prefixloopv6;
                            }
                            _ => continue,
                        };
                    }
                    IfAddr::V6(Ifv6Addr {
                        ip: ipv6_addr,
                        netmask: item_netmask,
                        broadcast: None,
                    })
                }
            };
            ret.push(Interface {
                name: name,
                addr: addr,
            });
        }
    }
    // Free the adapter buffer allocated above.
    unsafe {
        libc::free(ifaddrs as *mut c_void);
    }
    Ok(ret)
}
}
#[cfg(windows)]
/// Get address
///
/// Returns IP details for all valid interfaces on this host, via
/// `GetAdaptersAddresses`.
pub fn get_if_addrs() -> io::Result<Vec<Interface>> {
    getifaddrs_windows::get_if_addrs()
}
#[cfg(test)]
mod tests {
use super::{get_if_addrs, Interface};
use std::error::Error;
use std::io::Read;
use std::net::{IpAddr, Ipv4Addr};
use std::process::{Command, Stdio};
use std::str::FromStr;
use std::thread;
use std::time::Duration;
/// Spawns `cmd` (optionally with `arg`), waits one second, kills it, and
/// returns whatever it wrote to stdout. Returns "" if the command cannot
/// be started.
fn list_system_interfaces(cmd: &str, arg: &str) -> String {
    let start_cmd = if arg == "" {
        Command::new(cmd).stdout(Stdio::piped()).spawn()
    } else {
        Command::new(cmd).arg(arg).stdout(Stdio::piped()).spawn()
    };
    let mut process = match start_cmd {
        Err(why) => {
            // NOTE(review): `Error::description()` is deprecated; switching
            // to the `Display` impl (`why` with `{}`) would also require
            // removing the then-unused `std::error::Error` import above,
            // which lives outside this function.
            println!("couldn't start cmd {} : {}", cmd, why.description());
            return "".to_string();
        }
        Ok(process) => process,
    };
    // Give the command a second to produce output, then stop it.
    thread::sleep(Duration::from_millis(1000));
    let _ = process.kill();
    let result: Vec<u8> = unwrap!(process.stdout)
        .bytes()
        .map(|x| unwrap!(x))
        .collect();
    unwrap!(String::from_utf8(result))
}
// Windows: parse `ipconfig` output, keeping non-link-local IPv4/IPv6 lines.
#[cfg(windows)]
fn list_system_addrs() -> Vec<IpAddr> {
    use std::net::Ipv6Addr;
    list_system_interfaces("ipconfig", "")
        .lines()
        .filter_map(|line| {
            println!("{}", line);
            if line.contains("Address") && !line.contains("Link-local") {
                let addr_s: Vec<&str> = line.split(" : ").collect();
                if line.contains("IPv6") {
                    return Some(IpAddr::V6(unwrap!(Ipv6Addr::from_str(addr_s[1]))));
                } else if line.contains("IPv4") {
                    return Some(IpAddr::V4(unwrap!(Ipv4Addr::from_str(addr_s[1]))));
                }
            }
            None
        })
        .collect()
}
// Linux-likes: parse `ip addr`, extracting the address before the `/prefix`.
#[cfg(any(target_os = "linux", target_os = "android", target_os = "nacl"))]
fn list_system_addrs() -> Vec<IpAddr> {
    list_system_interfaces("ip", "addr")
        .lines()
        .filter_map(|line| {
            println!("{}", line);
            if line.contains("inet ") {
                let addr_s: Vec<&str> = line.split_whitespace().collect();
                let addr: Vec<&str> = addr_s[1].split('/').collect();
                return Some(IpAddr::V4(unwrap!(Ipv4Addr::from_str(addr[0]))));
            }
            None
        })
        .collect()
}
// BSD-likes: parse `ifconfig` output (IPv4 `inet` lines only).
#[cfg(any(target_os = "freebsd", target_os = "macos", target_os = "ios"))]
fn list_system_addrs() -> Vec<IpAddr> {
    list_system_interfaces("ifconfig", "")
        .lines()
        .filter_map(|line| {
            println!("{}", line);
            if line.contains("inet ") {
                let addr_s: Vec<&str> = line.split_whitespace().collect();
                return Some(IpAddr::V4(unwrap!(Ipv4Addr::from_str(addr_s[1]))));
            }
            None
        })
        .collect()
}
/// Cross-checks `get_if_addrs()` against the OS's own interface listing:
/// expects at least one loopback, exactly one 127.0.0.1, and every
/// system-reported address to be present.
#[test]
fn test_get_if_addrs() {
    let ifaces = unwrap!(get_if_addrs());
    println!("Local interfaces:");
    println!("{:#?}", ifaces);
    // at least one loop back address
    assert!(
        1 <= ifaces
            .iter()
            .filter(|interface| interface.is_loopback())
            .count()
    );
    // one address of IpV4(127.0.0.1)
    let is_loopback =
        |interface: &&Interface| interface.addr.ip() == IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
    assert_eq!(1, ifaces.iter().filter(is_loopback).count());
    // each system address shall be listed
    let system_addrs = list_system_addrs();
    assert!(!system_addrs.is_empty());
    for addr in system_addrs {
        let mut listed = false;
        println!("\n checking whether {:?} has been properly listed \n", addr);
        for interface in &ifaces {
            if interface.addr.ip() == addr {
                listed = true;
            }
        }
        assert!(listed);
    }
}
}
|
#[doc = r" Register block"]
// NOTE(review): svd2rust-generated peripheral layout (counter/timers plus the
// system timer).  The field order and the `_reservedN` padding arrays encode
// the absolute register offsets quoted in each `#[doc]`; do not reorder,
// resize, or insert fields.
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Counter/Timer Register"]
    pub tmr0: TMR0,
    #[doc = "0x04 - Counter/Timer A0 Compare Registers"]
    pub cmpra0: CMPRA0,
    #[doc = "0x08 - Counter/Timer B0 Compare Registers"]
    pub cmprb0: CMPRB0,
    #[doc = "0x0c - Counter/Timer Control"]
    pub ctrl0: CTRL0,
    _reserved0: [u8; 4usize],
    #[doc = "0x14 - Counter/Timer A0 Compare Registers"]
    pub cmprauxa0: CMPRAUXA0,
    #[doc = "0x18 - Counter/Timer B0 Compare Registers"]
    pub cmprauxb0: CMPRAUXB0,
    #[doc = "0x1c - Counter/Timer Auxiliary"]
    pub aux0: AUX0,
    #[doc = "0x20 - Counter/Timer Register"]
    pub tmr1: TMR1,
    #[doc = "0x24 - Counter/Timer A1 Compare Registers"]
    pub cmpra1: CMPRA1,
    #[doc = "0x28 - Counter/Timer B1 Compare Registers"]
    pub cmprb1: CMPRB1,
    #[doc = "0x2c - Counter/Timer Control"]
    pub ctrl1: CTRL1,
    _reserved1: [u8; 4usize],
    #[doc = "0x34 - Counter/Timer A1 Compare Registers"]
    pub cmprauxa1: CMPRAUXA1,
    #[doc = "0x38 - Counter/Timer B1 Compare Registers"]
    pub cmprauxb1: CMPRAUXB1,
    #[doc = "0x3c - Counter/Timer Auxiliary"]
    pub aux1: AUX1,
    #[doc = "0x40 - Counter/Timer Register"]
    pub tmr2: TMR2,
    #[doc = "0x44 - Counter/Timer A2 Compare Registers"]
    pub cmpra2: CMPRA2,
    #[doc = "0x48 - Counter/Timer B2 Compare Registers"]
    pub cmprb2: CMPRB2,
    #[doc = "0x4c - Counter/Timer Control"]
    pub ctrl2: CTRL2,
    _reserved2: [u8; 4usize],
    #[doc = "0x54 - Counter/Timer A2 Compare Registers"]
    pub cmprauxa2: CMPRAUXA2,
    #[doc = "0x58 - Counter/Timer B2 Compare Registers"]
    pub cmprauxb2: CMPRAUXB2,
    #[doc = "0x5c - Counter/Timer Auxiliary"]
    pub aux2: AUX2,
    #[doc = "0x60 - Counter/Timer Register"]
    pub tmr3: TMR3,
    #[doc = "0x64 - Counter/Timer A3 Compare Registers"]
    pub cmpra3: CMPRA3,
    #[doc = "0x68 - Counter/Timer B3 Compare Registers"]
    pub cmprb3: CMPRB3,
    #[doc = "0x6c - Counter/Timer Control"]
    pub ctrl3: CTRL3,
    _reserved3: [u8; 4usize],
    #[doc = "0x74 - Counter/Timer A3 Compare Registers"]
    pub cmprauxa3: CMPRAUXA3,
    #[doc = "0x78 - Counter/Timer B3 Compare Registers"]
    pub cmprauxb3: CMPRAUXB3,
    #[doc = "0x7c - Counter/Timer Auxiliary"]
    pub aux3: AUX3,
    #[doc = "0x80 - Counter/Timer Register"]
    pub tmr4: TMR4,
    #[doc = "0x84 - Counter/Timer A4 Compare Registers"]
    pub cmpra4: CMPRA4,
    #[doc = "0x88 - Counter/Timer B4 Compare Registers"]
    pub cmprb4: CMPRB4,
    #[doc = "0x8c - Counter/Timer Control"]
    pub ctrl4: CTRL4,
    _reserved4: [u8; 4usize],
    #[doc = "0x94 - Counter/Timer A4 Compare Registers"]
    pub cmprauxa4: CMPRAUXA4,
    #[doc = "0x98 - Counter/Timer B4 Compare Registers"]
    pub cmprauxb4: CMPRAUXB4,
    #[doc = "0x9c - Counter/Timer Auxiliary"]
    pub aux4: AUX4,
    #[doc = "0xa0 - Counter/Timer Register"]
    pub tmr5: TMR5,
    #[doc = "0xa4 - Counter/Timer A5 Compare Registers"]
    pub cmpra5: CMPRA5,
    #[doc = "0xa8 - Counter/Timer B5 Compare Registers"]
    pub cmprb5: CMPRB5,
    #[doc = "0xac - Counter/Timer Control"]
    pub ctrl5: CTRL5,
    _reserved5: [u8; 4usize],
    #[doc = "0xb4 - Counter/Timer A5 Compare Registers"]
    pub cmprauxa5: CMPRAUXA5,
    #[doc = "0xb8 - Counter/Timer B5 Compare Registers"]
    pub cmprauxb5: CMPRAUXB5,
    #[doc = "0xbc - Counter/Timer Auxiliary"]
    pub aux5: AUX5,
    #[doc = "0xc0 - Counter/Timer Register"]
    pub tmr6: TMR6,
    #[doc = "0xc4 - Counter/Timer A6 Compare Registers"]
    pub cmpra6: CMPRA6,
    #[doc = "0xc8 - Counter/Timer B6 Compare Registers"]
    pub cmprb6: CMPRB6,
    #[doc = "0xcc - Counter/Timer Control"]
    pub ctrl6: CTRL6,
    _reserved6: [u8; 4usize],
    #[doc = "0xd4 - Counter/Timer A6 Compare Registers"]
    pub cmprauxa6: CMPRAUXA6,
    #[doc = "0xd8 - Counter/Timer B6 Compare Registers"]
    pub cmprauxb6: CMPRAUXB6,
    #[doc = "0xdc - Counter/Timer Auxiliary"]
    pub aux6: AUX6,
    #[doc = "0xe0 - Counter/Timer Register"]
    pub tmr7: TMR7,
    #[doc = "0xe4 - Counter/Timer A7 Compare Registers"]
    pub cmpra7: CMPRA7,
    #[doc = "0xe8 - Counter/Timer B7 Compare Registers"]
    pub cmprb7: CMPRB7,
    #[doc = "0xec - Counter/Timer Control"]
    pub ctrl7: CTRL7,
    _reserved7: [u8; 4usize],
    #[doc = "0xf4 - Counter/Timer A7 Compare Registers"]
    pub cmprauxa7: CMPRAUXA7,
    #[doc = "0xf8 - Counter/Timer B7 Compare Registers"]
    pub cmprauxb7: CMPRAUXB7,
    #[doc = "0xfc - Counter/Timer Auxiliary"]
    pub aux7: AUX7,
    #[doc = "0x100 - Counter/Timer Global Enable"]
    pub globen: GLOBEN,
    #[doc = "0x104 - Counter/Timer Output Config 0"]
    pub outcfg0: OUTCFG0,
    #[doc = "0x108 - Counter/Timer Output Config 1"]
    pub outcfg1: OUTCFG1,
    #[doc = "0x10c - Counter/Timer Output Config 2"]
    pub outcfg2: OUTCFG2,
    _reserved8: [u8; 4usize],
    #[doc = "0x114 - Counter/Timer Output Config 3"]
    pub outcfg3: OUTCFG3,
    #[doc = "0x118 - Counter/Timer Input Config"]
    pub incfg: INCFG,
    _reserved9: [u8; 36usize],
    #[doc = "0x140 - Configuration Register"]
    pub stcfg: STCFG,
    #[doc = "0x144 - System Timer Count Register (Real Time Counter)"]
    pub sttmr: STTMR,
    #[doc = "0x148 - Capture Control Register"]
    pub capturecontrol: CAPTURECONTROL,
    _reserved10: [u8; 4usize],
    #[doc = "0x150 - Compare Register A"]
    pub scmpr0: SCMPR0,
    #[doc = "0x154 - Compare Register B"]
    pub scmpr1: SCMPR1,
    #[doc = "0x158 - Compare Register C"]
    pub scmpr2: SCMPR2,
    #[doc = "0x15c - Compare Register D"]
    pub scmpr3: SCMPR3,
    #[doc = "0x160 - Compare Register E"]
    pub scmpr4: SCMPR4,
    #[doc = "0x164 - Compare Register F"]
    pub scmpr5: SCMPR5,
    #[doc = "0x168 - Compare Register G"]
    pub scmpr6: SCMPR6,
    #[doc = "0x16c - Compare Register H"]
    pub scmpr7: SCMPR7,
    _reserved11: [u8; 112usize],
    #[doc = "0x1e0 - Capture Register A"]
    pub scapt0: SCAPT0,
    #[doc = "0x1e4 - Capture Register B"]
    pub scapt1: SCAPT1,
    #[doc = "0x1e8 - Capture Register C"]
    pub scapt2: SCAPT2,
    #[doc = "0x1ec - Capture Register D"]
    pub scapt3: SCAPT3,
    #[doc = "0x1f0 - System Timer NVRAM_A Register"]
    pub snvr0: SNVR0,
    #[doc = "0x1f4 - System Timer NVRAM_B Register"]
    pub snvr1: SNVR1,
    #[doc = "0x1f8 - System Timer NVRAM_C Register"]
    pub snvr2: SNVR2,
    #[doc = "0x1fc - System Timer NVRAM_D Register"]
    pub snvr3: SNVR3,
    #[doc = "0x200 - Counter/Timer Interrupts: Enable"]
    pub inten: INTEN,
    #[doc = "0x204 - Counter/Timer Interrupts: Status"]
    pub intstat: INTSTAT,
    #[doc = "0x208 - Counter/Timer Interrupts: Clear"]
    pub intclr: INTCLR,
    #[doc = "0x20c - Counter/Timer Interrupts: Set"]
    pub intset: INTSET,
    _reserved12: [u8; 240usize],
    #[doc = "0x300 - STIMER Interrupt registers: Enable"]
    pub stminten: STMINTEN,
    #[doc = "0x304 - STIMER Interrupt registers: Status"]
    pub stmintstat: STMINTSTAT,
    #[doc = "0x308 - STIMER Interrupt registers: Clear"]
    pub stmintclr: STMINTCLR,
    #[doc = "0x30c - STIMER Interrupt registers: Set"]
    pub stmintset: STMINTSET,
}
// NOTE(review): svd2rust-generated register proxy types.  Each `struct X`
// wraps a single 32-bit volatile cell and is paired with a `pub mod x` that
// holds the generated field accessors; keep these declarations byte-identical
// to the generator output.
#[doc = "Counter/Timer Register"]
pub struct TMR0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Register"]
pub mod tmr0;
#[doc = "Counter/Timer A0 Compare Registers"]
pub struct CMPRA0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A0 Compare Registers"]
pub mod cmpra0;
#[doc = "Counter/Timer B0 Compare Registers"]
pub struct CMPRB0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B0 Compare Registers"]
pub mod cmprb0;
#[doc = "Counter/Timer Control"]
pub struct CTRL0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Control"]
pub mod ctrl0;
#[doc = "Counter/Timer A0 Compare Registers"]
pub struct CMPRAUXA0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A0 Compare Registers"]
pub mod cmprauxa0;
#[doc = "Counter/Timer B0 Compare Registers"]
pub struct CMPRAUXB0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B0 Compare Registers"]
pub mod cmprauxb0;
#[doc = "Counter/Timer Auxiliary"]
pub struct AUX0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Auxiliary"]
pub mod aux0;
#[doc = "Counter/Timer Register"]
pub struct TMR1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Register"]
pub mod tmr1;
#[doc = "Counter/Timer A1 Compare Registers"]
pub struct CMPRA1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A1 Compare Registers"]
pub mod cmpra1;
#[doc = "Counter/Timer B1 Compare Registers"]
pub struct CMPRB1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B1 Compare Registers"]
pub mod cmprb1;
#[doc = "Counter/Timer Control"]
pub struct CTRL1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Control"]
pub mod ctrl1;
#[doc = "Counter/Timer A1 Compare Registers"]
pub struct CMPRAUXA1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A1 Compare Registers"]
pub mod cmprauxa1;
#[doc = "Counter/Timer B1 Compare Registers"]
pub struct CMPRAUXB1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B1 Compare Registers"]
pub mod cmprauxb1;
#[doc = "Counter/Timer Auxiliary"]
pub struct AUX1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Auxiliary"]
pub mod aux1;
#[doc = "Counter/Timer Register"]
pub struct TMR2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Register"]
pub mod tmr2;
#[doc = "Counter/Timer A2 Compare Registers"]
pub struct CMPRA2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A2 Compare Registers"]
pub mod cmpra2;
#[doc = "Counter/Timer B2 Compare Registers"]
pub struct CMPRB2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B2 Compare Registers"]
pub mod cmprb2;
#[doc = "Counter/Timer Control"]
pub struct CTRL2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Control"]
pub mod ctrl2;
#[doc = "Counter/Timer A2 Compare Registers"]
pub struct CMPRAUXA2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A2 Compare Registers"]
pub mod cmprauxa2;
#[doc = "Counter/Timer B2 Compare Registers"]
pub struct CMPRAUXB2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B2 Compare Registers"]
pub mod cmprauxb2;
#[doc = "Counter/Timer Auxiliary"]
pub struct AUX2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Auxiliary"]
pub mod aux2;
#[doc = "Counter/Timer Register"]
pub struct TMR3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Register"]
pub mod tmr3;
#[doc = "Counter/Timer A3 Compare Registers"]
pub struct CMPRA3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A3 Compare Registers"]
pub mod cmpra3;
#[doc = "Counter/Timer B3 Compare Registers"]
pub struct CMPRB3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B3 Compare Registers"]
pub mod cmprb3;
#[doc = "Counter/Timer Control"]
pub struct CTRL3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Control"]
pub mod ctrl3;
#[doc = "Counter/Timer A3 Compare Registers"]
pub struct CMPRAUXA3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A3 Compare Registers"]
pub mod cmprauxa3;
#[doc = "Counter/Timer B3 Compare Registers"]
pub struct CMPRAUXB3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B3 Compare Registers"]
pub mod cmprauxb3;
#[doc = "Counter/Timer Auxiliary"]
pub struct AUX3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Auxiliary"]
pub mod aux3;
#[doc = "Counter/Timer Register"]
pub struct TMR4 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Register"]
pub mod tmr4;
#[doc = "Counter/Timer A4 Compare Registers"]
pub struct CMPRA4 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A4 Compare Registers"]
pub mod cmpra4;
#[doc = "Counter/Timer B4 Compare Registers"]
pub struct CMPRB4 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B4 Compare Registers"]
pub mod cmprb4;
#[doc = "Counter/Timer Control"]
pub struct CTRL4 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Control"]
pub mod ctrl4;
#[doc = "Counter/Timer A4 Compare Registers"]
pub struct CMPRAUXA4 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A4 Compare Registers"]
pub mod cmprauxa4;
#[doc = "Counter/Timer B4 Compare Registers"]
pub struct CMPRAUXB4 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B4 Compare Registers"]
pub mod cmprauxb4;
#[doc = "Counter/Timer Auxiliary"]
pub struct AUX4 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Auxiliary"]
pub mod aux4;
#[doc = "Counter/Timer Register"]
pub struct TMR5 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Register"]
pub mod tmr5;
#[doc = "Counter/Timer A5 Compare Registers"]
pub struct CMPRA5 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A5 Compare Registers"]
pub mod cmpra5;
#[doc = "Counter/Timer B5 Compare Registers"]
pub struct CMPRB5 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B5 Compare Registers"]
pub mod cmprb5;
#[doc = "Counter/Timer Control"]
pub struct CTRL5 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Control"]
pub mod ctrl5;
#[doc = "Counter/Timer A5 Compare Registers"]
pub struct CMPRAUXA5 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A5 Compare Registers"]
pub mod cmprauxa5;
#[doc = "Counter/Timer B5 Compare Registers"]
pub struct CMPRAUXB5 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B5 Compare Registers"]
pub mod cmprauxb5;
#[doc = "Counter/Timer Auxiliary"]
pub struct AUX5 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Auxiliary"]
pub mod aux5;
#[doc = "Counter/Timer Register"]
pub struct TMR6 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Register"]
pub mod tmr6;
#[doc = "Counter/Timer A6 Compare Registers"]
pub struct CMPRA6 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A6 Compare Registers"]
pub mod cmpra6;
#[doc = "Counter/Timer B6 Compare Registers"]
pub struct CMPRB6 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B6 Compare Registers"]
pub mod cmprb6;
#[doc = "Counter/Timer Control"]
pub struct CTRL6 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Control"]
pub mod ctrl6;
#[doc = "Counter/Timer A6 Compare Registers"]
pub struct CMPRAUXA6 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A6 Compare Registers"]
pub mod cmprauxa6;
#[doc = "Counter/Timer B6 Compare Registers"]
pub struct CMPRAUXB6 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B6 Compare Registers"]
pub mod cmprauxb6;
#[doc = "Counter/Timer Auxiliary"]
pub struct AUX6 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Auxiliary"]
pub mod aux6;
#[doc = "Counter/Timer Register"]
pub struct TMR7 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Register"]
pub mod tmr7;
#[doc = "Counter/Timer A7 Compare Registers"]
pub struct CMPRA7 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A7 Compare Registers"]
pub mod cmpra7;
#[doc = "Counter/Timer B7 Compare Registers"]
pub struct CMPRB7 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B7 Compare Registers"]
pub mod cmprb7;
#[doc = "Counter/Timer Control"]
pub struct CTRL7 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Control"]
pub mod ctrl7;
#[doc = "Counter/Timer A7 Compare Registers"]
pub struct CMPRAUXA7 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer A7 Compare Registers"]
pub mod cmprauxa7;
#[doc = "Counter/Timer B7 Compare Registers"]
pub struct CMPRAUXB7 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer B7 Compare Registers"]
pub mod cmprauxb7;
#[doc = "Counter/Timer Auxiliary"]
pub struct AUX7 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Auxiliary"]
pub mod aux7;
#[doc = "Counter/Timer Global Enable"]
pub struct GLOBEN {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Global Enable"]
pub mod globen;
#[doc = "Counter/Timer Output Config 0"]
pub struct OUTCFG0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Output Config 0"]
pub mod outcfg0;
#[doc = "Counter/Timer Output Config 1"]
pub struct OUTCFG1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Output Config 1"]
pub mod outcfg1;
#[doc = "Counter/Timer Output Config 2"]
pub struct OUTCFG2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Output Config 2"]
pub mod outcfg2;
#[doc = "Counter/Timer Output Config 3"]
pub struct OUTCFG3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Output Config 3"]
pub mod outcfg3;
#[doc = "Counter/Timer Input Config"]
pub struct INCFG {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Input Config"]
pub mod incfg;
#[doc = "Configuration Register"]
pub struct STCFG {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Configuration Register"]
pub mod stcfg;
#[doc = "System Timer Count Register (Real Time Counter)"]
pub struct STTMR {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "System Timer Count Register (Real Time Counter)"]
pub mod sttmr;
#[doc = "Capture Control Register"]
pub struct CAPTURECONTROL {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Capture Control Register"]
pub mod capturecontrol;
#[doc = "Compare Register A"]
pub struct SCMPR0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Compare Register A"]
pub mod scmpr0;
#[doc = "Compare Register B"]
pub struct SCMPR1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Compare Register B"]
pub mod scmpr1;
#[doc = "Compare Register C"]
pub struct SCMPR2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Compare Register C"]
pub mod scmpr2;
#[doc = "Compare Register D"]
pub struct SCMPR3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Compare Register D"]
pub mod scmpr3;
#[doc = "Compare Register E"]
pub struct SCMPR4 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Compare Register E"]
pub mod scmpr4;
#[doc = "Compare Register F"]
pub struct SCMPR5 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Compare Register F"]
pub mod scmpr5;
#[doc = "Compare Register G"]
pub struct SCMPR6 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Compare Register G"]
pub mod scmpr6;
#[doc = "Compare Register H"]
pub struct SCMPR7 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Compare Register H"]
pub mod scmpr7;
#[doc = "Capture Register A"]
pub struct SCAPT0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Capture Register A"]
pub mod scapt0;
#[doc = "Capture Register B"]
pub struct SCAPT1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Capture Register B"]
pub mod scapt1;
#[doc = "Capture Register C"]
pub struct SCAPT2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Capture Register C"]
pub mod scapt2;
#[doc = "Capture Register D"]
pub struct SCAPT3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Capture Register D"]
pub mod scapt3;
#[doc = "System Timer NVRAM_A Register"]
pub struct SNVR0 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "System Timer NVRAM_A Register"]
pub mod snvr0;
#[doc = "System Timer NVRAM_B Register"]
pub struct SNVR1 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "System Timer NVRAM_B Register"]
pub mod snvr1;
#[doc = "System Timer NVRAM_C Register"]
pub struct SNVR2 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "System Timer NVRAM_C Register"]
pub mod snvr2;
#[doc = "System Timer NVRAM_D Register"]
pub struct SNVR3 {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "System Timer NVRAM_D Register"]
pub mod snvr3;
#[doc = "Counter/Timer Interrupts: Enable"]
pub struct INTEN {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Interrupts: Enable"]
pub mod inten;
#[doc = "Counter/Timer Interrupts: Status"]
pub struct INTSTAT {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Interrupts: Status"]
pub mod intstat;
#[doc = "Counter/Timer Interrupts: Clear"]
pub struct INTCLR {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Interrupts: Clear"]
pub mod intclr;
#[doc = "Counter/Timer Interrupts: Set"]
pub struct INTSET {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Interrupts: Set"]
pub mod intset;
#[doc = "STIMER Interrupt registers: Enable"]
pub struct STMINTEN {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "STIMER Interrupt registers: Enable"]
pub mod stminten;
#[doc = "STIMER Interrupt registers: Status"]
pub struct STMINTSTAT {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "STIMER Interrupt registers: Status"]
pub mod stmintstat;
#[doc = "STIMER Interrupt registers: Clear"]
pub struct STMINTCLR {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "STIMER Interrupt registers: Clear"]
pub mod stmintclr;
#[doc = "STIMER Interrupt registers: Set"]
pub struct STMINTSET {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "STIMER Interrupt registers: Set"]
pub mod stmintset;
|
use super::DeltaTime;
use crate::vector::Vec2f;
use specs::prelude::*;
use specs::Component;
/// World-space 2-D position of an entity.
#[derive(Component, Clone, Copy)]
#[storage(VecStorage)]
pub struct Pos(pub Vec2f);
/// Linear velocity; integrated into `Pos` once per frame using `DeltaTime`.
#[derive(Component, Default, Clone, Copy)]
#[storage(VecStorage)]
pub struct Vel(pub Vec2f);
/// Orientation angle — presumably radians; TODO confirm against the renderer.
#[derive(Component, Clone, Copy)]
#[storage(VecStorage)]
pub struct Theta(pub f64);
/// Angular velocity; integrated into `Theta` once per frame.
#[derive(Component, Clone, Copy)]
#[storage(VecStorage)]
pub struct Omega(pub f64);
/// Integrates linear velocity into position once per frame.
pub struct TranslationalKinematicSys;
impl<'a> System<'a> for TranslationalKinematicSys {
    type SystemData = (
        ReadExpect<'a, DeltaTime>,
        WriteStorage<'a, Pos>,
        ReadStorage<'a, Vel>,
    );
    /// `pos += vel * dt`, in parallel over every entity with both components.
    fn run(&mut self, (delta, mut positions, velocities): Self::SystemData) {
        let step = delta.0.as_secs_f64();
        (&mut positions, &velocities)
            .par_join()
            .for_each(|(position, velocity)| {
                position.0 += velocity.0 * step;
            });
    }
}
/// Integrates angular velocity into orientation once per frame.
pub struct RotationalKinematicSys;
impl<'a> System<'a> for RotationalKinematicSys {
    type SystemData = (
        ReadExpect<'a, DeltaTime>,
        WriteStorage<'a, Theta>,
        ReadStorage<'a, Omega>,
    );
    /// `theta += omega * dt`, in parallel over every entity with both components.
    fn run(&mut self, (delta, mut angles, rates): Self::SystemData) {
        let step = delta.0.as_secs_f64();
        (&mut angles, &rates)
            .par_join()
            .for_each(|(angle, rate)| {
                angle.0 += rate.0 * step;
            });
    }
}
pub struct EdgeDeflectSys {
pub min: Vec2f,
pub max: Vec2f,
}
impl<'a> System<'a> for EdgeDeflectSys {
type SystemData = (WriteStorage<'a, Vel>, ReadStorage<'a, Pos>);
fn run(&mut self, (mut vel, pos): Self::SystemData) {
(&mut vel, &pos)
.par_join()
.for_each(|(Vel(vel), &Pos(pos))| {
if (pos.x < self.min.x && vel.x < 0.0) || (pos.x > self.max.x && vel.x > 0.0) {
vel.x *= -1.0;
} else if (pos.y < self.min.y && vel.y < 0.0) || (pos.y > self.max.y && vel.y > 0.0)
{
vel.y *= -1.0;
}
});
}
} |
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use std::os::unix::io::RawFd;
use super::Addr;
use super::Pid;
use crate::FromToRaw;
// TODO: Upstream this struct to libc crate.
/// Mirror of the kernel's `struct f_owner_ex`, used as the argument of
/// `F_SETOWN_EX`/`F_GETOWN_EX` below.
#[repr(C)]
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct f_owner_ex {
    typ: libc::c_int, // owner kind — presumably F_OWNER_{TID,PID,PGRP}; confirm against fcntl(2)
    pid: libc::pid_t, // id interpreted according to `typ`
}
// The numeric discriminants correspond to the raw `fcntl` command values that
// the libc `F_*` constants expose (see the round-trip test below).  Variants
// with a payload carry the command's argument; payload-less variants take no
// argument.
command_enum! {
    /// A `fcntl` command paired with its argument.
    pub enum FcntlCmd<'a>: libc::c_int {
        F_DUPFD(RawFd) = 0,
        F_GETFD = 1,
        F_SETFD(RawFd) = 2,
        F_GETFL = 3,
        F_SETFL(i32) = 4,
        // Record locking; the 64-bit variants use `flock64`.
        F_GETLK(Option<Addr<'a, libc::flock>>) = 5,
        F_SETLK(Option<Addr<'a, libc::flock>>) = 6,
        F_SETLKW(Option<Addr<'a, libc::flock>>) = 7,
        F_SETOWN = 8,
        F_GETOWN(Pid) = 9,
        F_SETSIG(i32) = 10,
        F_GETSIG = 11,
        F_GETLK64(Option<Addr<'a, libc::flock64>>) = 12,
        F_SETLK64(Option<Addr<'a, libc::flock64>>) = 13,
        F_SETLKW64(Option<Addr<'a, libc::flock64>>) = 14,
        F_SETOWN_EX(Option<Addr<'a, f_owner_ex>>) = 15,
        F_GETOWN_EX(Option<Addr<'a, f_owner_ex>>) = 16,
        F_GETOWNER_UIDS = 17,
        // Open file description (OFD) locks.
        F_OFD_GETLK(Option<Addr<'a, libc::flock>>) = 36,
        F_OFD_SETLK(Option<Addr<'a, libc::flock>>) = 37,
        F_OFD_SETLKW(Option<Addr<'a, libc::flock>>) = 38,
        F_SETLEASE(i32) = 1024,
        F_GETLEASE = 1025,
        F_NOTIFY(i32) = 1026,
        F_DUPFD_CLOEXEC(i32) = 1030,
        F_SETPIPE_SZ(i32) = 1031,
        F_GETPIPE_SZ = 1032,
        F_ADD_SEALS(i32) = 1033,
        F_GET_SEALS = 1034,
        F_GET_RW_HINT(Option<Addr<'a, u64>>) = 1035,
        F_SET_RW_HINT(Option<Addr<'a, u64>>) = 1036,
        F_GET_FILE_RW_HINT(Option<Addr<'a, u64>>) = 1037,
        F_SET_FILE_RW_HINT(Option<Addr<'a, u64>>) = 1038,
    }
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn test_fcntl() {
        // Debug and Display both render the command together with its argument.
        assert_eq!(format!("{:?}", FcntlCmd::F_DUPFD(2)), "F_DUPFD(2)");
        assert_eq!(format!("{}", FcntlCmd::F_DUPFD(2)), "F_DUPFD(2)");
        // Round-trip through the raw (command, argument) representation.
        assert_eq!(FcntlCmd::from_raw(libc::F_DUPFD, 42), FcntlCmd::F_DUPFD(42));
        // Unknown commands fall back to the catch-all `Other` variant.
        assert_eq!(FcntlCmd::from_raw(1337, 42), FcntlCmd::Other(1337, 42));
        assert_eq!(FcntlCmd::F_DUPFD(42).into_raw(), (libc::F_DUPFD, 42));
    }
}
|
pub mod admin;
pub mod default;
pub mod login;
pub mod utils;
// TODO: REMOVE FOR PRODUCTION!
pub mod proxy;
use crate::core::ServiceResult;
use crate::web::utils::HbData;
use actix_files as fs;
use actix_web::{web, HttpRequest, HttpResponse};
use handlebars::Handlebars;
/// Registers the admin UI routes plus static assets, auth endpoints, the
/// default routes and the 404 fallback on the given service config.
pub fn init(config: &mut web::ServiceConfig) {
    admin::init(config);
    // Build the root scope first, then register it in one go.
    let root = web::scope("/")
        // Static asset routes
        .service(fs::Files::new("/stylesheets", "static/stylesheets/"))
        .service(fs::Files::new("/javascripts", "static/javascripts/"))
        .service(fs::Files::new("/images", "static/images/"))
        .service(fs::Files::new("/product/image", "img/"))
        // Login / logout / registration
        .service(
            web::resource("/login")
                .route(web::post().to(login::post_login))
                .route(web::get().to(login::get_login)),
        )
        .service(web::resource("/logout").route(web::get().to(login::get_logout)))
        .service(
            web::resource("/register/{invitation_id}")
                .route(web::post().to(login::post_register))
                .route(web::get().to(login::get_register)),
        )
        .configure(default::init)
        .default_service(web::get().to(get_404));
    config.service(root);
}
/// GET route for 404 error: renders the "404" Handlebars template.
pub async fn get_404(
    hb: web::Data<Handlebars<'_>>,
    request: HttpRequest,
) -> ServiceResult<HttpResponse> {
    Ok(HttpResponse::Ok().body(HbData::new(&request).render(&hb, "404")?))
}
|
// fn main() {
// //     <> expresses a containment/outlives relationship: a RefBoy value cannot live longer than 'a
// struct RefBoy<'a> {
// loc: &'a i32,
// }
// }
// Reference fields in a struct must carry explicit lifetimes.
// A struct with explicitly written lifetimes lives at most as long as any lifetime it declares.
// Multiple lifetimes may be declared, separated by commas.
// Lifetimes and generics both go inside <>, lifetimes first, separated by commas.
// #[derive(Copy, Clone)]
// struct A {
// a: i32,
// }
// impl A {
// pub fn show(&self) {
// println!("{}", self.a);
// }
// pub fn add_two(&mut self) {
// self.add_one();
// self.add_one();
// self.show();
// }
// pub fn add_one(&mut self) {
// self.a += 1;
// }
// }
// fn main() {
// let mut ast = A { a: 12i32 };
// ast.show();
// ast.add_two();
// }
// enum SpecialPoint {
// Point(i32, i32),
// Special(String),
// }
// enum SpecialPoint {
// Point { x: i32, y: i32 },
// Special(String),
// }
// Accessing enum members requires pattern matching.
// enum SpecialPoint {
// Point(i32, i32),
// Special(String),
// }
// fn main() {
// let sp = SpecialPoint::Point(0, 0);
// match sp {
// SpecialPoint::Point(x, y) => {
// println!("I'am SpecialPoint(x={}, y={})", x, y);
// }
// SpecialPoint::Special(why) => {
// println!("I'am Special because I am {}", why);
// }
// }
// }
// struct Point {
// x: i32,
// y: i32,
// }
// let point = Point{x:1,y:2};
// let Point{x:x,y:y} = point;
// let Point{x,y} = point;
/// Prints the given string slice in a short greeting.
fn use_str(text: &str) {
    println!("i am {}", text);
}
fn main() {
    let greeting = String::from("Hello");
    // Deref coercion turns `&String` into `&str` at the call site, so the
    // explicit `&*` reborrow is unnecessary.
    use_str(&greeting);
}
|
use backend::models::{Type, Custom, Modifier, Package};
use token;
/// Position relative in file where the declaration is present.
pub type Pos = (usize, usize);
/// A parsed item tagged with the file position it came from.
pub type Token<T> = token::Token<T, Pos>;
/// A single `name: value` initializer inside an instance literal.
#[derive(Debug, PartialEq, Clone)]
pub struct FieldInit {
    pub name: Token<String>,
    pub value: Token<Value>,
}
/// An instantiation of a custom type with its field initializers.
#[derive(Debug, PartialEq, Clone)]
pub struct Instance {
    pub ty: Custom,
    pub arguments: Vec<Token<FieldInit>>,
}
/// A constant reference split into its path segments, with an optional prefix.
#[derive(Debug, PartialEq, Clone)]
pub struct Constant {
    pub prefix: Option<String>,
    pub parts: Vec<String>,
}
/// Any value that can appear in the AST: literals, identifiers, types,
/// instances or constant references.
#[derive(Debug, PartialEq, Clone)]
pub enum Value {
    String(String),
    Number(f64),
    Boolean(bool),
    Identifier(String),
    Type(Type),
    Instance(Token<Instance>),
    Constant(Token<Constant>),
}
/// A declared option: a name with one or more associated values.
#[derive(Debug)]
pub struct OptionDecl {
    pub name: String,
    pub values: Vec<Token<Value>>,
}
/// A field declaration inside a body: modifier, name and type.
#[derive(Debug)]
pub struct Field {
    pub modifier: Modifier,
    pub name: String,
    pub ty: Type,
    // Optional serialized-name override — presumably an `as` clause; confirm
    // against the parser.
    pub field_as: Option<Token<Value>>,
}
impl Field {
    /// True when this field was declared with the `optional` modifier.
    pub fn is_optional(&self) -> bool {
        if let Modifier::Optional = self.modifier {
            true
        } else {
            false
        }
    }
}
/// Anything that can appear inside a declaration body.
#[derive(Debug)]
pub enum Member {
    Field(Field),
    // Verbatim code: a string plus its lines — presumably (target context,
    // code lines); confirm against the parser.
    Code(String, Vec<String>),
    Option(Token<OptionDecl>),
    Match(MatchDecl),
}
/// A named, typed binding introduced by a type-based match condition.
#[derive(Debug)]
pub struct MatchVariable {
    pub name: String,
    pub ty: Type,
}
#[derive(Debug)]
pub enum MatchCondition {
    /// Match a specific value.
    Value(Token<Value>),
    /// Match a type, and add a binding for the given name that can be resolved in the action.
    Type(MatchVariable),
}
/// One arm of a match declaration: a condition and the value it maps to.
#[derive(Debug)]
pub struct MatchMember {
    pub condition: Token<MatchCondition>,
    pub value: Token<Value>,
}
/// A `match` declaration made up of zero or more arms.
#[derive(Debug)]
pub struct MatchDecl {
    pub members: Vec<Token<MatchMember>>,
}
/// Common accessor shared by all declaration body types.
pub trait Body {
    /// The declared name of this body.
    fn name(&self) -> &str;
}
impl Body for TupleBody {
    fn name(&self) -> &str {
        &self.name
    }
}
impl Body for TypeBody {
    fn name(&self) -> &str {
        &self.name
    }
}
impl Body for EnumBody {
    fn name(&self) -> &str {
        &self.name
    }
}
impl Body for InterfaceBody {
    fn name(&self) -> &str {
        &self.name
    }
}
/// Body of a tuple declaration.
#[derive(Debug)]
pub struct TupleBody {
    pub name: String,
    pub members: Vec<Token<Member>>,
}
/// Body of an interface declaration, including its sub-types.
#[derive(Debug)]
pub struct InterfaceBody {
    pub name: String,
    pub members: Vec<Token<Member>>,
    pub sub_types: Vec<Token<SubType>>,
}
/// Body of a type declaration.
#[derive(Debug)]
pub struct TypeBody {
    pub name: String,
    pub members: Vec<Token<Member>>,
}
/// Sub-types in interface declarations.
#[derive(Debug)]
pub struct SubType {
    pub name: String,
    pub members: Vec<Token<Member>>,
}
/// Body of an enum declaration: its values plus any other members.
#[derive(Debug)]
pub struct EnumBody {
    pub name: String,
    pub values: Vec<Token<EnumValue>>,
    pub members: Vec<Token<Member>>,
}
/// A single enum value with optional constructor arguments and ordinal.
#[derive(Debug)]
pub struct EnumValue {
    pub name: String,
    pub arguments: Vec<Token<Value>>,
    pub ordinal: Option<Token<Value>>,
}
/// A top-level declaration of any kind.
#[derive(Debug)]
pub enum Decl {
    Type(TypeBody),
    Tuple(TupleBody),
    Interface(InterfaceBody),
    Enum(EnumBody),
}
impl Decl {
    /// Owned copy of the declaration's name.
    pub fn name(&self) -> String {
        let name = match *self {
            Decl::Interface(ref body) => &body.name,
            Decl::Type(ref body) => &body.name,
            Decl::Tuple(ref body) => &body.name,
            Decl::Enum(ref body) => &body.name,
        };
        name.clone()
    }
    /// Human-readable "kind name" form, e.g. `type Foo`.
    pub fn display(&self) -> String {
        let (kind, name) = match *self {
            Decl::Interface(ref body) => ("interface", &body.name),
            Decl::Type(ref body) => ("type", &body.name),
            Decl::Tuple(ref body) => ("tuple", &body.name),
            Decl::Enum(ref body) => ("enum", &body.name),
        };
        format!("{} {}", kind, name)
    }
}
/// A `use` declaration importing a package, optionally under an alias.
#[derive(Debug)]
pub struct UseDecl {
    pub package: Token<Package>,
    pub alias: Option<String>,
}
/// A fully parsed source file: package header, uses and declarations.
#[derive(Debug)]
pub struct File {
    pub package: Token<Package>,
    pub uses: Vec<Token<UseDecl>>,
    pub decls: Vec<Token<Decl>>,
}
|
//! lru-cache-macros
//! ================
//!
//! An attribute procedural macro to automatically cache the result of a function given a set of inputs.
//!
//! # Example:
//!
//! ```rust
//! use lru_cache_macros::lru_cache;
//!
//! #[lru_cache(20)]
//! fn fib(x: u32) -> u64 {
//! println!("{:?}", x);
//! if x <= 1 {
//! 1
//! } else {
//! fib(x - 1) + fib(x - 2)
//! }
//! }
//!
//! assert_eq!(fib(19), 6765);
//! ```
//!
//! The above example only calls `fib` twenty times, with the values from 0 to 19. All intermediate
//! results produced by the recursion are served from the cache.
//!
//! # Usage:
//!
//! Simply place `#[lru_cache([size])]` above your function. The function must obey a few properties
//! to use lru_cache:
//!
//! * All arguments and return values must implement `Clone`.
//! * The function may not take `self` in any form.
//!
//! The macro will use the LruCache at `::lru_cache::LruCache`. This may be made configurable in the future.
//!
//! The `LruCache` type used must accept two generic parameters `<Args, Return>` and must support methods
//! `get_mut(&K)` and `insert(K, V)`. The `lru-cache` crate meets these requirements.
//!
//! Currently, this crate only works on nightly rust. However, once the 2018 edition stabilizes as well as the
//! procedural macro diagnostic interface, it should be able to run on stable.
//!
//! # Details
//!
//! The created cache resides in thread-local storage so that multiple threads may simultaneously call
//! the decorated function, but will not share cached results with each other.
//!
//! The above example will generate the following code:
//!
//! ```rust
//! fn __lru_base_fib(x: u32) -> u64 {
//! if x <= 1 { 1 } else { fib(x - 1) + fib(x - 2) }
//! }
//! fn fib(x: u32) -> u64 {
//! use std::cell::UnsafeCell;
//! use std::thread_local;
//!
//! thread_local!(
//! static cache: UnsafeCell<::lru_cache::LruCache<(u32,), u64>> =
//! UnsafeCell::new(::lru_cache::LruCache::new(20usize));
//! );
//!
//! cache.with(|c|
//! {
//! let mut cache_ref = unsafe { &mut *c.get() };
//! let cloned_args = (x.clone(),);
//! let stored_result = cache_ref.get_mut(&cloned_args);
//! if let Some(stored_result) = stored_result {
//! *stored_result
//! } else {
//! let ret = __lru_base_fib(x);
//! cache_ref.insert(cloned_args, ret);
//! ret
//! }
//! })
//! }
//! ```
#![feature(extern_crate_item_prelude)]
#![feature(proc_macro_diagnostic)]
#![recursion_limit="128"]
extern crate proc_macro;
use proc_macro::TokenStream;
use syn;
use syn::{Token, parse_quote};
use syn::spanned::Spanned;
use syn::punctuated::Punctuated;
use quote::quote;
use proc_macro2;
/// Attribute macro entry point: wraps the annotated function so its
/// results are cached in a thread-local LRU cache of the size given in
/// `attr`.
///
/// On any validation failure a diagnostic is emitted (nightly
/// `proc_macro_diagnostic` API) and the original `item` is returned
/// unchanged so compilation can continue past the error.
#[proc_macro_attribute]
pub fn lru_cache(attr: TokenStream, item: TokenStream) -> TokenStream {
    // `original_fn` becomes the hidden base implementation; `new_fn`
    // keeps the public name and receives the caching wrapper body.
    let mut original_fn: syn::ItemFn = syn::parse(item.clone()).unwrap();
    let mut new_fn = original_fn.clone();
    let cache_size = get_lru_size(attr);
    if cache_size.is_none() {
        // Diagnostic already emitted by get_lru_size.
        return item;
    }
    let cache_size = cache_size.unwrap();
    // A function with no return value has nothing to cache.
    let return_type =
        if let syn::ReturnType::Type(_, ref ty) = original_fn.decl.output {
            Some(ty.clone())
        } else {
            original_fn.ident.span().unstable()
                .error("There's no point of caching the output of a function that has no output")
                .emit();
            return item;
        };
    // Rename the original function to `__lru_base_<name>`; the wrapper
    // calls it on a cache miss.
    let new_name = format!("__lru_base_{}", original_fn.ident.to_string());
    original_fn.ident = syn::Ident::new(&new_name[..], original_fn.ident.span());
    // Argument expressions for the call site plus the (dereferenced)
    // types that make up the cache-key tuple.
    let result = get_args_and_types(&original_fn);
    let call_args;
    let types;
    if let Some((args_inner, types_inner)) = result {
        call_args = args_inner;
        types = types_inner;
    } else {
        return item;
    }
    // `(arg0.clone(), arg1.clone(), ...)` — the cache-key expression.
    let cloned_args = make_cloned_args_tuple(&call_args);
    let fn_path = path_from_ident(original_fn.ident.clone());
    // Call expression for the renamed base function.
    let fn_call = syn::ExprCall {
        attrs: Vec::new(),
        paren_token: syn::token::Paren { span: proc_macro2::Span::call_site() },
        args: call_args.clone(),
        func: Box::new(fn_path)
    };
    let tuple_type = syn::TypeTuple {
        paren_token: syn::token::Paren { span: proc_macro2::Span::call_site() },
        elems: types,
    };
    // Note: `return_type` is always `Some` at this point; quote's
    // `ToTokens` impl for `Option` interpolates the inner tokens.
    let lru_body: syn::Block = parse_quote! {
        {
            use std::cell::UnsafeCell;
            use std::thread_local;
            thread_local!(
                // We use `UnsafeCell` here to allow recursion. Since it is in the TLS, it should
                // not introduce any actual unsafety.
                static cache: UnsafeCell<::lru_cache::LruCache<#tuple_type, #return_type>> =
                    UnsafeCell::new(::lru_cache::LruCache::new(#cache_size));
            );
            cache.with(|c| {
                let mut cache_ref = unsafe { &mut *c.get() };
                let cloned_args = #cloned_args;
                let stored_result = cache_ref.get_mut(&cloned_args);
                if let Some(stored_result) = stored_result {
                    *stored_result
                } else {
                    let ret = #fn_call;
                    cache_ref.insert(cloned_args, ret);
                    ret
                }
            })
        }
    };
    new_fn.block = Box::new(lru_body);
    // Emit both: the renamed base function and the caching wrapper
    // under the original name.
    let out = quote! {
        #original_fn
        #new_fn
    };
    out.into()
}
/// Builds a bare path expression (e.g. `foo`) from a single identifier.
fn path_from_ident(ident: syn::Ident) -> syn::Expr {
    // A one-segment path with no generic arguments and no leading `::`.
    let segment = syn::PathSegment {
        ident,
        arguments: syn::PathArguments::None,
    };
    let mut segments: Punctuated<_, Token![::]> = Punctuated::new();
    segments.push(segment);
    let path = syn::Path {
        leading_colon: None,
        segments,
    };
    syn::Expr::Path(syn::ExprPath {
        attrs: Vec::new(),
        qself: None,
        path,
    })
}
/// Parses the macro argument as an integer literal giving the maximum
/// cache size.
///
/// Emits a diagnostic and returns `None` when the argument is missing
/// or is not an integer literal.
fn get_lru_size(attr: TokenStream) -> Option<usize> {
    let parsed: Result<syn::LitInt, _> = syn::parse(attr.clone());
    match parsed {
        Ok(lit) => Some(lit.value() as usize),
        Err(_) => {
            proc_macro2::Span::call_site().unstable()
                .error("The lru_cache macro must specify a maximum cache size as an argument")
                .emit();
            None
        }
    }
}
/// Wraps every call argument in a `.clone()` call and groups the
/// results into a tuple expression, e.g. `(x.clone(), y.clone())`.
fn make_cloned_args_tuple(args: &Punctuated<syn::Expr, Token![,]>) -> syn::ExprTuple {
    let elems: Punctuated<_, Token![,]> = args
        .iter()
        .map(|arg| {
            // `<arg>.clone()` — no attributes, no turbofish, no call args.
            syn::Expr::MethodCall(syn::ExprMethodCall {
                attrs: Vec::new(),
                receiver: Box::new(arg.clone()),
                dot_token: syn::token::Dot { spans: [arg.span(); 1] },
                method: syn::Ident::new("clone", proc_macro2::Span::call_site()),
                turbofish: None,
                paren_token: syn::token::Paren { span: proc_macro2::Span::call_site() },
                args: Punctuated::new(),
            })
        })
        .collect();
    syn::ExprTuple {
        attrs: Vec::new(),
        paren_token: syn::token::Paren { span: proc_macro2::Span::call_site() },
        elems,
    }
}
/// Extracts the call-site argument expressions and the cache-key types
/// from the wrapped function's signature.
///
/// Returns `None` (after emitting a diagnostic) for any argument shape
/// the macro cannot cache: `self`/`&self` receivers, `mut` bindings,
/// inferred or ignored arguments. Reference types are unwrapped to
/// their element type because arguments are cloned into the cache key.
fn get_args_and_types(f: &syn::ItemFn) -> Option<(Punctuated<syn::Expr, Token![,]>, Punctuated<syn::Type, Token![,]>)> {
    let mut call_args = Punctuated::<_, Token![,]>::new();
    let mut types = Punctuated::<_, Token![,]>::new();
    for input in &f.decl.inputs {
        match input {
            syn::FnArg::SelfValue(p) => {
                p.span().unstable()
                    .error("`self` arguments are currently unsupported by lru_cache")
                    .emit();
                return None;
            }
            syn::FnArg::SelfRef(p) => {
                p.span().unstable()
                    .error("`&self` arguments are currently unsupported by lru_cache")
                    .emit();
                return None;
            }
            syn::FnArg::Captured(arg_captured) => {
                let mut segments: syn::punctuated::Punctuated<_, Token![::]> = syn::punctuated::Punctuated::new();
                // Reject `mut` bindings: the base function could alter
                // the argument before it is used, so caching on the
                // original value would be wrong.
                if let syn::Pat::Ident(ref pat_ident) = arg_captured.pat {
                    if let Some(m) = pat_ident.mutability {
                        m.span.unstable()
                            .error("`mut` arguments are not supported with lru_cache as this could lead to incorrect results being stored")
                            .emit();
                        return None;
                    }
                    segments.push(syn::PathSegment { ident: pat_ident.ident.clone(), arguments: syn::PathArguments::None });
                }
                // NOTE(review): non-identifier patterns (e.g. tuple
                // patterns) leave `segments` empty, producing an empty
                // path expression below — confirm this is unreachable
                // or intended.
                // If the arg type is a reference, remove the reference because the arg will be cloned
                if let syn::Type::Reference(type_reference) = &arg_captured.ty {
                    types.push(type_reference.elem.as_ref().to_owned()); // as_ref -> to_owned unboxes the type
                } else {
                    types.push(arg_captured.ty.clone());
                }
                call_args.push(syn::Expr::Path(syn::ExprPath { attrs: Vec::new(), qself: None, path: syn::Path { leading_colon: None, segments } }));
            },
            syn::FnArg::Inferred(p) => {
                p.span().unstable()
                    .error("inferred arguments are currently unsupported by lru_cache")
                    .emit();
                return None;
            }
            syn::FnArg::Ignored(p) => {
                p.span().unstable()
                    .error("ignored arguments are currently unsupported by lru_cache")
                    .emit();
                return None;
            }
        }
    }
    // A one-element tuple type needs a trailing comma (`(T,)`) to be a
    // tuple rather than a parenthesized type.
    if types.len() == 1 {
        types.push_punct(syn::token::Comma { spans: [proc_macro2::Span::call_site(); 1] })
    }
    Some((call_args, types))
}
|
use crate::Uncertain;
use rand_pcg::Pcg32;
// Parameters for the SPRT decision thresholds used in `compute`;
// presumably the desired confidence levels of the two decision bounds
// — TODO confirm against the SPRT formulation being followed.
const D0: f32 = 0.999;
const D1: f32 = 0.999;
// Number of samples drawn between checks of the stopping rule.
const STEP: usize = 10;
// Maximum number of batches before the test stops regardless.
const MAXS: usize = 1000;
/// Likelihood assigned to an observation `val` by the "accept"
/// hypothesis: `p = (1 + prob) / 2` when `val` is true, `1 - p`
/// otherwise.
fn accept_likelyhood(prob: f32, val: bool) -> f32 {
    let p = (1.0 + prob) * 0.5;
    match val {
        true => p,
        false => 1.0 - p,
    }
}
/// Likelihood assigned to an observation `val` by the "reject"
/// hypothesis: `p = prob / 2` when `val` is true, `1 - p` otherwise.
fn reject_likelyhood(prob: f32, val: bool) -> f32 {
    let p = prob * 0.5;
    if !val {
        1.0 - p
    } else {
        p
    }
}
/// Log likelihood ratio of the "reject" vs "accept" hypotheses for a
/// single observation, computed as a difference of logs.
fn log_likelyhood_ratio(prob: f32, val: bool) -> f32 {
    let reject_ln = reject_likelyhood(prob, val).ln();
    let accept_ln = accept_likelyhood(prob, val).ln();
    reject_ln - accept_ln
}
/// Compute the sequential probability ratio test.
///
/// Samples `src` in batches of `STEP` (at most `MAXS` batches),
/// accumulating the log likelihood ratio of the "reject" vs "accept"
/// hypotheses for the hypothesized probability `prob`, and stops early
/// once the accumulated ratio leaves the `(lower_ln, upper_ln)` band.
/// Returns `true` iff the final accumulated ratio is below the lower
/// threshold.
pub fn compute<U>(src: &U, prob: f32) -> bool
where
    U: Uncertain + ?Sized,
    U::Value: Into<bool>,
{
    // Fixed-seed PCG so results are deterministic across runs.
    let mut rng = Pcg32::new(0xcafef00dd15ea5e5, 0xa02bdbf7bb3c0a7);
    // Decision thresholds derived from the D0/D1 constants.
    let upper_ln = (D1 / (1.0 - D1)).ln();
    let lower_ln = ((1.0 - D0) / D0).ln();
    let mut ratio_ln = 0.0;
    for batch in 0..MAXS {
        for batch_step in 0..STEP {
            // Each sample gets a distinct, monotonically increasing index.
            let epoch = STEP * batch + batch_step;
            let val = src.sample(&mut rng, epoch).into();
            ratio_ln += log_likelyhood_ratio(prob, val);
        }
        // Stopping rule is checked only once per batch of STEP samples.
        if ratio_ln > upper_ln || ratio_ln < lower_ln {
            break;
        }
    }
    ratio_ln < lower_ln
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::*;
    use rand_distr::Bernoulli;
    /// SPRT on a fair coin: `compute` returns `true` for a hypothesized
    /// probability below the source's 0.5 and `false` for one above it.
    #[test]
    fn basic_sprt_works() {
        let src = Distribution::from(Bernoulli::new(0.5).unwrap());
        assert!(compute(&src, 0.4));
        assert!(!compute(&src, 0.6));
    }
    /// Spot-checks the closed-form likelihood values at the extremes of
    /// the probability range.
    #[test]
    fn likelyhood_sanity_check() {
        assert_eq!(accept_likelyhood(0.0, true), 0.5);
        assert_eq!(accept_likelyhood(1.0, true), 1.0);
        assert_eq!(reject_likelyhood(0.0, false), 1.0);
        assert_eq!(reject_likelyhood(1.0, false), 0.5);
    }
}
|
use super::{GetMode, SpanToken, Storage};
use super::rlex::{Lexer, token, Token, DelimToken, Ident};
use std::cell::UnsafeCell;
use std::str;
/// A pending text edit: replace `len` bytes starting at `pos` in the
/// original input with the bytes in `new`.
pub struct Splice {
    /// Byte offset into the original input where the edit starts.
    pos: usize,
    /// Number of original bytes replaced (0 means pure insertion).
    len: usize,
    /// Replacement bytes.
    new: Vec<u8>,
}
/// A saved byte position in the input, used to delimit ranges for
/// later copying or deletion.
#[derive(Copy, Clone, Debug)]
pub struct Mark {
    pos: usize,
}
/// Byte range `[start, end)` of an identifier in the input.
#[derive(Copy, Clone, Debug)]
pub struct InIdent {
    start: usize,
    end: usize,
}
impl InIdent {
    /// A mark at the start position of this identifier.
    pub fn mark(&self) -> Mark {
        Mark { pos: self.start }
    }
}
/// Position in the input: absolute byte offset plus line/column.
#[derive(Copy, Clone)]
pub struct Span {
    pub pos: usize,
    pub line: usize,
    pub col: usize,
}
/// A placeholder span whose `pos` is the all-ones sentinel (`!0`),
/// marking it as not referring to any real input position.
pub fn dummy_span() -> Span {
    let sentinel = !0;
    Span {
        pos: sentinel,
        line: 0,
        col: 0,
    }
}
/// Accumulates generated output text; `finish` records it as a
/// zero-length splice (a pure insertion) at `pos` in the reader.
pub struct TTWriter<'x, 'a: 'x> {
    /// Reader whose splice list receives the output on `finish`.
    tr: &'x mut TTReader<'a>,
    /// Insertion position captured when the writer was created.
    pos: usize,
    /// Bytes accumulated so far.
    pub out: Vec<u8>,
}
impl<'x, 'a: 'x> TTWriter<'x, 'a> {
    /// Appends the textual form of `tok` to the output, padded with a
    /// space on each side so adjacent tokens cannot fuse together.
    ///
    /// Panics on token kinds not listed here.
    pub fn write(&mut self, tok: Token) {
        let s = match tok {
            token::Pound => "#",
            token::Underscore => "_",
            token::Comma => ",",
            token::Colon => ":",
            token::ModSep => "::",
            token::Lt => "<",
            token::Gt => ">",
            token::OpenDelim(DelimToken::Bracket) => "[",
            token::CloseDelim(DelimToken::Bracket) => "]",
            token::OpenDelim(DelimToken::Paren) => "(",
            token::CloseDelim(DelimToken::Paren) => ")",
            token::OpenDelim(DelimToken::Brace) => "{",
            token::CloseDelim(DelimToken::Brace) => "}",
            token::Ident(ident) => ident.name.as_str(),
            // Only the tokens this writer is actually asked to emit are
            // handled; anything else is a bug in the caller.
            _ => panic!("missing case {:?}", tok),
        };
        self.out.push(b' ');
        self.out.extend_from_slice(s.as_bytes());
        self.out.push(b' ');
    }
    /// Appends a raw identifier string, without any space padding.
    pub fn write_ident_str(&mut self, ident_str: &str) {
        self.out.extend_from_slice(ident_str.as_bytes());
    }
    /// Copies the original input bytes between two marks into the
    /// output verbatim. The `GetMode` argument is currently ignored.
    pub fn copy_from_mark_range(&mut self, start: Mark, end: Mark, _: GetMode) {
        self.out.extend_from_slice(&self.tr.data[start.pos..end.pos]);
    }
    /// Records the accumulated output as a zero-length splice (pure
    /// insertion) at the position captured when the writer was created.
    pub fn finish(mut self) {
        self.tr.splices.push(Splice {
            pos: self.pos,
            len: 0,
            new: self.out,
        });
    }
}
/// Streams tokens from `data` via `lexer` while collecting `Splice`
/// edits that `output` later applies to produce the rewritten text.
pub struct TTReader<'a> {
    /// The original input bytes.
    data: &'a [u8],
    /// Lexer positioned within `data`.
    lexer: Lexer<'a>,
    /// Shared cell holding the most recently read span/token; `next`
    /// returns references into it.
    pub storage: &'a UnsafeCell<Storage>,
    /// Pending edits, applied in `output`.
    splices: Vec<Splice>,
}
impl<'a> TTReader<'a> {
    /// Creates a reader over `data`; `storage` is the cell that will
    /// hold the most recently read span/token for `next`.
    pub fn new(data: &'a [u8], storage: &'a UnsafeCell<Storage>) -> Self {
        TTReader {
            data: data,
            lexer: Lexer::new(data),
            storage: storage,
            splices: Vec::new(),
        }
    }
    /// Returns the next non-whitespace token with its span, or `None`
    /// at end of input.
    pub fn next(&mut self) -> Option<SpanToken<'a>> {
        loop {
            // Capture the position *before* lexing so the span points
            // at the token's start.
            let span = Span {
                pos: self.lexer.pos(),
                line: self.lexer.line(),
                col: self.lexer.col(),
            };
            match self.lexer.next() {
                token::Eof => return None,
                token::White => continue,
                tok @ _ => {
                    // The span/token are stashed in the shared storage
                    // cell and references into it are handed out; each
                    // call overwrites the previous contents.
                    // NOTE(review): soundness appears to rely on callers
                    // not holding the returned references across calls
                    // to `next` — confirm.
                    unsafe {
                        let ptr = self.storage.get();
                        (*ptr).span = span;
                        (*ptr).token = tok;
                        return Some(SpanToken { span: &(*ptr).span, token: &(*ptr).token });
                    }
                },
            }
        }
    }
    /// Currently identical to `next`; kept as a separate entry point
    /// for callers that distinguish the two operations.
    pub fn next_no_enter(&mut self) -> Option<SpanToken<'a>> {
        self.next()
    }
    /// Mark at the start of the token most recently returned by `next`.
    pub fn mark_last(&self) -> Mark {
        unsafe {
            Mark { pos: (*self.storage.get()).span.pos }
        }
    }
    /// Mark at the current lexer position (start of the next token).
    pub fn mark_next(&self) -> Mark {
        Mark { pos: self.lexer.pos() }
    }
    /// Byte range of the identifier just read: from the start of the
    /// last token to the current lexer position.
    pub fn last_ii(&self, _: &Ident) -> InIdent {
        InIdent {
            start: self.mark_last().pos,
            end: self.lexer.pos(),
        }
    }
    /// Rewinds the lexer to the given span's position; the `Mark`
    /// argument is currently ignored.
    pub fn rewind_to(&mut self, _: Mark, span: Span) {
        self.lexer.rewind_to(span.pos, span.line, span.col);
    }
    /// Queues deletion of the bytes between two marks; a no-op for an
    /// empty range.
    pub fn delete_mark_range(&mut self, start: Mark, end: Mark) {
        if start.pos == end.pos { return; }
        self.splices.push(Splice {
            pos: start.pos,
            len: end.pos - start.pos,
            new: Vec::new(),
        });
    }
    /// Queues replacement of an identifier's bytes with `new`.
    pub fn mutate_ident(&mut self, ii: InIdent, new: String) {
        self.splices.push(Splice {
            pos: ii.start,
            len: ii.end - ii.start,
            new: new.into_bytes(),
        });
    }
    /// The identifier's text as a `&str` borrowed from the original
    /// input. Panics if the range is not valid UTF-8.
    pub fn get_ident_str(&self, ii: InIdent) -> &'a str {
        str::from_utf8(&self.data[ii.start..ii.end]).unwrap()
    }
    /// Starts a writer whose output will be inserted at the current
    /// lexer position when it is finished.
    pub fn writer<'x>(&'x mut self) -> TTWriter<'x, 'a> {
        let pos = self.lexer.pos();
        TTWriter {
            tr: self,
            pos: pos,
            out: Vec::new(),
        }
    }
    /// Applies all queued splices to the input and returns the edited
    /// buffer, or `None` if no edits were queued. Splices are applied
    /// in position order; overlapping splices trip the assertion below.
    pub fn output(&mut self) -> Option<Vec<u8>> {
        if self.splices.is_empty() {
            None
        } else {
            self.splices.sort_by_key(|splice| splice.pos);
            let mut pos = 0;
            // Reserve the final size up front: original length plus all
            // inserted bytes (deletions only make this an overestimate).
            let mut out = Vec::with_capacity(self.data.len() +
                self.splices.iter()
                    .map(|s| s.new.len())
                    .sum::<usize>());
            for splice in &self.splices {
                assert!(pos <= splice.pos);
                out.extend_from_slice(&self.data[pos..splice.pos]);
                //out.push(b'<');
                out.extend_from_slice(&splice.new);
                //out.push(b'>');
                pos = splice.pos + splice.len;
            }
            out.extend_from_slice(&self.data[pos..]);
            Some(out)
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.