text stringlengths 8 4.13M |
|---|
mod sql_test;
use apllodb_server::test_support::test_setup;
use sql_test::{SqlTest, Step, StepRes, Steps};
// Runs once when the test binary is loaded, before any test executes:
// initializes the shared apllodb-server test fixtures.
#[ctor::ctor]
fn setup() {
    test_setup();
}
/// Exercises `ALTER TABLE … ADD [COLUMN]`: after creating the `people`
/// table, a transaction is opened and three column additions (with and
/// without the optional COLUMN keyword) must all succeed.
#[async_std::test]
async fn test_add_column() {
    let ddl = [
        "BEGIN",
        "ALTER TABLE people ADD COLUMN c1 INTEGER",
        "ALTER TABLE people ADD COLUMN c2 BIGINT NOT NULL",
        "ALTER TABLE people ADD c3 BIGINT",
    ];
    let mut test = SqlTest::default().add_steps(Steps::CreateTablePeople);
    for sql in ddl {
        test = test.add_step(Step::new(sql, StepRes::Ok));
    }
    test.run().await;
}
|
#![allow(dead_code, unused_variables)]
use futures::Future;
fn ma() {
    // NOTE(review): `foo1()` is an `async fn`, so `x` here is an unawaited
    // future, not a `usize`; its body never runs unless it is polled.
    // This looks intentional demo code (see `allow(dead_code)` above).
    let x = foo1();
}
/// An `async fn` returning zero; the compiler wraps the body into a future.
async fn foo1() -> usize {
    let value: usize = 0;
    value
}
/// The desugared twin of `foo1`: returns an anonymous future explicitly
/// via an `async` block instead of the `async fn` sugar.
fn foo2() -> impl Future<Output = usize> {
    async { 0_usize }
}
|
use web_sys::{WebGl2RenderingContext, WebGlShader};
use super::api::{WebRenderAPI};
/// Step-by-step assembler for a WebGL2 render pipeline: collects a context
/// and a pair of compiled shaders, then links them into a `WebRenderAPI`.
#[derive(Debug)]
pub struct RenderBuilder {
    // WebGL2 context; must be set before shaders can be compiled.
    webgl_context: Option<WebGl2RenderingContext>,
    // Compiled vertex shader, if `set_vert_shader` succeeded.
    vert_shader: Option<WebGlShader>,
    // Compiled fragment shader, if `set_frag_shader` succeeded.
    frag_shader: Option<WebGlShader>,
}
impl RenderBuilder {
    /// Creates an empty builder: no context and no shaders.
    pub fn new() -> Self {
        RenderBuilder {
            webgl_context: None,
            vert_shader: None,
            frag_shader: None,
        }
    }

    /// Stores the WebGL2 context used by all subsequent shader and
    /// program-building calls.
    pub fn set_context(&mut self, context: WebGl2RenderingContext) {
        self.webgl_context = Some(context);
    }

    /// Compiles `shader_source` as a fragment shader and stores it.
    ///
    /// Fails with `ExpectedContext` when no context was set, or with
    /// `FailedToCompileShader` carrying the driver's info log.
    pub fn set_frag_shader(&mut self, shader_source: &str) -> Result<(), BuildError> {
        let shader_type = WebGl2RenderingContext::FRAGMENT_SHADER;
        self.frag_shader = Some(self.create_shader(shader_source, shader_type)?);
        Ok(())
    }

    /// Compiles `shader_source` as a vertex shader and stores it.
    pub fn set_vert_shader(&mut self, shader_source: &str) -> Result<(), BuildError> {
        let shader_type = WebGl2RenderingContext::VERTEX_SHADER;
        self.vert_shader = Some(self.create_shader(shader_source, shader_type)?);
        Ok(())
    }

    /// Links the stored shaders into a program, makes it current, and wraps
    /// everything in a `WebRenderAPI`.
    ///
    /// Requires the context and both shaders to have been set.
    pub fn build_render_api(&self) -> Result<WebRenderAPI, BuildError> {
        let context = self.webgl_context.clone().ok_or(BuildError::ExpectedContext)?;
        let vert_shader = self.vert_shader.clone().ok_or(BuildError::ExpectedVertShaded)?;
        let frag_shader = self.frag_shader.clone().ok_or(BuildError::ExpectedFragShaded)?;
        let program = context.create_program().ok_or(BuildError::CannotCreateProgram)?;
        context.attach_shader(&program, &vert_shader);
        context.attach_shader(&program, &frag_shader);
        context.link_program(&program);
        // LINK_STATUS arrives as a JS value; anything that is not a bool is
        // treated as a link failure.
        let did_link = context
            .get_program_parameter(&program, WebGl2RenderingContext::LINK_STATUS)
            .as_bool()
            .ok_or(BuildError::FailedToLinkProgram)?;
        if !did_link {
            return Err(BuildError::FailedToLinkProgram);
        }
        context.use_program(Some(&program));
        Ok(WebRenderAPI::create(context, program))
    }

    /// Borrows the stored context, if any.
    fn get_context(&self) -> Option<&WebGl2RenderingContext> {
        // `as_ref` replaces the original manual match, which produced a
        // redundant double reference (`Some(&value)` on a borrowed value).
        self.webgl_context.as_ref()
    }

    /// Compiles `shader_source` as the given `shader_type`, returning the
    /// shader or the compiler's info log on failure.
    fn create_shader(&self, shader_source: &str, shader_type: u32) -> Result<WebGlShader, BuildError> {
        let context = self.get_context().ok_or(BuildError::ExpectedContext)?;
        let shader = context.create_shader(shader_type).ok_or(BuildError::CannotCreateShader)?;
        context.shader_source(&shader, shader_source);
        context.compile_shader(&shader);
        let did_compile = context
            .get_shader_parameter(&shader, WebGl2RenderingContext::COMPILE_STATUS)
            .as_bool()
            .ok_or(BuildError::FailedToCompileShader(None))?;
        if did_compile {
            Ok(shader)
        } else {
            Err(BuildError::FailedToCompileShader(context.get_shader_info_log(&shader)))
        }
    }
}
/// Errors produced while assembling the WebGL render pipeline.
// `Debug` added so failures can be logged/inspected with `{:?}`.
#[derive(Debug)]
pub enum BuildError {
    /// No WebGL2 context was supplied before it was needed.
    ExpectedContext,
    ExpectedVertShaded,
    ExpectedFragShaded,
    /// Shader compilation failed; carries the driver's info log when available.
    FailedToCompileShader(Option<String>),
    FailedToLinkProgram,
    CannotCreateShader,
    CannotCreateProgram,
}

// `Display` replaces the original manual `ToString` impl (clippy:
// `to_string_trait_impl`); `to_string()` keeps working via the blanket
// `ToString` impl for `Display` types, with byte-identical output.
impl std::fmt::Display for BuildError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            BuildError::ExpectedContext => write!(f, "expected webgl context to be defined"),
            BuildError::ExpectedFragShaded => write!(f, "expected frag shader to be defined"),
            BuildError::ExpectedVertShaded => write!(f, "expected vert shader to be defined"),
            BuildError::FailedToCompileShader(None) => {
                write!(f, "failed to compile shader, for an unknown reason")
            }
            BuildError::FailedToCompileShader(Some(reason)) => {
                write!(f, "failed to compile shader: {}", reason)
            }
            BuildError::FailedToLinkProgram => write!(f, "failed to link program"),
            BuildError::CannotCreateShader => write!(f, "could not create a shader from the context"),
            BuildError::CannotCreateProgram => write!(f, "could not create a program from the context"),
        }
    }
}
|
use alloc::boxed::Box;
use collections::{BTreeMap, String};
use core::cmp::{min, max};
use fs::{KScheme, Resource, ResourceSeek, VecResource};
use system::error::{Error, Result, ENOENT};
use system::syscall::{MODE_DIR, MODE_FILE, Stat};
// File table generated at build time (see ../../build/initfs.gen);
// provides `gen::gen()` used by `InitFsScheme::new`.
#[path="../../build/initfs.gen"]
pub mod gen;
/// Init Filesystem resource: a seekable, read-only view over one static
/// byte slice from the generated file table.
pub struct InitFsResource {
    // Full `initfs:/…` path this resource was opened with.
    path: String,
    // File contents, borrowed from the statically generated table.
    data: &'static [u8],
    // Current read cursor into `data`.
    seek: usize,
}
impl InitFsResource {
    /// Wraps a static byte slice as a resource named `path`, with the read
    /// cursor at the start of the data.
    pub fn new(path: String, data: &'static [u8]) -> Self {
        let resource = InitFsResource {
            path: path,
            data: data,
            seek: 0,
        };
        resource
    }
}
impl Resource for InitFsResource {
    /// Duplicates the handle, preserving the current seek position.
    fn dup(&self) -> Result<Box<Resource>> {
        Ok(box InitFsResource {
            path: self.path.clone(),
            data: self.data,
            seek: self.seek,
        })
    }

    /// Copies as much of the path as fits into `buf`; returns bytes written.
    fn path(&self, buf: &mut [u8]) -> Result<usize> {
        let path = self.path.as_bytes();
        let count = min(buf.len(), path.len());
        for i in 0..count {
            buf[i] = path[i];
        }
        Ok(count)
    }

    /// Reads from the current seek position into `buf`; returns the number
    /// of bytes read (0 once the end of the data is reached).
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        let mut i = 0;
        // The loop condition keeps `self.seek` in bounds, so direct
        // indexing is safe (the original redundantly re-checked via
        // `data.get`).
        while i < buf.len() && self.seek < self.data.len() {
            buf[i] = self.data[self.seek];
            self.seek += 1;
            i += 1;
        }
        Ok(i)
    }

    /// Moves the read cursor, clamping it to `[0, data.len()]`; returns the
    /// new position.
    fn seek(&mut self, pos: ResourceSeek) -> Result<usize> {
        let len = self.data.len();
        self.seek = match pos {
            ResourceSeek::Start(offset) => min(len, offset),
            // BUGFIX: the original clamped relative seeks against the
            // *current* position (`min(self.seek as isize, …)`), which made
            // every forward Current/End seek a no-op; clamp against the
            // data length instead.
            ResourceSeek::Current(offset) =>
                max(0, min(len as isize, self.seek as isize + offset)) as usize,
            ResourceSeek::End(offset) =>
                max(0, min(len as isize, len as isize + offset)) as usize,
        };
        Ok(self.seek)
    }

    /// Reports the data length and regular-file mode.
    fn stat(&self, stat: &mut Stat) -> Result<()> {
        stat.st_size = self.data.len() as u32;
        stat.st_mode = MODE_FILE;
        Ok(())
    }

    /// Nothing to flush for an in-memory, read-only resource.
    fn sync(&mut self) -> Result<()> {
        Ok(())
    }
}
/// A memory scheme serving the statically generated init filesystem.
pub struct InitFsScheme {
    // Map from file path to file contents, filled from `gen::gen()`.
    pub files: BTreeMap<&'static str, &'static [u8]>
}
impl InitFsScheme {
    /// Builds the scheme, loading the generated file table via `gen::gen()`.
    pub fn new() -> Box<InitFsScheme> {
        let scheme = InitFsScheme {
            files: gen::gen()
        };
        Box::new(scheme)
    }
}
impl KScheme for InitFsScheme {
    // Scheme name, i.e. the `initfs:` URL prefix.
    fn scheme(&self) -> &str {
        "initfs"
    }

    // Opens `initfs:` URLs. An exact file match returns the file's data;
    // otherwise the path is treated as a directory and a newline-separated
    // listing of its immediate children is returned (ENOENT when empty).
    fn open(&mut self, url: &str, _: usize) -> Result<Box<Resource>> {
        // Strip the `scheme:` prefix and any surrounding slashes.
        let reference = url.splitn(2, ":").nth(1).unwrap_or("").trim_matches('/');
        if let Some(data) = self.files.get(reference) {
            Ok(box InitFsResource::new(format!("initfs:/{}", reference), data))
        } else {
            // Build a directory listing of entries directly under `reference`.
            let mut list = String::new();
            'files: for file in self.files.iter() {
                let mut file_parts = file.0.split('/');
                // Skip files not under the requested directory: every
                // component of `reference` must match a leading component
                // of the file path.
                if ! reference.is_empty() {
                    let mut ref_parts = reference.split('/');
                    while let Some(ref_part) = ref_parts.next() {
                        if let Some(file_part) = file_parts.next() {
                            if file_part != ref_part {
                                continue 'files;
                            }
                        } else {
                            continue 'files;
                        }
                    }
                }
                // The next component is the child entry; add it once
                // (deduplicated against what is already in `list`).
                if let Some(file_part) = file_parts.next() {
                    for item in list.split('\n') {
                        if item == file_part {
                            continue 'files;
                        }
                    }
                    if ! list.is_empty() {
                        list.push('\n');
                    }
                    list.push_str(file_part);
                }
            }
            if ! list.is_empty() {
                if ! reference.is_empty() {
                    Ok(box VecResource::new(format!("initfs:/{}/", reference), list.into_bytes(), MODE_DIR))
                } else {
                    Ok(box VecResource::new(format!("initfs:/"), list.into_bytes(), MODE_DIR))
                }
            } else {
                Err(Error::new(ENOENT))
            }
        }
    }
}
|
use std::{collections::BTreeMap, fs::File, io::BufReader, path::PathBuf};
use assembly_xml::common::exact::{expect_attribute, expect_end, expect_start, expect_text};
use color_eyre::eyre::eyre;
use quick_xml::{events::Event as XmlEvent, Reader as XmlReader};
use structopt::StructOpt;
/// Command-line options: the localization XML file to read and the phrase-id
/// prefix used to select (and trim from) the keys of the emitted JSON.
#[derive(StructOpt)]
struct Options {
    // Path to the localization XML document.
    path: PathBuf,
    // Prefix a phrase id must carry to be included; it is stripped from the key.
    prefix: String,
}
/// Parses a `<localization>` XML document, collects the `en_US` translation
/// of every `<phrase>` whose id starts with `opt.prefix` (keyed by the id
/// with that prefix stripped), and prints the result as JSON on stdout.
fn main() -> color_eyre::Result<()> {
    let opt = Options::from_args();
    let file = File::open(opt.path)?;
    let file = BufReader::new(file);
    let mut reader = XmlReader::from_reader(file);
    reader.trim_text(true);
    let mut buf = Vec::new();
    // The `Reader` does not implement `Iterator` because it outputs borrowed data (`Cow`s)
    // Skip the optional leading `<?xml …?>` declaration, if present.
    if let Ok(XmlEvent::Decl(_)) = reader.read_event(&mut buf) {}
    buf.clear();
    let _ = expect_start("localization", &mut reader, &mut buf)?;
    //println!("<localization>");
    buf.clear();
    let e_locales = expect_start("locales", &mut reader, &mut buf)?;
    //println!("<locales>");
    let locale_count = expect_attribute("count", &reader, &e_locales)?;
    buf.clear();
    // Consume the `<locale>` list; the locale names themselves are unused.
    for _ in 0..locale_count {
        let _ = expect_start("locale", &mut reader, &mut buf)?;
        //print!("<locale>");
        buf.clear();
        let _locale = expect_text(&mut reader, &mut buf)?;
        //print!("{}", locale);
        buf.clear();
        let _ = expect_end("locale", &mut reader, &mut buf)?;
        //println!("</locale>");
        buf.clear();
    }
    let _ = expect_end("locales", &mut reader, &mut buf)?;
    buf.clear();
    //println!("</locales>");
    let mut dict = BTreeMap::new();
    let e_locales = expect_start("phrases", &mut reader, &mut buf)?;
    //println!("<phrases>");
    let phrase_count = expect_attribute("count", &reader, &e_locales)?;
    buf.clear();
    for _ in 0..phrase_count {
        let e_phrase = expect_start("phrase", &mut reader, &mut buf)?;
        let id: String = expect_attribute("id", &reader, &e_phrase)?;
        // `None` when the id lacks the prefix — the phrase is then skipped below.
        let key = id.strip_prefix(&opt.prefix).map(|x| x.to_owned());
        buf.clear();
        let mut translation = None;
        // Walk the phrase's `<translation>` children until `</phrase>`.
        loop {
            let event = reader.read_event(&mut buf)?;
            let e_translation = match event {
                XmlEvent::End(e) => {
                    if e.name() == b"phrase" {
                        break;
                    } else {
                        let name_str = reader.decode(e.name());
                        return Err(eyre!("Unexpected end tag </{}>", name_str));
                    }
                }
                XmlEvent::Start(e) => {
                    if e.name() == b"translation" {
                        e
                    } else {
                        let name_str = reader.decode(e.name());
                        return Err(eyre!("Unexpected tag <{}>", name_str));
                    }
                }
                _ => panic!(),
            };
            let locale: String = expect_attribute("locale", &reader, &e_translation)?;
            buf.clear();
            let trans = expect_text(&mut reader, &mut buf)?;
            // Only the en_US translation is kept (the last one wins).
            if &locale == "en_US" {
                translation = Some(trans);
            }
            buf.clear();
            let _ = expect_end("translation", &mut reader, &mut buf)?;
            buf.clear();
        }
        // Keep the phrase only when it matched the prefix AND had an en_US text.
        if let (Some(key), Some(translation)) = (key, translation) {
            dict.insert(key, translation);
        }
    }
    let _ = expect_end("phrases", &mut reader, &mut buf)?;
    //println!("</phrases>");
    buf.clear();
    let string: String = serde_json::to_string(&dict)?;
    println!("{}", string);
    Ok(())
}
|
//! Netherrack-specific reading I/O operations
use io_operations::reader::Reader;
extern crate varint;
extern crate bit_utils;
use self::bit_utils::BitInformation;
pub trait NetherrackReader : Reader {
    /// Reads a UTF-8 string from the buffer.
    ///
    /// Wire format: an unsigned varint32 byte length followed by that many
    /// bytes of UTF-8 data.
    #[allow(unused_variables)] //Only for error stuff
    fn read_utf8_string(&mut self) -> Result<String, &'static str> {
        match self.read_unsigned_varint_32() {
            Err(error) => Err("Bad size for utf-8 string"),
            Ok(mut size) => {
                // `size` counts bytes, not characters.
                let mut bytes: Vec<u8> = Vec::<u8>::with_capacity(size as usize);
                while size > 0 {
                    match self.read_unsigned_byte() {
                        Err(error) => {
                            return Err("Bad things while reading a UTF-8 string");
                        }
                        Ok(value) => {
                            bytes.push(value);
                            size -= 1;
                        }
                    }
                }
                match String::from_utf8(bytes) {
                    Err(errval) => Err("Error parsing UTF-8 String"),
                    Ok(string) => Ok(string),
                }
            }
        }
    }

    /// Reads a signed 32-bit Varint (zigzag-encoded) from this NetherrackReader
    #[allow(unused_variables)] //For the error handling as we need to change the string
    fn read_signed_varint_32(&mut self) -> Result<i32, &'static str> {
        match self.read_unsigned_varint_32() {
            Ok(value) => Ok(self::varint::zigzag_unsigned_int(value)),
            Err(error) => Err("Could not read a signed varint32"),
        }
    }

    /// Reads an unsigned 32-bit Varint (7 data bits per byte, high bit =
    /// continuation) from this NetherrackReader
    fn read_unsigned_varint_32(&mut self) -> Result<u32, &'static str> {
        // The number of bits to shift by. <<0, <<7, <<14, etc
        let mut shift_amount: u32 = 0;
        // The decoded value
        let mut decoded_value: u32 = 0;
        loop {
            match self.read_unsigned_byte() {
                Err(error) => {
                    error!("Got an error while reading a byte for an unsigned varint32: {}", error);
                    return Err("Could not read an unsigned byte for an unsigned varint32");
                }
                Ok(byte_value) => {
                    decoded_value |= ((byte_value & 0b01111111) as u32) << shift_amount;
                    // See if we're supposed to keep reading
                    if byte_value.has_most_signifigant_bit() {
                        shift_amount += 7;
                        // BUGFIX: a varint32 has at most 5 payload groups
                        // (shifts 0..=28). The original kept shifting and
                        // would overflow `<< 35` on malformed input; report
                        // an error instead.
                        if shift_amount > 28 {
                            return Err("Malformed unsigned varint32: too many bytes");
                        }
                    } else {
                        return Ok(decoded_value);
                    }
                }
            }
        }
    }
}
// Opt-in implementations: both types already implement `Reader`, so they
// inherit every Netherrack method from the trait's default bodies.
impl NetherrackReader for ::std::io::Cursor<Vec<u8>> { }
impl NetherrackReader for ::std::net::TcpStream { }
|
// Reader/writer facades for the LPMCCR register. Both fields are 8 bits
// wide (see the `FieldWriter<_, _, 8, _>` aliases); the bit offset is the
// const parameter `O` supplied at the use sites below.
#[doc = "Register `LPMCCR` reader"]
pub type R = crate::R<LPMCCR_SPEC>;
#[doc = "Register `LPMCCR` writer"]
pub type W = crate::W<LPMCCR_SPEC>;
#[doc = "Field `VLPSIZE` reader - VACT largest packet size"]
pub type VLPSIZE_R = crate::FieldReader;
#[doc = "Field `VLPSIZE` writer - VACT largest packet size"]
pub type VLPSIZE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `LPSIZE` reader - Largest packet size"]
pub type LPSIZE_R = crate::FieldReader;
#[doc = "Field `LPSIZE` writer - Largest packet size"]
pub type LPSIZE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
impl R {
    #[doc = "Bits 0:7 - VACT largest packet size"]
    #[inline(always)]
    pub fn vlpsize(&self) -> VLPSIZE_R {
        // Low byte of the register value.
        let raw = (self.bits & 0x00ff) as u8;
        VLPSIZE_R::new(raw)
    }
    #[doc = "Bits 16:23 - Largest packet size"]
    #[inline(always)]
    pub fn lpsize(&self) -> LPSIZE_R {
        // Byte at bit offset 16.
        let raw = ((self.bits >> 16) & 0x00ff) as u8;
        LPSIZE_R::new(raw)
    }
}
impl W {
    #[doc = "Bits 0:7 - VACT largest packet size"]
    #[inline(always)]
    #[must_use]
    pub fn vlpsize(&mut self) -> VLPSIZE_W<LPMCCR_SPEC, 0> {
        VLPSIZE_W::new(self)
    }
    #[doc = "Bits 16:23 - Largest packet size"]
    #[inline(always)]
    #[must_use]
    pub fn lpsize(&mut self) -> LPSIZE_W<LPMCCR_SPEC, 16> {
        LPSIZE_W::new(self)
    }
    // Bypasses the per-field writers; the caller is responsible for
    // supplying a bit pattern valid for this register.
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DSI Host low-power mode current configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`lpmccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`lpmccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct LPMCCR_SPEC;
impl crate::RegisterSpec for LPMCCR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`lpmccr::R`](R) reader structure"]
impl crate::Readable for LPMCCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`lpmccr::W`](W) writer structure"]
impl crate::Writable for LPMCCR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets LPMCCR to value 0"]
impl crate::Resettable for LPMCCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/*!
Type-level ascii characters and bytes.
This module is declared at the root so that error messages will print a shorter type.
*/
#![allow(non_snake_case, non_camel_case_types)]
/*
This is code used to generate the macro invocation.
fn main() {
let mut list=(0..=255u8)
.map(|b|{
let c=b as char;
if (c.is_alphanumeric() || c=='_') && b<128 {
format!("(_{1},B{0}),",b,b as char)
}else{
format!("(B{0}),",b)
}
})
.collect::<Vec<_>>();
for chunk in list.chunks(8) {
for param in chunk {
print!("{}",param);
}
println!();
}
}
*/
// Expands a comma-separated list of `(Struct)` / `(Struct, Alias)` tuples
// into unit structs, plus a type alias for the two-element form.
macro_rules! create_unit_struct {
    // `(Struct, Alias)`: unit struct `Struct` with `Alias` pointing at it
    // (used below to give byte structs like `B65` a character name `_A`).
    (inner; ($struct_:ident ,$alias:ident ) )=>{
        #[derive(Debug)]
        pub struct $struct_;
        pub type $alias=$struct_;
    };
    // `(Struct)`: bare unit struct, no alias.
    (inner; ($struct_:ident) )=>{
        #[derive(Debug)]
        pub struct $struct_;
    };
    // Entry point: dispatch each tuple to one of the `inner` arms.
    ($( $param:tt ),* $(,)*) => {
        $(
            create_unit_struct!(inner; $param );
        )*
    }
}
// One unit struct per byte value 0..=255. For ASCII alphanumerics and `_`
// (bytes < 128) the struct is named after the character (e.g. `_A`, `_0`,
// `__`) and the `Bnn` name is a type alias for it.
create_unit_struct! {
    (B0),(B1),(B2),(B3),(B4),(B5),(B6),(B7),
    (B8),(B9),(B10),(B11),(B12),(B13),(B14),(B15),
    (B16),(B17),(B18),(B19),(B20),(B21),(B22),(B23),
    (B24),(B25),(B26),(B27),(B28),(B29),(B30),(B31),
    (B32),(B33),(B34),(B35),(B36),(B37),(B38),(B39),
    (B40),(B41),(B42),(B43),(B44),(B45),(B46),(B47),
    (_0,B48),(_1,B49),(_2,B50),(_3,B51),(_4,B52),(_5,B53),(_6,B54),(_7,B55),
    (_8,B56),(_9,B57),(B58),(B59),(B60),(B61),(B62),(B63),
    (B64),(_A,B65),(_B,B66),(_C,B67),(_D,B68),(_E,B69),(_F,B70),(_G,B71),
    (_H,B72),(_I,B73),(_J,B74),(_K,B75),(_L,B76),(_M,B77),(_N,B78),(_O,B79),
    (_P,B80),(_Q,B81),(_R,B82),(_S,B83),(_T,B84),(_U,B85),(_V,B86),(_W,B87),
    (_X,B88),(_Y,B89),(_Z,B90),(B91),(B92),(B93),(B94),(__,B95),
    (B96),(_a,B97),(_b,B98),(_c,B99),(_d,B100),(_e,B101),(_f,B102),(_g,B103),
    (_h,B104),(_i,B105),(_j,B106),(_k,B107),(_l,B108),(_m,B109),(_n,B110),(_o,B111),
    (_p,B112),(_q,B113),(_r,B114),(_s,B115),(_t,B116),(_u,B117),(_v,B118),(_w,B119),
    (_x,B120),(_y,B121),(_z,B122),(B123),(B124),(B125),(B126),(B127),
    (B128),(B129),(B130),(B131),(B132),(B133),(B134),(B135),
    (B136),(B137),(B138),(B139),(B140),(B141),(B142),(B143),
    (B144),(B145),(B146),(B147),(B148),(B149),(B150),(B151),
    (B152),(B153),(B154),(B155),(B156),(B157),(B158),(B159),
    (B160),(B161),(B162),(B163),(B164),(B165),(B166),(B167),
    (B168),(B169),(B170),(B171),(B172),(B173),(B174),(B175),
    (B176),(B177),(B178),(B179),(B180),(B181),(B182),(B183),
    (B184),(B185),(B186),(B187),(B188),(B189),(B190),(B191),
    (B192),(B193),(B194),(B195),(B196),(B197),(B198),(B199),
    (B200),(B201),(B202),(B203),(B204),(B205),(B206),(B207),
    (B208),(B209),(B210),(B211),(B212),(B213),(B214),(B215),
    (B216),(B217),(B218),(B219),(B220),(B221),(B222),(B223),
    (B224),(B225),(B226),(B227),(B228),(B229),(B230),(B231),
    (B232),(B233),(B234),(B235),(B236),(B237),(B238),(B239),
    (B240),(B241),(B242),(B243),(B244),(B245),(B246),(B247),
    (B248),(B249),(B250),(B251),(B252),(B253),(B254),(B255),
}
|
use crate::ast_to_rustspec::{translate_base_typ, translate_expr_expects_exp, SpecialNames};
use crate::name_resolution::TopLevelContext;
use crate::rustspec::*;
use pretty::RcDoc;
use proc_macro2;
use rustc_ast::{
ast::{self, AttrVec, Attribute, Expr, ExprKind, Path, PathSegment, Ty},
node_id::NodeId,
ptr::P,
token::{Delimiter, Lit, LitKind as TokenLitKind, TokenKind},
tokenstream::{TokenStream, TokenTree},
};
use rustc_session::Session;
use rustc_span::{symbol, Span};
use serde::{ser::SerializeSeq, Serialize, Serializer};
fn translate_pearlite_binop(op: syn::BinOp) -> ast::BinOpKind {
match op {
syn::BinOp::Add(_) => ast::BinOpKind::Add,
syn::BinOp::Sub(_) => ast::BinOpKind::Sub,
syn::BinOp::Mul(_) => ast::BinOpKind::Mul,
syn::BinOp::Div(_) => ast::BinOpKind::Div,
syn::BinOp::Rem(_) => ast::BinOpKind::Rem,
syn::BinOp::And(_) => ast::BinOpKind::And,
syn::BinOp::Or(_) => ast::BinOpKind::Or,
syn::BinOp::BitXor(_) => ast::BinOpKind::BitXor,
syn::BinOp::BitAnd(_) => ast::BinOpKind::BitAnd,
syn::BinOp::BitOr(_) => ast::BinOpKind::BitOr,
syn::BinOp::Shl(_) => ast::BinOpKind::Shl,
syn::BinOp::Shr(_) => ast::BinOpKind::Shr,
syn::BinOp::Eq(_) => ast::BinOpKind::Eq,
syn::BinOp::Lt(_) => ast::BinOpKind::Lt,
syn::BinOp::Le(_) => ast::BinOpKind::Le,
syn::BinOp::Ne(_) => ast::BinOpKind::Ne,
syn::BinOp::Ge(_) => ast::BinOpKind::Ge,
syn::BinOp::Gt(_) => ast::BinOpKind::Gt,
binop => panic!("binop error: {:#?}", binop), // Error
// syn::BinOp::AddEq(_) => ast::BinOpKind::AddEq,
// syn::BinOp::SubEq(_) => ast::BinOpKind::SubEq,
// syn::BinOp::MulEq(_) => ast::BinOpKind::MulEq,
// syn::BinOp::DivEq(_) => ast::BinOpKind::DivEq,
// syn::BinOp::RemEq(_) => ast::BinOpKind::RemEq,
// syn::BinOp::BitXorEq(_) => ast::BinOpKind::BitXorEq,
// syn::BinOp::BitAndEq(_) => ast::BinOpKind::BitAndEq,
// syn::BinOp::BitOrEq(_) => ast::BinOpKind::BitOrEq,
// syn::BinOp::ShlEq(_) => ast::BinOpKind::ShlEq,
// syn::BinOp::ShrEq(_) => ast::BinOpKind::ShrEq,
// _ => RcDoc::as_string(format!("TODO: {:?}", b)),
}
}
/// Converts a `proc_macro2` identifier into a `rustc_span` identifier
/// carrying the supplied span.
pub(crate) fn translate_pearlite_ident(
    ident: proc_macro2::Ident,
    span: Span,
) -> rustc_span::symbol::Ident {
    let name = rustc_span::symbol::Symbol::intern(&ident.to_string());
    rustc_span::symbol::Ident::new(name, span)
}
/// Translates `t` and keeps the result only when it is a plain expression
/// (no quantifier, implication, equality or negation structure).
pub(crate) fn translate_pearlite_unquantified(
    sess: &Session,
    t: pearlite_syn::term::Term,
    span: Span,
) -> Option<Expr> {
    if let Quantified::Unquantified(e) = translate_pearlite(sess, t, span) {
        Some(e)
    } else {
        None
    }
}
/// Lowers a `syn` literal from a pearlite term into a `rustc_ast` token
/// literal. Only integer and boolean literals are supported; `span` is
/// currently unused but kept for parity with the other helpers.
// Fixes: dropped the unused `'a` lifetime parameter and the needless
// `l.clone()` (the argument is already owned).
fn translate_pearlite_lit(l: syn::Lit, span: Span) -> Lit {
    match l {
        syn::Lit::Int(lit) => Lit {
            kind: rustc_ast::token::LitKind::Integer,
            symbol: rustc_span::symbol::Symbol::intern(lit.base10_digits()),
            // Preserve any suffix (`0u32` etc.); empty suffixes intern as "".
            suffix: Some(rustc_span::symbol::Symbol::intern(lit.suffix())),
        },
        syn::Lit::Bool(lit) => Lit {
            kind: rustc_ast::token::LitKind::Bool,
            symbol: rustc_span::symbol::Symbol::intern(format!("{}", lit.value()).as_str()),
            suffix: None,
        },
        _ => panic!("TODO: Implement pearlite literals"),
    }
}
fn translate_id(id: rustc_span::symbol::Ident) -> Ident {
Ident::Unresolved(format!("{}", id))
}
/// Lowers a `syn` type from a pearlite term into a `rustc_ast` type.
///
/// Only plain path types (e.g. `u32`, `Seq`) are handled; every path
/// segment is translated without generic arguments. All other `syn::Type`
/// shapes abort translation.
fn translate_pearlite_type(sess: &Session, typ: syn::Type, span: Span) -> rustc_ast::ast::Ty {
    match typ {
        syn::Type::Path(syn::TypePath {
            qself: _,
            path:
                syn::Path {
                    leading_colon: _,
                    segments: s,
                },
        }) => {
            let segments = s
                .iter()
                .map(|segment| rustc_ast::ast::PathSegment {
                    ident: translate_pearlite_ident(segment.ident.clone(), span),
                    id: NodeId::MAX,
                    args: None,
                })
                .collect();
            let path = rustc_ast::ast::Path {
                span,
                segments,
                tokens: None,
            };
            Ty {
                tokens: None,
                id: NodeId::MAX,
                kind: rustc_ast::TyKind::Path(None, path),
                span,
            }
        }
        _ => panic!("Type panic"),
    }
}
// translate_expr
/// Lowers a pearlite term into either a plain rustc-AST expression
/// (`Quantified::Unquantified`) or logical structure (`Forall`, `Eq`,
/// `Implication`, `Not`) whose leaves are rustc-AST expressions.
///
/// Unsupported term shapes (the commented-out arms below) panic.
pub(crate) fn translate_pearlite(
    sess: &Session,
    t: pearlite_syn::term::Term,
    span: Span,
) -> Quantified<(Ident, Ty), Expr> {
    let kind = match t {
        // pearlite_syn::term::Term::Array(_) => RcDoc::as_string("TODOArray"),
        pearlite_syn::term::Term::Binary(pearlite_syn::term::TermBinary { left, op, right }) => {
            // `==` is lifted to the logical `Quantified::Eq` node rather
            // than being kept as an ordinary binary expression.
            if translate_pearlite_binop(op) == ast::BinOpKind::Eq {
                return Quantified::Eq(
                    Box::new(translate_pearlite(sess, *left, span)),
                    Box::new(translate_pearlite(sess, *right, span)),
                )
            }
            ExprKind::Binary(
                rustc_span::source_map::Spanned {
                    node: translate_pearlite_binop(op),
                    span,
                },
                P(translate_pearlite_unquantified(sess, *left, span).unwrap()),
                P(translate_pearlite_unquantified(sess, *right, span).unwrap()),
            )
        }
        // pearlite_syn::term::Term::Block(pearlite_syn::term::TermBlock { block, .. }) => {
        //     ExprKind::Block(
        //         P(rustc_ast::ast::Block {
        //             stmts: block
        //                 .stmts
        //                 .map(|x| Stmt {
        //                     id: NodeId::from_usize(0),
        //                     kind: match x {
        //                         pearlite_syn::term::TermStmt::Local(pearlite_syn::term::TLocal { let_token, pat, init, semi_token }) =>
        //                             rustc_ast::ast::StmtKind::Local(P(rustc_ast::ast::Local {id: NodeId::from_usize(0), pat: P(pat), None, })),
        //                     },
        //                     span,
        //                 })
        //                 .collect(),
        //             id: NodeId::from_usize(0),
        //             rules: BlockCheckMode::Default,
        //             span,
        //             tokens: None,
        //             could_be_bare_literal: true,
        //         }),
        //         None,
        //     )
        // }
        pearlite_syn::term::Term::Call(pearlite_syn::term::TermCall { func, args, .. }) => {
            ExprKind::Call(
                P(translate_pearlite_unquantified(sess, *func, span).unwrap()),
                args.into_iter()
                    .map(|x| P(translate_pearlite_unquantified(sess, x, span).unwrap()))
                    .collect(),
            )
        }
        // pearlite_syn::term::Term::Cast(_) => RcDoc::as_string("TODOCast"),
        // pearlite_syn::term::Term::Field(pearlite_syn::term::TermField { base, member, .. }) => {
        //     RcDoc::as_string("TODOField")
        // }
        // pearlite_syn::term::Term::Group(_) => RcDoc::as_string("TODOGroup"),
        // pearlite_syn::term::Term::If(pearlite_syn::term::TermIf {
        //     cond,
        //     then_branch,
        //     else_branch,
        //     ..
        // }) => RcDoc::as_string("TODOIf"),
        pearlite_syn::term::Term::Index(pearlite_syn::term::TermIndex { expr, index, .. }) => {
            ExprKind::Index(
                P(translate_pearlite_unquantified(sess, *expr, span).unwrap()),
                P(translate_pearlite_unquantified(sess, *index, span).unwrap()),
            )
        }
        // pearlite_syn::term::Term::Let(_) => RcDoc::as_string("TODOLet"),
        pearlite_syn::term::Term::Lit(pearlite_syn::term::TermLit { ref lit }) => {
            ExprKind::Lit(translate_pearlite_lit(lit.clone(), span))
        }
        // pearlite_syn::term::Term::Match(pearlite_syn::term::TermMatch { expr, arms, .. }) => {
        //     RcDoc::as_string("TODOMatch")
        // }
        pearlite_syn::term::Term::MethodCall(pearlite_syn::term::TermMethodCall {
            receiver,
            method,
            turbofish, // TODO: turbofish??
            args,
            ..
        }) => {
            let mut arg_expr = args.into_iter()
                .map(|x| P(translate_pearlite_unquantified(sess, x, span).unwrap())).collect();
            let mut receiver_expr = P(
                translate_pearlite_unquantified(sess, *receiver, span).unwrap()
            );
            ExprKind::MethodCall(
                Box::new(
                    ast::MethodCall {
                        seg: PathSegment {
                            ident: translate_pearlite_ident(method, span),
                            id: NodeId::MAX,
                            args: None,
                        },
                        receiver: receiver_expr,
                        args: arg_expr,
                        span,
                    }
                ),
            )
        }
        pearlite_syn::term::Term::Paren(pearlite_syn::term::TermParen { expr, .. }) => {
            // Parentheses are transparent: translate the inner term as-is,
            // preserving any quantified structure.
            // match expr.clone() {
            //     ExprKind::Paren(P(
            return translate_pearlite(sess, *expr, span); // _unquantified.unwrap()
            //     ))
            // }
        }
        pearlite_syn::term::Term::Path(pearlite_syn::term::TermPath {
            inner:
                syn::ExprPath {
                    attrs: _,
                    qself: _,
                    path:
                        syn::Path {
                            leading_colon: _,
                            segments: s,
                        },
                },
        }) => ExprKind::Path(
            None,
            Path {
                span,
                segments: s
                    .iter()
                    .map(|x| match x {
                        syn::PathSegment {
                            ident: id,
                            arguments: args,
                            ..
                        } => rustc_ast::ast::PathSegment {
                            ident: translate_pearlite_ident(id.clone(), span),
                            id: NodeId::MAX,
                            // Only angle-bracketed *type* arguments survive
                            // translation; parenthesized args are dropped.
                            args: match args {
                                syn::PathArguments::None => None,
                                syn::PathArguments::AngleBracketed(
                                    syn::AngleBracketedGenericArguments { args: ab_args, .. },
                                ) => Some(P(rustc_ast::ast::AngleBracketed(
                                    rustc_ast::ast::AngleBracketedArgs {
                                        span,
                                        args: ab_args
                                            .into_iter()
                                            .map(|arg| match arg {
                                                syn::GenericArgument::Type(ty) => {
                                                    rustc_ast::ast::AngleBracketedArg::Arg(
                                                        rustc_ast::ast::GenericArg::Type(P(
                                                            translate_pearlite_type(
                                                                sess,
                                                                ty.clone(),
                                                                span,
                                                            ),
                                                        )),
                                                    )
                                                }
                                                _ => panic!("Missing cases"),
                                            })
                                            .collect(),
                                    },
                                ))),
                                syn::PathArguments::Parenthesized(pga) => None,
                            },
                        },
                    })
                    .collect(),
                tokens: None,
            },
        ),
        // pearlite_syn::term::Term::Range(_) => RcDoc::as_string("TODORange"),
        // pearlite_syn::term::Term::Repeat(_) => RcDoc::as_string("TODORepeat"),
        // pearlite_syn::term::Term::Struct(_) => RcDoc::as_string("TODOStruct"),
        // pearlite_syn::term::Term::Tuple(pearlite_syn::term::TermTuple { elems, .. }) => {
        //     make_paren(RcDoc::intersperse(
        //         elems
        //             .into_iter()
        //             .map(|x| make_paren(translate_pearlite(x, top_ctx, idents.clone()))),
        //         RcDoc::as_string(",").append(RcDoc::space()),
        //     ))
        // }
        // pearlite_syn::term::Term::Type(ty) => RcDoc::as_string("TODOType"),
        pearlite_syn::term::Term::Unary(pearlite_syn::term::TermUnary { op, expr }) => {
            // Only logical negation is supported; it is lifted to the
            // `Quantified::Not` node.
            if let syn::UnOp::Not(_) = op {
                return Quantified::Not(
                    Box::new(translate_pearlite(sess, *expr, span)),
                )
            }
            else {
                panic!("translate_pearlite_todo unary: {:#?} {:#?}\n", op, expr);
            }
            // RcDoc::as_string("TODOUnary").append(translate_pearlite(*expr, top_ctx, idents.clone()))
        }
        // pearlite_syn::term::Term::Final(pearlite_syn::term::TermFinal { term, .. }) => {
        //     RcDoc::as_string("TODOFinal").append(translate_pearlite(*term, top_ctx, idents.clone()))
        // }
        pearlite_syn::term::Term::Model(pearlite_syn::term::TermModel { term, .. }) => {
            // TODO: Does not make sence in combination with hacspec!
            return translate_pearlite(sess, *term, span); // Model supported? (@)
        }
        // pearlite_syn::term::Term::Verbatim(_) => RcDoc::as_string("TODOVerbatim"),
        pearlite_syn::term::Term::LogEq(pearlite_syn::term::TermLogEq { lhs, rhs, .. }) => {
            return Quantified::Eq(
                Box::new(translate_pearlite(sess, *lhs, span)),
                Box::new(translate_pearlite(sess, *rhs, span)),
            )
        }
        pearlite_syn::term::Term::Impl(pearlite_syn::term::TermImpl { hyp, cons, .. }) => {
            return Quantified::Implication(
                Box::new(translate_pearlite(sess, *hyp, span)),
                Box::new(translate_pearlite(sess, *cons, span)),
            );
            // make_paren(translate_pearlite(*hyp, top_ctx, idents.clone()))
            //     .append(RcDoc::space())
            //     .append(RcDoc::as_string("->"))
            //     .append(RcDoc::space())
            //     .append(make_paren(translate_pearlite(*cons, top_ctx, idents.clone())))
        }
        pearlite_syn::term::Term::Forall(pearlite_syn::term::TermForall { args, term, .. }) => {
            // Binder list: each quantified variable with its translated type.
            return Quantified::Forall(
                args.iter()
                    .map(|x| {
                        (
                            translate_id(translate_pearlite_ident(x.ident.clone(), span)),
                            translate_pearlite_type(sess, *x.ty.clone(), span),
                        )
                    })
                    .collect(),
                Box::new(translate_pearlite(sess, *term, span)),
            );
        }
        // pearlite_syn::term::Term::Exists(pearlite_syn::term::TermExists { args, term, .. }) => {
        //     RcDoc::as_string("exists")
        //         .append(RcDoc::space())
        //         .append(
        //             args.iter()
        //                 .fold(RcDoc::nil(), |rs, x| rs.append(x.ident.to_string())),
        //         )
        //         .append(RcDoc::as_string(","))
        //         .append(RcDoc::space())
        //         .append(translate_pearlite(*term, top_ctx, idents.clone()))
        // }
        // pearlite_syn::term::Term::Absurd(_) => RcDoc::as_string("TODOAbsurd"),
        // pearlite_syn::term::Term::Pearlite(term) => RcDoc::as_string("TODOPearlite"),
        // pearlite_syn::term::Term::__Nonexhaustive => RcDoc::as_string("TODONonexhaustive"),
        // }
        a => {
            panic!("translate_pearlite_todo: {:#?}\n", a);
            // ExprKind::Underscore
        }
    };
    // Plain expressions are wrapped into an AST node carrying the caller's span.
    Quantified::Unquantified(Expr {
        id: NodeId::from_usize(0),
        kind,
        span,
        attrs: AttrVec::new(),
        tokens: None,
    })
}
pub(crate) fn translate_quantified_expr(
sess: &Session,
specials: &SpecialNames,
qe: Quantified<(Ident, Ty), Expr>,
) -> Quantified<(Ident, Spanned<BaseTyp>), Spanned<Expression>> {
match qe {
Quantified::Unquantified(expr) => {
Quantified::Unquantified(translate_expr_expects_exp(sess, specials, &expr).unwrap())
}
Quantified::Forall(ids, term) => Quantified::Forall(
ids.into_iter()
.map(|(id, ty)| {
(
id,
crate::ast_to_rustspec::translate_base_typ(sess, &ty).unwrap(),
)
})
.collect(),
Box::new(translate_quantified_expr(sess, specials, *term)),
),
Quantified::Exists(ids, term) => Quantified::Exists(
ids.into_iter()
.map(|(id, ty)| {
(
id,
crate::ast_to_rustspec::translate_base_typ(sess, &ty).unwrap(),
)
})
.collect(),
Box::new(translate_quantified_expr(sess, specials, *term)),
),
Quantified::Implication(a, b) => Quantified::Implication(
Box::new(translate_quantified_expr(sess, specials, *a)),
Box::new(translate_quantified_expr(sess, specials, *b)),
),
Quantified::Eq(a, b) => Quantified::Eq(
Box::new(translate_quantified_expr(sess, specials, *a)),
Box::new(translate_quantified_expr(sess, specials, *b)),
),
Quantified::Not(a) => {
Quantified::Not(Box::new(translate_quantified_expr(sess, specials, *a)))
}
}
}
/// Renders a rustc binary-operator token as its surface syntax.
fn binop_text(op: rustc_ast::token::BinOpToken) -> String {
    use rustc_ast::token::BinOpToken;
    let text = match op {
        BinOpToken::Plus => "+",
        BinOpToken::Minus => "-",
        BinOpToken::Star => "*",
        BinOpToken::Slash => "/",
        BinOpToken::Percent => "%",
        BinOpToken::Caret => "^",
        BinOpToken::And => "&",
        BinOpToken::Or => "|",
        BinOpToken::Shl => "<<",
        BinOpToken::Shr => ">>",
    };
    text.to_string()
}
/// Renders a single token tree back to its (approximate) surface syntax.
///
/// Used to re-textify the contents of `requires`/`ensures` attributes so they
/// can be re-parsed. Literals and identifiers are padded with spaces so that
/// adjacent tokens do not fuse together.
///
/// Panics on token kinds that are not handled yet.
fn tokentree_text(x: TokenTree) -> String {
    match x {
        TokenTree::Token(tok, ..) => match tok.kind {
            TokenKind::Eq => "=".to_string(),
            TokenKind::Lt => "<".to_string(),
            TokenKind::Le => "<=".to_string(),
            TokenKind::EqEq => "==".to_string(),
            TokenKind::Ne => "!=".to_string(),
            TokenKind::Ge => ">=".to_string(),
            TokenKind::Gt => ">".to_string(),
            TokenKind::AndAnd => "&&".to_string(),
            TokenKind::OrOr => "||".to_string(),
            TokenKind::Not => "!".to_string(),
            // Fixed: `~` was previously rendered as a backtick.
            TokenKind::Tilde => "~".to_string(),
            TokenKind::BinOp(op) => binop_text(op),
            TokenKind::BinOpEq(op) => binop_text(op) + "=",
            TokenKind::At => "@".to_string(),
            TokenKind::Dot => ".".to_string(),
            TokenKind::DotDot => "..".to_string(),
            TokenKind::DotDotDot => "...".to_string(),
            TokenKind::Comma => ",".to_string(),
            TokenKind::Semi => ";".to_string(),
            TokenKind::Colon => ":".to_string(),
            TokenKind::ModSep => "::".to_string(),
            TokenKind::RArrow => "->".to_string(),
            TokenKind::LArrow => "<-".to_string(),
            TokenKind::FatArrow => "=>".to_string(),
            // Fixed: `#` was previously rendered as "€".
            TokenKind::Pound => "#".to_string(),
            TokenKind::Dollar => "$".to_string(),
            // Fixed: `?` was previously rendered as "$" (duplicating Dollar).
            TokenKind::Question => "?".to_string(),
            TokenKind::Literal(x) => format![" {} ", x],
            TokenKind::Ident(sym, _) => format![" {} ", sym],
            y => {
                panic!(" (TODO: {:?})", y);
            }
        },
        // Delimited group: render the delimiters and recurse on the contents.
        TokenTree::Delimited(_, delim_token, inner) => {
            let (left, right) = match delim_token {
                Delimiter::Parenthesis => ("(", ")"),
                Delimiter::Bracket => ("[", "]"),
                Delimiter::Brace => ("{", "}"),
                Delimiter::Invisible => ("", ""),
            };
            left.to_string()
                + &inner
                    .trees()
                    .fold("".to_string(), |s, x| s + &tokentree_text(x.clone()))
                + right
        }
    }
}
/// If `attr` is a `#[requires(...)]` attribute, returns its body re-rendered
/// as text; otherwise returns `None`.
pub(crate) fn attribute_requires(attr: &Attribute) -> Option<String> {
    // Guard clause: bail out early for any other attribute name.
    if attr.name_or_empty().to_ident_string() != "requires" {
        return None;
    }
    let inner = crate::ast_to_rustspec::get_delimited_tree(attr.clone())?;
    let mut text = String::new();
    for tree in inner.trees() {
        text.push_str(&tokentree_text(tree.clone()));
    }
    Some(text)
}
/// If `attr` is an `#[ensures(...)]` attribute, returns its body re-rendered
/// as text; otherwise returns `None`.
pub(crate) fn attribute_ensures(attr: &Attribute) -> Option<String> {
    // Guard clause: bail out early for any other attribute name.
    if attr.name_or_empty().to_ident_string() != "ensures" {
        return None;
    }
    let inner = crate::ast_to_rustspec::get_delimited_tree(attr.clone())?;
    let mut text = String::new();
    for tree in inner.trees() {
        text.push_str(&tokentree_text(tree.clone()));
    }
    Some(text)
}
/// Replaces each unresolved binder identifier with a fresh resolved one and
/// records the old-to-new mapping in a copy of the name context.
///
/// Returns the freshened `(ident, type)` pairs plus the extended context.
/// Panics if any identifier is already resolved ("should not happen").
fn resolve_quantified_identifiers(
    ids: Vec<(Ident, Spanned<BaseTyp>)>,
    name_context: &crate::name_resolution::NameContext,
) -> (Vec<(Ident, Spanned<BaseTyp>)>, crate::name_resolution::NameContext) {
    // Pass 1: mint a fresh ident for every binder, keeping its type.
    let new_ids: Vec<(Ident, Spanned<BaseTyp>)> = ids
        .iter()
        .map(|(x, ty)| {
            let new_x = match x {
                Ident::Unresolved(s) => crate::name_resolution::to_fresh_ident(s, false),
                _ => panic!("should not happen"),
            };
            (new_x, ty.clone())
        })
        .collect();
    // Pass 2: fold the old→new pairs into an extended name context.
    let new_context = ids
        .iter()
        .zip(new_ids.clone().iter())
        .fold(name_context.clone(), |ctx, ((x, _), (new_x, _))| {
            crate::name_resolution::add_name(x, &new_x.clone(), ctx)
        });
    (new_ids, new_context)
}
/// Name-resolves a quantified expression.
///
/// Binder cases (`Forall`/`Exists`) freshen their bound identifiers and thread
/// the extended name context into the body only; connectives recurse with the
/// unchanged context.
pub(crate) fn resolve_quantified_expression(
    sess: &Session,
    qe: Quantified<(Ident, Spanned<BaseTyp>), Spanned<Expression>>,
    name_context: &crate::name_resolution::NameContext,
    top_level_ctx: &TopLevelContext,
) -> crate::name_resolution::ResolutionResult<Quantified<(Ident, Spanned<BaseTyp>), Spanned<Expression>>> {
    match qe {
        // Leaf: defer to the ordinary expression resolver.
        Quantified::Unquantified(e) => Ok(Quantified::Unquantified(crate::name_resolution::resolve_expression(
            sess,
            e,
            name_context,
            top_level_ctx,
        )?)),
        Quantified::Forall(ids, qe2) => {
            // Freshen binders, then resolve the body in the extended context.
            let (new_ids, new_context) = resolve_quantified_identifiers(ids, name_context);
            let qe2_resolved =
                resolve_quantified_expression(sess, *qe2, &new_context, top_level_ctx)?;
            Ok(Quantified::Forall(new_ids, Box::new(qe2_resolved)))
        }
        Quantified::Exists(ids, qe2) => {
            // Same freshening scheme as Forall.
            let (new_ids, new_context) = resolve_quantified_identifiers(ids, name_context);
            Ok(Quantified::Exists(
                new_ids,
                Box::new(resolve_quantified_expression(
                    sess,
                    *qe2,
                    &new_context,
                    top_level_ctx,
                )?),
            ))
        }
        Quantified::Implication(a, b) => Ok(Quantified::Implication(
            Box::new(resolve_quantified_expression(
                sess,
                *a,
                name_context,
                top_level_ctx,
            )?),
            Box::new(resolve_quantified_expression(
                sess,
                *b,
                name_context,
                top_level_ctx,
            )?),
        )),
        Quantified::Eq(a, b) => Ok(Quantified::Eq(
            Box::new(resolve_quantified_expression(
                sess,
                *a,
                name_context,
                top_level_ctx,
            )?),
            Box::new(resolve_quantified_expression(
                sess,
                *b,
                name_context,
                top_level_ctx,
            )?),
        )),
        Quantified::Not(x) => Ok(Quantified::Not(Box::new(resolve_quantified_expression(
            sess,
            *x,
            name_context,
            top_level_ctx,
        )?))),
    }
}
/// Pretty-prints a resolved quantified expression as Coq syntax.
///
/// `forall`/`exists` render space-separated `x : T` binders followed by a
/// comma; implication becomes `->`, equality `=`, and negation `~` with the
/// operand parenthesized.
pub(crate) fn translate_quantified_expression<'a>(
    qe: Quantified<(Ident, Spanned<BaseTyp>), Spanned<Expression>>,
    top_ctx: &'a TopLevelContext,
) -> RcDoc<'a, ()> {
    match qe {
        // Leaf: defer to the ordinary expression printer (span dropped).
        Quantified::Unquantified((e, _)) => crate::rustspec_to_coq::translate_expression(e, top_ctx),
        Quantified::Forall(ids, qe2) => RcDoc::as_string("forall")
            .append(RcDoc::space())
            .append(RcDoc::intersperse(
                ids.into_iter().map(|(x, (typ, _))| {
                    crate::rustspec_to_coq_base::translate_ident(x.clone())
                        .append(RcDoc::as_string(" : "))
                        .append(crate::rustspec_to_coq::translate_base_typ(typ))
                }),
                RcDoc::space(),
            ))
            .append(RcDoc::as_string(","))
            .append(RcDoc::line())
            .append(translate_quantified_expression(*qe2, top_ctx)),
        Quantified::Exists(ids, qe2) => RcDoc::as_string("exists")
            .append(RcDoc::space())
            .append(RcDoc::intersperse(
                ids.into_iter().map(|(x, (typ, _))| {
                    crate::rustspec_to_coq_base::translate_ident(x.clone())
                        .append(RcDoc::as_string(" : "))
                        .append(crate::rustspec_to_coq::translate_base_typ(typ))
                }),
                RcDoc::space(),
            ))
            .append(RcDoc::as_string(","))
            .append(RcDoc::line())
            .append(translate_quantified_expression(*qe2, top_ctx)),
        Quantified::Implication(qe2, qe3) => translate_quantified_expression(*qe2, top_ctx)
            .append(RcDoc::space())
            .append(RcDoc::as_string("->"))
            .append(RcDoc::line())
            .append(translate_quantified_expression(*qe3, top_ctx)),
        Quantified::Eq(qe2, qe3) => translate_quantified_expression(*qe2, top_ctx)
            .append(RcDoc::space())
            .append(RcDoc::as_string("="))
            .append(RcDoc::space())
            .append(translate_quantified_expression(*qe3, top_ctx)),
        Quantified::Not(qex) => RcDoc::as_string("~")
            .append(RcDoc::space())
            .append(crate::rustspec_to_coq_base::make_paren(translate_quantified_expression(*qex, top_ctx))),
    }
}
|
use ckb_error::{Error, ErrorKind};
use failure::Fail;
/// Errors produced while running a transaction script.
///
/// The `#[fail(display = ...)]` strings are user-facing and must stay stable.
#[derive(Fail, Debug, PartialEq, Eq, Clone)]
pub enum ScriptError {
    /// The field code_hash in script is invalid
    #[fail(display = "InvalidCodeHash")]
    InvalidCodeHash,
    /// The script consumes too much cycles
    #[fail(display = "ExceededMaximumCycles")]
    ExceededMaximumCycles,
    /// `script.type_hash` hits multiple cells with different data
    #[fail(display = "MultipleMatches")]
    MultipleMatches,
    /// Non-zero exit code returns by script
    #[fail(display = "ValidationFailure({})", _0)]
    ValidationFailure(i8),
}
// Lifts a ScriptError into the crate-wide Error, tagging it with
// ErrorKind::Script via `failure`'s context mechanism.
impl From<ScriptError> for Error {
    fn from(error: ScriptError) -> Self {
        error.context(ErrorKind::Script).into()
    }
}
|
// svd2rust-generated accessors for the TRIM_LDO_3 register; masks and shifts
// must match the hardware description exactly.
#[doc = "Reader of register TRIM_LDO_3"]
pub type R = crate::R<u32, super::TRIM_LDO_3>;
#[doc = "Writer for register TRIM_LDO_3"]
pub type W = crate::W<u32, super::TRIM_LDO_3>;
#[doc = "Register TRIM_LDO_3 `reset()`'s with value 0x10"]
impl crate::ResetValue for super::TRIM_LDO_3 {
    type Type = u32;
    // Power-on default 0x10: LVDET (bits 0:4) = 0x10, SLOPE_SB_BMULT = 0.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x10
    }
}
#[doc = "Reader of field `LVDET`"]
pub type LVDET_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `LVDET`"]
pub struct LVDET_W<'a> {
    w: &'a mut W,
}
impl<'a> LVDET_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // LVDET occupies bits 0:4 (mask 0x1f); other register bits preserved.
        self.w.bits = (self.w.bits & !0x1f) | ((value as u32) & 0x1f);
        self.w
    }
}
#[doc = "Reader of field `SLOPE_SB_BMULT`"]
pub type SLOPE_SB_BMULT_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `SLOPE_SB_BMULT`"]
pub struct SLOPE_SB_BMULT_W<'a> {
    w: &'a mut W,
}
impl<'a> SLOPE_SB_BMULT_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // SLOPE_SB_BMULT occupies bits 5:6 (2-bit mask 0x03 shifted by 5).
        self.w.bits = (self.w.bits & !(0x03 << 5)) | (((value as u32) & 0x03) << 5);
        self.w
    }
}
// Field readers extract the raw bits; writers return proxies that patch the
// field into the full 32-bit value.
impl R {
    #[doc = "Bits 0:4 - To trim the trip points of the LV-Detect block"]
    #[inline(always)]
    pub fn lvdet(&self) -> LVDET_R {
        LVDET_R::new((self.bits & 0x1f) as u8)
    }
    #[doc = "Bits 5:6 - To trim standby regulator beta-multiplier temp-co slope"]
    #[inline(always)]
    pub fn slope_sb_bmult(&self) -> SLOPE_SB_BMULT_R {
        SLOPE_SB_BMULT_R::new(((self.bits >> 5) & 0x03) as u8)
    }
}
impl W {
    #[doc = "Bits 0:4 - To trim the trip points of the LV-Detect block"]
    #[inline(always)]
    pub fn lvdet(&mut self) -> LVDET_W {
        LVDET_W { w: self }
    }
    #[doc = "Bits 5:6 - To trim standby regulator beta-multiplier temp-co slope"]
    #[inline(always)]
    pub fn slope_sb_bmult(&mut self) -> SLOPE_SB_BMULT_W {
        SLOPE_SB_BMULT_W { w: self }
    }
}
|
mod context;
mod manager;
use std::borrow::Cow;
use std::collections::HashSet;
use std::sync::Arc;
use anyhow::Result;
use crossbeam_channel::Sender;
use once_cell::sync::Lazy;
use parking_lot::Mutex;
use crate::stdio_server::providers::builtin::on_session_create;
use crate::stdio_server::{rpc::Call, types::ProviderId, MethodCall};
pub use self::context::{Scale, SessionContext, SyncFilterResults};
pub use self::manager::{NewSession, SessionManager};
/// Hashes of background cache-priming jobs currently in flight; used by
/// `handle_create` to avoid spawning a duplicate rg job for the same command.
static BACKGROUND_JOBS: Lazy<Arc<Mutex<HashSet<u64>>>> =
    Lazy::new(|| Arc::new(Mutex::new(HashSet::default())));
/// Identifier tying a session to its originating RPC call.
pub type SessionId = u64;
/// Per-provider hooks invoked by the session event loop.
#[async_trait::async_trait]
pub trait EventHandler: Send + Sync + 'static {
    /// Called when the cursor moves to another candidate.
    async fn handle_on_move(&mut self, msg: MethodCall, context: Arc<SessionContext>)
        -> Result<()>;
    /// Called when the user's query input changes.
    async fn handle_on_typed(
        &mut self,
        msg: MethodCall,
        context: Arc<SessionContext>,
    ) -> Result<()>;
}
/// One interactive provider session: shared context, a handler, and the
/// receiving end of the session's event channel.
#[derive(Debug, Clone)]
pub struct Session<T> {
    // Identifier taken from the originating RPC call.
    pub session_id: u64,
    pub context: Arc<SessionContext>,
    /// Each Session can have its own message processing logic.
    pub event_handler: T,
    // Events are pushed by the session manager and drained in start_event_loop.
    pub event_recv: crossbeam_channel::Receiver<SessionEvent>,
    pub source_scale: Scale,
}
/// Events a session can receive over its channel.
#[derive(Debug, Clone)]
pub enum SessionEvent {
    // Query text changed.
    OnTyped(MethodCall),
    // Cursor moved to another candidate.
    OnMove(MethodCall),
    Create,
    Terminate,
}
impl SessionEvent {
/// Simplified display of session event.
pub fn short_display(&self) -> Cow<'_, str> {
match self {
Self::OnTyped(msg) => format!("OnTyped, msg_id: {}", msg.id).into(),
Self::OnMove(msg) => format!("OnMove, msg_id: {}", msg.id).into(),
Self::Create => "Create".into(),
Self::Terminate => "Terminate".into(),
}
}
}
impl<T: EventHandler> Session<T> {
    /// Builds a session from an RPC call and returns it together with the
    /// sender half of its event channel.
    ///
    /// The source scale starts as `Scale::Indefinite` until `handle_create`
    /// refines it.
    pub fn new(call: Call, event_handler: T) -> (Self, Sender<SessionEvent>) {
        let (session_sender, session_receiver) = crossbeam_channel::unbounded();
        let session = Session {
            session_id: call.session_id(),
            // SessionContext is derived from the call via From<Call>.
            context: Arc::new(call.into()),
            event_handler,
            event_recv: session_receiver,
            source_scale: Scale::Indefinite,
        };
        (session, session_sender)
    }
    /// Sets the running signal to false, in case of the forerunner thread is still working.
    pub fn handle_terminate(&mut self) {
        let mut val = self.context.is_running.lock();
        *val.get_mut() = false;
        tracing::debug!(
            session_id = self.session_id,
            provider_id = %self.provider_id(),
            "Session terminated",
        );
    }
    /// Identifier of the provider backing this session.
    pub fn provider_id(&self) -> &ProviderId {
        &self.context.provider_id
    }
    /// Publishes the freshly-computed source scale to the editor side and
    /// stores it in the shared context.
    fn process_source_scale(&self, scale: Scale) {
        if let Some(total) = scale.total() {
            let method = "s:set_total_size";
            utility::println_json_with_length!(total, method);
        }
        // Show the first 100 lines immediately so the UI is not empty.
        if let Some(lines) = scale.initial_lines(100) {
            printer::decorate_lines::<i64>(
                lines,
                self.context.display_winwidth as usize,
                self.context.icon,
            )
            .print_on_session_create();
        }
        let mut val = self.context.scale.lock();
        *val = scale;
    }
    /// Runs provider initialization with a timeout; on timeout, grep-like
    /// providers fall back to priming their cache in a deduplicated
    /// background job (see BACKGROUND_JOBS).
    async fn handle_create(&mut self) {
        let context_clone = self.context.clone();
        const TIMEOUT: u64 = 300;
        match tokio::time::timeout(
            std::time::Duration::from_millis(TIMEOUT),
            on_session_create(context_clone),
        )
        .await
        {
            Ok(scale_result) => match scale_result {
                Ok(scale) => self.process_source_scale(scale),
                Err(e) => tracing::error!(?e, "Error occurred on creating session"),
            },
            Err(_) => {
                tracing::debug!(timeout = TIMEOUT, "Did not receive value in time");
                match self.context.provider_id.as_str() {
                    "grep" | "grep2" => {
                        let rg_cmd = crate::command::grep::RgBaseCommand::new(
                            self.context.cwd.to_path_buf(),
                        );
                        // Hash of the rg command identifies the job, so two
                        // sessions with the same cwd share one cache run.
                        let job_id = utility::calculate_hash(&rg_cmd.inner);
                        let mut background_jobs = BACKGROUND_JOBS.lock();
                        if background_jobs.contains(&job_id) {
                            tracing::debug!(job_id, "An existing job for grep/grep2");
                        } else {
                            tracing::debug!(job_id, "Spawning a background job for grep/grep2");
                            background_jobs.insert(job_id);
                            tokio::spawn(async move {
                                let res = rg_cmd.create_cache().await;
                                // Always deregister, even if caching failed.
                                let mut background_jobs = BACKGROUND_JOBS.lock();
                                background_jobs.remove(&job_id);
                                tracing::debug!(
                                    job_id,
                                    result = ?res,
                                    "The background job is done",
                                );
                            });
                        }
                    }
                    _ => {}
                }
            }
        }
    }
    /// Dispatches one event to the appropriate handler.
    async fn process_event(&mut self, event: SessionEvent) -> Result<()> {
        match event {
            SessionEvent::Terminate => self.handle_terminate(),
            SessionEvent::Create => self.handle_create().await,
            SessionEvent::OnMove(msg) => {
                self.event_handler
                    .handle_on_move(msg, self.context.clone())
                    .await?;
            }
            SessionEvent::OnTyped(msg) => {
                // TODO: use a buffered channel here, do not process on every
                // single char change.
                self.event_handler
                    .handle_on_typed(msg, self.context.clone())
                    .await?;
            }
        }
        Ok(())
    }
    /// Consumes the session and spawns its event loop on the tokio runtime.
    /// The loop ends when the channel's sender side is dropped.
    pub fn start_event_loop(mut self) {
        tokio::spawn(async move {
            tracing::debug!(
                session_id = self.session_id,
                provider_id = %self.provider_id(),
                "Spawning a new session task",
            );
            loop {
                match self.event_recv.recv() {
                    Ok(event) => {
                        tracing::debug!(event = ?event.short_display(), "Received an event");
                        // Handler errors are logged but do not kill the loop.
                        if let Err(err) = self.process_event(event).await {
                            tracing::debug!(?err, "Error processing SessionEvent");
                        }
                    }
                    Err(err) => {
                        tracing::debug!(?err, "The channel is possibly broken");
                        break;
                    }
                }
            }
        });
    }
}
|
//! Defines the astronomical length standards as newtypes
use core::{self, fmt};
use length::imperial::*;
use length::metric::*;
use composite::UnitName;
/// A newtype that wraps around `f64` and provides convenience functions for unit-aware and type-safe manipulation.
#[derive(Clone, Copy)]
pub struct AU(pub f64);
/// Plural alias so call sites read naturally.
pub type AUs = AU;
impl_full_unit!(AU);
impl_unit_debug!(AU => "AU");
// Conversion factors — presumably the number of target units per one AU
// (consistent with 1 AU ≈ 1.496e8 km); TODO confirm macro semantics.
impl_from_cf!(AU <===> 92955807.27302553 Mile);
impl_from_cf!(AU <===> 163602220800.52493 Yard);
impl_from_cf!(AU <===> 490806662401.5748 Foot);
impl_from_cf!(AU <===> 5889679948818.898 Inch);
/// A newtype that wraps around `f64` and provides convenience functions for unit-aware and type-safe manipulation.
#[derive(Clone, Copy)]
pub struct Lightyear(pub f64);
/// Plural alias so call sites read naturally.
pub type Lightyears = Lightyear;
impl_full_unit!(Lightyear);
impl_unit_debug!(Lightyear => "ly");
// Conversion factors: number of target units per one light-year
// (1 ly = 9_460_730_472_580.8 km, here rounded as below).
impl_from_cf!(Lightyear <===> 63241.07 AU);
impl_from_cf!(Lightyear <===> 9460729412789.648 Kilometer);
impl_from_cf!(Lightyear <===> 9460729412789648.0 Meter);
impl_from_cf!(Lightyear <===> 946072941278964800.0 Centimeter);
impl_from_cf!(Lightyear <===> 9460729412789648000.0 Millimeter);
impl_from_cf!(Lightyear <===> 9460729412789648000000.0 Micrometer);
// Fixed: the Picometer and Femtometer factors were 1000x too small (they held
// the nanometer and picometer magnitudes). Picometer = meters * 1e12,
// Femtometer = meters * 1e15.
impl_from_cf!(Lightyear <===> 9460729412789648000000000000.0 Picometer);
impl_from_cf!(Lightyear <===> 9460729412789648000000000000000.0 Femtometer);
impl_from_cf!(Lightyear <===> 5878624714659.916 Mile);
impl_from_cf!(Lightyear <===> 10346379497801454.0 Yard);
impl_from_cf!(Lightyear <===> 31039138493404360.0 Foot);
impl_from_cf!(Lightyear <===> 372469661920852352.0 Inch);
|
/*
* RIDB API Additional Functions 0.1
*
* The Recreation Information Database (RIDB) provides data resources to citizens, offering a single point of access to information about recreational opportunities nationwide. The RIDB represents an authoritative source of information and services for millions of visitors to federal lands, historic sites, museums, and other attractions/resources. This initiative integrates multiple Federal channels and sources about recreation opportunities into a one-stop, searchable database of recreational areas nationwide.
*
* OpenAPI spec version: 0.1.0
*
* Generated by: https://openapi-generator.tech
*/
#[allow(unused_imports)]
use serde_json::Value;
/// Successful response wrapper for the availability search endpoint
/// (openapi-generated model; serialized field name is "payload").
#[derive(Debug, Serialize, Deserialize)]
pub struct SearchAvailabilityOk {
  #[serde(rename = "payload")]
  payload: ::models::SearchAvailabilityOkPayload
}
impl SearchAvailabilityOk {
  /// Wraps the given payload in a response object.
  pub fn new(payload: ::models::SearchAvailabilityOkPayload) -> SearchAvailabilityOk {
    SearchAvailabilityOk { payload }
  }

  /// Replaces the payload in place.
  pub fn set_payload(&mut self, payload: ::models::SearchAvailabilityOkPayload) {
    self.payload = payload;
  }

  /// Builder-style variant of `set_payload` that returns `self`.
  pub fn with_payload(mut self, payload: ::models::SearchAvailabilityOkPayload) -> SearchAvailabilityOk {
    self.set_payload(payload);
    self
  }

  /// Borrows the payload.
  pub fn payload(&self) -> &::models::SearchAvailabilityOkPayload {
    &self.payload
  }
}
|
// svd2rust-generated field readers for the read-only ETZPC_HWCFGR register.
#[doc = "Register `ETZPC_HWCFGR` reader"]
pub type R = crate::R<ETZPC_HWCFGR_SPEC>;
#[doc = "Field `NUM_TZMA` reader - NUM_TZMA"]
pub type NUM_TZMA_R = crate::FieldReader;
#[doc = "Field `NUM_PER_SEC` reader - NUM_PER_SEC"]
pub type NUM_PER_SEC_R = crate::FieldReader;
#[doc = "Field `NUM_AHB_SEC` reader - NUM_AHB_SEC"]
pub type NUM_AHB_SEC_R = crate::FieldReader;
#[doc = "Field `CHUNKS1N4` reader - CHUNKS1N4"]
pub type CHUNKS1N4_R = crate::FieldReader;
// Each accessor extracts one byte-wide field from the 32-bit value.
impl R {
    #[doc = "Bits 0:7 - NUM_TZMA"]
    #[inline(always)]
    pub fn num_tzma(&self) -> NUM_TZMA_R {
        NUM_TZMA_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - NUM_PER_SEC"]
    #[inline(always)]
    pub fn num_per_sec(&self) -> NUM_PER_SEC_R {
        NUM_PER_SEC_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bits 16:23 - NUM_AHB_SEC"]
    #[inline(always)]
    pub fn num_ahb_sec(&self) -> NUM_AHB_SEC_R {
        NUM_AHB_SEC_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bits 24:31 - CHUNKS1N4"]
    #[inline(always)]
    pub fn chunks1n4(&self) -> CHUNKS1N4_R {
        CHUNKS1N4_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
#[doc = "ETZPC IP HW configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`etzpc_hwcfgr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ETZPC_HWCFGR_SPEC;
impl crate::RegisterSpec for ETZPC_HWCFGR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`etzpc_hwcfgr::R`](R) reader structure"]
impl crate::Readable for ETZPC_HWCFGR_SPEC {}
// Note: the register is Readable but not Writable — hardware configuration
// is reported, not set, through it.
#[doc = "`reset()` method sets ETZPC_HWCFGR to value 0x6002"]
impl crate::Resettable for ETZPC_HWCFGR_SPEC {
    const RESET_VALUE: Self::Ux = 0x6002;
}
|
//! Relocations-related types.
use std::convert::TryInto;
use crate::int::u24;
use crate::int::Int;
use crate::obj::Block;
use crate::syn::operand::Symbol;
/// An error occurring while relocating a symbol.
#[derive(Debug)]
pub enum RelocationError {
  /// Indicates that the symbol's address was not in the expected bank.
  WrongBank {
    /// The bank we wanted.
    expected: u8,
    /// The bank we got.
    got: u8,
  },
  /// Indicates that a symbol was too far; that is, a symbol wasn't actually
  /// within a byte offset range from the `relative_to` field.
  SymbolTooFar,
}
/// A relocation for a missing symbol.
///
/// A `Relocation` describes information that's missing from an assembled
/// `Block`, which can be filled in by a linker.
#[derive(Copy, Clone, Debug)]
pub struct Relocation<'asm> {
  /// Information for resolving the relocation.
  pub info: RelocationInfo,
  /// The symbol that is needed to resolve this relocation.
  pub source: Symbol<'asm>,
}
/// Information describing where a relocation is, and what conditions are
/// necessary to resolve it.
#[derive(Copy, Clone, Debug)]
pub struct RelocationInfo {
  /// An offset into the containing block pointing to the exact place where the
  /// symbol value needs to be written.
  pub offset: u16,
  /// The relocation type, which describes how many bytes must be written.
  pub ty: RelocationType,
}
impl RelocationInfo {
  /// Resolves this relocation in the given block, using the given absolute
  /// address as the relocated value.
  ///
  /// # Errors
  ///
  /// Returns `RelocationError::WrongBank` when `value` is not in the bank the
  /// relocation type demands, and `RelocationError::SymbolTooFar` when an
  /// 8-bit relative offset does not fit in an `i8`.
  pub fn resolve_in(
    self,
    block: &mut Block,
    value: u24,
  ) -> Result<(), RelocationError> {
    // First compute the integer (with its width) that will be patched in.
    let value = match self.ty {
      RelocationType::Absolute => Int::I24(value),
      RelocationType::BankRelative(bank) => {
        if bank == value.bank {
          Int::I16(value.addr)
        } else {
          return Err(RelocationError::WrongBank {
            expected: bank,
            got: value.bank,
          });
        }
      }
      RelocationType::AddrRelative16(address) => {
        if address.bank != value.bank {
          return Err(RelocationError::WrongBank {
            expected: address.bank,
            got: value.bank,
          });
        }
        // Wrapping subtraction yields the two's-complement relative offset.
        Int::I16(value.addr.wrapping_sub(address.addr))
      }
      RelocationType::AddrRelative8(address) => {
        if address.bank != value.bank {
          return Err(RelocationError::WrongBank {
            expected: address.bank,
            got: value.bank,
          });
        }
        let offset = value.addr.wrapping_sub(address.addr);
        // The signed offset must fit in a single byte.
        let offset: i8 = match (offset as i16).try_into() {
          Ok(offset) => offset,
          _ => return Err(RelocationError::SymbolTooFar),
        };
        Int::I8(offset as u8)
      }
    };
    // Then write it little-endian at the recorded offset.
    value
      .write_le(&mut block[self.offset..])
      .expect("the space being overwritten should already be zeroed");
    Ok(())
  }
}
/// A type of a relocation.
///
/// A `RelocationType` describes how large the relocation is and what
/// information can be used to compress the symbol's address.
#[derive(Copy, Clone, Debug)]
pub enum RelocationType {
  /// An absolute, 24-bit relocation. No checks necessary.
  Absolute,
  /// A bank-relative 16-bit relocation. The bank byte of the symbol *must*
  /// match the given value.
  ///
  /// This type of relocation is useful for most 16-bit addressing modes.
  BankRelative(u8),
  /// An address-relative 16-bit relocation. The bank byte of the symbol *must*
  /// match the bank of the given address. In addition, the lower 16 bits of the
  /// address will be subtracted from those of the symbol, forming a relative
  /// offset.
  ///
  /// This type of relocation is useful for 16-bit branches.
  AddrRelative16(u24),
  /// An address-relative 8-bit relocation. The bank byte of the symbol *must*
  /// match the bank of the given address. In addition, the lower 16 bits of the
  /// address will be subtracted from those of the symbol, forming a relative
  /// offset. In addition, this relative offset must be convertible from `i16`
  /// to `i8` without loss of precision.
  ///
  /// This type of relocation is useful for 8-bit branches.
  AddrRelative8(u24),
}
|
/// Reads one line from stdin, trims it, and parses it into `T`.
/// Panics on malformed input (acceptable for contest I/O).
fn read<T: std::str::FromStr>() -> T {
    let mut line = String::new();
    std::io::stdin().read_line(&mut line).ok();
    line.trim().parse().ok().unwrap()
}
/// Reads one line and parses each whitespace-separated token into `T`.
fn read_vec<T: std::str::FromStr>() -> Vec<T> {
    let line: String = read();
    line.split_whitespace()
        .map(|token| token.parse().ok().unwrap())
        .collect()
}
/// Computes C(n, r) exactly (no modulus) via C(n, r) = n * C(n-1, r-1) / r;
/// the division is always exact. Overflows u64 for large arguments, so
/// callers keep r small.
fn cmb_sub(n: u64, r: u64) -> u64 {
    match r {
        0 => 1,
        _ => n * cmb_sub(n - 1, r - 1) / r,
    }
}
fn cmb(n:u64,r:u64)->u64{
if r < n-r{
cmb_sub(n, r) % 1000000007
}else{
cmb_sub(n, n-r)% 1000000007
}
}
/// Entry point: reads `n k` from stdin and, for each i in 1..=k, prints
/// cmb(k-1, i-1) * cmb(n-k+1, i) mod 1e9+7, or 0 when i > n-k+1.
/// (Presumably a balls-into-slots counting problem — semantics inferred.)
fn main() {
    let input: Vec<u64> = read_vec();
    let (n, k) = (input[0], input[1]);
    for i in 1..=k {
        if i > n - k + 1 {
            println!("0");
        } else {
            println!("{}", (cmb(k - 1, i - 1) * cmb(n - k + 1, i)) % 1000000007);
        }
    }
}
|
#[cfg(test)]
mod cli {
    //! End-to-end tests driving the binary through stdin/stdout with
    //! `assert_cmd`. Each test feeds newline-delimited JSON and asserts the
    //! exact filtered output.
    //!
    //! Fixed: four test names misspelled "property" as "porperty".
    use std::process::Command;
    use assert_cmd::prelude::*;
    #[test]
    fn should_match_single_json_when_selector_is_prop() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg(".name");
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer("{\"name\":\"jeff goldblum\"}\n");
        assert_cmd
            .assert()
            .success()
            .stdout("{\"name\":\"jeff goldblum\"}\n");
    }
    #[test]
    fn should_match_only_root_of_json_when_selector_is_prop_with_start_matcher() {
        let mut cmd = Command::main_binary().unwrap();
        // `-^` anchors matching at the JSON root.
        cmd.arg("-^").arg(".name");
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{\"name\":\"jeff goldblum\"}
            {\"person\":{\"name\":\"John Doe\"}}",
        );
        assert_cmd
            .assert()
            .success()
            .stdout("{\"name\":\"jeff goldblum\"}\n");
    }
    #[test]
    fn should_match_only_json_with_prop_when_selector_is_prop() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg(".name");
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{\"name\":\"jeff goldblum\"}
            {\"id\":\"404c18ce-04ac-457c-99f5-d548b27aa583\"}\n",
        );
        assert_cmd
            .assert()
            .success()
            .stdout("{\"name\":\"jeff goldblum\"}\n");
    }
    #[test]
    fn should_match_json_with_deep_matching_props() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg(".job.title");
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{\"name\":\"John Doe\",\"job\":{\"title\":\"Unknown\"}}
            {\"name\":\"John Doe\",\"job\":{}}
            {\"id\":\"404c18ce-04ac-457c-99f5-d548b27aa583\"}\n",
        );
        assert_cmd
            .assert()
            .success()
            .stdout("{\"name\":\"John Doe\",\"job\":{\"title\":\"Unknown\"}}\n");
    }
    #[test]
    fn should_match_multiple_json_with_matching_prop_when_selector_is_prop() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg(".name");
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{\"name\":\"jeff goldblum\"}
            {\"id\":\"404c18ce-04ac-457c-99f5-d548b27aa583\"}
            {\"name\":\"blanco white\"}\n",
        );
        assert_cmd
            .assert()
            .success()
            .stdout("{\"name\":\"jeff goldblum\"}\n{\"name\":\"blanco white\"}\n");
    }
    #[test]
    fn should_match_json_property_using_dictionary_index_selector() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg(r#"{"name"}"#);
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{\"name\":\"jeff goldblum\"}
            {\"id\":\"404c18ce-04ac-457c-99f5-d548b27aa583\"}
            {\"name\":\"blanco white\"}\n",
        );
        assert_cmd.assert().success().stdout(
            "{\"name\":\"jeff goldblum\"}
            {\"name\":\"blanco white\"}\n",
        );
    }
    #[test]
    fn should_match_json_property_with_a_value_when_using_the_exact_matcher() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg(r#"{"name":"blanco white"}"#);
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{\"name\":\"jeff goldblum\"}
            {\"id\":\"404c18ce-04ac-457c-99f5-d548b27aa583\"}
            {\"name\":\"blanco white\"}\n",
        );
        assert_cmd
            .assert()
            .success()
            .stdout("{\"name\":\"blanco white\"}\n");
    }
    #[test]
    fn should_match_json_property_with_a_value_when_using_the_contains_exact_matcher() {
        let mut cmd = Command::main_binary().unwrap();
        // `~:` matches values containing the given word exactly.
        cmd.arg(r#"{"name"~:"white"}"#);
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{\"name\":\"jeff goldblum\"}
            {\"id\":\"404c18ce-04ac-457c-99f5-d548b27aa583\"}
            {\"name\":\"blanco white\"}\n",
        );
        assert_cmd
            .assert()
            .success()
            .stdout("{\"name\":\"blanco white\"}\n");
    }
    #[test]
    fn should_match_json_property_with_a_value_when_using_the_contains_matcher() {
        let mut cmd = Command::main_binary().unwrap();
        // `*:` matches values containing the given substring.
        cmd.arg(r#"{"name"*:"f "}"#);
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{\"name\":\"jeff goldblum\"}
            {\"id\":\"404c18ce-04ac-457c-99f5-d548b27aa583\"}
            {\"name\":\"chef goldblum\"}\n",
        );
        assert_cmd.assert().success().stdout(
            "{\"name\":\"jeff goldblum\"}
            {\"name\":\"chef goldblum\"}\n",
        );
    }
    #[test]
    fn should_match_json_with_inner_matching_props() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg(r#".job{"title":"Unknown-title"}"#);
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{\"name\":\"John Doe\",\"job\":{\"title\":\"Unknown-title\"}}
            {\"name\":\"John Doe\",\"title\":\"mr\"}
            {\"name\":\"John Doe\",\"self\":{\"name\":\"John Doe\",\"job\":{\"title\":\"Unknown-title\"}}}
            {\"id\":\"404c18ce-04ac-457c-99f5-d548b27aa583\"}\n",
        );
        assert_cmd.assert().success().stdout(
            "{\"name\":\"John Doe\",\"job\":{\"title\":\"Unknown-title\"}}
            {\"name\":\"John Doe\",\"self\":{\"name\":\"John Doe\",\"job\":{\"title\":\"Unknown-title\"}}}\n",
        );
    }
}
|
// svd2rust-generated accessors for the PROC_CONFIG register; masks and shifts
// must match the hardware description exactly.
#[doc = "Reader of register PROC_CONFIG"]
pub type R = crate::R<u32, super::PROC_CONFIG>;
#[doc = "Writer for register PROC_CONFIG"]
pub type W = crate::W<u32, super::PROC_CONFIG>;
#[doc = "Register PROC_CONFIG `reset()`'s with value 0x1000_0000"]
impl crate::ResetValue for super::PROC_CONFIG {
    type Type = u32;
    // Power-on default: PROC1_DAP_INSTID (bits 28:31) = 1, everything else 0.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x1000_0000
    }
}
#[doc = "Reader of field `PROC1_DAP_INSTID`"]
pub type PROC1_DAP_INSTID_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `PROC1_DAP_INSTID`"]
pub struct PROC1_DAP_INSTID_W<'a> {
    w: &'a mut W,
}
impl<'a> PROC1_DAP_INSTID_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // PROC1_DAP_INSTID occupies bits 28:31 (4-bit mask shifted by 28).
        self.w.bits = (self.w.bits & !(0x0f << 28)) | (((value as u32) & 0x0f) << 28);
        self.w
    }
}
#[doc = "Reader of field `PROC0_DAP_INSTID`"]
pub type PROC0_DAP_INSTID_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `PROC0_DAP_INSTID`"]
pub struct PROC0_DAP_INSTID_W<'a> {
    w: &'a mut W,
}
impl<'a> PROC0_DAP_INSTID_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // PROC0_DAP_INSTID occupies bits 24:27 (4-bit mask shifted by 24).
        self.w.bits = (self.w.bits & !(0x0f << 24)) | (((value as u32) & 0x0f) << 24);
        self.w
    }
}
// The HALTED flags are read-only single-bit fields.
#[doc = "Reader of field `PROC1_HALTED`"]
pub type PROC1_HALTED_R = crate::R<bool, bool>;
#[doc = "Reader of field `PROC0_HALTED`"]
pub type PROC0_HALTED_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bits 28:31 - Configure proc1 DAP instance ID.\\n Recommend that this is NOT changed until you require debug access in multi-chip environment\\n WARNING: do not set to 15 as this is reserved for RescueDP"]
    #[inline(always)]
    pub fn proc1_dap_instid(&self) -> PROC1_DAP_INSTID_R {
        PROC1_DAP_INSTID_R::new(((self.bits >> 28) & 0x0f) as u8)
    }
    #[doc = "Bits 24:27 - Configure proc0 DAP instance ID.\\n Recommend that this is NOT changed until you require debug access in multi-chip environment\\n WARNING: do not set to 15 as this is reserved for RescueDP"]
    #[inline(always)]
    pub fn proc0_dap_instid(&self) -> PROC0_DAP_INSTID_R {
        PROC0_DAP_INSTID_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    #[doc = "Bit 1 - Indication that proc1 has halted"]
    #[inline(always)]
    pub fn proc1_halted(&self) -> PROC1_HALTED_R {
        PROC1_HALTED_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 0 - Indication that proc0 has halted"]
    #[inline(always)]
    pub fn proc0_halted(&self) -> PROC0_HALTED_R {
        PROC0_HALTED_R::new((self.bits & 0x01) != 0)
    }
}
// Only the DAP instance-ID fields are writable; the HALTED bits have no
// write proxies.
impl W {
    #[doc = "Bits 28:31 - Configure proc1 DAP instance ID.\\n Recommend that this is NOT changed until you require debug access in multi-chip environment\\n WARNING: do not set to 15 as this is reserved for RescueDP"]
    #[inline(always)]
    pub fn proc1_dap_instid(&mut self) -> PROC1_DAP_INSTID_W {
        PROC1_DAP_INSTID_W { w: self }
    }
    #[doc = "Bits 24:27 - Configure proc0 DAP instance ID.\\n Recommend that this is NOT changed until you require debug access in multi-chip environment\\n WARNING: do not set to 15 as this is reserved for RescueDP"]
    #[inline(always)]
    pub fn proc0_dap_instid(&mut self) -> PROC0_DAP_INSTID_W {
        PROC0_DAP_INSTID_W { w: self }
    }
}
|
// Copyright 2019-2020 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Substrate chain configurations.
//!
//! This crate contains structs and utilities to declare
//! a runtime-specific configuration file (a.k.a chain spec).
//!
//! Basic chain spec type containing all required parameters is
//! [`ChainSpec`](./struct.ChainSpec.html). It can be extended with
//! additional options that contain configuration specific to your chain.
//! Usually the extension is going to be an amalgam of types exposed
//! by Substrate core modules. To allow the core modules to retrieve
//! their configuration from your extension you should use the
//! `ChainSpecExtension` macro exposed by this crate.
//!
//! ```rust
//! use std::collections::HashMap;
//! use sc_chain_spec::{GenericChainSpec, ChainSpecExtension};
//!
//! #[derive(Clone, Debug, serde::Serialize, serde::Deserialize, ChainSpecExtension)]
//! pub struct MyExtension {
//! pub known_blocks: HashMap<u64, String>,
//! }
//!
//! pub type MyChainSpec<G> = GenericChainSpec<G, MyExtension>;
//! ```
//!
//! Some parameters may require different values depending on the
//! current blockchain height (a.k.a. forks). You can use `ChainSpecGroup`
//! macro and provided [`Forks`](./struct.Forks.html) structure to put
//! such parameters to your chain spec.
//! This will allow to override a single parameter starting at specific
//! block number.
//!
//! ```rust
//! use sc_chain_spec::{Forks, ChainSpecGroup, ChainSpecExtension, GenericChainSpec};
//!
//! #[derive(Clone, Debug, serde::Serialize, serde::Deserialize, ChainSpecGroup)]
//! pub struct ClientParams {
//! max_block_size: usize,
//! max_extrinsic_size: usize,
//! }
//!
//! #[derive(Clone, Debug, serde::Serialize, serde::Deserialize, ChainSpecGroup)]
//! pub struct PoolParams {
//! max_transaction_size: usize,
//! }
//!
//! #[derive(Clone, Debug, serde::Serialize, serde::Deserialize, ChainSpecGroup, ChainSpecExtension)]
//! pub struct Extension {
//! pub client: ClientParams,
//! pub pool: PoolParams,
//! }
//!
//! pub type BlockNumber = u64;
//!
//! /// A chain spec supporting forkable `ClientParams`.
//! pub type MyChainSpec1<G> = GenericChainSpec<G, Forks<BlockNumber, ClientParams>>;
//!
//! /// A chain spec supporting forkable `Extension`.
//! pub type MyChainSpec2<G> = GenericChainSpec<G, Forks<BlockNumber, Extension>>;
//! ```
//!
//! It's also possible to have a set of parameters that is allowed to change
//! with block numbers (i.e. is forkable), and another set that is not subject to changes.
//! This is also possible by declaring an extension that contains `Forks` within it.
//!
//!
//! ```rust
//! use serde::{Serialize, Deserialize};
//! use sc_chain_spec::{Forks, GenericChainSpec, ChainSpecGroup, ChainSpecExtension};
//!
//! #[derive(Clone, Debug, Serialize, Deserialize, ChainSpecGroup)]
//! pub struct ClientParams {
//! max_block_size: usize,
//! max_extrinsic_size: usize,
//! }
//!
//! #[derive(Clone, Debug, Serialize, Deserialize, ChainSpecGroup)]
//! pub struct PoolParams {
//! max_transaction_size: usize,
//! }
//!
//! #[derive(Clone, Debug, Serialize, Deserialize, ChainSpecExtension)]
//! pub struct Extension {
//! pub client: ClientParams,
//! #[forks]
//! pub pool: Forks<u64, PoolParams>,
//! }
//!
//! pub type MyChainSpec<G> = GenericChainSpec<G, Extension>;
//! ```
mod chain_spec;
mod extension;
pub use chain_spec::{
ChainSpec as GenericChainSpec, LightSyncState, NoExtension, SerializableLightSyncState,
};
pub use extension::{get_extension, Extension, Fork, Forks, GetExtension, Group};
pub use sc_chain_spec_derive::{ChainSpecExtension, ChainSpecGroup};
pub use sp_chain_spec::{ChainType, Properties};
use sc_network::config::MultiaddrWithPeerId;
use sc_telemetry::TelemetryEndpoints;
use serde::{de::DeserializeOwned, Serialize};
use sp_core::storage::Storage;
use sp_runtime::BuildStorage;
/// A set of traits for the runtime genesis config.
pub trait RuntimeGenesis: Serialize + DeserializeOwned + BuildStorage {}
// Blanket impl: any serde-(de)serializable type that can build genesis
// storage automatically qualifies as a runtime genesis config.
impl<T: Serialize + DeserializeOwned + BuildStorage> RuntimeGenesis for T {}
/// Common interface of a chain specification.
pub trait ChainSpec: BuildStorage + Send {
    /// Spec name.
    fn name(&self) -> &str;
    /// Spec id.
    fn id(&self) -> &str;
    /// Type of the chain.
    fn chain_type(&self) -> ChainType;
    /// A list of bootnode addresses.
    fn boot_nodes(&self) -> &[MultiaddrWithPeerId];
    /// Telemetry endpoints (if any)
    fn telemetry_endpoints(&self) -> &Option<TelemetryEndpoints>;
    /// Network protocol id.
    fn protocol_id(&self) -> Option<&str>;
    /// Additional loosely-typed properties of the chain.
    ///
    /// Returns an empty JSON object if 'properties' not defined in config
    fn properties(&self) -> Properties;
    /// Returns a reference to defined chain spec extensions.
    fn extensions(&self) -> &dyn GetExtension;
    /// Add a bootnode to the list.
    fn add_boot_node(&mut self, addr: MultiaddrWithPeerId);
    /// Return spec as JSON.
    fn as_json(&self, raw: bool) -> Result<String, String>;
    /// Return StorageBuilder for this spec.
    fn as_storage_builder(&self) -> &dyn BuildStorage;
    /// Returns a cloned `Box<dyn ChainSpec>`.
    fn cloned_box(&self) -> Box<dyn ChainSpec>;
    /// Set the storage that should be used by this chain spec.
    ///
    /// This will be used as storage at genesis.
    fn set_storage(&mut self, storage: Storage);
    /// Hardcode information to allow light clients to sync quickly into the chain spec.
    fn set_light_sync_state(&mut self, light_sync_state: SerializableLightSyncState);
}
// Debug output shows only the two identifying fields; a full spec dump
// would be enormous.
impl std::fmt::Debug for dyn ChainSpec {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let name = self.name();
        let id = self.id();
        write!(f, "ChainSpec(name = {:?}, id = {:?})", name, id)
    }
}
|
extern crate slack;
extern crate rustc_serialize;
use self::rustc_serialize::json::Json;
use reply;
use gomi;
/// Returns true when `raw_json` looks like a user message posted in the
/// watched #random channel (id C45SC46VC).
/// (For reference: the #test channel id is C45M040DA.)
fn pick_up_user_action(raw_json: &str) -> bool {
    let in_watched_channel = raw_json.contains("\"channel\":\"C45SC46VC\"");
    let looks_like_user_message =
        raw_json.contains("\"user\":") && raw_json.contains("\"text\":");
    in_watched_channel && looks_like_user_message
}
/// Slack RTM event handler for the bot.
pub struct MyHandler;
#[allow(unused_variables)]
impl slack::EventHandler for MyHandler {
    /// Called for every incoming Slack RTM event.
    fn on_event(&mut self,
                cli: &mut slack::RtmClient,
                event: Result<slack::Event, slack::Error>,
                raw_json: &str) {
        println!("on_event(event: {:?}, raw_json: {:?})", event, raw_json);
        // U453MJ2HW is the bot's own user id ("popuko and pipimi"), so skip
        // its messages to avoid the bot replying to itself.
        if raw_json.contains("\"user\":\"U453MJ2HW\"") {
            return;
        }
        // Only react to user messages in the watched channel.
        if pick_up_user_action(raw_json) {
            // Parse the raw event payload; silently drop malformed JSON.
            let json = match Json::from_str(raw_json) {
                Ok(val) => val,
                Err(_) => return,
            };
            // Extract the message text; events without text are ignored.
            let text_data = match json.find("text") {
                Some(val) => val,
                None => return,
            };
            // NOTE(review): `to_string()` on a Json value keeps JSON quoting
            // around the text — confirm `reply_message` expects that form.
            let text_string = text_data.to_string();
            let text_str = text_string.as_str();
            reply::reply_message(cli, text_str);
        }
    }
    /// Ping handler; also used as a periodic tick to send mentions.
    fn on_ping(&mut self, cli: &mut slack::RtmClient) {
        println!("on_ping");
        gomi::send_mention(cli);
    }
    /// Connection closed.
    fn on_close(&mut self, cli: &mut slack::RtmClient) {
        println!("on_close");
    }
    /// Connection established.
    fn on_connect(&mut self, cli: &mut slack::RtmClient) {
        println!("on_connect");
    }
}
|
use ai::libc::c_char;
use ai::internals::AtNode;
use ai::string::string;
use ai::node::Node;
use ai::node_entry::AI_NODE_ALL;
use std::marker::PhantomData;
use std::ptr;
use std::ffi::CString;
/// Guard for an active Arnold session; ending it happens on `Drop`.
#[derive(Debug)]
pub struct Universe<'a> {
    // Carries the lifetime that nodes created from this universe borrow.
    phantom: PhantomData<&'a i32>,
}
/// Returns true while an Arnold session (begun with `AiBegin`) is active.
pub fn universe_is_active() -> bool {
    unsafe { AiUniverseIsActive() }
}
impl<'a> Universe<'a> {
    /// Starts an Arnold session and returns a guard for it.
    ///
    /// Returns `None` if a universe is already active — only one session
    /// may exist at a time.
    pub fn new() -> Option<Universe<'a>> {
        if universe_is_active() {
            None
        } else {
            unsafe { AiBegin(); }
            Some(Universe { phantom: PhantomData })
        }
    }
    /// Creates an unnamed node of the given type in the universe.
    pub fn create_node(&mut self, node_type: &str) -> Node<'a> {
        let p = unsafe { AiNode(string::from(node_type), string::new(), ptr::null()) };
        Node::from_ptr(p)
    }
    /// Creates a named node of the given type in the universe.
    pub fn create_node_with_name(&mut self, node_type: &str, node_name: &str) -> Node<'a> {
        let p = unsafe { AiNode(string::from(node_type), string::from(node_name), ptr::null()) };
        Node::from_ptr(p)
    }
    /// Writes the scene (all node types, `AI_NODE_ALL`) to the file `out`;
    /// `binary` selects binary encoding and the `open_procs` flag is
    /// hard-coded to `true`. Returns the status code from `AiASSWrite`.
    ///
    /// NOTE(review): `CString::new(out).unwrap()` panics if `out` contains
    /// an interior NUL byte — confirm callers never pass one.
    pub fn write_flattened(&self, out: &str, binary: bool) -> i32 {
        let cs = CString::new(out).unwrap();
        unsafe { AiASSWrite(cs.as_ptr(), AI_NODE_ALL, true, binary) }
    }
}
impl<'a> Drop for Universe<'a> {
    /// Ends the Arnold session when the guard goes out of scope.
    fn drop(&mut self) {
        unsafe { AiEnd() }
    }
}
// Raw FFI bindings into the Arnold renderer library (`libai`).
#[link(name = "ai")]
extern "C" {
    fn AiBegin();
    fn AiEnd();
    fn AiUniverseIsActive() -> bool;
    fn AiNode(nentry_name: string, name: string, parent: *const AtNode) -> *mut AtNode;
    fn AiASSWrite(filename: *const c_char, mask: i32, open_procs: bool, binary: bool) -> i32;
}
// Exercises the begin/end lifecycle: the universe must be active exactly
// while the `Universe` guard is alive. Requires linking against libai.
#[test]
fn universe_test() {
    assert!(!universe_is_active());
    {
        let _u = Universe::new();
        assert!(universe_is_active());
    }
    assert!(!universe_is_active());
}
|
use embedded_hal::blocking::spi;
use std::io::{Read, Write};
use std::sync::{Arc, Mutex};
use crate::io::{send_clear, send_spi_init, send_spi_write};
/// An `embedded-hal` SPI bus that forwards all traffic over a shared
/// serial channel `T`.
pub struct SPI<T> {
    // Bus identifier sent with every protocol message.
    ident: String,
    // Channel shared with other peripherals; guarded by a mutex.
    channel: Arc<Mutex<Box<T>>>,
}
impl<T> SPI<T>
where
    T: Read + Write,
{
    /// Creates a new remote SPI bus over the shared `channel`.
    ///
    /// Sends a clear message followed by an SPI-init message describing the
    /// pins and clock speed. Both are best-effort: errors are deliberately
    /// ignored (`.ok()`), matching previous behavior.
    pub fn new(
        ident: String,
        sck: String,
        miso: String,
        mosi: String,
        speed: u32,
        channel: Arc<Mutex<Box<T>>>,
    ) -> Self {
        {
            // Lock the shared channel once and hold the guard across both
            // setup messages, so another user of the channel cannot
            // interleave traffic between the clear and the init (the
            // previous code re-acquired the lock for each call).
            let mut guard = channel.lock().unwrap();
            send_clear(&mut *guard).ok();
            send_spi_init(&mut *guard, &ident, &sck, &miso, &mosi, speed).ok();
        }
        SPI { ident, channel }
    }
}
impl<T> spi::Write<u8> for SPI<T>
where
    T: Read + Write,
{
    type Error = std::io::Error;
    /// Writes `bytes` to the remote SPI bus identified by `self.ident`.
    fn write(&mut self, bytes: &[u8]) -> Result<(), Self::Error> {
        send_spi_write(&mut *self.channel.lock().unwrap(), &self.ident, bytes)
    }
}
|
#![no_std]
#![feature(alloc)]
#[macro_use]
extern crate alloc;
use alloc::vec::Vec;
extern crate common;
use common::bytesrepr::ToBytes;
use common::contract_api::call_contract;
use common::contract_api::pointers::ContractPointer;
/// Contract entry point: increments the counter contract once, reads the
/// value back, and asserts it equals 1.
#[no_mangle]
pub extern "C" fn call() {
    // Address of the counter contract.
    // This hash comes from blake2b256( [0;32] ++ [0;8] ++ [0;4] )
    let hash = ContractPointer::Hash([
        164, 102, 153, 51, 236, 214, 169, 167, 126, 44, 250, 247, 179, 214, 203, 229, 239, 69, 145,
        25, 5, 153, 113, 55, 255, 188, 176, 201, 7, 4, 42, 100,
    ]);
    // Invoke the "inc" method (no interesting return value)...
    let arg = "inc";
    let args = vec![arg.to_bytes()];
    let _result: () = call_contract(hash.clone(), &args, &Vec::new());
    // ...then read the counter back via "get".
    let value: i32 = {
        let arg = "get";
        let args = vec![arg.to_bytes()];
        call_contract(hash, &args, &Vec::new())
    };
    // After a single increment the counter is expected to read 1.
    assert_eq!(value, 1);
}
|
//
// zhttpto.rs
//
// Starting code for PS1
// Running on Rust 0.9
//
// Note that this code has serious security risks! You should not run it
// on any system with access to sensitive files.
//
// University of Virginia - cs4414 Spring 2014
// Weilin Xu and David Evans
// Version 0.3
#[feature(globs)];
use std::io::*;
use std::io::net::ip::{SocketAddr};
use std::{str};
static IP: &'static str = "127.0.0.1";
static PORT: int = 4414;
static mut visitor_count: int =0;
// Entry point: accept TCP connections and answer each with a fixed HTML
// page that echoes the visitor count and part of the request line.
// (Rust 0.9-era code: `do spawn`, `~str`, `println` are pre-1.0 syntax.)
fn main() {
    let addr = from_str::<SocketAddr>(format!("{:s}:{:d}", IP, PORT)).unwrap();
    let mut acceptor = net::tcp::TcpListener::bind(addr).listen();
    println(format!("Listening on [{:s}] ...", addr.to_str()));
    for stream in acceptor.incoming() {
        // Spawn a task to handle the connection
        do spawn {
            let mut stream = stream;
            match stream {
                Some(ref mut s) => {
                    // Log the peer address when it is available.
                    match s.peer_name() {
                        Some(pn) => {println(format!("Received connection from: [{:s}]", pn.to_str()));},
                        None => ()
                    }
                },
                None => ()
            }
            // NOTE(review): the read result is ignored; the 500-byte buffer
            // may be partially filled or the request truncated.
            let mut buf = [0, ..500];
            stream.read(buf);
            let request_str = str::from_utf8(buf);
            // NOTE(review): slice(5,10) assumes the request line is at least
            // 10 bytes ("GET /path ..."); shorter requests will fail here.
            let mut split = request_str.slice(5,10).to_owned();
            println(format!("Received request :\n{:s}", request_str));
            // NOTE(review): `static mut` counter incremented from multiple
            // tasks without synchronization — counts may be lost.
            unsafe{
                visitor_count += 1;
                println(format!("Visitor count {:d}",visitor_count));
                let response: ~str =
                format!("HTTP/1.1 200 OK\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n
<doctype !html><html><head><title>Hello, Rust!</title>
<style>body \\{ background-color: \\#111; color: \\#FFEEAA \\}
h1 \\{ font-size:2cm; text-align: center; color: black; text-shadow: 0 0 4mm red\\}
h2 \\{ font-size:2cm; text-align: center; color: black; text-shadow: 0 0 4mm green\\}
</style></head>
<body>
<h1>Greetings, Krusty! </h1>
visitor count: {:d}
requested path : {:s}
</body></html>\r\n ",visitor_count,split) ;
                stream.write(response.as_bytes());
            }
            println!("Connection terminates.");
        }
    }
}
use wasm_bindgen::prelude::*;
// Client-side router: matches `window.location.pathname` against a list of
// regex patterns and evaluates the html expression of the first match.
macro_rules! router {
    // Base case: only the fallback pattern `_` is left.
    {$path:ident then _ => $default:expr} => {{
        $default
    }};
    // Recursive case: anchor the first pattern as `^pattern$`; on a match,
    // optionally bind the regex captures and evaluate its expression,
    // otherwise recurse into the remaining patterns.
    {$path:ident then $pattern:tt $(($capture:ident))? => $html:expr, $($patterns:tt $(($captures:ident))? => $htmls:expr),+} => {{
        let pattern = format!("^{}$", $pattern);
        let pattern = regex::Regex::new(pattern.as_str()).unwrap();
        if let Some(_capture) = pattern.captures(&$path) {
            $(
                let $capture = _capture;
            )?
            $html
        } else {
            router! {$path then $($patterns $(($captures))? => $htmls),+}
        }
    }};
    // Entry point: fetch the current pathname and start matching.
    {$($patterns:tt $(($captures:ident))? => $htmls:expr),+} => {{
        let path = web_sys::window().unwrap().location().pathname().unwrap();
        let path = path.as_str();
        router! {path then $($patterns $(($captures))? => $htmls),+}
    }}
}
/// Pushes `path` onto the browser history without reloading the page.
pub fn jump_to(path: &str) {
    let window = web_sys::window().unwrap();
    let history = window.history().unwrap();
    // Errors from the History API are deliberately ignored.
    let _ = history.push_state_with_url(&JsValue::null(), "", Some(path));
}
|
use blocks::case::{Case, TypeCase};
use pancurses::Window;
/// A game map: a flat collection of drawable cases (tiles).
pub struct Map {
    list_items: Vec<Case>,
}
impl Map {
    /// Creates an empty map with no cases.
    pub fn new() -> Self {
        Self {
            list_items: Vec::new(),
        }
    }
    /// Clears the window, redraws the border and every case, then refreshes.
    pub fn draw(&self, window: &Window) {
        window.clear();
        window.draw_box('|', '-');
        for case in self.list_items.iter() {
            case.draw(window);
        }
        window.refresh();
    }
    /// Fills coordinates 1..size_x × 1..size_y with `Void` cases.
    pub fn create_empty(&mut self, size_x: i32, size_y: i32) {
        for x in 1..size_x {
            self.list_items
                .extend((1..size_y).map(|y| Case::new(x, y, TypeCase::Void)));
        }
    }
    /// Appends a single case to the map.
    pub fn add_case(&mut self, c: Case) {
        self.list_items.push(c);
    }
}
|
use crate::engine::{ControlMap, Engine};
use crate::config;
use std::sync::mpsc;
use synthesizer_io_core::graph::{Message, Note};
/// Per-channel polyphonic note allocator: routes incoming note events into
/// a fixed pool of voices and forwards resulting messages to the engine.
pub struct NoteModule {
    // One fixed-size voice pool per channel.
    voices: [Voices; config::CHANNEL_COUNT],
    //note_ch_tx : mpsc::Sender::<NoteEvent>
}
impl NoteModule {
    /// Creates a module with every voice idle on every channel.
    pub fn new() -> NoteModule {
        NoteModule {
            voices: [NONE_VOICES; config::CHANNEL_COUNT],
            //note_ch_tx: note_ch_tx
        }
    }
    /// Routes one note-on/off event on `channel` to a voice and notifies
    /// the engine.
    ///
    /// Allocation policy:
    /// * note-off frees the voice currently holding that note;
    /// * note-on claims the first idle voice;
    /// * if no voice is idle, the oldest voice (smallest timestamp) is
    ///   stolen: a note-off for its current note is sent first.
    ///
    /// NOTE(review): `note_receivers` is indexed by voice index below —
    /// confirm it always has at least `VOICE_COUNT` entries.
    pub fn note_event(&mut self, engine: &mut Engine, note_event : NoteEvent, channel : usize) {
        let midi_num = note_event.note;
        let velocity = note_event.velocity;
        let on = note_event.down;
        let ts = note_event.timestamp;
        let targets = engine.get_control_map(channel).note_receivers.clone();
        let mut vx = 0;
        let mut curr_voice : Option<usize> = None;
        let mut oldest_voice = 0;
        let mut oldest_ts = u64::max_value();
        for voice in self.voices[channel].iter_mut () {
            //println!("{},{}", voice.note.unwrap_or(0.0), voice.timestamp);
            // Track the least-recently-claimed voice as the steal candidate.
            if voice.timestamp < oldest_ts {
                oldest_ts = voice.timestamp;
                oldest_voice = vx;
            }
            if !on{
                // Note-off: release the voice that holds this note.
                if voice.note.unwrap_or(-1.0) == midi_num{
                    voice.note = None;
                    voice.velocity = 0.0;
                    curr_voice = Some(vx);
                    break;
                }
            }else if voice.note == None {
                // Note-on: claim the first idle voice.
                voice.note = Some(midi_num);
                voice.velocity = velocity;
                voice.timestamp = ts;
                curr_voice = Some(vx);
                break;
            }
            vx += 1;
        }
        if curr_voice.is_none() {
            if !on {
                // Note-off for a note no voice is playing: nothing to do.
                return;
            }
            // All voices busy: steal the oldest one by first sending a
            // note-off for the note it currently plays. The `unwrap` is
            // safe: had the voice been idle, the loop above would have
            // claimed it via the `voice.note == None` branch.
            let note = Note {
                ixs: targets[oldest_voice].clone().into_boxed_slice(),
                midi_num: self.voices[channel][oldest_voice].note.unwrap(),
                velocity: velocity,
                on: false,
                timestamp: ts,
            };
            engine.send(Message::Note(note));
            self.voices[channel][oldest_voice].note = Some(midi_num);
            curr_voice = Some(oldest_voice);
        }
        // Forward the event itself to the chosen voice's receivers.
        let note = Note {
            ixs: targets[curr_voice.unwrap_or(0)].clone().into_boxed_slice(),
            midi_num: midi_num,
            velocity: velocity,
            on: on,
            timestamp: ts,
        };
        engine.send(Message::Note(note));
        //self.note_ch_tx.send(note_event).unwrap_or_else(|err| println!("{:?}", err));
    }
    /// Read access to the voice pool of `channel`.
    pub fn get_voices(&self, channel : usize) -> &Voices{
        &self.voices[channel]
    }
}
/// A single incoming note message.
#[derive(Clone)]
pub struct NoteEvent {
    // true for note-on, false for note-off.
    pub down: bool,
    // Note number (stored as f32).
    pub note: f32,
    pub velocity: f32,
    pub timestamp : u64,
}
/// Placeholder "no event" value.
pub const NONE_NOTE : NoteEvent = NoteEvent{down: false, note: 0.0, velocity: 0.0, timestamp: 0};
/// The fixed voice pool of one channel.
type Voices = [Voice; config::VOICE_COUNT];
// An idle voice, and a full pool of idle voices.
const NONE_VOICE : Voice = Voice{note: None, velocity: 0.0, timestamp: 0};
const NONE_VOICES : [Voice; config::VOICE_COUNT] = [NONE_VOICE; config::VOICE_COUNT];
/// One polyphony voice: the note it currently plays (`None` when idle),
/// its velocity, and the timestamp it was claimed at (for voice stealing).
#[derive(Clone)]
pub struct Voice {
    pub note : Option<f32>,
    pub velocity: f32,
    pub timestamp : u64,
}
|
pub mod player;
pub mod renderer;
pub mod checks;
pub mod game;
pub mod coin;
pub mod board;
use crate::game::Game;
// Entry point: build a fresh game and run it.
fn main() {
    let mut game = Game::new_game();
    game.start()
}
#[doc = "Reader of register IFCR"]
pub type R = crate::R<u32, super::IFCR>;
#[doc = "Writer for register IFCR"]
pub type W = crate::W<u32, super::IFCR>;
#[doc = "Register IFCR `reset()`'s with value 0"]
impl crate::ResetValue for super::IFCR {
    type Type = u32;
    // The register reads back as all-zero after reset.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `EOTC`"]
pub type EOTC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EOTC`"]
pub struct EOTC_W<'a> {
w: &'a mut W,
}
impl<'a> EOTC_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "Reader of field `TXTFC`"]
pub type TXTFC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TXTFC`"]
pub struct TXTFC_W<'a> {
w: &'a mut W,
}
impl<'a> TXTFC_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `UDRC`"]
pub type UDRC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `UDRC`"]
pub struct UDRC_W<'a> {
w: &'a mut W,
}
impl<'a> UDRC_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Reader of field `OVRC`"]
pub type OVRC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OVRC`"]
pub struct OVRC_W<'a> {
w: &'a mut W,
}
impl<'a> OVRC_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Reader of field `CRCEC`"]
pub type CRCEC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CRCEC`"]
pub struct CRCEC_W<'a> {
w: &'a mut W,
}
impl<'a> CRCEC_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
#[doc = "Reader of field `TIFREC`"]
pub type TIFREC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TIFREC`"]
pub struct TIFREC_W<'a> {
w: &'a mut W,
}
impl<'a> TIFREC_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
self.w
}
}
#[doc = "Reader of field `MODFC`"]
pub type MODFC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MODFC`"]
pub struct MODFC_W<'a> {
w: &'a mut W,
}
impl<'a> MODFC_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "Reader of field `TSERFC`"]
pub type TSERFC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TSERFC`"]
pub struct TSERFC_W<'a> {
w: &'a mut W,
}
impl<'a> TSERFC_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
#[doc = "Reader of field `SUSPC`"]
pub type SUSPC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SUSPC`"]
pub struct SUSPC_W<'a> {
w: &'a mut W,
}
impl<'a> SUSPC_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
self.w
}
}
impl R {
#[doc = "Bit 3 - EOTC"]
#[inline(always)]
pub fn eotc(&self) -> EOTC_R {
EOTC_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - TXTFC"]
#[inline(always)]
pub fn txtfc(&self) -> TXTFC_R {
TXTFC_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - UDRC"]
#[inline(always)]
pub fn udrc(&self) -> UDRC_R {
UDRC_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - OVRC"]
#[inline(always)]
pub fn ovrc(&self) -> OVRC_R {
OVRC_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - CRCEC"]
#[inline(always)]
pub fn crcec(&self) -> CRCEC_R {
CRCEC_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 8 - TIFREC"]
#[inline(always)]
pub fn tifrec(&self) -> TIFREC_R {
TIFREC_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 9 - MODFC"]
#[inline(always)]
pub fn modfc(&self) -> MODFC_R {
MODFC_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 10 - TSERFC"]
#[inline(always)]
pub fn tserfc(&self) -> TSERFC_R {
TSERFC_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 11 - SUSPC"]
#[inline(always)]
pub fn suspc(&self) -> SUSPC_R {
SUSPC_R::new(((self.bits >> 11) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 3 - EOTC"]
#[inline(always)]
pub fn eotc(&mut self) -> EOTC_W {
EOTC_W { w: self }
}
#[doc = "Bit 4 - TXTFC"]
#[inline(always)]
pub fn txtfc(&mut self) -> TXTFC_W {
TXTFC_W { w: self }
}
#[doc = "Bit 5 - UDRC"]
#[inline(always)]
pub fn udrc(&mut self) -> UDRC_W {
UDRC_W { w: self }
}
#[doc = "Bit 6 - OVRC"]
#[inline(always)]
pub fn ovrc(&mut self) -> OVRC_W {
OVRC_W { w: self }
}
#[doc = "Bit 7 - CRCEC"]
#[inline(always)]
pub fn crcec(&mut self) -> CRCEC_W {
CRCEC_W { w: self }
}
#[doc = "Bit 8 - TIFREC"]
#[inline(always)]
pub fn tifrec(&mut self) -> TIFREC_W {
TIFREC_W { w: self }
}
#[doc = "Bit 9 - MODFC"]
#[inline(always)]
pub fn modfc(&mut self) -> MODFC_W {
MODFC_W { w: self }
}
#[doc = "Bit 10 - TSERFC"]
#[inline(always)]
pub fn tserfc(&mut self) -> TSERFC_W {
TSERFC_W { w: self }
}
#[doc = "Bit 11 - SUSPC"]
#[inline(always)]
pub fn suspc(&mut self) -> SUSPC_W {
SUSPC_W { w: self }
}
}
|
extern mod std;
extern mod zmq;
use std::io;
use std::str;
use std::task;
// REP side: receive one request string, reply with a greeting, then return.
fn new_server(socket: zmq::Socket) {
    let msg = socket.recv_str(0).unwrap();
    io::println(format!("server received {}", msg));
    let msg = format!("hello {}", msg);
    io::println(format!("server sending {}", msg));
    match socket.send_str(msg, 0) {
        Ok(()) => { },
        Err(e) => fail!(e.to_str())
    }
}
// REQ side: configure the socket, send one request and print the reply.
fn new_client(socket: zmq::Socket) {
    io::println("starting client");
    // Cap both send and receive queues at 10 messages.
    socket.set_sndhwm(10).unwrap();
    socket.set_rcvhwm(10).unwrap();
    io::println(format!("rcvhwm: {}", socket.get_rcvhwm().unwrap()));
    io::println(format!("sndhwm: {}", socket.get_sndhwm().unwrap()));
    socket.set_identity("identity".as_bytes()).unwrap();
    let identity = socket.get_identity().unwrap();
    io::println(format!("identity: {}", str::from_utf8(identity)));
    let msg = "foo";
    io::println(format!("client sending {}", msg));
    socket.send_str(msg, 0).unwrap();
    let msg = socket.recv_str(0).unwrap();
    io::println(format!("client recieving {}", msg));
}
// Demo: wire a REQ client to a REP server over localhost and exchange one
// round-trip message. (Pre-1.0 Rust task API.)
fn main() {
    let (major, minor, patch) = zmq::version();
    io::println(format!("version: {:d} {:d} {:d}", major, minor, patch));
    let ctx = zmq::Context::new();
    let server_socket = ctx.socket(zmq::REP).unwrap();
    let client_socket = ctx.socket(zmq::REQ).unwrap();
    // Connect the two sockets to each other.
    server_socket.bind("tcp://127.0.0.1:3456").unwrap();
    client_socket.connect("tcp://127.0.0.1:3456").unwrap();
    // We need to start the server in a separate thread as it blocks.
    let mut task = task::task();
    task.sched_mode(task::SingleThreaded);
    task.spawn_with(server_socket, new_server);
    new_client(client_socket);
}
|
use colored::*;
use nod::{NodError, Node, Progress, Result};
use pbr::{ProgressBar, Units};
/// Adapter exposing a terminal progress bar through the `Progress` trait.
struct CliProgress<'a> {
    pb: &'a mut ProgressBar<std::io::Stdout>,
}
impl<'a> CliProgress<'a> {
    /// Wraps a mutable reference to an existing progress bar.
    pub fn new(pb: &'a mut ProgressBar<std::io::Stdout>) -> CliProgress<'a> {
        CliProgress { pb }
    }
}
impl<'a> Progress for CliProgress<'a> {
    // NOTE(review): `progress` is treated as a delta (it is *added* to the
    // bar). If the callback reports a cumulative value instead, this
    // over-counts — confirm against the `Progress` trait contract.
    fn progress(&mut self, progress: u64, _total: u64) {
        self.pb.add(progress);
    }
}
/// Downloads, unpacks and links the requested node `version`.
///
/// Returns an error when the version is unknown or any download/unpack/link
/// step fails.
pub fn use_cmd(node: &Node, version: &str) -> Result<()> {
    // Resolve the requested version, failing early when it is unknown
    // (replaces the previous is_none()/unwrap() pair).
    let v = node
        .get_version(version)
        .ok_or(NodError::Other("no version"))?;
    let size = node.download_size(&v)?;
    let mut pb = ProgressBar::new(size);
    pb.set_units(Units::Bytes);
    pb.set_width(Some(80));
    println!("Downloading: {}", v.version.bold());
    {
        // Scope the mutable borrow of `pb` so `finish_print` can use it after.
        let mut progress = CliProgress::new(&mut pb);
        node.download(&v, &mut progress)?;
    }
    let done = format!("Download ... {}", "done".green());
    pb.finish_print(&done);
    print!("\nUnpacking ... ");
    node.unpack(&v)?;
    print!("{}", "done".green());
    node.link(&v)?;
    Ok(())
}
/// Runs the active node version.
/// TODO: currently an unimplemented stub — does nothing and reports success.
pub fn run_cmd(node: &Node) -> Result<()> {
    Ok(())
}
/// Prints every locally installed node version, or a hint when none exist.
pub fn list_cmd(node: &Node) -> Result<()> {
    let locals = node.installed_versions()?;
    // Idiomatic emptiness check (was `locals.len() == 0`).
    if locals.is_empty() {
        println!("No installed version yet.");
        return Ok(());
    }
    println!("{}", "Installed version:".bold());
    for v in locals {
        println!("{}", v);
    }
    Ok(())
}
/// Prints the newest `count` remote node versions; versions that are also
/// installed locally are highlighted in bold.
pub fn listremote_cmd(node: &Node, count: usize) -> Result<()> {
    let locals = node.installed_versions()?;
    let versions = node.available_versions()?;
    let vs: Vec<String> = versions
        .iter()
        .take(count)
        .map(|x| x.version.clone())
        .collect();
    println!("{}:", "Node versions".bold());
    for v in vs {
        // `contains` replaces the previous match on `position(..)`.
        if locals.contains(&v) {
            println!("{}", v.bold());
        } else {
            println!("{}", v);
        }
    }
    Ok(())
}
/// Deletes cached downloads and prints the amount of space freed (in kb).
pub fn clear_cache(node: &Node) -> Result<()> {
    print!("Clearing cache ... ");
    let size = node.clean_cache()?;
    print!("done\nFreed {}kb\n", size);
    Ok(())
}
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// AwsAccountListResponse : List of enabled AWS accounts.
/// AwsAccountListResponse : List of enabled AWS accounts.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AwsAccountListResponse {
    /// List of enabled AWS accounts.
    /// Omitted from the serialized form entirely when `None`.
    #[serde(rename = "accounts", skip_serializing_if = "Option::is_none")]
    pub accounts: Option<Vec<crate::models::AwsAccount>>,
}
impl AwsAccountListResponse {
    /// Creates an empty response with no account list set.
    pub fn new() -> AwsAccountListResponse {
        AwsAccountListResponse {
            accounts: None,
        }
    }
}
|
use std::io::Read;
use crate::error::{TychoError, TychoResult};
use crate::ident::ValueIdent;
use crate::read::func::{read_byte, read_bytes};
use crate::read::length::read_length;
use crate::read::number::{read_number, read_number_ident};
use crate::read::string::{read_char, read_string};
use crate::{Value, Uuid};
/// Reads a one-byte value-type tag from `reader`.
///
/// The Number tag (0x04) carries an extra byte describing the numeric
/// type; any unknown tag is reported as an `InvalidIdent` error.
pub(crate) fn read_value_ident<R: Read>(reader: &mut R) -> TychoResult<ValueIdent> {
    let byte = read_byte(reader)?;
    match byte {
        0x00 => Ok(ValueIdent::Null),
        0x01 => Ok(ValueIdent::Boolean),
        0x02 => Ok(ValueIdent::String),
        0x03 => Ok(ValueIdent::Char),
        0x04 => Ok(ValueIdent::Number(read_number_ident(reader)?)),
        0x05 => Ok(ValueIdent::Bytes),
        0x06 => Ok(ValueIdent::UUID),
        _ => Err(TychoError::InvalidIdent { found: byte, expecting: "value ident".to_string() })
    }
}
pub(crate) fn read_value<R: Read>(reader: &mut R, ident: &ValueIdent) -> TychoResult<Value> {
match ident {
ValueIdent::Null => Ok(Value::Null),
ValueIdent::Boolean => Ok(Value::Boolean(read_byte(reader)? == 0x01)),
ValueIdent::String => Ok(Value::String(read_string(reader)?)),
ValueIdent::Char => Ok(Value::Char(read_char(reader)?)),
ValueIdent::Number(n) => Ok(Value::Number(read_number(reader, n)?)),
ValueIdent::Bytes => {
let length = read_length(reader)?;
Ok(Value::Bytes(read_bytes(reader, length)?))
}
// todo: slow + suffering
ValueIdent::UUID => {
let bytes = [
read_byte(reader)?, read_byte(reader)?, read_byte(reader)?, read_byte(reader)?,
read_byte(reader)?, read_byte(reader)?, read_byte(reader)?, read_byte(reader)?,
read_byte(reader)?, read_byte(reader)?, read_byte(reader)?, read_byte(reader)?,
read_byte(reader)?, read_byte(reader)?, read_byte(reader)?, read_byte(reader)?,
];
Ok(Value::UUID(Uuid::from_slice(bytes)))
}
}
} |
use super::Style;
/// Specifies the format for outputting css.
///
/// The format is the style (expanded or compressed) and the precision
/// for numeric values.
#[derive(Clone, Copy, Debug)]
pub struct Format {
    pub style: Style,
    pub precision: usize,
}
impl Format {
pub fn introspect() -> Self {
let mut t = Self::default();
t.style = Style::Introspection;
t
}
pub fn is_compressed(&self) -> bool {
self.style == Style::Compressed
}
pub fn is_introspection(&self) -> bool {
self.style == Style::Introspection
}
}
impl Default for Format {
    // Expanded output with 10 digits of numeric precision.
    fn default() -> Format {
        Format {
            style: Style::Expanded,
            precision: 10,
        }
    }
}
/// A small container binding a value with an output format.
///
/// See e.g. [`css::Value::format`].
///
/// [`css::Value::format`]: ../css/enum.Value.html#method.format
pub struct Formatted<'a, T> {
    /// The value to output.
    pub value: &'a T,
    /// The format to output it with.
    pub format: Format,
}
|
/// A composer record: display name and year of birth.
struct Person { name: String, birth: i32 }
/// Builds a fixed list of Renaissance/Baroque composers and prints each
/// one as "name, born year".
pub fn compose() {
    let composers = vec![
        Person { name: "palestrina".to_string(), birth: 1525 },
        Person { name: "dowland".to_string(), birth: 1563 },
        Person { name: "lully".to_string(), birth: 1632 },
    ];
    for Person { name, birth } in &composers {
        println!("{}, born {}", name, birth);
    }
}
|
use components::{Link, RelativeTime};
use eos::*;
use prelude::*;
use stdweb::web::document;
use views::svg;
/// Page showing the polls created by a single account.
pub struct ProfilePage {
    props: Props,
    // Polls created by `props.account`, tracked through loading states.
    polls: EosData<Vec<Poll>>,
    // Bridge to the EOS agent; kept alive so the callback stays registered.
    eos_agent: Box<Bridge<EosAgent>>,
}
/// Properties identifying which chain and account this profile shows.
#[derive(PartialEq, Clone, Default)]
pub struct Props {
    pub context: Context,
    pub chain: Chain,
    pub account: AccountName,
}
/// Component messages: only responses coming back from the EOS agent.
pub enum Msg {
    Eos(EosOutput),
}
impl Component for ProfilePage {
    type Message = Msg;
    type Properties = Props;
    fn create(props: Self::Properties, mut link: ComponentLink<Self>) -> Self {
        let chain = props.chain.clone();
        let eos_callback = link.send_back(Msg::Eos);
        let mut eos_agent = EosAgent::new(chain, eos_callback);
        // Kick off loading the account's polls immediately.
        eos_agent.send(EosInput::GetPolls(props.account.clone()));
        ProfilePage {
            props,
            eos_agent,
            polls: EosData::default(),
        }
    }
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::Eos(output) => match output {
                EosOutput::Polls(account, data) => {
                    // Ignore poll responses for other accounts (e.g. stale
                    // requests after navigating to a different profile).
                    if account == self.props.account {
                        self.polls = data;
                        true
                    } else {
                        false
                    }
                }
                _ => false,
            },
        }
    }
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        self.props = props;
        true
    }
}
// View helpers: one renderer per loading state plus the poll list items.
impl ProfilePage {
    /// Placeholder shown while polls are loading.
    fn view_loading(&self) -> Html<Self> {
        html! {
            <div class="poll_list -loading", >
                { "Loading..." }
            </div>
        }
    }
    /// Error state. The error detail itself is not displayed — only an
    /// icon (`error` is intentionally unused).
    fn view_error(&self, error: &EosError) -> Html<Self> {
        html! {
            <div class="poll_list -error", >
                { svg::link_cross() }
            </div>
        }
    }
    /// Empty state: the account has no polls (or none were requested yet).
    fn view_empty(&self) -> Html<Self> {
        html! {
            <div class="poll_list -empty", >
                { svg::eos() }
            </div>
        }
    }
    /// Renders the loaded list of polls.
    fn view_items(&self, polls: &[Poll]) -> Html<Self> {
        html! {
            <ul class="poll_list -loaded", >
                { for polls.iter().map(|poll| self.view_item(poll)) }
            </ul>
        }
    }
    /// Renders one poll as a link plus its submission time.
    fn view_item(&self, poll: &Poll) -> Html<Self> {
        let poll_route = Route::PollVoting(self.props.chain.to_chain_id_prefix(), poll.id.clone());
        html! {
            <li class="poll", >
                <Link: class="poll_title",
                    route=poll_route,
                    text=poll.title.clone(),
                />
                <div class="poll_details", >
                    <div class="poll_create_time", >
                        { "Submitted " }
                        <RelativeTime: timestamp=poll.create_time, />
                    </div>
                </div>
            </li>
        }
    }
}
impl Page for ProfilePage {
    // Page title is simply the account name being viewed.
    fn get_title(&self) -> String {
        self.props.account.clone()
    }
    fn get_class(&self) -> String {
        "profile_page".to_string()
    }
    // Always `Loaded`: the poll list renders its own loading/error states.
    fn get_state(&self) -> PageState {
        PageState::Loaded
    }
    // Dispatch on the remote-data state of the poll list.
    fn get_content(&self) -> Html<Self> {
        match &self.polls {
            EosData::NotAsked => self.view_empty(),
            EosData::Loading => self.view_loading(),
            EosData::Success(data) => {
                if data.is_empty() {
                    self.view_empty()
                } else {
                    self.view_items(&data)
                }
            }
            EosData::Failure(error) => self.view_error(error),
        }
    }
    fn get_route(&self) -> Route {
        Route::Profile(
            self.props.chain.to_chain_id_prefix(),
            self.props.account.clone(),
        )
    }
    // Breadcrumb trail: Home -> chain home; the profile itself is the
    // current page and therefore not included.
    fn get_breadcrumbs(&self) -> Vec<(Route, String)> {
        let chain = &self.props.chain;
        let chain_id_prefix = chain.to_chain_id_prefix();
        vec![
            (Route::Home(None), "Home".to_string()),
            (
                Route::Home(Some(chain_id_prefix.clone())),
                chain.long_name.clone(),
            ),
        ]
    }
    fn get_description(&self) -> String {
        // TODO
        self.get_title()
    }
}
page_view! { ProfilePage }
|
// https://beta.atcoder.jp/contests/abc003/tasks/abc003_3
// Competitive-programming stdin reader. Every arm panics (unwrap) on
// malformed input, which is acceptable for judge-supplied data.
macro_rules! scan {
    // scan!(T): read one line and parse it as a single T.
    ($t:ty) => {
        {
            let mut line: String = String::new();
            std::io::stdin().read_line(&mut line).unwrap();
            line.trim().parse::<$t>().unwrap()
        }
    };
    // scan!(T1, T2, ...): read one whitespace-separated line into a tuple.
    ($($t:ty),*) => {
        {
            let mut line: String = String::new();
            std::io::stdin().read_line(&mut line).unwrap();
            let mut iter = line.split_whitespace();
            (
                $(iter.next().unwrap().parse::<$t>().unwrap(),)*
            )
        }
    };
    // scan!(T; n): read n lines, one T per line, into a Vec<T>.
    ($t:ty; $n:expr) => {
        (0..$n).map(|_|
            scan!($t)
        ).collect::<Vec<_>>()
    };
    // scan!(T1, T2, ...; n): read n tuple lines into a Vec of tuples.
    ($($t:ty),*; $n:expr) => {
        (0..$n).map(|_|
            scan!($($t),*)
        ).collect::<Vec<_>>()
    };
    // scan!(T ;;): read one line of whitespace-separated values into Vec<T>.
    ($t:ty ;;) => {
        {
            let mut line: String = String::new();
            std::io::stdin().read_line(&mut line).unwrap();
            line.split_whitespace()
                .map(|t| t.parse::<$t>().unwrap())
                .collect::<Vec<_>>()
        }
    };
    // scan!(T ;; n): read n such lines into a Vec<Vec<T>>.
    ($t:ty ;; $n:expr) => {
        (0..$n).map(|_| scan!($t ;;)).collect::<Vec<_>>()
    };
}
fn main() {
let (n, k) = scan!(usize, usize);
let mut r = String::new();
let _ = std::io::stdin().read_line(&mut r);
let mut r = r.split_whitespace()
.map(|x| x.parse::<usize>().unwrap())
.collect::<Vec<_>>();
r.sort();
let mut c = 0.0;
for i in n-k..n {
c = (c + r[i] as f32) / 2.0;
}
println!("{}", c);
}
|
#[macro_use(lift)]
extern crate carboxyl;
extern crate conv;
extern crate num;
extern crate rustc_serialize;
extern crate sdl2;
extern crate sdl2_image;
extern crate sdl2_ttf;
extern crate snes_spc;
extern crate time;
use carboxyl::Sink;
use sdl2::audio::AudioSpecDesired;
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use std::cmp::{min, max};
mod audio;
mod brobot;
mod event;
mod map;
mod physics;
mod ratio;
mod renderer;
mod scene;
mod textbox;
mod tilepicker;
use audio::{SpcPlayer, Mixer};
use brobot::controlled_sprite;
use event::{IOEvent, translate_event};
use map::MapLayer;
use ratio::{Ratio, Scalable};
use renderer::{RenderContext, HPos, VPos};
use scene::{Scene, sprite, text};
use textbox::Textbox;
use tilepicker::TilePicker;
/// Entry point: initializes SDL (video, TTF, audio), wires up the reactive
/// (carboxyl) signal network for input/camera/scale, then runs a
/// fixed-timestep edit/play loop and prints frame-timing metrics on exit.
fn main() {
    let sdl_context = sdl2::init().unwrap();
    let video = sdl_context.video().unwrap();
    let ttf = sdl2_ttf::init().unwrap();
    let screen_w = 960;
    let screen_h = 600;
    let window = video.window("Tarnished", screen_w, screen_h)
        .position_centered()
        .resizable()
        .build()
        .unwrap();
    let mut renderer = window.renderer()
        .accelerated()
        //.present_vsync()
        .build()
        .unwrap();
    let mut render_context = RenderContext::new(ttf);
    // The default scaling factor we'll apply when rendering
    let default_scale = Ratio::from_integer(4u32);
    // Start making noise
    let audio_subsystem = sdl_context.audio().unwrap();
    let desired_spec = AudioSpecDesired {
        freq: Some(64000),
        channels: Some(2),
        samples: None
    };
    // The callback builds the mixer on the audio thread; it starts playing
    // the SPC track as soon as `resume` is called below.
    let audio = audio_subsystem.open_playback(None, &desired_spec, |spec| {
        println!("Audio initialized: {:?}", spec);
        let mut mixer = Mixer::new();
        mixer.play(SpcPlayer::new("assets/FireSpring.spc"));
        mixer
    }).unwrap();
    audio.resume();
    // Draw some stuff
    let starman = sprite("assets/starmanjr",
        HPos::Center(200), VPos::Center(125));
    let textbox = Textbox::new("assets/box",
        Rect::new(12, 12, 32, 16));
    let hello = text("$0.00", "assets/orangekid", 30, 18);
    // Fall back to a blank 25x16 map if the saved one can't be loaded.
    let mut map = MapLayer::from_file("assets/map.json")
        .unwrap_or(MapLayer::new("assets/cotp", (16, 16), 25, vec![0;25*16]));
    // XXX: note that this widget is rendered in unscaled space, so its width
    // of 960 is actually the full window width. Soon these different spaces
    // should be managed more cleanly.
    let mut tilepicker = TilePicker::new("assets/cotp", 16, 16, 0, 0, 960, 66);
    let mut painting = false;
    // The one sink for all SDL events.
    let sdl_sink = Sink::new();
    // A Stream consisting of all key-up and key-down events
    let keyboard_stream = sdl_sink.stream().filter(|event| {
        match *event {
            IOEvent::KeyDown(_) | IOEvent::KeyUp(_) => true,
            _ => false
        }
    });
    // Shove time deltas in here...
    let delta_sink = Sink::new();
    // ...the current time comes out here.
    let time = delta_sink.stream().fold(0.0, |a, b| a + b);
    let (hero_pos, hero_display) = controlled_sprite(
        "assets/porky", 16, 24, 85, 100,
        keyboard_stream.clone(), time.clone(), delta_sink.stream());
    // A Stream consisting of just key-down events
    // XXX: temporary, just used by scale and show_gui signals
    let keydown_stream = keyboard_stream.filter_map(|event| {
        match event {
            IOEvent::KeyDown(keycode) => Some(keycode),
            _ => None
        }
    });
    // Render scale is a signal changed by accumulated keyboard events
    // ([ and ] step by 1/2, clamped to the range [1/2, 8]).
    let scale_signal = keydown_stream.fold(default_scale, |s, keycode| {
        let min_scale = Ratio::new(1, 2);
        let joe_factor = Ratio::from_integer(8);
        match keycode {
            Keycode::RightBracket => min(joe_factor, s + Ratio::new(1, 2)),
            Keycode::LeftBracket => max(min_scale, s - Ratio::new(1, 2)),
            _ => s
        }
    });
    // Screen position is determined by hero position and scale
    // XXX: (Also by screen size, but we'll move this to a signal as well later
    // XXX: This should be broken up more; scaling and screen size both
    // should probably be introduced in a separate lift.
    let (screen_w, screen_h) = renderer.window().unwrap().size();
    // Center the camera on the hero (the +8/+12 offsets are half the hero
    // sprite's 16x24 size).
    let screen_pos = lift!(move |scale, (hero_x, hero_y)| {
        let screen_w = (Ratio::from_integer(screen_w) / scale).to_integer();
        let screen_h = (Ratio::from_integer(screen_h) / scale).to_integer();
        (hero_x as i32 - (screen_w/2) as i32 + 8,
         hero_y as i32 - (screen_h/2) as i32 + 12)
    }, &scale_signal, &hero_pos);
    // show_gui is a simple boolean signal that switches on pressing 'F'
    let show_gui = keydown_stream
        .filter(|k| *k == Keycode::F)
        .fold(false, |t, _| !t );
    // Game loop control
    let mut curtime = time::precise_time_ns();
    let mut accumulator = 0u64;
    // Fixed logic timestep: 16,666,667 ns (1/60 s).
    let dt = 16666667;
    // Metrics
    let mut logic_time = 0u64;
    let mut logic_time_max = 0u64;
    let mut render_time = 0u64;
    let mut render_time_max = 0u64;
    let mut present_time = 0u64;
    let mut present_time_max = 0u64;
    let mut frames = 0u64;
    let start = time::precise_time_ns();
    'mainloop: loop {
        let logic_start = time::precise_time_ns();
        // XXX: We have to explicitly transform by viewport,
        // eventually UI should be part of the scene (?)
        let scale = scale_signal.sample();
        let (screen_x, screen_y) = screen_pos.sample();
        // Add rendering delta to accumulator
        // XXX: need to clean this up and factor out rendering/integration
        // code; would make this loop much prettier.
        let newtime = time::precise_time_ns();
        let frametime = newtime - curtime;
        curtime = newtime;
        accumulator += frametime;
        // Run as many fixed logic steps as the accumulated frame time allows.
        while accumulator >= dt {
            // Window coordinates -> world coordinates (undo scale, add camera).
            let transform_to_world = |x: i32, y: i32| {
                let x = x.scale(scale.recip()) + screen_x;
                let y = y.scale(scale.recip()) + screen_y;
                (x, y)
            };
            for event in sdl_context.event_pump().unwrap().poll_iter() {
                match event {
                    Event::Quit{..} |
                    Event::KeyDown {keycode: Some(Keycode::Escape), ..} => {
                        break 'mainloop
                    },
                    Event::MouseMotion {x, y, ..} => {
                        if painting {
                            let (x, y) = transform_to_world(x, y);
                            map.set_px((x, y), tilepicker.selected()).ok();
                        }
                    },
                    Event::MouseButtonDown {x, y, ..} => {
                        // Clicks hit the tile picker first (when visible);
                        // otherwise they paint the map and start a drag.
                        if !show_gui.sample() || !tilepicker.click((x, y)) {
                            let (x, y) = transform_to_world(x, y);
                            map.set_px((x, y), tilepicker.selected()).ok();
                            painting = true;
                        }
                    },
                    Event::MouseButtonUp {..} => {
                        painting = false;
                    },
                    Event::MouseWheel {y: scroll_y, ..} => {
                        tilepicker.scroll(scroll_y);
                    },
                    _ => { }
                }
                // Forward the SDL event into the reactive network.
                translate_event(event).map(|e| sdl_sink.send(e));
            }
            accumulator -= dt;
            // Advance reactive time by one fixed step (in seconds).
            delta_sink.send((dt as f32) / 1e9);
        }
        // Count time spent updating the reactive network
        {
            let this_frame = time::precise_time_ns() - logic_start;
            logic_time_max = max(this_frame, logic_time_max);
            logic_time += this_frame;
        }
        let render_start = time::precise_time_ns();
        // XXX: note that box must be rendered before creating scene, since
        // scene borrows references to all the instructions added to it. This
        // is perhaps an API weakness; might end up just boxing visibles.
        let rendered_box = textbox.render();
        let rendered_map = map.render();
        let rendered_hero = hero_display.sample();
        renderer.set_draw_color(Color::RGBA(176, 208, 184, 255));
        renderer.clear();
        // World layer: map below, hero and props above, camera-transformed.
        {
            let mut world = Scene::new();
            world.add_all(&rendered_map, -1);
            world.add(&rendered_hero, 0);
            world.add(&starman, 0);
            world.present(&mut renderer, &mut render_context,
                screen_pos.sample(), scale);
        }
        // HUD layer: scaled but not camera-transformed.
        {
            let mut hud = Scene::new();
            hud.add_all(&rendered_box, 1);
            hud.add(&hello, 2);
            hud.present_scaled(&mut renderer, &mut render_context, scale);
        }
        if show_gui.sample() {
            // This rendering bit is kind of "all wires exposed"; once we
            // figure out a more managed structure for getting Visibles from
            // widget to Scene, this will all look much nicer.
            let (rects, tiles) = tilepicker.render();
            let mut gui = Scene::new();
            gui.add_all(&rects, 0);
            gui.add_all(&tiles, 1);
            gui.present(&mut renderer, &mut render_context, (0, 0),
                Ratio::from_integer(1));
        }
        // Count time spent rendering the frame
        {
            let this_frame = time::precise_time_ns() - render_start;
            render_time_max = max(this_frame, render_time_max);
            render_time += this_frame;
        }
        let present_start = time::precise_time_ns();
        // XXX: this takes <1ms even with present_vsync enabled; instead the
        // rendering step absorbs the synchronization latency? Figure out why.
        renderer.present();
        // Count time spent presenting to the SDL renderer
        {
            let this_frame = time::precise_time_ns() - present_start;
            present_time_max = max(this_frame, present_time_max);
            present_time += this_frame;
        }
        frames += 1;
    }
    let end = time::precise_time_ns();
    let fps = (frames as f64 / ((end - start) as f64 / 1e9)) as u32;
    println!("Performance summary:");
    println!("Rendered {} frames in {} ns; effective: {} fps",
        frames, end - start, fps);
    println!("Logic update 𝚫t:\t\tmean: {:.*} ms\tmax: {:.*} ms",
        2, logic_time as f64 / frames as f64 / 1e6,
        2, logic_time_max as f64 / 1e6);
    println!("Render 𝚫t:\t\t\tmean: {:.*} ms\tmax: {:.*} ms",
        2, render_time as f64 / frames as f64 / 1e6,
        2, render_time_max as f64 / 1e6);
    println!("Present 𝚫t:\t\t\tmean: {:.*} ms\tmax: {:.*} ms",
        2, present_time as f64 / frames as f64 / 1e6,
        2, present_time_max as f64 / 1e6);
    // Persist map edits made during this session.
    map.save("assets/map.json").unwrap();
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
// Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
/// Build script: generates Rust types from the crate's protobuf definitions
/// at compile time via `prost_build`.
fn main() {
    // Protobuf sources to compile.
    let proto_files = vec![
        "src/proto/channel_transaction.proto",
        "src/proto/message.proto",
        "src/proto/script_package.proto",
    ];
    // Import roots: Libra's shared type protos plus our own proto directory.
    let includes = vec!["../libra/types/src/proto", "src/proto"];
    prost_build::compile_protos(&proto_files, &includes).unwrap();
}
|
use syn::{self, Ident, Fields};
use overrides::Overrides;
/// A single C-like enum variant together with the external name it maps to.
pub struct Variant {
    // The Rust identifier of the variant.
    pub ident: Ident,
    // The mapped name: an attribute override if present, otherwise the
    // identifier's string form.
    pub name: String,
}
impl Variant {
    /// Builds a `Variant` from a raw `syn` enum variant.
    ///
    /// Only C-like (unit) variants are accepted; attribute overrides, when
    /// present, replace the default name derived from the identifier.
    pub fn parse(raw: &syn::Variant) -> Result<Variant, String> {
        // Guard clause: anything carrying data (tuple/struct fields) is
        // rejected up front.
        if let Fields::Unit = raw.fields {
        } else {
            return Err("non-C-like enums are not supported".to_owned());
        }
        let overrides = Overrides::extract(&raw.attrs)?;
        let ident = raw.ident.clone();
        let name = overrides.name.unwrap_or_else(|| ident.to_string());
        Ok(Variant { ident, name })
    }
}
|
//! Contains the `add...metric` functions that are used for gathering metrics.
use crate::server::{Command, ControlChanError, ControlChanErrorKind, ControlChanMiddleware, ControlChanMsg, Event, Reply, ReplyCode};
use async_trait::async_trait;
use lazy_static::*;
use prometheus::{opts, register_int_counter, register_int_counter_vec, register_int_gauge, IntCounter, IntCounterVec, IntGauge};
/// Control channel middleware that adds Prometheus metrics around the
/// wrapped `next` middleware.
pub struct MetricsMiddleware<Next>
where
    Next: ControlChanMiddleware,
{
    // When false, `handle` becomes a pure pass-through with no metric updates.
    pub collect_metrics: bool,
    // The downstream middleware that actually handles the event.
    pub next: Next,
}
#[async_trait]
impl<Next> ControlChanMiddleware for MetricsMiddleware<Next>
where
    Next: ControlChanMiddleware,
{
    /// Forwards `event` to the wrapped middleware, recording event, reply
    /// and error metrics when `collect_metrics` is enabled.
    async fn handle(&mut self, event: Event) -> Result<Reply, ControlChanError> {
        let metrics_on = self.collect_metrics;
        if metrics_on {
            add_event_metric(&event);
        }
        // Labels must be derived up front: `event` is moved into `next.handle`.
        let (evt_type_label, evt_label) = event_to_labels(&event);
        let result = self.next.handle(event).await;
        if metrics_on {
            match result.as_ref() {
                Ok(reply) => add_reply_metric(reply, evt_type_label, evt_label),
                Err(e) => add_error_metric(e.kind(), evt_type_label, evt_label),
            }
        }
        result
    }
}
// Process-global Prometheus collectors, registered once on first access.
lazy_static! {
    // Counter: failed authentication attempts.
    static ref FTP_AUTH_FAILURES: IntCounter = register_int_counter!(opts!("ftp_auth_failures", "Total number of authentication failures.")).unwrap();
    // Gauge: currently open sessions (see inc_session / dec_session).
    static ref FTP_SESSIONS: IntGauge = register_int_gauge!(opts!("ftp_sessions_total", "Total number of FTP sessions.")).unwrap();
    // Counter: sessions ever opened (monotonic companion to the gauge above).
    static ref FTP_SESSIONS_COUNT: IntCounter = register_int_counter!(opts!("ftp_sessions_count", "Total number of FTP sessions.")).unwrap();
    static ref FTP_BACKEND_WRITE_BYTES: IntCounter =
        register_int_counter!(opts!("ftp_backend_write_bytes", "Total number of bytes successfully written to the backend.")).unwrap();
    static ref FTP_BACKEND_READ_BYTES: IntCounter = register_int_counter!(opts!(
        "ftp_backend_read_bytes",
        "Total number of bytes successfully retrieved from the backend and sent to the client."
    ))
    .unwrap();
    static ref FTP_BACKEND_WRITE_FILES: IntCounter =
        register_int_counter!(opts!("ftp_backend_write_files", "Total number of files successfully written to the backend.")).unwrap();
    static ref FTP_BACKEND_READ_FILES: IntCounter = register_int_counter!(opts!(
        "ftp_backend_read_files",
        "Total number of files successfully retrieved from the backend."
    ))
    .unwrap();
    // Counter vec keyed by command name (see command_to_label).
    static ref FTP_COMMAND_TOTAL: IntCounterVec = register_int_counter_vec!("ftp_command_total", "Total number of commands received.", &["command"]).unwrap();
    // Counter vec keyed by reply-code century ("2xx", "5xx", ...) plus the
    // event that triggered the reply (see event_to_labels).
    static ref FTP_REPLY_TOTAL: IntCounterVec = register_int_counter_vec!(
        "ftp_reply_total",
        "Total number of reply codes server sent to clients.",
        &["range", "event_type", "event"],
    )
    .unwrap();
    static ref FTP_ERROR_TOTAL: IntCounterVec =
        register_int_counter_vec!("ftp_error_total", "Total number of errors encountered.", &["type", "event_type", "event"]).unwrap();
    static ref FTP_SENT_BYTES: IntCounterVec = register_int_counter_vec!("ftp_sent_bytes", "Total bytes sent to FTP clients", &["command"]).unwrap();
    static ref FTP_RECEIVED_BYTES: IntCounterVec =
        register_int_counter_vec!("ftp_received_bytes", "Total bytes received from FTP clients", &["command"]).unwrap();
    static ref FTP_TRANSFERRED_TOTAL: IntCounterVec = register_int_counter_vec!(
        "ftp_transferred_total",
        "The total number of attempted file transfers and directory listings",
        &["command", "status"]
    )
    .unwrap();
}
/// Add a metric for an event.
/// Record counters for a single inbound control-channel event.
fn add_event_metric(event: &Event) {
    match event {
        Event::Command(cmd) => add_command_metric(cmd),
        // Successful download: bytes and whole files read from the backend.
        Event::InternalMsg(ControlChanMsg::SentData { bytes, .. }) => {
            FTP_BACKEND_READ_BYTES.inc_by(*bytes);
            FTP_BACKEND_READ_FILES.inc();
        }
        // Successful upload: bytes and whole files written to the backend.
        Event::InternalMsg(ControlChanMsg::WrittenData { bytes, .. }) => {
            FTP_BACKEND_WRITE_BYTES.inc_by(*bytes);
            FTP_BACKEND_WRITE_FILES.inc();
        }
        Event::InternalMsg(ControlChanMsg::AuthFailed) => FTP_AUTH_FAILURES.inc(),
        // All other internal messages carry no metric of interest.
        Event::InternalMsg(_) => {}
    }
}
/// Increase the amount of bytes sent (/downloaded/ from client perspective)
/// Increase the amount of bytes sent (/downloaded/ from client perspective)
pub fn inc_sent_bytes(bytes: usize, command: &'static str) {
    // usize -> u64 is infallible on all supported targets; unwrap documents that.
    let bytes: u64 = bytes.try_into().unwrap();
    FTP_SENT_BYTES.with_label_values(&[command]).inc_by(bytes);
}
/// Increase the amount of bytes received (/uploaded/ from client perspective)
/// Increase the amount of bytes received (/uploaded/ from client perspective)
pub fn inc_received_bytes(bytes: usize, command: &'static str) {
    // usize -> u64 is infallible on all supported targets; unwrap documents that.
    let bytes: u64 = bytes.try_into().unwrap();
    FTP_RECEIVED_BYTES.with_label_values(&[command]).inc_by(bytes);
}
/// Increase the number of file and directory listing transfer attempts
/// Increase the number of file and directory listing transfer attempts
pub fn inc_transferred(command: &'static str, status: &'static str) {
    let labels = [command, status];
    FTP_TRANSFERRED_TOTAL.with_label_values(&labels).inc();
}
/// Increase the metrics gauge for client sessions
// Both the live-session gauge and the monotonic session counter move
// together on connect; only the gauge moves back on disconnect.
pub fn inc_session() {
    FTP_SESSIONS.inc();
    FTP_SESSIONS_COUNT.inc();
}
/// Decrease the metrics gauge for client sessions
// Counterpart of inc_session; the monotonic counter is deliberately untouched.
pub fn dec_session() {
    FTP_SESSIONS.dec();
}
/// Count one received FTP command under its normalized label.
fn add_command_metric(cmd: &Command) {
    FTP_COMMAND_TOTAL
        .with_label_values(&[&command_to_label(cmd)])
        .inc();
}
/// Error during command processing
fn add_error_metric(error: &ControlChanErrorKind, evt_type_label: String, evt_label: String) {
    // Use only the first word of the error text, lowercased, to keep the
    // label cardinality bounded.
    let label = error
        .to_string()
        .split_whitespace()
        .next()
        .unwrap_or("unknown")
        .to_lowercase();
    FTP_ERROR_TOTAL
        .with_label_values(&[&label, &evt_type_label, &evt_label])
        .inc();
}
/// Add a metric for an FTP reply.
fn add_reply_metric(reply: &Reply, evt_type_label: String, evt_label: String) {
    // Both single- and multi-line replies carry a code; `Reply::None`
    // produces no metric at all.
    let code = match reply {
        Reply::None => return,
        Reply::CodeAndMsg { code, .. } | Reply::MultiLine { code, .. } => *code,
    };
    add_replycode_metric(code, evt_type_label, evt_label);
}
/// Bucket a reply code by its leading digit ("2xx", "4xx", ...) and count it.
fn add_replycode_metric(code: ReplyCode, evt_type_label: String, evt_label: String) {
    let hundreds = (code as u32 / 100) % 10;
    let range = format!("{}xx", hundreds);
    FTP_REPLY_TOTAL
        .with_label_values(&[&range, &evt_type_label, &evt_label])
        .inc();
}
/// Derive the `(event_type, event)` label pair used on reply/error metrics.
fn event_to_labels(evt: &Event) -> (String, String) {
    let (evt_type_str, evt_str) = match evt {
        Event::Command(cmd) => ("command".to_string(), cmd.to_string()),
        Event::InternalMsg(msg) => ("ctrl-chan-msg".to_string(), msg.to_string()),
    };
    // First word of the event text, lowercased, to bound label cardinality.
    let evt_name_str = evt_str
        .split_whitespace()
        .next()
        .unwrap_or("unknown")
        .to_lowercase();
    (evt_type_str, evt_name_str)
}
/// Normalize a command to its metric label: first word, lowercased.
fn command_to_label(cmd: &Command) -> String {
    cmd.to_string()
        .split_whitespace()
        .next()
        .unwrap_or("unknown")
        .to_lowercase()
}
|
// NOTE: svd2rust-generated register API for DAC_CR. Regenerate from the SVD
// rather than hand-editing; the per-field docs below come from the SVD itself.
#[doc = "Register `DAC_CR` reader"]
pub type R = crate::R<DAC_CR_SPEC>;
#[doc = "Register `DAC_CR` writer"]
pub type W = crate::W<DAC_CR_SPEC>;
#[doc = "Field `EN1` reader - DAC channel1 enable This bit is set and cleared by software to enable/disable DAC channel1."]
pub type EN1_R = crate::BitReader;
#[doc = "Field `EN1` writer - DAC channel1 enable This bit is set and cleared by software to enable/disable DAC channel1."]
pub type EN1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TEN1` reader - DAC channel1 trigger enable"]
pub type TEN1_R = crate::BitReader;
#[doc = "Field `TEN1` writer - DAC channel1 trigger enable"]
pub type TEN1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSEL1` reader - DAC channel1 trigger selection These bits select the external event used to trigger DAC channel1. Note: Only used if bit TEN1 = 1 (DAC channel1 trigger enabled)."]
pub type TSEL1_R = crate::FieldReader;
#[doc = "Field `TSEL1` writer - DAC channel1 trigger selection These bits select the external event used to trigger DAC channel1. Note: Only used if bit TEN1 = 1 (DAC channel1 trigger enabled)."]
pub type TSEL1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `WAVE1` reader - DAC channel1 noise/triangle wave generation enable These bits are set and cleared by software. Note: Only used if bit TEN1 = 1 (DAC channel1 trigger enabled)."]
pub type WAVE1_R = crate::FieldReader;
#[doc = "Field `WAVE1` writer - DAC channel1 noise/triangle wave generation enable These bits are set and cleared by software. Note: Only used if bit TEN1 = 1 (DAC channel1 trigger enabled)."]
pub type WAVE1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `MAMP1` reader - DAC channel1 mask/amplitude selector These bits are written by software to select mask in wave generation mode or amplitude in triangle generation mode. = 1011: Unmask bits\\[11:0\\]
of LFSR/ triangle amplitude equal to 4095"]
pub type MAMP1_R = crate::FieldReader;
#[doc = "Field `MAMP1` writer - DAC channel1 mask/amplitude selector These bits are written by software to select mask in wave generation mode or amplitude in triangle generation mode. = 1011: Unmask bits\\[11:0\\]
of LFSR/ triangle amplitude equal to 4095"]
pub type MAMP1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `DMAEN1` reader - DAC channel1 DMA enable This bit is set and cleared by software."]
pub type DMAEN1_R = crate::BitReader;
#[doc = "Field `DMAEN1` writer - DAC channel1 DMA enable This bit is set and cleared by software."]
pub type DMAEN1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DMAUDRIE1` reader - DAC channel1 DMA Underrun Interrupt enable This bit is set and cleared by software."]
pub type DMAUDRIE1_R = crate::BitReader;
#[doc = "Field `DMAUDRIE1` writer - DAC channel1 DMA Underrun Interrupt enable This bit is set and cleared by software."]
pub type DMAUDRIE1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CEN1` reader - DAC Channel 1 calibration enable This bit is set and cleared by software to enable/disable DAC channel 1 calibration, it can be written only if bit EN1=0 into DAC_CR (the calibration mode can be entered/exit only when the DAC channel is disabled) Otherwise, the write operation is ignored."]
pub type CEN1_R = crate::BitReader;
#[doc = "Field `CEN1` writer - DAC Channel 1 calibration enable This bit is set and cleared by software to enable/disable DAC channel 1 calibration, it can be written only if bit EN1=0 into DAC_CR (the calibration mode can be entered/exit only when the DAC channel is disabled) Otherwise, the write operation is ignored."]
pub type CEN1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EN2` reader - DAC channel2 enable This bit is set and cleared by software to enable/disable DAC channel2."]
pub type EN2_R = crate::BitReader;
#[doc = "Field `EN2` writer - DAC channel2 enable This bit is set and cleared by software to enable/disable DAC channel2."]
pub type EN2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TEN2` reader - DAC channel2 trigger enable"]
pub type TEN2_R = crate::BitReader;
#[doc = "Field `TEN2` writer - DAC channel2 trigger enable"]
pub type TEN2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSEL2` reader - DAC channel2 trigger selection These bits select the external event used to trigger DAC channel2 Note: Only used if bit TEN2 = 1 (DAC channel2 trigger enabled)."]
pub type TSEL2_R = crate::FieldReader;
#[doc = "Field `TSEL2` writer - DAC channel2 trigger selection These bits select the external event used to trigger DAC channel2 Note: Only used if bit TEN2 = 1 (DAC channel2 trigger enabled)."]
pub type TSEL2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `WAVE2` reader - DAC channel2 noise/triangle wave generation enable These bits are set/reset by software. 1x: Triangle wave generation enabled Note: Only used if bit TEN2 = 1 (DAC channel2 trigger enabled)"]
pub type WAVE2_R = crate::FieldReader;
#[doc = "Field `WAVE2` writer - DAC channel2 noise/triangle wave generation enable These bits are set/reset by software. 1x: Triangle wave generation enabled Note: Only used if bit TEN2 = 1 (DAC channel2 trigger enabled)"]
pub type WAVE2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `MAMP2` reader - DAC channel2 mask/amplitude selector These bits are written by software to select mask in wave generation mode or amplitude in triangle generation mode. = 1011: Unmask bits\\[11:0\\]
of LFSR/ triangle amplitude equal to 4095"]
pub type MAMP2_R = crate::FieldReader;
#[doc = "Field `MAMP2` writer - DAC channel2 mask/amplitude selector These bits are written by software to select mask in wave generation mode or amplitude in triangle generation mode. = 1011: Unmask bits\\[11:0\\]
of LFSR/ triangle amplitude equal to 4095"]
pub type MAMP2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `DMAEN2` reader - DAC channel2 DMA enable This bit is set and cleared by software."]
pub type DMAEN2_R = crate::BitReader;
#[doc = "Field `DMAEN2` writer - DAC channel2 DMA enable This bit is set and cleared by software."]
pub type DMAEN2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DMAUDRIE2` reader - DAC channel2 DMA underrun interrupt enable This bit is set and cleared by software."]
pub type DMAUDRIE2_R = crate::BitReader;
#[doc = "Field `DMAUDRIE2` writer - DAC channel2 DMA underrun interrupt enable This bit is set and cleared by software."]
pub type DMAUDRIE2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CEN2` reader - DAC Channel 2 calibration enable This bit is set and cleared by software to enable/disable DAC channel 2 calibration, it can be written only if bit EN2=0 into DAC_CR (the calibration mode can be entered/exit only when the DAC channel is disabled) Otherwise, the write operation is ignored."]
pub type CEN2_R = crate::BitReader;
#[doc = "Field `CEN2` writer - DAC Channel 2 calibration enable This bit is set and cleared by software to enable/disable DAC channel 2 calibration, it can be written only if bit EN2=0 into DAC_CR (the calibration mode can be entered/exit only when the DAC channel is disabled) Otherwise, the write operation is ignored."]
pub type CEN2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    // NOTE: svd2rust-generated field accessors; bit offsets and widths come
    // from the device SVD. Regenerate rather than hand-editing.
    #[doc = "Bit 0 - DAC channel1 enable This bit is set and cleared by software to enable/disable DAC channel1."]
    #[inline(always)]
    pub fn en1(&self) -> EN1_R {
        EN1_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - DAC channel1 trigger enable"]
    #[inline(always)]
    pub fn ten1(&self) -> TEN1_R {
        TEN1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bits 2:5 - DAC channel1 trigger selection These bits select the external event used to trigger DAC channel1. Note: Only used if bit TEN1 = 1 (DAC channel1 trigger enabled)."]
    #[inline(always)]
    pub fn tsel1(&self) -> TSEL1_R {
        TSEL1_R::new(((self.bits >> 2) & 0x0f) as u8)
    }
    #[doc = "Bits 6:7 - DAC channel1 noise/triangle wave generation enable These bits are set and cleared by software. Note: Only used if bit TEN1 = 1 (DAC channel1 trigger enabled)."]
    #[inline(always)]
    pub fn wave1(&self) -> WAVE1_R {
        WAVE1_R::new(((self.bits >> 6) & 3) as u8)
    }
    #[doc = "Bits 8:11 - DAC channel1 mask/amplitude selector These bits are written by software to select mask in wave generation mode or amplitude in triangle generation mode. = 1011: Unmask bits\\[11:0\\]
of LFSR/ triangle amplitude equal to 4095"]
    #[inline(always)]
    pub fn mamp1(&self) -> MAMP1_R {
        MAMP1_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bit 12 - DAC channel1 DMA enable This bit is set and cleared by software."]
    #[inline(always)]
    pub fn dmaen1(&self) -> DMAEN1_R {
        DMAEN1_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - DAC channel1 DMA Underrun Interrupt enable This bit is set and cleared by software."]
    #[inline(always)]
    pub fn dmaudrie1(&self) -> DMAUDRIE1_R {
        DMAUDRIE1_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - DAC Channel 1 calibration enable This bit is set and cleared by software to enable/disable DAC channel 1 calibration, it can be written only if bit EN1=0 into DAC_CR (the calibration mode can be entered/exit only when the DAC channel is disabled) Otherwise, the write operation is ignored."]
    #[inline(always)]
    pub fn cen1(&self) -> CEN1_R {
        CEN1_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 16 - DAC channel2 enable This bit is set and cleared by software to enable/disable DAC channel2."]
    #[inline(always)]
    pub fn en2(&self) -> EN2_R {
        EN2_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - DAC channel2 trigger enable"]
    #[inline(always)]
    pub fn ten2(&self) -> TEN2_R {
        TEN2_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bits 18:21 - DAC channel2 trigger selection These bits select the external event used to trigger DAC channel2 Note: Only used if bit TEN2 = 1 (DAC channel2 trigger enabled)."]
    #[inline(always)]
    pub fn tsel2(&self) -> TSEL2_R {
        TSEL2_R::new(((self.bits >> 18) & 0x0f) as u8)
    }
    #[doc = "Bits 22:23 - DAC channel2 noise/triangle wave generation enable These bits are set/reset by software. 1x: Triangle wave generation enabled Note: Only used if bit TEN2 = 1 (DAC channel2 trigger enabled)"]
    #[inline(always)]
    pub fn wave2(&self) -> WAVE2_R {
        WAVE2_R::new(((self.bits >> 22) & 3) as u8)
    }
    #[doc = "Bits 24:27 - DAC channel2 mask/amplitude selector These bits are written by software to select mask in wave generation mode or amplitude in triangle generation mode. = 1011: Unmask bits\\[11:0\\]
of LFSR/ triangle amplitude equal to 4095"]
    #[inline(always)]
    pub fn mamp2(&self) -> MAMP2_R {
        MAMP2_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    #[doc = "Bit 28 - DAC channel2 DMA enable This bit is set and cleared by software."]
    #[inline(always)]
    pub fn dmaen2(&self) -> DMAEN2_R {
        DMAEN2_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - DAC channel2 DMA underrun interrupt enable This bit is set and cleared by software."]
    #[inline(always)]
    pub fn dmaudrie2(&self) -> DMAUDRIE2_R {
        DMAUDRIE2_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - DAC Channel 2 calibration enable This bit is set and cleared by software to enable/disable DAC channel 2 calibration, it can be written only if bit EN2=0 into DAC_CR (the calibration mode can be entered/exit only when the DAC channel is disabled) Otherwise, the write operation is ignored."]
    #[inline(always)]
    pub fn cen2(&self) -> CEN2_R {
        CEN2_R::new(((self.bits >> 30) & 1) != 0)
    }
}
impl W {
    // NOTE: svd2rust-generated field writers; the const generic on each
    // writer type is the field's bit offset from the SVD. Regenerate rather
    // than hand-editing.
    #[doc = "Bit 0 - DAC channel1 enable This bit is set and cleared by software to enable/disable DAC channel1."]
    #[inline(always)]
    #[must_use]
    pub fn en1(&mut self) -> EN1_W<DAC_CR_SPEC, 0> {
        EN1_W::new(self)
    }
    #[doc = "Bit 1 - DAC channel1 trigger enable"]
    #[inline(always)]
    #[must_use]
    pub fn ten1(&mut self) -> TEN1_W<DAC_CR_SPEC, 1> {
        TEN1_W::new(self)
    }
    #[doc = "Bits 2:5 - DAC channel1 trigger selection These bits select the external event used to trigger DAC channel1. Note: Only used if bit TEN1 = 1 (DAC channel1 trigger enabled)."]
    #[inline(always)]
    #[must_use]
    pub fn tsel1(&mut self) -> TSEL1_W<DAC_CR_SPEC, 2> {
        TSEL1_W::new(self)
    }
    #[doc = "Bits 6:7 - DAC channel1 noise/triangle wave generation enable These bits are set and cleared by software. Note: Only used if bit TEN1 = 1 (DAC channel1 trigger enabled)."]
    #[inline(always)]
    #[must_use]
    pub fn wave1(&mut self) -> WAVE1_W<DAC_CR_SPEC, 6> {
        WAVE1_W::new(self)
    }
    #[doc = "Bits 8:11 - DAC channel1 mask/amplitude selector These bits are written by software to select mask in wave generation mode or amplitude in triangle generation mode. = 1011: Unmask bits\\[11:0\\]
of LFSR/ triangle amplitude equal to 4095"]
    #[inline(always)]
    #[must_use]
    pub fn mamp1(&mut self) -> MAMP1_W<DAC_CR_SPEC, 8> {
        MAMP1_W::new(self)
    }
    #[doc = "Bit 12 - DAC channel1 DMA enable This bit is set and cleared by software."]
    #[inline(always)]
    #[must_use]
    pub fn dmaen1(&mut self) -> DMAEN1_W<DAC_CR_SPEC, 12> {
        DMAEN1_W::new(self)
    }
    #[doc = "Bit 13 - DAC channel1 DMA Underrun Interrupt enable This bit is set and cleared by software."]
    #[inline(always)]
    #[must_use]
    pub fn dmaudrie1(&mut self) -> DMAUDRIE1_W<DAC_CR_SPEC, 13> {
        DMAUDRIE1_W::new(self)
    }
    #[doc = "Bit 14 - DAC Channel 1 calibration enable This bit is set and cleared by software to enable/disable DAC channel 1 calibration, it can be written only if bit EN1=0 into DAC_CR (the calibration mode can be entered/exit only when the DAC channel is disabled) Otherwise, the write operation is ignored."]
    #[inline(always)]
    #[must_use]
    pub fn cen1(&mut self) -> CEN1_W<DAC_CR_SPEC, 14> {
        CEN1_W::new(self)
    }
    #[doc = "Bit 16 - DAC channel2 enable This bit is set and cleared by software to enable/disable DAC channel2."]
    #[inline(always)]
    #[must_use]
    pub fn en2(&mut self) -> EN2_W<DAC_CR_SPEC, 16> {
        EN2_W::new(self)
    }
    #[doc = "Bit 17 - DAC channel2 trigger enable"]
    #[inline(always)]
    #[must_use]
    pub fn ten2(&mut self) -> TEN2_W<DAC_CR_SPEC, 17> {
        TEN2_W::new(self)
    }
    #[doc = "Bits 18:21 - DAC channel2 trigger selection These bits select the external event used to trigger DAC channel2 Note: Only used if bit TEN2 = 1 (DAC channel2 trigger enabled)."]
    #[inline(always)]
    #[must_use]
    pub fn tsel2(&mut self) -> TSEL2_W<DAC_CR_SPEC, 18> {
        TSEL2_W::new(self)
    }
    #[doc = "Bits 22:23 - DAC channel2 noise/triangle wave generation enable These bits are set/reset by software. 1x: Triangle wave generation enabled Note: Only used if bit TEN2 = 1 (DAC channel2 trigger enabled)"]
    #[inline(always)]
    #[must_use]
    pub fn wave2(&mut self) -> WAVE2_W<DAC_CR_SPEC, 22> {
        WAVE2_W::new(self)
    }
    #[doc = "Bits 24:27 - DAC channel2 mask/amplitude selector These bits are written by software to select mask in wave generation mode or amplitude in triangle generation mode. = 1011: Unmask bits\\[11:0\\]
of LFSR/ triangle amplitude equal to 4095"]
    #[inline(always)]
    #[must_use]
    pub fn mamp2(&mut self) -> MAMP2_W<DAC_CR_SPEC, 24> {
        MAMP2_W::new(self)
    }
    #[doc = "Bit 28 - DAC channel2 DMA enable This bit is set and cleared by software."]
    #[inline(always)]
    #[must_use]
    pub fn dmaen2(&mut self) -> DMAEN2_W<DAC_CR_SPEC, 28> {
        DMAEN2_W::new(self)
    }
    #[doc = "Bit 29 - DAC channel2 DMA underrun interrupt enable This bit is set and cleared by software."]
    #[inline(always)]
    #[must_use]
    pub fn dmaudrie2(&mut self) -> DMAUDRIE2_W<DAC_CR_SPEC, 29> {
        DMAUDRIE2_W::new(self)
    }
    #[doc = "Bit 30 - DAC Channel 2 calibration enable This bit is set and cleared by software to enable/disable DAC channel 2 calibration, it can be written only if bit EN2=0 into DAC_CR (the calibration mode can be entered/exit only when the DAC channel is disabled) Otherwise, the write operation is ignored."]
    #[inline(always)]
    #[must_use]
    pub fn cen2(&mut self) -> CEN2_W<DAC_CR_SPEC, 30> {
        CEN2_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// NOTE: svd2rust-generated register spec; reset value and modify bitmaps
// come from the SVD. Regenerate rather than hand-editing.
#[doc = "DAC control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dac_cr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dac_cr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DAC_CR_SPEC;
impl crate::RegisterSpec for DAC_CR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`dac_cr::R`](R) reader structure"]
impl crate::Readable for DAC_CR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`dac_cr::W`](W) writer structure"]
impl crate::Writable for DAC_CR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DAC_CR to value 0"]
impl crate::Resettable for DAC_CR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
mod errors;
// Uncertain if this one belongs here, but is useful in some of the
// examples.
pub mod bezier_util;
mod color;
mod growth_image;
mod growth_image_builder;
mod kd_tree;
pub mod palettes;
mod point_tracker;
mod topology;
pub use color::RGB;
pub use errors::Error;
pub use growth_image::SaveImageType;
pub use growth_image_builder::GrowthImageBuilder;
pub use palettes::*;
pub use topology::PixelLoc;
|
//! A deferred job queue.
//!
//! When you want to run some time-consuming work, please consider using this
//! mechanism.
use alloc::boxed::Box;
use crossbeam::queue::SegQueue;
use kerla_api::sync::SpinLock;
// A deferred-job callback: runs once, must be sendable to whichever context drains the queue.
pub trait JobCallback = FnOnce() + Send + 'static;
// Global FIFO of pending jobs. SegQueue is itself lock-free; the SpinLock wrapper
// provides the interior mutability required for a `static` in this kernel environment.
static GLOBAL_QUEUE: SpinLock<SegQueue<Box<dyn JobCallback>>> = SpinLock::new(SegQueue::new());
/// A handle for enqueueing work to be executed later by `run_deferred_jobs`.
pub struct DeferredJob {
    // Will be useful for debugging.
    #[allow(unused)]
    name: &'static str,
}
impl DeferredJob {
    /// Creates a named deferred-job handle. The name is kept for debugging only.
    pub const fn new(name: &'static str) -> DeferredJob {
        DeferredJob { name }
    }

    /// Enqueues a job; `callback` will automatically be run sometime later.
    ///
    /// # Caveats
    ///
    /// `callback` MUST NOT sleep since it can be run in an interrupt context!
    pub fn run_later<F: JobCallback>(&self, callback: F) {
        let job: Box<dyn JobCallback> = Box::new(callback);
        GLOBAL_QUEUE.lock().push(job);
    }
}
/// Run pending deferred jobs.
pub fn run_deferred_jobs() {
    // TODO: The current user process is still blocked until we leave the
    // interrupt handler. Should we have a limit of the maximum number of jobs?
    //
    // TODO: Re-enable interrupts here since this may take long.
    loop {
        // BUG FIX: the original `while let Some(cb) = GLOBAL_QUEUE.lock().pop()`
        // keeps the temporary lock guard alive for the whole loop body (temporary
        // scope of a `while let` scrutinee), so the spinlock was held while the
        // callback ran. A callback that calls `DeferredJob::run_later` (which
        // locks the same queue) would deadlock. Popping in a standalone `let`
        // drops the guard before the callback is invoked.
        let job = GLOBAL_QUEUE.lock().pop();
        match job {
            Some(callback) => callback(),
            None => break,
        }
    }
}
|
pub mod post;
pub mod s3;
|
use types;
use card::Card;
use calculator::utility;
/// Tests a hand for a flush: five cards of one suit.
///
/// Returns the flush `Combination` (suit plus the five highest ranks of that
/// suit, descending) or `None` if no suit has five cards.
pub fn test(cards: Vec<Card>) -> Option<types::Combination> {
    // A flush needs at least five cards.
    if cards.len() < 5 {
        return None;
    }
    // Iterate the four suits directly; no heap allocation needed (was `vec![..]`).
    for &suit in &[
        types::Suit::Clubs,
        types::Suit::Diamonds,
        types::Suit::Hearts,
        types::Suit::Spades,
    ] {
        if let Some(flush) = test_flush_for_suit_for_slice(suit, &cards) {
            let mut result: [types::Rank; 5] = [types::Rank::Ace; 5];
            // `Rank` is `Copy` (used in an array-repeat above), so a plain
            // memcpy-style copy suffices; `flush` is always exactly 5 long here.
            result.copy_from_slice(&flush);
            return Some(types::Combination::Flush(suit, result));
        }
    }
    None
}
/// Collects the cards of `suit` from `cards` and, if there are at least five,
/// returns the ranks of the five highest (per `utility::sort_cards` order).
fn test_flush_for_suit_for_slice(suit: types::Suit, cards: &[Card]) -> Option<Vec<types::Rank>> {
    // Filter to the requested suit; `Card` is `Copy` (the original did `push(*card)`),
    // so `.copied()` is free. The `as i32` comparison mirrors the original
    // (Suit apparently lacks PartialEq).
    let mut flush: Vec<Card> = cards
        .iter()
        .filter(|card| card.suit as i32 == suit as i32)
        .copied()
        .collect();
    // Sort so the strongest five cards come first.
    utility::sort_cards(&mut flush);
    if flush.len() >= 5 {
        flush.truncate(5);
        Some(flush.iter().map(|card| card.rank).collect())
    } else {
        None
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // An empty hand can never be a flush.
    #[test]
    fn none_for_empty() {
        assert_eq!(None, test(vec![]));
    }

    // Four cards (even of one suit) are below the five-card minimum.
    #[test]
    fn none_for_four_cards() {
        let cards = [
            Card {
                rank: types::Rank::Two,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Queen,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::King,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Three,
                suit: types::Suit::Spades,
            },
        ];
        assert_eq!(None, test(cards.to_vec()));
    }

    // Exactly five cards of one suit: the whole hand is the flush,
    // reported in descending rank order.
    #[test]
    fn option_for_five_cards() {
        let cards = [
            Card {
                rank: types::Rank::Two,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Queen,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::King,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Three,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Ace,
                suit: types::Suit::Spades,
            },
        ];
        assert_eq!(
            Some(types::Combination::Flush(
                types::Suit::Spades,
                [
                    types::Rank::Ace,
                    types::Rank::King,
                    types::Rank::Queen,
                    types::Rank::Three,
                    types::Rank::Two
                ]
            )),
            test(cards.to_vec())
        );
    }

    // Seven same-suit cards: only the five highest ranks make the flush.
    #[test]
    fn option_for_seven_unsorted_cards_all_spades() {
        let cards = [
            Card {
                rank: types::Rank::Two,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Queen,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::King,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Three,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Ace,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Ten,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Five,
                suit: types::Suit::Spades,
            },
        ];
        assert_eq!(
            Some(types::Combination::Flush(
                types::Suit::Spades,
                [
                    types::Rank::Ace,
                    types::Rank::King,
                    types::Rank::Queen,
                    types::Rank::Ten,
                    types::Rank::Five
                ]
            )),
            test(cards.to_vec())
        );
    }

    // Mixed suits with exactly five spades: only the spades count.
    #[test]
    fn option_for_seven_unsorted_cards_with_only_five_spades() {
        let cards = [
            Card {
                rank: types::Rank::Two,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Queen,
                suit: types::Suit::Diamonds,
            },
            Card {
                rank: types::Rank::King,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Three,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Ace,
                suit: types::Suit::Hearts,
            },
            Card {
                rank: types::Rank::Ten,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Five,
                suit: types::Suit::Spades,
            },
        ];
        assert_eq!(
            Some(types::Combination::Flush(
                types::Suit::Spades,
                [
                    types::Rank::King,
                    types::Rank::Ten,
                    types::Rank::Five,
                    types::Rank::Three,
                    types::Rank::Two
                ]
            )),
            test(cards.to_vec())
        );
    }

    // Only four of any one suit: no flush.
    #[test]
    fn none_for_seven_unsorted_cards_with_only_four_spades() {
        let cards = [
            Card {
                rank: types::Rank::Two,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Queen,
                suit: types::Suit::Diamonds,
            },
            Card {
                rank: types::Rank::King,
                suit: types::Suit::Clubs,
            },
            Card {
                rank: types::Rank::Three,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Ace,
                suit: types::Suit::Hearts,
            },
            Card {
                rank: types::Rank::Ten,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Five,
                suit: types::Suit::Spades,
            },
        ];
        assert_eq!(None, test(cards.to_vec()));
    }

    // Duplicate cards are counted individually.
    // NOTE(review): the fixture actually contains eleven cards despite the name.
    #[test]
    fn option_for_ten_unsorted_cards_with_five_spades_and_duplicates() {
        let cards = [
            Card {
                rank: types::Rank::Two,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Two,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Queen,
                suit: types::Suit::Diamonds,
            },
            Card {
                rank: types::Rank::King,
                suit: types::Suit::Clubs,
            },
            Card {
                rank: types::Rank::Three,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Ace,
                suit: types::Suit::Hearts,
            },
            Card {
                rank: types::Rank::Ten,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Five,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Five,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Five,
                suit: types::Suit::Spades,
            },
            Card {
                rank: types::Rank::Five,
                suit: types::Suit::Spades,
            },
        ];
        assert_eq!(
            Some(types::Combination::Flush(
                types::Suit::Spades,
                [
                    types::Rank::Ten,
                    types::Rank::Five,
                    types::Rank::Five,
                    types::Rank::Five,
                    types::Rank::Five
                ]
            )),
            test(cards.to_vec())
        );
    }
}
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate rocket;
#[macro_use]
extern crate rocket_contrib;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
pub mod models;
pub mod routes;
pub mod schema;
use rocket_contrib::templates::Template;
// Registers your database with Rocket, returning a `fairing` that can be `.attach`'d to your
// Rocket application to set up a connection pool for it and automatically manage it for you.
// `DbConn` then acts as a request guard yielding one pooled MySQL connection;
// "fivemj" must match a `[databases]` entry in Rocket.toml.
#[database("fivemj")]
pub struct DbConn(diesel::MysqlConnection);
/// Builds the Rocket instance: mounts the index route, then attaches the
/// template and database-pool fairings (same order as before).
fn rocket() -> rocket::Rocket {
    let instance = rocket::ignite();
    let instance = instance.mount("/", routes![routes::index]);
    let instance = instance.attach(Template::fairing());
    instance.attach(DbConn::fairing())
}
fn main() {
rocket().launch();
}
|
use nix::sys::signal;
use nix::sys::signal::SaFlags;
use nix::sys::signal::{sigaction, SigAction, SigHandler, SigSet};
use super::process;
// SIGINT handler: emits a newline so the next shell prompt starts on a fresh line.
// NOTE(review): `println!` is not async-signal-safe (it takes the stdout lock and
// may allocate), so this can deadlock if the signal arrives mid-print. Consider a
// raw `write(2)` instead — confirm against the runtime this targets.
extern "C" fn handle_signal(_signam: i32) {
    println!();
}
impl process::Process {
pub(crate) fn signal_action(&self) {
let sa = SigAction::new(
SigHandler::Handler(handle_signal),
SaFlags::SA_RESETHAND,
SigSet::empty(),
);
unsafe { sigaction(signal::SIGINT, &sa) }.unwrap();
}
} |
use std::{fs::read_to_string};
/// Arguments for one grep run.
struct GrepArgs {
    pattern: String, // substring to search for
    path: String,    // file to search in
}
impl GrepArgs {
    /// Bundles a file path and a search pattern.
    fn new(path: String, pattern: String) -> GrepArgs {
        GrepArgs { pattern, path }
    }
}
/// Prints every line of `content` that contains `pattern`.
fn grep(content: String, pattern: String) {
    content
        .lines()
        .filter(|line| line.contains(pattern.as_str()))
        .for_each(|line| println!("{}", line));
}
fn run(state: GrepArgs) {
match read_to_string(state.path) {
Ok(content) => grep(content, state.pattern),
Err(reason) => println!("{}", reason)
}
}
/// Entry point: argv[1] is the pattern, argv[2] the path.
fn main() {
    let mut args = std::env::args().skip(1);
    match (args.next(), args.next()) {
        (Some(pattern), Some(path)) => run(GrepArgs::new(path, pattern)),
        _ => println!("path or pattern is not specified"),
    }
}
|
use serde::{Deserialize, Serialize};
use std::ops::Add;
#[derive(Clone, Debug, Deserialize, Serialize)]
// A zero-based (row, column) position.
pub struct Point {
    pub row: usize,
    pub column: usize,
}
impl Add<(usize, usize)> for Point {
type Output = Point;
fn add(self, rhs: (usize, usize)) -> Self::Output {
Self {
row: self.row + rhs.0,
column: self.column + rhs.1,
}
}
}
impl Add<Point> for Point {
type Output = Point;
fn add(self, rhs: Point) -> Self::Output {
Self {
row: self.row + rhs.row,
column: self.column + rhs.column,
}
}
}
impl Point {
pub fn new(row: usize, column: usize) -> Self {
Self { row, column }
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
// A half-open(?) span between two positions.
// NOTE(review): whether `end` is inclusive is not established by this file.
pub struct Range {
    pub start: Point,
    pub end: Point,
}
impl Add<usize> for Range {
    type Output = Range;
    /// Shifts only `start` right by `rhs` columns; `end` is left untouched.
    ///
    /// NOTE(review): the asymmetry (start moves, end does not) looks deliberate
    /// (narrowing the range from the left) but confirm with callers — a plain
    /// translation would shift `end` as well.
    fn add(self, rhs: usize) -> Self::Output {
        Self {
            start: self.start + (0, rhs),
            end: self.end,
        }
    }
}
impl Add<Range> for Range {
type Output = Range;
fn add(self, rhs: Range) -> Self::Output {
Self {
start: self.start + rhs.start,
end: self.end + rhs.end,
}
}
}
impl Range {
    /// Builds a range from `(row, column)` pairs for `start` and `end`.
    pub fn new(start: (usize, usize), end: (usize, usize)) -> Self {
        let (start_row, start_column) = start;
        let (end_row, end_column) = end;
        Self {
            start: Point::new(start_row, start_column),
            end: Point::new(end_row, end_column),
        }
    }
}
|
use paste::paste;
#[cxx::bridge(namespace = "xtensor_rust::bridge")]
mod ffi {
    // Unfortunately we could not automate this boilerplate with a macro
    // (cxx::bridge cannot expand through macro_rules!), so one extern block is
    // spelled out per primitive element type. The Rust-side types and the
    // `rs*_copy_from_ptr` free functions are generated by `newtypes!` below.
    // Each `copy_from_ptr` is unsafe: the pointer must address
    // shape.iter().product() contiguous, initialized elements.
    extern "Rust" {
        type RsTensorU8;
        fn shape(&self) -> &[usize];
        fn as_slice(&self) -> &[u8];
        fn as_slice_mut(&mut self) -> &mut [u8];
        unsafe fn rsU8_copy_from_ptr(shape: &[usize], ptr: *const u8) -> Box<RsTensorU8>;
    }
    extern "Rust" {
        type RsTensorI8;
        fn shape(&self) -> &[usize];
        fn as_slice(&self) -> &[i8];
        fn as_slice_mut(&mut self) -> &mut [i8];
        unsafe fn rsI8_copy_from_ptr(shape: &[usize], ptr: *const i8) -> Box<RsTensorI8>;
    }
    extern "Rust" {
        type RsTensorU16;
        fn shape(&self) -> &[usize];
        fn as_slice(&self) -> &[u16];
        fn as_slice_mut(&mut self) -> &mut [u16];
        unsafe fn rsU16_copy_from_ptr(shape: &[usize], ptr: *const u16) -> Box<RsTensorU16>;
    }
    extern "Rust" {
        type RsTensorI16;
        fn shape(&self) -> &[usize];
        fn as_slice(&self) -> &[i16];
        fn as_slice_mut(&mut self) -> &mut [i16];
        unsafe fn rsI16_copy_from_ptr(shape: &[usize], ptr: *const i16) -> Box<RsTensorI16>;
    }
    extern "Rust" {
        type RsTensorU32;
        fn shape(&self) -> &[usize];
        fn as_slice(&self) -> &[u32];
        fn as_slice_mut(&mut self) -> &mut [u32];
        unsafe fn rsU32_copy_from_ptr(shape: &[usize], ptr: *const u32) -> Box<RsTensorU32>;
    }
    extern "Rust" {
        type RsTensorI32;
        fn shape(&self) -> &[usize];
        fn as_slice(&self) -> &[i32];
        fn as_slice_mut(&mut self) -> &mut [i32];
        unsafe fn rsI32_copy_from_ptr(shape: &[usize], ptr: *const i32) -> Box<RsTensorI32>;
    }
    extern "Rust" {
        type RsTensorU64;
        fn shape(&self) -> &[usize];
        fn as_slice(&self) -> &[u64];
        fn as_slice_mut(&mut self) -> &mut [u64];
        unsafe fn rsU64_copy_from_ptr(shape: &[usize], ptr: *const u64) -> Box<RsTensorU64>;
    }
    extern "Rust" {
        type RsTensorI64;
        fn shape(&self) -> &[usize];
        fn as_slice(&self) -> &[i64];
        fn as_slice_mut(&mut self) -> &mut [i64];
        unsafe fn rsI64_copy_from_ptr(shape: &[usize], ptr: *const i64) -> Box<RsTensorI64>;
    }
    extern "Rust" {
        type RsTensorF32;
        fn shape(&self) -> &[usize];
        fn as_slice(&self) -> &[f32];
        fn as_slice_mut(&mut self) -> &mut [f32];
        unsafe fn rsF32_copy_from_ptr(shape: &[usize], ptr: *const f32) -> Box<RsTensorF32>;
    }
    extern "Rust" {
        type RsTensorF64;
        fn shape(&self) -> &[usize];
        fn as_slice(&self) -> &[f64];
        fn as_slice_mut(&mut self) -> &mut [f64];
        unsafe fn rsF64_copy_from_ptr(shape: &[usize], ptr: *const f64) -> Box<RsTensorF64>;
    }
}
/// Handles creating newtypes for RsTensor<T> for all the primitive T's.
///
/// For each type `$typ` this expands (via `paste!`) to e.g. `RsTensorU8`:
/// a newtype over `RsTensor<u8>`, forwarding methods, `From` conversions in
/// both directions, and the `rsU8_copy_from_ptr` FFI entry point declared in
/// the `ffi` bridge above.
macro_rules! newtypes {
    ($typ:ty) => {
        paste! {
            struct [<RsTensor $typ:upper>](RsTensor<$typ>);
            // Project the wrapped methods to the inner type
            impl [<RsTensor $typ:upper>] {
                pub fn shape(&self) -> &[usize] {
                    self.0.shape()
                }
                pub fn as_slice(&self) -> &[$typ] {
                    self.0.as_slice()
                }
                pub fn as_slice_mut(&mut self) -> &mut [$typ] {
                    self.0.as_slice_mut()
                }
                pub unsafe fn copy_from_ptr(shape: &[usize], ptr: *const $typ) -> Self {
                    Self(RsTensor::copy_from_ptr(shape, ptr))
                }
            }
            // Implement conversion traits to/from inner type
            impl From<RsTensor<$typ>> for [<RsTensor $typ:upper>] {
                fn from(tensor: RsTensor<$typ>) -> Self { Self(tensor) }
            }
            impl From<[<RsTensor $typ:upper>]> for RsTensor<$typ> {
                fn from(wrapped: [<RsTensor $typ:upper>]) -> Self { wrapped.0 }
            }
            // Had to make this wrapped in a box due to opaque type across FFI boundary
            // Could not make an associated function, due to lack of support in CXX
            #[allow(non_snake_case)]
            unsafe fn [<rs $typ:upper _copy_from_ptr>](shape: &[usize], ptr: *const $typ) -> Box<[<RsTensor $typ:upper>]> {
                Box::new([<RsTensor $typ:upper>]::copy_from_ptr(shape, ptr))
            }
        }
    };
    // Recursive case: peel one type off and recurse on the rest.
    ($typ:ty, $($tail:ty),+) => {
        newtypes!($typ);
        newtypes!($($tail),+);
    }
}
newtypes!(u8, i8, u16, i16, u32, i32, u64, i64, f32, f64);
/// An owned n-dimensional tensor, guaranteed standard (row-major, contiguous) layout.
pub struct RsTensor<T>(ndarray::ArrayD<T>);
impl<T: Copy> RsTensor<T> {
    /// Returns Err(()) if the array was not a standard layout. See `[ndarray::ArrayBase::is_standard_layout()]`
    pub fn new(arr: ndarray::ArrayD<T>) -> Result<Self, ()> {
        if arr.is_standard_layout() {
            Ok(Self(arr))
        } else {
            Err(())
        }
    }
    /// The dimensions of the tensor.
    pub fn shape(&self) -> &[usize] {
        self.0.shape()
    }
    /// Contiguous view of the elements. The `unwrap` cannot fail: `new` and
    /// `copy_from_ptr` only admit standard-layout arrays.
    pub fn as_slice(&self) -> &[T] {
        self.0.as_slice().unwrap()
    }
    /// Mutable contiguous view; same layout invariant as `as_slice`.
    pub fn as_slice_mut(&mut self) -> &mut [T] {
        self.0.as_slice_mut().unwrap()
    }
    /// Copies a tensor out of raw memory.
    ///
    /// # Safety
    ///
    /// `ptr` must point to at least `shape.iter().product()` initialized,
    /// contiguous (row-major) elements of `T`, valid for reads for the
    /// duration of this call.
    pub unsafe fn copy_from_ptr(shape: &[usize], ptr: *const T) -> Self {
        let view = ndarray::ArrayViewD::from_shape_ptr(shape, ptr);
        let arr = view.to_owned();
        Self(arr)
    }
}
impl<T> From<RsTensor<T>> for ndarray::ArrayD<T> {
fn from(tensor: RsTensor<T>) -> Self {
tensor.0
}
}
|
use super::*;
use crate::track::Duration;
// Parses the checked-in `test.json` fixture and checks it decodes to the
// expected two audio entries (ids, cdn urls, titles, durations, extras).
#[test]
fn test_valid() {
    let payload = std::include_str!("test.json");
    let json = serde_json
        ::from_str::<Json>(payload)
        .expect("failed to parse json");
    let expected = Json {
        audios: AudiosObject(
            Box::new(
                [
                    Entry {
                        id: "371745443_456552853".into(),
                        url: "psv4/c815137/u371745443/audios/864ffa53ed2b".into(),
                        track_id: "Somne, Mind Against - Vertere".into(),
                        duration: Duration::new(9, 14),
                        extra: Some("IUZ-ACRuwtNix4E8bePkI20u2owgeJBf2NFP-ZFwymXCD-fvNnHK3ixzxcWsbhwNWFu0seQBNGG-_aF5-iGy1jdKeRNrsblQ4rZivQmF_sxxUTgUJyarOgprjpTO-wrfMrNS-MWJTp-lS93cUpDFM-0".into()),
                    },
                    Entry {
                        id: "-2001463066_59463066".into(),
                        url: "cs1-41v4/p1/844e0fb9227745".into(),
                        track_id: "Georgi Z - Vertere".into(),
                        duration: Duration::new(6, 24),
                        extra: Some("Iwm_p382q0AB06-n22zGWZ4Oud7cJsPEJt-hvlQa-hrn55sZJ7-VXtB8gxZykPjifMtc6cKADbRPfgmFup05DbZ6g2olAb7wbDGjO8ksaGZCirP3RHeu48eFUOq7yXB_Db-XHaVw_vb3GtVnAw91U8k5".into()),
                    },
                ],
            )
        )
    };
    assert_eq!(json, expected);
}
|
use std::marker::PhantomData;
use crate::{IsBot, IsTop, LatticeFrom, LatticeOrd, Merge};
/// A `Point` lattice, corresponding to a single instance of `T`.
///
/// Will runtime panic if a merge between inequal values is attempted.
///
/// The `Provenance` generic param is a token for the origin of this point. The parameter can be
/// used to differentiate between points with different provenances. This will prevent them from
/// being merged together, avoiding any posibility of panic.
///
/// Like [`Conflict<T>`](crate::Conflict) but will panic instead of going to a "conflict" top
/// state.
///
/// Can be thought of as a lattice with a domain of size one, corresponding to the specific value
/// inside.
///
/// This also can be used to wrap non lattice data into a lattice in a way that typechecks.
#[repr(transparent)]
#[derive(Copy, Clone, Debug, Default, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Point<T, Provenance> {
    /// The value stored inside. This should not be mutated.
    pub val: T,
    // Zero-sized at runtime (`repr(transparent)` over `T`); `*mut Provenance`
    // makes the type invariant in `Provenance` and !Send/!Sync.
    _token: PhantomData<*mut Provenance>,
}
impl<T, Provenance> Point<T, Provenance> {
/// Create a new `Point` lattice instance from a value.
pub fn new(val: T) -> Self {
Self {
val,
_token: PhantomData,
}
}
/// Create a new `Point` lattice instance from a value using `Into`.
pub fn new_from(val: impl Into<T>) -> Self {
Self::new(val.into())
}
}
impl<T, Provenance> Merge<Point<T, Provenance>> for Point<T, Provenance>
where
    T: PartialEq,
{
    /// Merging equal points is a no-op (`false` = unchanged); merging inequal
    /// points panics, since the `Point` lattice admits exactly one value.
    fn merge(&mut self, other: Point<T, Provenance>) -> bool {
        assert!(
            self.val == other.val,
            "The `Point` lattice cannot merge inequal elements."
        );
        false
    }
}
impl<T, Provenance> LatticeFrom<Point<T, Provenance>> for Point<T, Provenance> {
    // Identity conversion between same-provenance points.
    fn lattice_from(other: Point<T, Provenance>) -> Self {
        other
    }
}
impl<T, Provenance> PartialOrd<Point<T, Provenance>> for Point<T, Provenance>
where
    T: PartialEq,
{
    /// Equal points compare `Equal`; inequal points panic — a `Point` lattice
    /// has no other elements to order against.
    fn partial_cmp(&self, other: &Point<T, Provenance>) -> Option<std::cmp::Ordering> {
        if self.val == other.val {
            Some(std::cmp::Ordering::Equal)
        } else {
            panic!("The `Point` lattice does not have a partial order between inequal elements.");
        }
    }
}
// Marker impl: certifies that the `PartialOrd` above is the lattice order.
impl<T, Provenance> LatticeOrd<Point<T, Provenance>> for Point<T, Provenance> where
    Self: PartialOrd<Point<T, Provenance>>
{
}
impl<T, Provenance> PartialEq<Point<T, Provenance>> for Point<T, Provenance>
where
    T: PartialEq,
{
    /// Equality is delegated to the wrapped values; provenance carries no data.
    fn eq(&self, other: &Point<T, Provenance>) -> bool {
        self.val.eq(&other.val)
    }
}
impl<T, Provenance> IsBot for Point<T, Provenance> {
    // Always true: in a domain of size one the single element is bottom (and top).
    fn is_bot(&self) -> bool {
        true
    }
}
impl<T, Provenance> IsTop for Point<T, Provenance> {
    // Always true: the single element of the domain is top (and bottom).
    fn is_top(&self) -> bool {
        true
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::test::{
        check_all, check_lattice_ord, check_lattice_properties, check_partial_ord_properties,
    };

    // All lattice laws hold trivially when every element is the same point.
    #[test]
    fn consistency_equal() {
        check_all(&[Point::<_, ()>::new("hello world")])
    }

    // With distinct points the structure is *not* a lattice; every checker
    // must observe the merge/order panics.
    #[test]
    fn consistency_inequal() {
        use std::collections::BTreeSet;
        let items: &[Point<_, ()>] = &[
            Point::new(BTreeSet::from_iter([])),
            Point::new(BTreeSet::from_iter([0])),
            Point::new(BTreeSet::from_iter([1])),
            Point::new(BTreeSet::from_iter([0, 1])),
        ];
        // Merged inequal elements panic, therefore `NaiveMerge` panics.
        assert!(std::panic::catch_unwind(|| check_lattice_ord(items)).is_err());
        // `Point` does not have a partial order.
        assert!(std::panic::catch_unwind(|| check_partial_ord_properties(items)).is_err());
        // `Point` is not actually a lattice.
        assert!(std::panic::catch_unwind(|| check_lattice_properties(items)).is_err());
    }
}
|
use rbs::Value;
use std::fmt::{Debug, Display, Formatter};
use std::str::FromStr;
use crate::Error;
use serde::Deserializer;
#[derive(serde::Serialize, Clone, Eq, PartialEq, Hash)]
#[serde(rename = "Json")]
// Raw JSON text stored verbatim as a string.
pub struct Json(pub String);
impl<'de> serde::Deserialize<'de> for Json {
    // Deserializes via an intermediate `rbs::Value`, then renders it to JSON
    // text with the `From<Value>` impl below.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Json::from(Value::deserialize(deserializer)?))
    }
}
impl Default for Json {
fn default() -> Self {
Self {
0: "null".to_string(),
}
}
}
impl From<serde_json::Value> for Json {
    /// Renders the `serde_json::Value` to compact JSON text.
    fn from(arg: serde_json::Value) -> Self {
        let text = arg.to_string();
        Json(text)
    }
}
impl From<Value> for Json {
    /// Renders an `rbs::Value` as JSON text.
    ///
    /// Scalars use their `Display` form; bare strings are quoted into JSON
    /// string literals; already-JSON-looking strings are passed through.
    fn from(v: Value) -> Self {
        match v {
            Value::Null => Json(v.to_string()),
            Value::Bool(v) => Json(v.to_string()),
            Value::I32(v) => Json(v.to_string()),
            Value::I64(v) => Json(v.to_string()),
            Value::U32(v) => Json(v.to_string()),
            Value::U64(v) => Json(v.to_string()),
            Value::F32(v) => Json(v.to_string()),
            Value::F64(v) => Json(v.to_string()),
            Value::String(mut v) => {
                if (v.starts_with("{") && v.ends_with("}"))
                    || (v.starts_with("[") && v.ends_with("]"))
                    || (v.starts_with("\"") && v.ends_with("\""))
                {
                    //is json-string
                    Json(v)
                } else {
                    // Bare string: wrap in quotes to form a JSON string literal.
                    v.insert(0, '"');
                    v.push('"');
                    Json(v)
                }
            }
            // BUG FIX: the original used `String::from_utf8_unchecked`, which is
            // undefined behavior if the database hands back non-UTF-8 bytes.
            // Use a checked, lossy conversion (invalid sequences become U+FFFD).
            Value::Binary(v) => Json(String::from_utf8_lossy(&v).into_owned()),
            Value::Array(_) => Json(v.to_string()),
            Value::Map(v) => Json(v.to_string()),
            Value::Ext(_name, v) => Json::from(*v),
        }
    }
}
impl Display for Json {
    /// Renders as `Json(<raw json text>)`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str("Json(")?;
        f.write_str(&self.0)?;
        f.write_str(")")
    }
}
impl Debug for Json {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "Json({})", self.0)
}
}
impl From<Json> for Value {
    // Wraps the raw JSON text in an `Ext("Json", ..)` so the driver can tell
    // it apart from a plain string value.
    fn from(arg: Json) -> Self {
        Value::Ext("Json", Box::new(Value::String(arg.0)))
    }
}
impl FromStr for Json {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Self(s.to_string()))
}
}
#[cfg(test)]
mod test {
    use crate::json::Json;
    use rbs::value::map::ValueMap;

    // A string that already looks like a JSON string literal passes through unchanged.
    #[test]
    fn test_decode_js_string() {
        let m = rbs::Value::String(r#""aa""#.to_string());
        println!("{}", m);
        assert_eq!(r#""aa""#, Json::from(m).0);
    }

    // Maps render as JSON objects (string values stay quoted).
    #[test]
    fn test_decode_js_string_map() {
        let mut m = ValueMap::new();
        m.insert("a".into(), "1".into());
        let m = rbs::Value::Map(m);
        println!("{}", m.to_string());
        assert_eq!(r#"{"a":"1"}"#, Json::from(m).0);
    }

    // Integer map values render unquoted.
    #[test]
    fn test_decode_js_int_map() {
        let mut m = ValueMap::new();
        m.insert("a".into(), 1.into());
        let m = rbs::Value::Map(m);
        println!("{}", m.to_string());
        assert_eq!(r#"{"a":1}"#, Json::from(m).0);
    }

    // Arrays of integers render as JSON arrays.
    #[test]
    fn test_decode_js_int_arr() {
        let arr = rbs::Value::Array(vec![rbs::Value::I64(1), rbs::Value::I64(2)]);
        println!("{}", arr.to_string());
        assert_eq!(r#"[1,2]"#, Json::from(arr).0);
    }

    // Arrays of strings keep the element quoting.
    #[test]
    fn test_decode_js_string_arr() {
        let arr = rbs::Value::Array(vec![
            rbs::Value::String(1.to_string()),
            rbs::Value::String(2.to_string()),
        ]);
        println!("{}", arr.to_string());
        assert_eq!(r#"["1","2"]"#, Json::from(arr).0);
    }
}
|
//! Manages searches for ...
//!
//! ? `Coords` and `SigningPublicKey`s for nodes for which the IPv6 is known
use crate::{dev::*, types::NodeID};
/// Manages node searches (see module docs: resolving `Coords` /
/// `SigningPublicKey`s for nodes whose IPv6 is known).
///
/// NOTE(review): the original `?? Handle<StartSearch>` marker suggests this is
/// intended to eventually handle a `StartSearch` message — unconfirmed.
pub trait SearchManager<C: Core>: Sized {
    // ///
    // type Router: <C as Core>::Router;
    /// Information about an ongoing search.
    ///
    type Search: Search<C, Self>;
    fn reconfigure(&mut self);
    // fn new_search(&self, dest: NodeID, mask: NodeID) -> Result<&SearchInfo, Error>;
}
/// An ongoing search.
///
/// NOTE(review): per the commented-out bound below, this is intended to
/// eventually be pollable to completion, producing a session — unconfirmed.
pub trait Search<C: Core, S: SearchManager<C>>
// where
//     Self: ActorFuture<Actor = Self, Output = Addr<S>> + Actor,
//     S: Session,
{
}
|
// Fetches the named capture group `$name` from regex `$captures` and parses it
// with `str::parse`. Unwraps if the group is absent (the pattern must make it
// mandatory) and propagates parse failures with `?`, so it is only usable
// inside a function returning a compatible `Result`.
macro_rules! parse_name_from_captures{
    ($captures:expr, $name:expr) => {$captures.name($name).unwrap().as_str().parse()?};
}
|
#[cfg(feature = "create")]
mod create_direct;
mod single_array;
#[cfg(feature = "create")]
pub(crate) use create_direct::*;
pub(crate) use single_array::*;
pub(crate) use std::path::PathBuf;
|
use proc_macro2::TokenTree;
use syn::Macro;
use syn::ItemUse;
use syn::Visibility;
use syn::ItemMod;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
use syn::Attribute;
use syn::Signature;
use syn::Binding;
use syn::UseRename;
use syn::{UsePath, UseTree};
use proc_macro2::Span;
use syn::{Ident, Token};
use syn::visit_mut::{self, VisitMut};
use quote::{format_ident, quote};
mod map_runes;
mod map_cuneiform;
mod map_hieroglyph;
/// Renders `ident` to text, runs it through `convert`, and mints a new `Ident`.
fn map_ident(ident: &Ident, convert: fn(&str) -> String) -> Ident {
    format_ident!("{}", convert(&ident.to_string()))
}
// AST visitor that renames every identifier via `convert`, while tracking
// `use`-tree context so renamed imports remain resolvable.
struct V {
    use_stack: Vec<Ident>,          // path segments of the `use` tree currently being visited
    use_vis_stack: Vec<Visibility>, // visibility of the enclosing `use` item(s)
    need_rename: Vec<Vec<(Visibility, Ident)>>, // per scope: names needing a `use .. as ..` alias
    convert: fn(&str) -> String,    // the identifier conversion function
}
impl V {
fn new(convert: fn(&str) -> String) -> Self {
Self {
use_stack: vec![],
use_vis_stack: vec![],
need_rename: vec![],
convert,
}
}
}
impl VisitMut for V {
fn visit_ident_mut(&mut self, i: &mut Ident) {
*i = map_ident(i, self.convert);
visit_mut::visit_ident_mut(self, i);
}
fn visit_attribute_mut(&mut self, _: &mut Attribute) {
// nop
}
fn visit_binding_mut(&mut self, i: &mut Binding) {
visit_mut::visit_type_mut(self, &mut i.ty)
}
fn visit_item_mod_mut(&mut self, i: &mut ItemMod) {
for it in &mut i.attrs {
self.visit_attribute_mut(it);
}
self.visit_visibility_mut(&mut i.vis);
// no ident changes
// rename after
self.need_rename.last_mut().unwrap().push((i.vis.clone(), i.ident.clone()));
if let Some((_, items)) = &mut i.content {
let mut newitems = vec![];
for it in &mut *items {
self.need_rename.push(vec![]);
self.visit_item_mut(it);
newitems.push(it.clone());
for (visibility, need_rename) in self.need_rename.pop().unwrap() {
let rename = map_ident(&need_rename, self.convert);
let item = syn::parse2(quote! {
#[allow(unused_imports)]
#visibility use #need_rename as #rename;
}).unwrap();
newitems.push(item);
}
}
*items = newitems;
};
}
fn visit_use_tree_mut(&mut self, i: &mut UseTree) {
match i {
UseTree::Name(name) => {
let ident = name.ident.clone();
let rename = if ident == "self" && !self.use_stack.is_empty() {
map_ident(self.use_stack.last().unwrap(), self.convert)
} else {
map_ident(&ident, self.convert)
};
*i = UseTree::Rename(UseRename {
ident,
as_token: Token),
rename,
});
}
_ => visit_mut::visit_use_tree_mut(self, i)
}
}
fn visit_use_rename_mut(&mut self, i: &mut UseRename) {
if i.rename != "_" {
self.need_rename.last_mut().unwrap().push((self.use_vis_stack.last().unwrap().clone(), i.rename.clone()));
}
}
fn visit_use_path_mut(&mut self, i: &mut UsePath) {
self.use_stack.push(i.ident.clone());
self.visit_use_tree_mut(&mut i.tree);
self.use_stack.pop();
}
fn visit_item_use_mut(&mut self, i: &mut ItemUse) {
self.use_vis_stack.push(i.vis.clone());
visit_mut::visit_item_use_mut(self, i);
self.use_vis_stack.pop();
}
fn visit_signature_mut(&mut self, i: &mut Signature) {
//https://github.com/rust-lang/rust/issues/28937
if i.ident != "main" && !i.inputs.is_empty() {
visit_mut::visit_signature_mut(self, i)
}
}
fn visit_macro_mut(&mut self, i: &mut Macro) {
self.visit_path_mut(&mut i.path);
self.visit_macro_delimiter_mut(&mut i.delimiter);
i.tokens = i.tokens.clone().into_iter().map(|t| match t {
TokenTree::Ident(ident) => map_ident(&ident, self.convert).into(),
_ => t,
}).collect();
}
fn visit_file_mut(&mut self, i: &mut syn::File) {
let attrs = syn::parse2::<syn::File>(quote! {
//remove becuse cargo not work.
//#![no_implicit_prelude]
#![allow(uncommon_codepoints)]
}).unwrap();
for attr in attrs.attrs {
i.attrs.push(attr.clone());
}
let prelude = syn::parse2(quote!{
mod prelude {
#![allow(unused_imports)]
use std::marker::{Copy, Send, Sized, Sync, Unpin};
use std::ops::{Drop, Fn, FnMut, FnOnce};
use std::mem::drop;
use std::boxed::Box;
use std::borrow::ToOwned;
use std::clone::Clone;
use std::cmp::{PartialEq, PartialOrd, Eq, Ord};
use std::convert::{AsRef, AsMut, Into, From};
use std::default::Default;
use std::iter::{Iterator, Extend, IntoIterator, DoubleEndedIterator, ExactSizeIterator};
use std::option::Option::{self, Some, None};
use std::result::Result::{self, Ok, Err};
use std::string::{String, ToString};
use std::vec::Vec;
}
}).unwrap();
let useprelude = syn::parse2(quote!{
#[allow(unused_imports)]
use prelude::*;
}).unwrap();
let usemacro = syn::parse2(quote! {
#[allow(unused_imports)]
use std::{assert, assert_eq, assert_ne, cfg, column, compile_error, concat, dbg, debug_assert, debug_assert_eq, debug_assert_ne, env, eprint, eprintln, file, format, format_args, include, include_bytes, include_str, is_x86_feature_detected, line, matches, module_path, option_env, panic, print, println, stringify, thread_local, todo, unimplemented, unreachable, vec, write, writeln};
}).unwrap();
i.items.insert(0, usemacro);
i.items.insert(0, useprelude);
i.items.insert(0, prelude);
for it in &mut i.attrs {
self.visit_attribute_mut(it);
}
let mut newitems = vec![];
for it in &mut *i.items {
self.need_rename.push(vec![]);
self.visit_item_mut(it);
newitems.push(it.clone());
for (visibility, need_rename) in self.need_rename.pop().unwrap() {
let rename = map_ident(&need_rename, self.convert);
let item = syn::parse2(quote! {
#[allow(unused_imports)]
#visibility use #need_rename as #rename;
}).unwrap();
newitems.push(item);
}
}
i.items = newitems;
}
}
/// Reads Rust source from `reader`, renames all identifiers through `convert`,
/// and writes the transformed program to `writer`.
///
/// # Errors
///
/// Returns any I/O error from reading/writing; a source file that fails to
/// parse is reported as `io::ErrorKind::InvalidData` (the original `unwrap`ped
/// and panicked instead of using the declared `io::Result`).
fn historify<R, W>(mut reader: R, mut writer: W, convert: fn(&str) -> String) -> io::Result<()> where R: Read, W: Write {
    let mut src = String::new();
    reader.read_to_string(&mut src)?;
    let mut ast = syn::parse_file(&src)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
    V::new(convert).visit_file_mut(&mut ast);
    let prog = quote::quote! {
        #ast
    };
    write!(writer, "{}", prog)?;
    Ok(())
}
/// Entry point: transforms the file named by argv[1], or stdin when absent,
/// writing the result to stdout.
fn main() -> io::Result<()> {
    let fun = map_hieroglyph::convert;
    // Propagate open failures via `?` (the original `unwrap`ped despite the
    // io::Result return type).
    match env::args().nth(1) {
        Some(fname) => historify(File::open(fname)?, io::stdout(), fun),
        None => historify(io::stdin(), io::stdout(), fun),
    }
}
|
#[doc = "Register `CRYP_CSGCM6R` reader"]
pub type R = crate::R<CRYP_CSGCM6R_SPEC>;
#[doc = "Register `CRYP_CSGCM6R` writer"]
pub type W = crate::W<CRYP_CSGCM6R_SPEC>;
#[doc = "Field `CSGCM6` reader - CSGCM6"]
pub type CSGCM6_R = crate::FieldReader<u32>;
#[doc = "Field `CSGCM6` writer - CSGCM6"]
pub type CSGCM6_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 32, O, u32>;
impl R {
    #[doc = "Bits 0:31 - CSGCM6"]
    #[inline(always)]
    pub fn csgcm6(&self) -> CSGCM6_R {
        // The CSGCM6 field spans the entire 32-bit register.
        CSGCM6_R::new(self.bits)
    }
}
impl W {
    #[doc = "Bits 0:31 - CSGCM6"]
    #[inline(always)]
    #[must_use]
    pub fn csgcm6(&mut self) -> CSGCM6_W<CRYP_CSGCM6R_SPEC, 0> {
        CSGCM6_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Safety contract (svd2rust convention): the caller must ensure the raw
    // value is valid for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Please refer to Section39.6.21: CRYP context swap GCM-CCM registers (CRYP_CSGCMCCMxR) for details.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cryp_csgcm6r::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cryp_csgcm6r::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CRYP_CSGCM6R_SPEC;
// Marker type for the CRYP_CSGCM6R register (svd2rust generated); 32 bits wide.
impl crate::RegisterSpec for CRYP_CSGCM6R_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cryp_csgcm6r::R`](R) reader structure"]
impl crate::Readable for CRYP_CSGCM6R_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cryp_csgcm6r::W`](W) writer structure"]
impl crate::Writable for CRYP_CSGCM6R_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CRYP_CSGCM6R to value 0"]
impl crate::Resettable for CRYP_CSGCM6R_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::actions::Actions;
use crate::digger::{DiggerState, FuelUpgrade, WasteCollected};
use crate::loading::AudioAssets;
use crate::GameState;
use bevy::prelude::*;
use bevy_kira_audio::{Audio, AudioChannel, AudioPlugin};
/// Bevy plugin wiring game audio (looping flying/digging channels plus one-shot pickup sounds).
pub struct InternalAudioPlugin;
impl Plugin for InternalAudioPlugin {
    // Registers the channel resource, the kira audio plugin, and the
    // per-state system sets. Registration order matters: the resource must
    // exist before systems that take `Res<AudioChannels>` run.
    fn build(&self, app: &mut AppBuilder) {
        app.insert_resource(AudioChannels {
            digging: AudioChannel::new("digging".to_owned()),
            flying: AudioChannel::new("flying".to_owned()),
        })
        .add_plugin(AudioPlugin)
        // Prime the loops when gameplay starts...
        .add_system_set(SystemSet::on_enter(GameState::Playing).with_system(start_audio.system()))
        // ...drive them (and the pickup one-shots) every frame while playing...
        .add_system_set(
            SystemSet::on_update(GameState::Playing)
                .with_system(play_flying_and_digging_sounds.system())
                .with_system(collect_waste.system())
                .with_system(collect_fuel.system()),
        )
        // ...and tear them down on exit.
        .add_system_set(SystemSet::on_exit(GameState::Playing).with_system(stop_audio.system()));
    }
}
// Named kira channels so the two loops can be paused/resumed independently.
struct AudioChannels {
    flying: AudioChannel,
    digging: AudioChannel,
}
// Primes both effect loops: set volume, start looped playback, then pause
// immediately. The order (play, then pause) keeps the channels loaded so
// `play_flying_and_digging_sounds` can resume them instantly.
fn start_audio(audio_assets: Res<AudioAssets>, audio: Res<Audio>, channels: Res<AudioChannels>) {
    audio.set_volume_in_channel(0.3, &channels.flying);
    audio.set_volume_in_channel(0.3, &channels.digging);
    audio.play_looped_in_channel(audio_assets.flying.clone(), &channels.flying);
    audio.play_looped_in_channel(audio_assets.digging.clone(), &channels.digging);
    audio.pause_channel(&channels.flying);
    audio.pause_channel(&channels.digging);
}
/// Fully stops (not merely pauses) both looping channels on state exit.
fn stop_audio(audio: Res<Audio>, channels: Res<AudioChannels>) {
    audio.stop_channel(&channels.digging);
    audio.stop_channel(&channels.flying);
}
/// Resumes or pauses the two loops each frame based on player state.
fn play_flying_and_digging_sounds(
    digger_state: Res<DiggerState>,
    actions: Res<Actions>,
    audio: Res<Audio>,
    channels: Res<AudioChannels>,
) {
    // Thruster loop runs only while the player is flying.
    if actions.flying {
        audio.resume_channel(&channels.flying);
    } else {
        audio.pause_channel(&channels.flying);
    }
    // Digging loop runs only while something is being mined.
    match &digger_state.mining_target {
        Some(_) => audio.resume_channel(&channels.digging),
        None => audio.pause_channel(&channels.digging),
    }
}
/// Plays one waste-pickup sound per `WasteCollected` event this frame.
fn collect_waste(
    mut events: EventReader<WasteCollected>,
    audio: Res<Audio>,
    audio_assets: Res<AudioAssets>,
) {
    let pickups = events.iter().count();
    for _ in 0..pickups {
        audio.play(audio_assets.waste.clone());
    }
}
/// Plays one fuel-upgrade sound per `FuelUpgrade` event this frame.
fn collect_fuel(
    mut events: EventReader<FuelUpgrade>,
    audio: Res<Audio>,
    audio_assets: Res<AudioAssets>,
) {
    let upgrades = events.iter().count();
    for _ in 0..upgrades {
        audio.play(audio_assets.fuel.clone());
    }
}
|
use chrono::{serde::ts_seconds, DateTime, Utc};
use serde_derive::{Deserialize, Serialize};
use sqlx::postgres::PgConnection;
///////////////////////////////////////////////////////////////////////////////
/// A row of the `scope` table as exposed over the API.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Object {
    pub id: i64,
    pub scope: String,
    pub app: String,
    pub frontend_id: i64,
    // Serialized as Unix seconds, not RFC 3339.
    #[serde(with = "ts_seconds")]
    pub created_at: DateTime<Utc>,
}
/// Fetches every row of the `scope` table.
#[derive(Debug)]
pub(crate) struct ListQuery {}
impl ListQuery {
    pub fn new() -> Self {
        Self {}
    }
    /// Runs `SELECT * FROM scope`; `query_as!` checks the row shape
    /// against [`Object`] at compile time.
    pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Vec<Object>> {
        sqlx::query_as!(
            Object,
            r#"
            SELECT *
            FROM scope
            "#,
        )
        .fetch_all(conn)
        .await
    }
}
/// Deletes every row whose `scope` column matches the given value.
#[derive(Debug)]
pub(crate) struct DeleteQuery {
    scope: String,
}
impl DeleteQuery {
    pub(crate) fn new(scope: String) -> Self {
        DeleteQuery { scope }
    }
    /// Executes the delete, discarding the affected-row count.
    pub(crate) async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<()> {
        let query = sqlx::query!("DELETE FROM scope WHERE scope = $1", self.scope);
        query.execute(conn).await?;
        Ok(())
    }
}
|
mod contentdirectory;
pub use self::contentdirectory::ContentDirectory;
mod connectionmanager;
pub use self::connectionmanager::ConnectionManager;
|
use super::prelude::*;
use std::fmt::Write;
/// An `if` expression: condition, consequence block and optional `else` block.
#[derive(Debug, Clone, PartialEq)]
pub struct If {
    pub cond: Box<Expr>,
    pub consequence: Block,
    // `None` when the source has no `else` branch.
    pub alternative: Option<Block>,
}
impl Display for If {
    /// Renders `if <cond> <consequence>` plus an optional ` else <block>`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Write straight into the formatter instead of building an
        // intermediate String first — same output, no allocation per call.
        write!(f, "if {} {}", self.cond, self.consequence)?;
        if let Some(alt) = &self.alternative {
            write!(f, " else {}", alt)?;
        }
        Ok(())
    }
}
impl TryFrom<Expr> for If {
    type Error = Error;
    /// Unwraps an `Expr::If`; every other variant is a conversion error.
    fn try_from(value: Expr) -> Result<Self> {
        match value {
            Expr::If(if_expr) => Ok(if_expr),
            other => Err(ParserError::Convert(format!("{:?}", other), "If".into()).into()),
        }
    }
}
|
use crate::game::{town::Town, Game};
use crate::gui::ui_state::ClockTick;
use crate::init::quicksilver_integration::Signal;
use crate::prelude::*;
use crate::view::Frame;
use quicksilver::graphics::Mesh;
use quicksilver::prelude::Window;
use specs::WorldExt;
use std::marker::PhantomData;
// Frame responsible for rendering the town view.
pub(crate) struct TownFrame<'a, 'b> {
    // Carries the lifetimes required by `Frame::State = Game<'a, 'b>`.
    phantom: PhantomData<(&'a (), &'b ())>,
    // Graphics optimization: the static background mesh is rendered once
    // and re-used on every draw.
    pub background_cache: Option<Mesh>,
}
impl<'a, 'b> TownFrame<'a, 'b> {
    /// Creates a frame with an empty (not yet rendered) background cache.
    pub fn new() -> Self {
        Self {
            background_cache: None,
            phantom: PhantomData,
        }
    }
}
impl<'a, 'b> Frame for TownFrame<'a, 'b> {
    type Error = PadlError;
    type State = Game<'a, 'b>;
    type Graphics = Window;
    type Event = PadlEvent;
    type Signal = Signal;
    /// Draws the cached town background mesh, the town itself, and then
    /// the town entities on top.
    fn draw(
        &mut self,
        state: &mut Self::State,
        window: &mut Self::Graphics,
    ) -> Result<(), Self::Error> {
        {
            let unit_len = state.world.fetch::<ScreenResolution>().unit_length();
            let clock = state.world.read_resource::<ClockTick>().0;
            let sprites = &mut state.sprites;
            let town = state.world.fetch::<Town>();
            // Lazily render the static background into the cache on first draw.
            if self.background_cache.is_none() {
                self.background_cache = Some(Mesh::new());
                town.render_background(self.background_cache.as_mut().unwrap(), sprites, unit_len)?;
            }
            window
                .mesh()
                .extend(self.background_cache.as_ref().unwrap());
            town.render(window, sprites, clock, unit_len)?;
        }
        state.render_town_entities(window)?;
        Ok(())
    }
}
|
/// Reads the file named by the first CLI argument, panicking with a usage
/// message when no argument is given.
fn main() {
    // Collect up front: `std::env::args()` is an iterator and cannot be
    // indexed directly (the original `args[1]` did not compile).
    let args: Vec<String> = std::env::args().collect();
    if args.len() < 2 {
        panic!("Please provide a file");
    }
    // Removed a stray `panic!(args.first())` debug leftover: `Args` has no
    // `first()` and the unconditional panic made the rest unreachable.
    let path = std::path::Path::new(&args[1]);
    // `fs::read_to_string` replaces the misspelled
    // `File::open(..).read_to_stirng()` (no such method exists on `File`).
    let _contents = std::fs::read_to_string(path).unwrap();
}
|
use std::{
ffi::OsString,
fs::{File, OpenOptions},
io::Write,
os::fd::AsRawFd,
};
use input_linux::{
sys::{input_event, BUS_VIRTUAL},
AbsoluteAxis, AbsoluteEvent, AbsoluteInfo, AbsoluteInfoSetup, EventKind, EventTime, InputEvent,
InputId, InputProperty, Key, KeyEvent, KeyState, SynchronizeEvent, UInputHandle,
};
use crate::VitaVirtualDevice;
// Per-slot touch tracking id as reported by the Vita.
type TrackingId = u8;
// No tunable options yet; kept for the `VitaVirtualDevice<Config>` interface.
pub struct Config {}
/// A pair of Linux uinput devices emulating a PS Vita.
pub struct VitaDevice<F: AsRawFd> {
    // Buttons, sticks and the front touchscreen.
    main_handle: UInputHandle<F>,
    // Motion sensors and the back touch surface — a separate device because
    // sensors can't share a node with directional axes (see `new`).
    sensor_handle: UInputHandle<F>,
    // Last tracking id seen per front-touch slot (6 slots) — used to emit
    // tracking-id "-1" resets when a finger lifts.
    previous_front_touches: [Option<TrackingId>; 6],
    // Same for the 4 back-touch slots.
    previous_back_touches: [Option<TrackingId>; 4],
    // evdev node names of both devices, when they could be queried.
    ids: Option<Vec<OsString>>,
}
/// Errors raised while creating or driving the virtual devices.
#[derive(thiserror::Error, Debug)]
#[non_exhaustive]
pub enum Error {
    #[error("Failed to create uinput device")]
    DeviceCreationFailed(#[source] std::io::Error),
    #[error("Failed to write uinput device event")]
    WriteEventFailed(#[source] std::io::Error),
}
impl<F: AsRawFd> VitaDevice<F> {
    /// Registers both uinput devices, declaring every key, axis and
    /// multitouch range they will emit.
    ///
    /// # Errors
    /// Propagates any uinput ioctl/write failure as `std::io::Error`.
    pub fn new(uinput_file: F, uinput_sensor_file: F) -> std::io::Result<Self> {
        // ---- Main device: buttons, sticks, front touchscreen ----
        let main_handle = UInputHandle::new(uinput_file);
        main_handle.set_evbit(EventKind::Key)?;
        main_handle.set_keybit(Key::ButtonSouth)?;
        main_handle.set_keybit(Key::ButtonEast)?;
        main_handle.set_keybit(Key::ButtonNorth)?;
        main_handle.set_keybit(Key::ButtonWest)?;
        main_handle.set_keybit(Key::ButtonTL)?;
        main_handle.set_keybit(Key::ButtonTR)?;
        main_handle.set_keybit(Key::ButtonStart)?;
        main_handle.set_keybit(Key::ButtonSelect)?;
        main_handle.set_keybit(Key::ButtonDpadUp)?;
        main_handle.set_keybit(Key::ButtonDpadDown)?;
        main_handle.set_keybit(Key::ButtonDpadLeft)?;
        main_handle.set_keybit(Key::ButtonDpadRight)?;
        main_handle.set_evbit(EventKind::Absolute)?;
        // Shared axis description for all four stick axes (0..=255 range).
        let joystick_abs_info = AbsoluteInfo {
            flat: 128,
            fuzz: 0, // Already fuzzed
            maximum: 255,
            minimum: 0,
            resolution: 255,
            ..Default::default()
        };
        let joystick_x_info = AbsoluteInfoSetup {
            info: joystick_abs_info,
            axis: AbsoluteAxis::X,
        };
        let joystick_y_info = AbsoluteInfoSetup {
            info: joystick_abs_info,
            axis: AbsoluteAxis::Y,
        };
        let joystick_rx_info = AbsoluteInfoSetup {
            info: joystick_abs_info,
            axis: AbsoluteAxis::RX,
        };
        let joystick_ry_info = AbsoluteInfoSetup {
            info: joystick_abs_info,
            axis: AbsoluteAxis::RY,
        };
        // Touchscreen (front)
        let front_mt_x_info = AbsoluteInfoSetup {
            info: AbsoluteInfo {
                minimum: 0,
                maximum: 1919,
                ..Default::default()
            },
            axis: AbsoluteAxis::MultitouchPositionX,
        };
        let front_mt_y_info = AbsoluteInfoSetup {
            info: AbsoluteInfo {
                minimum: 0,
                maximum: 1087,
                ..Default::default()
            },
            axis: AbsoluteAxis::MultitouchPositionY,
        };
        let front_mt_id_info = AbsoluteInfoSetup {
            info: AbsoluteInfo {
                minimum: 0,
                maximum: 128,
                ..Default::default()
            },
            axis: AbsoluteAxis::MultitouchTrackingId,
        }; //TODO: Query infos
        let front_mt_slot_info = AbsoluteInfoSetup {
            info: AbsoluteInfo {
                minimum: 0,
                maximum: 5,
                ..Default::default()
            },
            axis: AbsoluteAxis::MultitouchSlot,
        }; // According to vitasdk docs
        let front_mt_pressure_info = AbsoluteInfoSetup {
            info: AbsoluteInfo {
                minimum: 1,
                maximum: 128,
                ..Default::default()
            },
            axis: AbsoluteAxis::MultitouchPressure,
        }; //TODO: Query infos
        // Vendor 0x54c is Sony; product/version chosen for this virtual pad.
        let id = InputId {
            bustype: BUS_VIRTUAL,
            vendor: 0x54c,
            product: 0x2d2,
            version: 2,
        };
        main_handle.create(
            &id,
            b"PS Vita",
            0,
            &[
                joystick_x_info,
                joystick_y_info,
                joystick_rx_info,
                joystick_ry_info,
                front_mt_x_info,
                front_mt_y_info,
                front_mt_id_info,
                front_mt_slot_info,
                front_mt_pressure_info,
            ],
        )?;
        // Have to create another device because sensors can't be mixed with directional axes
        // and we can't assign the back touch surface along with the touchscreen.
        // So this second device contains info for the motion sensors and the back touch surface.
        let sensor_handle = UInputHandle::new(uinput_sensor_file);
        sensor_handle.set_evbit(EventKind::Absolute)?;
        sensor_handle.set_propbit(InputProperty::Accelerometer)?;
        let accel_abs_info = AbsoluteInfo {
            minimum: -16,
            maximum: 16,
            ..Default::default()
        };
        let accel_x_info = AbsoluteInfoSetup {
            info: accel_abs_info,
            axis: AbsoluteAxis::X,
        };
        let accel_y_info = AbsoluteInfoSetup {
            info: accel_abs_info,
            axis: AbsoluteAxis::Y,
        };
        let accel_z_info = AbsoluteInfoSetup {
            info: accel_abs_info,
            axis: AbsoluteAxis::Z,
        };
        let gyro_abs_info = AbsoluteInfo {
            minimum: -1,
            maximum: 1,
            ..Default::default()
        };
        let gyro_x_info = AbsoluteInfoSetup {
            info: gyro_abs_info,
            axis: AbsoluteAxis::RX,
        };
        let gyro_y_info = AbsoluteInfoSetup {
            info: gyro_abs_info,
            axis: AbsoluteAxis::RY,
        };
        let gyro_z_info = AbsoluteInfoSetup {
            info: gyro_abs_info,
            axis: AbsoluteAxis::RZ,
        };
        // Back touch surface ranges (note the Y range differs from the front).
        let mt_x_info = AbsoluteInfoSetup {
            info: AbsoluteInfo {
                minimum: 0,
                maximum: 1919,
                ..Default::default()
            },
            axis: AbsoluteAxis::MultitouchPositionX,
        };
        let mt_y_info = AbsoluteInfoSetup {
            info: AbsoluteInfo {
                minimum: 108,
                maximum: 889,
                ..Default::default()
            },
            axis: AbsoluteAxis::MultitouchPositionY,
        };
        let mt_id_info = AbsoluteInfoSetup {
            info: AbsoluteInfo {
                minimum: 0,
                maximum: 128,
                ..Default::default()
            },
            axis: AbsoluteAxis::MultitouchTrackingId,
        };
        let mt_slot_info = AbsoluteInfoSetup {
            info: AbsoluteInfo {
                minimum: 0,
                maximum: 3,
                ..Default::default()
            },
            axis: AbsoluteAxis::MultitouchSlot,
        };
        let mt_pressure_info = AbsoluteInfoSetup {
            info: AbsoluteInfo {
                minimum: 1,
                maximum: 128,
                ..Default::default()
            },
            axis: AbsoluteAxis::MultitouchPressure,
        };
        let id = InputId {
            bustype: BUS_VIRTUAL,
            vendor: 0x54c,
            product: 0x2d3,
            version: 2,
        };
        sensor_handle.create(
            &id,
            b"PS Vita (Sensors)",
            0,
            &[
                accel_x_info,
                accel_y_info,
                accel_z_info,
                gyro_x_info,
                gyro_y_info,
                gyro_z_info,
                mt_x_info,
                mt_y_info,
                mt_id_info,
                mt_slot_info,
                mt_pressure_info,
            ],
        )?;
        // Only report ids when BOTH evdev names could be queried.
        let ids = main_handle
            .evdev_name()
            .ok()
            .zip(sensor_handle.evdev_name().ok())
            .map(|(main, sensor)| [main, sensor].to_vec());
        Ok(VitaDevice {
            main_handle,
            sensor_handle,
            previous_front_touches: [None; 6],
            previous_back_touches: [None; 4],
            ids,
        })
    }
}
impl VitaDevice<File> {
pub fn create() -> crate::Result<Self> {
let uinput_file = OpenOptions::new()
.read(true)
.write(true)
.open("/dev/uinput")
.map_err(Error::DeviceCreationFailed)?;
let uinput_sensor_file = OpenOptions::new()
.read(true)
.write(true)
.open("/dev/uinput")
.map_err(Error::DeviceCreationFailed)?;
let device =
Self::new(uinput_file, uinput_sensor_file).map_err(Error::DeviceCreationFailed)?;
Ok(device)
}
}
impl<F: AsRawFd + Write> VitaVirtualDevice<Config> for VitaDevice<F> {
    type Config = Config;
    /// evdev node names of the two devices, if known.
    fn identifiers(&self) -> Option<&[OsString]> {
        self.ids.as_ref().map(|ids| ids.as_slice())
    }
    // No tunable options yet; accept and ignore the config.
    fn set_config(&mut self, config: Config) -> crate::Result<()> {
        Ok(())
    }
    /// Translates one Vita report into uinput events on both devices,
    /// each followed by a SYN_REPORT.
    fn send_report(&mut self, report: vita_reports::MainReport) -> crate::Result<()> {
        const EVENT_TIME_ZERO: EventTime = EventTime::new(0, 0);
        // SYN_REPORT terminator appended after each device's batch.
        let syn_event = *SynchronizeEvent::report(EVENT_TIME_ZERO)
            .as_event()
            .as_raw();
        macro_rules! key_event {
            ($report:ident, $report_name:ident, $uinput_name:ident) => {
                KeyEvent::new(
                    EVENT_TIME_ZERO,
                    Key::$uinput_name,
                    KeyState::pressed($report.buttons.$report_name),
                )
            };
        }
        macro_rules! stick_event {
            ($report:ident, $report_name:ident, $uinput_name:ident) => {
                AbsoluteEvent::new(
                    EVENT_TIME_ZERO,
                    AbsoluteAxis::$uinput_name,
                    $report.$report_name.into(),
                )
            };
        }
        macro_rules! mt_event {
            ($report:ident, $report_name:ident, $uinput_name:ident) => {
                AbsoluteEvent::new(
                    EVENT_TIME_ZERO,
                    AbsoluteAxis::$uinput_name,
                    $report.$report_name.into(),
                )
            };
        }
        macro_rules! accel_event {
            ($report:ident, $report_name:ident, $uinput_name:ident) => {
                AbsoluteEvent::new(
                    EVENT_TIME_ZERO,
                    AbsoluteAxis::$uinput_name,
                    $report.motion.accelerometer.$report_name.round() as i32,
                )
            };
        }
        macro_rules! gyro_event {
            ($report:ident, $report_name:ident, $uinput_name:ident) => {
                AbsoluteEvent::new(
                    EVENT_TIME_ZERO,
                    AbsoluteAxis::$uinput_name,
                    $report.motion.gyro.$report_name.round() as i32,
                )
            };
        }
        // Main device events
        let buttons_events: &[InputEvent] = &[
            key_event!(report, triangle, ButtonNorth),
            key_event!(report, circle, ButtonEast),
            key_event!(report, cross, ButtonSouth),
            key_event!(report, square, ButtonWest),
            key_event!(report, lt, ButtonTL),
            key_event!(report, rt, ButtonTR),
            key_event!(report, select, ButtonSelect),
            key_event!(report, start, ButtonStart),
            key_event!(report, up, ButtonDpadUp),
            key_event!(report, right, ButtonDpadRight),
            key_event!(report, down, ButtonDpadDown),
            key_event!(report, left, ButtonDpadLeft),
        ]
        .map(|ev| ev.into());
        let sticks_events = &[
            stick_event!(report, lx, X),
            stick_event!(report, ly, Y),
            stick_event!(report, rx, RX),
            stick_event!(report, ry, RY),
        ]
        .map(|ev| ev.into());
        // For every slot whose finger lifted since the last report, emit a
        // tracking-id of -1 (the evdev "contact ended" convention).
        let front_touch_resets_events = self
            .previous_front_touches
            .iter()
            .enumerate()
            .filter_map(|(slot, id)| {
                let new_id = report.front_touch.reports.get(slot).map(|r| r.id);
                match (*id, new_id) {
                    (Some(_), None) => Some([
                        AbsoluteEvent::new(
                            EVENT_TIME_ZERO,
                            AbsoluteAxis::MultitouchSlot,
                            slot as i32,
                        ),
                        AbsoluteEvent::new(EVENT_TIME_ZERO, AbsoluteAxis::MultitouchTrackingId, -1),
                    ]),
                    _ => None,
                }
            })
            .flatten()
            .map(|ev| ev.into())
            .collect::<Vec<InputEvent>>();
        // NOTE(review): assumes at most 6 front-touch reports — the
        // subtraction below underflows otherwise; confirm `vita_reports`
        // guarantees this bound.
        self.previous_front_touches = report
            .front_touch
            .reports
            .iter()
            .map(|report| Some(report.id))
            .chain(
                std::iter::repeat(None)
                    .take(self.previous_front_touches.len() - report.front_touch.reports.len()),
            )
            .collect::<Vec<Option<u8>>>()
            .try_into()
            .unwrap();
        let front_touch_events: Vec<_> = report
            .front_touch
            .reports
            .into_iter()
            .enumerate()
            .map(|(slot, report)| {
                [
                    AbsoluteEvent::new(EVENT_TIME_ZERO, AbsoluteAxis::MultitouchSlot, slot as i32),
                    mt_event!(report, x, MultitouchPositionX),
                    mt_event!(report, y, MultitouchPositionY),
                    mt_event!(report, id, MultitouchTrackingId),
                    mt_event!(report, force, MultitouchPressure),
                ]
                .map(|event| event.into())
            })
            .flatten()
            .collect::<Vec<InputEvent>>();
        let events: Vec<input_event> = [
            buttons_events,
            sticks_events,
            &front_touch_resets_events,
            &front_touch_events,
        ]
        .concat()
        .into_iter()
        .map(|ev| ev.into())
        .map(|ev: InputEvent| *ev.as_raw())
        .collect();
        self.main_handle
            .write(&events)
            .map_err(Error::WriteEventFailed)?;
        self.main_handle
            .write(&[syn_event])
            .map_err(Error::WriteEventFailed)?;
        // Sensors device events
        let motion_events: &[InputEvent] = &[
            accel_event!(report, x, X),
            accel_event!(report, y, Y),
            accel_event!(report, z, Z),
            gyro_event!(report, x, RX),
            gyro_event!(report, y, RY),
            gyro_event!(report, z, RZ),
        ]
        .map(|ev| ev.into());
        // Same lift-detection scheme as the front touches, over 4 slots.
        let back_touch_resets_events = self
            .previous_back_touches
            .iter()
            .enumerate()
            .filter_map(|(slot, id)| {
                let new_id = report.back_touch.reports.get(slot).map(|r| r.id);
                match (*id, new_id) {
                    (Some(_), None) => Some([
                        AbsoluteEvent::new(
                            EVENT_TIME_ZERO,
                            AbsoluteAxis::MultitouchSlot,
                            slot as i32,
                        ),
                        AbsoluteEvent::new(EVENT_TIME_ZERO, AbsoluteAxis::MultitouchTrackingId, -1),
                    ]),
                    _ => None,
                }
            })
            .flatten()
            .map(|ev| ev.into())
            .collect::<Vec<InputEvent>>();
        // NOTE(review): assumes at most 4 back-touch reports — see above.
        self.previous_back_touches = report
            .back_touch
            .reports
            .iter()
            .map(|report| Some(report.id))
            .chain(
                std::iter::repeat(None)
                    .take(self.previous_back_touches.len() - report.back_touch.reports.len()),
            )
            .collect::<Vec<Option<u8>>>()
            .try_into()
            .unwrap();
        let back_touch_events: Vec<_> = report
            .back_touch
            .reports
            .into_iter()
            .enumerate()
            .map(|(slot, report)| {
                [
                    AbsoluteEvent::new(EVENT_TIME_ZERO, AbsoluteAxis::MultitouchSlot, slot as i32),
                    mt_event!(report, x, MultitouchPositionX),
                    mt_event!(report, y, MultitouchPositionY),
                    mt_event!(report, id, MultitouchTrackingId),
                    mt_event!(report, force, MultitouchPressure),
                ]
                .map(|event| event.into())
            })
            .flatten()
            .collect::<Vec<InputEvent>>();
        let events: Vec<input_event> =
            [motion_events, &back_touch_resets_events, &back_touch_events]
                .concat()
                .into_iter()
                .map(|ev| ev.into())
                .map(|ev: InputEvent| *ev.as_raw())
                .collect();
        self.sensor_handle
            .write(&events)
            .map_err(Error::WriteEventFailed)?;
        self.sensor_handle
            .write(&[syn_event])
            .map_err(Error::WriteEventFailed)?;
        Ok(())
    }
}
|
// Copyright 2021 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use anyhow::Context;
use clap::Clap;
use super::RootCommand;
use crate::{config::RootConfig, state::State, storage::Storage};
#[derive(Clap, Debug, Default)]
pub(super) struct ServerCommand;
impl ServerCommand {
    /// Boots the HTTP server: loads configuration, connects storage,
    /// compiles templates and listens on the configured address.
    pub async fn run(&self, root: &RootCommand) -> anyhow::Result<()> {
        let config: RootConfig = root.load_config()?;
        // Connect to the database
        let db_pool = config.database.connect().await?;
        let storage = Storage::new(db_pool).with_static_clients(&config.oauth2.clients);
        // Load and compile the templates
        let templates = crate::templates::load().context("could not load templates")?;
        // Create the shared state
        let shared_state = State::new(config, templates, storage);
        let listen_address = shared_state.config().http.address.clone();
        // Start the server
        let mut app = tide::with_state(shared_state);
        app.with(tide_tracing::TraceMiddleware::new());
        crate::handlers::install(&mut app);
        app.listen(listen_address)
            .await
            .context("could not start server")?;
        Ok(())
    }
}
|
use binance_async::{
model::websocket::{BinanceWebsocketMessage, Subscription},
BinanceWebsocket,
};
use sqlx::PgPool;
use std::{error::Error, time::SystemTime};
use tokio::stream::StreamExt;
#[tokio::main]
/// Streams all-ticker updates from Binance and persists them to Postgres,
/// pruning rows older than `DATA_INTERVAL` seconds.
async fn main() -> Result<(), Box<dyn Error>> {
    dotenv::dotenv().ok();
    let mut socket = BinanceWebsocket::default();
    socket.subscribe(Subscription::TickerAll).await?;
    let pool = PgPool::new(&std::env::var("DATABASE_URL")?).await?;
    // Timestamp (ms) assigned to the last processed batch; None until the first.
    let mut timestamp: Option<i64> = None;
    // Retention window used by the DELETE clause below.
    let interval: i64 = std::env::var("DATA_INTERVAL")?.parse()?;
    // Throw away first few results for better timing.
    for _ in 0..3 {
        socket.try_next().await?;
    }
    while let Some(message) = socket.try_next().await? {
        if let BinanceWebsocketMessage::TickerAll(tickers) = message {
            // Guard: with zero tickers the VALUES list below would be empty
            // and `sql.pop()` would strip the 'S' of "VALUES", producing
            // malformed SQL that fails the whole run.
            if tickers.is_empty() {
                continue;
            }
            timestamp = Some(if let Some(now) = timestamp {
                let next = SystemTime::now()
                    .duration_since(SystemTime::UNIX_EPOCH)?
                    .as_millis() as i64;
                let expected_next = now + 1000;
                // Check if something went wrong with the timing, adjust if necessary.
                if (next - expected_next).abs() >= 500 {
                    next
                } else {
                    expected_next
                }
            } else {
                SystemTime::now()
                    .duration_since(SystemTime::UNIX_EPOCH)?
                    .as_millis() as i64
            });
            // Build query: insert every ticker, then prune rows outside the window.
            let mut sql = String::new();
            sql.push_str("WITH inserted AS (INSERT INTO tickers (symbol, value, timestamp) VALUES");
            for i in 0..tickers.len() {
                sql.push_str(&format!(
                    " (${}, ${}, ${}),",
                    i * 3 + 1,
                    i * 3 + 2,
                    i * 3 + 3
                ));
            }
            // Drop the trailing comma of the VALUES list.
            sql.pop();
            sql.push_str(&format!(
                " RETURNING timestamp) DELETE FROM tickers WHERE timestamp <= (SELECT MAX(timestamp) - {} FROM inserted)",
                interval
            ));
            // Bind values to query.
            let mut query = sqlx::query(&sql);
            for ticker in &tickers {
                query = query.bind(&ticker.symbol);
                query = query.bind(ticker.current_close);
                query = query.bind(timestamp.unwrap() / 1000);
            }
            query.execute(&pool).await?;
        }
    }
    Ok(())
}
|
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let params = vault_iam_auth::Parameters {
iam_server_id: None,
mount_path: String::from("aws"),
role: String::from("my-role"),
vault_address: String::from("https://vault.address.com:8200"),
};
let response: serde_json::Value = vault_iam_auth::authenticate(¶ms).await?;
let token = response
.get("auth")
.unwrap()
.get("client_token")
.unwrap()
.as_str()
.unwrap();
println!("{}", token);
Ok(())
}
|
// This file is part of Substrate.
// Copyright (C) 2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Code generation for the ratio assignment type' compact representation.
use crate::field_name_for;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
// Generates the `match distribution.len()` arms used by `from_assignment`:
// one arm per supported distribution size (1, 2, then 3..=count), each
// pushing the index-encoded form into the matching compact field.
fn from_impl(count: usize) -> TokenStream2 {
    // Size 1: (voter, target) — the weight is implicit (100%).
    let from_impl_single = {
        let name = field_name_for(1);
        quote!(1 => compact.#name.push(
            (
                index_of_voter(&who).or_invalid_index()?,
                index_of_target(&distribution[0].0).or_invalid_index()?,
            )
        ),)
    };
    // Size 2: (voter, (target0, weight0), target1) — last weight is implicit.
    let from_impl_double = {
        let name = field_name_for(2);
        quote!(2 => compact.#name.push(
            (
                index_of_voter(&who).or_invalid_index()?,
                (
                    index_of_target(&distribution[0].0).or_invalid_index()?,
                    distribution[0].1,
                ),
                index_of_target(&distribution[1].0).or_invalid_index()?,
            )
        ),)
    };
    // Sizes 3..=count: (voter, [(target, weight); c-1], last_target).
    let from_impl_rest = (3..=count)
        .map(|c| {
            let inner = (0..c - 1)
                .map(
                    |i| quote!((index_of_target(&distribution[#i].0).or_invalid_index()?, distribution[#i].1),),
                )
                .collect::<TokenStream2>();
            let field_name = field_name_for(c);
            let last_index = c - 1;
            let last = quote!(index_of_target(&distribution[#last_index].0).or_invalid_index()?);
            quote!(
                #c => compact.#field_name.push(
                    (
                        index_of_voter(&who).or_invalid_index()?,
                        [#inner],
                        #last,
                    )
                ),
            )
        })
        .collect::<TokenStream2>();
    quote!(
        #from_impl_single
        #from_impl_double
        #from_impl_rest
    )
}
// Generates the loops used by `into_assignment`: decode each compact field
// back into full `Assignment`s, reconstructing the implicit last weight as
// `one() - sum(explicit weights)`.
fn into_impl(count: usize, per_thing: syn::Type) -> TokenStream2 {
    // Size 1: single target gets the full weight.
    let into_impl_single = {
        let name = field_name_for(1);
        quote!(
            for (voter_index, target_index) in self.#name {
                assignments.push(_npos::Assignment {
                    who: voter_at(voter_index).or_invalid_index()?,
                    distribution: vec![
                        (target_at(target_index).or_invalid_index()?, #per_thing::one())
                    ],
                })
            }
        )
    };
    // Size 2: explicit p1, implicit p2 = one() - p1 (with overflow check).
    let into_impl_double = {
        let name = field_name_for(2);
        quote!(
            for (voter_index, (t1_idx, p1), t2_idx) in self.#name {
                if p1 >= #per_thing::one() {
                    return Err(_npos::Error::CompactStakeOverflow);
                }
                // defensive only. Since Percent doesn't have `Sub`.
                let p2 = _npos::sp_arithmetic::traits::Saturating::saturating_sub(
                    #per_thing::one(),
                    p1,
                );
                assignments.push( _npos::Assignment {
                    who: voter_at(voter_index).or_invalid_index()?,
                    distribution: vec![
                        (target_at(t1_idx).or_invalid_index()?, p1),
                        (target_at(t2_idx).or_invalid_index()?, p2),
                    ]
                });
            }
        )
    };
    // Sizes 3..=count: sum the explicit weights, last = one() - sum.
    let into_impl_rest = (3..=count)
        .map(|c| {
            let name = field_name_for(c);
            quote!(
                for (voter_index, inners, t_last_idx) in self.#name {
                    let mut sum = #per_thing::zero();
                    let mut inners_parsed = inners
                        .iter()
                        .map(|(ref t_idx, p)| {
                            sum = _npos::sp_arithmetic::traits::Saturating::saturating_add(sum, *p);
                            let target = target_at(*t_idx).or_invalid_index()?;
                            Ok((target, *p))
                        })
                        .collect::<Result<Vec<(A, #per_thing)>, _npos::Error>>()?;
                    if sum >= #per_thing::one() {
                        return Err(_npos::Error::CompactStakeOverflow);
                    }
                    // defensive only. Since Percent doesn't have `Sub`.
                    let p_last = _npos::sp_arithmetic::traits::Saturating::saturating_sub(
                        #per_thing::one(),
                        sum,
                    );
                    inners_parsed.push((target_at(t_last_idx).or_invalid_index()?, p_last));
                    assignments.push(_npos::Assignment {
                        who: voter_at(voter_index).or_invalid_index()?,
                        distribution: inners_parsed,
                    });
                }
            )
        })
        .collect::<TokenStream2>();
    quote!(
        #into_impl_single
        #into_impl_double
        #into_impl_rest
    )
}
// Emits the `from_assignment` / `into_assignment` impl block for the
// generated compact type `ident`, wiring in the arms and loops produced by
// `from_impl` and `into_impl` above.
pub(crate) fn assignment(
    ident: syn::Ident,
    voter_type: syn::Type,
    target_type: syn::Type,
    weight_type: syn::Type,
    count: usize,
) -> TokenStream2 {
    let from_impl = from_impl(count);
    let into_impl = into_impl(count, weight_type.clone());
    quote!(
        use _npos::__OrInvalidIndex;
        impl #ident {
            pub fn from_assignment<FV, FT, A>(
                assignments: Vec<_npos::Assignment<A, #weight_type>>,
                index_of_voter: FV,
                index_of_target: FT,
            ) -> Result<Self, _npos::Error>
            where
                A: _npos::IdentifierT,
                for<'r> FV: Fn(&'r A) -> Option<#voter_type>,
                for<'r> FT: Fn(&'r A) -> Option<#target_type>,
            {
                let mut compact: #ident = Default::default();
                for _npos::Assignment { who, distribution } in assignments {
                    match distribution.len() {
                        0 => continue,
                        #from_impl
                        _ => {
                            return Err(_npos::Error::CompactTargetOverflow);
                        }
                    }
                };
                Ok(compact)
            }
            pub fn into_assignment<A: _npos::IdentifierT>(
                self,
                voter_at: impl Fn(#voter_type) -> Option<A>,
                target_at: impl Fn(#target_type) -> Option<A>,
            ) -> Result<Vec<_npos::Assignment<A, #weight_type>>, _npos::Error> {
                let mut assignments: Vec<_npos::Assignment<A, #weight_type>> = Default::default();
                #into_impl
                Ok(assignments)
            }
        }
    )
}
|
use aoc::read_data;
use std::collections::HashMap;
use std::error::Error;
#[derive(Debug)]
// loc, binary — a memory write as (36-bit address string, 36-bit value string).
struct Mem(String, String);
#[derive(Debug)]
/// One program section: a mask line followed by its `mem[..] = ..` writes.
struct Section {
    mask: String,
    mem: Vec<Mem>,
}
impl Section {
    /// Parses a mask line (`v[0]`) plus the following `mem[addr] = value`
    /// lines into a section; address and value are rendered as 36-bit
    /// zero-padded binary strings.
    fn from_v(v: Vec<String>) -> Self {
        let mem = v
            .iter()
            .skip(1)
            .map(|line| {
                let open = line.find('[').unwrap();
                let close = line.find(']').unwrap();
                let eq = line.find('=').unwrap();
                let addr: usize = line[open + 1..close].parse().unwrap();
                let value: usize = line[eq + 2..].parse().unwrap();
                Mem(format!("{:036b}", addr), format!("{:036b}", value))
            })
            .collect();
        Self {
            // Skip the "mask = " prefix (7 chars).
            mask: v[0][7..].to_string(),
            mem,
        }
    }
}
/// Splits the raw input lines into sections, one per mask line.
fn trans(data: &[String]) -> Vec<Section> {
    let mut sections = Vec::new();
    let mut pending: Vec<String> = Vec::new();
    for line in data {
        // A new mask line closes the previous section (if any).
        if line.contains("mask") && !pending.is_empty() {
            sections.push(Section::from_v(std::mem::take(&mut pending)));
        }
        pending.push(line.to_string());
    }
    sections.push(Section::from_v(pending));
    sections
}
// Decoder-chip emulator: sparse memory of address -> value.
struct Machine {
    mem: HashMap<usize, usize>,
}
impl Machine {
    fn new() -> Self {
        Self {
            mem: HashMap::new(),
        }
    }
    // Version 1 decoder: the mask overrides bits of the *value* ('1'/'0'
    // force the bit, 'X' keeps the value's bit); address is used as-is.
    fn aps(&mut self, s: &Section) {
        let mask: Vec<char> = s.mask.chars().collect();
        for m in &s.mem {
            let mut masked = String::new();
            for (i, c) in m.1.chars().enumerate() {
                match mask[i] {
                    'X' => masked += &c.to_string(),
                    '1' => masked += "1",
                    '0' => masked += "0",
                    _ => panic!("err"),
                }
            }
            //println!("{}: {}", m.0, usize::from_str_radix(&masked, 2).unwrap())
            self.mem.insert(
                usize::from_str_radix(&m.0, 2).unwrap(),
                usize::from_str_radix(&masked, 2).unwrap(),
            );
        }
    }
    // Version 2 decoder: the mask applies to the *address* ('1' forces the
    // bit, '0' keeps it, 'X' is a floating bit). Every combination of the
    // floating bits (2^num addresses) receives the unmodified value.
    fn aps2(&mut self, s: &Section) {
        let mask: Vec<char> = s.mask.chars().collect();
        for m in &s.mem {
            let mut masked = String::new();
            // Number of floating 'X' bits in the masked address.
            let mut num: usize = 0;
            for (i, c) in m.0.chars().enumerate() {
                match mask[i] {
                    'X' => {
                        masked += "X";
                        num += 1
                    }
                    '1' => masked += "1",
                    '0' => masked += &c.to_string(),
                    _ => panic!("err"),
                }
            }
            // Substitute each binary pattern of length `num` for the X's.
            for d in 0..2_i32.pow(num as u32) {
                let s = format!("{:0width$b}", d, width = num);
                let mut s = s.chars();
                self.mem.insert(
                    usize::from_str_radix(
                        &masked
                            .chars()
                            .map(|x| if x == 'X' { s.next().unwrap() } else { x })
                            .collect::<String>(),
                        2,
                    )
                    .unwrap(),
                    usize::from_str_radix(&m.1, 2).unwrap(),
                );
            }
            //println!("{}: {}", m.0, usize::from_str_radix(&masked, 2).unwrap());
        }
    }
}
//u8::from_str_radix(&s[..7].replace('F', "0").replace('B', "1"), 2).unwrap()
/// Part 1: run every section through a v1 machine, then total the memory.
fn p1(data: &[Section]) -> usize {
    let mut machine = Machine::new();
    for section in data {
        machine.aps(section);
    }
    machine.mem.values().sum()
}
/// Part 2: same as `p1` but with the version-2 (address-mask) decoder.
fn p2(data: &[Section]) -> usize {
    let mut machine = Machine::new();
    for section in data {
        machine.aps2(section);
    }
    machine.mem.values().sum()
}
/// Entry point: parse ./data/data14 and print both puzzle answers.
fn main() -> Result<(), Box<dyn Error>> {
    println!("Hello, Advent Of Code 2020!");
    let raw: Vec<String> = read_data("./data/data14").unwrap();
    let sections: Vec<Section> = trans(&raw);
    // part 1
    println!(
        " What is the sum of all values left in memory after it completes?: {}",
        p1(&sections)
    );
    // part 2
    println!("Execute the initialization program using an emulator for a version 2 decoder chip. What is the sum of all values left in memory after it completes? {}", p2(&sections));
    Ok(())
}
#[test]
// Smoke test: the input file parses as lines of String.
fn data_read() {
    println!("{:?}", read_data::<String>("./data/data14").unwrap());
}
#[test]
// Uses the worked examples from the AoC day 14 puzzle statement for both
// decoder versions.
fn calc() {
    let data: Vec<String> = vec![
        "mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X".to_string(),
        "mem[8] = 11".to_string(),
        "mem[7] = 101".to_string(),
        "mem[8] = 0".to_string(),
    ];
    let data: Vec<Section> = trans(&data);
    // part 1
    let mut m = Machine::new();
    m.aps(&data[0]);
    // 11 -> 73 then overwritten by 0 -> 64 at address 8; 101 unchanged.
    assert_eq!(m.mem[&8], 64);
    assert_eq!(m.mem[&7], 101);
    assert_eq!(p1(&data), 165);
    // part 2
    let data: Vec<String> = vec![
        "mask = 000000000000000000000000000000X1001X".to_string(),
        "mem[42] = 100".to_string(),
        "mask = 00000000000000000000000000000000X0XX".to_string(),
        "mem[26] = 1".to_string(),
    ];
    let data: Vec<Section> = trans(&data);
    let mut m = Machine::new();
    m.aps2(&data[0]);
    // First mask floats 2 bits -> writes 100 to 4 addresses.
    assert_eq!(m.mem[&26], 100);
    assert_eq!(m.mem[&27], 100);
    assert_eq!(m.mem[&58], 100);
    assert_eq!(m.mem[&59], 100);
    m.aps2(&data[1]);
    // Second mask floats 3 bits -> writes 1 to 8 addresses.
    assert_eq!(m.mem[&16], 1);
    assert_eq!(m.mem[&17], 1);
    assert_eq!(m.mem[&18], 1);
    assert_eq!(m.mem[&19], 1);
    assert_eq!(m.mem[&24], 1);
    assert_eq!(m.mem[&25], 1);
    assert_eq!(m.mem[&26], 1);
    assert_eq!(m.mem[&27], 1);
    assert_eq!(p2(&data), 208);
}
|
use std::fmt;
type Result<T> = std::result::Result<T, String>;
/// A position in the source text, tracked as a single offset.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub(crate) struct Location {
    offset: usize,
}
impl Location {
    /// Builds a `Location` at the given offset.
    pub(crate) fn new(offset: usize) -> Self {
        Location { offset }
    }
}
impl fmt::Display for Location {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "offset: {}", self.offset)
    }
}
/// Lexical tokens, each carrying its source location(s).
/// Single-character tokens store only `start`; multi-character tokens store
/// `start` and `end` plus their text payload.
#[derive(Debug, PartialEq)]
pub(super) enum Token {
    EndList {
        start: Location,
    },
    EndMap {
        start: Location,
    },
    EndVector {
        start: Location,
    },
    Integer {
        value: String,
        start: Location,
        end: Location,
    },
    Keyword {
        name: String,
        start: Location,
        end: Location,
    },
    LineComment {
        // Comment text without the leading ';'.
        comment: String,
        start: Location,
        end: Location,
    },
    StartList {
        start: Location,
    },
    StartMap {
        start: Location,
    },
    StartVector {
        start: Location,
    },
    String {
        value: String,
        start: Location,
        end: Location,
    },
    Symbol {
        name: String,
        start: Location,
        end: Location,
    },
}
// Convenience constructors, one per variant, used by the lexer below.
impl Token {
    fn end_list(start: Location) -> Self {
        Self::EndList { start }
    }
    fn end_map(start: Location) -> Self {
        Self::EndMap { start }
    }
    fn end_vector(start: Location) -> Self {
        Self::EndVector { start }
    }
    fn integer(value: String, start: Location, end: Location) -> Self {
        Self::Integer { value, start, end }
    }
    fn keyword(name: String, start: Location, end: Location) -> Self {
        Self::Keyword { name, start, end }
    }
    fn line_comment(comment: String, start: Location, end: Location) -> Self {
        Self::LineComment {
            comment,
            start,
            end,
        }
    }
    fn start_list(start: Location) -> Self {
        Self::StartList { start }
    }
    fn start_map(start: Location) -> Self {
        Self::StartMap { start }
    }
    fn start_vector(start: Location) -> Self {
        Self::StartVector { start }
    }
    fn string(value: String, start: Location, end: Location) -> Self {
        Self::String { value, start, end }
    }
    fn symbol(name: String, start: Location, end: Location) -> Self {
        Self::Symbol { name, start, end }
    }
}
pub(super) struct Lexer;
impl Lexer {
    /// Tokenizes `src`, returning the tokens in source order, or an error
    /// naming the first character no parser recognizes.
    ///
    /// Whitespace is consumed but produces no token.
    pub(super) fn from_str(src: &str) -> Result<Vec<Token>> {
        let mut tokens = Vec::new();
        let mut current = Location::new(0);
        // NOTE(review): the loop bound counts *chars* while `&src[..]`
        // slices by *bytes*; the two only agree for ASCII input — TODO
        // confirm callers never pass multi-byte characters.
        while current.offset < src.chars().count() {
            let remaining = &src[current.offset..];
            // Try each parser in turn; the first match reports how many
            // characters it consumed. (Was garbled to `¤t` by an HTML
            // entity mis-decode of `&current`; restored.)
            let parsed = Self::parse_whitespace(remaining, &mut tokens, &current)
                .or_else(|| Self::parse_start_list(remaining, &mut tokens, &current))
                .or_else(|| Self::parse_start_map(remaining, &mut tokens, &current))
                .or_else(|| Self::parse_start_vector(remaining, &mut tokens, &current))
                .or_else(|| Self::parse_end_list(remaining, &mut tokens, &current))
                .or_else(|| Self::parse_end_map(remaining, &mut tokens, &current))
                .or_else(|| Self::parse_end_vector(remaining, &mut tokens, &current))
                .or_else(|| Self::parse_symbol(remaining, &mut tokens, &current))
                .or_else(|| Self::parse_integer(remaining, &mut tokens, &current))
                .or_else(|| Self::parse_keyword(remaining, &mut tokens, &current))
                .or_else(|| Self::parse_string(remaining, &mut tokens, &current))
                .or_else(|| Self::parse_line_comment(remaining, &mut tokens, &current));
            match parsed {
                Some(n) => current.offset += n,
                None => {
                    return Err(format!(
                        "reader: unknown character '{}'",
                        remaining.chars().next().unwrap(),
                    ))
                }
            }
        }
        Ok(tokens)
    }
    /// Consumes `)` and pushes an `EndList` token.
    fn parse_end_list(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        if src.starts_with(')') {
            tokens.push(Token::end_list(current.clone()));
            Some(1)
        } else {
            None
        }
    }
    /// Consumes `}` and pushes an `EndMap` token.
    fn parse_end_map(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        if src.starts_with('}') {
            tokens.push(Token::end_map(current.clone()));
            Some(1)
        } else {
            None
        }
    }
    /// Consumes `]` and pushes an `EndVector` token.
    fn parse_end_vector(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        if src.starts_with(']') {
            tokens.push(Token::end_vector(current.clone()));
            Some(1)
        } else {
            None
        }
    }
    /// Consumes a run of digits and pushes an `Integer` token whose `end`
    /// location is one past the last digit.
    fn parse_integer(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        let mut chars = src.chars();
        if chars.next().unwrap().is_numeric() {
            // `chars` already consumed the first digit, so the total
            // length is `parsed + 1`.
            let parsed = chars.take_while(|c| c.is_numeric()).count();
            let mut end = current.clone();
            end.offset += parsed + 1;
            tokens.push(Token::integer(
                src[..parsed + 1].to_owned(),
                current.clone(),
                end,
            ));
            Some(parsed + 1)
        } else {
            None
        }
    }
    /// Consumes `:` followed by keyword characters (alphanumeric, `-`, `!`)
    /// and pushes a `Keyword` token; the leading `:` is not part of the name.
    fn parse_keyword(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        if src.starts_with(':') {
            let parsed = src
                .chars()
                .skip(1)
                .take_while(|c| c.is_alphanumeric() || ['-', '!'].contains(c))
                .count();
            let mut end = current.clone();
            end.offset += parsed + 1;
            tokens.push(Token::keyword(
                src[1..parsed + 1].to_owned(),
                current.clone(),
                end,
            ));
            Some(parsed + 1)
        } else {
            None
        }
    }
    /// Consumes `;` up to the end of the line and pushes a `LineComment`
    /// token; the leading `;` is excluded from the stored text.
    fn parse_line_comment(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        if src.starts_with(';') {
            let parsed = src.lines().next().map(str::len).unwrap_or(0);
            let mut end = current.clone();
            end.offset += parsed;
            tokens.push(Token::line_comment(
                src[1..parsed].to_owned(),
                current.clone(),
                end,
            ));
            Some(parsed)
        } else {
            None
        }
    }
    /// Consumes `(` and pushes a `StartList` token.
    fn parse_start_list(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        if src.starts_with('(') {
            tokens.push(Token::start_list(current.clone()));
            Some(1)
        } else {
            None
        }
    }
    /// Consumes a double-quoted string and pushes a `String` token; the
    /// stored value excludes both quotes, the consumed length includes them.
    fn parse_string(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        if src.starts_with('"') {
            let parsed = src.chars().skip(1).take_while(|c| *c != '"').count();
            let mut end = current.clone();
            end.offset += parsed + 2;
            tokens.push(Token::string(
                src[1..parsed + 1].to_owned(),
                current.clone(),
                end,
            ));
            Some(parsed + 2)
        } else {
            None
        }
    }
    /// Consumes a symbol: an alphabetic first character followed by
    /// alphanumerics, `-`, or `!`.
    fn parse_symbol(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        let mut chars = src.chars();
        if chars.next().unwrap().is_alphabetic() {
            let parsed = chars
                .take_while(|c| c.is_alphanumeric() || ['-', '!'].contains(c))
                .count();
            let mut end = current.clone();
            end.offset += parsed + 1;
            tokens.push(Token::symbol(
                src[..parsed + 1].to_owned(),
                current.clone(),
                end,
            ));
            Some(parsed + 1)
        } else {
            None
        }
    }
    /// Consumes `{` and pushes a `StartMap` token.
    fn parse_start_map(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        if src.starts_with('{') {
            tokens.push(Token::start_map(current.clone()));
            Some(1)
        } else {
            None
        }
    }
    /// Consumes `[` and pushes a `StartVector` token.
    fn parse_start_vector(src: &str, tokens: &mut Vec<Token>, current: &Location) -> Option<usize> {
        if src.starts_with('[') {
            tokens.push(Token::start_vector(current.clone()));
            Some(1)
        } else {
            None
        }
    }
    /// Consumes (and discards) a run of whitespace; emits no token.
    fn parse_whitespace(src: &str, _: &mut Vec<Token>, _: &Location) -> Option<usize> {
        let whitespaces = src.chars().take_while(|c| c.is_whitespace()).count();
        if whitespaces > 0 {
            Some(whitespaces)
        } else {
            None
        }
    }
}
#[cfg(test)]
pub(super) mod tests {
    use super::*;
    // The helper constructors below build expected `Token`s from plain
    // `usize` offsets. They are `pub(in crate::reader)` so sibling reader
    // tests can reuse them.
    pub(in crate::reader) fn end_list(start: usize) -> Token {
        Token::end_list(Location::new(start))
    }
    pub(in crate::reader) fn end_map(start: usize) -> Token {
        Token::end_map(Location::new(start))
    }
    pub(in crate::reader) fn end_vector(start: usize) -> Token {
        Token::end_vector(Location::new(start))
    }
    pub(in crate::reader) fn integer(value: impl Into<String>, start: usize, end: usize) -> Token {
        Token::integer(value.into(), Location::new(start), Location::new(end))
    }
    pub(in crate::reader) fn keyword(name: impl Into<String>, start: usize, end: usize) -> Token {
        Token::keyword(name.into(), Location::new(start), Location::new(end))
    }
    pub(in crate::reader) fn line_comment(
        comment: impl Into<String>,
        start: usize,
        end: usize,
    ) -> Token {
        Token::line_comment(comment.into(), Location::new(start), Location::new(end))
    }
    pub(in crate::reader) fn start_list(start: usize) -> Token {
        Token::start_list(Location::new(start))
    }
    pub(in crate::reader) fn start_map(start: usize) -> Token {
        Token::start_map(Location::new(start))
    }
    pub(in crate::reader) fn start_vector(start: usize) -> Token {
        Token::start_vector(Location::new(start))
    }
    pub(in crate::reader) fn string(value: impl Into<String>, start: usize, end: usize) -> Token {
        Token::string(value.into(), Location::new(start), Location::new(end))
    }
    pub(in crate::reader) fn symbol(name: impl Into<String>, start: usize, end: usize) -> Token {
        Token::symbol(name.into(), Location::new(start), Location::new(end))
    }
    #[test]
    fn test_end_list() {
        assert_eq!(Lexer::from_str(")"), Ok(vec![end_list(0)]));
        assert_eq!(Lexer::from_str("))"), Ok(vec![end_list(0), end_list(1)]));
        assert_eq!(Lexer::from_str(" ) )"), Ok(vec![end_list(1), end_list(3)]));
    }
    #[test]
    fn test_end_map() {
        assert_eq!(Lexer::from_str("}"), Ok(vec![end_map(0)]));
        assert_eq!(Lexer::from_str("}}"), Ok(vec![end_map(0), end_map(1)]));
        assert_eq!(Lexer::from_str(" } }"), Ok(vec![end_map(1), end_map(3)]));
    }
    #[test]
    fn test_end_vector() {
        assert_eq!(Lexer::from_str("]"), Ok(vec![end_vector(0)]));
        assert_eq!(
            Lexer::from_str("]]"),
            Ok(vec![end_vector(0), end_vector(1)])
        );
        assert_eq!(
            Lexer::from_str(" ] ]"),
            Ok(vec![end_vector(1), end_vector(3)])
        );
    }
    #[test]
    fn test_integer() {
        // `end` is one past the last digit.
        assert_eq!(Lexer::from_str("0"), Ok(vec![integer("0", 0, 1)]));
        assert_eq!(Lexer::from_str("123"), Ok(vec![integer("123", 0, 3)]));
        assert_eq!(
            Lexer::from_str("456 789"),
            Ok(vec![integer("456", 0, 3), integer("789", 4, 7)])
        );
    }
    #[test]
    fn test_keyword() {
        // The leading ':' counts toward the span but not the name.
        assert_eq!(
            Lexer::from_str(":keyword"),
            Ok(vec![keyword("keyword", 0, 8)])
        );
        assert_eq!(
            Lexer::from_str(":abc123"),
            Ok(vec![keyword("abc123", 0, 7)])
        );
        assert_eq!(
            Lexer::from_str(":1-2-3 :4-5-6"),
            Ok(vec![keyword("1-2-3", 0, 6), keyword("4-5-6", 7, 13)])
        );
    }
    #[test]
    fn test_line_comment() {
        assert_eq!(Lexer::from_str(";"), Ok(vec![line_comment("", 0, 1)]));
        assert_eq!(
            Lexer::from_str(";comment"),
            Ok(vec![line_comment("comment", 0, 8)])
        );
        assert_eq!(
            Lexer::from_str("; comment"),
            Ok(vec![line_comment(" comment", 0, 9)])
        );
        assert_eq!(
            Lexer::from_str("; comment;another comment"),
            Ok(vec![line_comment(" comment;another comment", 0, 25)])
        );
        assert_eq!(
            Lexer::from_str("; comment ; another comment"),
            Ok(vec![line_comment(" comment ; another comment", 0, 27)])
        );
        // The multi-line literal keeps this test file's own indentation,
        // which is why the second comment starts at offset 27.
        assert_eq!(
            Lexer::from_str(
                "; comment
                ; another comment"
            ),
            Ok(vec![
                line_comment(" comment", 0, 9),
                line_comment(" another comment", 27, 44)
            ])
        );
    }
    #[test]
    fn test_start_list() {
        assert_eq!(Lexer::from_str("("), Ok(vec![start_list(0)]));
        assert_eq!(
            Lexer::from_str("(("),
            Ok(vec![start_list(0), start_list(1)])
        );
        assert_eq!(
            Lexer::from_str(" ( ("),
            Ok(vec![start_list(1), start_list(3)])
        );
    }
    #[test]
    fn test_start_map() {
        assert_eq!(Lexer::from_str("{"), Ok(vec![start_map(0)]));
        assert_eq!(Lexer::from_str("{{"), Ok(vec![start_map(0), start_map(1)]));
        assert_eq!(
            Lexer::from_str(" { {"),
            Ok(vec![start_map(1), start_map(3)])
        );
    }
    #[test]
    fn test_start_vector() {
        assert_eq!(Lexer::from_str("["), Ok(vec![start_vector(0)]));
        assert_eq!(
            Lexer::from_str("[["),
            Ok(vec![start_vector(0), start_vector(1)])
        );
        assert_eq!(
            Lexer::from_str(" [ ["),
            Ok(vec![start_vector(1), start_vector(3)])
        );
    }
    #[test]
    fn test_string() {
        // The span includes both quotes; the value excludes them.
        assert_eq!(Lexer::from_str(r#""""#), Ok(vec![string("", 0, 2)]));
        assert_eq!(
            Lexer::from_str(r#""string""#),
            Ok(vec![string("string", 0, 8)])
        );
        assert_eq!(
            Lexer::from_str(r#""abc 123""#),
            Ok(vec![string("abc 123", 0, 9)])
        );
        assert_eq!(
            Lexer::from_str(r#""hello" "world""#),
            Ok(vec![string("hello", 0, 7), string("world", 8, 15)])
        );
    }
    #[test]
    fn test_symbol() {
        assert_eq!(Lexer::from_str("symbol"), Ok(vec![symbol("symbol", 0, 6)]));
        assert_eq!(Lexer::from_str("abc123"), Ok(vec![symbol("abc123", 0, 6)]));
        assert_eq!(
            Lexer::from_str("hello world"),
            Ok(vec![symbol("hello", 0, 5), symbol("world", 6, 11)])
        );
    }
}
|
use postings::compression::{BlockDecoder, VIntDecoder, COMPRESSION_BLOCK_SIZE};
use DocId;
use common::BitSet;
use common::HasLen;
use postings::compression::compressed_block_size;
use docset::{DocSet, SkipResult};
use fst::Streamer;
use postings::serializer::PostingsSerializer;
use postings::FreqReadingOption;
use postings::Postings;
use owned_read::OwnedRead;
use common::{VInt, BinarySerializable};
use postings::USE_SKIP_INFO_LIMIT;
use postings::SkipReader;
use schema::IndexRecordOption;
use positions::PositionReader;
use std::cmp::Ordering;
// Empty buffer used as the skip-data source when a posting list is too
// short (below USE_SKIP_INFO_LIMIT) to carry skip information.
const EMPTY_ARR: [u8; 0] = [];
// Lazily skips and decodes the delta-encoded positions of the current term.
//
// Skips are only accumulated in `position_to_skip`; positions are actually
// consumed from `position_reader` when `positions_with_offset` is called.
struct PositionComputer {
    // store the amount of position ints to skip
    // before reading positions.
    position_to_skip: usize,
    position_reader: PositionReader,
}
impl PositionComputer {
    /// Wraps `position_reader` with no pending skip.
    pub fn new(position_reader: PositionReader) -> PositionComputer {
        PositionComputer {
            position_to_skip: 0,
            position_reader,
        }
    }
    /// Records `num_skip` position integers to be skipped lazily, right
    /// before the next read.
    pub fn add_skip(&mut self, num_skip: usize) {
        self.position_to_skip += num_skip;
    }
    // Positions can only be read once: the underlying reader is consumed
    // as we go. Skips accumulated via `add_skip` are applied first, then
    // the deltas read into `output` are turned into absolute positions
    // (prefix sum) starting from `offset`.
    pub fn positions_with_offset(&mut self, offset: u32, output: &mut [u32]) {
        let pending_skip = self.position_to_skip;
        self.position_to_skip = 0;
        self.position_reader.skip(pending_skip);
        self.position_reader.read(output);
        let mut running = offset;
        for delta in output.iter_mut() {
            running += *delta;
            *delta = running;
        }
    }
}
/// `SegmentPostings` represents the inverted list or postings associated to
/// a term in a `Segment`.
///
/// As we iterate through the `SegmentPostings`, the frequencies are optionally decoded.
/// Positions on the other hand, are optionally entirely decoded upfront.
pub struct SegmentPostings {
    // Cursor over the compressed blocks of the posting list.
    block_cursor: BlockSegmentPostings,
    // Index of the current document within the currently decoded block.
    cur: usize,
    // Present only when positions were recorded and requested.
    position_computer: Option<PositionComputer>,
}
impl SegmentPostings {
    /// Returns an empty segment postings object.
    pub fn empty() -> Self {
        SegmentPostings {
            block_cursor: BlockSegmentPostings::empty(),
            // Past-the-end cursor: the first `advance()` reports exhaustion.
            cur: COMPRESSION_BLOCK_SIZE,
            position_computer: None,
        }
    }
    /// Creates a segment postings object with the given documents
    /// and no frequency encoded.
    ///
    /// This method is mostly useful for unit tests.
    ///
    /// It serializes the doc ids using tantivy's codec
    /// and returns a `SegmentPostings` object that embeds a
    /// buffer with the serialized data.
    pub fn create_from_docs(docs: &[u32]) -> SegmentPostings {
        let mut buffer = Vec::new();
        {
            // Every doc is written with a term frequency of 1.
            let mut serializer = PostingsSerializer::new(&mut buffer, false, false);
            docs.iter().for_each(|&doc| serializer.write_doc(doc, 1u32));
            serializer
                .close_term(docs.len() as u32)
                .expect("In memory Serialization should never fail.");
        }
        let block_postings = BlockSegmentPostings::from_data(
            docs.len() as u32,
            OwnedRead::new(buffer),
            IndexRecordOption::Basic,
            IndexRecordOption::Basic,
        );
        SegmentPostings::from_block_postings(block_postings, None)
    }
}
impl SegmentPostings {
    /// Builds a `SegmentPostings` from an already-opened block cursor.
    ///
    /// * `segment_block_postings` - block-level cursor over the posting list.
    /// * `positions_stream_opt` - position stream, present only when the
    ///   caller wants positions decoded.
    pub(crate) fn from_block_postings(
        segment_block_postings: BlockSegmentPostings,
        positions_stream_opt: Option<PositionReader>,
    ) -> SegmentPostings {
        let position_computer = positions_stream_opt.map(PositionComputer::new);
        SegmentPostings {
            block_cursor: segment_block_postings,
            // Cursor starts past the (empty) current block so that the
            // first `advance()` loads the first real block.
            cur: COMPRESSION_BLOCK_SIZE,
            position_computer,
        }
    }
}
/// Narrows `arr` down to a window `(start, end)` that contains the first
/// element >= `target`, by galloping: probe offsets 1, 2, 4, 8, … from the
/// current lower bound.
///
/// Assumes (debug-checked) a non-empty array with
/// `arr[0] <= target <= arr[last]`.
fn exponential_search(target: u32, arr: &[u32]) -> (usize, usize) {
    let len = arr.len();
    debug_assert!(target >= arr[0]);
    debug_assert!(target <= arr[len - 1]);
    let mut lo = 0;
    let mut step = 1;
    loop {
        let probe = lo + step;
        if probe >= len {
            return (lo, len);
        }
        if arr[probe] > target {
            return (lo, probe);
        }
        lo = probe;
        step *= 2;
    }
}
/// Search the first index containing an element greater or equal to the target.
///
/// # Assumption
///
/// The array is assumed non empty.
/// The target is assumed greater or equal to the first element.
/// The target is assumed smaller or equal to the last element.
fn search_within_block(block_docs: &[u32], target: u32) -> usize {
    let (lo, hi) = exponential_search(target, block_docs);
    // `binary_search` yields either the exact hit (Ok) or the insertion
    // point (Err); both are the first index with an element >= target
    // within the `lo..hi` window.
    let offset_in_window = match block_docs[lo..hi].binary_search(&target) {
        Ok(idx) => idx,
        Err(idx) => idx,
    };
    lo.wrapping_add(offset_in_window)
}
impl DocSet for SegmentPostings {
    // Seeks to the first document >= `target`.
    //
    // Per the `DocSet` contract, one `advance()` happens first; after that,
    // whole blocks may be skipped through the skip list, and the final
    // position is found by exponential + binary search within one block.
    // When positions are needed, every skipped term frequency is summed so
    // the position stream can be skipped lazily by the same amount.
    fn skip_next(&mut self, target: DocId) -> SkipResult {
        if !self.advance() {
            return SkipResult::End;
        }
        match self.doc().cmp(&target) {
            Ordering::Equal => {
                return SkipResult::Reached;
            }
            Ordering::Greater => {
                return SkipResult::OverStep;
            }
            _ => {
                // current doc < target: fall through to the block search.
            }
        }
        // In the following, thanks to the call to advance above,
        // we know that the position is not loaded and we need
        // to skip every doc_freq we cross.
        // skip blocks until one that might contain the target
        // check if we need to go to the next block
        let need_positions = self.position_computer.is_some();
        let mut sum_freqs_skipped: u32 = 0;
        if !self.block_cursor
            .docs()
            .last()
            .map(|doc| *doc >= target)
            .unwrap_or(false) // there should always be at least a document in the block
        // since advance returned.
        {
            // we are not in the right block.
            //
            // First compute all of the freqs skipped from the current block.
            if need_positions {
                sum_freqs_skipped = self.block_cursor
                    .freqs()[self.cur..]
                    .iter()
                    .sum();
                match self.block_cursor.skip_to(target) {
                    BlockSegmentPostingsSkipResult::Success(block_skip_freqs) => {
                        sum_freqs_skipped += block_skip_freqs;
                    }
                    BlockSegmentPostingsSkipResult::Terminated => {
                        return SkipResult::End;
                    }
                }
            } else {
                // no positions needed. no need to sum freqs.
                if self.block_cursor.skip_to(target) == BlockSegmentPostingsSkipResult::Terminated {
                    return SkipResult::End;
                }
            }
            self.cur = 0;
        }
        // we're in the right block now, start with an exponential search
        let block_docs = self.block_cursor.docs();
        debug_assert!(target >= self.doc());
        let new_cur = self.cur.wrapping_add(search_within_block(&block_docs[self.cur..], target));
        if need_positions {
            // Account for the freqs of the docs stepped over inside this block.
            sum_freqs_skipped += self.block_cursor.freqs()[self.cur..new_cur].iter().sum::<u32>();
            self.position_computer
                .as_mut()
                .unwrap()
                .add_skip(sum_freqs_skipped as usize);
        }
        self.cur = new_cur;
        // `doc` is now the first element >= `target`
        let doc = block_docs[new_cur];
        debug_assert!(doc >= target);
        if doc == target {
            return SkipResult::Reached;
        } else {
            return SkipResult::OverStep;
        }
    }
    // goes to the next element.
    // next needs to be called a first time to point to the correct element.
    #[inline]
    fn advance(&mut self) -> bool {
        // Before leaving the current doc, remember to skip its positions.
        if self.position_computer.is_some() {
            let term_freq = self.term_freq() as usize;
            self.position_computer.as_mut().unwrap().add_skip(term_freq);
        }
        self.cur += 1;
        if self.cur >= self.block_cursor.block_len() {
            self.cur = 0;
            if !self.block_cursor.advance() {
                // Exhausted: park the cursor past the end.
                self.cur = COMPRESSION_BLOCK_SIZE;
                return false;
            }
        }
        true
    }
    fn size_hint(&self) -> u32 {
        self.len() as u32
    }
    /// Return the current document's `DocId`.
    #[inline]
    fn doc(&self) -> DocId {
        let docs = self.block_cursor.docs();
        debug_assert!(
            self.cur < docs.len(),
            "Have you forgotten to call `.advance()` at least once before calling .doc()."
        );
        docs[self.cur]
    }
    // Inserts every remaining doc id (current block tail + all later
    // blocks) into `bitset`.
    fn append_to_bitset(&mut self, bitset: &mut BitSet) {
        // finish the current block
        if self.advance() {
            for &doc in &self.block_cursor.docs()[self.cur..] {
                bitset.insert(doc);
            }
            // ... iterate through the remaining blocks.
            while self.block_cursor.advance() {
                for &doc in self.block_cursor.docs() {
                    bitset.insert(doc);
                }
            }
        }
    }
}
impl HasLen for SegmentPostings {
    // Length is the posting list's doc frequency (deleted docs included).
    fn len(&self) -> usize {
        self.block_cursor.doc_freq()
    }
}
impl Postings for SegmentPostings {
    /// Term frequency of the current document.
    fn term_freq(&self) -> u32 {
        self.block_cursor.freq(self.cur)
    }
    /// Fills `output` with the current doc's positions, each shifted by
    /// `offset`; clears `output` when no positions were recorded.
    fn positions_with_offset(&mut self, offset: u32, output: &mut Vec<u32>) {
        if self.position_computer.is_none() {
            output.clear();
            return;
        }
        // `term_freq` must be read before mutably borrowing the computer.
        let term_freq = self.term_freq() as usize;
        output.resize(term_freq, 0u32);
        self.position_computer
            .as_mut()
            .unwrap()
            .positions_with_offset(offset, &mut output[..])
    }
}
/// `BlockSegmentPostings` is a cursor iterating over blocks
/// of documents.
///
/// # Warning
///
/// While it is useful for some very specific high-performance
/// use cases, you should prefer using `SegmentPostings` for most usage.
pub struct BlockSegmentPostings {
    // Decoder for the doc-id block currently loaded.
    doc_decoder: BlockDecoder,
    // Decoder for the term-frequency block currently loaded.
    freq_decoder: BlockDecoder,
    // Whether freqs are absent, skipped over, or decoded.
    freq_reading_option: FreqReadingOption,
    // Total number of documents in the posting list.
    doc_freq: usize,
    // Delta base: last doc id of the previously decoded block.
    doc_offset: DocId,
    // Number of docs in the trailing, vint-encoded partial block.
    num_vint_docs: usize,
    remaining_data: OwnedRead,
    skip_reader: SkipReader,
}
// Splits the raw payload into its optional skip-list section and the
// postings data itself.
//
// When `doc_freq` warrants skip info, the layout is
// `[skip_len: VInt][skip data: skip_len bytes][postings data]`.
// NOTE(review): assumes `OwnedRead::clip(n)` truncates the readable window
// to `n` bytes while `advance(n)` drops the first `n` — confirm against
// the `owned_read` module.
fn split_into_skips_and_postings(doc_freq: u32, mut data: OwnedRead) -> (Option<OwnedRead>, OwnedRead) {
    if doc_freq >= USE_SKIP_INFO_LIMIT {
        let skip_len = VInt::deserialize(&mut data).expect("Data corrupted").0 as usize;
        let mut postings_data = data.clone();
        // `postings_data` starts right after the skip section ...
        postings_data.advance(skip_len);
        // ... while `data` is narrowed to exactly the skip section.
        data.clip(skip_len);
        (Some(data), postings_data)
    } else {
        (None, data)
    }
}
/// Outcome of `BlockSegmentPostings::skip_to`.
#[derive(Debug, Eq, PartialEq)]
pub enum BlockSegmentPostingsSkipResult {
    /// All remaining documents were smaller than the target; they have been consumed.
    Terminated,
    Success(u32) //< number of term freqs to skip
}
impl BlockSegmentPostings {
    /// Opens a block cursor over raw postings data.
    ///
    /// * `doc_freq` - number of documents in the posting list.
    /// * `data` - serialized skip info + postings blocks.
    /// * `record_option` - what the index actually stores.
    /// * `requested_option` - what the caller wants decoded. Frequencies
    ///   are decoded only when both stored and requested.
    pub(crate) fn from_data(
        doc_freq: u32,
        data: OwnedRead,
        record_option: IndexRecordOption,
        requested_option: IndexRecordOption
    ) -> BlockSegmentPostings {
        let freq_reading_option = match (record_option, requested_option) {
            (IndexRecordOption::Basic, _) => FreqReadingOption::NoFreq,
            (_, IndexRecordOption::Basic) => FreqReadingOption::SkipFreq,
            (_, _) => FreqReadingOption::ReadFreq,
        };
        let (skip_data_opt, postings_data) = split_into_skips_and_postings(doc_freq, data);
        let skip_reader =
            match skip_data_opt {
                Some(skip_data) => SkipReader::new(skip_data, record_option),
                None => SkipReader::new(OwnedRead::new(&EMPTY_ARR[..]), record_option)
            };
        let doc_freq = doc_freq as usize;
        // The trailing docs that do not fill a whole block are vint-encoded.
        let num_vint_docs = doc_freq % COMPRESSION_BLOCK_SIZE;
        BlockSegmentPostings {
            num_vint_docs,
            doc_decoder: BlockDecoder::new(),
            freq_decoder: BlockDecoder::with_val(1),
            freq_reading_option,
            doc_offset: 0,
            doc_freq,
            remaining_data: postings_data,
            skip_reader,
        }
    }
    // Resets the block segment postings on another position
    // in the postings file.
    //
    // This is useful for enumerating through a list of terms,
    // and consuming the associated posting lists while avoiding
    // reallocating a `BlockSegmentPostings`.
    //
    // # Warning
    //
    // This does not reset the positions list.
    pub(crate) fn reset(&mut self, doc_freq: u32, postings_data: OwnedRead) {
        let (skip_data_opt, postings_data) = split_into_skips_and_postings(doc_freq, postings_data);
        // Same computation as in `from_data` (COMPRESSION_BLOCK_SIZE is a
        // power of two, so `%` compiles to the same mask).
        let num_vint_docs = (doc_freq as usize) % COMPRESSION_BLOCK_SIZE;
        self.num_vint_docs = num_vint_docs;
        self.remaining_data = postings_data;
        if let Some(skip_data) = skip_data_opt {
            self.skip_reader.reset(skip_data);
        } else {
            self.skip_reader.reset(OwnedRead::new(&EMPTY_ARR[..]))
        }
        self.doc_offset = 0;
        self.doc_freq = doc_freq as usize;
    }
    /// Returns the document frequency associated to this block postings.
    ///
    /// This `doc_freq` is simply the sum of the length of all of the blocks
    /// length, and it does not take in account deleted documents.
    pub fn doc_freq(&self) -> usize {
        self.doc_freq
    }
    /// Returns the array of docs in the current block.
    ///
    /// Before the first call to `.advance()`, the block
    /// returned by `.docs()` is empty.
    #[inline]
    pub fn docs(&self) -> &[DocId] {
        self.doc_decoder.output_array()
    }
    /// Return the document at index `idx` of the block.
    #[inline]
    pub fn doc(&self, idx: usize) -> u32 {
        self.doc_decoder.output(idx)
    }
    /// Return the array of `term freq` in the block.
    #[inline]
    pub fn freqs(&self) -> &[u32] {
        self.freq_decoder.output_array()
    }
    /// Return the frequency at index `idx` of the block.
    #[inline]
    pub fn freq(&self, idx: usize) -> u32 {
        self.freq_decoder.output(idx)
    }
    /// Returns the length of the current block.
    ///
    /// All blocks have a length of `NUM_DOCS_PER_BLOCK`,
    /// except the last block that may have a length
    /// of any number between 1 and `NUM_DOCS_PER_BLOCK - 1`
    #[inline]
    fn block_len(&self) -> usize {
        self.doc_decoder.output_len
    }
    // Decodes one full bitpacked block of doc ids, and — depending on
    // `freq_reading_option` — decodes or steps over the matching freq
    // block. The skip reader must already be positioned on the block.
    //
    // (Extracted: this code was duplicated verbatim in `skip_to` and
    // `advance`.)
    fn decode_bitpacked_block(&mut self) {
        let num_bits = self.skip_reader.doc_num_bits();
        let num_consumed_bytes = self.doc_decoder
            .uncompress_block_sorted(
                self.remaining_data.as_ref(),
                self.doc_offset,
                num_bits);
        self.remaining_data.advance(num_consumed_bytes);
        let tf_num_bits = self.skip_reader.tf_num_bits();
        match self.freq_reading_option {
            FreqReadingOption::NoFreq => {}
            FreqReadingOption::SkipFreq => {
                let num_bytes_to_skip = compressed_block_size(tf_num_bits);
                self.remaining_data.advance(num_bytes_to_skip);
            }
            FreqReadingOption::ReadFreq => {
                let num_consumed_bytes = self.freq_decoder
                    .uncompress_block_unsorted(self.remaining_data.as_ref(),
                                               tf_num_bits);
                self.remaining_data.advance(num_consumed_bytes);
            }
        }
    }
    // Decodes the final, vint-encoded partial block and marks it consumed
    // (`num_vint_docs = 0`).
    //
    // (Extracted: this code was duplicated verbatim in `skip_to` and
    // `advance`.)
    fn decode_vint_block(&mut self) {
        let num_compressed_bytes = self.doc_decoder.uncompress_vint_sorted(
            self.remaining_data.as_ref(),
            self.doc_offset,
            self.num_vint_docs,
        );
        self.remaining_data.advance(num_compressed_bytes);
        match self.freq_reading_option {
            FreqReadingOption::NoFreq | FreqReadingOption::SkipFreq => {}
            FreqReadingOption::ReadFreq => {
                self.freq_decoder
                    .uncompress_vint_unsorted(self.remaining_data.as_ref(), self.num_vint_docs);
            }
        }
        self.num_vint_docs = 0;
    }
    /// position on a block that may contains `doc_id`.
    /// Always advance the current block.
    ///
    /// Returns true if a block that has an element greater or equal to the target is found.
    /// Returning true does not guarantee that the smallest element of the block is smaller
    /// than the target. It only guarantees that the last element is greater or equal.
    ///
    /// Returns false iff all of the document remaining are smaller than
    /// `doc_id`. In that case, all of these document are consumed.
    ///
    pub fn skip_to(&mut self,
                   target_doc: DocId) -> BlockSegmentPostingsSkipResult {
        let mut skip_freqs = 0u32;
        while self.skip_reader.advance() {
            if self.skip_reader.doc() >= target_doc {
                // The last doc of this block is >= target: decode it.
                self.decode_bitpacked_block();
                self.doc_offset = self.skip_reader.doc();
                return BlockSegmentPostingsSkipResult::Success(skip_freqs);
            } else {
                // Step over the whole block without decoding it, summing
                // its freqs so positions can be skipped later.
                skip_freqs += self.skip_reader.tf_sum();
                let advance_len = self.skip_reader.total_block_len();
                self.doc_offset = self.skip_reader.doc();
                self.remaining_data.advance(advance_len);
            }
        }
        // we are now on the last, incomplete, variable encoded block.
        if self.num_vint_docs > 0 {
            self.decode_vint_block();
            return self.docs()
                .last()
                .map(|last_doc| {
                    if *last_doc >= target_doc {
                        BlockSegmentPostingsSkipResult::Success(skip_freqs)
                    } else {
                        BlockSegmentPostingsSkipResult::Terminated
                    }
                })
                .unwrap_or(BlockSegmentPostingsSkipResult::Terminated);
        }
        BlockSegmentPostingsSkipResult::Terminated
    }
    /// Advance to the next block.
    ///
    /// Returns false iff there was no remaining blocks.
    pub fn advance(&mut self) -> bool {
        if self.skip_reader.advance() {
            self.decode_bitpacked_block();
            // The last doc of this block becomes the delta base for the next.
            self.doc_offset = self.doc_decoder.output(COMPRESSION_BLOCK_SIZE - 1);
            true
        } else if self.num_vint_docs > 0 {
            self.decode_vint_block();
            true
        } else {
            false
        }
    }
    /// Returns an empty segment postings object
    pub fn empty() -> BlockSegmentPostings {
        BlockSegmentPostings {
            num_vint_docs: 0,
            doc_decoder: BlockDecoder::new(),
            freq_decoder: BlockDecoder::with_val(1),
            freq_reading_option: FreqReadingOption::NoFreq,
            doc_offset: 0,
            doc_freq: 0,
            remaining_data: OwnedRead::new(vec![]),
            skip_reader: SkipReader::new(OwnedRead::new(vec![]), IndexRecordOption::Basic),
        }
    }
}
impl<'b> Streamer<'b> for BlockSegmentPostings {
    type Item = &'b [DocId];
    // Streams one decoded block of doc ids at a time; yields `None` once
    // every block (including the trailing vint block) has been consumed.
    fn next(&'b mut self) -> Option<&'b [DocId]> {
        if self.advance() {
            Some(self.docs())
        } else {
            None
        }
    }
}
#[cfg(test)]
mod tests {
    use super::BlockSegmentPostings;
    use super::SegmentPostings;
    use common::HasLen;
    use core::Index;
    use docset::DocSet;
    use fst::Streamer;
    use schema::IndexRecordOption;
    use schema::SchemaBuilder;
    use schema::Term;
    use schema::INT_INDEXED;
    use super::BlockSegmentPostingsSkipResult;
    use DocId;
    use super::search_within_block;
    #[test]
    fn test_empty_segment_postings() {
        let mut postings = SegmentPostings::empty();
        assert!(!postings.advance());
        assert!(!postings.advance());
        assert_eq!(postings.len(), 0);
    }
    #[test]
    fn test_empty_block_segment_postings() {
        let mut postings = BlockSegmentPostings::empty();
        assert!(!postings.advance());
        assert_eq!(postings.doc_freq(), 0);
    }
    // Reference implementation: linear scan for the first element >= target.
    fn search_within_block_trivial_but_slow(block: &[u32], target: u32) -> usize {
        block
            .iter()
            .cloned()
            .enumerate()
            .filter(|&(_, ref val)| *val >= target)
            .next()
            .unwrap().0
    }
    fn util_test_search_within_block(block: &[u32], target: u32) {
        assert_eq!(search_within_block(block, target), search_within_block_trivial_but_slow(block, target));
    }
    // Exercises every value in the block plus each value minus one.
    fn util_test_search_within_block_all(block: &[u32]) {
        use std::collections::HashSet;
        let mut targets = HashSet::new();
        for (i, val) in block.iter().cloned().enumerate() {
            if i > 0 {
                targets.insert(val - 1);
            }
            targets.insert(val);
        }
        for target in targets {
            util_test_search_within_block(block, target);
        }
    }
    #[test]
    fn test_search_within_block() {
        for len in 1u32..128u32 {
            let v: Vec<u32> = (0..len).map(|i| i*2).collect();
            util_test_search_within_block_all(&v[..]);
        }
    }
    #[test]
    fn test_block_segment_postings() {
        let mut block_segments = build_block_postings((0..100_000).collect::<Vec<u32>>());
        let mut offset: u32 = 0u32;
        // checking that the block before calling advance is empty
        assert!(block_segments.docs().is_empty());
        // checking that the `doc_freq` is correct
        assert_eq!(block_segments.doc_freq(), 100_000);
        while let Some(block) = block_segments.next() {
            for (i, doc) in block.iter().cloned().enumerate() {
                assert_eq!(offset + (i as u32), doc);
            }
            offset += block.len() as u32;
        }
    }
    // Builds an in-RAM index whose term `0` matches exactly the given docs
    // (filler docs with value 1 pad the gaps), and returns the block
    // postings of that term.
    fn build_block_postings(docs: Vec<DocId>) -> BlockSegmentPostings {
        let mut schema_builder = SchemaBuilder::default();
        let int_field = schema_builder.add_u64_field("id", INT_INDEXED);
        let schema = schema_builder.build();
        let index = Index::create_in_ram(schema);
        let mut index_writer = index.writer_with_num_threads(1, 40_000_000).unwrap();
        let mut last_doc = 0u32;
        for doc in docs {
            for _ in last_doc..doc {
                index_writer.add_document(doc!(int_field=>1u64));
            }
            index_writer.add_document(doc!(int_field=>0u64));
            last_doc = doc + 1;
        }
        index_writer.commit().unwrap();
        index.load_searchers().unwrap();
        let searcher = index.searcher();
        let segment_reader = searcher.segment_reader(0);
        let inverted_index = segment_reader.inverted_index(int_field);
        let term = Term::from_field_u64(int_field, 0u64);
        let term_info = inverted_index.get_term_info(&term).unwrap();
        inverted_index.read_block_postings_from_terminfo(&term_info, IndexRecordOption::Basic)
    }
    #[test]
    fn test_block_segment_postings_skip() {
        // A single-doc posting list: the first skip succeeds, any further
        // skip (the data is consumed) terminates.
        for i in 0..4 {
            let mut block_postings = build_block_postings(vec![3]);
            assert_eq!(block_postings.skip_to(i), BlockSegmentPostingsSkipResult::Success(0u32));
            assert_eq!(block_postings.skip_to(i), BlockSegmentPostingsSkipResult::Terminated);
        }
        let mut block_postings = build_block_postings(vec![3]);
        assert_eq!(block_postings.skip_to(4u32), BlockSegmentPostingsSkipResult::Terminated);
    }
    #[test]
    fn test_block_segment_postings_skip2() {
        let mut docs = vec![0];
        for i in 0..1300 {
            docs.push((i * i / 100) + i);
        }
        let mut block_postings = build_block_postings(docs.clone());
        for i in vec![0, 424, 10000] {
            assert_eq!(block_postings.skip_to(i), BlockSegmentPostingsSkipResult::Success(0u32));
            let docs = block_postings.docs();
            assert!(docs[0] <= i);
            assert!(docs.last().cloned().unwrap_or(0u32) >= i);
        }
        assert_eq!(block_postings.skip_to(100_000), BlockSegmentPostingsSkipResult::Terminated);
        assert_eq!(block_postings.skip_to(101_000), BlockSegmentPostingsSkipResult::Terminated);
    }
    #[test]
    fn test_reset_block_segment_postings() {
        let mut schema_builder = SchemaBuilder::default();
        let int_field = schema_builder.add_u64_field("id", INT_INDEXED);
        let schema = schema_builder.build();
        let index = Index::create_in_ram(schema);
        let mut index_writer = index.writer_with_num_threads(1, 40_000_000).unwrap();
        // create two postings list, one containg even number,
        // the other containing odd numbers.
        for i in 0..6 {
            let doc = doc!(int_field=> (i % 2) as u64);
            index_writer.add_document(doc);
        }
        index_writer.commit().unwrap();
        index.load_searchers().unwrap();
        let searcher = index.searcher();
        let segment_reader = searcher.segment_reader(0);
        let mut block_segments;
        {
            let term = Term::from_field_u64(int_field, 0u64);
            let inverted_index = segment_reader.inverted_index(int_field);
            let term_info = inverted_index.get_term_info(&term).unwrap();
            block_segments = inverted_index
                .read_block_postings_from_terminfo(&term_info, IndexRecordOption::Basic);
        }
        assert!(block_segments.advance());
        assert_eq!(block_segments.docs(), &[0, 2, 4]);
        {
            let term = Term::from_field_u64(int_field, 1u64);
            let inverted_index = segment_reader.inverted_index(int_field);
            let term_info = inverted_index.get_term_info(&term).unwrap();
            inverted_index.reset_block_postings_from_terminfo(&term_info, &mut block_segments);
        }
        assert!(block_segments.advance());
        assert_eq!(block_segments.docs(), &[1, 3, 5]);
    }
}
|
// NOTE(review): svd2rust-generated register accessors — regenerate from the
// SVD rather than hand-editing. Each AF_PRIOx field below is 4 bits wide.
#[doc = "Register `GPIOI_HWCFGR8` reader"]
pub type R = crate::R<GPIOI_HWCFGR8_SPEC>;
#[doc = "Field `AF_PRIO8` reader - AF_PRIO8"]
pub type AF_PRIO8_R = crate::FieldReader;
#[doc = "Field `AF_PRIO9` reader - AF_PRIO9"]
pub type AF_PRIO9_R = crate::FieldReader;
#[doc = "Field `AF_PRIO10` reader - AF_PRIO10"]
pub type AF_PRIO10_R = crate::FieldReader;
#[doc = "Field `AF_PRIO11` reader - AF_PRIO11"]
pub type AF_PRIO11_R = crate::FieldReader;
#[doc = "Field `AF_PRIO12` reader - AF_PRIO12"]
pub type AF_PRIO12_R = crate::FieldReader;
#[doc = "Field `AF_PRIO13` reader - AF_PRIO13"]
pub type AF_PRIO13_R = crate::FieldReader;
#[doc = "Field `AF_PRIO14` reader - AF_PRIO14"]
pub type AF_PRIO14_R = crate::FieldReader;
#[doc = "Field `AF_PRIO15` reader - AF_PRIO15"]
pub type AF_PRIO15_R = crate::FieldReader;
// NOTE(review): svd2rust-generated — each accessor extracts one 4-bit
// AF_PRIOx nibble from the 32-bit register value. Hand edits here will be
// lost on regeneration.
impl R {
    #[doc = "Bits 0:3 - AF_PRIO8"]
    #[inline(always)]
    pub fn af_prio8(&self) -> AF_PRIO8_R {
        AF_PRIO8_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - AF_PRIO9"]
    #[inline(always)]
    pub fn af_prio9(&self) -> AF_PRIO9_R {
        AF_PRIO9_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - AF_PRIO10"]
    #[inline(always)]
    pub fn af_prio10(&self) -> AF_PRIO10_R {
        AF_PRIO10_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bits 12:15 - AF_PRIO11"]
    #[inline(always)]
    pub fn af_prio11(&self) -> AF_PRIO11_R {
        AF_PRIO11_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
    #[doc = "Bits 16:19 - AF_PRIO12"]
    #[inline(always)]
    pub fn af_prio12(&self) -> AF_PRIO12_R {
        AF_PRIO12_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    #[doc = "Bits 20:23 - AF_PRIO13"]
    #[inline(always)]
    pub fn af_prio13(&self) -> AF_PRIO13_R {
        AF_PRIO13_R::new(((self.bits >> 20) & 0x0f) as u8)
    }
    #[doc = "Bits 24:27 - AF_PRIO14"]
    #[inline(always)]
    pub fn af_prio14(&self) -> AF_PRIO14_R {
        AF_PRIO14_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    #[doc = "Bits 28:31 - AF_PRIO15"]
    #[inline(always)]
    pub fn af_prio15(&self) -> AF_PRIO15_R {
        AF_PRIO15_R::new(((self.bits >> 28) & 0x0f) as u8)
    }
}
// NOTE(review): svd2rust-generated register-spec marker type and trait
// impls; the register is read-only (`Readable` but no `Writable`).
#[doc = "For GPIOA, B, C, D, E, F, G, H, I, and GPIOJ: For GPIOK and GPIOZ:\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gpioi_hwcfgr8::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GPIOI_HWCFGR8_SPEC;
impl crate::RegisterSpec for GPIOI_HWCFGR8_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`gpioi_hwcfgr8::R`](R) reader structure"]
impl crate::Readable for GPIOI_HWCFGR8_SPEC {}
#[doc = "`reset()` method sets GPIOI_HWCFGR8 to value 0"]
impl crate::Resettable for GPIOI_HWCFGR8_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::queue::{ConcurrentQueue, PaddedUsize};
use a19_core::pow2::PowOf2;
use std::cell::UnsafeCell;
use std::mem::replace;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;
use std::vec::Vec;
/// One slot of the ring buffer.
struct MpmcNode<T> {
    // Sequence id of the element currently stored; 0 means the slot is free
    // (producers store `p_index` here after writing `value`, consumers reset to 0).
    id: AtomicUsize,
    // Payload; `None` while the slot is free.
    value: Option<T>,
}
/// Shared-reference facade over [`MpmcQueue`], allowing `&self` access from
/// multiple threads via interior mutability.
pub struct MpmcQueueWrap<T> {
    // The inner queue; mutated through raw `&mut` obtained from the cell.
    queue: UnsafeCell<MpmcQueue<T>>,
}
// NOTE(review): these impls have no `T: Send` bound, so the wrapper can be
// shared/sent even when `T` itself is not thread-safe — likely unsound;
// consider `unsafe impl<T: Send> ...`. Verify against intended usage.
unsafe impl<T> Sync for MpmcQueueWrap<T> {}
unsafe impl<T> Send for MpmcQueueWrap<T> {}
impl<T> MpmcQueueWrap<T> {
    /// Creates a queue; capacity is `queue_size` rounded up to a power of two.
    pub fn new(queue_size: usize) -> Self {
        let queue = UnsafeCell::new(MpmcQueue::new(queue_size));
        MpmcQueueWrap { queue }
    }
    /// Removes and returns the oldest value, or `None` if the queue is empty.
    pub fn poll(&self) -> Option<T> {
        unsafe {
            // NOTE(review): several threads can hold this `&mut` at once; the
            // algorithm coordinates via atomics, but overlapping `&mut`
            // references are UB under Rust's aliasing rules — worth auditing.
            let queue = &mut *self.queue.get();
            queue.poll()
        }
    }
    /// Attempts to enqueue `v`; returns `false` when the ring is full.
    pub fn offer(&self, v: T) -> bool {
        unsafe {
            // See aliasing note in `poll`.
            let queue = &mut *self.queue.get();
            queue.offer(v)
        }
    }
    /// Drains up to `limit` values through `act`; returns the number drained.
    pub fn drain(&self, act: fn(T), limit: usize) -> usize {
        unsafe {
            // See aliasing note in `poll`.
            let queue = &mut *self.queue.get();
            queue.drain(act, limit)
        }
    }
}
/// Lock-free bounded MPMC ring buffer.
struct MpmcQueue<T> {
    // capacity - 1; valid because capacity is a power of two, so `index & mask`
    // is a cheap modulo.
    mask: usize,
    // Fixed-size slot storage; length == `capacity` after construction.
    ring_buffer: Vec<MpmcNode<T>>,
    capacity: usize,
    // Consumer cursor (next sequence id to read); cache-line padded.
    sequence_number: PaddedUsize,
    // Producer cursor (next sequence id to write); cache-line padded.
    producer: PaddedUsize,
}
impl<T> MpmcQueue<T> {
    /// Builds the queue with all slots free (`id == 0`, `value == None`).
    /// Both cursors start at 1 so that a stored id of 0 always means "free".
    fn new(queue_size: usize) -> Self {
        let power_of_2 = queue_size.round_to_power_of_two();
        let mut queue = MpmcQueue {
            ring_buffer: Vec::with_capacity(power_of_2),
            capacity: power_of_2,
            mask: power_of_2 - 1,
            sequence_number: PaddedUsize {
                padding: [0; 15],
                counter: AtomicUsize::new(1),
            },
            producer: PaddedUsize {
                padding: [0; 15],
                counter: AtomicUsize::new(1),
            },
        };
        // Pre-fill every slot so indices never go out of bounds later.
        for _ in 0..power_of_2 {
            let node = MpmcNode {
                id: AtomicUsize::new(0),
                value: None,
            };
            queue.ring_buffer.push(node);
        }
        queue
    }
    /// Maps a monotonically increasing sequence id onto a ring-buffer slot.
    #[inline]
    fn pos(&self, index: usize) -> usize {
        index & self.mask
    }
}
// NOTE(review): like the wrapper's impls above, these lack a `T: Send` bound;
// confirm `T` is always a thread-safe type at the call sites.
unsafe impl<T> Sync for MpmcQueue<T> {}
unsafe impl<T> Send for MpmcQueue<T> {}
impl<T> ConcurrentQueue<T> for MpmcQueue<T> {
    /// Used to poll the queue and moves the value to the option if there is a value.
    ///
    /// Protocol: a consumer claims sequence id `s_index` by CASing the consumer
    /// cursor forward, then takes the slot's value and marks the slot free by
    /// storing 0 into its id.
    fn poll(&mut self) -> Option<T> {
        loop {
            let s_index = self.sequence_number.counter.load(Ordering::Relaxed);
            let p_index = self.producer.counter.load(Ordering::Relaxed);
            if p_index > s_index {
                unsafe {
                    let last_pos = self.pos(s_index);
                    let node = self.ring_buffer.get_unchecked_mut(last_pos);
                    let node_id = node.id.load(Ordering::Acquire);
                    // Verify the node id matches the index id.
                    if node_id == s_index {
                        // Try and claim the slot.
                        if self.sequence_number.counter.compare_exchange_weak(
                            s_index,
                            s_index + 1,
                            Ordering::Acquire,
                            Ordering::Relaxed,
                        ).is_ok() {
                            let v = replace(&mut node.value, Option::None);
                            // Release-store 0 frees the slot for producers.
                            node.id.store(0, Ordering::Release);
                            break v;
                        }
                    } else {
                        // Producer has advanced its cursor but not yet
                        // published this slot's value; wait for it.
                        thread::yield_now()
                    }
                }
            } else {
                break None;
            }
        }
    }
    /// A quick way to drain all of the values from the queue.
    /// # Arguments
    /// `act` - The action to run against the queue.
    /// # Returns
    /// The number of items that where returned.
    fn drain(&mut self, act: fn(T), limit: usize) -> usize {
        loop {
            let p_index = self.producer.counter.load(Ordering::Relaxed);
            let s_index = self.sequence_number.counter.load(Ordering::Relaxed);
            if p_index <= s_index {
                break 0;
            } else {
                let elements_left = p_index - s_index;
                let request = limit.min(elements_left);
                // Have to do this a little bit different.
                // Claim the whole `[s_index, s_index + request)` range at once,
                // then spin per-slot until each producer has published.
                if self.sequence_number.counter.compare_exchange_weak(
                    s_index,
                    s_index + request,
                    Ordering::Acquire,
                    Ordering::Relaxed,
                ).is_ok() {
                    for i in 0..request {
                        let pos = self.pos(s_index + i);
                        let node = unsafe { self.ring_buffer.get_unchecked_mut(pos) };
                        loop {
                            let node_id = node.id.load(Ordering::Acquire);
                            if node_id == s_index + i {
                                let v = replace(&mut node.value, Option::None);
                                // Need a Store/Store barrier to make sure this is done last.
                                node.id.store(0, Ordering::Release);
                                match v {
                                    None => panic!("Found a None!"),
                                    Some(t_value) => act(t_value),
                                }
                                break;
                            } else {
                                thread::yield_now();
                            }
                        }
                    }
                    break request;
                }
            }
        }
    }
    /// Offers a value to the queue. Returns true if the value was successfully added.
    /// # Arguments
    /// `value` - The vale to add to the queue.
    fn offer(&mut self, value: T) -> bool {
        let capacity = self.capacity;
        loop {
            let p_index = self.producer.counter.load(Ordering::Relaxed);
            let c_index = self.sequence_number.counter.load(Ordering::Relaxed);
            // Full when the producer cursor is a whole ring ahead of the
            // consumer cursor (cursors start at 1, hence the first clause).
            if p_index < capacity || p_index - capacity < c_index {
                let pos = self.pos(p_index);
                let mut node = unsafe { self.ring_buffer.get_unchecked_mut(pos) };
                // id == 0 means the slot has been released by a consumer.
                if node.id.load(Ordering::Acquire) == 0 {
                    if self.producer.counter.compare_exchange_weak(
                        p_index,
                        p_index + 1,
                        Ordering::Acquire,
                        Ordering::Relaxed,
                    ).is_ok() {
                        node.value = Some(value);
                        // Need a Store/Store barrier to make sure this is done last.
                        node.id.store(p_index, Ordering::Release);
                        break true;
                    }
                } else {
                    thread::yield_now();
                }
            } else {
                break false;
            }
        }
    }
}
}
#[cfg(test)]
mod tests {
    use crate::queue::mpmc_queue::{MpmcQueue, MpmcQueueWrap};
    use crate::queue::ConcurrentQueue;
    use std::sync::Arc;
    use std::thread;
    use std::vec::Vec;
    use time_test::time_test;
    // Single-threaded smoke test: construction, one offer, one poll.
    #[test]
    pub fn create_queue_test() {
        let mut queue: MpmcQueue<u64> = MpmcQueue::new(128);
        assert_eq!(128, queue.ring_buffer.len());
        queue.offer(1);
        let result = queue.poll();
        assert_eq!(Some(1), result);
    }
    // Stress test: 4 producers and 4 consumers move `spins` items through the
    // queue; each consumer exits after receiving exactly its share.
    #[test]
    pub fn use_thread_queue_test() {
        time_test!();
        let queue: Arc<MpmcQueueWrap<usize>> = Arc::new(MpmcQueueWrap::new(1_000_000));
        let write_thread_num = 4;
        let mut write_threads: Vec<thread::JoinHandle<_>> = Vec::with_capacity(write_thread_num);
        let spins: usize = 10_000_000;
        for _ in 0..write_thread_num {
            let write_queue = queue.clone();
            let write_thread = thread::spawn(move || {
                for i in 0..(spins / write_thread_num) {
                    // Spin until there is room in the ring.
                    while !write_queue.offer(i) {
                        thread::yield_now()
                    }
                }
            });
            write_threads.push(write_thread);
        }
        let thread_num: usize = 4;
        let mut read_threads: Vec<thread::JoinHandle<_>> = Vec::with_capacity(thread_num);
        let read_spins = spins / thread_num;
        for _ in 0..thread_num {
            let read_queue = queue.clone();
            let read_thread = thread::spawn(move || {
                let mut count = 0;
                loop {
                    let result = read_queue.poll();
                    match result {
                        Some(_) => {
                            count = count + 1;
                            if count == read_spins {
                                break;
                            }
                        }
                        _ => {
                            thread::yield_now();
                        }
                    }
                }
            });
            read_threads.push(read_thread);
        }
        // Join from the back to avoid shifting the remaining handles.
        for num in 0..write_thread_num {
            write_threads
                .remove(write_thread_num - num - 1)
                .join()
                .unwrap();
        }
        for num in 0..thread_num {
            read_threads.remove(thread_num - num - 1).join().unwrap();
        }
    }
    // Stress test for `drain`: one producer, two draining consumers; each
    // consumer stops once it has seen roughly its share (the slack term
    // tolerates items drained by the other thread).
    #[test]
    pub fn use_thread_queue_test_drain() {
        time_test!();
        let queue: Arc<MpmcQueueWrap<usize>> = Arc::new(MpmcQueueWrap::new(1_000_000));
        let write_queue = queue.clone();
        let spins: usize = 10_000_000;
        let write_thread = thread::spawn(move || {
            for i in 0..spins {
                while !write_queue.offer(i) {
                    thread::yield_now()
                }
            }
        });
        let thread_num: usize = 2;
        let mut read_threads: Vec<thread::JoinHandle<_>> = Vec::with_capacity(thread_num);
        let read_spins = spins / thread_num;
        for _ in 0..thread_num {
            let read_queue = queue.clone();
            let read_thread = thread::spawn(move || {
                let mut count = 0;
                loop {
                    let result = read_queue.drain(|_| {}, 1000);
                    count = count + result;
                    if count == 0 {
                        thread::yield_now();
                    }
                    if count > (read_spins - 1000 * thread_num) {
                        break;
                    }
                }
            });
            read_threads.push(read_thread);
        }
        write_thread.join().unwrap();
        for num in 0..thread_num {
            read_threads.remove(thread_num - num - 1).join().unwrap();
        }
    }
}
|
//! Transforms run once for each defined dotfile during the deploy process.
//!
//! They can either be specified for a whole profile, in which case each dotfile
//! is transformed by them or they can be attached to a specific dotfile.
//!
//! The transformation takes place after the template resolving and takes the
//! contents in a textual representation. After processing the text a new text
//! must be returned.
use std::fmt;
use color_eyre::Result;
/// A transform takes the contents of a dotfile, processes it and returns a new
/// version of the content.
///
/// The dotfile is either the text of a resolved template or a non-template
/// dotfile.
pub trait Transform {
    /// Takes a string as input, processes it and returns a new version of it.
    ///
    /// The input is consumed; implementations may mutate it in place or build
    /// a fresh string.
    ///
    /// # Errors
    ///
    /// If any error occurs during the processing it can be returned.
    fn transform(&self, content: String) -> Result<String>;
}
/// List of all available [`Transform`s](`crate::profile::transform::Transform`).
///
/// These can be added to a [`Profile`](`crate::profile::Profile`) or a
/// [`Dotfile`](`crate::profile::dotfile::Dotfile`) to modify the text content.
// Serde derives allow this enum to appear directly in profile config files.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum ContentTransformer {
    /// Transformer which replaces line termination characters with either unix
    /// style (`\n`) or windows style (`\r\b`).
    LineTerminator(LineTerminator),
}
impl Transform for ContentTransformer {
    /// Dispatches to the wrapped transformer's implementation.
    fn transform(&self, content: String) -> Result<String> {
        match self {
            Self::LineTerminator(lt) => lt.transform(content),
        }
    }
}
impl fmt::Display for ContentTransformer {
    /// Renders the transformer using its `Debug` representation, mirroring the
    /// `Display` impl of [`LineTerminator`].
    ///
    /// Bug fix: the previous body called `fmt::Display::fmt(&self, f)`, which
    /// re-enters this very impl (via the blanket `Display for &T`) and
    /// overflows the stack on first use (clippy: `recursive_format_impl`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        fmt::Debug::fmt(self, f)
    }
}
/// Transformer which replaces line termination characters with either unix
/// style (`\n`) or windows style (`\r\b`).
// Serde derives allow this enum to appear directly in profile config files.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum LineTerminator {
    /// Replaces all occurrences of `\r\n` with `\n` (unix style).
    LF,
    /// Replaces all occurrences of `\n` with `\r\n` (windows style).
    CRLF,
}
impl Transform for LineTerminator {
    /// Normalizes line endings in `content`.
    ///
    /// `LF` is a plain replace. `CRLF` must only prepend `\r` to `\n`s that are
    /// not already part of a `\r\n` pair, so it walks the `\n` and `\r` index
    /// streams in lockstep to find the "bare" `\n`s before inserting.
    fn transform(&self, mut content: String) -> Result<String> {
        match self {
            Self::LF => Ok(content.replace("\r\n", "\n")),
            Self::CRLF => {
                let lf_idxs = content.match_indices('\n');
                let mut cr_idxs = content.match_indices('\r').peekable();
                // Allowed as it not needless here, the index iterator have a immutable ref
                // and are still alive when the string gets modified. To "unborrow" the
                // collect is necessary.
                #[allow(clippy::needless_collect)]
                let lf_idxs = lf_idxs
                    .filter_map(|(lf_idx, _)| {
                        // Skip `\r`s that occur strictly before this `\n`
                        // without immediately preceding it.
                        while matches!(cr_idxs.peek(), Some((cr_idx,_)) if cr_idx + 1 < lf_idx) {
                            // pop standalone `\r`
                            let _ = cr_idxs.next().expect("Failed to advance peeked iterator");
                        }
                        // A `\r` directly before the `\n` means this line is
                        // already CRLF-terminated: keep it untouched.
                        if matches!(cr_idxs.peek(), Some((cr_idx, _)) if cr_idx + 1 == lf_idx) {
                            // pop matched cr_idx
                            let _ = cr_idxs.next().expect("Failed to advance peeked iterator");
                            None
                        } else {
                            Some(lf_idx)
                        }
                    })
                    .collect::<Vec<_>>();
                // Each insertion shifts later indices right by one; `offset`
                // (the number of insertions done so far) compensates.
                for (offset, lf_idx) in lf_idxs.into_iter().enumerate() {
                    content.insert(lf_idx + offset, '\r');
                }
                Ok(content)
            }
        }
    }
}
impl fmt::Display for LineTerminator {
    /// Prints the variant name (`LF` / `CRLF`) by delegating to `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        fmt::Debug::fmt(&self, f)
    }
}
#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;
    use super::*;
    // `\r\r\r\n` exercises the standalone-`\r` skipping: only the final pair
    // is a real CRLF; `\r\\n` at the end is a literal backslash-n, not a newline.
    #[test]
    fn line_terminator_lf() -> Result<()> {
        const CONTENT: &str = "Hello\r\nWorld\nHow\nare\r\nyou today?\r\r\r\nLast line\r\\n";
        assert_eq!(
            LineTerminator::LF.transform(String::from(CONTENT))?,
            "Hello\nWorld\nHow\nare\nyou today?\r\r\nLast line\r\\n"
        );
        Ok(())
    }
    // CRLF must leave already-terminated lines (and stray `\r`s) untouched.
    #[test]
    fn line_terminator_crlf() -> Result<()> {
        const CONTENT: &str = "Hello\r\nWorld\nHow\nare\r\nyou today?\r\r\r\nLast line\r\\n";
        assert_eq!(
            LineTerminator::CRLF.transform(String::from(CONTENT))?,
            "Hello\r\nWorld\r\nHow\r\nare\r\nyou today?\r\r\r\nLast line\r\\n"
        );
        Ok(())
    }
}
|
use arrow::{
array::{BooleanArray, Float64Array, Int64Array, StringArray},
record_batch::RecordBatch,
};
use connectorx::{
destinations::arrow::ArrowDestination,
prelude::*,
sources::postgres::{rewrite_tls_args, BinaryProtocol, CSVProtocol, PostgresSource},
sources::PartitionParser,
sql::CXQuery,
transports::PostgresArrowTransport,
};
use postgres::NoTls;
use std::env;
use url::Url;
// Integration test: streams `test_table` through the Postgres binary protocol
// and checks row-by-row parsing. Requires POSTGRES_URL to point at a seeded DB.
#[test]
fn load_and_parse() {
    let _ = env_logger::builder().is_test(true).try_init();
    let dburl = env::var("POSTGRES_URL").unwrap();
    // Mirrors the five columns of test_table.
    #[derive(Debug, PartialEq)]
    struct Row(i32, Option<i32>, Option<String>, Option<f64>, Option<bool>);
    let url = Url::parse(dburl.as_str()).unwrap();
    let (config, _tls) = rewrite_tls_args(&url).unwrap();
    let mut source = PostgresSource::<BinaryProtocol, NoTls>::new(config, NoTls, 1).unwrap();
    source.set_queries(&[CXQuery::naked("select * from test_table")]);
    source.fetch_metadata().unwrap();
    let mut partitions = source.partition().unwrap();
    assert!(partitions.len() == 1);
    let mut partition = partitions.remove(0);
    partition.result_rows().expect("run query");
    let mut parser = partition.parser().unwrap();
    let mut rows: Vec<Row> = Vec::new();
    loop {
        let (n, is_last) = parser.fetch_next().unwrap();
        // `produce()` yields columns positionally, so call order matters.
        for _i in 0..n {
            rows.push(Row(
                parser.produce().unwrap(),
                parser.produce().unwrap(),
                Produce::<Option<&str>>::produce(&mut parser)
                    .unwrap()
                    .map(ToString::to_string),
                parser.produce().unwrap(),
                parser.produce().unwrap(),
            ));
        }
        if is_last {
            break;
        }
    }
    assert_eq!(
        vec![
            Row(1, Some(3), Some("str1".into()), None, Some(true)),
            Row(2, None, Some("str2".into()), Some(2.2), Some(false)),
            Row(0, Some(5), Some("a".into()), Some(3.1), None),
            Row(3, Some(7), Some("b".into()), Some(3.), Some(false)),
            Row(4, Some(9), Some("c".into()), Some(7.8), None),
            Row(1314, Some(2), None, Some(-10.), Some(true)),
        ],
        rows
    );
}
// Same as `load_and_parse` but over the CSV wire protocol, which additionally
// exposes row/column counts before parsing.
#[test]
fn load_and_parse_csv() {
    let _ = env_logger::builder().is_test(true).try_init();
    let dburl = env::var("POSTGRES_URL").unwrap();
    // Mirrors the five columns of test_table.
    #[derive(Debug, PartialEq)]
    struct Row(i32, Option<i32>, Option<String>, Option<f64>, Option<bool>);
    let url = Url::parse(dburl.as_str()).unwrap();
    let (config, _tls) = rewrite_tls_args(&url).unwrap();
    let mut source = PostgresSource::<CSVProtocol, NoTls>::new(config, NoTls, 1).unwrap();
    source.set_queries(&[CXQuery::naked("select * from test_table")]);
    source.fetch_metadata().unwrap();
    let mut partitions = source.partition().unwrap();
    assert!(partitions.len() == 1);
    let mut partition = partitions.remove(0);
    partition.result_rows().expect("run query");
    assert_eq!(6, partition.nrows());
    assert_eq!(5, partition.ncols());
    let mut parser = partition.parser().unwrap();
    let mut rows: Vec<Row> = Vec::new();
    loop {
        let (n, is_last) = parser.fetch_next().unwrap();
        // `produce()` yields columns positionally, so call order matters.
        for _i in 0..n {
            rows.push(Row(
                parser.produce().unwrap(),
                parser.produce().unwrap(),
                Produce::<Option<&str>>::produce(&mut parser)
                    .unwrap()
                    .map(ToString::to_string),
                parser.produce().unwrap(),
                parser.produce().unwrap(),
            ));
        }
        if is_last {
            break;
        }
    }
    assert_eq!(
        vec![
            Row(1, Some(3), Some("str1".into()), None, Some(true)),
            Row(2, None, Some("str2".into()), Some(2.2), Some(false)),
            Row(0, Some(5), Some("a".into()), Some(3.1), None),
            Row(3, Some(7), Some("b".into()), Some(3.), Some(false)),
            Row(4, Some(9), Some("c".into()), Some(7.8), None),
            Row(1314, Some(2), None, Some(-10.), Some(true)),
        ],
        rows
    );
}
// End-to-end: two partitioned queries dispatched through the binary protocol
// into an Arrow destination; results checked by `verify_arrow_results`.
#[test]
fn test_postgres() {
    let _ = env_logger::builder().is_test(true).try_init();
    let dburl = env::var("POSTGRES_URL").unwrap();
    let queries = [
        CXQuery::naked("select * from test_table where test_int < 2"),
        CXQuery::naked("select * from test_table where test_int >= 2"),
    ];
    let url = Url::parse(dburl.as_str()).unwrap();
    let (config, _tls) = rewrite_tls_args(&url).unwrap();
    let builder = PostgresSource::<BinaryProtocol, NoTls>::new(config, NoTls, 2).unwrap();
    let mut destination = ArrowDestination::new();
    let dispatcher = Dispatcher::<_, _, PostgresArrowTransport<BinaryProtocol, NoTls>>::new(
        builder,
        &mut destination,
        &queries,
        // Origin query used for schema inference.
        Some(String::from("select * from test_table")),
    );
    dispatcher.run().expect("run dispatcher");
    let result = destination.arrow().unwrap();
    verify_arrow_results(result);
}
// Same end-to-end flow as `test_postgres`, but over the CSV protocol and with
// no explicit origin query (schema inferred from the partitions).
#[test]
fn test_postgres_csv() {
    let _ = env_logger::builder().is_test(true).try_init();
    let dburl = env::var("POSTGRES_URL").unwrap();
    let queries = [
        CXQuery::naked("select * from test_table where test_int < 2"),
        CXQuery::naked("select * from test_table where test_int >= 2"),
    ];
    let url = Url::parse(dburl.as_str()).unwrap();
    let (config, _tls) = rewrite_tls_args(&url).unwrap();
    let builder = PostgresSource::<CSVProtocol, NoTls>::new(config, NoTls, 2).unwrap();
    let mut dst = ArrowDestination::new();
    let dispatcher = Dispatcher::<_, _, PostgresArrowTransport<CSVProtocol, NoTls>>::new(
        builder, &mut dst, &queries, None,
    );
    dispatcher.run().expect("run dispatcher");
    let result = dst.arrow().unwrap();
    verify_arrow_results(result);
}
// End-to-end aggregation test: a single GROUP BY query must yield one record
// batch with a boolean key column and a float sum column.
#[test]
fn test_postgres_agg() {
    let _ = env_logger::builder().is_test(true).try_init();
    let dburl = env::var("POSTGRES_URL").unwrap();
    let queries = [CXQuery::naked(
        "SELECT test_bool, SUM(test_float) FROM test_table GROUP BY test_bool",
    )];
    let url = Url::parse(dburl.as_str()).unwrap();
    let (config, _tls) = rewrite_tls_args(&url).unwrap();
    let builder = PostgresSource::<BinaryProtocol, NoTls>::new(config, NoTls, 1).unwrap();
    let mut destination = ArrowDestination::new();
    let dispatcher = Dispatcher::<_, _, PostgresArrowTransport<BinaryProtocol, NoTls>>::new(
        builder,
        &mut destination,
        &queries,
        // Origin query used for schema inference. `String::from` instead of a
        // zero-argument `format!` (clippy `useless_format`), matching the
        // style of `test_postgres` above.
        Some(String::from(
            "SELECT test_bool, SUM(test_float) FROM test_table GROUP BY test_bool",
        )),
    );
    dispatcher.run().expect("run dispatcher");
    let mut result = destination.arrow().unwrap();
    assert!(result.len() == 1);
    let rb = result.pop().unwrap();
    assert!(rb.columns().len() == 2);
    // Groups come back ordered NULL, false, true.
    assert!(rb
        .column(0)
        .as_any()
        .downcast_ref::<BooleanArray>()
        .unwrap()
        .eq(&BooleanArray::from(vec![None, Some(false), Some(true)])));
    assert!(rb
        .column(1)
        .as_any()
        .downcast_ref::<Float64Array>()
        .unwrap()
        .eq(&Float64Array::from(vec![
            Some(10.9),
            Some(5.2),
            Some(-10.0),
        ])));
}
/// Asserts the exact contents of the two record batches produced by the
/// partitioned `test_table` queries; the batches are identified by their row
/// count (2 rows for `test_int < 2`, 4 rows for `test_int >= 2`).
pub fn verify_arrow_results(result: Vec<RecordBatch>) {
    assert!(result.len() == 2);
    for r in result {
        match r.num_rows() {
            2 => {
                assert!(r
                    .column(0)
                    .as_any()
                    .downcast_ref::<Int64Array>()
                    .unwrap()
                    .eq(&Int64Array::from(vec![1, 0])));
                assert!(r
                    .column(1)
                    .as_any()
                    .downcast_ref::<Int64Array>()
                    .unwrap()
                    .eq(&Int64Array::from(vec![3, 5])));
                assert!(r
                    .column(2)
                    .as_any()
                    .downcast_ref::<StringArray>()
                    .unwrap()
                    .eq(&StringArray::from(vec!["str1", "a"])));
                assert!(r
                    .column(3)
                    .as_any()
                    .downcast_ref::<Float64Array>()
                    .unwrap()
                    .eq(&Float64Array::from(vec![None, Some(3.1)])));
                assert!(r
                    .column(4)
                    .as_any()
                    .downcast_ref::<BooleanArray>()
                    .unwrap()
                    .eq(&BooleanArray::from(vec![Some(true), None])));
            }
            4 => {
                assert!(r
                    .column(0)
                    .as_any()
                    .downcast_ref::<Int64Array>()
                    .unwrap()
                    .eq(&Int64Array::from(vec![2, 3, 4, 1314])));
                assert!(r
                    .column(1)
                    .as_any()
                    .downcast_ref::<Int64Array>()
                    .unwrap()
                    .eq(&Int64Array::from(vec![None, Some(7), Some(9), Some(2)])));
                assert!(r
                    .column(2)
                    .as_any()
                    .downcast_ref::<StringArray>()
                    .unwrap()
                    .eq(&StringArray::from(vec![
                        Some("str2"),
                        Some("b"),
                        Some("c"),
                        None
                    ])));
                assert!(r
                    .column(3)
                    .as_any()
                    .downcast_ref::<Float64Array>()
                    .unwrap()
                    .eq(&Float64Array::from(vec![2.2, 3., 7.8, -10.])));
                assert!(r
                    .column(4)
                    .as_any()
                    .downcast_ref::<BooleanArray>()
                    .unwrap()
                    .eq(&BooleanArray::from(vec![
                        Some(false),
                        Some(false),
                        None,
                        Some(true)
                    ])));
            }
            // Any other row count means an unexpected partition split.
            _ => unreachable!(),
        }
    }
}
|
use layouts::{RLayout};
/// Application handle; stub with a single numeric id.
pub struct RApplication {
    // Currently always 0 — presumably a future instance/window id; TODO confirm.
    id: i32
}
impl RApplication {
    /// Creates the application with id 0.
    pub fn new() -> RApplication {
        RApplication { id: 0 }
    }
    // NOTE(review): camelCase name kept for API compatibility; Rust convention
    // would be `set_layout`. Body is intentionally empty (stub).
    pub fn setLayout( &self, layout: &RLayout ) {
    }
    /// Runs the application event loop. Currently a no-op stub.
    pub fn run( &self ) {
    }
}
/// Demonstrates the string line-continuation escape: a trailing `\` consumes
/// the newline and the next line's leading whitespace, so the literal below is
/// exactly "these\nare\nthree lines".
fn main() {
    let text = "these\n\
                are\n\
                three lines";
    println!("{}", text);
}
|
use planpoker_common::RoomInfo;
use planpoker_common::RoomMessage;
use planpoker_common::RoomRequest;
use planpoker_common::UserId;
use planpoker_common::Vote;
use yew::prelude::*;
use yew_router::push_route;
use crate::agents::RoomAgent;
use crate::components::card::Card;
use crate::components::loading::Loading;
use crate::route::Route;
/// Route parameters for the room page.
#[derive(Properties, Clone, Copy)]
pub struct RoomProps {
    /// Numeric room identifier taken from the URL.
    pub id: u32,
}
/// Planning-poker room page component.
pub struct Room {
    props: RoomProps,
    link: ComponentLink<Room>,
    // Bridge to the shared agent handling the room websocket/state.
    room_agent: Box<dyn Bridge<RoomAgent>>,
    // `None` until the first `RoomMessage::RoomInfo` arrives.
    room_info: Option<RoomInfo>,
    // Our own user id, once the server tells us.
    user_info: Option<UserId>,
    // Index of the card we currently have selected, if any.
    vote: Option<u32>,
}
/// Component messages: outbound requests to the agent and inbound responses.
pub enum Msg {
    Request(RoomRequest),
    Response(RoomMessage),
}
impl Component for Room {
    type Message = Msg;
    type Properties = RoomProps;
    /// Bridges to the room agent; all agent output arrives as `Msg::Response`.
    fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
        Self {
            props,
            // Cloned because `link` is also moved into the bridge callback below.
            link: link.clone(),
            room_agent: RoomAgent::bridge(link.callback(|msg| Msg::Response(msg))),
            room_info: None,
            user_info: None,
            vote: None,
        }
    }
    /// Routes messages; always re-renders afterwards.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::Request(req) => self.send_request(req),
            Msg::Response(res) => self.handle_response(res),
        };
        true
    }
    fn change(&mut self, _props: Self::Properties) -> ShouldRender {
        false
    }
    /// Shows a loading indicator until room info arrives, then the cards, the
    /// user list, the average (once revealed) and — for the admin — controls.
    fn view(&self) -> Html {
        if let Some(room_info) = self.room_info.as_ref() {
            let vote_results = if room_info.revealed {
                let votes = room_info
                    .users
                    .iter()
                    .filter_map(|user| user.vote.value())
                    .filter_map(|card_index| room_info.cards.get(card_index as usize))
                    .filter_map(|card| card.value())
                    .collect::<Vec<_>>();
                log::info!("votes: {:?}", &votes);
                let sum = votes.iter().sum::<u32>() as f32;
                log::info!("sum: {}", sum);
                // NOTE(review): when `votes` is empty this divides by zero and
                // renders NaN — confirm whether reveal is possible with no votes.
                let avg = sum / votes.len() as f32;
                log::info!("avg: {:?}", avg);
                html! {
                    <p>{ "Avg: " }{avg}</p>
                }
            } else {
                html! {}
            };
            // Reveal/Reset buttons are only rendered for the room admin.
            let admin_actions = match self.user_info {
                Some(user_id) if user_id == room_info.admin => {
                    html! {
                        <div class="admin-actions">
                            <button onclick=self.link.callback(move |_| Msg::Request(RoomRequest::Reveal))>{ "Reveal" }</button>
                            <button onclick=self.link.callback(move |_| Msg::Request(RoomRequest::Reset))>{ "Reset" }</button>
                        </div>
                    }
                }
                _ => html! {},
            };
            html! {
                <>
                    { self.cards_view(room_info) }
                    { self.users_view(room_info) }
                    { vote_results }
                    { admin_actions }
                </>
            }
        } else {
            html! {
                <Loading/>
            }
        }
    }
}
impl Room {
    /// Renders one clickable `Card` per card in the room's deck, highlighting
    /// the one we currently voted for.
    fn cards_view(&self, room_info: &RoomInfo) -> Html {
        let card_views = room_info.cards.iter().enumerate().map(|(i, card)| {
            let selected = self.vote == Some(i as u32);
            log::info!("{:?} == Some({}) : {}", self.vote, i, selected);
            html! {
                <Card
                    onclick=self.link.callback(move |_| Msg::Request(RoomRequest::Vote(i as u32)))
                    selected=selected
                    value=card.as_str().to_owned() />
            }
        });
        html! {
            { for card_views }
        }
    }
    /// Renders each user with their vote status (hidden until revealed).
    fn users_view(&self, room_info: &RoomInfo) -> Html {
        let user_views = room_info.users.iter().enumerate().map(|(i, u)| {
            let a = match u.vote {
                Vote::None => format!("User {} (not voted)", i),
                Vote::Unknown => format!("User {} (voted)", i),
                Vote::Revealed(vote) => format!(
                    "User {} (vote: {})",
                    i,
                    room_info.cards[vote as usize].as_str()
                ),
                // NOTE(review): if `Vote` only has the three variants above,
                // this arm is dead code — verify against planpoker_common.
                _ => "".to_string(),
            };
            html! {
                { a }
            }
        });
        html! {
            <div class="users">
                { for user_views }
            </div>
        }
    }
    /// Forwards a request to the agent; `Vote` toggles the local selection
    /// optimistically before the server answers.
    fn send_request(&mut self, req: RoomRequest) {
        if let RoomRequest::Vote(vote) = req {
            if self.vote == Some(vote) {
                // Voting for the already-selected card retracts the vote.
                self.vote = None;
            } else {
                self.vote = Some(vote)
            }
        }
        self.room_agent.send(req);
    }
    /// Reacts to agent messages; unknown rooms and disconnects bounce us back
    /// to the lobby.
    fn handle_response(&mut self, msg: RoomMessage) {
        match msg {
            RoomMessage::UserInfo(user_info) => {
                self.user_info = Some(user_info);
                // We can only join once we know who we are.
                self.join_room();
            }
            RoomMessage::NoSuchRoom(id) => {
                log::info!("No such room: {}", id);
                self.go_to_lobby();
            }
            RoomMessage::Disconnected => {
                log::info!("Disconnected");
                self.go_to_lobby();
            }
            RoomMessage::UserJoined(user_id) => {
                log::info!("User joined the room: {}", user_id);
            }
            RoomMessage::UserLeft(user_id) => {
                log::info!("User left the room: {}", user_id);
                // If *we* were removed, leave the page.
                if Some(user_id) == self.user_info {
                    self.go_to_lobby();
                }
            }
            RoomMessage::RoomInfo(room_info) => {
                log::info!("Room info: {:?}", &room_info);
                self.room_info = Some(room_info);
            }
            RoomMessage::Reset => {
                self.vote = None;
            }
            msg => println!("Unhandled msg: {:?}", msg),
        }
    }
    /// Requests membership in the room identified by our route props.
    fn join_room(&mut self) {
        log::info!("joining room {}", self.props.id);
        self.send_request(RoomRequest::JoinRoom(self.props.id));
    }
    /// Navigates back to the lobby route.
    fn go_to_lobby(&self) {
        push_route(Route::Lobby);
    }
}
|
/// Placeholder result type for hub operations; carries no data yet.
///
/// `Debug` and `Default` are derived so the type is inspectable and can be
/// produced generically; `Default::default()` and `new()` are equivalent.
#[derive(Debug, Default)]
pub struct HubResult {}

impl HubResult {
    /// Creates an empty result.
    fn new() -> Self {
        HubResult {}
    }
}
|
pub mod texture_loader;
pub mod font_loader;
pub mod obj_loader;
pub mod math;
pub mod image_m;
/*
pub struct Settings{
pub resolution: (u32, u32),
pub vr: bool
}
impl Settings{
pub fn from_args() -> Settings{
let args: Vec<String> = env::args().collect();
let (mut res_x, mut res_y) = (1024, 768);
let mut vr = false;
let mut num = 0;
for x in args.clone(){
match x{
"vr" => {
if args[num + 1] == "true"{
vr = true;
}
else{
vr = false;
}
},
"resolution" => {
res_x = args[num + 1];
res_y = args[num + 2];
},
}
num += 1;
}
Settings{
resolution: (res_x, res_y),
vr: vr
}
}
}*/
|
// Code based on [https://github.com/defuz/sublimate/blob/master/src/core/syntax/scope.rs](https://github.com/defuz/sublimate/blob/master/src/core/syntax/scope.rs)
// released under the MIT license by @defuz
use bitflags::bitflags;
use serde::{Deserialize, Serialize};
/// Foreground and background colors, with font style
///
/// A fully-resolved style: every field is concrete (contrast [`StyleModifier`],
/// whose fields are optional).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Style {
    /// Foreground color
    pub foreground: Color,
    /// Background color
    pub background: Color,
    /// Style of the font
    pub font_style: FontStyle,
}
/// A change to a [`Style`] applied incrementally by a theme rule
///
/// Fields left empty (as `None`) will not modify the corresponding field on a `Style`
///
/// [`Style`]: struct.Style.html
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct StyleModifier {
    /// Foreground color
    pub foreground: Option<Color>,
    /// Background color
    pub background: Option<Color>,
    /// Style of the font
    pub font_style: Option<FontStyle>,
}
/// RGBA color, directly from the theme
///
/// Because these numbers come directly from the theme, you might have to do your own color space
/// conversion if you're outputting a different color space from the theme. This can be a problem
/// because some Sublime themes use sRGB and some don't. This is specified in an attribute syntect
/// doesn't parse yet.
// Note: `Debug` is implemented by hand below for a compact one-line form.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Color {
    /// Red component
    pub r: u8,
    /// Green component
    pub g: u8,
    /// Blue component
    pub b: u8,
    /// Alpha (transparency) component
    pub a: u8,
}
// More compact alternate debug representation by not using a separate line for each color field,
// also adapts the default debug representation to match.
impl std::fmt::Debug for Color {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
let Color { r, g, b, a } = self;
if f.alternate() {
// when formatted with "{:#?}"
write!(
f,
"Color {{ r/g/b/a: {: >3}/{: >3}/{: >3}/{: >3} }}",
r, g, b, a
)
} else {
// when formatted with "{:?}"
write!(f, "Color {{ r/g/b/a: {}/{}/{}/{} }}", r, g, b, a)
}
}
}
bitflags! {
    /// The color-independent styling of a font - i.e. bold, italicized, and/or underlined
    // Flags combine with `|`; `FontStyle::empty()` means plain text.
    #[derive(Serialize, Deserialize)]
    pub struct FontStyle: u8 {
        /// Bold font style
        const BOLD = 1;
        /// Underline font style
        const UNDERLINE = 2;
        /// Italic font style
        const ITALIC = 4;
    }
}
impl Color {
    /// The color black (`#000000`)
    pub const BLACK: Color = Color {
        r: 0x00,
        g: 0x00,
        b: 0x00,
        a: 0xFF,
    };
    /// The color white (`#FFFFFF`)
    pub const WHITE: Color = Color {
        r: 0xFF,
        g: 0xFF,
        b: 0xFF,
        a: 0xFF,
    };
}
impl Style {
/// Applies a change to this style, yielding a new changed style
pub fn apply(&self, modifier: StyleModifier) -> Style {
Style {
foreground: modifier.foreground.unwrap_or(self.foreground),
background: modifier.background.unwrap_or(self.background),
font_style: modifier.font_style.unwrap_or(self.font_style),
}
}
}
impl Default for Style {
    /// Black text on a white background with no font styling.
    fn default() -> Style {
        Style {
            foreground: Color::BLACK,
            background: Color::WHITE,
            font_style: FontStyle::empty(),
        }
    }
}
impl StyleModifier {
    /// Applies the other modifier to this one, creating a new modifier.
    ///
    /// Values in `other` are preferred.
    pub fn apply(&self, other: StyleModifier) -> StyleModifier {
        let StyleModifier {
            foreground,
            background,
            font_style,
        } = other;
        StyleModifier {
            foreground: foreground.or(self.foreground),
            background: background.or(self.background),
            font_style: font_style.or(self.font_style),
        }
    }
}
impl Default for FontStyle {
fn default() -> FontStyle {
FontStyle::empty()
}
}
|
mod lexer;
use lexer::{Lexer,Token};
/// Entry point: feeds a tiny sample program through the lexer.
fn main() {
    let source = String::from("a <- a + 2");
    let mut lexer = Lexer::new();
    lexer.process_string(source);
}
|
use crate::neatns::network::node;
use std::collections::HashMap;
/// Records which structural mutations (node/edge additions) have already been
/// assigned innovation numbers, so identical mutations reuse the same number.
pub struct InnovationLog {
    // Keyed by the innovation number of the edge that was split.
    pub node_additions: HashMap<u64, InnovationTime>,
    // Maps an (from, to) node pair to the innovation number of that edge.
    pub edge_additions: HashMap<(node::NodeRef, node::NodeRef), u64>,
}
/// Counters captured at the moment of a node-addition mutation.
pub struct InnovationTime {
    pub node_number: u64,
    pub innovation_number: u64,
}
impl InnovationLog {
    /// Creates an empty log with no recorded node or edge additions.
    pub fn new() -> InnovationLog {
        InnovationLog {
            node_additions: HashMap::new(),
            edge_additions: HashMap::new(),
        }
    }
}
impl InnovationTime {
    /// Creates a zeroed time stamp (no nodes, no innovations yet).
    pub fn new() -> InnovationTime {
        InnovationTime {
            node_number: 0,
            innovation_number: 0,
        }
    }
}
|
extern crate reqwest;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
use std::env;
use reqwest::Client;
use reqwest::Error;
use std::time::Duration;
use reqwest::ClientBuilder;
/// Runs both demo requests.
///
/// Errors from the individual runs are deliberately discarded (`let _ =`) so
/// one failing demo does not abort the other; the function itself always
/// returns `Ok`.
fn main() -> Result<(), Error> {
    let _ = run1();
    let _ = run();
    Ok(())
}
/// Checks whether a GitHub user exists by issuing a HEAD request with a
/// 5-second timeout and inspecting the status code.
fn run1() -> Result<(), Error> {
    let user = "shaipe";
    let request_url = format!("https://api.github.com/users/{}", user);
    println!("{}", request_url);
    let timeout = Duration::new(5, 0);
    let client = ClientBuilder::new().timeout(timeout).build()?;
    let response = client.head(&request_url).send()?;
    if response.status().is_success() {
        println!("{} is a user!", user);
    } else {
        println!("{} is not a user!", user);
    }
    Ok(())
}
/// Subset of the GitHub gist API response we care about.
#[derive(Deserialize, Debug)]
struct Gist {
    id: String,
    html_url: String,
}
fn run() -> Result<(), Error> {
let gh_user = "ss";
let gh_pass = "ps";
let gist_body = json!({
"description": "the description for this gist",
"public": true,
"files": {
"main.rs": {
"content": r#"fn main() { println!("hello world!");}"#
}
}});
let request_url = "https://api.github.com/gists";
let mut response = Client::new()
.post(request_url)
.basic_auth(gh_user.clone(), Some(gh_pass.clone()))
.json(&gist_body)
.send()?;
let gist: Gist = response.json()?;
println!("Created {:?}", gist);
let request_url = format!("{}/{}",request_url, gist.id);
let response = Client::new()
.delete(&request_url)
.basic_auth(gh_user, Some(gh_pass))
.send()?;
println!("Gist {} deleted! Status code: {}",gist.id, response.status());
Ok(())
} |
use std::env;
use std::process;
use common::load_file;
/// Node header from the license-file format: counts of children and metadata.
#[derive(Debug)]
struct Header {
    num_children: usize,
    num_metadata: usize,
}
/// A tree node: header, child subtrees and metadata entries.
#[derive(Debug)]
struct Node {
    header: Header,
    // NOTE(review): `Vec<Node>` would suffice here (no recursion-size issue in
    // a Vec), but changing it touches every consumer — left as-is.
    children: Vec<Box<Node>>,
    metadata: Vec<i32>,
}
/// Recursively parses one node starting at `pos`.
///
/// Layout per node: `[num_children, num_metadata, child*, metadata*]`.
/// Returns the parsed node and the position just past it.
///
/// Takes `&[i32]` instead of `&Vec<i32>` (idiomatic slice parameter); existing
/// call sites passing `&Vec<i32>` still work via deref coercion.
fn parse(data: &[i32], pos: usize) -> (Node, usize) {
    let header = Header {
        num_children: data[pos] as usize,
        num_metadata: data[pos + 1] as usize,
    };
    // Children begin right after the two header values.
    let mut p = pos + 2;
    let mut children = vec![];
    for _ in 0..header.num_children {
        let (child, next_pos) = parse(data, p);
        children.push(Box::new(child));
        p = next_pos;
    }
    // Metadata entries follow the last child.
    let mut meta = vec![];
    for _ in 0..header.num_metadata {
        meta.push(data[p]);
        p += 1;
    }
    let node = Node {
        header,
        children,
        metadata: meta,
    };
    (node, p)
}
/// Sums every metadata entry in the subtree rooted at `node` (part 1).
///
/// Rewritten as an iterator fold over children plus this node's own metadata —
/// same traversal, no mutable accumulator.
fn sum_metadata(node: &Node) -> i32 {
    let child_sum: i32 = node.children.iter().map(|c| sum_metadata(c)).sum();
    child_sum + node.metadata.iter().sum::<i32>()
}
/// Computes a node's value (part 2): a leaf's value is the sum of its
/// metadata; otherwise each metadata entry is a 1-based child index and the
/// value is the sum of the referenced children's values (out-of-range indices
/// contribute nothing).
///
/// Fix: the original computed `(*m as usize) - 1`, which underflows (panicking
/// in debug builds) when a metadata entry is 0; `checked_sub` skips such
/// entries instead, matching the "invalid index counts as 0" rule.
fn node_value(node: &Node) -> i32 {
    if node.children.is_empty() {
        node.metadata.iter().sum()
    } else {
        node.metadata
            .iter()
            .filter_map(|&m| {
                // 1-based index; 0 and negative values are invalid references.
                let child_id = (m as usize).checked_sub(1)?;
                node.children.get(child_id).map(|c| node_value(c))
            })
            .sum()
    }
}
/// CLI entry point: reads the puzzle input file named on the command line,
/// parses the single space-separated line of numbers into the tree, and prints
/// both puzzle answers.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        println!("day8 <file>");
        process::exit(1);
    }
    let rows = load_file(&args[1]);
    // Input is one line of space-separated integers.
    let data: Vec<i32> = rows[0]
        .split(' ')
        .map(|x| x.parse::<i32>().unwrap())
        .collect();
    let (head, _) = parse(&data, 0);
    let sum = sum_metadata(&head);
    println!("Part 1: Metadata sum {}", sum);
    let value = node_value(&head);
    println!("Part 2: Node value: {}", value);
}
|
use crate::error::{from_protobuf_error, NiaServerError, NiaServerResult};
use crate::protocol::Serializable;
use protobuf::Message;
/// Action describing the release of a single mouse button.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ActionMouseButtonRelease {
    // Numeric code of the button that was released.
    button_code: i32,
}

impl ActionMouseButtonRelease {
    /// Builds the action for the given button code.
    pub fn new(button_code: i32) -> ActionMouseButtonRelease {
        Self { button_code }
    }

    /// Returns the stored button code.
    pub fn get_button_code(&self) -> i32 {
        self.button_code
    }
}
// Protobuf (de)serialization: the action maps 1:1 onto the generated
// `ActionMouseButtonRelease` message's single `button_code` field.
impl
    Serializable<
        ActionMouseButtonRelease,
        nia_protocol_rust::ActionMouseButtonRelease,
    > for ActionMouseButtonRelease
{
    /// Converts into the generated protobuf message.
    fn to_pb(&self) -> nia_protocol_rust::ActionMouseButtonRelease {
        let mut action_mouse_button_release_pb =
            nia_protocol_rust::ActionMouseButtonRelease::new();
        action_mouse_button_release_pb.set_button_code(self.button_code);
        action_mouse_button_release_pb
    }
    /// Reconstructs from a protobuf message; currently infallible.
    fn from_pb(
        object_pb: nia_protocol_rust::ActionMouseButtonRelease,
    ) -> NiaServerResult<ActionMouseButtonRelease> {
        let action_mouse_button_release =
            ActionMouseButtonRelease::new(object_pb.get_button_code());
        Ok(action_mouse_button_release)
    }
}
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;

    #[test]
    fn serializable_and_deserializable() {
        // Round-trip through the byte representation and compare codes.
        let expected = 123;
        let original = ActionMouseButtonRelease::new(expected);
        let bytes = original.to_bytes().unwrap();
        let decoded = ActionMouseButtonRelease::from_bytes(bytes).unwrap();
        assert_eq!(expected, decoded.button_code)
    }
}
|
use std::collections::{HashMap, HashSet, VecDeque};
use std::fmt;
use std::io::{self, Error, ErrorKind, Write};
use std::mem;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;
use crate::asynk::connector::Connector;
use crate::asynk::message::Message;
use crate::asynk::proto::{self, ClientOp, ServerOp};
use crate::smol::io::{AssertAsync, BufReader, BufWriter};
use crate::smol::{self, channel, future, lock, prelude::*, stream, Executor, Timer};
use crate::{inject_delay, inject_io_failure, Headers, Options, ServerInfo};
/// Client state.
///
/// Guarded by the async mutex in `Client::state`; all mutation happens with
/// that lock held.
struct State {
    /// Buffered writer with an active connection.
    ///
    /// When `None`, the client is either reconnecting or closed.
    writer: Option<Pin<Box<dyn AsyncWrite + Send>>>,
    /// Signals to the client thread that the writer needs a flush.
    flush_kicker: channel::Sender<()>,
    /// The reconnect buffer.
    ///
    /// When the client is reconnecting, PUB messages get buffered here. When the connection is
    /// re-established, contents of the buffer are flushed to the server.
    buffer: Buffer,
    /// Next subscription ID.
    next_sid: u64,
    /// Current subscriptions.
    subscriptions: HashMap<u64, Subscription>,
    /// Expected pongs and their notification channels.
    ///
    /// Completed in FIFO order as PONGs arrive (see `dispatch`).
    pongs: VecDeque<channel::Sender<()>>,
}
/// A registered subscription.
struct Subscription {
    /// Subject the subscription listens on.
    subject: String,
    /// Optional queue group name.
    queue_group: Option<String>,
    /// Channel through which incoming messages are delivered to the subscriber.
    messages: channel::Sender<Message>,
}
/// A NATS client.
///
/// Cloning is cheap: every field is behind an `Arc`.
#[derive(Clone)]
pub(crate) struct Client {
    /// Shared client state.
    state: Arc<lock::Mutex<State>>,
    /// Server info provided by the last INFO message.
    server_info: Arc<Mutex<Option<ServerInfo>>>,
    /// Subscriptions that have logically unsubscribed but haven't sent UNSUB yet.
    unsubscribed: Arc<Mutex<HashSet<u64>>>,
    /// A channel for coordinating shutdown.
    ///
    /// When `None`, that means the client is closed or in the process of closing.
    ///
    /// To initiate shutdown, we create a channel and send its sender. The client thread receives
    /// this sender and sends `()` when it stops.
    shutdown: Arc<Mutex<Option<channel::Sender<channel::Sender<()>>>>>,
}
impl Client {
    /// Creates a new client that will begin connecting in the background.
    pub(crate) async fn connect(url: &str, options: Options) -> io::Result<Client> {
        // A channel for coordinating shutdown.
        let (shutdown, stop) = channel::bounded(1);
        // A channel for coordinating flushes.
        let (flush_kicker, dirty) = channel::bounded(1);
        // Channels for coordinating initial connect.
        let (run_sender, run_receiver) = channel::bounded(1);
        let (pong_sender, pong_receiver) = channel::bounded(1);
        // The client state.
        // The pre-registered `pong_sender` is completed by `reconnect` once
        // the first connection is healthy, which resolves this function.
        let client = Client {
            state: Arc::new(lock::Mutex::new(State {
                writer: None,
                flush_kicker,
                buffer: Buffer::new(options.reconnect_buffer_size),
                next_sid: 1,
                subscriptions: HashMap::new(),
                pongs: VecDeque::from(vec![pong_sender]),
            })),
            server_info: Arc::new(Mutex::new(None)),
            unsubscribed: Arc::new(Mutex::new(HashSet::new())),
            shutdown: Arc::new(Mutex::new(Some(shutdown))),
        };
        let options = Arc::new(options);
        // Connector for creating the initial connection and reconnecting when it is broken.
        let connector = Connector::new(url, options.clone())?;
        // Spawn the client thread responsible for:
        // - Maintaining a connection to the server and reconnecting when it is broken.
        // - Reading messages from the server and processing them.
        // - Forwarding MSG operations to subscribers.
        thread::spawn({
            let client = client.clone();
            move || {
                // The dedicated thread drives its own executor, so the client
                // does not depend on whatever runtime the caller uses.
                let ex = &Executor::new();
                smol::block_on(ex.run(async move {
                    // Spawn a task that periodically flushes buffered messages.
                    let flusher = ex.spawn({
                        let client = client.clone();
                        async move {
                            // Wait until at least one message is buffered.
                            while dirty.recv().await.is_ok() {
                                {
                                    // Flush the writer.
                                    let mut state = client.state.lock().await;
                                    if let Some(writer) = state.writer.as_mut() {
                                        writer.flush().await.ok();
                                    }
                                }
                                // Wait a little bit before flushing again.
                                Timer::after(Duration::from_millis(1)).await;
                            }
                        }
                    });
                    // Spawn the main task that processes messages from the server.
                    let runner = ex.spawn({
                        let client = client.clone();
                        async move {
                            let res = client.run(connector).await;
                            run_sender.try_send(res).ok();
                        }
                    });
                    // Wait until the client is closed.
                    let res = stop.recv().await;
                    // One final flush before shutting down.
                    // This way we make sure buffered published messages reach the server.
                    {
                        let mut state = client.state.lock().await;
                        if let Some(writer) = state.writer.as_mut() {
                            writer.flush().await.ok();
                        }
                    }
                    // Cancel spawned tasks.
                    flusher.cancel().await;
                    runner.cancel().await;
                    options.close_callback.call();
                    // Signal to the shutdown initiator that it is now complete.
                    if let Ok(s) = res {
                        s.try_send(()).ok();
                    }
                }));
            }
        });
        // Whichever happens first wins: a fatal error out of `run()`, or the
        // first PONG confirming an established connection.
        future::race(
            async {
                // Wait for `run()` to error.
                run_receiver.recv().await.expect("client has panicked")?;
                panic!("client has stopped unexpectedly");
            },
            async {
                // Wait for the connection to get established.
                pong_receiver.recv().await.ok();
                Ok(client)
            },
        )
        .await
    }
/// Retrieves server info as received by the most recent connection.
pub(crate) fn server_info(&self) -> Option<ServerInfo> {
self.server_info.lock().unwrap().clone()
}
    /// Makes a round trip to the server to ensure buffered messages reach it.
    pub(crate) async fn flush(&self) -> io::Result<()> {
        // The block expression ensures the state lock is dropped before we
        // await the PONG below.
        let pong = {
            // Inject random delays when testing.
            inject_delay().await;
            let mut state = self.state.lock().await;
            // Check if the client is closed.
            if self.shutdown.lock().unwrap().is_none() {
                return Err(Error::new(ErrorKind::NotConnected, "the client is closed"));
            }
            let (sender, receiver) = channel::bounded(1);
            // If connected, send a PING.
            match state.writer.as_mut() {
                None => {}
                Some(mut writer) => {
                    proto::encode(&mut writer, ClientOp::Ping).await?;
                    writer.flush().await?;
                }
            }
            // Enqueue an expected PONG.
            state.pongs.push_back(sender);
            receiver
        };
        // Wait until the PONG operation is received.
        match pong.recv().await {
            Ok(()) => Ok(()),
            Err(_) => Err(Error::new(ErrorKind::ConnectionReset, "flush failed")),
        }
    }
    /// Closes the client.
    pub(crate) async fn close(&self) -> io::Result<()> {
        // Inject random delays when testing.
        inject_delay().await;
        let mut state = self.state.lock().await;
        // Initiate shutdown process.
        if let Some(shutdown) = self.shutdown.lock().unwrap().take() {
            // Unsubscribe all subscriptions.
            for sid in state.subscriptions.keys() {
                self.unsubscribe(*sid);
            }
            // Drain the set populated above and send UNSUBs while the state
            // lock is still held.
            self.cleanup_subscriptions(&mut state).await;
            // Flush the writer in case there are buffered messages.
            if let Some(writer) = state.writer.as_mut() {
                writer.flush().await.ok();
            }
            // Wake up all pending flushes.
            state.pongs.clear();
            drop(state);
            let (s, r) = channel::bounded(1);
            // Signal the thread to stop.
            shutdown.try_send(s).ok();
            // Wait for the thread to stop.
            r.recv().await.ok();
        }
        Ok(())
    }
/// Kicks off the shutdown process, but doesn't wait for its completion.
pub(crate) fn shutdown(&self) {
self.shutdown.lock().unwrap().take();
}
    /// Subscribes to a subject.
    pub(crate) async fn subscribe(
        &self,
        subject: &str,
        queue_group: Option<&str>,
    ) -> io::Result<(u64, channel::Receiver<Message>)> {
        // Inject random delays when testing.
        inject_delay().await;
        let mut state = self.state.lock().await;
        // Check if the client is closed.
        if self.shutdown.lock().unwrap().is_none() {
            return Err(Error::new(ErrorKind::NotConnected, "the client is closed"));
        }
        // Clean up dead subscriptions.
        self.cleanup_subscriptions(&mut state).await;
        // Generate a subject ID.
        let sid = state.next_sid;
        state.next_sid += 1;
        // If connected, send a SUB operation.
        // Write errors are ignored here: `reconnect` re-sends SUB for every
        // registered subscription when the connection is re-established.
        if let Some(writer) = state.writer.as_mut() {
            let op = ClientOp::Sub {
                subject,
                queue_group,
                sid,
            };
            proto::encode(writer, op).await.ok();
            state.flush_kicker.try_send(()).ok();
        }
        // Register the subscription in the hash map.
        let (sender, receiver) = channel::unbounded();
        state.subscriptions.insert(
            sid,
            Subscription {
                subject: subject.to_string(),
                queue_group: queue_group.map(ToString::to_string),
                messages: sender,
            },
        );
        Ok((sid, receiver))
    }
/// Unsubscribes from a subject.
pub(crate) fn unsubscribe(&self, sid: u64) {
self.unsubscribed.lock().unwrap().insert(sid);
}
    /// Publishes a message with optional reply subject and headers.
    pub(crate) async fn publish(
        &self,
        subject: &str,
        reply_to: Option<&str>,
        headers: Option<&Headers>,
        msg: &[u8],
    ) -> io::Result<()> {
        // Inject random delays when testing.
        inject_delay().await;
        let mut state = self.state.lock().await;
        // Check if the client is closed.
        if self.shutdown.lock().unwrap().is_none() {
            return Err(Error::new(ErrorKind::NotConnected, "the client is closed"));
        }
        // HPUB carries headers; a plain PUB does not.
        let op = if let Some(headers) = headers {
            ClientOp::Hpub {
                subject,
                reply_to,
                payload: msg,
                headers,
            }
        } else {
            ClientOp::Pub {
                subject,
                reply_to,
                payload: msg,
            }
        };
        match state.writer.as_mut() {
            None => {
                // If reconnecting, write into the buffer.
                proto::encode(AssertAsync::new(&mut state.buffer), op).await?;
                state.buffer.flush()?;
                Ok(())
            }
            Some(mut writer) => {
                // If connected, write into the writer.
                let res = proto::encode(&mut writer, op).await;
                state.flush_kicker.try_send(()).ok();
                // If writing fails, disconnect.
                if res.is_err() {
                    state.writer = None;
                    state.pongs.clear();
                }
                res
            }
        }
    }
    /// Runs the loop that connects and reconnects the client.
    async fn run(&self, mut connector: Connector) -> io::Result<()> {
        let mut first_connect = true;
        loop {
            // Don't use backoff on first connect.
            let use_backoff = !first_connect;
            // Make a connection to the server.
            let (server_info, reader, writer) = connector.connect(use_backoff).await?;
            let reader = BufReader::with_capacity(128 * 1024, reader);
            let writer = BufWriter::with_capacity(128 * 1024, writer);
            // Create an endless stream parsing operations from the server.
            let server_ops = stream::try_unfold(reader, |mut stream| async {
                // Decode a single operation.
                match proto::decode(&mut stream).await? {
                    None => io::Result::Ok(None),
                    Some(op) => io::Result::Ok(Some((op, stream))),
                }
            })
            .boxed();
            // Set up the new connection for this client.
            if self.reconnect(server_info, writer).await.is_ok() {
                // Connected! Now dispatch MSG operations.
                if !first_connect {
                    connector.get_options().reconnect_callback.call();
                }
                // `dispatch` only returns `Ok` on a graceful stop; any
                // connection breakage surfaces as `Err` and we loop around.
                if self.dispatch(server_ops, &mut connector).await.is_ok() {
                    // If the client stopped gracefully, return.
                    return Ok(());
                } else {
                    connector.get_options().disconnect_callback.call();
                }
            }
            // Inject random delays when testing.
            inject_delay().await;
            // Check if the client is closed.
            if self.shutdown.lock().unwrap().is_none() {
                return Ok(());
            }
            first_connect = false;
        }
    }
    /// Puts the client back into connected state with the given writer.
    async fn reconnect(
        &self,
        server_info: ServerInfo,
        writer: impl AsyncWrite + Send + 'static,
    ) -> io::Result<()> {
        // Inject random delays when testing.
        inject_delay().await;
        let mut state = self.state.lock().await;
        // Check if the client is closed.
        if self.shutdown.lock().unwrap().is_none() {
            return Err(Error::new(ErrorKind::NotConnected, "the client is closed"));
        }
        // Drop the current writer, if there is one.
        state.writer = None;
        // Pin the new writer on the heap.
        let mut writer = Box::pin(writer);
        // Inject random I/O failures when testing.
        inject_io_failure()?;
        // Clean up dead subscriptions.
        self.cleanup_subscriptions(&mut state).await;
        // Restart subscriptions that existed before the last reconnect.
        for (sid, subscription) in &state.subscriptions {
            // Send a SUB operation to the server.
            proto::encode(
                &mut writer,
                ClientOp::Sub {
                    subject: subscription.subject.as_str(),
                    queue_group: subscription.queue_group.as_deref(),
                    sid: *sid,
                },
            )
            .await?;
        }
        // Take out expected PONGs.
        let pongs = mem::replace(&mut state.pongs, VecDeque::new());
        // Take out buffered operations.
        let buffered = state.buffer.clear();
        // Write buffered PUB operations into the new writer.
        writer.write_all(buffered).await?;
        writer.flush().await?;
        // All good, continue with this connection.
        *self.server_info.lock().unwrap() = Some(server_info);
        state.writer = Some(writer);
        // Complete PONGs because the connection is healthy.
        // This is also what resolves the initial `connect()` call: its
        // pre-registered pong sender lives in `state.pongs`.
        for p in pongs {
            p.try_send(()).ok();
        }
        Ok(())
    }
    /// Reads messages from the server and dispatches them to subscribers.
    ///
    /// Returns `Ok(())` only when the operation stream ends gracefully;
    /// a broken stream yields `ConnectionReset`.
    async fn dispatch(
        &self,
        mut server_ops: impl Stream<Item = io::Result<ServerOp>> + Unpin,
        connector: &mut Connector,
    ) -> io::Result<()> {
        // Handle operations received from the server.
        while let Some(op) = server_ops.next().await {
            let op = op?;
            // Inject random delays when testing.
            inject_delay().await;
            let mut state = self.state.lock().await;
            match op {
                ServerOp::Info(server_info) => {
                    // Remember newly advertised servers for future reconnects.
                    for url in &server_info.connect_urls {
                        connector.add_url(url).ok();
                    }
                    *self.server_info.lock().unwrap() = Some(server_info);
                }
                ServerOp::Ping => {
                    // Respond with a PONG if connected.
                    if let Some(w) = state.writer.as_mut() {
                        proto::encode(w, ClientOp::Pong).await?;
                        state.flush_kicker.try_send(()).ok();
                    }
                }
                ServerOp::Pong => {
                    // If a PONG is received while disconnected, it came from a connection that isn't
                    // alive anymore and therefore doesn't correspond to the next expected PONG.
                    if state.writer.is_some() {
                        // Take the next expected PONG and complete it by sending a message.
                        if let Some(pong) = state.pongs.pop_front() {
                            pong.try_send(()).ok();
                        }
                    }
                }
                ServerOp::Msg {
                    subject,
                    sid,
                    reply_to,
                    payload,
                } => {
                    // Send the message to matching subscription.
                    if let Some(subscription) = state.subscriptions.get(&sid) {
                        let msg = Message {
                            subject,
                            reply: reply_to,
                            data: payload,
                            headers: None,
                            client: self.clone(),
                        };
                        // Send a message or drop it if the channel is disconnected or full.
                        subscription.messages.try_send(msg).ok();
                    } else {
                        // If there is no matching subscription, clean up.
                        self.cleanup_subscriptions(&mut state).await;
                    }
                }
                ServerOp::Hmsg {
                    subject,
                    headers,
                    sid,
                    reply_to,
                    payload,
                } => {
                    // Send the message to matching subscription.
                    if let Some(subscription) = state.subscriptions.get(&sid) {
                        let msg = Message {
                            subject,
                            reply: reply_to,
                            data: payload,
                            headers: Some(headers),
                            client: self.clone(),
                        };
                        // Send a message or drop it if the channel is disconnected or full.
                        subscription.messages.try_send(msg).ok();
                    } else {
                        // If there is no matching subscription, clean up.
                        self.cleanup_subscriptions(&mut state).await;
                    }
                }
                ServerOp::Err(msg) => return Err(Error::new(ErrorKind::Other, msg)),
                ServerOp::Unknown(line) => log::warn!("unknown op: {}", line),
            }
        }
        // The stream of operation is broken, meaning the connection was lost.
        Err(ErrorKind::ConnectionReset.into())
    }
    /// Sends UNSUB for dead subscriptions.
    ///
    /// Callers must already hold the state lock; the guard parameter proves it.
    async fn cleanup_subscriptions(&self, state: &mut lock::MutexGuard<'_, State>) {
        // Keep unsubscribed list in separate variable so it won't be captured by for loop context.
        let unsubscribed = mem::replace(&mut *self.unsubscribed.lock().unwrap(), HashSet::new());
        for sid in unsubscribed {
            // Remove the subscription from the map.
            state.subscriptions.remove(&sid);
            // Send an UNSUB message and ignore errors.
            if let Some(writer) = state.writer.as_mut() {
                let max_msgs = None;
                proto::encode(writer, ClientOp::Unsub { sid, max_msgs })
                    .await
                    .ok();
                state.flush_kicker.try_send(()).ok();
            }
        }
    }
}
impl fmt::Debug for Client {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        // All interesting state is behind locks, so only the type name is shown.
        let mut builder = f.debug_struct("Client");
        builder.finish()
    }
}
/// Reconnect buffer.
///
/// If the connection was broken and the client is currently reconnecting, PUB messages get stored
/// in this buffer of limited size. As soon as the connection is then re-established, buffered
/// messages will be sent to the server.
struct Buffer {
    /// Bytes in the buffer.
    ///
    /// There are three interesting ranges in this slice:
    ///
    /// - `..flushed` contains buffered PUB messages.
    /// - `flushed..written` contains a partial PUB message at the end.
    /// - `written..` is empty space in the buffer.
    bytes: Box<[u8]>,
    /// Number of written bytes.
    written: usize,
    /// Number of bytes marked as "flushed".
    flushed: usize,
}

impl Buffer {
    /// Creates a new buffer with the given size.
    fn new(size: usize) -> Buffer {
        let storage = vec![0_u8; size].into_boxed_slice();
        Buffer {
            bytes: storage,
            written: 0,
            flushed: 0,
        }
    }

    /// Clears the buffer and returns buffered bytes.
    fn clear(&mut self) -> &[u8] {
        let flushed_len = self.flushed;
        self.flushed = 0;
        self.written = 0;
        // Only fully flushed bytes are handed back; a trailing partial
        // message (if any) is discarded together with the rest.
        &self.bytes[..flushed_len]
    }
}

impl Write for Buffer {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let incoming = buf.len();
        let free = self.bytes.len() - self.written;
        if incoming > free {
            // Mark the buffer as full so later, smaller writes fail too,
            // rather than interleaving a truncated message stream.
            self.written = self.bytes.len();
            Err(Error::new(
                ErrorKind::Other,
                "the disconnect buffer is full",
            ))
        } else {
            // Copy the bytes into the free tail of the buffer.
            let start = self.written;
            self.bytes[start..start + incoming].copy_from_slice(buf);
            self.written += incoming;
            Ok(incoming)
        }
    }

    fn flush(&mut self) -> io::Result<()> {
        // Everything written so far becomes visible to `clear`.
        self.flushed = self.written;
        Ok(())
    }
}
|
//! Provides traits for statistical computation
pub use self::iter_statistics::*;
pub use self::order_statistics::*;
pub use self::slice_statistics::*;
pub use self::statistics::*;
pub use self::traits::*;
mod iter_statistics;
mod order_statistics;
// TODO: fix later
mod slice_statistics;
mod statistics;
mod traits;
|
/*
* Free FFT and convolution (Rust)
*
* Copyright (c) 2020 Project Nayuki. (MIT License)
* https://www.nayuki.io/page/free-small-fft-in-multiple-languages
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
* - The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* - The Software is provided "as is", without warranty of any kind, express or
* implied, including but not limited to the warranties of merchantability,
* fitness for a particular purpose and noninfringement. In no event shall the
* authors or copyright holders be liable for any claim, damages or other
* liability, whether in an action of contract, tort or otherwise, arising from,
* out of or in connection with the Software or the use or other dealings in the
* Software.
*/
use std;
/*
 * Computes the discrete Fourier transform (DFT) of the given complex vector, storing the result back into the vector.
 * The vector can have any length. This is a wrapper function.
 */
pub fn transform(real: &mut [f64], imag: &mut [f64]) {
    let n = real.len();
    assert_eq!(imag.len(), n);
    if n == 0 {
        // Nothing to transform.
    } else if n.is_power_of_two() {
        // Fast path: radix-2 Cooley-Tukey.
        transform_radix2(real, imag);
    } else {
        // Arbitrary sizes go through Bluestein's chirp z-transform.
        transform_bluestein(real, imag);
    }
}
/*
 * Computes the inverse discrete Fourier transform (IDFT) of the given complex vector, storing the result back into the vector.
 * The vector can have any length. This is a wrapper function. This transform does not perform scaling, so the inverse is not a true inverse.
 */
pub fn inverse_transform(real: &mut [f64], imag: &mut [f64]) {
    // Swapping the real and imaginary arguments applies the standard identity
    // IDFT(re, im) = swap(DFT(swap(re, im))), turning the forward transform
    // into an (unscaled) inverse without extra code.
    transform(imag, real);
}
/*
 * Computes the discrete Fourier transform (DFT) of the given complex vector, storing the result back into the vector.
 * The vector's length must be a power of 2. Uses the Cooley-Tukey decimation-in-time radix-2 algorithm.
 */
pub fn transform_radix2(real: &mut [f64], imag: &mut [f64]) {
    let n = real.len();
    assert_eq!(imag.len(), n, "Mismatched lengths");
    assert!(n.is_power_of_two(), "Length is not a power of 2");
    if n == 1 {
        // A one-point DFT is the identity.
        return;
    }

    // Twiddle-factor tables: cos/sin of 2*pi*k/n for k in [0, n/2).
    let (costable, sintable): (Vec<f64>, Vec<f64>) = (0..n / 2)
        .map(|k| {
            let angle = 2.0 * std::f64::consts::PI * (k as f64) / (n as f64);
            (angle.cos(), angle.sin())
        })
        .unzip();

    // Reorder the inputs into bit-reversed index order.
    let shift = n.leading_zeros() + 1;
    for i in 0..n {
        let rev = i.reverse_bits() >> shift;
        if rev > i {
            real.swap(i, rev);
            imag.swap(i, rev);
        }
    }

    // Bottom-up butterflies over blocks of doubling size.
    let mut size = 2;
    while size <= n {
        let halfsize = size / 2;
        let tablestep = n / size;
        for block in (0..n).step_by(size) {
            let mut k = 0;
            for j in block..block + halfsize {
                let l = j + halfsize;
                // Multiply the odd half by the twiddle factor, then combine.
                let tpre = real[l] * costable[k] + imag[l] * sintable[k];
                let tpim = -real[l] * sintable[k] + imag[l] * costable[k];
                real[l] = real[j] - tpre;
                imag[l] = imag[j] - tpim;
                real[j] += tpre;
                imag[j] += tpim;
                k += tablestep;
            }
        }
        if size == n {
            // Doubling once more would overflow `size` past `n`; we're done.
            break;
        }
        size *= 2;
    }
}
/*
 * Computes the discrete Fourier transform (DFT) of the given complex vector, storing the result back into the vector.
 * The vector can have any length. This requires the convolution function, which in turn requires the radix-2 FFT function.
 * Uses Bluestein's chirp z-transform algorithm.
 */
pub fn transform_bluestein(real: &mut [f64], imag: &mut [f64]) {
    // Find a power-of-2 convolution length m such that m >= n * 2 + 1
    let n: usize = real.len();
    assert_eq!(imag.len(), n, "Mismatched lengths");
    let m: usize = Some(n)
        .and_then(|x| x.checked_mul(2))
        .and_then(|x| x.checked_add(1))
        .and_then(|x| x.checked_next_power_of_two())
        .expect("Array too large");
    // Trigonometric tables
    // costable[i]/sintable[i] hold cos/sin of pi * i^2 / n (the "chirp").
    let mut costable = Vec::<f64>::with_capacity(n);
    let mut sintable = Vec::<f64>::with_capacity(n);
    for i in 0 .. n {
        let j: u64 = (i as u64) * (i as u64) % ((n as u64) * 2); // This is more accurate than j = i * i
        let angle: f64 = std::f64::consts::PI * (j as f64) / (n as f64);
        costable.push(angle.cos());
        sintable.push(angle.sin());
    }
    // Temporary vectors and preprocessing
    // a = input modulated by the conjugate chirp, zero-padded to length m.
    let mut areal = vec![0.0f64; m];
    let mut aimag = vec![0.0f64; m];
    for i in 0 .. n {
        areal[i] = real[i] * costable[i] + imag[i] * sintable[i];
        aimag[i] = -real[i] * sintable[i] + imag[i] * costable[i];
    }
    // b = the chirp itself, mirrored so that circular convolution with `a`
    // realizes the linear chirp convolution.
    let mut breal = vec![0.0f64; m];
    let mut bimag = vec![0.0f64; m];
    breal[0] = costable[0];
    bimag[0] = sintable[0];
    for i in 1 .. n {
        breal[i] = costable[i];
        breal[m - i] = costable[i];
        bimag[i] = sintable[i];
        bimag[m - i] = sintable[i];
    }
    // Convolution
    let (creal, cimag) = convolve_complex(areal, aimag, breal, bimag);
    // Postprocessing
    // Re-apply the conjugate chirp to the first n convolution outputs.
    for i in 0 .. n {
        real[i] = creal[i] * costable[i] + cimag[i] * sintable[i];
        imag[i] = -creal[i] * sintable[i] + cimag[i] * costable[i];
    }
}
/*
 * Computes the circular convolution of the given real vectors. Each vector's length must be the same.
 */
pub fn convolve_real(xvec: Vec<f64>, yvec: Vec<f64>) -> Vec<f64> {
    // Treat both inputs as complex vectors with zero imaginary parts and
    // keep only the real part of the result.
    let n = xvec.len();
    let (out_real, _out_imag) = convolve_complex(xvec, vec![0.0; n], yvec, vec![0.0; n]);
    out_real
}
/*
 * Computes the circular convolution of the given complex vectors. Each vector's length must be the same.
 */
pub fn convolve_complex(
    mut xreal: Vec<f64>, mut ximag: Vec<f64>,
    mut yreal: Vec<f64>, mut yimag: Vec<f64>,
) -> (Vec<f64>,Vec<f64>) {
    let n = xreal.len();
    assert_eq!(ximag.len(), n);
    assert_eq!(yreal.len(), n);
    assert_eq!(yimag.len(), n);

    // Convolution theorem: transform both inputs, multiply pointwise in the
    // frequency domain, then transform back.
    transform(&mut xreal, &mut ximag);
    transform(&mut yreal, &mut yimag);
    for ((xr, xi), (yr, yi)) in xreal
        .iter_mut()
        .zip(ximag.iter_mut())
        .zip(yreal.iter().zip(yimag.iter()))
    {
        let product_re = *xr * *yr - *xi * *yi;
        *xi = *xi * *yr + *xr * *yi;
        *xr = product_re;
    }
    inverse_transform(&mut xreal, &mut ximag);

    // Undo the scaling that this FFT implementation omits.
    let scale = n as f64;
    for v in xreal.iter_mut().chain(ximag.iter_mut()) {
        *v /= scale;
    }
    (xreal, ximag)
}
|
use crate::uses::*;
use super::NLVec;
#[derive(Debug)]
struct MapNode<K, V>
{
    // Ordering key; `NLVecMap` keeps its nodes sorted by this field.
    key: K,
    // Payload associated with `key`.
    value: V,
}
impl<K: Ord, V> MapNode<K, V>
{
    // Builds a node by value.
    fn new(key: K, value: V) -> Self
    {
        MapNode {
            key,
            value,
        }
    }
    // Moves a freshly built node onto the heap and returns the raw pointer.
    // NOTE(review): `to_heap` presumably comes from `crate::uses`; the holder
    // of the pointer must eventually reclaim it via `from_heap` — confirm.
    fn heap(key: K, value: V) -> *mut Self
    {
        to_heap(Self::new(key, value))
    }
}
#[derive(Debug)]
/// Sorted map stored as an `NLVec` of heap-allocated nodes; lookups use
/// binary search over the key order.
pub struct NLVecMap<K, V>(NLVec<MapNode<K, V>>);
impl<K: Ord + Clone, V> NLVecMap<K, V>
{
    /// Creates an empty map.
    pub fn new() -> Self
    {
        NLVecMap(NLVec::new())
    }
    /// Returns the number of entries in the map.
    pub fn len(&self) -> usize
    {
        self.0.len()
    }
    /// Returns a reference to the value for `key`, if present.
    // NOTE(review): uses `write` even for a read-only lookup — presumably
    // `NLVec` only exposes a `write` accessor; confirm against its API.
    pub fn get(&self, key: &K) -> Option<&V>
    {
        self.0.write(|vec| match Self::search(vec, key) {
            // SAFETY: every stored pointer originates from `MapNode::heap`
            // and stays valid until the entry is removed from the vector.
            Ok(index) => unsafe { Some(&vec[index].as_ref().unwrap().value) },
            Err(_) => None,
        })
    }
    /// Inserts `key -> value`, returning the previous value for `key` if any.
    pub fn insert(&self, key: K, value: V) -> Option<V>
    {
        let node = MapNode::heap(key.clone(), value);
        self.0
            .write(|vec| match Self::search(vec, &key) {
                Ok(index) => {
                    // Key already present: swap in the new node, hand back the old.
                    let out = vec.remove(index);
                    vec.insert(index, node);
                    Some(out)
                },
                Err(index) => {
                    // New key: insert at the sorted position.
                    vec.insert(index, node);
                    None
                },
            })
            .map(|ptr| unsafe { from_heap(ptr).value })
    }
    /// Removes `key` from the map, returning its value if it was present.
    pub fn remove(&self, key: &K) -> Option<V>
    {
        self.0
            .write(|vec| match Self::search(vec, key) {
                // BUG FIX: the entry must actually be taken out of the vector.
                // Previously the pointer was only copied (`vec[index]`), so a
                // dangling pointer stayed in the map after `from_heap` freed
                // the node, making any later `get` a use-after-free.
                Ok(index) => Some(vec.remove(index)),
                Err(_) => None,
            })
            .map(|ptr| unsafe { from_heap(ptr).value })
    }
    // if key is contained in the map, Ok(index of element) is returned
    // else, Err(index where element should go) is returned
    fn search(vec: &Vec<*const MapNode<K, V>>, key: &K) -> Result<usize, usize>
    {
        // SAFETY: all stored pointers come from `MapNode::heap`, are non-null,
        // and remain live for as long as they stay in the vector.
        unsafe {
            vec.binary_search_by(|ptr| {
                let probe_key = &ptr.as_ref().unwrap().key;
                probe_key.cmp(key)
            })
        }
    }
}
|
use rsb_derive::Builder;
use crate::api::*;
use crate::errors::*;
use crate::{SlackClient, SlackClientHttpApi};
use crate::listener::{ErrorHandler, SlackClientEventsListener};
use futures::future::{BoxFuture, FutureExt};
use hyper::body::*;
use hyper::{Method, Request, Response};
use log::*;
use std::future::Future;
use std::sync::Arc;
#[derive(Debug, PartialEq, Clone, Builder)]
/// Configuration for the OAuth install/callback HTTP listener.
pub struct SlackOAuthListenerConfig {
    /// Slack app client id, sent as the `client_id` query parameter.
    pub client_id: String,
    /// Slack app client secret, used when exchanging the code for a token.
    pub client_secret: String,
    /// Bot scope(s) requested during installation (`scope` query parameter).
    pub bot_scope: String,
    /// Externally visible host prepended to `redirect_callback_path`.
    pub redirect_callback_host: String,
    #[default = "SlackOAuthListenerConfig::DEFAULT_INSTALL_PATH_VALUE.into()"]
    pub install_path: String,
    #[default = "SlackOAuthListenerConfig::DEFAULT_CALLBACK_PATH_VALUE.into()"]
    pub redirect_callback_path: String,
    #[default = "SlackOAuthListenerConfig::DEFAULT_INSTALLED_URL_VALUE.into()"]
    pub redirect_installed_url: String,
    #[default = "SlackOAuthListenerConfig::DEFAULT_CANCELLED_URL_VALUE.into()"]
    pub redirect_cancelled_url: String,
    #[default = "SlackOAuthListenerConfig::DEFAULT_ERROR_URL_VALUE.into()"]
    pub redirect_error_redirect_url: String,
}
impl SlackOAuthListenerConfig {
    const DEFAULT_INSTALL_PATH_VALUE: &'static str = "/auth/install";
    const DEFAULT_CALLBACK_PATH_VALUE: &'static str = "/auth/callback";
    const DEFAULT_INSTALLED_URL_VALUE: &'static str = "/installed";
    const DEFAULT_CANCELLED_URL_VALUE: &'static str = "/cancelled";
    const DEFAULT_ERROR_URL_VALUE: &'static str = "/error";
    const OAUTH_AUTHORIZE_URL_VALUE: &'static str = "https://slack.com/oauth/v2/authorize";

    /// Concatenates the callback host and path into the full redirect URL.
    pub fn to_redirect_url(&self) -> String {
        let mut url = String::with_capacity(
            self.redirect_callback_host.len() + self.redirect_callback_path.len(),
        );
        url.push_str(&self.redirect_callback_host);
        url.push_str(&self.redirect_callback_path);
        url
    }
}
impl SlackClientEventsListener {
    // Redirects a browser hitting the install path to Slack's OAuth v2
    // authorize page, carrying our client id, requested bot scope, and
    // callback URL as query parameters.
    async fn slack_oauth_install_service(
        _: Request<Body>,
        config: &SlackOAuthListenerConfig,
    ) -> Result<Response<Body>, Box<dyn std::error::Error + Send + Sync>> {
        let full_uri = SlackClientHttpApi::create_url_with_params(
            SlackOAuthListenerConfig::OAUTH_AUTHORIZE_URL_VALUE,
            &vec![
                ("client_id", Some(&config.client_id)),
                ("scope", Some(&config.bot_scope)),
                ("redirect_uri", Some(&config.to_redirect_url())),
            ],
        );
        debug!("Redirecting to Slack OAuth authorize: {}", &full_uri);
        SlackClientHttpApi::hyper_redirect_to(&full_uri.to_string())
    }
    // Handles Slack's OAuth redirect callback: exchanges the temporary `code`
    // for an access token, invokes the application's install callback, and
    // redirects the browser to the configured result page.
    async fn slack_oauth_callback_service<'a, I, IF>(
        req: Request<Body>,
        config: &'a SlackOAuthListenerConfig,
        client: Arc<SlackClient>,
        install_service_fn: I,
        error_handler: ErrorHandler,
    ) -> Result<Response<Body>, Box<dyn std::error::Error + Send + Sync>>
    where
        I: Fn(SlackOAuthV2AccessTokenResponse, Arc<SlackClient>) -> IF
            + 'static
            + Send
            + Sync
            + Clone,
        IF: Future<Output = ()> + 'static + Send,
    {
        let params = SlackClientHttpApi::parse_query_params(&req);
        debug!("Received Slack OAuth callback: {:?}", &params);
        match (params.get("code"), params.get("error")) {
            // Happy path: exchange the temporary code for an access token.
            (Some(code), None) => {
                let oauth_access_resp = client
                    .oauth2_access(
                        &SlackOAuthV2AccessTokenRequest::from(SlackOAuthV2AccessTokenRequestInit {
                            client_id: config.client_id.clone(),
                            client_secret: config.client_secret.clone(),
                            code: code.into(),
                        })
                        .with_redirect_uri(config.to_redirect_url()),
                    )
                    .await;
                match oauth_access_resp {
                    Ok(oauth_resp) => {
                        info!(
                            "Received slack OAuth access resp for: {} / {} / {}",
                            &oauth_resp.team.id,
                            &oauth_resp
                                .team
                                .name
                                .as_ref()
                                .cloned()
                                .unwrap_or_else(|| "".into()),
                            &oauth_resp.authed_user.id
                        );
                        // Let the application persist the token before redirecting.
                        install_service_fn(oauth_resp, client).await;
                        SlackClientHttpApi::hyper_redirect_to(&config.redirect_installed_url)
                    }
                    Err(err) => {
                        error!("Slack OAuth error: {}", &err);
                        error_handler(err, client);
                        SlackClientHttpApi::hyper_redirect_to(&config.redirect_error_redirect_url)
                    }
                }
            }
            // Slack reported an error (e.g. the user denied the request).
            (None, Some(err)) => {
                info!("Slack OAuth cancelled with the reason: {}", err);
                error_handler(
                    Box::new(SlackClientError::ApiError(SlackClientApiError::new(
                        err.clone(),
                    ))),
                    client,
                );
                // Propagate the original query string to the error page.
                let redirect_error_url = format!(
                    "{}{}",
                    &config.redirect_error_redirect_url,
                    req.uri().query().map_or("".into(), |q| format!("?{}", &q))
                );
                SlackClientHttpApi::hyper_redirect_to(&redirect_error_url)
            }
            // Neither `code` nor `error` present: malformed callback.
            _ => {
                error!("Slack OAuth cancelled with unknown reason");
                error_handler(
                    Box::new(SlackClientError::SystemError(SlackClientSystemError::new(
                        "OAuth cancelled with unknown reason".into(),
                    ))),
                    client,
                );
                SlackClientHttpApi::hyper_redirect_to(&config.redirect_error_redirect_url)
            }
        }
    }
    /// Builds a hyper service function that routes OAuth install and callback
    /// requests to the handlers above and delegates everything else to `chain`.
    pub fn oauth_service_fn<'a, D, F, I, IF>(
        &self,
        config: Arc<SlackOAuthListenerConfig>,
        install_service_fn: I,
    ) -> impl Fn(
        Request<Body>,
        D,
    ) -> BoxFuture<
        'a,
        Result<Response<Body>, Box<dyn std::error::Error + Send + Sync + 'a>>,
    >
    + 'a
    + Send
    + Clone
    where
        D: Fn(Request<Body>) -> F + 'a + Send + Sync + Clone,
        F: Future<Output = Result<Response<Body>, Box<dyn std::error::Error + Send + Sync + 'a>>>
            + 'a
            + Send,
        I: Fn(SlackOAuthV2AccessTokenResponse, Arc<SlackClient>) -> IF
            + 'static
            + Send
            + Sync
            + Clone,
        IF: Future<Output = ()> + 'static + Send,
    {
        let client = self.client.clone();
        let listener_error_handler = self.error_handler.clone();
        move |req: Request<Body>, chain: D| {
            // Each invocation gets its own clones so the returned closure
            // stays `Clone` and usable across requests.
            let cfg = config.clone();
            let install_fn = install_service_fn.clone();
            let sc = client.clone();
            let error_handler = listener_error_handler.clone();
            async move {
                // Route by method + exact path; unmatched requests fall
                // through to the wrapped `chain` service.
                match (req.method(), req.uri().path()) {
                    (&Method::GET, url) if url == cfg.install_path => {
                        Self::slack_oauth_install_service(req, &cfg).await
                    }
                    (&Method::GET, url) if url == cfg.redirect_callback_path => {
                        Self::slack_oauth_callback_service(req, &cfg, sc, install_fn, error_handler)
                            .await
                    }
                    _ => chain(req).await,
                }
            }
            .boxed()
        }
    }
}
|
#![no_std]
#![crate_type = "lib"]
#![crate_name = "emlib"]
#![allow(warnings)]
#[macro_use]
// emlib bindings
pub mod acmp;
pub mod adc;
pub mod chip;
pub mod cmu;
pub mod dma;
pub mod ebi;
pub mod emu;
pub mod gpio;
pub mod i2c;
pub mod irq;
pub mod lesense;
pub mod leuart;
pub mod prs;
pub mod rtc;
pub mod timer;
pub mod usart;
// Alias `std` to `core` so shared code referring to `std::...` paths still
// compiles inside this `#![no_std]` crate.
mod std {
    pub use core::*;
}
|
//! This crate provides various matcher algorithms for line oriented search given the query string.
//!
//! The matcher result consists of the score and the indices of matched items.
//!
//! There are two steps to match a line:
//!
//! // RawLine
//! // |
//! // | MatchType: extract the content to match.
//! // |
//! // ↓
//! // MatchText
//! // |
//! // | Algo: run the match algorithm on MatchText.
//! // |
//! // ↓
//! // MatchResult
//!
mod algo;
use source_item::SourceItem;
use structopt::clap::arg_enum;
pub use algo::*;
pub use source_item::MatchType;
pub type Score = i64;
/// A tuple of (score, matched_indices) for the line has a match given the query string.
pub type MatchResult = Option<(Score, Vec<usize>)>;
/// Calculates the bonus score given the match result of the base algorithm.
///
/// * `bonus` - bonus strategy to apply.
/// * `item` - the matched item; `item.raw` is the full original line.
/// * `score` - score returned by the base algorithm.
/// * `indices` - matched character indices from the base algorithm.
pub fn calculate_bonus(bonus: &Bonus, item: &SourceItem, score: Score, indices: &[usize]) -> Score {
    match bonus {
        Bonus::FileName => {
            // The bonus only applies when a file-name portion can be located.
            if let Some((_, idx)) = pattern::file_name_only(&item.raw) {
                // Count matched positions inside the file name directly;
                // no need to collect them into a throwaway Vec first.
                let hits = indices.iter().filter(|x| **x >= idx).count();
                if item.raw.len() > idx {
                    // bonus = base_score * len(matched elements in filename) / len(filename)
                    score * hits as i64 / (item.raw.len() - idx) as i64
                } else {
                    0
                }
            } else {
                0
            }
        }
        Bonus::None => 0,
    }
}
// `arg_enum!` (from structopt/clap) additionally generates `FromStr` and
// `variants()` so `Bonus` can be used directly as a CLI argument value.
arg_enum! {
    #[derive(Debug, Clone)]
    pub enum Bonus {
        // Give a bonus if the needle matches in the basename of the haystack.
        //
        // Ref https://github.com/liuchengxu/vim-clap/issues/561
        FileName,
        // No additional bonus.
        None,
    }
}
impl Default for Bonus {
fn default() -> Self {
Self::None
}
}
impl From<String> for Bonus {
    /// Delegates to the `&str` conversion.
    fn from(b: String) -> Self {
        Self::from(b.as_str())
    }
}
impl From<&str> for Bonus {
    /// Case-insensitive conversion; anything unrecognized deliberately
    /// falls back to `Bonus::None` rather than erroring.
    fn from(b: &str) -> Self {
        if b.to_lowercase() == "filename" {
            Self::FileName
        } else {
            Self::None
        }
    }
}
/// `Matcher` is composed of three components:
///
/// * `match_type`: represents the way of extracting the matching piece from the raw line.
/// * `algo`: algorithm used for matching the text.
/// * `bonus`: add a bonus to the result of base `algo`.
pub struct Matcher {
    /// Strategy for extracting the text to match from a raw item.
    match_type: MatchType,
    /// Base matching algorithm.
    algo: Algo,
    /// Extra score applied on top of the base algorithm's result.
    bonus: Bonus,
}
impl Matcher {
/// Constructs a `Matcher`.
pub fn new(algo: Algo, match_type: MatchType, bonus: Bonus) -> Self {
Self {
algo,
match_type,
bonus,
}
}
/// Match the item without considering the bonus.
#[inline]
pub fn base_match(&self, item: &SourceItem, query: &str) -> MatchResult {
self.algo.apply_match(query, item, &self.match_type)
}
/// Actually performs the matching algorithm.
pub fn do_match(&self, item: &SourceItem, query: &str) -> MatchResult {
self.base_match(item, query).map(|(score, indices)| {
let bonus_score = calculate_bonus(&self.bonus, item, score, &indices);
(score + bonus_score, indices)
})
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::fzy;

    // Matching with `IgnoreFilePath` on this grep line yields the same
    // indices as running fzy on the whole line, since the match falls
    // outside the path prefix.
    #[test]
    fn test_exclude_grep_filepath() {
        fn apply_on_grep_line_fzy(item: &SourceItem, query: &str) -> MatchResult {
            Algo::Fzy.apply_match(query, item, &MatchType::IgnoreFilePath)
        }
        let query = "rules";
        let line = "crates/maple_cli/src/lib.rs:2:1:macro_rules! println_json {";
        let (_, origin_indices) = fzy::fuzzy_indices(line, query).unwrap();
        let (_, indices) = apply_on_grep_line_fzy(&line.to_string().into(), query).unwrap();
        assert_eq!(origin_indices, indices);
    }

    // `FileName` match type agrees with plain fzy when the match lies
    // entirely inside the file name.
    #[test]
    fn test_file_name_only() {
        fn apply_on_file_line_fzy(item: &SourceItem, query: &str) -> MatchResult {
            Algo::Fzy.apply_match(query, item, &MatchType::FileName)
        }
        let query = "lib";
        let line = "crates/extracted_fzy/src/lib.rs";
        let (_, origin_indices) = fzy::fuzzy_indices(line, query).unwrap();
        let (_, indices) = apply_on_file_line_fzy(&line.to_string().into(), query).unwrap();
        assert_eq!(origin_indices, indices);
    }

    // The filename bonus must raise the score without changing the indices.
    #[test]
    fn test_filename_bonus() {
        let lines = vec![
            "autoload/clap/filter.vim",
            "autoload/clap/provider/files.vim",
            "lua/fzy_filter.lua",
        ];
        let matcher = Matcher::new(Algo::Fzy, MatchType::Full, Bonus::FileName);
        let query = "fil";
        for line in lines {
            let (base_score, indices1) = matcher.base_match(&line.into(), query).unwrap();
            let (score_with_bonus, indices2) = matcher.do_match(&line.into(), query).unwrap();
            assert!(indices1 == indices2);
            assert!(score_with_bonus > base_score);
        }
    }
}
|
use std::str::FromStr;
use std::{thread, time};
use chrono::Duration;
use chrono::offset::Utc;
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
use serenity::model::id::{UserId, GuildId};
use std::iter;
use connectionpool::*;
use util;
// Help text shown whenever the `!remindme` arguments cannot be parsed.
static USAGE: &str = "Usage: `!remindme x scale`, where `x` is a number, \
and scale is `minutes`, `hours`, `days` or `weeks`.";
// Handler for `!remindme x scale [message…]`: stores a reminder that
// `watch_for_reminders` delivers once it expires.
command!(remind(ctx, msg, args) {
    // Parse the two required arguments up front. Bad or missing input gets
    // the usage text instead of panicking the command handler (the previous
    // `unwrap()`s crashed on user-controlled input).
    let num = match args.single::<i64>() {
        Ok(n) => n,
        Err(_) => {
            util::print_or_log_error(&format!("Invalid number.\n{}", USAGE), &msg.channel_id);
            return Ok(());
        }
    };
    let scale = match args.single::<String>() {
        Ok(s) => s,
        Err(_) => {
            util::print_or_log_error(&format!("Missing duration scale.\n{}", USAGE), &msg.channel_id);
            return Ok(());
        }
    };
    // Everything after the duration forms the optional reminder text.
    let mut message = String::new();
    for word in args.multiple::<String>().unwrap_or_default() {
        message.push_str(&word);
        message.push(' ');
    }
    let interval = match &*scale.to_lowercase() {
        "minutes" | "minute" => Duration::minutes(num),
        "hours" | "hour" => Duration::hours(num),
        "days" | "day" => Duration::days(num),
        "weeks" | "week" => Duration::weeks(num),
        _ => {
            util::print_or_log_error(&format!("Invalid duration scale.\n{}", USAGE), &msg.channel_id);
            return Ok(());
        }
    };
    // Guard against date overflow when adding very large intervals.
    let date = match Utc::now().naive_utc().checked_add_signed(interval) {
        Some(v) => v,
        None => {
            util::print_or_log_error("Invalid date (overflow)", &msg.channel_id);
            return Ok(());
        }
    };
    // Random bookmark token so the user can find the original command
    // message again via search.
    let mut rng = thread_rng();
    let bookmark: String = iter::repeat(())
        .map(|()| rng.sample(Alphanumeric))
        .take(32)
        .collect();
    util::get_pool(ctx).add_reminder(&msg.author.id, &msg.guild_id(), date, &message, &bookmark)?;
    util::print_or_log_error(&format!(
        "Reminder set for {} UTC.\nBookmark: `{}`",
        date.format("%Y-%m-%d %H:%M"),
        bookmark
    ), &msg.channel_id);
});
/// Infinite loop that checks the database periodically for expired reminders.
///
/// Never returns (note the `!` type): polls once per minute, deletes each
/// expired reminder, then attempts to DM its owner.
pub fn watch_for_reminders(mut pool: ConnectionPool) -> ! {
    loop {
        thread::sleep(time::Duration::from_secs(60));
        // Get expired reminders.
        let reminders = match pool.get_expired_reminders() {
            Ok(rows) => rows,
            Err(why) => {
                // DB hiccup: log and retry on the next tick.
                error!("Failed to get reminders: {:?}", why);
                continue;
            }
        };
        // Delete the reminder no matter if the reminder was sent successfully
        // or not to avoid retrying to message deleted accounts forever.
        for reminder in reminders.iter() {
            if let Err(why) = pool.delete_reminder(reminder.id) {
                warn!("Failed to delete reminder: {}", why);
            };
        }
        // Send all reminders.
        for reminder in reminders.into_iter() {
            if let Err(why) = dm_with_message(reminder) {
                error!("Error while DM'ing: {}", why);
            }
        }
    }
}
/// Parses the stored user id and sends the reminder to the user via DM.
///
/// Errors are reported as human-readable strings so the caller can log them.
fn dm_with_message(reminder: Reminder) -> Result<(), String> {
    let userid = UserId::from_str(&reminder.user_id)
        .map_err(|e| format!("Failed to get user id: {}", e))?;
    let user = userid
        .get()
        .map_err(|e| format!("Failed to get user: {}", e))?;
    // NOTE: the `\` line continuations previously glued words together
    // ("time,but", "commandby"); a space is now kept before each break.
    let mut response = match reminder.message {
        None => "Hello! You asked me to remind you of something at this time, \
                 but you didn't specify what!".to_owned(),
        Some(m) => format!("Hello! You asked me to remind you of the following:\n{}", m)
    };
    response.push_str(&format!("\nYou can find the place you issued the command \
                                by searching for `{}`", reminder.bookmark));
    if let Some(server_id) = reminder.server_id {
        // `map` instead of `and_then(|u| Ok(...))` for the infallible
        // conversion, and the error message now names the server id.
        let guild_id = u64::from_str(&server_id)
            .map(GuildId::from)
            .map_err(|e| format!("Failed to get server id: {}", e))?;
        response.push_str(&format!(" in {}", guild_id));
    }
    user.direct_message(|m| m.content(&response))
        .map_err(|why| format!("Failed to DM user: {}", why))?;
    Ok(())
}
|
// Copyright 2016 The Fancy Regex Authors.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! Compilation of regexes to VM.
use std::usize;
use crate::analyze::Info;
use crate::vm::{Insn, Prog};
use crate::CompileError;
use crate::Error;
use crate::Expr;
use crate::LookAround;
use crate::LookAround::*;
use crate::RegexOptions;
use crate::Result;
// I'm thinking it probably doesn't make a lot of sense having this split
// out from Compiler.
struct VMBuilder {
    /// Instructions emitted so far; an index into this vec is a program counter.
    prog: Vec<Insn>,
    /// Number of save slots allocated so far (two per capture group, plus
    /// scratch slots handed out by `newsave`).
    n_saves: usize,
}
impl VMBuilder {
    /// Creates a builder with save slots reserved for `max_group` capture
    /// groups (two slots per group: start and end position).
    fn new(max_group: usize) -> VMBuilder {
        VMBuilder {
            prog: Vec::new(),
            n_saves: max_group * 2,
        }
    }

    /// Finalizes the builder into an executable program.
    fn build(self) -> Prog {
        Prog::new(self.prog, self.n_saves)
    }

    /// Allocates a fresh save slot and returns its index.
    fn newsave(&mut self) -> usize {
        let slot = self.n_saves;
        self.n_saves += 1;
        slot
    }

    /// Current program counter, i.e. the index of the next instruction.
    fn pc(&self) -> usize {
        self.prog.len()
    }

    /// Appends one instruction to the program.
    fn add(&mut self, insn: Insn) {
        self.prog.push(insn);
    }

    /// Patches the jump at `jmp_pc` to point at `target`.
    ///
    /// Panics if the instruction at `jmp_pc` is not a `Jmp`.
    fn set_jmp_target(&mut self, jmp_pc: usize, target: usize) {
        if let Insn::Jmp(ref mut next) = self.prog[jmp_pc] {
            *next = target;
        } else {
            panic!("mutating instruction other than Jmp");
        }
    }

    /// Patches one arm of the split at `split_pc`: the second arm when
    /// `second` is true, otherwise the first.
    ///
    /// Panics if the instruction at `split_pc` is not a `Split`.
    fn set_split_target(&mut self, split_pc: usize, target: usize, second: bool) {
        match self.prog[split_pc] {
            Insn::Split(_, ref mut y) if second => *y = target,
            Insn::Split(ref mut x, _) => *x = target,
            _ => panic!("mutating instruction other than Split"),
        }
    }

    /// Patches the `next` field of the repeat instruction at `repeat_pc`.
    ///
    /// Panics if the instruction at `repeat_pc` is not a repeat variant.
    fn set_repeat_target(&mut self, repeat_pc: usize, target: usize) {
        match self.prog[repeat_pc] {
            Insn::RepeatGr { ref mut next, .. }
            | Insn::RepeatNg { ref mut next, .. }
            | Insn::RepeatEpsilonGr { ref mut next, .. }
            | Insn::RepeatEpsilonNg { ref mut next, .. } => *next = target,
            _ => panic!("mutating instruction other than Repeat"),
        }
    }
}
/// Walks the analyzed expression tree and lowers it to VM instructions.
struct Compiler {
    /// Instruction builder the compiler emits into.
    b: VMBuilder,
    /// Options forwarded to the delegated inner regex engine.
    options: RegexOptions,
}
impl Compiler {
    /// Creates a compiler with save slots for `max_group` capture groups and
    /// default regex options.
    fn new(max_group: usize) -> Compiler {
        Compiler {
            b: VMBuilder::new(max_group),
            options: Default::default(),
        }
    }

    /// Compiles `info` into VM instructions, recursing into children.
    ///
    /// When `hard` is false and the subexpression itself is not hard, the
    /// whole subtree is handed to the delegated NFA engine; otherwise each
    /// node is lowered to backtracking VM instructions.
    fn visit(&mut self, info: &Info<'_>, hard: bool) -> Result<()> {
        if !hard && !info.hard {
            // easy case, delegate entire subexpr
            return self.compile_delegate(info);
        }
        match *info.expr {
            Expr::Empty => (),
            Expr::Literal { ref val, casei } => {
                if !casei {
                    self.b.add(Insn::Lit(val.clone()));
                } else {
                    // Case-insensitive literals are delegated.
                    self.compile_delegate(info)?;
                }
            }
            Expr::Any { newline: true } => {
                self.b.add(Insn::Any);
            }
            Expr::Any { newline: false } => {
                self.b.add(Insn::AnyNoNL);
            }
            Expr::Concat(_) => {
                self.compile_concat(info, hard)?;
            }
            Expr::Alt(_) => {
                let count = info.children.len();
                self.compile_alt(count, |compiler, i| compiler.visit(&info.children[i], hard))?;
            }
            Expr::Group(_) => {
                // Bracket the group body with saves for the capture's start
                // (slot 2n) and end (slot 2n + 1) positions.
                let group = info.start_group;
                self.b.add(Insn::Save(group * 2));
                self.visit(&info.children[0], hard)?;
                self.b.add(Insn::Save(group * 2 + 1));
            }
            Expr::Repeat { lo, hi, greedy, .. } => {
                self.compile_repeat(info, lo, hi, greedy, hard)?;
            }
            Expr::LookAround(_, la) => {
                self.compile_lookaround(info, la)?;
            }
            Expr::Backref(group) => {
                self.b.add(Insn::Backref(group * 2));
            }
            Expr::BackrefExistsCondition(group) => {
                self.b.add(Insn::BackrefExistsCondition(group));
            }
            Expr::AtomicGroup(_) => {
                // TODO optimization: atomic insns are not needed if the
                // child doesn't do any backtracking.
                self.b.add(Insn::BeginAtomic);
                self.visit(&info.children[0], false)?;
                self.b.add(Insn::EndAtomic);
            }
            Expr::Delegate { .. }
            | Expr::StartText
            | Expr::EndText
            | Expr::StartLine
            | Expr::EndLine => {
                // TODO: might want to have more specialized impls
                self.compile_delegate(info)?;
            }
            Expr::KeepOut => {
                self.b.add(Insn::Save(0));
            }
            Expr::ContinueFromPreviousMatchEnd => {
                self.b.add(Insn::ContinueFromPreviousMatchEnd);
            }
            Expr::Conditional { .. } => {
                self.compile_conditional(|compiler, i| compiler.visit(&info.children[i], hard))?;
            }
        }
        Ok(())
    }

    /// Emits a `Split`/`Jmp` chain for `count` alternatives;
    /// `handle_alternative` compiles the i-th branch.
    fn compile_alt<F>(&mut self, count: usize, mut handle_alternative: F) -> Result<()>
    where
        F: FnMut(&mut Compiler, usize) -> Result<()>,
    {
        let mut jmps = Vec::new();
        // usize::MAX is a sentinel for "no previous split to patch yet".
        let mut last_pc = usize::MAX;
        for i in 0..count {
            let has_next = i != count - 1;
            let pc = self.b.pc();
            if has_next {
                // The second target is patched when the next branch's pc is known.
                self.b.add(Insn::Split(pc + 1, usize::MAX));
            }
            if last_pc != usize::MAX {
                self.b.set_split_target(last_pc, pc, true);
            }
            last_pc = pc;
            handle_alternative(self, i)?;
            if has_next {
                // All except the last branch need to jump over instructions of
                // other branches. The last branch can just continue to the next
                // instruction.
                let pc = self.b.pc();
                jmps.push(pc);
                self.b.add(Insn::Jmp(0));
            }
        }
        // Patch every branch-exit jump to the instruction after the whole alt.
        let next_pc = self.b.pc();
        for jmp_pc in jmps {
            self.b.set_jmp_target(jmp_pc, next_pc);
        }
        Ok(())
    }

    /// Compiles `(?(cond)true|false)`: child 0 is the condition, child 1 the
    /// "true" branch, child 2 the "false" branch.
    fn compile_conditional<F>(&mut self, mut handle_child: F) -> Result<()>
    where
        F: FnMut(&mut Compiler, usize) -> Result<()>,
    {
        // here we use atomic group functionality to be able to remove the program counter
        // relating to the split instruction's second position if the conditional succeeds
        // This is to ensure that if the condition succeeds, but the "true" branch from the
        // conditional fails, that it wouldn't jump to the "false" branch.
        self.b.add(Insn::BeginAtomic);
        let split_pc = self.b.pc();
        // add the split instruction - we will update its second pc later
        self.b.add(Insn::Split(split_pc + 1, usize::MAX));
        // add the conditional expression
        handle_child(self, 0)?;
        // mark it as successful to remove the state we added as a split earlier
        self.b.add(Insn::EndAtomic);
        // add the truth branch
        handle_child(self, 1)?;
        // add an instruction to jump over the false branch - we will update the jump target later
        let jump_over_false_pc = self.b.pc();
        self.b.add(Insn::Jmp(0));
        // add the false branch, update the split target
        self.b.set_split_target(split_pc, self.b.pc(), true);
        handle_child(self, 2)?;
        // update the jump target for jumping over the false branch
        self.b.set_jmp_target(jump_over_false_pc, self.b.pc());
        Ok(())
    }

    /// Compiles a concatenation, delegating an easy prefix and suffix to the
    /// NFA engine and lowering only the hard middle to VM instructions.
    fn compile_concat(&mut self, info: &Info<'_>, hard: bool) -> Result<()> {
        // First: determine a prefix which is constant size and not hard.
        let prefix_end = info
            .children
            .iter()
            .take_while(|c| c.const_size && !c.hard)
            .count();
        // If incoming difficulty is not hard, the suffix after the last
        // hard child can be done with NFA.
        let suffix_len = if !hard {
            info.children[prefix_end..]
                .iter()
                .rev()
                .take_while(|c| !c.hard)
                .count()
        } else {
            // Even for hard, we can delegate a const-sized suffix
            info.children[prefix_end..]
                .iter()
                .rev()
                .take_while(|c| c.const_size && !c.hard)
                .count()
        };
        let suffix_begin = info.children.len() - suffix_len;
        self.compile_delegates(&info.children[..prefix_end])?;
        for child in info.children[prefix_end..suffix_begin].iter() {
            self.visit(child, true)?;
        }
        self.compile_delegates(&info.children[suffix_begin..])
    }

    /// Compiles `e{lo,hi}` with the given greediness; `hi == usize::MAX`
    /// means unbounded.
    fn compile_repeat(
        &mut self,
        info: &Info<'_>,
        lo: usize,
        hi: usize,
        greedy: bool,
        hard: bool,
    ) -> Result<()> {
        let child = &info.children[0];
        if lo == 0 && hi == 1 {
            // e?
            let pc = self.b.pc();
            self.b.add(Insn::Split(pc + 1, pc + 1));
            // TODO: do we want to do an epsilon check here? If we do
            // it here and in Alt, we might be able to make a good
            // bound on stack depth
            self.visit(child, hard)?;
            let next_pc = self.b.pc();
            self.b.set_split_target(pc, next_pc, greedy);
            return Ok(());
        }
        let hard = hard | info.hard;
        if hi == usize::MAX && child.min_size == 0 {
            // Use RepeatEpsilon instructions to prevent empty repeat
            let repeat = self.b.newsave();
            let check = self.b.newsave();
            self.b.add(Insn::Save0(repeat));
            let pc = self.b.pc();
            if greedy {
                self.b.add(Insn::RepeatEpsilonGr {
                    lo,
                    next: usize::MAX,
                    repeat,
                    check,
                });
            } else {
                self.b.add(Insn::RepeatEpsilonNg {
                    lo,
                    next: usize::MAX,
                    repeat,
                    check,
                });
            }
            self.visit(child, hard)?;
            self.b.add(Insn::Jmp(pc));
            let next_pc = self.b.pc();
            self.b.set_repeat_target(pc, next_pc);
        } else if lo == 0 && hi == usize::MAX {
            // e*
            let pc = self.b.pc();
            self.b.add(Insn::Split(pc + 1, pc + 1));
            self.visit(child, hard)?;
            self.b.add(Insn::Jmp(pc));
            let next_pc = self.b.pc();
            self.b.set_split_target(pc, next_pc, greedy);
        } else if lo == 1 && hi == usize::MAX {
            // e+
            let pc = self.b.pc();
            self.visit(child, hard)?;
            let next = self.b.pc() + 1;
            // Greedy prefers looping back; non-greedy prefers falling through.
            let (x, y) = if greedy { (pc, next) } else { (next, pc) };
            self.b.add(Insn::Split(x, y));
        } else {
            // General bounded case: counted repeat via a save slot.
            let repeat = self.b.newsave();
            self.b.add(Insn::Save0(repeat));
            let pc = self.b.pc();
            if greedy {
                self.b.add(Insn::RepeatGr {
                    lo,
                    hi,
                    next: usize::MAX,
                    repeat,
                });
            } else {
                self.b.add(Insn::RepeatNg {
                    lo,
                    hi,
                    next: usize::MAX,
                    repeat,
                });
            }
            self.visit(child, hard)?;
            self.b.add(Insn::Jmp(pc));
            let next_pc = self.b.pc();
            self.b.set_repeat_target(pc, next_pc);
        }
        Ok(())
    }

    /// Compiles a look-around assertion, rewriting variable-size look-behind
    /// alternations into per-alternative const-size assertions.
    fn compile_lookaround(&mut self, info: &Info<'_>, la: LookAround) -> Result<()> {
        let inner = &info.children[0];
        match la {
            LookBehind => {
                if let Info {
                    const_size: false,
                    expr: &Expr::Alt(_),
                    ..
                } = inner
                {
                    // Make const size by transforming `(?<=a|bb)` to `(?<=a)|(?<=bb)`
                    let alternatives = &inner.children;
                    self.compile_alt(alternatives.len(), |compiler, i| {
                        let alternative = &alternatives[i];
                        compiler.compile_positive_lookaround(alternative, la)
                    })
                } else {
                    self.compile_positive_lookaround(inner, la)
                }
            }
            LookBehindNeg => {
                if let Info {
                    const_size: false,
                    expr: &Expr::Alt(_),
                    ..
                } = inner
                {
                    // Make const size by transforming `(?<!a|bb)` to `(?<!a)(?<!bb)`
                    let alternatives = &inner.children;
                    for alternative in alternatives {
                        self.compile_negative_lookaround(alternative, la)?;
                    }
                    Ok(())
                } else {
                    self.compile_negative_lookaround(inner, la)
                }
            }
            LookAhead => self.compile_positive_lookaround(inner, la),
            LookAheadNeg => self.compile_negative_lookaround(inner, la),
        }
    }

    /// Positive look-around: run the inner expression, then restore the
    /// saved input position so it consumes nothing.
    fn compile_positive_lookaround(&mut self, inner: &Info<'_>, la: LookAround) -> Result<()> {
        let save = self.b.newsave();
        self.b.add(Insn::Save(save));
        self.compile_lookaround_inner(inner, la)?;
        self.b.add(Insn::Restore(save));
        Ok(())
    }

    /// Negative look-around: if the inner expression matches, fail that
    /// branch; otherwise continue at the patched split target.
    fn compile_negative_lookaround(&mut self, inner: &Info<'_>, la: LookAround) -> Result<()> {
        let pc = self.b.pc();
        self.b.add(Insn::Split(pc + 1, usize::MAX));
        self.compile_lookaround_inner(inner, la)?;
        self.b.add(Insn::FailNegativeLookAround);
        let next_pc = self.b.pc();
        self.b.set_split_target(pc, next_pc, true);
        Ok(())
    }

    /// Shared body of both look-around forms; look-behind additionally
    /// rewinds by the (required const) size of the inner expression.
    fn compile_lookaround_inner(&mut self, inner: &Info<'_>, la: LookAround) -> Result<()> {
        if la == LookBehind || la == LookBehindNeg {
            if !inner.const_size {
                return Err(Error::CompileError(CompileError::LookBehindNotConst));
            }
            self.b.add(Insn::GoBack(inner.min_size));
        }
        self.visit(inner, false)
    }

    /// Delegates a run of easy children to the inner engine, collapsing an
    /// all-literal run into a single `Lit` instruction.
    fn compile_delegates(&mut self, infos: &[Info<'_>]) -> Result<()> {
        if infos.is_empty() {
            return Ok(());
        }
        // TODO: might want to do something similar for case insensitive literals
        // (have is_literal return an additional bool for casei)
        if infos.iter().all(|e| e.is_literal()) {
            let mut val = String::new();
            for info in infos {
                info.push_literal(&mut val);
            }
            self.b.add(Insn::Lit(val));
            return Ok(());
        }
        let mut delegate_builder = DelegateBuilder::new();
        for info in infos {
            delegate_builder.push(info);
        }
        let delegate = delegate_builder.build(&self.options)?;
        self.b.add(delegate);
        Ok(())
    }

    /// Delegates a single subexpression, emitting a plain `Lit` when it is
    /// just a literal.
    fn compile_delegate(&mut self, info: &Info) -> Result<()> {
        let insn = if info.is_literal() {
            let mut val = String::new();
            info.push_literal(&mut val);
            Insn::Lit(val)
        } else {
            DelegateBuilder::new().push(info).build(&self.options)?
        };
        self.b.add(insn);
        Ok(())
    }
}
pub(crate) fn compile_inner(inner_re: &str, options: &RegexOptions) -> Result<regex::Regex> {
let mut builder = regex::RegexBuilder::new(inner_re);
if let Some(size_limit) = options.delegate_size_limit {
builder.size_limit(size_limit);
}
if let Some(dfa_size_limit) = options.delegate_dfa_size_limit {
builder.dfa_size_limit(dfa_size_limit);
}
builder
.build()
.map_err(CompileError::InnerError)
.map_err(Error::CompileError)
}
/// Compile the analyzed expressions into a program, terminated by `End`.
pub fn compile(info: &Info<'_>) -> Result<Prog> {
    let mut compiler = Compiler::new(info.end_group);
    compiler.visit(info, false)?;
    compiler.b.add(Insn::End);
    Ok(compiler.b.build())
}
/// Accumulates one or more easy subexpressions into a single pattern that
/// is compiled by the delegated inner regex engine.
struct DelegateBuilder {
    /// Pattern text being built; starts with `^` to anchor the inner match.
    re: String,
    /// Sum of the minimum match sizes of the pushed expressions.
    min_size: usize,
    /// True while every pushed expression has a constant match size.
    const_size: bool,
    /// True if the first pushed expression can look left of the start position.
    looks_left: bool,
    /// First capture-group number covered, if any expression was pushed.
    start_group: Option<usize>,
    /// One past the last capture-group number covered.
    end_group: usize,
}
impl DelegateBuilder {
    /// Starts an empty delegate pattern, anchored with `^` so the inner
    /// engine only matches at the position where the VM invokes it.
    fn new() -> Self {
        Self {
            re: "^".to_string(),
            min_size: 0,
            const_size: true,
            looks_left: false,
            start_group: None,
            end_group: 0,
        }
    }
    /// Appends one analyzed subexpression to the pattern, accumulating the
    /// size and capture-group bookkeeping.
    fn push(&mut self, info: &Info<'_>) -> &mut DelegateBuilder {
        // TODO: might want to detect case of a group with no captures
        // inside, so we can run find() instead of captures()
        self.looks_left |= info.looks_left && self.min_size == 0;
        self.min_size += info.min_size;
        self.const_size &= info.const_size;
        if self.start_group.is_none() {
            self.start_group = Some(info.start_group);
        }
        self.end_group = info.end_group;
        // Add expression. The precedence argument has to be 1 here to
        // ensure correct grouping in these cases:
        //
        // If we have multiple expressions, we are building a concat.
        // Without grouping, we'd turn ["a", "b|c"] into "^ab|c". But we
        // want "^a(?:b|c)".
        //
        // Even with a single expression, because we add `^` at the
        // beginning, we need a group. Otherwise `["a|b"]` would be turned
        // into `"^a|b"` instead of `"^(?:a|b)"`.
        info.expr.to_str(&mut self.re, 1);
        self
    }
    /// Compiles the accumulated pattern into a `Delegate` or
    /// `DelegateSized` instruction.
    ///
    /// Panics if nothing was pushed before calling `build`.
    fn build(&self, options: &RegexOptions) -> Result<Insn> {
        let start_group = self.start_group.expect("Expected at least one expression");
        let end_group = self.end_group;
        let compiled = compile_inner(&self.re, options)?;
        if self.looks_left {
            // The "s" flag is for allowing `.` to match `\n`
            let inner1 = ["^(?s:.)", &self.re[1..]].concat();
            let compiled1 = compile_inner(&inner1, options)?;
            Ok(Insn::Delegate {
                inner: Box::new(compiled),
                inner1: Some(Box::new(compiled1)),
                start_group,
                end_group,
            })
        } else if self.const_size && start_group == end_group {
            // Fixed-size pattern with no captures: the cheaper sized form.
            let size = self.min_size;
            Ok(Insn::DelegateSized(Box::new(compiled), size))
        } else {
            Ok(Insn::Delegate {
                inner: Box::new(compiled),
                inner1: None,
                start_group,
                end_group,
            })
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::analyze::analyze;
    use crate::parse::ExprTree;
    use crate::vm::Insn::*;
    use bit_set::BitSet;
    use matches::assert_matches;

    // a|b|c should compile to a chain of Splits with Jmps to the common end.
    #[test]
    fn jumps_for_alternation() {
        let tree = ExprTree {
            expr: Expr::Alt(vec![
                Expr::Literal {
                    val: "a".into(),
                    casei: false,
                },
                Expr::Literal {
                    val: "b".into(),
                    casei: false,
                },
                Expr::Literal {
                    val: "c".into(),
                    casei: false,
                },
            ]),
            backrefs: BitSet::new(),
            named_groups: Default::default(),
        };
        let info = analyze(&tree).unwrap();
        let mut c = Compiler::new(0);
        // Force "hard" so that compiler doesn't just delegate
        c.visit(&info, true).unwrap();
        c.b.add(Insn::End);
        let prog = c.b.prog;
        assert_eq!(prog.len(), 8, "prog: {:?}", prog);
        assert_matches!(prog[0], Split(1, 3));
        assert_matches!(prog[1], Lit(ref l) if l == "a");
        assert_matches!(prog[2], Jmp(7));
        assert_matches!(prog[3], Split(4, 6));
        assert_matches!(prog[4], Lit(ref l) if l == "b");
        assert_matches!(prog[5], Jmp(7));
        assert_matches!(prog[6], Lit(ref l) if l == "c");
        assert_matches!(prog[7], End);
    }

    // The look-ahead body is easy, so it is delegated as a single insn.
    #[test]
    fn look_around_pattern_can_be_delegated() {
        let prog = compile_prog("(?=ab*)c");
        assert_eq!(prog.len(), 5, "prog: {:?}", prog);
        assert_matches!(prog[0], Save(0));
        assert_delegate(&prog[1], "^ab*");
        assert_matches!(prog[2], Restore(0));
        assert_matches!(prog[3], Lit(ref l) if l == "c");
        assert_matches!(prog[4], End);
    }

    // With easy incoming difficulty, a variable-size suffix may still delegate.
    #[test]
    fn easy_concat_can_delegate_end() {
        let prog = compile_prog("(?!x)(?:a|ab)x*");
        assert_eq!(prog.len(), 5, "prog: {:?}", prog);
        assert_matches!(prog[0], Split(1, 3));
        assert_matches!(prog[1], Lit(ref l) if l == "x");
        assert_matches!(prog[2], FailNegativeLookAround);
        assert_delegate(&prog[3], "^(?:a|ab)x*");
        assert_matches!(prog[4], End);
    }

    // Inside a hard context, only a const-size suffix may delegate (sized).
    #[test]
    fn hard_concat_can_delegate_const_size_end() {
        let prog = compile_prog("(?:(?!x)(?:a|b)c)x*");
        assert_eq!(prog.len(), 6, "prog: {:?}", prog);
        assert_matches!(prog[0], Split(1, 3));
        assert_matches!(prog[1], Lit(ref l) if l == "x");
        assert_matches!(prog[2], FailNegativeLookAround);
        assert_delegate_sized(&prog[3], "^(?:a|b)c");
        assert_delegate(&prog[4], "^x*");
        assert_matches!(prog[5], End);
    }

    // A variable-size alternation in a hard context must be fully compiled.
    #[test]
    fn hard_concat_can_not_delegate_variable_end() {
        let prog = compile_prog("(?:(?!x)(?:a|ab))x*");
        assert_eq!(prog.len(), 9, "prog: {:?}", prog);
        assert_matches!(prog[0], Split(1, 3));
        assert_matches!(prog[1], Lit(ref l) if l == "x");
        assert_matches!(prog[2], FailNegativeLookAround);
        assert_matches!(prog[3], Split(4, 6));
        assert_matches!(prog[4], Lit(ref l) if l == "a");
        assert_matches!(prog[5], Jmp(7));
        assert_matches!(prog[6], Lit(ref l) if l == "ab");
        assert_delegate(&prog[7], "^x*");
        assert_matches!(prog[8], End);
    }

    // (?(cond)true|false) compiles to an atomic condition, split, and jump.
    #[test]
    fn conditional_expression_can_be_compiled() {
        let prog = compile_prog(r"(?(ab)c|d)");
        assert_eq!(prog.len(), 8, "prog: {:?}", prog);
        assert_matches!(prog[0], BeginAtomic);
        assert_matches!(prog[1], Split(2, 6));
        assert_matches!(prog[2], Lit(ref l) if l == "ab");
        assert_matches!(prog[3], EndAtomic);
        assert_matches!(prog[4], Lit(ref l) if l == "c");
        assert_matches!(prog[5], Jmp(7));
        assert_matches!(prog[6], Lit(ref l) if l == "d");
        assert_matches!(prog[7], End);
    }

    // Parse, analyze and compile `re`, returning the instruction list.
    fn compile_prog(re: &str) -> Vec<Insn> {
        let tree = Expr::parse_tree(re).unwrap();
        let info = analyze(&tree).unwrap();
        let prog = compile(&info).unwrap();
        prog.body
    }

    // Asserts `insn` is a Delegate whose inner pattern text equals `re`.
    fn assert_delegate(insn: &Insn, re: &str) {
        match insn {
            Insn::Delegate { inner, .. } => {
                assert_eq!(inner.as_str(), re);
            }
            _ => {
                panic!("Expected Insn::Delegate but was {:#?}", insn);
            }
        }
    }

    // Asserts `insn` is a DelegateSized whose inner pattern text equals `re`.
    fn assert_delegate_sized(insn: &Insn, re: &str) {
        match insn {
            Insn::DelegateSized(inner, ..) => {
                assert_eq!(inner.as_str(), re);
            }
            _ => {
                panic!("Expected Insn::DelegateSized but was {:#?}", insn);
            }
        }
    }
}
|
//! Response types for container operations (leases, ACL, properties,
//! listing), each defined in its own submodule and re-exported here so
//! callers get a flat API.
mod acquire_lease_response;
pub use self::acquire_lease_response::AcquireLeaseResponse;
mod break_lease_response;
pub use self::break_lease_response::BreakLeaseResponse;
mod get_acl_response;
pub use self::get_acl_response::GetACLResponse;
mod get_properties_response;
pub use self::get_properties_response::GetPropertiesResponse;
mod list_containers_response;
pub use self::list_containers_response::ListContainersResponse;
mod release_lease_response;
pub use self::release_lease_response::ReleaseLeaseResponse;
mod renew_lease_response;
pub use self::renew_lease_response::RenewLeaseResponse;
|
use Default;
use winapi::um::d3d12::{D3D12_GRAPHICS_PIPELINE_STATE_DESC, D3D12_SHADER_BYTECODE, D3D12_BLEND_DESC, D3D12_STREAM_OUTPUT_DESC, D3D12_RASTERIZER_DESC, D3D12_DEPTH_STENCIL_DESC, D3D12_DEPTH_STENCILOP_DESC, D3D12_INPUT_LAYOUT_DESC, D3D12_CACHED_PIPELINE_STATE, D3D12_RENDER_TARGET_BLEND_DESC, D3D12_COLOR_WRITE_ENABLE_ALL, D3D12_FILL_MODE_SOLID, D3D12_CULL_MODE_NONE, D3D12_PRIMITIVE_TOPOLOGY_TYPE_TRIANGLE, D3D12_INDEX_BUFFER_STRIP_CUT_VALUE_DISABLED, D3D12_DEFAULT_SAMPLE_MASK, D3D12_ROOT_SIGNATURE_DESC, D3D12_ROOT_SIGNATURE_FLAG_ALLOW_INPUT_ASSEMBLER_INPUT_LAYOUT, D3D12_DEPTH_WRITE_MASK_ZERO, D3D12_CONSERVATIVE_RASTERIZATION_MODE_OFF, D3D12_PIPELINE_STATE_FLAG_NONE, D3D12_PIPELINE_STATE_FLAG_TOOL_DEBUG};
use winapi::shared::dxgitype::DXGI_SAMPLE_DESC;
use winapi::_core::ptr::{null_mut, null};
use winapi::shared::minwindef::{FALSE, TRUE};
use winapi::shared::dxgiformat::{DXGI_FORMAT_R8G8B8A8_UNORM, DXGI_FORMAT_UNKNOWN};
use winapi::_core::ops::Deref;
/// Newtype wrapper around the raw winapi pipeline-state desc, used to hang
/// a `Default` implementation on the foreign type.
pub struct CpD3D12_GRAPHICS_PIPELINE_STATE_DESC(pub D3D12_GRAPHICS_PIPELINE_STATE_DESC);
/// Newtype wrapper around the raw winapi root-signature desc, used to hang
/// a `Default` implementation on the foreign type.
pub struct CpD3D12_ROOT_SIGNATURE_DESC(pub(crate) D3D12_ROOT_SIGNATURE_DESC);
impl Default for CpD3D12_GRAPHICS_PIPELINE_STATE_DESC {
fn default() -> Self {
let d3d12_graphics_pipeline_state_desc = D3D12_GRAPHICS_PIPELINE_STATE_DESC {
pRootSignature: null_mut(),
VS: D3D12_SHADER_BYTECODE { pShaderBytecode: null(), BytecodeLength: 0 },
PS: D3D12_SHADER_BYTECODE { pShaderBytecode: null(), BytecodeLength: 0 },
DS: D3D12_SHADER_BYTECODE { pShaderBytecode: null(), BytecodeLength: 0 },
HS: D3D12_SHADER_BYTECODE { pShaderBytecode: null(), BytecodeLength: 0 },
GS: D3D12_SHADER_BYTECODE { pShaderBytecode: null(), BytecodeLength: 0 },
StreamOutput: D3D12_STREAM_OUTPUT_DESC {
pSODeclaration: null(),
NumEntries: 0,
pBufferStrides: null(),
NumStrides: 0,
RasterizedStream: 0,
},
BlendState: D3D12_BLEND_DESC {
AlphaToCoverageEnable: FALSE,
IndependentBlendEnable: FALSE,
RenderTarget: [D3D12_RENDER_TARGET_BLEND_DESC {
BlendEnable: FALSE,
LogicOpEnable: FALSE,
SrcBlend: 0,
DestBlend: 0,
BlendOp: 0,
SrcBlendAlpha: 0,
DestBlendAlpha: 0,
BlendOpAlpha: 0,
LogicOp: 0,
RenderTargetWriteMask: D3D12_COLOR_WRITE_ENABLE_ALL as u8,
}, D3D12_RENDER_TARGET_BLEND_DESC {
BlendEnable: FALSE,
LogicOpEnable: FALSE,
SrcBlend: 0,
DestBlend: 0,
BlendOp: 0,
SrcBlendAlpha: 0,
DestBlendAlpha: 0,
BlendOpAlpha: 0,
LogicOp: 0,
RenderTargetWriteMask: 0,
}, D3D12_RENDER_TARGET_BLEND_DESC {
BlendEnable: FALSE,
LogicOpEnable: FALSE,
SrcBlend: 0,
DestBlend: 0,
BlendOp: 0,
SrcBlendAlpha: 0,
DestBlendAlpha: 0,
BlendOpAlpha: 0,
LogicOp: 0,
RenderTargetWriteMask: 0,
}, D3D12_RENDER_TARGET_BLEND_DESC {
BlendEnable: FALSE,
LogicOpEnable: FALSE,
SrcBlend: 0,
DestBlend: 0,
BlendOp: 0,
SrcBlendAlpha: 0,
DestBlendAlpha: 0,
BlendOpAlpha: 0,
LogicOp: 0,
RenderTargetWriteMask: 0,
}, D3D12_RENDER_TARGET_BLEND_DESC {
BlendEnable: FALSE,
LogicOpEnable: FALSE,
SrcBlend: 0,
DestBlend: 0,
BlendOp: 0,
SrcBlendAlpha: 0,
DestBlendAlpha: 0,
BlendOpAlpha: 0,
LogicOp: 0,
RenderTargetWriteMask: 0,
}, D3D12_RENDER_TARGET_BLEND_DESC {
BlendEnable: FALSE,
LogicOpEnable: FALSE,
SrcBlend: 0,
DestBlend: 0,
BlendOp: 0,
SrcBlendAlpha: 0,
DestBlendAlpha: 0,
BlendOpAlpha: 0,
LogicOp: 0,
RenderTargetWriteMask: 0,
}, D3D12_RENDER_TARGET_BLEND_DESC {
BlendEnable: FALSE,
LogicOpEnable: FALSE,
SrcBlend: 0,
DestBlend: 0,
BlendOp: 0,
SrcBlendAlpha: 0,
DestBlendAlpha: 0,
BlendOpAlpha: 0,
LogicOp: 0,
RenderTargetWriteMask: 0,
}, D3D12_RENDER_TARGET_BLEND_DESC {
BlendEnable: FALSE,
LogicOpEnable: FALSE,
SrcBlend: 0,
DestBlend: 0,
BlendOp: 0,
SrcBlendAlpha: 0,
DestBlendAlpha: 0,
BlendOpAlpha: 0,
LogicOp: 0,
RenderTargetWriteMask: 0,
}],
},
SampleMask: D3D12_DEFAULT_SAMPLE_MASK,
RasterizerState: D3D12_RASTERIZER_DESC {
FillMode: D3D12_FILL_MODE_SOLID,
CullMode: D3D12_CULL_MODE_NONE,
FrontCounterClockwise: 0,
DepthBias: 0,
DepthBiasClamp: 0.0,
SlopeScaledDepthBias: 0.0,
DepthClipEnable: TRUE,
MultisampleEnable: 0,
AntialiasedLineEnable: 0,
ForcedSampleCount: 0,
ConservativeRaster: D3D12_CONSERVATIVE_RASTERIZATION_MODE_OFF,
},
DepthStencilState: D3D12_DEPTH_STENCIL_DESC {
DepthEnable: 0,
DepthWriteMask: D3D12_DEPTH_WRITE_MASK_ZERO,
DepthFunc: 0,
StencilEnable: 0,
StencilReadMask: 0,
StencilWriteMask: 0,
FrontFace: D3D12_DEPTH_STENCILOP_DESC {
StencilFailOp: 0,
StencilDepthFailOp: 0,
StencilPassOp: 0,
StencilFunc: 0,
},
BackFace: D3D12_DEPTH_STENCILOP_DESC {
StencilFailOp: 0,
StencilDepthFailOp: 0,
StencilPassOp: 0,
StencilFunc: 0,
},
},
InputLayout: D3D12_INPUT_LAYOUT_DESC { pInputElementDescs: null(), NumElements: 0 },
IBStripCutValue: D3D12_INDEX_BUFFER_STRIP_CUT_VALUE_DISABLED,
PrimitiveTopologyType: D3D12_PRIMITIVE_TOPOLOGY_TYPE_TRIANGLE,
NumRenderTargets: 1,
RTVFormats: [DXGI_FORMAT_R8G8B8A8_UNORM, DXGI_FORMAT_UNKNOWN, DXGI_FORMAT_UNKNOWN, DXGI_FORMAT_UNKNOWN, DXGI_FORMAT_UNKNOWN, DXGI_FORMAT_UNKNOWN, DXGI_FORMAT_UNKNOWN, DXGI_FORMAT_UNKNOWN],
DSVFormat: DXGI_FORMAT_UNKNOWN,
SampleDesc: DXGI_SAMPLE_DESC { Count: 1, Quality: 0 },
NodeMask: 0,
CachedPSO: D3D12_CACHED_PIPELINE_STATE { pCachedBlob: null(), CachedBlobSizeInBytes: 0 },
Flags: D3D12_PIPELINE_STATE_FLAG_NONE,
};
return CpD3D12_GRAPHICS_PIPELINE_STATE_DESC(d3d12_graphics_pipeline_state_desc);
}
}
impl Default for CpD3D12_ROOT_SIGNATURE_DESC {
fn default() -> Self {
let d3d12_root_signature_desc = D3D12_ROOT_SIGNATURE_DESC {
NumParameters: 0,
pParameters: null(),
NumStaticSamplers: 0,
pStaticSamplers: null(),
Flags: D3D12_ROOT_SIGNATURE_FLAG_ALLOW_INPUT_ASSEMBLER_INPUT_LAYOUT,
};
CpD3D12_ROOT_SIGNATURE_DESC(d3d12_root_signature_desc)
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.