text stringlengths 8 4.13M |
|---|
#![allow(clippy::type_complexity)]
use crate::resources::globals::Globals;
use oxygengine::prelude::*;
/// Keeps the active camera's view box clamped to the map bounds each frame.
pub struct CameraControlSystem;

impl<'s> System<'s> for CameraControlSystem {
    type SystemData = (
        ReadExpect<'s, WebCompositeRenderer>,
        Read<'s, Globals>,
        ReadStorage<'s, CompositeCamera>,
        WriteStorage<'s, CompositeTransform>,
    );

    fn run(&mut self, (renderer, globals, cameras, mut transforms): Self::SystemData) {
        // Both the camera entity and the map size must be known before any
        // clamping can happen.
        let (entity, map_size) = match (globals.camera, globals.map_size) {
            (Some(entity), Some(map_size)) => (entity, map_size),
            _ => return,
        };
        let screen_size = renderer.view_size();
        // Missing transform or camera component means there is nothing to do.
        let view_box = transforms.get(entity).and_then(|transform| {
            cameras
                .get(entity)
                .and_then(|camera| camera.view_box(transform, screen_size))
        });
        if let Some(mut view_box) = view_box {
            // Clamp each axis to [0, map_size - view_size].
            view_box.x = view_box.x.max(0.0).min(map_size.x - view_box.w);
            view_box.y = view_box.y.max(0.0).min(map_size.y - view_box.h);
            transforms
                .get_mut(entity)
                .unwrap()
                .set_translation(view_box.center());
        }
    }
}
|
use jservice_rs::{model::Clue, JServiceRequester};
use serenity::collector::message_collector::MessageCollectorBuilder;
use serenity::framework::standard::{macros::command, Args, CommandResult};
use serenity::model::prelude::*;
use serenity::prelude::*;
use std::time::Duration;
use tokio::stream::StreamExt;
use crate::keys::*;
use crate::model::{UserState, UserStateDb};
/// Runs a multi-question quiz in the invoking channel.
///
/// Takes an optional argument for the number of questions (default 3),
/// fetches random clues, filters out flagged ones, and runs one `_quiz`
/// round per clue. The per-channel game state prevents overlapping quizzes.
#[command]
pub async fn quiz(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {
    let data = ctx.data.read().await;
    let client = data.get::<ReqwestContainer>().unwrap();
    let game_state = data.get::<GameState>().unwrap();
    // Number of questions requested by the caller; defaults to 3.
    let num_questions = args.single::<u64>().unwrap_or(3);
    // Refuse to start a second quiz in a channel that already has one.
    if let Some(is_playing) = game_state.channel.get(&msg.channel_id.0) {
        if *is_playing {
            let _ = msg
                .channel_id
                .say(&ctx, "There is a quiz ongoing in this channel right now.")
                .await?;
            return Ok(());
        }
    }
    // Over-fetch a few clues so the filtering below still usually leaves
    // `num_questions` usable clues.
    let clues = match client.get_random_clues(num_questions + 5).await {
        Ok(c) => c,
        Err(e) => {
            tracing::error!("Failed to get clue: {}", e);
            let _ = msg.channel_id.say(&ctx, "Failed to fetch clue :(").await?;
            return Ok(());
        }
    };
    tracing::info!("Requested {} clues", clues.len());
    // BUG FIX: keep only clues that were never flagged invalid. The old
    // predicate (`count > 0`) kept exactly the flagged clues and threw the
    // good ones away.
    let clues_filtered: Vec<Clue> = clues
        .into_iter()
        .filter(|c| c.invalid_count.map_or(true, |count| count == 0))
        .take(num_questions as usize)
        .collect();
    if clues_filtered.is_empty() {
        tracing::error!(?msg, "Fetched clues are empty");
        let _ = msg.channel_id.say(&ctx, "Failed to fetch clues :(").await?;
        return Ok(());
    };
    if clues_filtered.len() < num_questions as usize {
        tracing::warn!(?msg, "Number of filtered clues are less than requested");
    }
    // Mark the channel as busy for the duration of the quiz.
    game_state.channel.insert(msg.channel_id.0, true);
    for (i, clue) in clues_filtered.iter().enumerate() {
        if let Err(e) = _quiz(&ctx, &msg, &clue, (i, num_questions as usize)).await {
            tracing::error!(?msg, "Quiz error: {}", e);
        }
    }
    game_state.channel.insert(msg.channel_id.0, false);
    Ok(())
}
/// Runs a single question round: posts the clue embed, collects answers
/// for `TIMEOUT_SECS` seconds, and awards points on the first correct one.
///
/// `count` is `(zero_based_question_index, total_questions)`.
pub async fn _quiz(
    ctx: &Context,
    msg: &Message,
    clue: &Clue,
    count: (usize, usize),
) -> CommandResult {
    // How long players get to answer. The collector timeout and the
    // "time up" message below are both derived from this constant so they
    // can never drift apart again.
    const TIMEOUT_SECS: u64 = 25;
    let sent_clue_msg = msg
        .channel_id
        .send_message(&ctx.http, |m| {
            m.embed(|e| {
                e.author(|a| a.name(format!("Question #{}/{}", count.0 + 1, count.1)));
                e.title(format!(
                    "Category: {}",
                    clue.category
                        .as_ref()
                        .map_or_else(|| "No Category", |c| &c.title)
                ));
                e.description(&clue.question);
                // Clues without a value are worth 100 points by default.
                e.field("Value", format!("{}", clue.value.unwrap_or(100)), false);
                if let Some(d) = clue.created_at {
                    e.timestamp(d.to_rfc3339());
                }
                e.footer(|f| {
                    f.text(format!(
                        "Category ID: {}, Game ID: {}",
                        clue.category_id,
                        clue.game_id
                            .map_or_else(|| "N/A".into(), |id| id.to_string())
                    ))
                });
                e.color(0x9b59b6);
                e
            })
        })
        .await?;
    tracing::info!(?clue, "Sent clue");
    let mut collector = MessageCollectorBuilder::new(&ctx)
        .channel_id(msg.channel_id)
        .timeout(Duration::from_secs(TIMEOUT_SECS))
        .await;
    while let Some(msg) = collector.next().await {
        let (is_match, dist) = crate::util::check_answer(&msg.content, &clue.answer);
        if is_match {
            let clue_points = clue.value.unwrap_or(100);
            let state = UserState::inc(&ctx, msg.author.id.0, clue_points).await?;
            let _ = msg
                .reply(
                    ctx,
                    format!(
                        "Correct! **+{} points** ({} → {})",
                        clue_points,
                        // NOTE(review): assumes `state.points >= clue_points`
                        // after the increment — confirm UserState::inc.
                        state.points as u64 - clue_points,
                        state.points
                    ),
                )
                .await;
            sent_clue_msg.delete(&ctx).await?;
            return Ok(());
        } else if dist > 0.8 {
            // A near miss: nudge the player with a "so close" reaction.
            let _ = msg.react(ctx, '🤏').await;
        } else {
            // Deliberately silent on plain wrong answers.
        }
    }
    sent_clue_msg.delete(&ctx).await?;
    // BUG FIX: this message previously claimed "15 seconds" while the
    // collector actually waited 25.
    let _ = msg
        .reply(
            ctx,
            format!(
                "Time up! ({} seconds) The answer is: {}",
                TIMEOUT_SECS, clue.answer
            ),
        )
        .await;
    Ok(())
}
|
use std::{cmp, fmt, hash, panic};
use std::ops::{Deref, DerefMut};
use std::sync::atomic::{AtomicBool, Ordering};
#[cfg(not(test))]
use std::process;

/// Runs `f`, aborting the whole process if `f` panics.
///
/// In test builds the panic is allowed to propagate instead, so that
/// `#[should_panic]` tests can observe it.
#[cfg(not(test))]
fn abort_on_panic<F: FnOnce() -> R, R>(f: F) -> R {
    match panic::catch_unwind(panic::AssertUnwindSafe(f)) {
        Ok(result) => result,
        Err(_) => process::abort(),
    }
}

#[cfg(test)]
fn abort_on_panic<F: FnOnce() -> R, R>(f: F) -> R {
    f()
}
/// A reference to an object with dynamically checked lifetime.
///
/// If the reference escapes the scope it was created in, the program aborts.
///
/// Note that this is not copyable or clonable. If you want that, you need
/// to wrap it inside an `Arc`.
// Raw-pointer pair: `value` borrows the caller's data for the duration of
// `DynRef::with`, and `alive` points at the canary flag owned by `with`'s
// stack frame. Both pointers are only meaningful while `with` is running.
pub struct DynRef<T: ?Sized> { value: *const T, alive: *mut AtomicBool }
// SAFETY: a DynRef only hands out `&T` (via Deref) plus an atomic store on
// drop. NOTE(review): since DynRef behaves like `&T`, the `Send` impl
// arguably ought to require `T: Sync` rather than `T: Send` — confirm.
unsafe impl<T: Send + ?Sized> Send for DynRef<T> {}
unsafe impl<T: Sync + ?Sized> Sync for DynRef<T> {}
// The formatting/comparison traits below all delegate to the referenced
// value (`&T` obtained through Deref).
impl<T: fmt::Debug + ?Sized> fmt::Debug for DynRef<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("DynRef")
            .field(&&**self)
            .finish()
    }
}

impl<T: PartialEq + ?Sized> PartialEq for DynRef<T> {
    fn eq(&self, other: &Self) -> bool {
        (&**self).eq(&**other)
    }
}

impl<T: Eq + ?Sized> Eq for DynRef<T> {}

impl<T: PartialOrd + ?Sized> PartialOrd for DynRef<T> {
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        (&**self).partial_cmp(&**other)
    }
}

impl<T: Ord + ?Sized> Ord for DynRef<T> {
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        (&**self).cmp(&**other)
    }
}

impl<T: hash::Hash + ?Sized> hash::Hash for DynRef<T> {
    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
        (&**self).hash(hasher)
    }
}
impl<T: ?Sized> Drop for DynRef<T> {
    fn drop(&mut self) {
        // Clear the canary: this DynRef no longer exists, so the scope may
        // end without firing.
        // SAFETY: `alive` points at the canary inside `DynRef::with`.
        // NOTE(review): this assumes a scope-escaping DynRef causes the
        // canary's panic/abort before this store can hit a dangling
        // pointer — confirm the ordering guarantee.
        unsafe {
            (*self.alive).store(false, Ordering::Release)
        }
    }
}

impl<T: ?Sized> Deref for DynRef<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // SAFETY: `value` was created from a `&T` that is borrowed for the
        // whole `with` call, so it is valid for any properly scoped DynRef.
        unsafe {
            &*self.value
        }
    }
}
impl<T: ?Sized> DynRef<T> {
    /// Calls `f` with a `DynRef` borrowing `value`.
    ///
    /// A stack-allocated canary flag records whether the DynRef is still
    /// alive when the scope ends; if it is (the reference escaped, e.g.
    /// via the closure's return value), the canary's Drop panics and
    /// `abort_on_panic` turns that into a process abort.
    pub fn with<F, R>(value: &T, f: F) -> R
        where F: FnOnce(Self) -> R
    {
        // Holds `true` while a DynRef pointing at it exists.
        struct Canary(AtomicBool);
        impl Drop for Canary {
            fn drop(&mut self) {
                if self.0.load(Ordering::Acquire) {
                    panic!("the DynRef object has escaped");
                }
            }
        }
        abort_on_panic(move || {
            let mut canary = Canary(AtomicBool::new(true));
            // DynRef's Drop resets the flag; the canary drops after `f`
            // returns and checks it.
            f(DynRef { value, alive: &mut canary.0 })
        })
    }
}
/// A mutable reference to an object with dynamically checked lifetime.
///
/// If the reference escapes the scope it was created in, the program aborts.
// Same scheme as `DynRef`, but holding `*mut T` so `DerefMut` can be offered.
pub struct DynMut<T: ?Sized> { value: *mut T, alive: *mut AtomicBool }
// SAFETY: access goes only through Deref/DerefMut plus an atomic store on
// drop. NOTE(review): as with DynRef, whether `T: Send`/`T: Sync` are the
// right bounds for a `&mut T`-like wrapper deserves a second look.
unsafe impl<T: Send + ?Sized> Send for DynMut<T> {}
unsafe impl<T: Sync + ?Sized> Sync for DynMut<T> {}
// The formatting/comparison traits below all delegate to the referenced
// value, mirroring the DynRef impls.
impl<T: fmt::Debug + ?Sized> fmt::Debug for DynMut<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("DynMut")
            .field(&&**self)
            .finish()
    }
}

impl<T: PartialEq + ?Sized> PartialEq for DynMut<T> {
    fn eq(&self, other: &Self) -> bool {
        (&**self).eq(&**other)
    }
}

impl<T: Eq + ?Sized> Eq for DynMut<T> {}

impl<T: PartialOrd + ?Sized> PartialOrd for DynMut<T> {
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        (&**self).partial_cmp(&**other)
    }
}

impl<T: Ord + ?Sized> Ord for DynMut<T> {
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        (&**self).cmp(&**other)
    }
}

impl<T: hash::Hash + ?Sized> hash::Hash for DynMut<T> {
    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
        (&**self).hash(hasher)
    }
}
impl<T: ?Sized> Drop for DynMut<T> {
    fn drop(&mut self) {
        // Clear the canary so the scope can end without firing.
        // SAFETY: `alive` points at the canary inside `DynMut::with`; see
        // the corresponding note on DynRef's Drop about escape ordering.
        unsafe {
            (*self.alive).store(false, Ordering::Release)
        }
    }
}

impl<T: ?Sized> Deref for DynMut<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // SAFETY: `value` comes from the `&mut T` exclusively borrowed for
        // the whole `with` call.
        unsafe {
            &*self.value
        }
    }
}

impl<T: ?Sized> DerefMut for DynMut<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: same exclusive borrow as Deref; `&mut self` prevents
        // aliased mutable access through this wrapper.
        unsafe {
            &mut *self.value
        }
    }
}
impl<T: ?Sized> DynMut<T> {
    /// Calls `f` with a `DynMut` mutably borrowing `value`.
    ///
    /// Identical scheme to `DynRef::with`: a stack canary panics (and the
    /// process aborts via `abort_on_panic`) if the DynMut is still alive
    /// when the scope ends.
    pub fn with<F, R>(value: &mut T, f: F) -> R
        where F: FnOnce(Self) -> R
    {
        // Holds `true` while a DynMut pointing at it exists.
        struct Canary(AtomicBool);
        impl Drop for Canary {
            fn drop(&mut self) {
                if self.0.load(Ordering::Acquire) {
                    panic!("the DynMut object has escaped");
                }
            }
        }
        abort_on_panic(move || {
            let mut canary = Canary(AtomicBool::new(true));
            f(DynMut { value, alive: &mut canary.0 })
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Normal use: the references stay inside the closure, so no canary fires.
    #[test]
    fn ok() {
        DynRef::with(&[] as &[char], |obj| {
            assert_eq!(obj.len(), 0);
        });
        DynMut::with(&mut [] as &mut [char], |obj| {
            assert_eq!(obj.len(), 0);
        });
    }

    // Returning the DynRef out of the closure must trip the canary. The
    // panic hook is silenced so the expected panic does not spam output.
    #[test]
    #[should_panic]
    fn fail_ref() {
        panic::set_hook(Box::new(|_| {}));
        DynRef::with(&[] as &[char], |x| x);
    }

    #[test]
    #[should_panic]
    fn fail_mut() {
        panic::set_hook(Box::new(|_| {}));
        DynMut::with(&mut [] as &mut [char], |x| x);
    }
}
|
#[macro_use]
extern crate log;
extern crate url;
extern crate bufstream;
extern crate capnp;
mod yak_capnp;
use std::net::{TcpStream,ToSocketAddrs};
use bufstream::BufStream;
use std::io::{self,BufRead,Write};
use std::fmt;
use std::error::Error;
use std::sync::atomic::{AtomicUsize, Ordering};
use capnp::serialize_packed;
use capnp::message::{Builder, Allocator, Reader, ReaderSegments, ReaderOptions};
use url::{Url,SchemeType,UrlParser};
use yak_capnp::*;
// Maps the custom "yak" scheme to a relative (host/port/path style) scheme
// with default port 0, so `UrlParser` will populate domain, port and path.
// NOTE(review): this targets the pre-1.0 `url` crate's SchemeType API.
fn yak_url_scheme(_scheme: &str) -> SchemeType {
    SchemeType::Relative(0)
}
/// Request sequence number carried on every request/response pair.
pub type SeqNo = u64;

/// All errors this client can surface, wrapping the underlying crate errors.
#[derive(Debug)]
pub enum YakError {
    UrlParseError(url::ParseError),
    InvalidUrl(Url),
    IoError(io::Error),
    CapnpError(capnp::Error),
    CapnpNotInSchema(capnp::NotInSchema),
    // The peer sent something that does not fit the expected message shape.
    ProtocolError
}
impl fmt::Display for YakError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match self {
&YakError::UrlParseError(ref e) => e.fmt(f),
&YakError::InvalidUrl(ref u) => f.write_fmt(format_args!("Invalid URL: {}", u)),
&YakError::IoError(ref e) => e.fmt(f),
&YakError::CapnpError(ref e) => e.fmt(f),
&YakError::CapnpNotInSchema(ref e) => e.fmt(f),
&YakError::ProtocolError => "Protocol Error".fmt(f)
}
}
}
impl Error for YakError {
    // NOTE(review): `Error::description` is deprecated in modern Rust in
    // favour of `Display`; kept here because this crate targets an older
    // toolchain (it still uses `try!` throughout).
    fn description(&self) -> &str {
        match self {
            &YakError::InvalidUrl(_) => "Invalid URL",
            &YakError::ProtocolError => "Protocol Error",
            &YakError::IoError(ref e) => e.description(),
            &YakError::CapnpError(ref e) => e.description(),
            &YakError::CapnpNotInSchema(ref e) => e.description(),
            &YakError::UrlParseError(ref e) => e.description()
        }
    }
}
/// A key/content byte pair as carried over the wire.
#[derive(PartialEq,Eq,PartialOrd,Ord,Debug, Clone)]
pub struct Datum {
    pub key: Vec<u8>,
    pub content: Vec<u8>
}

/// A client request: sequence number, target keyspace, and the operation.
#[derive(Debug)]
pub struct Request {
    pub sequence: SeqNo,
    pub space: String,
    pub operation: Operation,
}

/// The operations supported by the wire protocol.
#[derive(Debug)]
pub enum Operation {
    Read { key: Vec<u8> },
    Write { key: Vec<u8>, value: Vec<u8> },
    Subscribe,
}
impl Request {
    /// Builds a Read request for `key` in `space`.
    fn read(seq: SeqNo, space: &str, key: &[u8]) -> Request {
        Request { sequence: seq, space: space.to_string(), operation: Operation::Read { key: key.to_vec() } }
    }

    /// Builds a Write request storing `value` under `key` in `space`.
    fn write(seq: SeqNo, space: &str, key: &[u8], value: &[u8]) -> Request {
        Request { sequence: seq, space: space.to_string(), operation: Operation::Write { key: key.to_owned(), value: value.to_owned() } }
    }

    /// Builds a Subscribe request for `space`.
    fn subscribe(seq: SeqNo, space: &str) -> Request {
        Request { sequence: seq, space: space.to_string(), operation: Operation::Subscribe }
    }

    // The encode_* helpers fill a fresh capnp message with a
    // `client_request` root; `init_operation().init_*()` selects the union
    // variant before its fields are written.
    fn encode_write<A: Allocator>(message: &mut Builder<A>, seq: SeqNo, space: &str, key: &[u8], val: &[u8]) {
        let mut rec = message.init_root::<client_request::Builder>();
        rec.set_sequence(seq);
        rec.set_space(space);
        let mut req = rec.init_operation().init_write();
        req.set_key(key);
        req.set_value(val);
    }

    fn encode_read<A: Allocator>(message: &mut Builder<A>, seq: SeqNo, space: &str, key: &[u8]) {
        let mut rec = message.init_root::<client_request::Builder>();
        rec.set_sequence(seq);
        rec.set_space(space);
        let mut req = rec.init_operation().init_read();
        req.set_key(key)
    }

    fn encode_subscribe<A: Allocator>(message: &mut Builder<A>, seq: SeqNo, space: &str) {
        let mut rec = message.init_root::<client_request::Builder>();
        rec.set_sequence(seq);
        rec.set_space(space);
        rec.init_operation().set_subscribe(())
    }
}
impl WireMessage for Request {
    /// Encodes this request into `message` by dispatching on the operation.
    fn encode<A: Allocator>(&self, message: &mut Builder<A>) {
        match &self.operation {
            &Operation::Read { ref key } => Self::encode_read(message, self.sequence, &self.space, &key),
            &Operation::Write { ref key, ref value } => Self::encode_write(message, self.sequence, &self.space, &key, &value),
            &Operation::Subscribe => Self::encode_subscribe(message, self.sequence, &self.space),
        }
    }

    /// Decodes a `client_request` capnp message back into a `Request`.
    /// `op.which()` distinguishes the union variants; an unrecognized tag
    /// surfaces as `CapnpNotInSchema` through `try!`.
    fn decode<S: ReaderSegments>(message: &Reader<S>) -> Result<Self, YakError> {
        let msg = try!(message.get_root::<client_request::Reader>());
        let space = try!(msg.get_space()).into();
        let seq = msg.get_sequence();
        let op = try!(msg.get_operation());
        match try!(op.which()) {
            operation::Read(v) => {
                let v = try!(v);
                Ok(Request {
                    sequence: seq,
                    space: space,
                    operation: Operation::Read {
                        key: try!(v.get_key()).into(),
                    }
                })
            },
            operation::Write(v) => {
                let v = try!(v);
                Ok(Request {
                    sequence: seq,
                    space: space,
                    operation: Operation::Write {
                        key: try!(v.get_key()).into(),
                        value: try!(v.get_value()).into(),
                    }
                })
            },
            operation::Subscribe(()) => {
                Ok(Request {
                    sequence: seq,
                    space: space,
                    operation: Operation::Subscribe,
                })
            },
        }
    }
}
/// Server-to-client messages.
#[derive(Debug)]
pub enum Response {
    // Plain acknowledgement of the request with this sequence number.
    Okay(SeqNo),
    // Acknowledgement carrying the data read for the request.
    OkayData(SeqNo, Vec<Datum>),
    // A datum pushed asynchronously on a subscription (no sequence number).
    Delivery(Datum),
}
impl Response {
    /// Returns the sequence number of a plain `Okay`, or a protocol error.
    pub fn expect_ok(&self) -> Result<SeqNo, YakError> {
        match *self {
            Response::Okay(seq) => Ok(seq),
            _ => Err(YakError::ProtocolError),
        }
    }

    /// Returns the sequence number and a copy of the data of an
    /// `OkayData`, or a protocol error.
    pub fn expect_datum_list(&self) -> Result<(SeqNo, Vec<Datum>), YakError> {
        match *self {
            Response::OkayData(seq, ref result) => Ok((seq, result.clone())),
            _ => Err(YakError::ProtocolError),
        }
    }

    /// Returns a copy of the datum of a `Delivery`, or a protocol error.
    pub fn expect_delivery(&self) -> Result<Datum, YakError> {
        match *self {
            Response::Delivery(ref result) => Ok(result.clone()),
            _ => Err(YakError::ProtocolError),
        }
    }
}
impl WireMessage for Response {
    /// Encodes this response as a `client_response` capnp message.
    fn encode<A: Allocator>(&self, message: &mut Builder<A>) {
        let mut response = message.init_root::<client_response::Builder>();
        match self {
            &Response::Okay(seq) => { response.set_sequence(seq); response.set_ok(()) },
            &Response::OkayData(seq, ref val) => {
                response.set_sequence(seq);
                let mut data = response.init_ok_data(val.len() as u32);
                for i in 0..val.len() {
                    let mut datum = data.borrow().get(i as u32);
                    // NOTE(review): only the content is written here — the
                    // datum key is never encoded, and decode() below fills
                    // `key: vec![]`. Confirm keys are intentionally dropped
                    // for OkData responses.
                    datum.set_value(&val[i].content)
                }
            },
            &Response::Delivery(ref val) => {
                let mut datum = response.init_delivery();
                datum.set_key(&val.key);
                datum.set_value(&val.content)
            }
        }
    }

    /// Decodes a `client_response` message; the union tag picks the variant.
    fn decode<S: ReaderSegments> (message: &Reader<S>) -> Result<Self, YakError> {
        let msg = try!(message.get_root::<client_response::Reader>());
        match try!(msg.which()) {
            client_response::Ok(()) => Ok(Response::Okay(msg.get_sequence())),
            client_response::OkData(d) => {
                debug!("Got response Data: ");
                let mut data = Vec::with_capacity(try!(d).len() as usize);
                for it in try!(d).iter() {
                    // Copy the byte list out of the capnp arena.
                    let val : Vec<u8> = try!(it.get_value()).iter().map(|v|v.clone()).collect();
                    data.push(Datum { key: vec![], content: val });
                }
                Ok(Response::OkayData(msg.get_sequence(), data))
            },
            client_response::Delivery(d) => {
                let d = try!(d);
                let key = try!(d.get_key()).into();
                let val = try!(d.get_value()).into();
                let datum = Datum { key: key, content: val };
                debug!("Got Delivery: {:?}", datum);
                Ok(Response::Delivery(datum))
            }
        }
    }
}
/// Buffered transport that frames `WireMessage`s as packed capnp messages.
#[derive(Debug)]
pub struct WireProtocol<S: io::Read+io::Write> {
    connection: BufStream<S>,
}

/// Anything that can be written to / read from the wire as a capnp message.
pub trait WireMessage {
    fn encode<A: Allocator>(&self, message: &mut Builder<A>);
    fn decode<S: ReaderSegments>(message: &Reader<S>) -> Result<Self, YakError>;
}
impl WireProtocol<TcpStream> {
pub fn connect<A: ToSocketAddrs>(addr: A) -> Result<Self, YakError> {
let sock = try!(TcpStream::connect(addr));
debug!("connected:{:?}", sock);
Ok(WireProtocol::new(sock))
}
}
impl<S: io::Read+io::Write> WireProtocol<S> {
    /// Wraps `conn` in a buffered stream.
    pub fn new(conn: S) -> WireProtocol<S> {
        let stream = BufStream::new(conn);
        WireProtocol { connection: stream }
    }

    /// Encodes `req` and writes it as one packed capnp message, flushing
    /// so the peer sees it immediately.
    pub fn send<M : WireMessage + fmt::Debug>(&mut self, req: &M) -> Result<(), YakError> {
        trace!("Send: {:?}", req);
        let mut message = Builder::new_default();
        req.encode(&mut message);
        try!(serialize_packed::write_message(&mut self.connection, &mut message));
        try!(self.connection.flush());
        Ok(())
    }

    /// Reads the next message, returning `Ok(None)` on a clean EOF.
    pub fn read<M : WireMessage + fmt::Debug>(&mut self) -> Result<Option<M>,YakError> {
        // `fill_buf` returning an empty buffer means the peer closed the
        // connection between messages.
        let len = try!(self.connection.fill_buf()).len();
        if len == 0 {
            trace!("EOF");
            Ok(None)
        } else {
            trace!("Reading");
            let message_reader =
                try!(serialize_packed::read_message(
                    &mut self.connection, ReaderOptions::new()));
            let resp = try!(M::decode(&message_reader));
            trace!("Read: {:?}", resp);
            Ok(Some(resp))
        }
    }
}
/// Monotonically increasing sequence-number generator.
struct SeqCtr(AtomicUsize);

impl SeqCtr {
    /// Starts counting from 0.
    fn new() -> SeqCtr {
        SeqCtr(AtomicUsize::new(0))
    }

    /// Returns the next sequence number.
    ///
    /// Takes `&self` rather than `&mut self`: the whole point of using an
    /// `AtomicUsize` is that increments are safe through a shared
    /// reference, and existing `&mut` callers still compile unchanged.
    fn next(&self) -> u64 {
        self.0.fetch_add(1, Ordering::Relaxed) as u64
    }
}

impl fmt::Debug for SeqCtr {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // "approx" because a concurrent `next` may bump the counter between
        // this load and the moment the string is observed.
        write!(fmt, "SeqCtr{{ approx: {:?} }}", self.0.load(Ordering::Relaxed))
    }
}
/// A connected yak client bound to a single keyspace.
#[derive(Debug)]
pub struct Client {
    protocol: WireProtocol<TcpStream>,
    space: String,
    sequence: SeqCtr,
}

/// A client converted into a subscription stream; only deliveries can be
/// read from it (see `Client::subscribe`).
pub struct Subscription {
    protocol: WireProtocol<TcpStream>,
}
impl Client {
    /// Connects using a URL of the form `yak://host:port/space`; the path
    /// component becomes the keyspace used by every subsequent call.
    pub fn connect(loc: &str) -> Result<Client, YakError> {
        let mut p = UrlParser::new();
        // Register the custom scheme so domain/port/path get parsed.
        p.scheme_type_mapper(yak_url_scheme);
        let url = try!(p.parse(loc));
        debug!("yak:url: {:?}", url);
        let (addr, space) = match (url.domain(), url.port(), url.serialize_path()) {
            (Some(host), Some(port), Some(path)) => ((host, port), path),
            _ => return Err(YakError::InvalidUrl(url.clone()))
        };
        let proto = try!(WireProtocol::connect(addr));
        let seq = SeqCtr::new();
        Ok(Client { protocol: proto, space: space, sequence: seq })
    }

    /// Stores `val` under `key` and waits for the server's ack.
    pub fn write(&mut self, key: &[u8], val: &[u8]) -> Result<(), YakError> {
        let req = Request::write(self.sequence.next(), &self.space, key, val);
        try!(self.protocol.send(&req));
        trace!("Waiting for response: {:?}", req);
        // EOF before a response counts as a protocol error.
        try!(self.protocol.read::<Response>())
            .map(|r| r.expect_ok())
            .unwrap_or(Err(YakError::ProtocolError))
            .map(|_| ())
    }

    /// Fetches all data stored under `key`.
    pub fn read(&mut self, key: &[u8]) -> Result<Vec<Datum>, YakError> {
        let req = Request::read(self.sequence.next(), &self.space, key);
        try!(self.protocol.send(&req));
        trace!("Waiting for response: {:?}", req);
        try!(self.protocol.read::<Response>())
            .map(|r| r.expect_datum_list())
            .unwrap_or(Err(YakError::ProtocolError))
            .map(|(_seq, data)| data)
    }

    /// Converts this client into a delivery stream. Consumes `self`: after
    /// subscribing, the connection carries only deliveries.
    pub fn subscribe(mut self) -> Result<Subscription, YakError> {
        let req = Request::subscribe(self.sequence.next(), &self.space);
        try!(self.protocol.send(&req));
        trace!("Waiting for response: {:?}", req);
        let resp = try!(self.protocol.read::<Response>());
        let resp_seq = try!(resp.map(|r| r.expect_ok()).unwrap_or(Err(YakError::ProtocolError)));
        trace!("Got response: {:?}", resp_seq);
        Ok(Subscription { protocol: self.protocol })
    }
}
impl Subscription {
    /// Blocks until the next message: `Ok(Some)` for a delivery,
    /// `Ok(None)` for a bare ack, and a protocol error for EOF or any
    /// other message kind.
    pub fn fetch_next(&mut self) -> Result<Option<Datum>, YakError> {
        debug!("Waiting for next delivery");
        match try!(self.protocol.read::<Response>()) {
            Some(Response::Okay(_)) => Ok(None),
            Some(Response::Delivery(d)) => Ok(Some(d)),
            Some(_) => Err(YakError::ProtocolError),
            None => Err(YakError::ProtocolError),
        }
    }
}
impl From<url::ParseError> for YakError {
fn from(err: url::ParseError) -> YakError {
YakError::UrlParseError(err)
}
}
impl From<io::Error> for YakError {
fn from(err: io::Error) -> YakError {
YakError::IoError(err)
}
}
impl From<capnp::Error> for YakError {
fn from(err: capnp::Error) -> YakError {
YakError::CapnpError(err)
}
}
impl From<capnp::NotInSchema> for YakError {
fn from(err: capnp::NotInSchema) -> YakError {
YakError::CapnpNotInSchema(err)
}
}
|
//! Meshes and Materials
use bevy::prelude::*;
/// Meshes and Materials
///
/// Shared asset handles for the cube geometry and its material.
/// NOTE(review): presumably inserted once as a Bevy resource and reused by
/// spawning systems — confirm at the insertion site.
pub struct Data {
    // Mesh handle shared by every cube entity.
    pub cube_mesh: Handle<Mesh>,
    // Material handle shared by every cube entity.
    pub cube_material: Handle<StandardMaterial>,
}
|
/// Indentation-aware printing.
mod error;
mod newline;
#[macro_use] mod styles;
mod stringify;
pub use crate::styles::{Style, Styles};
pub use crate::newline::Newline;
use std::collections::{HashMap};
use std::hash::Hash;
pub trait Stringify {
    /// Stringify a datum. To achieve this, there are a number of
    /// knobs that can be twisted to achieve the desired result:
    /// - `parent_init` and `parent_rest` are the Styles used for `self`:
    ///     + `parent_init` is used at the start of stringifying `self`
    ///     + `parent_rest` is used everywhere else during the
    ///       stringification of `self`
    /// - `child_init` and `child_rest` are auxiliary Styles
    ///     + `child_init` is used at the start of stringifying some
    ///       internal component of `self`
    ///     + `child_rest` is used everywhere else during the
    ///       stringification of `self`
    /// - `buffer` is the buffer to stringify to. In order to keep
    ///   stringification as cheap as possible, a `&mut` to the buffer is
    ///   passed in rather than allocating and returning the buffer.
    fn stringify(&self,
                 parent_init: Style,
                 parent_rest: Style,
                 child_init: Style,
                 child_rest: Style,
                 buffer: &mut String);

    /// Convenience method that is an allocating version of `.stringify()`.
    fn stringify_new(&self,
                     parent_init: Style,
                     parent_rest: Style,
                     child_init: Style,
                     child_rest: Style) -> String {
        let mut buffer = String::new();
        self.stringify(parent_init, parent_rest, child_init, child_rest, &mut buffer);
        buffer
    }

    /// Convenience method to help stringify an enum variant / struct field.
    /// Writes `name=` (indented per `name_style`) followed by `value`
    /// stringified uniformly with `value_style`.
    fn stringify_field<V>(&self,
                          name: &str,
                          value: &V,
                          name_style: Style,
                          value_style: Style,
                          buffer: &mut String)
        where V: Stringify {
        self.indent(name_style, buffer);
        buffer.push_str(name);
        // Single-character append: `push` instead of one-byte `push_str`
        // (clippy::single_char_add_str); behavior is identical.
        buffer.push('=');
        value.stringify(value_style, value_style, value_style, value_style, buffer);
    }

    /// Stringify a value whose rendering ignores all styles (primitives).
    fn stringify_primitive(&self, buffer: &mut String) {
        self.stringify(
            Style::default(), // unused
            Style::default(), // unused
            Style::default(), // unused
            Style::default(), // unused
            buffer
        )
    }

    /// Allocating version of `.stringify_primitive()`.
    fn stringify_primitive_new(&self) -> String {
        let mut buffer = String::new();
        self.stringify_primitive(&mut buffer);
        buffer
    }

    /// If `style.newline` == `Newline::Add`, write a newline.
    /// Then, regardless of whether or not a newline was written,
    /// apply `style.indent` exactly `style.indent_level` times.
    fn indent(&self, style: Style, buffer: &mut String) {
        if style.newline == Newline::Add { buffer.push('\n'); }
        for _ in 0 .. style.indent_level {
            buffer.push_str(style.indent);
        }
    }
}
impl<K, V> Stringify for HashMap<K, V>
    where K: Stringify + Eq + Hash,
          V: Stringify {
    // Renders as `HashMap {k : v,k : v, ...}` with a re-indented closing brace.
    //
    // NOTE(review): HashMap iteration order is unspecified, so repeated
    // stringifications of the same map may list entries in different orders.
    fn stringify(&self,
                 parent_init: Style,
                 parent_rest: Style,
                 key_style: Style,
                 value_style: Style,
                 buffer: &mut String) {
        // Empty maps render flat, without applying `parent_init`.
        // NOTE(review): the Vec impl indents *before* its empty check —
        // confirm which of the two behaviors is intended.
        if self.is_empty() {
            buffer.push_str("HashMap {}");
            return;
        }
        self.indent(parent_init, buffer);
        buffer.push_str("HashMap {");
        for (key, value) in self.iter() {
            key.stringify(key_style, key_style, key_style, key_style, buffer);
            buffer.push_str(" : ");
            value.stringify(value_style, value_style, value_style, value_style, buffer);
            buffer.push_str(",");
        }
        // Closing brace on a fresh line, one level deeper than the parent's
        // rest indentation.
        self.indent(Style::standard(Newline::Add, parent_rest.indent_level + 1), buffer);
        buffer.push_str("}");
    }
}
impl<T> Stringify for Vec<T>
    where T: Stringify {
    // Renders as `Vec [elem,elem, ...]`.
    //
    // NOTE(review): `parent_rest + 1` relies on an `Add` impl for Style
    // (presumably bumping `indent_level`) defined in the styles module,
    // which is not visible here — confirm.
    fn stringify(&self,
                 parent_init: Style,
                 parent_rest: Style,
                 elt_init: Style,
                 elt_rest: Style,
                 buffer: &mut String) {
        self.indent(parent_init, buffer);
        // NOTE(review): unlike the HashMap impl, the empty case here is
        // reached *after* `parent_init` indentation has been applied.
        if self.is_empty() {
            buffer.push_str("Vec []");
            return;
        }
        buffer.push_str("Vec [");
        for item in self.iter() {
            // Each element starts at one level deeper than the parent.
            self.indent(parent_rest + 1, buffer);
            item.stringify(
                elt_init,
                elt_rest,
                Style::default(), // unused
                Style::default(), // unused
                buffer
            );
            buffer.push_str(",");
        }
        self.indent(parent_rest, buffer);
        buffer.push_str("]");
    }
}
impl<T, E> Stringify for Result<T, E>
    where T: Stringify,
          E: Stringify {
    /// Renders as `Ok(<inner>)` or `Err(<inner>)`, forwarding all four
    /// styles to the wrapped value.
    fn stringify(&self,
                 parent_init: Style,
                 parent_rest: Style,
                 child_init: Style,
                 child_rest: Style,
                 buffer: &mut String) {
        self.indent(parent_init, buffer);
        buffer.push_str(if self.is_ok() { "Ok(" } else { "Err(" });
        match self {
            Ok(inner) => inner.stringify(parent_init, parent_rest, child_init, child_rest, buffer),
            Err(inner) => inner.stringify(parent_init, parent_rest, child_init, child_rest, buffer),
        }
        buffer.push_str(")");
    }
}
impl Stringify for bool {
    /// Styles are ignored; appends `true`/`false`.
    fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
        buffer.push_str(if *self { "true" } else { "false" });
    }
}

impl Stringify for String {
    /// Styles are ignored; appends the string verbatim.
    fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
        buffer.push_str(self.as_str());
    }
}

impl<'s> Stringify for &'s str {
    /// Styles are ignored; appends the slice verbatim.
    fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
        buffer.push_str(self);
    }
}
impl Stringify for usize {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for u8 {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for u16 {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for u32 {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for u64 {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for u128 {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for isize {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for i8 {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for i16 {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for i32 {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for i64 {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for i128 {
fn stringify(&self, _: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
buffer.push_str(&format!("{}", self));
}
}
impl Stringify for Style {
    /// Renders as `Style {` + `newline=...` + `indent_level=...` + `}`,
    /// with each field on its own line via the fixed field styles below.
    /// NOTE(review): the `indent` field itself is not rendered — confirm
    /// that is intentional.
    fn stringify(&self,
                 parent_init: Style,
                 parent_rest: Style,
                 _child_init: Style,
                 _child_rest: Style,
                 buffer: &mut String) {
        self.indent(parent_init, buffer);
        buffer.push_str("Style {");
        self.stringify_field(
            "newline",
            &self.newline,
            // Field name starts on a fresh line; the value follows inline.
            Style { newline: Newline::Add, indent_level: 0, indent: Style::INDENT },
            Style { newline: Newline::Omit, indent_level: 0, indent: Style::INDENT },
            buffer
        );
        self.stringify_field(
            "indent_level",
            &self.indent_level,
            Style { newline: Newline::Add, indent_level: 0, indent: Style::INDENT },
            Style { newline: Newline::Omit, indent_level: 0, indent: Style::INDENT },
            buffer
        );
        self.indent(parent_rest, buffer);
        buffer.push_str("}");
    }
}
impl Stringify for Newline {
    /// Renders the variant as `Newline::<Variant>` after applying `style`.
    fn stringify(&self, style: Style, _: Style, _: Style, _: Style, buffer: &mut String) {
        self.indent(style, buffer);
        buffer.push_str("Newline::");
        buffer.push_str(&format!("{:?}", self));
    }
}
// #[cfg(test)]
// mod tests {
// #[test]
// fn it_works() {
// assert_eq!(2 + 2, 4);
// }
// }
|
/*
chapter 4
syntax and semantics
ending iteration early
*/
fn main() {
    for n in 0..10 {
        if n % 2 == 0 {
            // `continue` ends this iteration early, skipping even values.
            continue;
        }
        println!("{}", n);
    }
}
// output should be:
/*
1
3
5
7
9
*/
|
//Started script log at Wed 10 Jul 2013 15:10:43 CEST
addBody(86, '{"awake":true,"type":"dynamic"}');
getBody(86).setPosition(0,0);
getBody(86).setPosition(0,0);getBody(86).delete();
|
use failure::ResultExt;
use crate::{
link::{nlas::LinkNla, LinkBuffer, LinkHeader},
traits::{Emitable, Parseable, ParseableParametrized},
DecodeError,
};
/// An rtnetlink link message: the fixed-size header followed by a list of
/// netlink attributes (NLAs).
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LinkMessage {
    pub header: LinkHeader,
    pub nlas: Vec<LinkNla>,
}
impl Default for LinkMessage {
fn default() -> Self {
LinkMessage::new()
}
}
impl LinkMessage {
    /// Creates a message with a default header and no NLAs.
    pub fn new() -> Self {
        Self::from_parts(LinkHeader::new(), Vec::new())
    }

    /// Splits the message back into its header and NLA list.
    pub fn into_parts(self) -> (LinkHeader, Vec<LinkNla>) {
        let LinkMessage { header, nlas } = self;
        (header, nlas)
    }

    /// Assembles a message from a header and an NLA list.
    pub fn from_parts(header: LinkHeader, nlas: Vec<LinkNla>) -> Self {
        Self { header, nlas }
    }
}
impl Emitable for LinkMessage {
    /// Total serialized size: header plus all NLAs.
    fn buffer_len(&self) -> usize {
        self.header.buffer_len() + self.nlas.as_slice().buffer_len()
    }

    /// Writes the header at the front of `buffer`, then the NLAs
    /// immediately after it.
    fn emit(&self, buffer: &mut [u8]) {
        let header_len = self.header.buffer_len();
        self.header.emit(buffer);
        self.nlas.as_slice().emit(&mut buffer[header_len..]);
    }
}
impl<'buffer, T: AsRef<[u8]> + 'buffer> Parseable<LinkMessage> for LinkBuffer<&'buffer T> {
    /// Parses the fixed-size link header, then the trailing NLAs, using the
    /// header's interface family as the NLA parse parameter.
    fn parse(&self) -> Result<LinkMessage, DecodeError> {
        let header: LinkHeader = self
            .parse()
            .context("failed to parse link message header")?;
        // Copy the family out before `header` is moved into the result.
        let interface_family = header.interface_family;
        Ok(LinkMessage {
            header,
            nlas: self
                .parse_with_param(interface_family)
                .context("failed to parse link message NLAs")?,
        })
    }
}
impl<'buffer, T: AsRef<[u8]> + 'buffer> ParseableParametrized<Vec<LinkNla>, u16>
    for LinkBuffer<&'buffer T>
{
    /// Parses every NLA in the buffer with the given address family,
    /// stopping at the first malformed one.
    fn parse_with_param(&self, family: u16) -> Result<Vec<LinkNla>, DecodeError> {
        let mut parsed = Vec::new();
        for buf in self.nlas() {
            let nla = buf?;
            parsed.push(nla.parse_with_param(family)?);
        }
        Ok(parsed)
    }
}
impl<'buffer, T: AsRef<[u8]> + 'buffer> ParseableParametrized<Vec<LinkNla>, u8>
    for LinkBuffer<&'buffer T>
{
    /// Widens the family and delegates to the `u16` implementation.
    fn parse_with_param(&self, family: u8) -> Result<Vec<LinkNla>, DecodeError> {
        self.parse_with_param(u16::from(family))
    }
}
#[cfg(test)]
mod test {
use crate::{
link::{
address_families::AF_INET,
nlas::{LinkNla, LinkState},
LinkBuffer, LinkFlags, LinkHeader, LinkLayerType, LinkMessage, IFF_LOOPBACK,
IFF_LOWER_UP, IFF_RUNNING, IFF_UP,
},
traits::{Emitable, ParseableParametrized},
};
// Captured rtnetlink link message for the loopback interface, used as the
// reference fixture by the tests below. Several of the original value
// comments were wrong and are corrected here (the bytes are unchanged).
#[rustfmt::skip]
static HEADER: [u8; 96] = [
    0x00, // interface family
    0x00, // reserved
    0x04, 0x03, // link layer type 772 = loopback
    0x01, 0x00, 0x00, 0x00, // interface index = 1
    // Note: in the wireshark capture, the third byte is 0x01
    // but that does not correspond to any of the IFF_ flags...
    // (0x49 = IFF_UP | IFF_LOOPBACK | IFF_RUNNING; IFF_LOWER_UP is NOT set,
    // matching the assertion in packet_header_read below.)
    0x49, 0x00, 0x00, 0x00, // device flags: UP, LOOPBACK, RUNNING
    0x00, 0x00, 0x00, 0x00, // reserved 2 (aka device change flag)
    // nlas
    0x07, 0x00, 0x03, 0x00, 0x6c, 0x6f, 0x00, // device name L=7,T=3,V=lo
    0x00, // padding
    0x08, 0x00, 0x0d, 0x00, 0xe8, 0x03, 0x00, 0x00, // TxQueue length L=8,T=13,V=1000
    0x05, 0x00, 0x10, 0x00, 0x00, // OperState L=5,T=16,V=0 (unknown)
    0x00, 0x00, 0x00, // padding
    0x05, 0x00, 0x11, 0x00, 0x00, // Link mode L=5,T=17,V=0
    0x00, 0x00, 0x00, // padding
    0x08, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, // MTU L=8,T=4,V=65536
    0x08, 0x00, 0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, // Group L=8,T=27,V=0 (bytes are zero; comment previously claimed 9)
    0x08, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, // Promiscuity L=8,T=30,V=0
    0x08, 0x00, 0x1f, 0x00, 0x01, 0x00, 0x00, 0x00, // Number of Tx Queues L=8,T=31,V=1
    0x08, 0x00, 0x28, 0x00, 0xff, 0xff, 0x00, 0x00, // Maximum GSO segment count L=8,T=40,V=65535 (0xffff; previously mislabelled 65536)
    0x08, 0x00, 0x29, 0x00, 0x00, 0x00, 0x01, 0x00, // Maximum GSO size L=8,T=41,V=65536
];
/// The first 16 bytes of HEADER parse into the expected header fields.
#[test]
fn packet_header_read() {
    let packet = LinkBuffer::new(&HEADER[0..16]);
    assert_eq!(packet.interface_family(), 0);
    assert_eq!(packet.reserved_1(), 0);
    assert_eq!(packet.link_layer_type(), LinkLayerType::Loopback);
    assert_eq!(packet.link_index(), 1);
    // 0x49: only UP, LOOPBACK and RUNNING are set.
    assert_eq!(
        packet.flags(),
        LinkFlags::from(IFF_UP | IFF_LOOPBACK | IFF_RUNNING)
    );
    assert!(packet.flags().is_running());
    assert!(packet.flags().is_loopback());
    assert!(packet.flags().is_up());
    assert_eq!(packet.change_mask(), LinkFlags::new());
}
/// Building a header with the setters reproduces HEADER's first 16 bytes.
#[test]
fn packet_header_build() {
    // Start from 0xff so any byte the setters fail to touch is visible.
    let mut buf = vec![0xff; 16];
    {
        // Scope the mutable packet so `buf` can be compared afterwards.
        let mut packet = LinkBuffer::new(&mut buf);
        packet.set_interface_family(0);
        packet.set_reserved_1(0);
        packet.set_link_layer_type(LinkLayerType::Loopback);
        packet.set_link_index(1);
        let mut flags = LinkFlags::new();
        flags.set_up();
        flags.set_loopback();
        flags.set_running();
        packet.set_flags(flags);
        packet.set_change_mask(LinkFlags::new());
    }
    assert_eq!(&buf[..], &HEADER[0..16]);
}
#[test]
fn packet_nlas_read() {
    // Walk the NLAs of the captured message, checking each one both at the
    // raw buffer level (length/kind/value) and after parsing into `LinkNla`.
    let packet = LinkBuffer::new(&HEADER[..]);
    assert_eq!(packet.nlas().count(), 10);
    let mut nlas = packet.nlas();
    // device name L=7,T=3,V=lo
    let nla = nlas.next().unwrap().unwrap();
    nla.check_buffer_length().unwrap();
    assert_eq!(nla.length(), 7);
    assert_eq!(nla.kind(), 3);
    assert_eq!(nla.value(), &[0x6c, 0x6f, 0x00]);
    let parsed: LinkNla = nla.parse_with_param(AF_INET).unwrap();
    assert_eq!(parsed, LinkNla::IfName(String::from("lo")));
    // TxQueue length L=8,T=13,V=1000
    let nla = nlas.next().unwrap().unwrap();
    nla.check_buffer_length().unwrap();
    assert_eq!(nla.length(), 8);
    assert_eq!(nla.kind(), 13);
    assert_eq!(nla.value(), &[0xe8, 0x03, 0x00, 0x00]);
    let parsed: LinkNla = nla.parse_with_param(AF_INET).unwrap();
    assert_eq!(parsed, LinkNla::TxQueueLen(1000));
    // OperState L=5,T=16,V=0 (unknown)
    let nla = nlas.next().unwrap().unwrap();
    nla.check_buffer_length().unwrap();
    assert_eq!(nla.length(), 5);
    assert_eq!(nla.kind(), 16);
    assert_eq!(nla.value(), &[0x00]);
    let parsed: LinkNla = nla.parse_with_param(AF_INET).unwrap();
    assert_eq!(parsed, LinkNla::OperState(LinkState::Unknown));
    // Link mode L=5,T=17,V=0
    let nla = nlas.next().unwrap().unwrap();
    nla.check_buffer_length().unwrap();
    assert_eq!(nla.length(), 5);
    assert_eq!(nla.kind(), 17);
    assert_eq!(nla.value(), &[0x00]);
    let parsed: LinkNla = nla.parse_with_param(AF_INET).unwrap();
    assert_eq!(parsed, LinkNla::LinkMode(0));
    // MTU L=8,T=4,V=65536
    let nla = nlas.next().unwrap().unwrap();
    nla.check_buffer_length().unwrap();
    assert_eq!(nla.length(), 8);
    assert_eq!(nla.kind(), 4);
    assert_eq!(nla.value(), &[0x00, 0x00, 0x01, 0x00]);
    let parsed: LinkNla = nla.parse_with_param(AF_INET).unwrap();
    assert_eq!(parsed, LinkNla::Mtu(65_536));
    // 0x00, 0x00, 0x00, 0x00,
    // Group L=8,T=27,V=0
    let nla = nlas.next().unwrap().unwrap();
    nla.check_buffer_length().unwrap();
    assert_eq!(nla.length(), 8);
    assert_eq!(nla.kind(), 27);
    assert_eq!(nla.value(), &[0x00, 0x00, 0x00, 0x00]);
    let parsed: LinkNla = nla.parse_with_param(AF_INET).unwrap();
    assert_eq!(parsed, LinkNla::Group(0));
    // Promiscuity L=8,T=30,V=0
    let nla = nlas.next().unwrap().unwrap();
    nla.check_buffer_length().unwrap();
    assert_eq!(nla.length(), 8);
    assert_eq!(nla.kind(), 30);
    assert_eq!(nla.value(), &[0x00, 0x00, 0x00, 0x00]);
    let parsed: LinkNla = nla.parse_with_param(AF_INET).unwrap();
    assert_eq!(parsed, LinkNla::Promiscuity(0));
    // Number of Tx Queues L=8,T=31,V=1
    // 0x01, 0x00, 0x00, 0x00
    let nla = nlas.next().unwrap().unwrap();
    nla.check_buffer_length().unwrap();
    assert_eq!(nla.length(), 8);
    assert_eq!(nla.kind(), 31);
    assert_eq!(nla.value(), &[0x01, 0x00, 0x00, 0x00]);
    let parsed: LinkNla = nla.parse_with_param(AF_INET).unwrap();
    assert_eq!(parsed, LinkNla::NumTxQueues(1));
    // NOTE(review): the last two NLAs of the capture (GSO max segments /
    // max size per the HEADER comments) are counted above but not parsed here.
}
#[test]
fn emit() {
    let mut header = LinkHeader::new();
    header.link_layer_type = LinkLayerType::Loopback;
    header.index = 1;
    header.flags = LinkFlags::from(IFF_UP | IFF_LOOPBACK | IFF_RUNNING | IFF_LOWER_UP);
    // Same NLAs as in the capture, in the same order.
    let nlas = vec![
        LinkNla::IfName("lo".into()),
        LinkNla::TxQueueLen(1000),
        LinkNla::OperState(LinkState::Unknown),
        LinkNla::LinkMode(0),
        LinkNla::Mtu(0x1_0000),
        LinkNla::Group(0),
        LinkNla::Promiscuity(0),
        LinkNla::NumTxQueues(1),
        LinkNla::GsoMaxSegs(0xffff),
        LinkNla::GsoMaxSize(0x1_0000),
    ];
    let packet = LinkMessage::from_parts(header, nlas);
    // 16-byte header plus the padded NLAs = 96 bytes.
    let mut buf = vec![0; 96];
    assert_eq!(packet.buffer_len(), 96);
    // Only checks that emitting into a buffer of the computed length does
    // not panic; the emitted bytes are not compared against a reference.
    packet.emit(&mut buf[..]);
}
}
|
mod helpers;
use jsonprima;
// Empty document.
test!(test_0, "", vec![("E100", 0, 1)]);
// Start with plus sign (+).
test!(test_1, "+1", vec![("E106", 0, 1)]);
// Positive Infinity.
test!(test_2, "Infinity", vec![("E106", 0, 1)]);
// Negative Infinity.
test!(test_3, "-Infinity", vec![("E110", 0, 2)]);
// Positive number with one leading zero.
test!(test_4, "01", vec![("E111", 0, 2)]);
// Positive number with multiple leading zeros.
test!(test_5, "000001", vec![("E111", 0, 2)]);
// Negative number with one leading zero.
test!(test_6, "-01", vec![("E111", 0, 3)]);
// Negative number with multiple leading zeros.
test!(test_7, "-0001", vec![("E111", 0, 3)]);
// Positive number with dot.
test!(test_8, "1005.", vec![("E110", 0, 5)]);
// Negative number with dot.
test!(test_9, "-100.", vec![("E110", 0, 5)]);
// NaN.
test!(test_10, "NaN", vec![("E106", 0, 1)]);
// Number with wrong exponential (-) in fractional part.
test!(test_11, "1.-5", vec![("E110", 0, 3)]);
// Number with wrong exponential (+) in fractional part.
test!(test_12, "1.+5", vec![("E110", 0, 3)]);
// Number without integer part.
test!(test_13, ".5e+1", vec![("E106", 0, 1)]);
// Number with wrong exponential (only e) in fractional part.
test!(test_14, "0.5e", vec![("E110", 0, 4)]);
// Number with wrong exponential (only E) in fractional part.
test!(test_15, "0.5E", vec![("E110", 0, 4)]);
// Exponential (e) after dot.
test!(test_16, "0.e", vec![("E110", 0, 3)]);
// Exponential (E) after dot.
test!(test_17, "0.E", vec![("E110", 0, 3)]);
// Number with wrong exponential (wrong position of +) in integer part.
test!(test_18, "1115e+", vec![("E110", 0, 6)]);
// Number with wrong exponential (wrong position of -) in integer part.
test!(test_19, "1115e-", vec![("E110", 0, 6)]);
// Number with wrong exponential (only e) in integer part.
test!(test_20, "1115e", vec![("E110", 0, 5)]);
// Number with wrong exponential (only E) in integer part.
test!(test_21, "1115E", vec![("E110", 0, 5)]);
// Number with wrong exponential (e and E) in integer part.
test!(test_22, "1115Ee+1", vec![("E110", 0, 6)]);
// Number with wrong exponential (e and E) in fractional part.
test!(test_23, "0.1115Ee+1", vec![("E110", 0, 8)]);
// Full width 1 (U+FF11).
test!(test_24, "1", vec![("E106", 0, 1)]);
// Exponential number with two operators (+ and -).
test!(test_25, "1.1e+-1", vec![("E110", 0, 6)]);
// Number starting with dot.
test!(test_26, ".2", vec![("E106", 0, 1)]);
// Number starting with dot and minus sign.
test!(test_27, "+.2", vec![("E106", 0, 1)]);
// Non finite positive number.
test!(test_28, "10E4000000", vec![("E112", 0, 10)]);
// Non finite negative number.
test!(test_29, "-10E4000000", vec![("E112", 0, 11)]);
// Non-zero number followed by decimal point.
test!(test_30, "1.", vec![("E110", 0, 2)]);
// Zero number followed by decimal point.
test!(test_31, "0.", vec![("E110", 0, 2)]);
// Control character inside number.
test!(test_32, "0.\n", vec![("E110", 0, 3)]);
// Reverse solidus at the beginning of the number.
test!(test_33, "\\0", vec![("E106", 0, 1)]);
// Reverse solidus after decimal point character.
test!(test_35, "0.\\", vec![("E110", 0, 3)]);
// Reverse solidus after exponential (e) character.
test!(test_36, "0.5e\\", vec![("E110", 0, 5)]);
// Reverse solidus after exponential (E) character.
test!(test_37, "0.5E\\", vec![("E110", 0, 5)]);
// Reverse solidus after exponential minus sign (-) character.
test!(test_38, "0.5E-\\", vec![("E110", 0, 6)]);
// Reverse solidus after exponential addition sign (+) character.
test!(test_39, "0.5E+\\", vec![("E110", 0, 6)]);
// number number
test!(test_40, "\n 1 \n 12", vec![("E109", 7, 8)]);
// number true
test!(test_41, "0true", vec![("E103", 1, 2)]);
test!(test_42, "0 true", vec![("E103", 2, 3)]);
test!(test_43, " 0 true", vec![("E103", 3, 4)]);
test!(test_44, " \n\r 0 \ttrue", vec![("E103", 7, 8)]);
// number false
test!(test_45, "0false", vec![("E107", 1, 2)]);
test!(test_46, "0 false", vec![("E107", 2, 3)]);
test!(test_47, " 0 false", vec![("E107", 3, 4)]);
test!(test_48, " \n\r 0 \nfalse", vec![("E107", 7, 8)]);
// number null
test!(test_49, "0null", vec![("E108", 1, 2)]);
test!(test_50, "0 null", vec![("E108", 2, 3)]);
test!(test_51, " 0 null", vec![("E108", 3, 4)]);
test!(test_52, " \n\r 0 \tnull", vec![("E108", 7, 8)]);
// number string
test!(test_53, "0\"\"", vec![("E114", 1, 2)]);
test!(test_54, "0 \"\"", vec![("E114", 2, 3)]);
test!(test_55, " 0 \"\"", vec![("E114", 3, 4)]);
test!(test_56, " \n\r 0 \t\"\"", vec![("E114", 7, 8)]);
// number begin-array
test!(test_57, "0[", vec![("E125", 1, 2)]);
test!(test_58, "0 [", vec![("E125", 2, 3)]);
test!(test_59, " 0 [", vec![("E125", 3, 4)]);
test!(test_60, " \n\r 0 \t[", vec![("E125", 7, 8)]);
// number end-array
test!(test_61, "0]", vec![("E126", 1, 2)]);
test!(test_62, "0 ]", vec![("E126", 2, 3)]);
test!(test_63, " 0 ]", vec![("E126", 3, 4)]);
test!(test_64, " \n\r 0 \t]", vec![("E126", 7, 8)]);
// number value-separator
test!(test_65, "0,", vec![("E124", 1, 2)]);
test!(test_66, "0 ,", vec![("E124", 2, 3)]);
test!(test_67, " 0 ,", vec![("E124", 3, 4)]);
test!(test_68, " \n\r 0 \t,", vec![("E124", 7, 8)]);
// number begin-object
test!(test_69, "0{", vec![("E130", 1, 2)]);
test!(test_70, "0 {", vec![("E130", 2, 3)]);
test!(test_71, " 0 {", vec![("E130", 3, 4)]);
test!(test_72, " \n\r 0 \t{", vec![("E130", 7, 8)]);
// number end-object
test!(test_73, "0}", vec![("E131", 1, 2)]);
test!(test_74, "0 }", vec![("E131", 2, 3)]);
test!(test_75, " 0 }", vec![("E131", 3, 4)]);
test!(test_76, " \n\r 0 \t}", vec![("E131", 7, 8)]);
// number name-separator
test!(test_77, "0:", vec![("E136", 1, 2)]);
test!(test_78, "0 :", vec![("E136", 2, 3)]);
test!(test_79, " 0 :", vec![("E136", 3, 4)]);
test!(test_80, " \n\r 0 \t:", vec![("E136", 7, 8)]);
|
/// Reads a decimal string `n` from stdin and prints "Yes" when `n`, with its
/// trailing zeros ignored, reads the same forwards and backwards; "No"
/// otherwise.
fn main() {
    proconio::input! {
        n: String
    }
    // Reverse the digits and drop the leading zeros of the reversed sequence
    // (i.e. the trailing zeros of `n`).
    let digits: Vec<char> = n.chars().rev().skip_while(|&c| c == '0').collect();
    // Zero or one remaining digit is trivially a palindrome; otherwise compare
    // the sequence against itself reversed.
    let is_palindrome = digits.len() < 2 || digits.iter().eq(digits.iter().rev());
    println!("{}", if is_palindrome { "Yes" } else { "No" });
}
|
use axum::{
extract::{ConnectInfo, Path, State},
response::IntoResponse,
Json, Router,
};
use hyper::StatusCode;
use rand::{distributions::Alphanumeric, prelude::Distribution, thread_rng};
use serde::Deserialize;
use serde_json::json;
use std::net::SocketAddr;
use super::model::DBPost;
use crate::{
_entry::state::AppState,
_utils::{
database::DBOrderDirection,
error::{DataAccessError, SecurityError},
string::slugify,
vec::sort_and_dedup_vec,
},
account::model::{AccountNameTrait, DBAccount},
auth::service::{ScopedToken, TokenScope},
security::service::RateLimitConstraint,
task::model::{DBTask, TaskName, TaskStatus, TaskType},
};
/// GET /feed — returns the 20 most recently published posts in compact form,
/// together with the compact tags and posters they reference.
pub async fn get_all_posts_for_feed(State(app_state): State<AppState>) -> impl IntoResponse {
    // NOTE(review): page size/offset are hard-coded — confirm pagination is not needed.
    let compact_posts = app_state
        .post_repository
        .get_many_published_compact_posts("published_at", DBOrderDirection::DESC, 20, 0)
        .await;
    if compact_posts.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let compact_posts = compact_posts.unwrap();
    // Collect the distinct tag and poster ids referenced by this page.
    let mut unique_tag_ids: Vec<u32> = Vec::new();
    let mut unique_poster_ids: Vec<u32> = Vec::new();
    for post in compact_posts.iter() {
        unique_tag_ids.append(&mut post.tag_ids.clone());
        unique_poster_ids.push(post.poster_id);
    }
    sort_and_dedup_vec(&mut unique_tag_ids);
    sort_and_dedup_vec(&mut unique_poster_ids);
    let compact_tags = app_state
        .tag_repository
        .get_many_compact_tags_by_ids(&unique_tag_ids)
        .await;
    if compact_tags.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let compact_tags = compact_tags.unwrap();
    let compact_posters = app_state
        .account_repository
        .get_many_compact_accounts_by_ids(unique_poster_ids.clone())
        .await;
    if compact_posters.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let compact_posters = compact_posters.unwrap();
    Json(json!({
        "posts": compact_posts,
        "tags": compact_tags,
        "posters": compact_posters,
    }))
    .into_response()
}
/// GET /:post_id — returns a single post together with its tags and poster.
///
/// 404 when the post does not exist, 500 on any other repository failure.
pub async fn get_one_post_by_id(
    State(app_state): State<AppState>,
    Path(id): Path<u32>,
) -> impl IntoResponse {
    let post = match app_state.post_repository.get_one_post_by_id(id).await {
        Ok(post) => post,
        Err(DataAccessError::NotFound) => {
            return StatusCode::NOT_FOUND.into_response();
        }
        Err(_) => {
            // @TODO-ZM: log error reason
            return StatusCode::INTERNAL_SERVER_ERROR.into_response();
        }
    };
    let compact_tags = app_state
        .tag_repository
        .get_many_compact_tags_by_ids(&post.tag_ids)
        .await;
    if compact_tags.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let compact_tags = compact_tags.unwrap();
    let poster = app_state
        .account_repository
        .get_one_account_by_id(post.poster_id)
        .await;
    if poster.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let poster = poster.unwrap();
    Json(json!({
        "post": post,
        "tags": compact_tags,
        "poster": poster,
    }))
    .into_response()
}
/// GET /:post_id/similar — full-text-searches for posts similar to the given
/// one (using its title, poster display name and tag names as the query) and
/// returns them in compact form with their tags and posters.
pub async fn get_many_similar_posts_by_id(
    State(app_state): State<AppState>,
    Path(id): Path<u32>,
) -> impl IntoResponse {
    let post = app_state.post_repository.get_one_post_by_id(id).await;
    if post.is_err() {
        // @TODO-ZM: log error reason
        // NOTE(review): any repository failure maps to 404 here, including
        // internal errors — confirm this is intended.
        return StatusCode::NOT_FOUND.into_response();
    }
    let post = post.unwrap();
    let poster = app_state
        .account_repository
        .get_one_account_by_id(post.poster_id)
        .await;
    if poster.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let poster = poster.unwrap();
    let tags = app_state
        .tag_repository
        .get_many_compact_tags_by_ids(&post.tag_ids)
        .await;
    if tags.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let tags = tags.unwrap();
    // Build the search query from title, poster name and tag names.
    let post_ids = app_state
        .search_service
        .search_posts(&format!(
            "{} {} {}",
            post.title,
            poster.get_display_name(),
            tags.iter()
                .map(|tag| tag.name.clone())
                .collect::<Vec<String>>()
                .join(" ")
        ))
        .await;
    if post_ids.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let post_ids = post_ids.unwrap();
    // The search naturally matches the post itself; drop it from the results.
    let post_ids = post_ids
        .into_iter()
        .filter(|&id| id != post.id)
        .collect::<Vec<u32>>();
    let similar_compact_posts = match app_state
        .post_repository
        .get_many_compact_posts_by_ids(post_ids.clone())
        .await
    {
        Ok(posts) => posts,
        Err(DataAccessError::NotFound) => {
            return StatusCode::NOT_FOUND.into_response();
        }
        Err(_) => {
            // @TODO-ZM: log error reason
            return StatusCode::INTERNAL_SERVER_ERROR.into_response();
        }
    };
    // Collect the distinct tag and poster ids referenced by the results.
    let mut unique_tag_ids: Vec<u32> = Vec::new();
    let mut unique_poster_ids: Vec<u32> = Vec::new();
    for post in similar_compact_posts.iter() {
        unique_tag_ids.append(&mut post.tag_ids.clone());
        unique_poster_ids.push(post.poster_id);
    }
    sort_and_dedup_vec(&mut unique_tag_ids);
    sort_and_dedup_vec(&mut unique_poster_ids);
    let compact_tags = app_state
        .tag_repository
        .get_many_compact_tags_by_ids(&unique_tag_ids)
        .await;
    if compact_tags.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let compact_tags = compact_tags.unwrap();
    let compact_posters = app_state
        .account_repository
        .get_many_compact_accounts_by_ids(unique_poster_ids.clone())
        .await;
    if compact_posters.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let compact_posters = compact_posters.unwrap();
    Json(json!({
        "posts": similar_compact_posts,
        "tags": compact_tags,
        "posters": compact_posters,
    }))
    .into_response()
}
/// GET /count — returns the number of published posts.
pub async fn get_post_count(State(app_state): State<AppState>) -> impl IntoResponse {
    let post_count = app_state.post_repository.get_published_post_count().await;
    if post_count.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let post_count = post_count.unwrap();
    Json(json!({
        "count": post_count,
    }))
    .into_response()
}
/// Request body for `POST /via_email`: the post to create plus the (possibly
/// not-yet-registered) account creating it.
#[derive(Deserialize)]
pub struct CreateOnePostWithPosterBody {
    poster: DBAccount,
    post: DBPost,
}
/// POST /via_email — creates an *unpublished* post for a possibly brand-new
/// account, stores a confirmation secret in the KV store and emails the
/// confirmation code. The post goes live through `confirm_post`.
pub async fn create_one_post_with_poster(
    // @TODO-ZM: make sure this is a secure ip
    ConnectInfo(ip): ConnectInfo<SocketAddr>,
    State(app_state): State<AppState>,
    Json(body): Json<CreateOnePostWithPosterBody>,
) -> impl IntoResponse {
    // Only "Individual" or "Company" accounts may post.
    match body.poster.r#type.to_string().as_str() {
        "Individual" | "Company" => {}
        _ => {
            return StatusCode::BAD_REQUEST.into_response();
        }
    }
    // @TODO-ZM: write a macro for this
    match app_state.security_service.rate_limit(vec![
        RateLimitConstraint {
            id: format!("create_one_post_with_poster-1-{}", body.poster.email),
            max_requests: 1,
            duration_ms: 2000,
        },
        RateLimitConstraint {
            id: format!("create_one_post_with_poster-2-{}", ip.ip()),
            max_requests: 60,
            duration_ms: 60_000,
        },
    ]) {
        Ok(_) => {}
        Err(SecurityError::InternalError) => {
            // @TODO-ZM: log error reason
            return StatusCode::INTERNAL_SERVER_ERROR.into_response();
        }
        Err(SecurityError::RateLimitError) => {
            return StatusCode::TOO_MANY_REQUESTS.into_response();
        }
    }
    // Reuse the account when the email is already registered; otherwise
    // create the account on the fly.
    let existing_poster = app_state
        .account_repository
        .get_one_account_by_email(&body.poster.email)
        .await;
    let poster_id = match existing_poster {
        Ok(poster) => poster.id,
        Err(DataAccessError::NotFound) => {
            let poster_id_result = app_state
                .account_repository
                .create_one_account(&DBAccount {
                    slug: slugify(&body.poster.get_display_name()),
                    ..body.poster.clone()
                })
                .await;
            if poster_id_result.is_err() {
                // @TODO-ZM: log error reason
                return StatusCode::INTERNAL_SERVER_ERROR.into_response();
            }
            poster_id_result.unwrap()
        }
        Err(_) => {
            // @TODO-ZM: log error reason
            return StatusCode::INTERNAL_SERVER_ERROR.into_response();
        }
    };
    // Resolve the submitted tag ids so only existing tags are stored.
    let compact_tags = app_state
        .tag_repository
        .get_many_compact_tags_by_ids(&body.post.tag_ids)
        .await;
    if compact_tags.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let compact_tags = compact_tags.unwrap();
    let post_id = app_state
        .post_repository
        .create_one_post(&DBPost {
            poster_id,
            slug: slugify(&body.post.title),
            // Stays hidden until the email is confirmed.
            is_published: false,
            // @TODO-ZM: summarize description using AI
            short_description: body
                .post
                .description
                .split_whitespace()
                .take(20)
                .collect::<Vec<&str>>()
                .join(" "),
            tag_ids: compact_tags.iter().map(|tag| tag.id).collect::<Vec<u32>>(),
            ..body.post.clone()
        })
        .await;
    if post_id.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let post_id = post_id.unwrap();
    // @TODO-ZM: use generate_confirmation_object from AuthService
    // 16 random alphanumerics: the first 12 identify the confirmation, the
    // last 4 are the secret code sent by email.
    let random_16: String = Alphanumeric
        .sample_iter(&mut thread_rng())
        .take(16)
        .map(char::from)
        .collect();
    let random_16 = random_16.to_uppercase();
    let confirmation_id = &random_16[..12];
    let confirmation_code = &random_16[12..];
    let kv_db_result = app_state
        .main_kv_db
        .insert(post_id.to_be_bytes(), random_16.as_bytes());
    if kv_db_result.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let email_result = app_state
        .email_service
        .send_one_email(
            &body.poster.email,
            &"Confirm your email".to_string(),
            &format!(
                r#"Your email is used to create a FREE job post at algeriastartupjobs.com with title:
{}
Please confirm your email by copying the code below into the confirmation page:
<div style="width: 100%; text-align: center;">
<span style="font-size: x-large; letter-spacing: .2em; border: 1px solid #9999; border-radius: .2em; padding: .4em; display: inline-block;">{}</span>
</div>
Thank you for using our service!
ASJ Team
contact@algeriastartupjobs.com
https://www.algeriastartupjobs.com
"#,
                &body.post.title, confirmation_code,
            ),
        )
        .await;
    if email_result.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    Json(json!({
        "post_id": post_id,
        "poster_id": poster_id,
        "confirmation_id": confirmation_id,
    }))
    .into_response()
}
/// Request body for `POST /confirm`: the pending post plus the emailed
/// confirmation split into its id half and its secret code half.
#[derive(Deserialize)]
pub struct ConfirmPostBody {
    post_id: u32,
    confirmation_id: String,
    confirmation_code: String,
}
/// POST /confirm — verifies the emailed confirmation code for a pending post,
/// publishes the post, queues an indexing task and returns a scoped auth
/// token for the poster.
pub async fn confirm_post(
    ConnectInfo(ip): ConnectInfo<SocketAddr>,
    State(app_state): State<AppState>,
    Json(body): Json<ConfirmPostBody>,
) -> impl IntoResponse {
    // Per-IP rate limit slows down confirmation-code brute forcing.
    match app_state
        .security_service
        .rate_limit(vec![RateLimitConstraint {
            id: format!("confirm_login-ip-{}", ip.ip()),
            max_requests: 60,
            duration_ms: 60_000,
        }]) {
        Ok(_) => {}
        Err(SecurityError::InternalError) => {
            // @TODO-ZM: log error reason
            return StatusCode::INTERNAL_SERVER_ERROR.into_response();
        }
        Err(SecurityError::RateLimitError) => {
            return StatusCode::TOO_MANY_REQUESTS.into_response();
        }
    }
    // Atomically consume the stored secret: succeed only when the stored value
    // equals confirmation_id + confirmation_code, and clear it (swap to None)
    // so a code cannot be replayed.
    let kv_db_result = app_state.main_kv_db.compare_and_swap(
        body.post_id.to_be_bytes(),
        Some(format!("{}{}", body.confirmation_id, body.confirmation_code).as_bytes()),
        None as Option<&[u8]>,
    );
    // Outer Err = store failure, inner Err = value mismatch; both are treated
    // as an authorization failure.
    if kv_db_result.is_err() || kv_db_result.unwrap().is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::UNAUTHORIZED.into_response();
    }
    let update_result = app_state
        .post_repository
        .publish_one_post_by_id(body.post_id)
        .await;
    if update_result.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let post = app_state
        .post_repository
        .get_one_post_by_id(body.post_id)
        .await;
    if post.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let post = post.unwrap();
    // Queue a background task to index the newly published post.
    let task_id = app_state
        .task_repository
        .create_one_task(DBTask {
            name: TaskName::Indexing {
                model_name: "post".to_string(),
                model_id: post.id,
            },
            status: TaskStatus::Pending,
            r#type: TaskType::Automated,
        })
        .await;
    if task_id.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let poster = app_state
        .account_repository
        .get_one_account_by_id(post.poster_id)
        .await;
    if poster.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let poster = poster.unwrap();
    let compact_tags = app_state
        .tag_repository
        .get_many_compact_tags_by_ids(&post.tag_ids)
        .await;
    if compact_tags.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let compact_tags = compact_tags.unwrap();
    // Hand the poster a token scoped to post creation so further posts skip
    // the email round-trip.
    let auth_token = app_state
        .auth_service
        .generate_scoped_token(TokenScope::CreatePost, poster.id)
        .await;
    if auth_token.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let auth_token = auth_token.unwrap();
    Json(json!({
        "post": post,
        "poster": poster,
        "tags": compact_tags,
        "auth_token": auth_token,
    }))
    .into_response()
}
/// Request body for the authenticated `POST /` endpoint.
#[derive(Deserialize)]
pub struct CreateOnePostBody {
    post: DBPost,
}
/// POST / — creates and immediately publishes a post for an authenticated
/// poster (identified by a scoped token), then queues an indexing task.
pub async fn create_one_post(
    ConnectInfo(ip): ConnectInfo<SocketAddr>,
    State(app_state): State<AppState>,
    scoped_token: ScopedToken,
    Json(body): Json<CreateOnePostBody>,
) -> impl IntoResponse {
    let poster = app_state
        .account_repository
        .get_one_account_by_id(scoped_token.id)
        .await;
    if poster.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let poster = poster.unwrap();
    // Only "Individual" or "Company" accounts may post.
    match poster.r#type.to_string().as_str() {
        "Individual" | "Company" => {}
        _ => {
            return StatusCode::BAD_REQUEST.into_response();
        }
    }
    // @TODO-ZM: write a macro for this
    // NOTE(review): the rate-limit ids reuse the "create_one_post_with_poster"
    // prefix, so both creation endpoints share buckets — confirm intended.
    match app_state.security_service.rate_limit(vec![
        RateLimitConstraint {
            id: format!("create_one_post_with_poster-1-{}", poster.email),
            max_requests: 1,
            duration_ms: 2000,
        },
        RateLimitConstraint {
            id: format!("create_one_post_with_poster-2-{}", ip.ip()),
            max_requests: 60,
            duration_ms: 60_000,
        },
    ]) {
        Ok(_) => {}
        Err(SecurityError::InternalError) => {
            // @TODO-ZM: log error reason
            return StatusCode::INTERNAL_SERVER_ERROR.into_response();
        }
        Err(SecurityError::RateLimitError) => {
            return StatusCode::TOO_MANY_REQUESTS.into_response();
        }
    }
    // Resolve the submitted tag ids so only existing tags are stored.
    let compact_tags = app_state
        .tag_repository
        .get_many_compact_tags_by_ids(&body.post.tag_ids)
        .await;
    if compact_tags.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let compact_tags = compact_tags.unwrap();
    let post_id = app_state
        .post_repository
        .create_one_post(&DBPost {
            poster_id: poster.id,
            slug: slugify(&body.post.title),
            // Authenticated posters publish immediately (no email round-trip).
            is_published: true,
            // @TODO-ZM: summarize description using AI
            short_description: body
                .post
                .description
                .split_whitespace()
                .take(20)
                .collect::<Vec<&str>>()
                .join(" "),
            published_at: chrono::Utc::now().to_rfc3339(),
            tag_ids: compact_tags.iter().map(|tag| tag.id).collect::<Vec<u32>>(),
            ..body.post.clone()
        })
        .await;
    if post_id.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let post_id = post_id.unwrap();
    let post = app_state.post_repository.get_one_post_by_id(post_id).await;
    if post.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let post = post.unwrap();
    // Queue a background task to index the new post for search.
    let task_id = app_state
        .task_repository
        .create_one_task(DBTask {
            name: TaskName::Indexing {
                model_name: "post".to_string(),
                model_id: post.id,
            },
            status: TaskStatus::Pending,
            r#type: TaskType::Automated,
        })
        .await;
    if task_id.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let poster = app_state
        .account_repository
        .get_one_account_by_id(post.poster_id)
        .await;
    if poster.is_err() {
        // @TODO-ZM: log error reason
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }
    let poster = poster.unwrap();
    Json(json!({
        "post": post,
        "poster": poster,
        "tags": compact_tags,
    }))
    .into_response()
}
/// Builds the router for all post-related endpoints.
pub fn create_post_router() -> Router<AppState> {
    // Register routes one at a time; the registration order matches the
    // original builder chain.
    let mut router = Router::new();
    router = router.route("/feed", axum::routing::get(get_all_posts_for_feed));
    router = router.route("/:post_id", axum::routing::get(get_one_post_by_id));
    router = router.route(
        "/:post_id/similar",
        axum::routing::get(get_many_similar_posts_by_id),
    );
    router = router.route("/count", axum::routing::get(get_post_count));
    router = router.route("/confirm", axum::routing::post(confirm_post));
    router = router.route(
        "/via_email",
        axum::routing::post(create_one_post_with_poster),
    );
    router.route("/", axum::routing::post(create_one_post))
}
|
use std::io::{BufWriter, Write};
use std::fs::{File};
extern crate clap;
use clap::{Arg, App, ArgMatches};
extern crate blobber;
/// Generates a lorem-ipsum blob of the requested size and writes it to the
/// requested output file (defaults: 1 MiB, "out.txt").
fn main() {
    let matches = get_matches();
    let requested = matches.value_of("size").unwrap_or("1m");
    let byte_count = parse_file_size(requested);
    let out_path = matches.value_of("outfile").unwrap_or("out.txt");
    println!("output: {}", out_path);
    let out_file = File::create(out_path).expect("Unable to create file");
    let mut writer = BufWriter::new(out_file);
    let lorem = blobber::get_lorem(byte_count, true);
    // Report the size actually produced, which may differ from the request.
    println!("actual output size: {}", get_file_size(lorem.as_bytes().len()));
    writer.write_all(lorem.as_bytes()).expect("write failed");
}
/// Formats a byte count as a human-readable string with two decimals,
/// using 1024-based units from bytes up to petabytes. Panics for sizes
/// that exceed the petabyte range, like the original.
fn get_file_size(size: usize) -> String {
    const UNITS: [&str; 6] = [
        "bytes",
        "kilobytes",
        "megabytes",
        "gigabytes",
        "terabytes",
        "petabytes",
    ];
    // Repeatedly divide until the value drops below one unit step.
    let mut value = size as f32;
    let mut unit_index = 0;
    while value >= 1024.0 {
        value /= 1024.0;
        unit_index += 1;
    }
    match UNITS.get(unit_index) {
        Some(unit) => format!("{:.2} {}", value, unit),
        None => panic!("error parsing size"),
    }
}
/// Parses a size argument such as "10", "64k", "3m" or "1g" into a byte
/// count. Suffixes are case-insensitive and 1024-based. Panics on malformed
/// input, matching the CLI's existing error handling.
///
/// Fixes the "unknow size" typo in one panic message and collapses the three
/// duplicated suffix branches into a single multiplier lookup.
fn parse_file_size(arg: &str) -> usize {
    let lowered = arg.to_lowercase();
    // Map a recognized suffix to its multiplier; `None` means plain bytes.
    let multiplier = if lowered.ends_with('k') {
        Some(1024)
    } else if lowered.ends_with('m') {
        Some(1024 * 1024)
    } else if lowered.ends_with('g') {
        Some(1024 * 1024 * 1024)
    } else {
        None
    };
    match multiplier {
        Some(factor) => {
            // Strip the one-byte ASCII suffix from the original argument
            // before parsing the remaining digits.
            let trimmed = arg.get(0..arg.len() - 1).expect("unknown size");
            trimmed.parse::<usize>().expect("unknown size") * factor
        }
        None => arg.parse::<usize>().expect("unknown size"),
    }
}
/// Defines and parses the command-line interface (clap 2.x builder API):
/// `-s/--size` (e.g. "512", "64k", "1m", "2g") and `-o/--outfile`.
/// Both flags are optional; defaults are applied by the caller in `main`.
fn get_matches() -> ArgMatches<'static> {
    App::new("dd_stat")
        .version("0.1.0")
        .arg(Arg::with_name("size")
            .short("s")
            .long("size")
            .takes_value(true)
            .required(false))
        .arg(Arg::with_name("outfile")
            .short("o")
            .long("outfile")
            .takes_value(true)
            .required(false))
        .get_matches()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn number_to_text() {
    // One representative value per unit, bytes through petabytes.
    let bytes = get_file_size(10);
    assert_eq!(bytes, "10.00 bytes".to_string());
    let kilobytes = get_file_size(1024);
    assert_eq!(kilobytes, "1.00 kilobytes".to_string());
    let mega = get_file_size(1024 * 1024);
    assert_eq!(mega, "1.00 megabytes".to_string());
    let gig = get_file_size(1024 * 1024 * 1024);
    assert_eq!(gig, "1.00 gigabytes".to_string());
    let tera = get_file_size(1024 * 1024 * 1024 * 1024);
    assert_eq!(tera, "1.00 terabytes".to_string());
    let peta = get_file_size(1024 * 1024 * 1024 * 1024 * 1024);
    assert_eq!(peta, "1.00 petabytes".to_string());
}
#[test]
fn text_to_number() {
    // Plain numbers are taken as bytes; k/m/g suffixes multiply by 1024^n.
    let bytes = parse_file_size("10");
    assert_eq!(bytes, 10);
    let kilo = parse_file_size("1k");
    assert_eq!(kilo, 1024);
    let meg = parse_file_size("1m");
    assert_eq!(meg, 1024 * 1024);
    let gig = parse_file_size("1g");
    assert_eq!(gig, 1024 * 1024 * 1024);
}
} |
use chrono::{DateTime, Utc};
use serde::{Serialize, Deserialize};
#[derive(Serialize, Deserialize, Default)]
pub struct Clicks {
pub amount: u32,
pub last_click_ts: DateTime<Utc>,
} |
use crate::ray::Ray;
use crate::vec3::{Point3, Vector3};
use crate::Num;
use rand::{random, thread_rng, Rng};
use std::ops::Range;
use std::time::Duration;
/// A positionable camera with a lens radius (used to jitter ray origins in
/// `cast_ray`) and a shutter interval (`exposure`) stamped onto rays.
pub struct Camera {
    /// Eye position (`look_from`).
    pub origin: Point3,
    /// Viewport dimensions derived from the vertical field of view.
    pub viewport: Viewport,
    /// Vector spanning the viewport horizontally, scaled by
    /// `focus_distance * viewport.width`.
    pub horizontal: Vector3,
    /// Vector spanning the viewport vertically, scaled like `horizontal`.
    pub vertical: Vector3,
    /// Camera-space right axis (unit length).
    pub u: Vector3,
    /// Camera-space up axis.
    pub v: Vector3,
    // World-space position of the viewport's lower-left corner.
    lower_left_corner: Vector3,
    // Half the aperture; scales the random lens offset in `cast_ray`.
    lens_radius: Num,
    // Distance from the origin at which the viewport plane is placed.
    focus_distance: Num,
    // Range from which each cast ray draws its random time value.
    exposure: Range<Num>,
}
impl Camera {
    /// Output image aspect ratio (width / height).
    pub const ASPECT_RATIO: Num = 3. / 2.;
    /// Lens aperture diameter; `lens_radius` is half of this.
    pub const APERTURE: Num = 0.1;

    /// Builds a camera looking from `look_from` towards `look_at`.
    ///
    /// * `vup` — approximate "up" direction used to orient the viewport.
    /// * `vfov` — vertical field of view in degrees.
    /// * `exposure` — range of ray times handed out by `cast_ray`.
    pub(crate) fn new(
        look_from: Point3,
        look_at: Point3,
        vup: Vector3,
        vfov: Num,
        exposure: Range<Num>,
    ) -> Self {
        // Viewport height from the vertical FOV at unit distance.
        let theta = vfov.to_radians();
        let h = (theta / 2.).tan();
        let viewport = {
            let height = 2. * h;
            let width = Camera::ASPECT_RATIO * height;
            Viewport { height, width }
        };
        // NOTE(review): focus distance is fixed at 10 world units rather than
        // derived from (look_from - look_at) — confirm this is intentional.
        let focus_distance = 10.0;
        // Orthonormal camera basis: w points *backwards* (target to eye).
        let w = (look_from - look_at).normalize();
        let u = vup.cross(w).normalize();
        let v = w.cross(u);
        let origin = look_from;
        // Viewport spans scaled so the image plane sits at `focus_distance`.
        let horizontal = focus_distance * viewport.width * u;
        let vertical = focus_distance * viewport.height * v;
        let lower_left_corner = origin - horizontal / 2. - vertical / 2. - focus_distance * w;
        Self {
            origin,
            viewport,
            horizontal,
            vertical,
            u,
            v,
            lower_left_corner,
            lens_radius: Self::APERTURE / 2.,
            focus_distance,
            exposure,
        }
    }

    /// Returns the cached world-space lower-left corner of the viewport.
    pub(crate) fn llc(&self) -> Vector3 {
        self.lower_left_corner
    }

    /// Casts a ray through normalized viewport coordinates `(u, v)`, jittering
    /// the origin by a random lens offset and stamping the ray with a random
    /// time drawn from `exposure`.
    pub(crate) fn cast_ray(&self, u: Num, v: Num) -> Ray {
        // NOTE(review): the thin-lens model usually samples a unit *disk*;
        // this samples a unit sphere — confirm this is intentional.
        let rd = self.lens_radius * Vector3::random_in_unit_sphere(&mut rand::thread_rng());
        let offset = self.u * rd.x + self.v * rd.y;
        Ray::from(
            self.origin + offset,
            self.lower_left_corner + u * self.horizontal + v * self.vertical - self.origin - offset,
            rand::thread_rng().gen_range(self.exposure.clone()),
        )
    }
}
/// Viewport dimensions in world units (before focus-distance scaling).
pub struct Viewport {
    pub width: Num,
    pub height: Num,
}
|
use ansi_term::{ANSIString, Style};
use std::fmt;
/// A segment is a single configurable element in a module. This will usually
/// contain a data point to provide context for the prompt's user
/// (e.g. the version of the software that is running).
#[derive(Clone)]
pub struct Segment {
    /// The segment's style. If None, will inherit the style of the module containing it.
    pub style: Option<Style>,
    /// The string value of the current segment.
    pub value: String,
}
impl Segment {
/// Creates a new segment.
pub fn new<T>(style: Option<Style>, value: T) -> Self
where
T: Into<String>,
{
Self {
style,
value: value.into(),
}
}
// Returns the ANSIString of the segment value, not including its prefix and suffix
pub fn ansi_string(&self) -> ANSIString {
match self.style {
Some(style) => style.paint(&self.value),
None => ANSIString::from(&self.value),
}
}
}
impl fmt::Display for Segment {
    /// Formats the segment as its (possibly styled) ANSI string.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.ansi_string())
    }
}
|
use std::rc::Rc;
use std::sync::{Arc};
use lazy_static::lazy_static;
use dominator::{Dom, class, html};
use crate::parent::{Parent};
/// Root application component: owns the top-level `Parent` component.
pub struct App {
    parent: Arc<Parent>,
}
impl App {
    /// Creates the app with a freshly constructed `Parent` child component.
    pub fn new() -> Rc<Self> {
        Rc::new(Self {
            parent: Parent::new(),
        })
    }

    /// Renders the root `<div>` with the `Parent` component inside it.
    pub fn render(app: Rc<Self>) -> Dom {
        // The CSS class is generated once and cached for the process lifetime.
        lazy_static! {
            static ref ROOT_CLASS: String = class! {
                .style("overflow-x", "hidden")
                .style("color", "red")
            };
        }
        html!("div", {
            .class(&*ROOT_CLASS)
            .children(&mut [
                Parent::render(app.parent.clone()),
            ])
        })
    }
}
|
use crate::services::set_token;
use yew::prelude::*;
pub struct AuthPage;
impl Component for AuthPage {
type Message = ();
type Properties = ();
fn create(_: Self::Properties, _link: ComponentLink<Self>) -> Self {
// Entering the auth page logs the user out by discarding the stored token.
set_token(None);
Self
}
// Static page: never re-renders in response to messages or prop changes.
fn update(&mut self, _msg: Self::Message) -> ShouldRender {
false
}
fn change(&mut self, _props: Self::Properties) -> ShouldRender {
false
}
fn view(&self) -> Html {
html! {
<>
<div class="navbar-fixed">
<nav>
<div class="nav-wrapper">
<a href="#!" class="brand-logo">{"Oikos kitchen"}</a>
</div>
</nav>
</div>
<div>
<div class="section">
<div class="row">
<form class="col s12">
<div class="row">
<div class="input-field col s12">
<a href="https://github.com/login/oauth/authorize?client_id=6243e7d6a656115a9871&scope=repo,write:org" class="waves-effect waves-light btn">{"Log in with github"}</a>
</div>
</div>
</form>
</div>
</div>
</div>
</>
}
}
}
|
use std;
use libc;
use ffi;
/// Returns the IPv4 address of interface `name` via the `SIOCGIFADDR` ioctl
/// on a throw-away `AF_INET` datagram socket.
///
/// # Errors
/// Propagates the OS error from `socket(2)` or `ioctl(2)`.
pub fn get_ipv4byname(name: &str) -> std::io::Result<std::net::Ipv4Addr> {
    let mut ifni = ffi::ifreq_addr::new();
    ifni.ifr_name.set(name);
    unsafe {
        let fd: libc::c_int = libc::socket(libc::AF_INET, libc::SOCK_DGRAM, libc::IPPROTO_IP);
        if fd < 0 {
            return Err(std::io::Error::last_os_error());
        }
        let rt: libc::c_int = libc::ioctl(fd, ffi::SIOCGIFADDR, &mut ifni);
        if rt != 0 {
            // Capture the ioctl error *before* close(2), which may clobber errno.
            let err = std::io::Error::last_os_error();
            libc::close(fd);
            return Err(err);
        }
        libc::close(fd);
        Ok(std::net::Ipv4Addr::from(ifni.ifr_addr.get_addr()))
    }
}
/// Returns the IPv4 netmask of interface `name` via the `SIOCGIFNETMASK`
/// ioctl on a throw-away `AF_INET` datagram socket.
///
/// # Errors
/// Propagates the OS error from `socket(2)` or `ioctl(2)`.
pub fn get_maskv4byname(name: &str) -> std::io::Result<std::net::Ipv4Addr> {
    let mut ifni = ffi::ifreq_addr::new();
    ifni.ifr_name.set(name);
    unsafe {
        let fd: libc::c_int = libc::socket(libc::AF_INET, libc::SOCK_DGRAM, libc::IPPROTO_IP);
        if fd < 0 {
            return Err(std::io::Error::last_os_error());
        }
        let rt: libc::c_int = libc::ioctl(fd, ffi::SIOCGIFNETMASK, &mut ifni);
        if rt != 0 {
            // Capture the ioctl error *before* close(2), which may clobber errno.
            let err = std::io::Error::last_os_error();
            libc::close(fd);
            return Err(err);
        }
        libc::close(fd);
        Ok(std::net::Ipv4Addr::from(ifni.ifr_addr.get_addr()))
    }
}
/// Assigns IPv4 address `ip` to interface `name` via the `SIOCSIFADDR` ioctl.
/// Typically requires elevated privileges (CAP_NET_ADMIN on Linux).
///
/// # Errors
/// Propagates the OS error from `socket(2)` or `ioctl(2)`.
pub fn set_ipv4byname(name: &str, ip: std::net::Ipv4Addr) -> std::io::Result<()> {
    let mut ifni = ffi::ifreq_addr::new();
    ifni.ifr_name.set(name);
    ifni.ifr_addr.set_addr(ip);
    ifni.ifr_addr.set_family(super::AF_INET);
    unsafe {
        let fd: libc::c_int = libc::socket(libc::AF_INET, libc::SOCK_DGRAM, libc::IPPROTO_IP);
        if fd < 0 {
            return Err(std::io::Error::last_os_error());
        }
        let rt: libc::c_int = libc::ioctl(fd, ffi::SIOCSIFADDR, &mut ifni);
        if rt != 0 {
            // Capture the ioctl error *before* close(2), which may clobber errno.
            let err = std::io::Error::last_os_error();
            libc::close(fd);
            return Err(err);
        }
        libc::close(fd);
    }
    Ok(())
}
/// Assigns IPv4 netmask `ip` to interface `name` via the `SIOCSIFNETMASK` ioctl.
/// Typically requires elevated privileges (CAP_NET_ADMIN on Linux).
///
/// # Errors
/// Propagates the OS error from `socket(2)` or `ioctl(2)`.
pub fn set_maskv4byname(name: &str, ip: std::net::Ipv4Addr) -> std::io::Result<()> {
    let mut ifni = ffi::ifreq_addr::new();
    ifni.ifr_name.set(name);
    ifni.ifr_addr.set_addr(ip);
    ifni.ifr_addr.set_family(super::AF_INET);
    unsafe {
        let fd: libc::c_int = libc::socket(libc::AF_INET, libc::SOCK_DGRAM, libc::IPPROTO_IP);
        if fd < 0 {
            return Err(std::io::Error::last_os_error());
        }
        let rt: libc::c_int = libc::ioctl(fd, ffi::SIOCSIFNETMASK, &mut ifni);
        if rt != 0 {
            // Capture the ioctl error *before* close(2), which may clobber errno.
            let err = std::io::Error::last_os_error();
            libc::close(fd);
            return Err(err);
        }
        libc::close(fd);
    }
    Ok(())
}
|
#![deny(unused_imports)]
extern crate image;
extern crate num;
extern crate num_cpus;
extern crate rand;
extern crate rustc_serialize;
extern crate threadpool;
extern crate time;
use std::fs::File;
use std::io::{self, Read, Write};
use std::env;
use std::process;
use std::sync::Arc;
use rustc_serialize::json;
use rustc_serialize::json::DecoderError::MissingFieldError;
mod geometry;
mod light;
mod material;
mod my_scene;
mod raytracer;
mod scene;
mod util;
mod vec3;
mod mat4;
// Replace this with argparse eventually
/// Parsed command-line arguments.
struct ProgramArgs {
// Path to the JSON scene-configuration file (argv[1]).
config_file: String
}
/// Render job description deserialized from the JSON config file.
#[derive(RustcDecodable, RustcEncodable)]
struct SceneConfig {
// Scene name looked up via `my_scene::scene_by_name`.
name: String,
// Output image (width, height) in pixels.
size: (u32, u32),
fov: f64,
reflect_depth: u32,
refract_depth: u32,
shadow_samples: u32,
gloss_samples: u32,
pixel_samples: u32,
// Base output path; ".ppm" is appended for still frames.
output_file: String,
// When true, renders an animation over `time_slice` at `fps`.
animating: bool,
fps: f64,
// (start_seconds, end_seconds) of the animation.
time_slice: (f64, f64),
starting_frame_number: u32
}
/// Parses CLI arguments: expects exactly one argument, the scene-config path.
///
/// # Errors
/// Returns a usage string when the argument count is wrong.
///
/// # Panics
/// Panics if the argument list is completely empty (no program name).
fn parse_args(args: env::Args) -> Result<ProgramArgs, String> {
    let args = args.collect::<Vec<String>>();
    // Bug fix: the original read `&args[0]` before the length check, so an
    // empty argv panicked on the index and the `0 =>` arm was unreachable.
    match args.len() {
        // I wouldn't expect this in the wild
        0 => panic!("Args do not even include a program name"),
        2 => Ok(ProgramArgs { config_file: args[1].clone() }),
        _ => Err(format!("Usage: {} scene_config.json", args[0])),
    }
}
// Entry point: parse args -> load JSON config -> look up scene -> render
// either an animation or a still frame, logging wall-clock timings throughout.
fn main() {
let start_time = ::time::get_time().sec;
// Stage 1: command-line arguments.
let program_args = match parse_args(env::args()) {
Ok(program_args) => program_args,
Err(error_str) => {
write!(&mut io::stderr(), "{}\n", error_str).unwrap();
process::exit(1)
}
};
// Stage 2: read the config file into memory.
let mut file_handle = match File::open(&program_args.config_file) {
Ok(file) => file,
Err(err) => {
write!(&mut io::stderr(), "{}\n", err).unwrap();
process::exit(1)
}
};
let mut json_data = String::new();
if let Err(ref err) = file_handle.read_to_string(&mut json_data) {
write!(&mut io::stderr(), "{}\n", err).unwrap();
process::exit(1);
}
// Stage 3: decode the JSON, with a friendlier message for missing fields.
let config: SceneConfig = match json::decode(&json_data) {
Ok(data) => data,
Err(err) => {
let msg = match err {
MissingFieldError(field_name) => {
format!("parse failure, missing field ``{}''\n", field_name)
},
_ => {
format!("parse failure: {:?}", err)
}
};
write!(&mut io::stderr(), "{}\n", msg).unwrap();
process::exit(1)
}
};
println!("Job started at {}...\nLoading scene...", start_time);
// Stage 4: resolve the named scene.
let scene_config = match my_scene::scene_by_name(&config.name) {
Some(scene_config) => scene_config,
None => {
write!(&mut io::stderr(), "unknown scene ``{}''\n", config.name).unwrap();
process::exit(1)
}
};
let (image_width, image_height) = config.size;
let fov = config.fov;
// Hackish solution for animator
let shared_scene = Arc::new(scene_config.get_scene());
let camera = if config.animating {
scene_config.get_animation_camera(image_width, image_height, fov)
} else {
scene_config.get_camera(image_width, image_height, fov)
};
let scene_time = ::time::get_time().sec;
println!("Scene loaded at {} ({}s)...", scene_time, scene_time - start_time);
let render_options = raytracer::RenderOptions {
reflect_depth: config.reflect_depth,
refract_depth: config.refract_depth,
shadow_samples: config.shadow_samples,
gloss_samples: config.gloss_samples,
pixel_samples: config.pixel_samples,
};
let renderer = raytracer::Renderer {
options: render_options,
tasks: ::num_cpus::get(), // Number of tasks to spawn. Will use up max available cores.
};
// Stage 5: render — animation writes its own frames; still frame goes to PPM.
if config.animating {
let (animate_from, animate_to) = config.time_slice;
let animator = raytracer::animator::Animator {
fps: config.fps,
animate_from: animate_from,
animate_to: animate_to,
starting_frame_number: config.starting_frame_number,
renderer: renderer
};
println!("Animating - tasks: {}, FPS: {}, start: {}s, end:{}s, starting frame: {}",
::num_cpus::get(), animator.fps, animator.animate_from, animator.animate_to,
animator.starting_frame_number);
animator.animate(camera, shared_scene, &config.output_file);
let render_time = ::time::get_time().sec;
println!("Render done at {} ({}s)",
render_time, render_time - scene_time);
} else {
// Still frame
println!("Rendering with {} tasks...", ::num_cpus::get());
let image_data = renderer.render(camera, shared_scene);
let render_time = ::time::get_time().sec;
println!("Render done at {} ({}s)...\nWriting file...",
render_time, render_time - scene_time);
let out_file = format!("{}{}", config.output_file, ".ppm");
util::export::to_ppm(&image_data, &out_file).expect("ppm write failure");
let export_time = ::time::get_time().sec;
println!("Write done: {} ({}s). Written to {}\nTotal: {}s",
export_time, export_time - render_time,
config.output_file, export_time - start_time);
}
}
|
#[doc = "Register `HWCFGR2` reader"]
pub type R = crate::R<HWCFGR2_SPEC>;
#[doc = "Register `HWCFGR2` writer"]
pub type W = crate::W<HWCFGR2_SPEC>;
// Doc strings corrected "LUART" -> "LPUART" to match the register description
// ("LPUART Hardware Configuration register 2") used elsewhere in this file.
#[doc = "Field `CFG1` reader - LPUART hardware configuration 1"]
pub type CFG1_R = crate::FieldReader;
#[doc = "Field `CFG1` writer - LPUART hardware configuration 1"]
pub type CFG1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `CFG2` reader - LPUART hardware configuration 2"]
pub type CFG2_R = crate::FieldReader;
#[doc = "Field `CFG2` writer - LPUART hardware configuration 2"]
pub type CFG2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
impl R {
    // Doc strings corrected "LUART" -> "LPUART" for consistency with the
    // register description in this file.
    #[doc = "Bits 0:3 - LPUART hardware configuration 1"]
    #[inline(always)]
    pub fn cfg1(&self) -> CFG1_R {
        CFG1_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - LPUART hardware configuration 2"]
    #[inline(always)]
    pub fn cfg2(&self) -> CFG2_R {
        CFG2_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
}
impl W {
    // Doc strings corrected "LUART" -> "LPUART" for consistency with the
    // register description in this file.
    #[doc = "Bits 0:3 - LPUART hardware configuration 1"]
    #[inline(always)]
    #[must_use]
    pub fn cfg1(&mut self) -> CFG1_W<HWCFGR2_SPEC, 0> {
        CFG1_W::new(self)
    }
    #[doc = "Bits 4:7 - LPUART hardware configuration 2"]
    #[inline(always)]
    #[must_use]
    pub fn cfg2(&mut self) -> CFG2_W<HWCFGR2_SPEC, 4> {
        CFG2_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "LPUART Hardware Configuration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hwcfgr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`hwcfgr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// svd2rust-generated register marker: 32-bit, readable and writable,
// reset value 0x13. Prefer regenerating over hand-editing.
pub struct HWCFGR2_SPEC;
impl crate::RegisterSpec for HWCFGR2_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`hwcfgr2::R`](R) reader structure"]
impl crate::Readable for HWCFGR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`hwcfgr2::W`](W) writer structure"]
impl crate::Writable for HWCFGR2_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets HWCFGR2 to value 0x13"]
impl crate::Resettable for HWCFGR2_SPEC {
const RESET_VALUE: Self::Ux = 0x13;
}
|
use std::sync::Once;
/// setup env_logger for test.
/// setup env_logger for test.
///
/// Idempotent: the builder is initialized at most once per process; every
/// call still emits the trailing "done" log line.
pub fn setup_test_logger() {
static INIT: Once = Once::new();
INIT.call_once(|| {
// `try_init` result ignored on purpose — a logger may already be installed.
let _ = env_logger::builder()
.is_test(false) // To enable color. Logs are not captured by test framework.
.try_init();
});
log::info!("setup_test_logger(): done");
}
|
/// Error conditions reported by this X11 wrapper layer.
#[derive(Debug)]
pub enum XError {
/// The X display could not be opened.
OpenDisplayError,
/// An atom value was invalid.
BadAtom,
/// A window/atom property was invalid or could not be read.
BadProperty,
/// An event type not handled by this layer was encountered.
UnknownEventType,
/// An invalid key code was supplied or returned.
BadKeyCode,
/// A key name string did not map to a valid key.
BadKeyString
}
|
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::ToTokens;
use std::convert::{TryFrom, TryInto};
use syn::spanned::Spanned;
// _ __
// __ _____ _ __(_)/ _|_ _
// \ \ / / _ \ '__| | |_| | | |
// \ V / __/ | | | _| |_| |
// \_/ \___|_| |_|_| \__, |
// |___/
// FIGLET: verify
/// Attribute macro entry point: rewrites the annotated item into its
/// verifiable form, or expands to a compile error on failure.
#[proc_macro_attribute]
pub fn verify(_attr: TokenStream, item: TokenStream) -> TokenStream {
match generate_verifiable_item(item) {
Ok(verfiable_item) => verfiable_item,
Err(e) => e.to_compile_error().into(),
}
}
// Parses the item (which runs the translation in `VerifiableItem::parse`)
// and re-emits it as a token stream.
fn generate_verifiable_item(item: TokenStream) -> syn::Result<TokenStream> {
let item: VerifiableItem = syn::parse(item)?;
Ok(item.0.into_token_stream().into())
}
// ____ _
// | _ \ __ _ _ __ ___(_)_ __ __ _
// | |_) / _` | '__/ __| | '_ \ / _` |
// | __/ (_| | | \__ \ | | | | (_| |
// |_| \__,_|_| |___/_|_| |_|\__, |
// |___/
// FIGLET: Parsing
/// Newtype over `syn::Item` whose `Parse` impl applies the verify translation.
struct VerifiableItem(syn::Item);
impl syn::parse::Parse for VerifiableItem {
fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
let mut item: syn::Item = input.parse()?;
// Translation happens during parsing, so the stored item is already rewritten.
item.translate()?;
Ok(Self(item))
}
}
impl quote::ToTokens for VerifiableItem {
fn to_tokens(&self, out: &mut proc_macro2::TokenStream) {
self.0.to_tokens(out)
}
}
// Builds a `Logic` by re-quoting each generic argument into a clause.
impl From<Vec<syn::GenericArgument>> for Logic {
fn from(generics: Vec<syn::GenericArgument>) -> Self {
Self {
clauses: generics
.into_iter()
.map(|arg| syn::parse_quote! { #arg })
.collect(),
}
}
}
// Extracts clauses from angle-bracketed arguments; anything else
// (parenthesized / no arguments) yields an empty Logic.
impl From<syn::PathArguments> for Logic {
fn from(generics: syn::PathArguments) -> Self {
match generics {
syn::PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments {
args,
..
}) => args.into_iter().collect::<Vec<_>>().into(),
_ => Logic { clauses: vec![] },
}
}
}
// _____ _ _
// |_ _| __ __ _ _ __ ___| | __ _| |_ ___ _ __
// | || '__/ _` | '_ \/ __| |/ _` | __/ _ \| '__|
// | || | | (_| | | | \__ \ | (_| | || (_) | |
// |_||_| \__,_|_| |_|___/_|\__,_|\__\___/|_|
// FIGLET: Translator
/// In-place rewrite of an item's generics/bounds into their verifiable form.
trait Translate {
fn translate(&mut self) -> syn::Result<()>;
}
impl Translate for syn::Item {
fn translate(&mut self) -> syn::Result<()> {
// TODO:
// o support translation of other item types.
// o support translation of statements in blocks.
match self {
syn::Item::Fn(item) => item.translate(),
syn::Item::Impl(item) => item.translate(),
item => Err(syn::Error::new(
item.span(),
"expected `fn` or `impl`".to_string(),
)),
}
}
}
impl Translate for syn::ItemFn {
fn translate(&mut self) -> syn::Result<()> {
// Collect generated predicates into a scratch `where` clause, then merge
// them into the function's own clause.
let mut additional_where_clause = syn::parse_quote! { where };
Translator::new(&mut additional_where_clause).translate(&mut self.sig)?;
self.sig
.generics
.make_where_clause()
.predicates
.extend(additional_where_clause.predicates);
Ok(())
}
}
impl Translate for syn::ItemImpl {
fn translate(&mut self) -> syn::Result<()> {
// NOTE(review): `ItemImpl::where_clause()` returns Some, so the translator
// edits the impl's own clause and this scratch clause presumably stays unused.
let mut _unused_where_clause = syn::parse_quote! { where };
Translator::new(&mut _unused_where_clause).translate(self)?;
let mut where_clause = self.generics.make_where_clause();
let mut translator = Translator::new(&mut where_clause);
for item in &mut self.items {
translator.translate(item)?;
}
Ok(())
}
}
/// Walks an item's generic arguments, rewriting logic expressions into types
/// and accumulating the corresponding `where` predicates.
struct Translator<'g> {
// Fallback clause used when the item has no where clause of its own.
where_clause: &'g mut syn::WhereClause,
}
impl<'g> Translator<'g> {
fn new(where_clause: &'g mut syn::WhereClause) -> Self {
Self { where_clause }
}
fn translate(&mut self, item: &mut impl Generics) -> syn::Result<()> {
// 1. Pull any explicit `_: Verify<...>` bound out of the where clause.
let verify_predicates =
remove_verify_bound(item.where_clause().unwrap_or(self.where_clause))?;
// 2. Recurse into nested generic arguments first.
for generics in item.generics() {
if let syn::PathArguments::AngleBracketed(generic_args) = generics {
for arg in generic_args.args.iter_mut() {
self.translate(arg)?;
}
}
}
// 3. Interpret each generic-argument list as logic clauses.
let logic = item
.generics()
.iter()
.clone()
.map(|generics| Into::<Logic>::into((*generics).clone()))
.collect::<Vec<_>>();
// 4. Replace each argument list with the `Output`-projection types
// derived from its clauses.
for (generics, logic) in item.generics().into_iter().zip(logic.iter()) {
let generics: &mut syn::PathArguments = generics;
let new_generics: &Vec<syn::Type> = &logic
.clauses
.clone()
.into_iter()
.map(|expr| {
TryInto::<Op>::try_into(expr).and_then(|op| TryInto::<syn::Type>::try_into(op))
})
.collect::<syn::Result<Vec<_>>>()?;
if let syn::PathArguments::AngleBracketed(generic_args) = generics {
generic_args.args = std::iter::FromIterator::<syn::GenericArgument>::from_iter(
new_generics.into_iter().map(|ty| syn::parse_quote!(#ty)),
);
}
}
// 5. Emit the supporting predicates for every operator clause (plain
// paths need none).
let where_clause = item.where_clause().unwrap_or(self.where_clause);
for logic in logic {
where_clause.predicates.extend(
&mut logic
.clauses
.into_iter()
.filter(
// Unwrap because we would have errored earlier.
|expr| match TryInto::<Op>::try_into(expr.clone()).unwrap() {
Op::BinOp { .. } | Op::UnOp { .. } => true,
Op::Path(_) => false,
},
)
.map(|expr| {
Ok(
TryInto::<Vec<syn::PredicateType>>::try_into(TryInto::<Op>::try_into(
expr,
)?)?
.into_iter()
.map(|ty| syn::WherePredicate::Type(ty)),
)
})
.collect::<syn::Result<Vec<_>>>()?
.into_iter()
.flatten(),
);
}
// 6. Re-attach the predicates extracted from the Verify bound.
where_clause.predicates.extend(verify_predicates);
Ok(())
}
}
// ____ _
// / ___| ___ _ __ ___ _ __(_) ___ ___
// | | _ / _ \ '_ \ / _ \ '__| |/ __/ __|
// | |_| | __/ | | | __/ | | | (__\__ \
// \____|\___|_| |_|\___|_| |_|\___|___/
// FIGLET: Generics
/// Mutable access to every `PathArguments` reachable from a syntax node, plus
/// (optionally) the node's own `where` clause. Default: no where clause.
trait Generics: ToTokens {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments>;
fn where_clause<'g>(&'g mut self) -> Option<&'g mut syn::WhereClause> {
None
}
}
// Signature = its generics + argument types + return type.
impl Generics for syn::Signature {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
let mut generics = self.generics.generics();
generics.append(&mut self.inputs.generics());
generics.append(&mut self.output.generics());
generics
}
fn where_clause<'g>(&'g mut self) -> Option<&'g mut syn::WhereClause> {
self.generics.where_clause()
}
}
impl Generics for syn::Generics {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
// Ensure a where clause exists so it can be traversed below.
self.make_where_clause();
let where_clause = self.where_clause.as_mut().unwrap();
let mut generics = where_clause.generics();
generics.append(
&mut self
.params
.iter_mut()
.map(|param| match param {
syn::GenericParam::Type(item) => item.bounds.generics(),
// TODO: add support for other cases
_ => vec![],
})
.flatten()
.collect::<Vec<_>>(),
);
generics
}
fn where_clause<'g>(&'g mut self) -> Option<&'g mut syn::WhereClause> {
Some(self.make_where_clause())
}
}
impl Generics for syn::GenericArgument {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
match self {
syn::GenericArgument::Type(item) => item.generics(),
// TODO: add support for other cases
_ => vec![],
}
}
}
// Where clause: traverse both the bounded type and its bounds.
impl Generics for syn::WhereClause {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.predicates
.iter_mut()
.map(|predicate| match predicate {
syn::WherePredicate::Type(item) => {
let mut generics = item.bounded_ty.generics();
generics.append(&mut item.bounds.generics());
generics
}
// TODO: add support for other cases
_ => vec![],
})
.flatten()
.collect()
}
}
impl Generics for syn::punctuated::Punctuated<syn::TypeParamBound, syn::Token!(+)> {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.iter_mut()
.map(|bound| match bound {
syn::TypeParamBound::Trait(trait_bound) => trait_bound.path.generics(),
syn::TypeParamBound::Lifetime(_) => vec![],
})
.flatten()
.collect()
}
}
impl Generics for syn::punctuated::Punctuated<syn::FnArg, syn::Token!(,)> {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.iter_mut()
.map(|arg| arg.generics())
.flatten()
.collect()
}
}
// Items delegate to their signatures / contained types.
impl Generics for syn::ItemFn {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.sig.generics()
}
fn where_clause<'g>(&'g mut self) -> Option<&'g mut syn::WhereClause> {
self.sig.where_clause()
}
}
impl Generics for syn::ItemImpl {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
let mut generics = self.generics.generics();
generics.append(&mut self.self_ty.generics());
generics
}
fn where_clause<'g>(&'g mut self) -> Option<&'g mut syn::WhereClause> {
self.generics.where_clause()
}
}
// `self` receivers carry no path arguments.
impl Generics for syn::FnArg {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
match self {
syn::FnArg::Receiver(_) => vec![],
syn::FnArg::Typed(pat) => pat.ty.generics(),
}
}
}
impl Generics for syn::BareFnArg {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.ty.generics()
}
}
// A path contributes the non-empty argument lists of each of its segments.
impl Generics for syn::Path {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.segments
.iter_mut()
.map(|segment| segment.generics())
.flatten()
.filter(|generics| **generics != syn::PathArguments::None)
.collect()
}
}
impl Generics for syn::ReturnType {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
match self {
syn::ReturnType::Default => vec![],
syn::ReturnType::Type(_, ty) => ty.generics(),
}
}
}
impl Generics for syn::ImplItem {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
// TODO: implement support for other items.
match self {
syn::ImplItem::Method(item) => item.generics(),
syn::ImplItem::Type(item) => item.generics(),
_ => vec![],
}
}
fn where_clause<'g>(&'g mut self) -> Option<&'g mut syn::WhereClause> {
match self {
syn::ImplItem::Method(item) => item.where_clause(),
syn::ImplItem::Type(item) => item.where_clause(),
_ => None,
}
}
}
impl Generics for syn::ImplItemMethod {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.sig.generics()
}
fn where_clause<'g>(&'g mut self) -> Option<&'g mut syn::WhereClause> {
self.sig.where_clause()
}
}
impl Generics for syn::ImplItemType {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.ty.generics()
}
}
// Structural dispatch over type syntax; unhandled variants contribute nothing.
impl Generics for syn::Type {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
match self {
syn::Type::Array(ty) => ty.generics(),
syn::Type::BareFn(ty) => ty.generics(),
syn::Type::ImplTrait(ty) => ty.generics(),
syn::Type::Paren(ty) => ty.generics(),
syn::Type::Path(ty) => ty.generics(),
syn::Type::Ptr(ty) => ty.generics(),
syn::Type::Reference(ty) => ty.generics(),
syn::Type::Slice(ty) => ty.generics(),
syn::Type::TraitObject(ty) => ty.generics(),
syn::Type::Tuple(ty) => ty.generics(),
_ => vec![],
}
}
}
// Leaf/container type impls: each forwards to its element(s) or bounds.
impl Generics for syn::TypeArray {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.elem.generics()
}
}
impl Generics for syn::TypeBareFn {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.inputs
.iter_mut()
.map(|arg| arg.generics())
.flatten()
.chain(self.output.generics())
.collect()
}
}
impl Generics for syn::TypeImplTrait {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.bounds
.iter_mut()
.map(|bound| match bound {
syn::TypeParamBound::Trait(trait_bound) => trait_bound.path.generics(),
syn::TypeParamBound::Lifetime(_) => vec![],
})
.flatten()
.collect()
}
}
impl Generics for syn::TypeParen {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.elem.generics()
}
}
impl Generics for syn::TypePath {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.path.generics()
}
}
impl Generics for syn::TypePtr {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.elem.generics()
}
}
impl Generics for syn::TypeReference {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.elem.generics()
}
}
impl Generics for syn::TypeSlice {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.elem.generics()
}
}
impl Generics for syn::TypeTraitObject {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.bounds
.iter_mut()
.map(|bound| match bound {
syn::TypeParamBound::Trait(trait_bound) => trait_bound.path.generics(),
syn::TypeParamBound::Lifetime(_) => vec![],
})
.flatten()
.collect()
}
}
impl Generics for syn::TypeTuple {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.elems
.iter_mut()
.map(|ty| ty.generics())
.flatten()
.collect()
}
}
impl Generics for syn::PathSegment {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
self.arguments.generics()
}
}
// Base case of the recursion: a `PathArguments` yields itself.
impl Generics for syn::PathArguments {
fn generics<'g>(&'g mut self) -> Vec<&'g mut syn::PathArguments> {
vec![self]
}
}
// _____ _ _ _
// |_ _| __ __ _ _ __ ___| | __ _| |_(_) ___ _ __ ___
// | || '__/ _` | '_ \/ __| |/ _` | __| |/ _ \| '_ \/ __|
// | || | | (_| | | | \__ \ | (_| | |_| | (_) | | | \__ \
// |_||_| \__,_|_| |_|___/_|\__,_|\__|_|\___/|_| |_|___/
// FIGLET: Translations
/// Removes the single `_: Verify<...>` bound from `where_clause` (if present)
/// and returns the predicates derived from its logic clauses.
///
/// # Errors
/// Errors if more than one inferred (`_:`) bound exists, if the bound is not
/// of the form `_: Verify<_>`, or if a clause cannot be translated.
fn remove_verify_bound(
    where_clause: &mut syn::WhereClause,
) -> syn::Result<Vec<syn::WherePredicate>> {
    // Split ordinary predicates from the special `_: ...` (inferred type) bound.
    let (predicates, inferred_bounds): (Vec<_>, Vec<_>) = where_clause
        .clone()
        .predicates
        .into_iter()
        .partition(|clause| match clause {
            syn::WherePredicate::Type(syn::PredicateType {
                bounded_ty: syn::Type::Infer(_),
                ..
            }) => false,
            _ => true,
        });
    where_clause.predicates = predicates.into_iter().collect();
    // Idiom fix: `is_empty()` over `len() == 0`.
    if inferred_bounds.is_empty() {
        return Ok(vec![]);
    }
    if inferred_bounds.len() > 1 {
        return Err(syn::Error::new(
            inferred_bounds[1].span(),
            "did not expect to find second `Verify` bound",
        ));
    }
    let bounds = if let Some(syn::WherePredicate::Type(syn::PredicateType { bounds, .. })) =
        inferred_bounds.first()
    {
        Ok(bounds)
    } else {
        Err(syn::Error::new(
            where_clause.span(),
            "expected `_: Verify<_>`",
        ))
    }?;
    let syn::TraitBound { ref path, .. } =
        if let Some(syn::TypeParamBound::Trait(bound)) = bounds.first() {
            Ok(bound)
        } else {
            Err(syn::Error::new(where_clause.span(), "expected `Verify<_>`"))
        }?;
    // Idiom fix: `syn::Ident` compares directly against string literals; no
    // intermediate `to_string()` allocation needed.
    let generics = if path.segments.len() == 1
        && path.segments.last().unwrap().ident == "Verify"
    {
        Ok(&path.segments.last().unwrap().arguments)
    } else {
        Err(syn::Error::new(path.span(), "expected `Verify<_>`"))
    }?;
    // Each clause yields its "truthiness" predicate plus supporting predicates.
    let logic: Logic = generics.clone().into();
    let predicates = logic
        .clauses
        .into_iter()
        .map(|clause| {
            Ok(vec![syn::WherePredicate::Type(TryInto::try_into(
                TryInto::<Op>::try_into(clause.clone())?,
            )?)]
            .into_iter()
            .chain(
                TryInto::<Vec<_>>::try_into(TryInto::<Op>::try_into(clause)?)?
                    .into_iter()
                    .map(|p| syn::WherePredicate::Type(p)),
            ))
        })
        .collect::<syn::Result<Vec<_>>>()?;
    Ok(predicates.into_iter().flatten().collect())
}
/// Builds a `syn::PredicateType` `{bounded type}: {bounds}` from two
/// token-tree fragments via `parse_quote!`.
macro_rules! predicate {
({$($left:tt)*}: {$($right:tt)*}) => {
syn::PredicateType {
bounded_ty: syn::parse_quote! { $($left)* },
bounds: syn::parse_quote! { $($right)* },
lifetimes: Default::default(),
colon_token: Default::default(),
}
};
}
// This conversion does not include the "truthiness" bound, which can be obtained by converting the
// Op into a single PredicateType.
// Produces the *supporting* typenum bounds an operator needs to type-check
// (e.g. Cmp for comparisons), recursing into operands.
impl TryFrom<Op> for Vec<syn::PredicateType> {
type Error = syn::Error;
fn try_from(from: Op) -> syn::Result<Self> {
let op_name = from.get_op_name();
Ok(match from {
Op::BinOp { op, left, right } => {
let op_name = op_name?;
let left_ty: syn::Type = (*left.clone()).try_into()?;
let right_ty: syn::Type = (*right.clone()).try_into()?;
match op {
syn::BinOp::Eq(_)
| syn::BinOp::Ne(_)
| syn::BinOp::Le(_)
| syn::BinOp::Ge(_) => vec![predicate! {{ #left_ty }: { Cmp<#right_ty> }}],
syn::BinOp::Lt(_) => vec![
predicate! {{ #left_ty }: { Cmp<#right_ty> }},
predicate! {{ #left_ty }: { IsLessOrEqual<#right_ty> }},
],
syn::BinOp::Gt(_) => vec![
predicate! {{ #left_ty }: { Cmp<#right_ty> }},
predicate! {{ #left_ty }: { IsGreaterOrEqual<#right_ty> }},
],
// Arithmetic: require the Output to be an unsigned, comparable type.
syn::BinOp::Add(_)
| syn::BinOp::Div(_)
| syn::BinOp::Mul(_)
| syn::BinOp::Rem(_)
| syn::BinOp::Shl(_)
| syn::BinOp::Shr(_)
| syn::BinOp::Sub(_) => vec![
predicate! {{ <#left_ty as #op_name<#right_ty>>::Output }: { Unsigned }},
predicate! {{ <#left_ty as #op_name<#right_ty>>::Output }: { Cmp }},
predicate! {{
<#left_ty as #op_name<#right_ty>>::Output }:
{ IsEqual<<#left_ty as #op_name<#right_ty>>::Output>
}},
],
_ => vec![],
}
.into_iter()
.chain(vec![predicate! {{ #left_ty }: { #op_name<#right_ty> }}])
.chain(TryInto::<Self>::try_into(*left)?.into_iter())
.chain(TryInto::<Self>::try_into(*right)?.into_iter())
.collect()
}
Op::UnOp { left, .. } => {
let op_name = op_name?;
let left_ty: syn::Type = (*left.clone()).try_into()?;
vec![predicate! {{ #left_ty }: { #op_name }}]
.into_iter()
.chain(TryInto::<Self>::try_into(*left)?.into_iter())
.collect()
}
// Bare paths need no supporting bounds.
Op::Path(_) => vec![],
})
}
}
// The single "truthiness" predicate for an Op: the expression must evaluate
// to B1 (or, for `==` with an operator operand, the Output must match).
impl TryFrom<Op> for syn::PredicateType {
type Error = syn::Error;
fn try_from(from: Op) -> syn::Result<Self> {
let op_name = from.get_op_name();
match from {
// `a op b == c` forms get an `Output = ...` equality constraint
// instead of the generic `IsEqual<..., Output = B1>` bound.
Op::BinOp {
op: syn::BinOp::Eq(_),
left,
right,
} => {
let op = op_name?;
let left_op = left.get_op_name();
let right_op = right.get_op_name();
match (*left, *right) {
(
Op::BinOp {
left: left_left,
right: left_right,
..
},
right,
) => {
let left_op = left_op?;
let left_left: syn::Type = (*left_left).try_into()?;
let left_right: syn::Type = (*left_right).try_into()?;
let right: syn::Type = (right).try_into()?;
Ok(predicate! {{ #left_left }: { #left_op<#left_right, Output = #right> }})
}
(
left,
Op::BinOp {
left: right_left,
right: right_right,
..
},
) => {
let right_op = right_op?;
let right_left: syn::Type = (*right_left).try_into()?;
let right_right: syn::Type = (*right_right).try_into()?;
let left: syn::Type = (left).try_into()?;
Ok(
predicate! {{ #right_left }: { #right_op<#right_right, Output = #left> }},
)
}
(left, right) => {
let left: syn::Type = (left).try_into()?;
let right: syn::Type = (right).try_into()?;
Ok(predicate! {{ #left }: { #op<#right, Output = B1> }})
}
}
}
Op::BinOp { left, right, .. } => {
let op = op_name?;
let left: syn::Type = (*left).try_into()?;
let right: syn::Type = (*right).try_into()?;
Ok(predicate! {{ #left }: { #op<#right, Output = B1> }})
}
Op::UnOp { left, .. } => {
let op_name = op_name?;
let left: syn::Type = (*left).try_into()?;
Ok(predicate! {{ #left }: { #op_name<Output = B1> }})
}
// A bare path must itself be B1.
Op::Path(path) => Ok(predicate! {{ #path }: { IsEqual<B1, Output = B1> }}),
}
}
}
// Rewrites an Op into the type-level expression that computes its value:
// operators become `<L as OpTrait<R>>::Output` projections.
impl TryFrom<Op> for syn::Type {
type Error = syn::Error;
fn try_from(from: Op) -> syn::Result<Self> {
let op_name = from.get_op_name();
match from {
Op::BinOp { left, right, .. } => {
let op = op_name?;
let left: syn::Type = (*left).try_into()?;
let right: syn::Type = (*right).try_into()?;
Ok(syn::parse_quote! { <#left as #op<#right>>::Output })
}
Op::UnOp { left, .. } => {
let op_name = op_name?;
let left: syn::Type = (*left).try_into()?;
Ok(syn::parse_quote! { <#left as #op_name>::Output })
}
Op::Path(path) => Ok(syn::parse_quote!(#path)),
}
}
}
impl TryFrom<syn::GenericArgument> for Op {
type Error = syn::Error;
fn try_from(arg: syn::GenericArgument) -> syn::Result<Self> {
match arg {
// Const generic arguments are parsed as logical expressions.
syn::GenericArgument::Const(expr) => expr.try_into(),
// Plain types pass through as path operands.
syn::GenericArgument::Type(ty) => Ok(Self::Path(syn::parse_quote!(#ty))),
unsupported_expr => Err(syn::Error::new(
unsupported_expr.span(),
"unsupported logical expression",
)),
}
}
}
// Recursive descent over expression syntax into the Op tree.
impl TryFrom<syn::Expr> for Op {
type Error = syn::Error;
fn try_from(expr: syn::Expr) -> syn::Result<Self> {
match expr {
syn::Expr::Binary(syn::ExprBinary {
op, left, right, ..
}) => Ok(Op::BinOp {
op,
left: Box::new((*left).try_into()?),
right: Box::new((*right).try_into()?),
}),
syn::Expr::Unary(syn::ExprUnary { op, expr, .. }) => Ok(Op::UnOp {
op,
left: Box::new((*expr).try_into()?),
}),
syn::Expr::Lit(syn::ExprLit { lit, .. }) => Ok(lit.try_into()?),
syn::Expr::Path(syn::ExprPath { path, .. }) => Ok(Op::Path(path)),
// Parentheses are transparent.
syn::Expr::Paren(syn::ExprParen { expr, .. }) => Ok((*expr).try_into()?),
// `{ expr }` blocks with exactly one statement are unwrapped.
syn::Expr::Block(syn::ExprBlock {
block: syn::Block { stmts, .. },
..
}) if stmts.len() == 1 => {
let stmt = stmts.first().unwrap();
let expr: syn::Expr = syn::parse_quote!(#stmt);
expr.try_into()
}
unsupported_expr => Err(syn::Error::new(
unsupported_expr.span(),
"unsupported logical expression",
)),
}
}
}
// Literals map onto typenum names: bools -> B1/B0, ints -> U{n}.
impl TryFrom<syn::Lit> for Op {
type Error = syn::Error;
fn try_from(lit: syn::Lit) -> syn::Result<Self> {
match lit {
syn::Lit::Bool(syn::LitBool { value, .. }) => Ok(Op::Path(if value {
syn::parse_quote!(B1)
} else {
syn::parse_quote!(B0)
})),
syn::Lit::Int(value) => Ok(Op::Path({
let ty = syn::Ident::new(
&format!("U{}", value.base10_parse::<usize>()?),
value.span(),
);
syn::parse_quote!(#ty)
})),
unsupported_expr => Err(syn::Error::new(
unsupported_expr.span(),
"only bool and int literals are supported here",
)),
}
}
}
// _____
// |_ _| _ _ __ ___ ___
// | || | | | '_ \ / _ \/ __|
// | || |_| | |_) | __/\__ \
// |_| \__, | .__/ \___||___/
// |___/|_|
// FIGLET: Types
/// The list of logic clauses extracted from a `Verify<...>` argument list.
#[derive(Debug)]
struct Logic {
clauses: Vec<syn::GenericArgument>,
}
/// Expression tree for a logic clause: binary/unary operators over paths.
#[derive(Clone, Debug)]
enum Op {
BinOp {
op: syn::BinOp,
left: Box<Self>,
right: Box<Self>,
},
UnOp {
op: syn::UnOp,
left: Box<Self>,
},
/// Leaf: a type-level value (e.g. `B1`, `U3`, a generic parameter).
Path(syn::Path),
}
impl Op {
/// Maps the operator onto the corresponding typenum trait path
/// (e.g. `==` -> `IsEqual`, `!` -> `Not`); errors for unsupported
/// operators and for `Path` leaves (which have no operator).
fn get_op_name(&self) -> syn::Result<syn::Path> {
match self {
Op::BinOp { op, .. } => {
// Local table macro keeps the operator -> trait mapping compact.
macro_rules! op_names {
($op:ident { $($from:ident => $to:tt,)* }) => {
match $op {
$(syn::BinOp::$from(_) => Ok(syn::parse_quote!($to)),)*
unsupported_expr => Err(syn::Error::new(
unsupported_expr.span(),
"unsupported logical expression",
)),
}
}
}
op_names! {
op {
Add => Add,
BitAnd => BitAnd,
BitOr => BitOr,
BitXor => BitXor,
Div => Div,
Eq => IsEqual,
Ge => IsGreaterOrEqual,
Gt => IsGreater,
Le => IsLessOrEqual,
Lt => IsLess,
Mul => Mul,
Rem => Rem,
Ne => IsNotEqual,
Shl => Shl,
Shr => Shr,
Sub => Sub,
}
}
}
Op::UnOp { op, .. } => match op {
syn::UnOp::Not(_) => Ok(syn::parse_quote!(Not)),
unsupported_expr => Err(syn::Error::new(
unsupported_expr.span(),
"unsupported logical expression",
)),
},
_ => Err(syn::Error::new(
proc_macro2::Span::call_site(),
"unimplemented",
)),
}
}
}
// _____ _
// |_ _|__ ___| |_ ___
// | |/ _ \/ __| __/ __|
// | | __/\__ \ |_\__ \
// |_|\___||___/\__|___/
// FIGLET: Tests
#[cfg(test)]
#[allow(non_snake_case)]
mod tests {
use super::*;
use std::convert::{TryFrom, TryInto};
use syn::parse_quote;
macro_rules! op {
($($t:tt)*) => {{
let expr: syn::Expr = parse_quote! { $($t)* };
Op::try_from(expr).unwrap()
}};
}
macro_rules! ty {
($($t:tt)*) => {{
let ty: syn::Type = parse_quote! { $($t)* };
ty
}};
}
macro_rules! generics {
($(<$($t:tt),*>),*$(,)?) => {
vec![
$(&mut syn::PathArguments::AngleBracketed(parse_quote! { <$($t),*> })),*
]
};
($(::<$($t:tt),*>),*$(,)?) => {
vec![
$(&mut syn::PathArguments::AngleBracketed(parse_quote! { ::<$($t),*> })),*
]
};
}
fn assert_into<L: TryInto<R>, R: std::fmt::Debug + Eq>(l: L, r: R)
where
<L as TryInto<R>>::Error: std::fmt::Debug,
{
assert_eq!(l.try_into().unwrap(), r)
}
    /// Asserts that `ty.generics()` yields exactly the expected list of
    /// angle-bracketed argument lists. `Generics` is defined elsewhere in
    /// this file; `ty` is taken by value because `generics()` apparently
    /// needs `&mut` access — TODO confirm against the trait definition.
    fn assert_generics<Ty: Generics>(mut ty: Ty, expected: Vec<&mut syn::PathArguments>) {
        assert_eq!(ty.generics(), expected)
    }
// _____ __ ____ _
// |_ _| _ _ __ ___ \ \ / ___| ___ _ __ ___ _ __(_) ___ ___
// | || | | | '_ \ / _ \ _____\ \ | | _ / _ \ '_ \ / _ \ '__| |/ __/ __|
// | || |_| | |_) | __/ |_____/ / | |_| | __/ | | | __/ | | | (__\__ \
// |_| \__, | .__/ \___| /_/ \____|\___|_| |_|\___|_| |_|\___|___/
// |___/|_|
// FIGLET: Type -> Generics
#[test]
fn ident_path_segment_yields_generic_args() {
assert_generics::<syn::Type>(parse_quote! { Type<A, B, C> }, generics![<A, B, C>]);
}
#[test]
fn colon_path_segment_yields_generic_args() {
assert_generics::<syn::Type>(parse_quote! { Type::<A, B, C> }, generics![::<A, B, C>]);
}
#[test]
fn path_yields_generics_from_all_segments() {
assert_generics::<syn::Type>(
parse_quote! { a::<A>::b::<B>::c::<C> },
generics![::<A>, ::<B>, ::<C>],
);
}
#[test]
fn generics_from_each_tuple_element_are_collected() {
assert_generics::<syn::Type>(parse_quote! { (L<A, B>, R<C>) }, generics![<A, B>, <C>]);
}
#[test]
fn slice_type_yields_generic_args() {
assert_generics::<syn::Type>(parse_quote! { [Type<A, B>] }, generics![<A, B>]);
}
#[test]
fn array_type_yields_generic_args() {
assert_generics::<syn::Type>(parse_quote! { [Type<C>; 0] }, generics![<C>]);
}
#[test]
fn ptr_type_yields_generic_args() {
assert_generics::<syn::Type>(parse_quote! { *const Type<C> }, generics![<C>]);
}
#[test]
fn ref_type_yields_generic_args() {
assert_generics::<syn::Type>(parse_quote! { &Type<C> }, generics![<C>]);
}
#[test]
fn paren_type_yields_generic_args() {
assert_generics::<syn::Type>(parse_quote! { (Type<C>) }, generics![<C>]);
}
#[test]
fn impl_trait_type_yields_generic_args() {
assert_generics::<syn::Type>(
parse_quote! { impl Trait0<A> + Trait1<B, C>},
generics![<A>, <B, C>],
);
}
#[test]
fn trait_object_type_yields_generic_args() {
assert_generics::<syn::Type>(
parse_quote! { dyn Trait0<A, B> + Trait1<C>},
generics![<A, B>, <C>],
);
}
#[test]
fn bare_fn_type_yields_generic_args_for_inputs_and_outputs() {
assert_generics::<syn::Type>(
parse_quote! { fn(l: Left<A, B>, r: Right<C>) -> Out<D> },
generics![<A, B>, <C>, <D>],
);
}
#[test]
fn impl_item_method_yields_generic_args_for_inputs_and_outputs() {
assert_generics::<syn::ImplItem>(
parse_quote! { fn f(l: Left<A, B>, r: Right<C>) -> Out<D> { Default::default() } },
generics![<A, B>, <C>, <D>],
);
}
#[test]
fn impl_item_type_yields_generic_args_for_inputs_and_outputs() {
assert_generics::<syn::ImplItem>(parse_quote! { type Type = Impl<D>; }, generics![<D>]);
}
#[test]
fn item_fn_yields_generic_args_for_inputs_outputs_and_generics() {
assert_generics::<syn::ItemFn>(
parse_quote! {
fn f<G0: Trait<A>, G1: Trait<B>>(a0: A0<G0, C>, a1: A1<G1, D>) -> R<E>
where
P0<F>: Trait<G>,
{
}
},
generics![<F>, <G>, <A>, <B>, <G0, C>, <G1, D>, <E>],
);
}
#[test]
fn item_impl_yields_generic_args_for_self_and_generics() {
assert_generics::<syn::ItemImpl>(
parse_quote! {
impl<G0: Trait<A>, G1: Trait<B>> Type<G0, G1>
where
P0<F>: Trait<G>,
{
}
},
generics![<F>, <G>, <A>, <B>, <G0, G1>],
);
}
// ___ __ _____
// / _ \ _ __ \ \ |_ _| _ _ __ ___
// | | | | '_ \ _____\ \ | || | | | '_ \ / _ \
// | |_| | |_) | |_____/ / | || |_| | |_) | __/
// \___/| .__/ /_/ |_| \__, | .__/ \___|
// |_| |___/|_|
// FIGLET: Op -> Type
#[test]
fn binary_op_as_type_yields_output_of_op_trait() {
assert_into(op! { L + R }, ty! { <L as Add<R>>::Output });
assert_into(op! { L & R }, ty! { <L as BitAnd<R>>::Output });
assert_into(op! { L | R }, ty! { <L as BitOr<R>>::Output });
assert_into(op! { L ^ R }, ty! { <L as BitXor<R>>::Output });
assert_into(op! { L / R }, ty! { <L as Div<R>>::Output });
assert_into(op! { L == R }, ty! { <L as IsEqual<R>>::Output });
assert_into(op! { L >= R }, ty! { <L as IsGreaterOrEqual<R>>::Output });
assert_into(op! { L > R }, ty! { <L as IsGreater<R>>::Output });
assert_into(op! { L <= R }, ty! { <L as IsLessOrEqual<R>>::Output });
assert_into(op! { L < R }, ty! { <L as IsLess<R>>::Output });
assert_into(op! { L * R }, ty! { <L as Mul<R>>::Output });
assert_into(op! { L % R }, ty! { <L as Rem<R>>::Output });
assert_into(op! { L != R }, ty! { <L as IsNotEqual<R>>::Output });
assert_into(op! { L << R }, ty! { <L as Shl<R>>::Output });
assert_into(op! { L >> R }, ty! { <L as Shr<R>>::Output });
assert_into(op! { L - R }, ty! { <L as Sub<R>>::Output });
}
#[test]
fn unary_op_as_type_yields_output_of_op_trait() {
assert_into(op! { !V }, ty! { <V as Not>::Output });
}
#[test]
fn path_op_as_type_yields_path() {
assert_into(op! { V }, ty! { V });
}
// ___ __ ____ _ _ _
// / _ \ _ __ \ \ | _ \ _ __ ___ __| (_) ___ __ _| |_ ___
// | | | | '_ \ _____\ \ | |_) | '__/ _ \/ _` | |/ __/ _` | __/ _ \
// | |_| | |_) | |_____/ / | __/| | | __/ (_| | | (_| (_| | || __/
// \___/| .__/ /_/ |_| |_| \___|\__,_|_|\___\__,_|\__\___|
// |_|
// FIGLET: Op -> Predicate
#[test]
fn path_as_predicate_yeilds_is_equal_to_true_bound() {
assert_into(op! { V }, predicate! {{ V }: { IsEqual<B1, Output = B1> }});
}
#[test]
fn binary_ops_with_boolean_output_yield_op_on_right_with_true_output_bound() {
// NB: == is special, and is tested separately
assert_into(
op! { A & B },
predicate! {{ A }: { BitAnd<B, Output = B1> }},
);
assert_into(op! { A | B }, predicate! {{ A }: { BitOr<B, Output = B1> }});
assert_into(
op! { A ^ B },
predicate! {{ A }: { BitXor<B, Output = B1> }},
);
assert_into(
op! { A >= B },
predicate! {{ A }: { IsGreaterOrEqual<B, Output = B1> }},
);
assert_into(
op! { A > B },
predicate! {{ A }: { IsGreater<B, Output = B1> }},
);
assert_into(
op! { A <= B },
predicate! {{ A }: { IsLessOrEqual<B, Output = B1> }},
);
assert_into(
op! { A < B },
predicate! {{ A }: { IsLess<B, Output = B1> }},
);
assert_into(
op! { A != B },
predicate! {{ A }: { IsNotEqual<B, Output = B1> }},
);
}
#[test]
fn binary_eq_op_with_path_or_unary_on_both_sides_yields_equality_bound_with_output_of_true() {
assert_into(
op! { A == B },
predicate! {{ A }: { IsEqual<B, Output = B1> }},
);
assert_into(
op! { A == !B },
predicate! {{ A }: { IsEqual<<B as Not>::Output, Output = B1> }},
);
assert_into(
op! { !A == B },
predicate! {{ <A as Not>::Output }: { IsEqual<B, Output = B1> }},
);
assert_into(
op! { !A == !B },
predicate! {{ <A as Not>::Output }: { IsEqual<<B as Not>::Output, Output = B1> }},
);
}
#[test]
fn binary_eq_op_with_binary_op_on_left_yields_bound_of_left_op_with_output_of_right_op() {
assert_into(
op! { A + B == C },
predicate! {{ A }: { Add<B, Output = C> }},
);
}
#[test]
fn binary_eq_op_with_binary_op_on_right_yields_bound_of_right_op_with_output_of_left_op() {
assert_into(
op! { C == A + B },
predicate! {{ A }: { Add<B, Output = C> }},
);
}
// ___ __
// / _ \ _ __ \ \
// | | | | '_ \ _____\ \
// | |_| | |_) | |_____/ /
// \___/| .__/ /_/
// |_|
// __ __ ______ _ _ _ __
// \ \ / /__ ___ / / _ \ _ __ ___ __| (_) ___ __ _| |_ ___\ \
// \ \ / / _ \/ __/ /| |_) | '__/ _ \/ _` | |/ __/ _` | __/ _ \\ \
// \ V / __/ (__\ \| __/| | | __/ (_| | | (_| (_| | || __// /
// \_/ \___|\___|\_\_| |_| \___|\__,_|_|\___\__,_|\__\___/_/
// FIGLET: Op -> Vec<Predicate>
#[test]
fn path_op_yields_empty() {
assert_into(op! { V }, vec![]);
}
#[test]
fn unary_op_yields_unary_op_bound_and_bounds_for_operand() {
assert_into(op! { !V }, vec![predicate! {{ V }: { Not }}]);
assert_into(
op! { !(A | (B & C)) },
vec![
predicate! {{ <A as BitOr<<B as BitAnd<C>>::Output>>::Output }: { Not }},
predicate! {{ A }: { BitOr<<B as BitAnd<C>>::Output> }},
predicate! {{ B }: { BitAnd<C> }},
],
);
}
#[test]
fn binary_eq_ne_le_and_ge_add_extra_cmp_bound() {
assert_into(
op! { A == B },
vec![
predicate! {{ A }: { Cmp<B> }},
predicate! {{ A }: { IsEqual<B> }},
],
);
assert_into(
op! { A != B },
vec![
predicate! {{ A }: { Cmp<B> }},
predicate! {{ A }: { IsNotEqual<B> }},
],
);
assert_into(
op! { A <= B },
vec![
predicate! {{ A }: { Cmp<B> }},
predicate! {{ A }: { IsLessOrEqual<B> }},
],
);
assert_into(
op! { A >= B },
vec![
predicate! {{ A }: { Cmp<B> }},
predicate! {{ A }: { IsGreaterOrEqual<B> }},
],
);
}
#[test]
fn binary_lt_and_gt_add_extra_cmp_and_le_or_ge_bound() {
assert_into(
op! { A < B },
vec![
predicate! {{ A }: { Cmp<B> }},
predicate! {{ A }: { IsLessOrEqual<B> }},
predicate! {{ A }: { IsLess<B> }},
],
);
assert_into(
op! { A > B },
vec![
predicate! {{ A }: { Cmp<B> }},
predicate! {{ A }: { IsGreaterOrEqual<B> }},
predicate! {{ A }: { IsGreater<B> }},
],
);
}
#[test]
fn binary_add_div_mul_rem_shl_shr_sub_add_extra_unsigned_cmp_and_eq_bounds() {
assert_into(
op! { A + B },
vec![
predicate! {{ <A as Add<B>>::Output }: { Unsigned }},
predicate! {{ <A as Add<B>>::Output }: { Cmp }},
predicate! {{ <A as Add<B>>::Output }: { IsEqual<<A as Add<B>>::Output> }},
predicate! {{ A }: { Add<B> }},
],
);
assert_into(
op! { A / B },
vec![
predicate! {{ <A as Div<B>>::Output }: { Unsigned }},
predicate! {{ <A as Div<B>>::Output }: { Cmp }},
predicate! {{ <A as Div<B>>::Output }: { IsEqual<<A as Div<B>>::Output> }},
predicate! {{ A }: { Div<B> }},
],
);
assert_into(
op! { A * B },
vec![
predicate! {{ <A as Mul<B>>::Output }: { Unsigned }},
predicate! {{ <A as Mul<B>>::Output }: { Cmp }},
predicate! {{ <A as Mul<B>>::Output }: { IsEqual<<A as Mul<B>>::Output> }},
predicate! {{ A }: { Mul<B> }},
],
);
assert_into(
op! { A % B },
vec![
predicate! {{ <A as Rem<B>>::Output }: { Unsigned }},
predicate! {{ <A as Rem<B>>::Output }: { Cmp }},
predicate! {{ <A as Rem<B>>::Output }: { IsEqual<<A as Rem<B>>::Output> }},
predicate! {{ A }: { Rem<B> }},
],
);
assert_into(
op! { A << B },
vec![
predicate! {{ <A as Shl<B>>::Output }: { Unsigned }},
predicate! {{ <A as Shl<B>>::Output }: { Cmp }},
predicate! {{ <A as Shl<B>>::Output }: { IsEqual<<A as Shl<B>>::Output> }},
predicate! {{ A }: { Shl<B> }},
],
);
assert_into(
op! { A >> B },
vec![
predicate! {{ <A as Shr<B>>::Output }: { Unsigned }},
predicate! {{ <A as Shr<B>>::Output }: { Cmp }},
predicate! {{ <A as Shr<B>>::Output }: { IsEqual<<A as Shr<B>>::Output> }},
predicate! {{ A }: { Shr<B> }},
],
);
assert_into(
op! { A - B },
vec![
predicate! {{ <A as Sub<B>>::Output }: { Unsigned }},
predicate! {{ <A as Sub<B>>::Output }: { Cmp }},
predicate! {{ <A as Sub<B>>::Output }: { IsEqual<<A as Sub<B>>::Output> }},
predicate! {{ A }: { Sub<B> }},
],
);
}
// ____
// | _ \ __ _ _ __ ___ ___
// | |_) / _` | '__/ __|/ _ \
// | __/ (_| | | \__ \ __/
// |_| \__,_|_| |___/\___|
// FIGLET: Parse
macro_rules! parse_test {
(
parse {
$in:item
},
expect {
$out:item
},
) => {
let code_in: VerifiableItem = parse_quote! {
$in
};
let code_out = code_in.to_token_stream();
let expected: syn::Item = parse_quote! {
$out
};
assert_eq!(
code_out.to_string(),
expected.into_token_stream().to_string(),
);
};
}
#[test]
fn Multiple_Bit_identity_clauses_are_converted_to_multiple_bounds() {
parse_test! {
parse {
fn f<A: Bit, B: Bit>()
where
_: Verify<{ A }, { B }>,
{
}
},
expect {
fn f<A: Bit, B: Bit>()
where
A: IsEqual<B1, Output = B1>,
B: IsEqual<B1, Output = B1>
{
}
},
}
}
#[test]
fn Bit_equality_clause_is_converted_to_IsEqual_bound_with_added_output_of_true() {
parse_test! {
parse {
fn f<A: Bit, B: Bit>()
where
_: Verify<{ A == B }>,
{
}
},
expect {
fn f<A: Bit, B: Bit>()
where
A: IsEqual<B, Output = B1>,
A: Cmp<B>,
A: IsEqual<B>,
{
}
},
}
}
#[test]
fn parenthesized_Bit_clauses_are_unwrapped() {
parse_test! {
parse {
fn f<B: Bit>()
where
_: Verify<{ (!B) }>,
{
}
},
expect {
fn f<B: Bit>()
where
B: Not<Output = B1>,
B: Not,
{
}
},
}
}
#[test]
fn bool_literals_converted_to_B0_or_B1() {
parse_test! {
parse {
fn f<B: Bit>()
where
_: Verify<{ false == !B }, { true == B }>,
{
}
},
expect {
fn f<B: Bit>()
where
B0: IsEqual<<B as Not>::Output, Output = B1>,
B0: Cmp<<B as Not>::Output>,
B0: IsEqual<<B as Not>::Output>,
B: Not,
B1: IsEqual<B, Output = B1>,
B1: Cmp<B>,
B1: IsEqual<B>,
{
}
},
}
}
#[test]
fn usize_literals_converted_to_U() {
parse_test! {
parse {
fn f<Six: Unsigned, Zero: Unsigned>()
where
_: Verify<{ 6 == Six }, { 0 == Zero }>,
{
}
},
expect {
fn f<Six: Unsigned, Zero: Unsigned>()
where
U6: IsEqual<Six, Output = B1>,
U6: Cmp<Six>,
U6: IsEqual<Six>,
U0: IsEqual<Zero, Output = B1>,
U0: Cmp<Zero>,
U0: IsEqual<Zero>,
{
}
},
}
}
#[test]
fn can_verify_type_construction_in_fn_return_value() {
parse_test! {
parse {
fn f<A: Unsigned>() -> Container<{A + 1}>
{
}
},
expect {
fn f<A: Unsigned>() -> Container<<A as Add<U1>>::Output>
where
<A as Add<U1>>::Output: Unsigned,
<A as Add<U1>>::Output: Cmp,
<A as Add<U1>>::Output: IsEqual<<A as Add<U1>>::Output>,
A: Add<U1>,
{
}
},
}
}
#[test]
fn can_verify_impl_bounds() {
parse_test! {
parse {
impl<A: Unsigned, B: Unsigned> Trait for Struct<A, B>
where
_: Verify<{ A + B == 3 }>,
{
}
},
expect {
impl<A: Unsigned, B: Unsigned> Trait for Struct<A, B>
where
A: Add<B, Output = U3>,
<A as Add<B>>::Output: Cmp<U3>,
<A as Add<B>>::Output: IsEqual<U3>,
<A as Add<B>>::Output: Unsigned,
<A as Add<B>>::Output: Cmp,
<A as Add<B>>::Output: IsEqual<<A as Add<B>>::Output>,
A: Add<B>,
{
}
},
}
}
#[test]
fn can_verify_type_construction_in_associate_type() {
parse_test! {
parse {
impl<A: Unsigned, B: Unsigned> Trait for Struct<A, B>
{
type Type = Output<{A + B}>;
}
},
expect {
impl<A: Unsigned, B: Unsigned> Trait for Struct<A, B>
where
<A as Add<B>>::Output: Unsigned,
<A as Add<B>>::Output: Cmp,
<A as Add<B>>::Output: IsEqual<<A as Add<B>>::Output>,
A: Add<B>,
{
type Type = Output<<A as Add<B>>::Output>;
}
},
}
}
#[test]
fn can_verify_type_construction_in_nested_types() {
parse_test! {
parse {
fn f<L0: Unsigned, L1: Unsigned>() -> Container<{L0 + 0}, Outer<{L1 + 1}>>
{
Default::default()
}
},
expect {
fn f<L0: Unsigned, L1: Unsigned>(
) -> Container<<L0 as Add<U0>>::Output, Outer<<L1 as Add<U1>>::Output>>
where
<L0 as Add<U0>>::Output: Unsigned,
<L0 as Add<U0>>::Output: Cmp,
<L0 as Add<U0>>::Output: IsEqual<<L0 as Add<U0>>::Output>,
L0: Add<U0>,
<L1 as Add<U1>>::Output: Unsigned,
<L1 as Add<U1>>::Output: Cmp,
<L1 as Add<U1>>::Output: IsEqual<<L1 as Add<U1>>::Output>,
L1: Add<U1>,
{
Default::default()
}
},
}
}
}
|
use crypto_api_chachapoly::{ ChachaPolyError, ChachaPolyIetf };
include!("read_test_vectors.rs");
/// One ChaCha20-Poly1305 (IETF) round-trip test vector loaded by
/// `read_test_vectors!`. Trailing underscores pad the field names to equal
/// width — apparently so the macro's field lists align; confirm against
/// `read_test_vectors.rs`.
#[derive(Debug)]
pub struct TestVector {
    line: usize,        // source line in the vector file, used in failure messages
    key___: Vec<u8>,    // cipher key
    nonce_: Vec<u8>,    // nonce
    ad____: Vec<u8>,    // associated data (empty => plain-cipher vector)
    input_: Vec<u8>,    // plaintext
    output: Vec<u8>     // expected ciphertext (plus tag in AEAD mode)
}
impl TestVector {
    /// Runs this vector: plain-cipher mode when there is no associated
    /// data, AEAD mode otherwise.
    pub fn test(&self) {
        if self.ad____.is_empty() {
            self.test_cipher()
        } else {
            self.test_aead_cipher()
        }
    }
    /// Round-trips the vector through `ChachaPolyIetf` in plain-cipher mode.
    fn test_cipher(&self) {
        let cipher = ChachaPolyIetf::cipher();
        // Encrypt and compare against the expected ciphertext
        let mut ciphertext = vec![0; self.output.len()];
        let written = cipher
            .encrypt_to(&mut ciphertext, &self.input_, &self.key___, &self.nonce_)
            .unwrap();
        assert_eq!(self.output, &ciphertext[..written], "@{} failed", self.line);
        // Decrypt and compare against the original plaintext
        let mut plaintext = vec![0; self.output.len()];
        let written = cipher
            .decrypt_to(&mut plaintext, &self.output, &self.key___, &self.nonce_)
            .unwrap();
        assert_eq!(self.input_, &plaintext[..written], "@{} failed", self.line);
    }
    /// Round-trips the vector through `ChachaPolyIetf` in AEAD mode.
    fn test_aead_cipher(&self) {
        let aead = ChachaPolyIetf::aead_cipher();
        // Seal and compare against the expected ciphertext (incl. tag)
        let mut sealed = vec![0; self.output.len()];
        let written = aead
            .seal_to(&mut sealed, &self.input_, &self.ad____, &self.key___, &self.nonce_)
            .unwrap();
        assert_eq!(self.output, &sealed[..written], "@{} failed", self.line);
        // Open and compare against the original plaintext
        let mut opened = vec![0; self.output.len()];
        let written = aead
            .open_to(&mut opened, &self.output, &self.ad____, &self.key___, &self.nonce_)
            .unwrap();
        assert_eq!(self.input_, &opened[..written], "@{} failed", self.line);
    }
}
#[test]
fn test() {
    // Read test vectors (positive round-trip vectors from the bundled file)
    let vectors: Vec<TestVector> = read_test_vectors!(
        "chachapoly_ietf.txt"
        => TestVector{ line, key___, nonce_, ad____, input_, output }
    );
    // Test all vectors
    for vector in vectors { vector.test() }
}
/// A negative test vector: decrypting/opening `input` with these parameters
/// must fail with `ChachaPolyError::InvalidData` (e.g. a corrupted tag).
#[derive(Debug)]
pub struct ErrTestVector {
    line: usize,      // source line in the vector file, used in failure messages
    key__: Vec<u8>,   // cipher key
    nonce: Vec<u8>,   // nonce
    ad___: Vec<u8>,   // associated data (empty => plain-cipher vector)
    input: Vec<u8>    // ciphertext expected to be rejected
}
impl ErrTestVector {
    /// Runs this vector: plain-cipher mode when there is no associated
    /// data, AEAD mode otherwise.
    pub fn test(&self) {
        if self.ad___.is_empty() {
            self.test_cipher()
        } else {
            self.test_aead_cipher()
        }
    }
    /// Decrypting must fail with `ChachaPolyError::InvalidData`.
    fn test_cipher(&self) {
        let cipher = ChachaPolyIetf::cipher();
        let mut scratch = vec![0; self.input.len()];
        let failure = cipher
            .decrypt_to(&mut scratch, &self.input, &self.key__, &self.nonce)
            .unwrap_err();
        if !matches!(
            failure.downcast_ref::<ChachaPolyError>(),
            Some(ChachaPolyError::InvalidData)
        ) {
            panic!("Invalid error returned @{}", self.line)
        }
    }
    /// Opening must fail with `ChachaPolyError::InvalidData`.
    fn test_aead_cipher(&self) {
        let aead = ChachaPolyIetf::aead_cipher();
        let mut scratch = vec![0; self.input.len()];
        let failure = aead
            .open_to(&mut scratch, &self.input, &self.ad___, &self.key__, &self.nonce)
            .unwrap_err();
        if !matches!(
            failure.downcast_ref::<ChachaPolyError>(),
            Some(ChachaPolyError::InvalidData)
        ) {
            panic!("Invalid error returned @{}", self.line)
        }
    }
}
#[test]
fn test_err() {
    // Read test vectors (negative vectors: inputs that must be rejected)
    let vectors: Vec<ErrTestVector> = read_test_vectors!(
        "chachapoly_ietf_err.txt"
        => ErrTestVector{ line, key__, nonce, ad___, input }
    );
    // Test all vectors
    for vector in vectors { vector.test() }
}
/// An API-misuse test vector: calling the cipher with buffers of these
/// (invalid) lengths must fail with `ChachaPolyError::ApiMisuse(error_desc)`.
#[derive(Debug)]
pub struct ApiTestVector {
    line: usize,              // source line in the vector file, used in messages
    key_len___: usize,        // key buffer length to fabricate
    nonce_len_: usize,        // nonce buffer length to fabricate
    ad_len____: usize,        // AD length (0 => plain-cipher API is tested)
    input_len_: usize,        // input buffer length to fabricate
    output_len: usize,        // output buffer length to fabricate
    error_desc: &'static str  // expected ApiMisuse description
}
impl ApiTestVector {
    /// Runs this vector against the plain-cipher API when `ad_len____` is
    /// zero, otherwise against the AEAD API.
    pub fn test(&self) {
        match self.ad_len____ {
            0 => self.test_cipher(),
            _ => self.test_aead_cipher()
        }
    }
    /// Every plain-cipher entry point must reject the fabricated buffer
    /// lengths with `ApiMisuse(self.error_desc)`.
    fn test_cipher(&self) {
        // Create the cipher instance
        let cipher = ChachaPolyIetf::cipher();
        // Generate fake inputs
        let key = vec![0; self.key_len___];
        let nonce = vec![0; self.nonce_len_];
        let input = vec![0; self.input_len_];
        let mut buf = vec![0; self.output_len];
        // Helper to check the error
        // NOTE(review): this macro is duplicated verbatim in
        // `test_aead_cipher`; it stays local because it captures `self`.
        macro_rules! test_err {
            ($fn:expr => $call:expr) => ({
                let result = $call
                    .expect_err(&format!("`{}`: Unexpected success @{}", $fn, self.line));
                match result.downcast_ref::<ChachaPolyError>() {
                    Some(ChachaPolyError::ApiMisuse(desc)) => assert_eq!(
                        *desc, self.error_desc,
                        "`{}`: Invalid API-error description @{}", $fn, self.line
                    ),
                    _ => panic!("`{}`: Invalid error returned @{}", $fn, self.line)
                }
            });
        }
        // Test `encrypt` and `encrypt_to`
        test_err!("encrypt" => cipher.encrypt(&mut buf, input.len(), &key, &nonce));
        test_err!("encrypt_to" => cipher.encrypt_to(&mut buf, &input, &key, &nonce));
        // Test `decrypt` and `decrypt_to`
        test_err!("decrypt" => cipher.decrypt(&mut buf, input.len(), &key, &nonce));
        test_err!("decrypt_to" => cipher.decrypt_to(&mut buf, &input, &key, &nonce));
    }
    /// Every AEAD entry point must reject the fabricated buffer lengths
    /// with `ApiMisuse(self.error_desc)`.
    fn test_aead_cipher(&self) {
        // Create the cipher instance
        let aead_cipher = ChachaPolyIetf::aead_cipher();
        // Generate fake inputs
        let key = vec![0; self.key_len___];
        let nonce = vec![0; self.nonce_len_];
        let ad = vec![0; self.ad_len____];
        let input = vec![0; self.input_len_];
        let mut buf = vec![0; self.output_len];
        // Helper to check the error
        macro_rules! test_err {
            ($fn:expr => $call:expr) => ({
                let result = $call
                    .expect_err(&format!("`{}`: Unexpected success @{}", $fn, self.line));
                match result.downcast_ref::<ChachaPolyError>() {
                    Some(ChachaPolyError::ApiMisuse(desc)) => assert_eq!(
                        *desc, self.error_desc,
                        "`{}`: Invalid API-error description @{}", $fn, self.line
                    ),
                    _ => panic!("`{}`: Invalid error returned @{}", $fn, self.line)
                }
            });
        }
        // Test `seal` and `seal_to`
        test_err!("seal" => aead_cipher.seal(&mut buf, input.len(), &ad, &key, &nonce));
        test_err!("seal_to" => aead_cipher.seal_to(&mut buf, &input, &ad, &key, &nonce));
        // Test `open` and `open_to`
        test_err!("open" => aead_cipher.open(&mut buf, input.len(), &ad, &key, &nonce));
        test_err!("open_to" => aead_cipher.open_to(&mut buf, &input, &ad, &key, &nonce));
    }
}
#[test]
fn test_api() {
    // Read test vectors (API-misuse vectors: invalid buffer lengths)
    let vectors: Vec<ApiTestVector> = read_test_vectors!(
        "chachapoly_ietf_api.txt" => ApiTestVector {
            line, key_len___, nonce_len_,
            ad_len____, input_len_, output_len, error_desc
        }
    );
    // Test all vectors
    for vector in vectors { vector.test() }
}
#[doc = "Reader of register DDRCTRL_DERATEEN"]
pub type R = crate::R<u32, super::DDRCTRL_DERATEEN>;
#[doc = "Writer for register DDRCTRL_DERATEEN"]
pub type W = crate::W<u32, super::DDRCTRL_DERATEEN>;
#[doc = "Register DDRCTRL_DERATEEN `reset()`'s with value 0"]
impl crate::ResetValue for super::DDRCTRL_DERATEEN {
    type Type = u32;
    // Hardware reset value of the register (all fields cleared).
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DERATE_ENABLE`"]
pub type DERATE_ENABLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DERATE_ENABLE`"]
pub struct DERATE_ENABLE_W<'a> {
w: &'a mut W,
}
impl<'a> DERATE_ENABLE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 0: clear it, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `DERATE_VALUE`"]
pub type DERATE_VALUE_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DERATE_VALUE`"]
pub struct DERATE_VALUE_W<'a> {
w: &'a mut W,
}
impl<'a> DERATE_VALUE_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 1:2 (2-bit mask 0x03 shifted left by 1);
        // unsafe because out-of-range values are silently masked.
        self.w.bits = (self.w.bits & !(0x03 << 1)) | (((value as u32) & 0x03) << 1);
        self.w
    }
}
#[doc = "Reader of field `DERATE_BYTE`"]
pub type DERATE_BYTE_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DERATE_BYTE`"]
pub struct DERATE_BYTE_W<'a> {
w: &'a mut W,
}
impl<'a> DERATE_BYTE_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 4:7 (4-bit mask 0x0f shifted left by 4).
        self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4);
        self.w
    }
}
// Field readers: each extracts its field by mask/shift from the raw bits.
impl R {
    #[doc = "Bit 0 - DERATE_ENABLE"]
    #[inline(always)]
    pub fn derate_enable(&self) -> DERATE_ENABLE_R {
        DERATE_ENABLE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bits 1:2 - DERATE_VALUE"]
    #[inline(always)]
    pub fn derate_value(&self) -> DERATE_VALUE_R {
        DERATE_VALUE_R::new(((self.bits >> 1) & 0x03) as u8)
    }
    #[doc = "Bits 4:7 - DERATE_BYTE"]
    #[inline(always)]
    pub fn derate_byte(&self) -> DERATE_BYTE_R {
        DERATE_BYTE_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
}
// Field writers: each returns a write proxy borrowing this writer.
impl W {
    #[doc = "Bit 0 - DERATE_ENABLE"]
    #[inline(always)]
    pub fn derate_enable(&mut self) -> DERATE_ENABLE_W {
        DERATE_ENABLE_W { w: self }
    }
    #[doc = "Bits 1:2 - DERATE_VALUE"]
    #[inline(always)]
    pub fn derate_value(&mut self) -> DERATE_VALUE_W {
        DERATE_VALUE_W { w: self }
    }
    #[doc = "Bits 4:7 - DERATE_BYTE"]
    #[inline(always)]
    pub fn derate_byte(&mut self) -> DERATE_BYTE_W {
        DERATE_BYTE_W { w: self }
    }
}
|
use alloc::vec::Vec;
use core::marker::PhantomData;
use necsim_core_bond::{NonNegativeF64, PositiveF64};
use necsim_core::{
cogs::{
Backup, CoalescenceRngSample, EmigrationExit, Habitat, LineageReference,
LocallyCoherentLineageStore, RngCore,
},
landscape::{IndexedLocation, Location},
lineage::MigratingLineage,
simulation::partial::emigration_exit::PartialSimulation,
};
use crate::decomposition::Decomposition;
/// An `EmigrationExit` that routes lineages between spatial subdomains:
/// lineages dispersing into a different subdomain (as decided by the
/// `Decomposition`) are buffered in `emigrants` instead of continuing in the
/// local simulation.
#[allow(clippy::module_name_repetitions)]
#[derive(Debug)]
pub struct DomainEmigrationExit<H: Habitat, C: Decomposition<H>> {
    decomposition: C,
    // Buffered (target subdomain rank, lineage) pairs awaiting migration.
    emigrants: Vec<(u32, MigratingLineage)>,
    // Ties the habitat type parameter to the struct without storing one.
    _marker: PhantomData<H>,
}
#[contract_trait]
impl<H: Habitat, C: Decomposition<H>> Backup for DomainEmigrationExit<H, C> {
    // Deep-copies the exit, including every buffered emigrant. Unsafety is
    // inherited from the `Backup` trait contract (defined elsewhere) —
    // callers must uphold whatever invariants `backup_unchecked` requires.
    unsafe fn backup_unchecked(&self) -> Self {
        Self {
            decomposition: self.decomposition.backup_unchecked(),
            emigrants: self
                .emigrants
                .iter()
                .map(|(partition, migrating_lineage)| {
                    (*partition, migrating_lineage.backup_unchecked())
                })
                .collect(),
            _marker: PhantomData::<H>,
        }
    }
}
#[contract_trait]
impl<
        H: Habitat,
        C: Decomposition<H>,
        G: RngCore,
        R: LineageReference<H>,
        S: LocallyCoherentLineageStore<H, R>,
    > EmigrationExit<H, G, R, S> for DomainEmigrationExit<H, C>
{
    // Returns `Some(event unchanged)` when the dispersal target stays in the
    // local subdomain; otherwise removes the lineage from the local store,
    // buffers it for its target subdomain, and returns `None`.
    #[must_use]
    #[debug_ensures(ret.is_some() == (
        old(self.decomposition.map_location_to_subdomain_rank(
            &dispersal_target, &simulation.habitat
        )) == self.decomposition.get_subdomain_rank()
    ), "lineage only emigrates to other subdomains")]
    fn optionally_emigrate(
        &mut self,
        lineage_reference: R,
        dispersal_origin: IndexedLocation,
        dispersal_target: Location,
        prior_time: NonNegativeF64,
        event_time: PositiveF64,
        simulation: &mut PartialSimulation<H, G, R, S>,
        rng: &mut G,
    ) -> Option<(R, IndexedLocation, Location, NonNegativeF64, PositiveF64)> {
        let target_subdomain = self
            .decomposition
            .map_location_to_subdomain_rank(&dispersal_target, &simulation.habitat);
        // Local dispersal: nothing to do, hand the event straight back.
        if target_subdomain == self.decomposition.get_subdomain_rank() {
            return Some((
                lineage_reference,
                dispersal_origin,
                dispersal_target,
                prior_time,
                event_time,
            ));
        }
        // Cross-subdomain dispersal: emigrate the lineage out of the local
        // store and buffer it for the target subdomain. The coalescence RNG
        // sample is drawn here so the receiving subdomain replays the same
        // randomness.
        self.emigrants.push((
            target_subdomain,
            MigratingLineage {
                global_reference: simulation.lineage_store.emigrate(lineage_reference),
                dispersal_origin,
                dispersal_target,
                prior_time,
                event_time,
                coalescence_rng_sample: CoalescenceRngSample::new(rng),
            },
        ));
        None
    }
}
impl<H: Habitat, C: Decomposition<H>> DomainEmigrationExit<H, C> {
    /// Creates an exit with an empty emigrant buffer.
    #[must_use]
    pub fn new(decomposition: C) -> Self {
        Self {
            decomposition,
            emigrants: Vec::new(),
            _marker: PhantomData::<H>,
        }
    }
    /// Number of buffered emigrants awaiting migration.
    pub fn len(&self) -> usize {
        self.emigrants.len()
    }
    /// Whether no emigrants are currently buffered.
    pub fn is_empty(&self) -> bool {
        self.emigrants.is_empty()
    }
}
// Drains the buffered emigrants. Note: `Vec::pop` yields them in LIFO
// order, i.e. most recently buffered first.
impl<H: Habitat, C: Decomposition<H>> Iterator for DomainEmigrationExit<H, C> {
    type Item = (u32, MigratingLineage);
    fn next(&mut self) -> Option<Self::Item> {
        self.emigrants.pop()
    }
}
|
#[cfg(unix)]
pub use self::unix::*;
#[cfg(windows)]
pub use self::windows::*;
#[cfg(unix)]
mod unix {
    use std::fs::File;
    use std::io;
    use std::os::unix::fs::FileExt;
    /// Reads into `buf` at the given absolute file `offset` without moving
    /// the file cursor (POSIX `pread` semantics via `read_at`). May return
    /// fewer bytes than requested.
    #[inline]
    pub fn read_offset(file: &File, buf: &mut [u8], offset: u64) -> io::Result<usize> {
        file.read_at(buf, offset)
    }
    /// Writes `buf` at the given absolute file `offset` without moving the
    /// file cursor (POSIX `pwrite` semantics via `write_at`).
    #[inline]
    pub fn write_offset(file: &File, buf: &[u8], offset: u64) -> io::Result<usize> {
        file.write_at(buf, offset)
    }
}
#[cfg(windows)]
mod windows {
    use std::fs::File;
    use std::io;
    use std::os::windows::fs::FileExt;
    /// Reads into `buf` at the given absolute file `offset`.
    /// NOTE(review): unlike Unix `read_at`, Windows `seek_read` also moves
    /// the file cursor — confirm no caller relies on the cursor position
    /// being preserved across platforms.
    #[inline]
    pub fn read_offset(file: &File, buf: &mut [u8], offset: u64) -> io::Result<usize> {
        file.seek_read(buf, offset)
    }
    /// Writes `buf` at the given absolute file `offset`. Same cursor-moving
    /// caveat as `read_offset`.
    #[inline]
    pub fn write_offset(file: &File, buf: &[u8], offset: u64) -> io::Result<usize> {
        file.seek_write(buf, offset)
    }
}
|
use std::env;
use std::process;
use common::load_file;
use std::collections::HashSet;
/// Advent of Code 2018 day 3: overlapping fabric claims.
/// Part 1 prints the number of square inches claimed more than once;
/// part 2 prints the id of the claim that overlaps nothing.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        println!("day3 <file>");
        process::exit(1);
    }
    let rows = load_file(&args[1]);
    // Parses one claim of the form "#1 @ 1,3: 4x4" into
    // (id, x, y, width, height); panics on malformed input.
    fn parse_claim(row: &str) -> (i32, u32, u32, u32, u32) {
        let parts: Vec<&str> = row.split(' ').collect();
        let id = parts[0].replace("#", "").parse::<i32>().unwrap();
        let coord: Vec<&str> = parts[2].split(',').collect();
        let size: Vec<&str> = parts[3].split('x').collect();
        (
            id,
            coord[0].parse::<u32>().unwrap(),
            coord[1].replace(':', "").parse::<u32>().unwrap(),
            size[0].parse::<u32>().unwrap(),
            size[1].parse::<u32>().unwrap(),
        )
    }
    // 1000 x 1000 fabric; each cell is 0 (free), a claim id, or -1 (contested).
    let mut state = [[0i32; 1000]; 1000];
    let mut collisions = HashSet::new();
    for row in rows.iter() {
        let (id, x, y, w, h) = parse_claim(row);
        for xx in x..x + w {
            for yy in y..y + h {
                let cell = state[xx as usize][yy as usize];
                if cell > 0 {
                    // First overlap here: record both claims, mark contested.
                    collisions.insert(cell);
                    collisions.insert(id);
                    state[xx as usize][yy as usize] = -1;
                } else if cell < 0 {
                    // Already contested: only the new claim needs recording.
                    collisions.insert(id);
                } else {
                    state[xx as usize][yy as usize] = id;
                }
            }
        }
    }
    // Part 1: count contested cells.
    let count = state
        .iter()
        .flat_map(|column| column.iter())
        .filter(|&&cell| cell < 0)
        .count();
    println!("overlapping inches {:?}", count);
    // Part 2: the claim whose id never entered the collision set.
    for row in rows.iter() {
        let (id, ..) = parse_claim(row);
        if !collisions.contains(&id) {
            println!("no collision: {}", id);
        }
    }
}
|
extern crate time;
use byteorder::{ReadBytesExt, WriteBytesExt, BigEndian};
use self::time::Timespec;
use std::io::prelude::*;
use Result;
use types::{Type, FromSql, ToSql, IsNull, SessionInfo};
const USEC_PER_SEC: i64 = 1_000_000;
const NSEC_PER_USEC: i64 = 1_000;
// Number of seconds from 1970-01-01 to 2000-01-01
const TIME_SEC_CONVERSION: i64 = 946684800;
impl FromSql for Timespec {
    /// Decodes a Postgres binary TIMESTAMP/TIMESTAMPTZ value: a big-endian
    /// i64 count of microseconds since 2000-01-01.
    fn from_sql<R: Read>(_: &Type, raw: &mut R, _: &SessionInfo) -> Result<Timespec> {
        let t = try!(raw.read_i64::<BigEndian>());
        // Shift the epoch from 2000-01-01 (Postgres) to 1970-01-01 (Unix).
        let mut sec = t / USEC_PER_SEC + TIME_SEC_CONVERSION;
        let mut usec = t % USEC_PER_SEC;
        // Rust's `%` keeps the dividend's sign, so pre-2000 timestamps leave
        // a negative remainder; normalize to 0 <= usec < USEC_PER_SEC.
        if usec < 0 {
            sec -= 1;
            usec = USEC_PER_SEC + usec;
        }
        Ok(Timespec::new(sec, (usec * NSEC_PER_USEC) as i32))
    }
    accepts!(Type::Timestamp, Type::Timestamptz);
}
impl ToSql for Timespec {
    /// Encodes as a Postgres binary TIMESTAMP/TIMESTAMPTZ: big-endian i64
    /// microseconds since 2000-01-01. Sub-microsecond precision is truncated
    /// by the integer division `nsec / NSEC_PER_USEC`.
    fn to_sql<W: Write + ?Sized>(&self,
                                 _: &Type,
                                 mut w: &mut W,
                                 _: &SessionInfo)
                                 -> Result<IsNull> {
        let t = (self.sec - TIME_SEC_CONVERSION) * USEC_PER_SEC + self.nsec as i64 / NSEC_PER_USEC;
        try!(w.write_i64::<BigEndian>(t));
        Ok(IsNull::No)
    }
    accepts!(Type::Timestamp, Type::Timestamptz);
    to_sql_checked!();
}
|
#[doc = "Reader of register ADV_ACCADDR_H"]
pub type R = crate::R<u32, super::ADV_ACCADDR_H>;
#[doc = "Writer for register ADV_ACCADDR_H"]
pub type W = crate::W<u32, super::ADV_ACCADDR_H>;
#[doc = "Register ADV_ACCADDR_H `reset()`'s with value 0x8e89"]
impl crate::ResetValue for super::ADV_ACCADDR_H {
    type Type = u32;
    // Hardware reset value (0x8e89: upper half of the BLE advertising
    // access address per this register's doc string above).
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x8e89
    }
}
#[doc = "Reader of field `ADV_ACCADDR_H`"]
pub type ADV_ACCADDR_H_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `ADV_ACCADDR_H`"]
pub struct ADV_ACCADDR_H_W<'a> {
w: &'a mut W,
}
impl<'a> ADV_ACCADDR_H_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Field occupies bits 0:15 of the register word.
        self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:15 - higher 16 bit of ADV packet access code"]
    #[inline(always)]
    pub fn adv_accaddr_h(&self) -> ADV_ACCADDR_H_R {
        // Extract the low 16 bits of the register word.
        ADV_ACCADDR_H_R::new((self.bits & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - higher 16 bit of ADV packet access code"]
    #[inline(always)]
    pub fn adv_accaddr_h(&mut self) -> ADV_ACCADDR_H_W {
        // Returns a write proxy borrowing this writer.
        ADV_ACCADDR_H_W { w: self }
    }
}
|
use std::{time, thread};
/// Prints "hello world" once every five seconds, four times in total, then
/// exits with status 42.
fn main() {
    let mut ticks = 0;
    while ticks <= 3 {
        println!("hello world");
        thread::sleep(time::Duration::from_secs(5));
        ticks += 1;
    }
    std::process::exit(42);
}
|
#![feature(plugin)]
#![plugin(rocket_codegen)]
extern crate todomvc;
extern crate rocket;
extern crate rocket_contrib;
extern crate core;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate diesel;
extern crate dotenv;
#[macro_use] extern crate dotenv_codegen;
extern crate todomvc_models;
use rocket_contrib::{Template, Json};
use rocket::response::NamedFile;
use rocket::request::{self, FromRequest};
use rocket::{Request, State, Outcome};
use rocket::http::{Status};
use std::ops::Deref;
use std::path::Path;
use dotenv::dotenv;
use std::env;
// use std::sync::atomic::AtomicUsize;
// use core::sync::atomic::Ordering;
use diesel::pg::PgConnection;
use diesel::r2d2::{ConnectionManager, Pool, PooledConnection};
use self::todomvc::*;
use self::diesel::prelude::*;
use todomvc_models::*;
// An alias to the type for a pool of Diesel PostgreSQL connections.
type PgPool = Pool<ConnectionManager<PgConnection>>;
static DATABASE_URL: &'static str = dotenv!("DATABASE_URL");
/// Builds the r2d2 Postgres connection pool from the compile-time
/// `DATABASE_URL` (embedded via `dotenv_codegen`). Panics with "db pool" if
/// the pool cannot be created.
fn init_pool() -> PgPool {
    let manager = ConnectionManager::<PgConnection>::new(DATABASE_URL);
    Pool::new(manager).expect("db pool")
}
pub struct DbConn(pub PooledConnection<ConnectionManager<PgConnection>>);
/// Attempts to retrieve a single connection from the managed database pool. If
/// no pool is currently managed, fails with an `InternalServerError` status. If
/// no connections are available, fails with a `ServiceUnavailable` status.
impl<'a, 'r> FromRequest<'a, 'r> for DbConn {
    type Error = ();
    // Request guard: fetches the managed pool from Rocket state (the `?`
    // forwards a failure outcome if no pool is managed), then checks out a
    // connection, failing with 503 when none is available.
    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
        let pool = request.guard::<State<PgPool>>()?;
        match pool.get() {
            Ok(conn) => Outcome::Success(DbConn(conn)),
            Err(_) => Outcome::Failure((Status::ServiceUnavailable, ()))
        }
    }
}
// For the convenience of using an &DbConn as an &PgConnection.
impl Deref for DbConn {
    // Dereferencing a DbConn yields the pooled Postgres connection, so it can
    // be passed directly to Diesel query methods (e.g. `&*conn`).
    type Target = PgConnection;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[get("/")]
/// Serves the landing page; the template takes no context data, so an empty
/// string is passed as the (serializable) context.
fn index() -> Template {
    Template::render("index", &"")
}
#[get("/counter.json")]
fn get_counter(conn: DbConn) -> QueryResult<Json<Vec<Item>>> {
use todomvc::schema::items::dsl::*;
items
.limit(5)
.load::<Item>(&*conn)
.map(|item| Json(item))
}
#[get("/todomvc_client.js")]
/// Serves the compiled client-side JS glue, or 404s if the build artifact is missing.
fn app_js() -> Option<NamedFile> {
    let artifact =
        Path::new("todomvc_client/target/wasm32-unknown-emscripten/debug/todomvc_client.js");
    NamedFile::open(artifact).ok()
}
#[get("/todomvc_client.wasm")]
/// Serves the compiled wasm binary, or 404s if the build artifact is missing.
fn app_wasm() -> Option<NamedFile> {
    let artifact =
        Path::new("todomvc_client/target/wasm32-unknown-emscripten/debug/todomvc_client.wasm");
    NamedFile::open(artifact).ok()
}
fn main() {
rocket::ignite()
.mount("/", routes![index, app_js, app_wasm, get_counter])
.attach(Template::fairing())
.manage(init_pool())
.launch();
} |
mod text;
mod reexport;
mod summary;
mod mark;
mod document;
mod implementation;
pub use text::*;
pub use reexport::*;
pub use summary::*;
pub use mark::*;
pub use document::*;
pub use implementation::*;
pub type Code = String;
#[derive(Debug)]
pub struct SimpleItem {
    // Flattened declaration text of the item (see `parse_generic_code`).
    declaration: Code,
    // Marks parsed alongside the declaration (presumably stability/version
    // markers — see `parse_marks_forward`).
    mark: Mark,
    // Documentation sections parsed from the item's docblock.
    description: Vec<Section>,
}
/// Access to an item's parsed documentation sections.
pub trait Described {
    fn description(&self) -> &[Section];
}
/// Access to the re-export entries listed by a module-like item.
pub trait Exportable {
    fn re_exports(&self) -> &[ExportItem];
}
/// Access to the summary sections listing an item's sub-items.
pub trait ItemContainer {
    fn sub_item(&self) -> &[SummarySection];
}
/// Access to an item's declaration code text.
pub trait Declared {
    fn declaration(&self) -> &Code;
}
/// Access to the implementations attached to an item.
pub trait Implementable {
    /// Explicit trait implementations.
    fn trait_impls(&self) -> &[Implementation];
    /// Auto-trait implementations.
    fn auto_impls(&self) -> &[Implementation];
    /// Blanket implementations.
    fn blanket_impls(&self) -> &[Implementation];
}
/// Access to an item's parsed marks.
pub trait Marked {
    fn mark(&self) -> &Mark;
}
// SimpleItem participates only in the Described and Marked traits; both are
// plain field accessors.
impl Described for SimpleItem {
    fn description(&self) -> &[Section] {
        &self.description
    }
}
impl Marked for SimpleItem {
    fn mark(&self) -> &Mark {
        &self.mark
    }
}
use kuchiki::{NodeRef, NodeData, iter::NodeEdge};
use html5ever::local_name;
/// Parses a "simple" item — declaration, then optional marks, then an
/// optional docblock — walking forward through `head`'s siblings.
///
/// Returns the first unconsumed sibling (if any) together with the parsed
/// item; returns `None` only when a docblock is present but fails to parse.
pub(crate) fn parse_simple_item_forward(head: NodeRef) -> Option<(Option<NodeRef>, SimpleItem)> {
    // The node we were handed holds the declaration text itself.
    let declaration = parse_generic_code(&head);
    // Advance the cursor past the declaration (see `skip_uninformative`).
    let head = head.next_sibling().and_then(skip_uninformative);
    let (head, mark) = if let Some(head) = head {
        parse_marks_forward(head)
    } else {
        // No siblings left: no marks to parse.
        (None, Mark::default())
    };
    let head = head.and_then(skip_uninformative);
    let (head, description) = if let Some(head) = head {
        // `?` propagates a docblock parse failure as `None` for the whole item.
        (head.next_sibling(), parse_docblock(&head)?)
    } else {
        (None, Vec::new())
    };
    Some((head, SimpleItem {
        declaration,
        mark,
        description
    }))
}
/// Flattens the text content of a declaration node into plain code text,
/// converting `<br>` elements and `span`s whose class contains
/// `fmt-newline` into literal newlines.
pub(crate) fn parse_generic_code(pre: &NodeRef) -> Code {
    let mut code = Code::new();
    for edge in pre.traverse() {
        // Only the opening edge of each node matters; text is collected once.
        if let NodeEdge::Start(node) = edge {
            match node.data() {
                NodeData::Text(text) => code.push_str(&text.borrow()),
                NodeData::Element(element) => match element.name.local {
                    local_name!("br") => code.push('\n'),
                    local_name!("span") => {
                        let is_fmt_newline = element
                            .attributes
                            .borrow()
                            .get("class")
                            .map_or(false, |class| class.contains("fmt-newline"));
                        if is_fmt_newline {
                            code.push('\n');
                        }
                    }
                    _ => {}
                },
                _ => {}
            }
        }
    }
    code
}
pub(crate) fn skip_uninformative(head: NodeRef) -> Option<NodeRef> {
head.inclusive_following_siblings().find(|n| {
if let Some(element) = n.as_element() {
if let Some(class) = element.attributes.borrow().get("class") {
class.contains("toggle")
} else {
false
}
} else {
false
}
})
} |
use log::error;
use std::{ffi::c_void, panic};
use wayland_sys::{
ffi_dispatch,
server::{wl_display, wl_event_source},
};
use wlroots_sys::WAYLAND_SERVER_HANDLE;
type Callback = extern "C" fn(*mut c_void) -> i32;
/// Unpack a Rust closure, extracting a `void*` pointer to the data and a
/// trampoline function which can be used to invoke it.
///
/// # Safety
///
/// It is the user's responsibility to ensure the closure outlives the returned
/// `void*` pointer, and that the boxed closure is freed exactly once, after
/// the trampoline can no longer be invoked.
///
/// Calling the trampoline function with anything except the `void*` pointer
/// will result in *Undefined Behaviour*.
///
/// The closure should guarantee that it never panics, seeing as panicking
/// across the FFI barrier is *Undefined Behaviour*. You may find
/// `std::panic::catch_unwind()` useful.
unsafe fn unpack_closure<F>(closure: *mut F) -> (*mut c_void, Callback)
where
    F: FnMut(),
{
    extern "C" fn trampoline<F>(data: *mut c_void) -> i32
    where
        F: FnMut(),
    {
        let result = panic::catch_unwind(move || {
            // Borrow the closure rather than re-boxing it: the previous
            // `Box::from_raw(data)` took ownership and dropped the closure
            // after the first invocation, causing a use-after-free if the
            // timer fired again and a double free when the owner's drop
            // handler reclaimed the same allocation.
            let closure = unsafe { &mut *(data as *mut F) };
            closure();
        });
        if let Err(error) = result {
            error!("Error while invoking timer callback: {:?}", error);
        }
        0
    }
    (closure as *mut F as *mut c_void, trampoline::<F>)
}
/// A wrapper around wl_event_loop timers to call a handler after a
/// specified timeout.
///
/// Field 0 is the wayland event-source handle (null-checked before removal
/// on drop); field 1 holds a one-shot cleanup closure that frees the boxed
/// handler, taken and invoked exactly once on drop.
pub(crate) struct WlTimer(*mut wl_event_source, Option<Box<dyn FnOnce()>>);
impl WlTimer {
    /// Registers a timer on `display`'s event loop that invokes `handler`
    /// after `timeout_ms` milliseconds. Returns `Err(())` if the event
    /// source cannot be created or the timeout cannot be armed.
    ///
    /// # Safety
    ///
    /// `display` must be a valid wayland display pointer.
    pub(crate) unsafe fn init<F>(
        display: *mut wl_display,
        timeout_ms: u32,
        handler: F,
    ) -> Result<WlTimer, ()>
    where
        F: FnMut(),
        F: 'static,
    {
        // Move the handler to the heap; `handler_ptr` is what the C side
        // receives as its callback data.
        let handler_ptr = Box::into_raw(Box::new(handler));
        // Deferred cleanup: re-owns and drops the boxed handler. Stored in
        // the WlTimer on success, or run immediately on the failure paths.
        let drop_handler = Box::new(move || {
            Box::from_raw(handler_ptr);
        });
        let (closure, callback) = unpack_closure(handler_ptr);
        let event_loop = ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_display_get_event_loop, display);
        let timer = ffi_dispatch!(
            WAYLAND_SERVER_HANDLE,
            wl_event_loop_add_timer,
            event_loop,
            callback,
            closure
        );
        if timer.is_null() {
            // The event source was never created: free the handler now.
            drop_handler();
            return Err(());
        }
        let success = ffi_dispatch!(
            WAYLAND_SERVER_HANDLE,
            wl_event_source_timer_update,
            timer,
            timeout_ms as i32
        );
        if success < 0 {
            // NOTE(review): the handler is freed here but the added event
            // source is not removed — confirm the event loop reclaims it.
            drop_handler();
            return Err(());
        }
        Ok(WlTimer(timer, Some(drop_handler)))
    }
}
impl Drop for WlTimer {
    fn drop(&mut self) {
        // Deregister the event source first so the callback can no longer
        // fire, then run the stored cleanup closure to free the handler.
        if !self.0.is_null() {
            unsafe {
                ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_event_source_remove, self.0);
            }
        }
        if let Some(cleanup) = self.1.take() {
            cleanup();
        }
    }
}
|
#[cfg(feature = "extern")]
pub mod ex;
#[cfg(feature = "python")]
pub mod py;
mod test;
use std::f64::consts::PI;
use crate::physics::
{
PLANCK_CONSTANT,
BOLTZMANN_CONSTANT
};
use crate::physics::single_chain::ZERO;
/// The structure of the thermodynamics of the FJC model in the modified canonical ensemble approximated using an asymptotic approach valid for weak potentials.
// Standard derives: all fields are plain numbers, so the model is cheap to
// copy and can be compared and debugged directly.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct FJC
{
    /// The mass of each hinge in the chain in units of kg/mol.
    pub hinge_mass: f64,
    /// The length of each link in the chain in units of nm.
    pub link_length: f64,
    /// The number of links in the chain.
    pub number_of_links: u8
}
/// The expected end-to-end length as a function of the applied potential distance, potential stiffness, and temperature, parameterized by the number of links and link length.
pub fn end_to_end_length(number_of_links: &u8, link_length: &f64, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
{
    // Nondimensional force implied by the harmonic potential at the applied
    // distance: stiffness * distance * link length / kT.
    let nondimensional_force = potential_stiffness*potential_distance*link_length/BOLTZMANN_CONSTANT/temperature;
    // Langevin-function response, coth(x) - 1/x, times a first-order
    // correction in the potential stiffness (weak-potential asymptotics).
    (*number_of_links as f64)*link_length*(1.0/nondimensional_force.tanh() - 1.0/nondimensional_force)*(1.0 - potential_stiffness*(*number_of_links as f64)*link_length.powi(2)/BOLTZMANN_CONSTANT/temperature*(nondimensional_force.powi(-2) - (nondimensional_force.sinh()).powi(-2)))
}
/// The expected end-to-end length per link as a function of the applied potential distance, potential stiffness, and temperature, parameterized by the number of links and link length.
pub fn end_to_end_length_per_link(number_of_links: &u8, link_length: &f64, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
{
    let nondimensional_force = potential_stiffness*potential_distance*link_length/BOLTZMANN_CONSTANT/temperature;
    // Same expression as `end_to_end_length` without the leading factor of
    // the number of links.
    link_length*(1.0/nondimensional_force.tanh() - 1.0/nondimensional_force)*(1.0 - potential_stiffness*(*number_of_links as f64)*link_length.powi(2)/BOLTZMANN_CONSTANT/temperature*(nondimensional_force.powi(-2) - (nondimensional_force.sinh()).powi(-2)))
}
/// The expected nondimensional end-to-end length as a function of the applied nondimensional potential distance and nondimensional potential stiffness, parameterized by the number of links.
pub fn nondimensional_end_to_end_length(number_of_links: &u8, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64) -> f64
{
    let num_links = *number_of_links as f64;
    // Nondimensional force implied by the potential at the applied distance.
    let nondimensional_force = num_links*nondimensional_potential_stiffness*nondimensional_potential_distance;
    // Langevin function, coth(x) - 1/x.
    let langevin_term = 1.0/nondimensional_force.tanh() - 1.0/nondimensional_force;
    // First-order weak-potential correction factor.
    let correction = 1.0 - num_links*nondimensional_potential_stiffness*(nondimensional_force.powi(-2) - (nondimensional_force.sinh()).powi(-2));
    num_links*langevin_term*correction
}
/// The expected nondimensional end-to-end length per link as a function of the applied nondimensional potential distance and nondimensional potential stiffness, parameterized by the number of links.
pub fn nondimensional_end_to_end_length_per_link(number_of_links: &u8, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64) -> f64
{
    let num_links = *number_of_links as f64;
    // Nondimensional force implied by the potential at the applied distance.
    let nondimensional_force = num_links*nondimensional_potential_stiffness*nondimensional_potential_distance;
    // Langevin function, coth(x) - 1/x.
    let langevin_term = 1.0/nondimensional_force.tanh() - 1.0/nondimensional_force;
    // First-order weak-potential correction factor.
    let correction = 1.0 - num_links*nondimensional_potential_stiffness*(nondimensional_force.powi(-2) - (nondimensional_force.sinh()).powi(-2));
    langevin_term*correction
}
/// The expected force as a function of the applied potential distance and potential stiffness.
pub fn force(potential_distance: &f64, potential_stiffness: &f64) -> f64
{
    // For a harmonic potential the mean force is stiffness times distance.
    (*potential_stiffness)*(*potential_distance)
}
/// The expected nondimensional force as a function of the applied nondimensional potential distance and nondimensional potential stiffness, parameterized by the number of links.
pub fn nondimensional_force(number_of_links: &u8, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64) -> f64
{
    let num_links = *number_of_links as f64;
    num_links*nondimensional_potential_stiffness*nondimensional_potential_distance
}
/// The Gibbs free energy as a function of the applied potential distance, potential stiffness, and temperature, parameterized by the number of links, link length, and hinge mass.
pub fn gibbs_free_energy(number_of_links: &u8, link_length: &f64, hinge_mass: &f64, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
{
    let nondimensional_force = potential_stiffness*potential_distance*link_length/BOLTZMANN_CONSTANT/temperature;
    // Three terms: entropic ln(sinh(x)/x) contribution, potential energy at
    // the expected extension, and the hinge-mass normalization term.
    -(*number_of_links as f64)*BOLTZMANN_CONSTANT*temperature*(nondimensional_force.sinh()/nondimensional_force).ln() + 0.5*potential_stiffness*((*number_of_links as f64)*link_length).powi(2)*(1.0/nondimensional_force.tanh() - 1.0/nondimensional_force).powi(2) - (*number_of_links as f64)*BOLTZMANN_CONSTANT*temperature*(8.0*PI.powi(2)*hinge_mass*link_length.powi(2)*BOLTZMANN_CONSTANT*temperature/PLANCK_CONSTANT.powi(2)).ln()
}
/// The Gibbs free energy per link as a function of the applied potential distance, potential stiffness, and temperature, parameterized by the number of links, link length, and hinge mass.
pub fn gibbs_free_energy_per_link(number_of_links: &u8, link_length: &f64, hinge_mass: &f64, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
{
    let nondimensional_force = potential_stiffness*potential_distance*link_length/BOLTZMANN_CONSTANT/temperature;
    -BOLTZMANN_CONSTANT*temperature*(nondimensional_force.sinh()/nondimensional_force).ln() + 0.5*potential_stiffness*((*number_of_links as f64)*link_length).powi(2)/(*number_of_links as f64)*(1.0/nondimensional_force.tanh() - 1.0/nondimensional_force).powi(2) - BOLTZMANN_CONSTANT*temperature*(8.0*PI.powi(2)*hinge_mass*link_length.powi(2)*BOLTZMANN_CONSTANT*temperature/PLANCK_CONSTANT.powi(2)).ln()
}
/// The relative Gibbs free energy as a function of the applied potential distance, potential stiffness, and temperature, parameterized by the number of links and link length.
pub fn relative_gibbs_free_energy(number_of_links: &u8, link_length: &f64, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
{
    // The hinge mass (here 1.0) only enters the normalization term, which is
    // identical in both evaluations and therefore cancels in the difference.
    gibbs_free_energy(number_of_links, link_length, &1.0, potential_distance, potential_stiffness, temperature) - gibbs_free_energy(number_of_links, link_length, &1.0, &(ZERO*(*number_of_links as f64)*link_length), potential_stiffness, temperature)
}
/// The relative Gibbs free energy per link as a function of the applied potential distance, potential stiffness, and temperature, parameterized by the number of links and link length.
pub fn relative_gibbs_free_energy_per_link(number_of_links: &u8, link_length: &f64, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
{
    // As above, the hinge-mass term cancels between the two evaluations.
    gibbs_free_energy_per_link(number_of_links, link_length, &1.0, potential_distance, potential_stiffness, temperature) - gibbs_free_energy_per_link(number_of_links, link_length, &1.0, &(ZERO*(*number_of_links as f64)*link_length), potential_stiffness, temperature)
}
/// The nondimensional Gibbs free energy as a function of the applied nondimensional potential distance, nondimensional potential stiffness, and temperature, parameterized by the number of links, link length, and hinge mass.
pub fn nondimensional_gibbs_free_energy(number_of_links: &u8, link_length: &f64, hinge_mass: &f64, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64, temperature: &f64) -> f64
{
    let nondimensional_force = (*number_of_links as f64)*nondimensional_potential_stiffness*nondimensional_potential_distance;
    -(*number_of_links as f64)*(nondimensional_force.sinh()/nondimensional_force).ln() + 0.5*nondimensional_potential_stiffness*(*number_of_links as f64).powi(2)*(1.0/nondimensional_force.tanh() - 1.0/nondimensional_force).powi(2) - (*number_of_links as f64)*(8.0*PI.powi(2)*hinge_mass*link_length.powi(2)*BOLTZMANN_CONSTANT*temperature/PLANCK_CONSTANT.powi(2)).ln()
}
/// The nondimensional Gibbs free energy per link as a function of the applied nondimensional potential distance, nondimensional potential stiffness, and temperature, parameterized by the number of links, link length, and hinge mass.
pub fn nondimensional_gibbs_free_energy_per_link(number_of_links: &u8, link_length: &f64, hinge_mass: &f64, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64, temperature: &f64) -> f64
{
    let nondimensional_force = (*number_of_links as f64)*nondimensional_potential_stiffness*nondimensional_potential_distance;
    -(nondimensional_force.sinh()/nondimensional_force).ln() + 0.5*nondimensional_potential_stiffness*(*number_of_links as f64)*(1.0/nondimensional_force.tanh() - 1.0/nondimensional_force).powi(2) - (8.0*PI.powi(2)*hinge_mass*link_length.powi(2)*BOLTZMANN_CONSTANT*temperature/PLANCK_CONSTANT.powi(2)).ln()
}
/// The nondimensional relative Gibbs free energy as a function of the applied nondimensional potential distance and nondimensional potential stiffness, parameterized by the number of links.
pub fn nondimensional_relative_gibbs_free_energy(number_of_links: &u8, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64) -> f64
{
    // The link length, hinge mass, and temperature arguments (1.0, 1.0, 300 K)
    // only enter the normalization term, identical in both evaluations, so
    // they cancel in the difference.
    nondimensional_gibbs_free_energy(number_of_links, &1.0, &1.0, nondimensional_potential_distance, nondimensional_potential_stiffness, &300.0) - nondimensional_gibbs_free_energy(number_of_links, &1.0, &1.0, &ZERO, nondimensional_potential_stiffness, &300.0)
}
/// The nondimensional relative Gibbs free energy per link as a function of the applied nondimensional potential distance and nondimensional potential stiffness, parameterized by the number of links.
pub fn nondimensional_relative_gibbs_free_energy_per_link(number_of_links: &u8, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64) -> f64
{
    // As above, the placeholder arguments cancel in the difference.
    nondimensional_gibbs_free_energy_per_link(number_of_links, &1.0, &1.0, nondimensional_potential_distance, nondimensional_potential_stiffness, &300.0) - nondimensional_gibbs_free_energy_per_link(number_of_links, &1.0, &1.0, &ZERO, nondimensional_potential_stiffness, &300.0)
}
/// The implemented functionality of the thermodynamics of the FJC model in the modified canonical ensemble approximated using an asymptotic approach valid for weak potentials.
impl FJC
{
    // Every method below forwards to the module-level function of the same
    // name, supplying this model's stored parameters.
    /// Initializes and returns an instance of the thermodynamics of the FJC model in the modified canonical ensemble approximated using an asymptotic approach valid for weak potentials.
    pub fn init(number_of_links: u8, link_length: f64, hinge_mass: f64) -> Self
    {
        FJC
        {
            hinge_mass,
            link_length,
            number_of_links
        }
    }
    /// The expected end-to-end length as a function of the applied potential distance, potential stiffness, and temperature.
    pub fn end_to_end_length(&self, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
    {
        end_to_end_length(&self.number_of_links, &self.link_length, potential_distance, potential_stiffness, temperature)
    }
    /// The expected end-to-end length per link as a function of the applied potential distance, potential stiffness, and temperature.
    pub fn end_to_end_length_per_link(&self, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
    {
        end_to_end_length_per_link(&self.number_of_links, &self.link_length, potential_distance, potential_stiffness, temperature)
    }
    /// The expected nondimensional end-to-end length as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
    pub fn nondimensional_end_to_end_length(&self, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64) -> f64
    {
        nondimensional_end_to_end_length(&self.number_of_links, nondimensional_potential_distance, nondimensional_potential_stiffness)
    }
    /// The expected nondimensional end-to-end length per link as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
    pub fn nondimensional_end_to_end_length_per_link(&self, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64) -> f64
    {
        nondimensional_end_to_end_length_per_link(&self.number_of_links, nondimensional_potential_distance, nondimensional_potential_stiffness)
    }
    /// The expected force as a function of the applied potential distance and potential stiffness.
    pub fn force(&self, potential_distance: &f64, potential_stiffness: &f64) -> f64
    {
        force(potential_distance, potential_stiffness)
    }
    /// The expected nondimensional force as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
    pub fn nondimensional_force(&self, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64) -> f64
    {
        nondimensional_force(&self.number_of_links, nondimensional_potential_distance, nondimensional_potential_stiffness)
    }
    /// The Gibbs free energy as a function of the applied potential distance, potential stiffness, and temperature.
    pub fn gibbs_free_energy(&self, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
    {
        gibbs_free_energy(&self.number_of_links, &self.link_length, &self.hinge_mass, potential_distance, potential_stiffness, temperature)
    }
    /// The Gibbs free energy per link as a function of the applied potential distance, potential stiffness, and temperature.
    pub fn gibbs_free_energy_per_link(&self, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
    {
        gibbs_free_energy_per_link(&self.number_of_links, &self.link_length, &self.hinge_mass, potential_distance, potential_stiffness, temperature)
    }
    /// The relative Gibbs free energy as a function of the applied potential distance, potential stiffness, and temperature.
    pub fn relative_gibbs_free_energy(&self, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
    {
        relative_gibbs_free_energy(&self.number_of_links, &self.link_length, potential_distance, potential_stiffness, temperature)
    }
    /// The relative Gibbs free energy per link as a function of the applied potential distance, potential stiffness, and temperature.
    pub fn relative_gibbs_free_energy_per_link(&self, potential_distance: &f64, potential_stiffness: &f64, temperature: &f64) -> f64
    {
        relative_gibbs_free_energy_per_link(&self.number_of_links, &self.link_length, potential_distance, potential_stiffness, temperature)
    }
    /// The nondimensional Gibbs free energy as a function of the applied nondimensional potential distance, nondimensional potential stiffness, and temperature.
    pub fn nondimensional_gibbs_free_energy(&self, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64, temperature: &f64) -> f64
    {
        nondimensional_gibbs_free_energy(&self.number_of_links, &self.link_length, &self.hinge_mass, nondimensional_potential_distance, nondimensional_potential_stiffness, temperature)
    }
    /// The nondimensional Gibbs free energy per link as a function of the applied nondimensional potential distance, nondimensional potential stiffness, and temperature.
    pub fn nondimensional_gibbs_free_energy_per_link(&self, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64, temperature: &f64) -> f64
    {
        nondimensional_gibbs_free_energy_per_link(&self.number_of_links, &self.link_length, &self.hinge_mass, nondimensional_potential_distance, nondimensional_potential_stiffness, temperature)
    }
    /// The nondimensional relative Gibbs free energy as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
    pub fn nondimensional_relative_gibbs_free_energy(&self, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64) -> f64
    {
        nondimensional_relative_gibbs_free_energy(&self.number_of_links, nondimensional_potential_distance, nondimensional_potential_stiffness)
    }
    /// The nondimensional relative Gibbs free energy per link as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
    pub fn nondimensional_relative_gibbs_free_energy_per_link(&self, nondimensional_potential_distance: &f64, nondimensional_potential_stiffness: &f64) -> f64
    {
        nondimensional_relative_gibbs_free_energy_per_link(&self.number_of_links, nondimensional_potential_distance, nondimensional_potential_stiffness)
    }
}
|
// Copyright (C) 2020 Sebastian Dröge <sebastian@centricular.com>
//
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use super::HeaderName;
// Well-known RTSP header names as static constants (the set matches RTSP 2.0,
// RFC 7826 §18), constructed without runtime validation (`_unchecked`).
pub const ACCEPT: HeaderName = HeaderName::from_static_str_unchecked("Accept");
pub const ACCEPT_CREDENTIALS: HeaderName =
    HeaderName::from_static_str_unchecked("Accept-Credentials");
pub const ACCEPT_ENCODING: HeaderName = HeaderName::from_static_str_unchecked("Accept-Encoding");
pub const ACCEPT_LANGUAGE: HeaderName = HeaderName::from_static_str_unchecked("Accept-Language");
pub const ACCEPT_RANGES: HeaderName = HeaderName::from_static_str_unchecked("Accept-Ranges");
pub const ALLOW: HeaderName = HeaderName::from_static_str_unchecked("Allow");
pub const AUTHENTICATION_INFO: HeaderName =
    HeaderName::from_static_str_unchecked("Authentication-Info");
pub const AUTHORIZATION: HeaderName = HeaderName::from_static_str_unchecked("Authorization");
pub const BANDWIDTH: HeaderName = HeaderName::from_static_str_unchecked("Bandwidth");
pub const BLOCKSIZE: HeaderName = HeaderName::from_static_str_unchecked("Blocksize");
pub const CACHE_CONTROL: HeaderName = HeaderName::from_static_str_unchecked("Cache-Control");
pub const CONNECTION: HeaderName = HeaderName::from_static_str_unchecked("Connection");
pub const CONNECTION_CREDENTIALS: HeaderName =
    HeaderName::from_static_str_unchecked("Connection-Credentials");
pub const CONTENT_BASE: HeaderName = HeaderName::from_static_str_unchecked("Content-Base");
pub const CONTENT_ENCODING: HeaderName = HeaderName::from_static_str_unchecked("Content-Encoding");
pub const CONTENT_LANGUAGE: HeaderName = HeaderName::from_static_str_unchecked("Content-Language");
pub const CONTENT_LENGTH: HeaderName = HeaderName::from_static_str_unchecked("Content-Length");
pub const CONTENT_LOCATION: HeaderName = HeaderName::from_static_str_unchecked("Content-Location");
pub const CONTENT_TYPE: HeaderName = HeaderName::from_static_str_unchecked("Content-Type");
pub const CSEQ: HeaderName = HeaderName::from_static_str_unchecked("CSeq");
pub const DATE: HeaderName = HeaderName::from_static_str_unchecked("Date");
pub const EXPIRES: HeaderName = HeaderName::from_static_str_unchecked("Expires");
pub const FROM: HeaderName = HeaderName::from_static_str_unchecked("From");
pub const IF_MATCH: HeaderName = HeaderName::from_static_str_unchecked("If-Match");
pub const IF_MODIFIED_SINCE: HeaderName =
    HeaderName::from_static_str_unchecked("If-Modified-Since");
pub const IF_NONE_MATCH: HeaderName = HeaderName::from_static_str_unchecked("If-None-Match");
pub const LAST_MODIFIED: HeaderName = HeaderName::from_static_str_unchecked("Last-Modified");
pub const LOCATION: HeaderName = HeaderName::from_static_str_unchecked("Location");
pub const MEDIA_PROPERTIES: HeaderName = HeaderName::from_static_str_unchecked("Media-Properties");
pub const MEDIA_RANGE: HeaderName = HeaderName::from_static_str_unchecked("Media-Range");
pub const MTAG: HeaderName = HeaderName::from_static_str_unchecked("MTag");
pub const NOTIFY_REASON: HeaderName = HeaderName::from_static_str_unchecked("Notify-Reason");
pub const PIPELINED_REQUESTS: HeaderName =
    HeaderName::from_static_str_unchecked("Pipelined-Requests");
pub const PROXY_AUTHENTICATE: HeaderName =
    HeaderName::from_static_str_unchecked("Proxy-Authenticate");
pub const PROXY_AUTHENTICATION_INFO: HeaderName =
    HeaderName::from_static_str_unchecked("Proxy-Authentication-Info");
pub const PROXY_AUTHORIZATION: HeaderName =
    HeaderName::from_static_str_unchecked("Proxy-Authorization");
pub const PROXY_REQUIRE: HeaderName = HeaderName::from_static_str_unchecked("Proxy-Require");
pub const PROXY_SUPPORTED: HeaderName = HeaderName::from_static_str_unchecked("Proxy-Supported");
pub const PUBLIC: HeaderName = HeaderName::from_static_str_unchecked("Public");
pub const RANGE: HeaderName = HeaderName::from_static_str_unchecked("Range");
// Note: RTSP 2.0 spells this "Referrer" (unlike HTTP's "Referer").
pub const REFERRER: HeaderName = HeaderName::from_static_str_unchecked("Referrer");
pub const REQUEST_STATUS: HeaderName = HeaderName::from_static_str_unchecked("Request-Status");
pub const REQUIRE: HeaderName = HeaderName::from_static_str_unchecked("Require");
pub const RETRY_AFTER: HeaderName = HeaderName::from_static_str_unchecked("Retry-After");
pub const RTP_INFO: HeaderName = HeaderName::from_static_str_unchecked("RTP-Info");
pub const SCALE: HeaderName = HeaderName::from_static_str_unchecked("Scale");
pub const SEEK_STYLE: HeaderName = HeaderName::from_static_str_unchecked("Seek-Style");
pub const SERVER: HeaderName = HeaderName::from_static_str_unchecked("Server");
pub const SESSION: HeaderName = HeaderName::from_static_str_unchecked("Session");
pub const SPEED: HeaderName = HeaderName::from_static_str_unchecked("Speed");
pub const SUPPORTED: HeaderName = HeaderName::from_static_str_unchecked("Supported");
pub const TERMINATE_REASON: HeaderName = HeaderName::from_static_str_unchecked("Terminate-Reason");
pub const TIMESTAMP: HeaderName = HeaderName::from_static_str_unchecked("Timestamp");
pub const TRANSPORT: HeaderName = HeaderName::from_static_str_unchecked("Transport");
pub const UNSUPPORTED: HeaderName = HeaderName::from_static_str_unchecked("Unsupported");
pub const USER_AGENT: HeaderName = HeaderName::from_static_str_unchecked("User-Agent");
pub const VIA: HeaderName = HeaderName::from_static_str_unchecked("Via");
pub const WWW_AUTHENTICATE: HeaderName = HeaderName::from_static_str_unchecked("WWW-Authenticate");
|
// #![windows_subsystem = "windows"]
extern crate find_folder;
extern crate piston_window;
extern crate rand;
extern crate tetris_lib;
use piston_window::*;
use rand::{thread_rng, Rng};
use tetris_lib::game::*;
fn main() {
    // Window, asset, and font setup.
    let mut window: PistonWindow = WindowSettings::new("tetris", [500, 500])
        .exit_on_esc(true)
        .build()
        .unwrap();
    let assets = find_folder::Search::ParentsThenKids(3, 3)
        .for_folder("assets")
        .unwrap();
    let mut glyphs = window
        .load_font(assets.join("FiraSans-Regular.ttf"))
        .unwrap();
    let mut rng = thread_rng();
    // The game pulls random pieces through this closure.
    let rand_gen = Box::new(move || rng.gen::<u32>());
    let mut game = Game::new(rand_gen);
    // One flag per input action; set on key press, consumed each game tick.
    let mut button = [false; 7];
    let mut frames = 0;
    // window.set_lazy(true);
    while let Some(e) = window.next() {
        if let Some(_) = e.render_args() {
            const WHITE: [f32; 4] = [1.0, 1.0, 1.0, 1.0];
            const BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0];
            const GRAY: [f32; 4] = [0.3, 0.3, 0.3, 1.0];
            window.draw_2d(&e, |c, g, device| {
                // Clear the screen.
                clear(WHITE, g);
                // Playfield frame.
                let c2 = c.trans(173.0, 98.0);
                let rect = [0.0, 0.0, 154.0, 304.0];
                rectangle(WHITE, rect, c2.transform, g);
                Rectangle::new_border(BLACK, 2.0).draw(rect, &c2.draw_state, c2.transform, g);
                // hold
                // Note: throughout, `x` is the vertical offset and `y` the
                // horizontal one; `trans()` takes (horizontal, vertical).
                for (i, line) in game.rend_hold().iter().enumerate() {
                    for (j, block) in line.iter().enumerate() {
                        let square = rectangle::square(0.0, 0.0, 15.0);
                        let (x, y) = (130 + (i as u32) * 15, 100 + (j as u32) * 15);
                        let transform = c.transform.trans(y as f64, x as f64);
                        // Filled hold blocks are grayed out while hold is unavailable.
                        let color = if !block.is_filled() || game.can_use_hold() {
                            block.get_color().to_rgb()
                        } else {
                            GRAY
                        };
                        rectangle(color, square, transform, g);
                    }
                }
                // field
                for (i, line) in game.rend_field().iter().enumerate() {
                    for (j, block) in line.iter().enumerate() {
                        let square = rectangle::square(0.0, 0.0, 15.0);
                        let (x, y) = (100 + (i as u32) * 15, 175 + (j as u32) * 15);
                        let transform = c.transform.trans(y as f64, x as f64);
                        let mut color = block.get_color().to_rgb();
                        // Fade out blocks that are mid line-clear.
                        if block.is_clearing() {
                            color[3] = 1.0 - game.get_interval_ratio();
                        }
                        rectangle(color, square, transform, g);
                    }
                }
                // nexts
                // Three upcoming pieces; the nearest (k == 0) is drawn larger.
                for k in 0..3 {
                    let size = if k == 0 { 10 } else { 8 };
                    for (i, line) in game.rend_next(2 - k).iter().enumerate() {
                        for (j, block) in line.iter().enumerate() {
                            let square = rectangle::square(0.0, 0.0, size as f64);
                            let k = k as u32;
                            let (x, y) =
                                ((120 + k * 42) + (i as u32) * size, 345 + (j as u32) * size);
                            let transform = c.transform.trans(y as f64, x as f64);
                            rectangle(block.get_color().to_rgb(), square, transform, g);
                        }
                    }
                }
                // text labels
                let transform = c.transform.trans(80.0, 100.0);
                text::Text::new_color(BLACK, 32)
                    .draw("Hold", &mut glyphs, &c.draw_state, transform, g)
                    .unwrap();
                let transform = c.transform.trans(350.0, 100.0);
                text::Text::new_color(BLACK, 32)
                    .draw("Next", &mut glyphs, &c.draw_state, transform, g)
                    .unwrap();
                let transform = c.transform.trans(350.0, 270.0);
                text::Text::new_color(BLACK, 24)
                    .draw(
                        &format!("Score: {}", game.get_score()),
                        &mut glyphs,
                        &c.draw_state,
                        transform,
                        g,
                    )
                    .unwrap();
                let transform = c.transform.trans(350.0, 294.0);
                text::Text::new_color(BLACK, 24)
                    .draw(
                        &format!("Lines: {}", game.get_clearlines()),
                        &mut glyphs,
                        &c.draw_state,
                        transform,
                        g,
                    )
                    .unwrap();
                // Flush glyph rendering commands to the device.
                glyphs.factory.encoder.flush(device);
            });
        }
        if let Some(btn) = e.press_args() {
            // Map keys to action flags; Space and Z share slot 0.
            match btn {
                Button::Keyboard(Key::Space) => button[0] = true,
                Button::Keyboard(Key::Z) => button[0] = true,
                Button::Keyboard(Key::X) => button[1] = true,
                Button::Keyboard(Key::Up) => button[2] = true,
                Button::Keyboard(Key::Down) => button[3] = true,
                Button::Keyboard(Key::Right) => button[4] = true,
                Button::Keyboard(Key::Left) => button[5] = true,
                Button::Keyboard(Key::LShift) => button[6] = true,
                _ => (),
            }
        }
        e.update(|u| {
            frames += 1;
            // Throttle ticks toward a fixed rate: skip update events when the
            // measured dt implies more than 60 updates per second.
            if frames % std::cmp::max((1.0 / (u.dt * 60.0)) as u32, 1) != 0 {
                return;
            }
            if !game.is_gameover() {
                game.tick(button);
            }
            // Consume the buffered inputs for this tick.
            button = [false; 7];
        });
    }
}
|
use crate::config;
use crate::state;
use futures::lock;
use std::sync;
// Pixel dimensions of the ship sprite ("ship.png"); used to center the texture
// and to offset the name label and energy bar around each ship.
const SHIP_TEXTURE_WIDTH: f64 = 36.0;
const SHIP_TEXTURE_HEIGHT: f64 = 36.0;
// Pixel dimensions of the energy bar drawn above each ship; its drawn width is
// scaled by ship.energy / config::ENERGY_MAX_LEVEL.
const ENERGY_BAR_WIDTH: f64 = 40.0;
const ENERGY_BAR_HEIGHT: f64 = 6.0;
/// Opens the game window and renders the shared simulation `state`
/// until the window is closed (Esc also exits).
///
/// Each frame snapshots the state under the async mutex, then draws
/// every ship (name label, energy bar, rotated sprite) with toroidal
/// edge wrapping, plus a static banner text.
///
/// # Errors
/// Fails if the window, font, or ship texture cannot be created, or if
/// any per-frame draw call fails.
pub async fn run(state: sync::Arc<lock::Mutex<state::State>>) -> Result<(), failure::Error> {
    let mut window: piston_window::PistonWindow = piston_window::WindowSettings::new(
        concat!("shipthing ", env!("CARGO_PKG_VERSION")),
        [config::WORLD_WIDTH, config::WORLD_HEIGHT],
    )
    .exit_on_esc(true)
    .resizable(false)
    .build()
    .map_err(|e| failure::err_msg(format!("{}", e)))?;
    // Locate the assets directory relative to the executable/source tree.
    let assets = find_folder::Search::ParentsThenKids(3, 3).for_folder("assets")?;
    let mut glyphs = window.load_font(assets.join("FiraSans-Regular.ttf"))?;
    // Green 24pt banner text; white 10pt ship name labels.
    let main_text_style = piston_window::text::Text::new_color([0.0, 1.0, 0.0, 1.0], 24);
    let ship_text_style = piston_window::text::Text::new_color([1.0, 1.0, 1.0, 1.0], 10);
    let mut texture_context = piston_window::TextureContext {
        factory: window.factory.clone(),
        encoder: window.factory.create_command_buffer().into(),
    };
    let ship_texture = piston_window::Texture::from_path(
        &mut texture_context,
        assets.join("ship.png"),
        piston_window::Flip::None,
        &piston_window::TextureSettings::new(),
    )
    .map_err(failure::err_msg)?;
    while let Some(e) = window.next() {
        log::trace!("event: {:?}", e);
        // Create a clone so we don't hold the lock while rendering
        let state = state.lock().await.clone();
        if let Some(result) =
            window.draw_2d::<_, _, Result<(), failure::Error>>(&e, |c, g, device| {
                use piston_window::character::CharacterCache;
                use piston_window::Transformed;
                piston_window::clear([0.0, 0.0, 0.0, 1.0], g);
                for ship in state.iter_ships() {
                    let (x, y) = ship.position;
                    // Draws one copy of the ship (label + energy bar + sprite)
                    // at the given position; reused by wrapping_draw below.
                    let mut draw_ship = |x, y| -> Result<(), failure::Error> {
                        // Centre the name label horizontally above the sprite.
                        let width = glyphs.width(ship_text_style.font_size, &ship.name)?;
                        ship_text_style.draw(
                            &ship.name,
                            &mut glyphs,
                            &c.draw_state,
                            c.transform.trans(x - width / 2.0, y - SHIP_TEXTURE_HEIGHT),
                            g,
                        )?;
                        // Energy bar width scales with the ship's current energy.
                        piston_window::rectangle(
                            [1.0, 1.0, 1.0, 1.0],
                            [
                                -ENERGY_BAR_WIDTH / 2.0,
                                -ENERGY_BAR_HEIGHT,
                                ENERGY_BAR_WIDTH * ship.energy / config::ENERGY_MAX_LEVEL,
                                ENERGY_BAR_HEIGHT,
                            ],
                            c.transform
                                .trans(x, y - SHIP_TEXTURE_HEIGHT / 2.0 - ENERGY_BAR_HEIGHT),
                            g,
                        );
                        // Rotate about (x, y), then shift so the texture is
                        // centred on that point.
                        piston_window::image(
                            &ship_texture,
                            c.transform
                                .trans(x, y)
                                .rot_rad(ship.direction)
                                .trans(-SHIP_TEXTURE_WIDTH / 2.0, -SHIP_TEXTURE_HEIGHT / 2.0),
                            g,
                        );
                        Ok(())
                    };
                    // Repeat the draw on the opposite edge(s) when the ship
                    // overlaps the world border (the world is a torus).
                    wrapping_draw(
                        x,
                        y,
                        SHIP_TEXTURE_WIDTH,
                        SHIP_TEXTURE_HEIGHT,
                        config::WORLD_WIDTH as f64,
                        config::WORLD_HEIGHT as f64,
                        |x, y| draw_ship(x, y),
                    )?;
                }
                main_text_style.draw(
                    "Hello world!",
                    &mut glyphs,
                    &c.draw_state,
                    c.transform.trans(0.0, 24.0),
                    g,
                )?;
                // Update glyphs before rendering.
                glyphs.factory.encoder.flush(device);
                Ok(())
            })
        {
            result?;
        }
    }
    Ok(())
}
/// Invokes `inner_draw` at `(x, y)` and additionally at every wrapped
/// position needed so that a sprite of `width` x `height` overlapping an
/// edge of the `view_width` x `view_height` toroidal view also appears
/// on the opposite edge(s). Up to nine draws: the centre, four edges,
/// and four corners.
fn wrapping_draw(
    x: f64,
    y: f64,
    width: f64,
    height: f64,
    view_width: f64,
    view_height: f64,
    mut inner_draw: impl FnMut(f64, f64) -> Result<(), failure::Error>,
) -> Result<(), failure::Error> {
    // Which view edges the sprite currently overlaps.
    let near_left = x < width;
    let near_right = x > view_width - width;
    let near_top = y < height;
    let near_bottom = y > view_height - height;
    // Primary draw at the unwrapped position.
    inner_draw(x, y)?;
    // Wrapped copies, clockwise starting from the top edge; each corner
    // copy sits between its two adjacent edge copies, matching the
    // original draw order exactly.
    let copies: [(bool, f64, f64); 8] = [
        (near_top, 0.0, view_height),
        (near_top && near_right, -view_width, view_height),
        (near_right, -view_width, 0.0),
        (near_right && near_bottom, -view_width, -view_height),
        (near_bottom, 0.0, -view_height),
        (near_bottom && near_left, view_width, -view_height),
        (near_left, view_width, 0.0),
        (near_left && near_top, view_width, view_height),
    ];
    for &(wanted, dx, dy) in copies.iter() {
        if wanted {
            inner_draw(x + dx, y + dy)?;
        }
    }
    Ok(())
}
|
use crate::part::Part;
use std::fs::File;
use std::io::{BufRead, BufReader};
/// Two-pointer search over an ascending **sorted** slice for a pair of
/// entries summing to `target`; returns the product of that pair, or
/// `None` if no such pair exists.
///
/// Fix: the original computed `ns.len() - 1` unconditionally, which
/// underflows (and panics) for slices with fewer than one element and
/// could never find a pair with fewer than two — guard those up front.
fn part1(ns: &[i64], target: i64) -> Option<i64> {
    if ns.len() < 2 {
        return None;
    }
    let mut l = 0;
    let mut r = ns.len() - 1;
    while l < r {
        let sum = ns[l] + ns[r];
        if sum == target {
            return Some(ns[l] * ns[r]);
        } else if sum < target {
            // Pair too small: advance the low pointer.
            l += 1;
        } else {
            // Pair too large: retreat the high pointer.
            r -= 1;
        }
    }
    None
}
/// Finds three entries of the ascending **sorted** slice summing to
/// `target` and returns their product, or `None` if no triple exists.
/// Fixes the outer index; two-pointer scan over the remainder.
///
/// Fix: the original range `0..ns.len() - 1` underflows (and panics)
/// on an empty slice; guard slices shorter than three elements. The
/// upper bound is also tightened to `len - 2` — the last outer index
/// could never start a triple (the inner `l < r` loop body was
/// unreachable there), so results are unchanged.
fn part2(ns: &[i64], target: i64) -> Option<i64> {
    if ns.len() < 3 {
        return None;
    }
    for i in 0..ns.len() - 2 {
        let mut l = i + 1;
        let mut r = ns.len() - 1;
        while l < r {
            let sum = ns[i] + ns[l] + ns[r];
            if sum == target {
                return Some(ns[i] * ns[l] * ns[r]);
            } else if sum < target {
                // Triple too small: advance the low pointer.
                l += 1;
            } else {
                // Triple too large: retreat the high pointer.
                r -= 1;
            }
        }
    }
    None
}
/// Entry point for the puzzle: reads one integer per line from
/// `input_path`, sorts them ascending (as the two-pointer searches
/// require), and dispatches to [`part1`] or [`part2`] with the target
/// sum 2020. Panics if the file is unreadable, a line is not an
/// integer, or no answer exists.
pub fn run(part: Part, input_path: &str) -> i64 {
    let file = File::open(input_path).expect("failed to open input file");
    let mut entries: Vec<i64> = BufReader::new(file)
        .lines()
        .map(|line| {
            line.expect("failed to read line")
                .parse()
                .expect("failed to parse entry")
        })
        .collect();
    entries.sort_unstable();
    let target = 2020;
    match part {
        Part::Part1 => part1(&entries, target).unwrap(),
        Part::Part2 => part2(&entries, target).unwrap(),
    }
}
|
use super::super::rocket;
use rocket::local::Client;
use rocket::http::{Status, ContentType};
/// Smoke test for the root route: `GET /` must respond with
/// `200 OK` and an HTML content type.
#[test]
fn index() {
    let client = Client::new(rocket()).expect("valid rocket instance");
    let home_page = client.get("/").dispatch();
    assert_eq!(home_page.status(), Status::Ok);
    assert_eq!(home_page.content_type(), Some(ContentType::HTML));
}
|
use std::collections::HashMap;
use std::io::prelude::*;
use std::fs::File;
use yaml_rust::{YamlLoader, Yaml};
/// Maps an inventory group name (e.g. "raspi") to the list of host
/// names belonging to that group.
pub type Inventory = HashMap<String, Vec<String>>;
/// One property to verify on a host, evaluated by a named checker module.
#[derive(Debug, RustcEncodable)]
pub struct Property {
    /// Human-readable label, e.g. "Checking SSH".
    pub name: String,
    /// Name of the checker module that evaluates this property.
    pub module: String,
    /// Module-specific parameters; all YAML scalar values are stringified.
    pub params: HashMap<String, String>,
}
/// A set of properties to check against one inventory group.
#[derive(Debug, RustcEncodable)]
pub struct Check {
    /// Key into [`Inventory`] naming the hosts these properties apply to.
    pub inventory_name: String,
    pub properties: Vec<Property>,
}
/// The fully parsed check-suite file: host groups plus their checks.
#[derive(Debug, RustcEncodable)]
pub struct CheckSuite {
    pub inventory: Inventory,
    pub checks: Vec<Check>,
}
impl CheckSuite {
    /// Reads `filename` and parses it as a YAML check-suite definition.
    ///
    /// Panics if the file cannot be opened; a read error is silently
    /// ignored (the buffer then stays empty). NOTE(review): consider
    /// surfacing both failures to the caller instead.
    pub fn read_from_file(filename: &str) -> Option<CheckSuite> {
        let mut f = File::open(filename).unwrap();
        let mut yaml_str = String::new();
        // Read result deliberately discarded; see note above.
        let _ = f.read_to_string(&mut yaml_str);
        CheckSuite::read_from_string(&yaml_str)
    }
    /// Parses a YAML string into a [`CheckSuite`].
    ///
    /// The document is expected to be a list of hashes: one hash keyed
    /// `inventory` (group name -> host list) and any number of hashes
    /// with `hosts` + `properties` keys describing checks (see the
    /// example structure below). Panics on malformed YAML; as written
    /// it always returns `Some`.
    pub fn read_from_string(yaml_str: &str) -> Option<CheckSuite> {
        // Pre-built YAML keys used for hash lookups below.
        let key_inventory: Yaml = Yaml::from_str("inventory");
        let key_host = Yaml::from_str("hosts");
        let key_properties = Yaml::from_str("properties");
        let key_name = Yaml::from_str("name");
        let docs = YamlLoader::load_from_str(&yaml_str).unwrap();
        /*
        [Array([
            Hash({
                String("inventory"): Hash({
                    String("raspi"): Array([
                        String("heimbot.fritz.box")])})}),
            Hash({
                String("hosts"): String("raspi"),
                String("properties"): Array([
                    Hash({
                        String("name"): String("Checking SSH"),
                        String("ssh"): Hash({
                            String("port"): Integer(22),
                            String("software"): String("OpenSSH.*"),
                            String("version"): Real("2.0")})})])})])]
        */
        // We assume only one document in the file which consists of a list of hashes
        let mut inventory = Inventory::new();
        let mut checks: Vec<Check> = Vec::new();
        // First pass: collect the inventory (group -> hosts) entries.
        for hash in docs[0].as_vec().unwrap() {
            let map = hash.as_hash().unwrap();
            if map.contains_key(&key_inventory) {
                debug!("Found inventory: {:?}", hash);
                let inventory_yaml = map.get(&key_inventory).unwrap().as_hash().unwrap();
                for hosts_name_yaml in inventory_yaml.keys() {
                    let hosts_name = hosts_name_yaml.as_str().unwrap().to_string();
                    let mut hosts: Vec<String> = Vec::new();
                    for host in inventory_yaml.get(hosts_name_yaml).unwrap().as_vec().unwrap() {
                        let h = host.as_str().unwrap().to_string();
                        debug!("Host: {}", h);
                        hosts.push(h);
                    }
                    debug!("- - Inventory name: '{:?}'", hosts_name);
                    debug!("- - Inventory hosts: '{:?}'", hosts);
                    inventory.insert(hosts_name.to_string(), hosts);
                }
            }
        }
        // Second pass: collect the checks (hosts + properties) entries.
        for hash in docs[0].as_vec().unwrap() {
            let map = hash.as_hash().unwrap();
            if map.contains_key(&key_inventory) {
            } else {
                if map.contains_key(&key_host) && map.contains_key(&key_properties) {
                    debug!("- Found check: {:?}", hash);
                    let inventory_name = map.get(&key_host).unwrap().as_str().unwrap();
                    let mut properties = Vec::new();
                    let properties_yaml = map.get(&key_properties).unwrap().as_vec().unwrap();
                    for property_yml in properties_yaml {
                        let mut name: Option<String> = None;
                        let mut params = HashMap::new();
                        let mut module: Option<String> = None;
                        // The `name` key labels the property; any other key
                        // names the checker module to run. If several
                        // non-`name` keys appear, the last one wins.
                        for elem in property_yml.as_hash().unwrap() {
                            if elem.0 == &key_name {
                                name = Some(elem.1.as_str().unwrap().to_string());
                            } else {
                                module = Some(elem.0.as_str().unwrap().to_string());
                            }
                        }
                        if module.is_some() {
                            // Stringify each module parameter value so `params`
                            // is a uniform String -> String map.
                            let params_yaml = property_yml.as_hash()
                                .unwrap()
                                .get(&Yaml::from_str(module.as_ref()
                                    .unwrap()))
                                .unwrap()
                                .as_hash()
                                .unwrap();
                            for kv in params_yaml {
                                let value: String = match *kv.1 {
                                    Yaml::Integer(i) => i.to_string(),
                                    Yaml::Real(ref r) => r.to_string(),
                                    Yaml::String(ref string) => string.to_string(),
                                    _ => "<could not translate YAML value>".to_string(),
                                };
                                params.insert(kv.0.as_str().unwrap().to_string(), value);
                            }
                        }
                        properties.push(Property {
                            name: name.unwrap(),
                            module: module.unwrap(),
                            params: params,
                        });
                    }
                    let check = Check {
                        inventory_name: inventory_name.to_string(),
                        properties: properties,
                    };
                    debug!("- Created a check: {:?}", check);
                    checks.push(check);
                }
            }
        }
        info!("* Inventory: {:?}", inventory);
        info!("* Checks: {:?}", checks);
        let suite = CheckSuite {
            inventory: inventory,
            checks: checks,
        };
        return Some(suite);
    }
}
|
pub mod workdir;
|
$NetBSD: patch-third__party_rust_libc_src_unix_bsd_netbsdlike_netbsd_mod.rs,v 1.1 2020/10/07 11:10:35 wiz Exp $
Based on: https://bugzilla.mozilla.org/show_bug.cgi?id=1594342
--- third_party/rust/libc/src/unix/bsd/netbsdlike/netbsd/mod.rs.orig 2020-01-03 18:58:20.000000000 +0000
+++ third_party/rust/libc/src/unix/bsd/netbsdlike/netbsd/mod.rs
@@ -1,6 +1,7 @@
pub type clock_t = ::c_uint;
pub type suseconds_t = ::c_int;
pub type dev_t = u64;
+pub type key_t = c_long;
pub type blksize_t = i32;
pub type fsblkcnt_t = u64;
pub type fsfilcnt_t = u64;
@@ -282,6 +283,30 @@ s_no_extra_traits! {
}
#[repr(packed)]
+ pub struct ipc_perm {
+ pub cuid: ::uid_t,
+ pub cgid: ::gid_t,
+ pub uid: ::uid_t,
+ pub gid: ::gid_t,
+ pub mode: ::mode_t,
+ pub seq: ::c_ushort,
+ pub key: ::key_t,
+ }
+
+ #[repr(packed)]
+ pub struct shmid_ds {
+ pub shm_perm: ::ipc_perm,
+ pub shm_segsz: ::size_t,
+ pub shm_lpid: ::pid_t,
+ pub shm_cpid: ::pid_t,
+ pub shm_nattch: ::c_short,
+ pub shm_atime: ::time_t,
+ pub shm_dtime: ::time_t,
+ pub shm_ctime: ::time_t,
+ pub shm_internal: *mut ::c_void,
+ }
+
+ #[repr(packed)]
pub struct in_addr {
pub s_addr: ::in_addr_t,
}
@@ -907,11 +932,18 @@ pub const SCM_CREDS: ::c_int = 0x10;
pub const O_DSYNC : ::c_int = 0x10000;
+pub const MAP_ANONYMOUS : ::c_int = 0x1000;
pub const MAP_RENAME : ::c_int = 0x20;
pub const MAP_NORESERVE : ::c_int = 0x40;
pub const MAP_HASSEMAPHORE : ::c_int = 0x200;
pub const MAP_WIRED: ::c_int = 0x800;
+pub const IPC_PRIVATE: ::key_t = 0;
+pub const IPC_CREAT: ::c_int = 0x1000;
+pub const IPC_EXCL: ::c_int = 0x2000;
+pub const IPC_NOWAIT: ::c_int = 0x4000;
+pub const IPC_RMID: ::c_int = 0;
+
pub const DCCP_TYPE_REQUEST: ::c_int = 0;
pub const DCCP_TYPE_RESPONSE: ::c_int = 1;
pub const DCCP_TYPE_DATA: ::c_int = 2;
@@ -1583,6 +1615,13 @@ extern {
pid: ::pid_t,
addr: *mut ::c_void,
data: ::c_int) -> ::c_int;
+ pub fn shmget(key: ::key_t, size: ::size_t, shmflg: ::c_int) -> ::c_int;
+ pub fn shmat(shmid: ::c_int, shmaddr: *const ::c_void,
+ shmflg: ::c_int) -> *mut ::c_void;
+ pub fn shmdt(shmaddr: *const ::c_void) -> ::c_int;
+ #[cfg_attr(target_os = "netbsd", link_name = "__shmctl50")]
+ pub fn shmctl(shmid: ::c_int, cmd: ::c_int,
+ buf: *mut ::shmid_ds) -> ::c_int;
pub fn pthread_setname_np(t: ::pthread_t,
name: *const ::c_char,
arg: *mut ::c_void) -> ::c_int;
|
use crate::ast::main::AST;
use super::{
expressions::expr_identifier,
types::{
ASTNode, AssignVariableStmt, BlockStmt, ClassStmt, ConditionStmt, Extendable, ForStmt,
FunctionParam, FunctionStmt, GlobalBlockStmt, GlobalNode, Identifier, InterfaceMember,
InterfaceStmt, ReturnStmt, WhileStmt,
},
};
use super::{
expressions::{expr_access, Accessable},
types::{
ASTValue, Assignable, ExportStmt, Exportable, ImportStmt, InitVariableStmt, TypeExpr,
VarType,
},
};
use crate::common::position::{Position, Span};
use crate::lexer::parser::{Token, TokenType};
/// Parses a `let`/`const` variable declaration with an optional
/// `: Type` annotation and an optional `=` / `:=` initial value.
pub fn stmt_init_variable(ast: &mut AST) -> InitVariableStmt {
    let init_token = ast
        .check_token(
            Some(vec![TokenType::Keyword]),
            Some(vec![String::from("let"), String::from("const")]),
            true,
            true,
            0,
            false,
        )
        .unwrap();
    // `const` marks the binding immutable.
    let constant = init_token.value == "const";
    let name = expr_identifier(ast);
    let mut type_expr: Option<TypeExpr> = None;
    // A `:=` means "infer the type"; otherwise a `: Type` is required.
    if ast
        .check_one(
            TokenType::AssignmentOperator,
            String::from(":="),
            false,
            false,
        )
        .is_none()
    {
        ast.check_one(TokenType::Operator, String::from(":"), true, true)
            .unwrap();
        type_expr = Some(ast.get_type_expr());
    }
    let mut value: Option<ASTValue> = None;
    if ast
        .check_mult(
            TokenType::AssignmentOperator,
            vec![String::from("="), String::from(":=")],
            false,
            true,
        )
        .is_some()
    {
        value = ast.get_ast_value(true, vec![], None);
    }
    InitVariableStmt {
        ntype: String::from("InitVariableStmt"),
        constant,
        name: name.clone(),
        vtype: if type_expr.is_some() {
            VarType::Static(type_expr.clone().unwrap())
        } else {
            VarType::Infer(())
        },
        value: value.clone(),
        // Span ends at the value if present, else the type annotation,
        // else the variable name.
        span: Span {
            start: init_token.start,
            end: if value.is_some() {
                ASTValue::get_span(&value.unwrap()).end
            } else {
                if type_expr.is_some() {
                    type_expr.unwrap().span.end
                } else {
                    name.span.end
                }
            },
        },
    }
}
/// Parses `target <assign-op> value`, where `target` is either the
/// pre-parsed `up` assignable (when the caller already consumed it) or
/// a freshly parsed access expression.
pub fn stmt_assign_variable(ast: &mut AST, up: Option<Assignable>) -> AssignVariableStmt {
    let target = if up.is_some() {
        up.unwrap()
    } else {
        Accessable::get_assignable(&expr_access(ast, false, false, true).unwrap()).unwrap()
    };
    // Any assignment operator token is accepted; its text is stored as `op`.
    let op = ast
        .check_token(
            Some(vec![TokenType::AssignmentOperator]),
            None,
            true,
            true,
            0,
            false,
        )
        .unwrap()
        .value;
    let value = ast.get_ast_value(true, vec![], None).unwrap();
    AssignVariableStmt {
        ntype: String::from("AssignVariableStmt"),
        target: target.clone(),
        value: value.clone(),
        op,
        span: Span {
            start: target.get_span().start,
            end: value.get_span2().end,
        },
    }
}
/// Parses a function definition: `func name(params) [: rtype] { ... }`.
///
/// When `word` is given (e.g. `init`), that word replaces the `func`
/// keyword and the function is parsed anonymously; `name_req` controls
/// whether a missing name is reported as an error.
pub fn stmt_function(ast: &mut AST, name_req: bool, word: Option<String>) -> FunctionStmt {
    let token_start = ast
        .check_token(
            Some(vec![if word.is_some() {
                TokenType::Word
            } else {
                TokenType::Keyword
            }]),
            Some(vec![if word.is_some() {
                word.clone().unwrap()
            } else {
                String::from("func")
            }]),
            true,
            true,
            0,
            true,
        )
        .unwrap();
    let mut name: Option<Identifier> = None;
    if word.is_none()
        && ast
            .check_token(Some(vec![TokenType::Word]), None, false, false, 0, false)
            .is_some()
    {
        name = Some(expr_identifier(ast));
    } else {
        if name_req {
            // Report the error at the offending token, falling back to
            // 0:0 when the token stream is exhausted.
            let err_tok = ast.get_token(0, true, true);
            ast.error(
                format!("Expected function name, but found something else"),
                if err_tok.is_some() {
                    err_tok.clone().unwrap().start.line
                } else {
                    0
                },
                if err_tok.is_some() {
                    err_tok.unwrap().start.col
                } else {
                    0
                },
            );
        }
    }
    ast.check_one(TokenType::Parenthesis, String::from("("), true, true)
        .unwrap();
    // Parameters: `name: Type [= default]`, comma-separated, until `)`.
    let mut params: Vec<FunctionParam> = vec![];
    while ast
        .check_one(TokenType::Parenthesis, String::from(")"), false, true)
        .is_none()
    {
        let name = expr_identifier(ast);
        ast.check_one(TokenType::Operator, String::from(":"), true, true)
            .unwrap();
        let ptype = ast.get_type_expr();
        let mut default: Option<ASTValue> = None;
        if ast
            .check_one(
                TokenType::AssignmentOperator,
                String::from("="),
                false,
                true,
            )
            .is_some()
        {
            default = ast.get_ast_value(true, vec![], None);
        }
        // Optional comma between parameters.
        ast.check_one(TokenType::Operator, String::from(","), false, true);
        params.push(FunctionParam {
            name: name.clone(),
            // Span runs to the default value if present, else the type.
            span: Span {
                start: name.span.start,
                end: if default.is_some() {
                    default.clone().unwrap().get_span2().end
                } else {
                    ptype.span.end
                },
            },
            ptype: VarType::Static(ptype),
            default,
        })
    }
    // Optional `: ReturnType`; otherwise the return type is inferred.
    let rtype;
    if ast
        .check_one(TokenType::Operator, String::from(":"), false, true)
        .is_some()
    {
        rtype = VarType::Static(ast.get_type_expr());
    } else {
        rtype = VarType::Infer(());
    }
    let block = stmt_block(ast, false);
    FunctionStmt {
        ntype: String::from("FunctionStmt"),
        span: Span {
            start: token_start.start,
            end: block.span.end,
        },
        block,
        name,
        rtype,
        params,
    }
}
/// Parses `return [value]`; the value is optional (bare `return`).
pub fn stmt_return(ast: &mut AST) -> ReturnStmt {
    let token_start = ast
        .check_one(TokenType::Keyword, String::from("return"), true, true)
        .unwrap();
    let value = ast.get_ast_value(false, vec![], None);
    ReturnStmt {
        ntype: String::from("ReturnStmt"),
        // Span ends at the value when present, else at the keyword.
        span: Span {
            start: token_start.start,
            end: if value.is_some() {
                value.clone().unwrap().get_span2().end
            } else {
                token_start.end
            },
        },
        value,
    }
}
/// Parses a statement block.
///
/// With `global == false` the block must be delimited by `{` ... `}`;
/// with `global == true` it runs until the token stream ends (used for
/// the top level of a module).
pub fn stmt_block(ast: &mut AST, global: bool) -> BlockStmt {
    let mut body: Vec<ASTNode> = vec![];
    let mut start = Position::new(0, 0);
    if !global {
        start = ast
            .check_one(TokenType::Braces, String::from("{"), true, true)
            .unwrap()
            .start;
    }
    let mut token = ast.get_token(0, false, true);
    // Dispatch on the leading keyword; anything unrecognised is parsed
    // as a plain value/expression.
    while token.is_some()
        && token.clone().unwrap().ttype != TokenType::Braces
        && token.clone().unwrap().value != "}"
    {
        let tok = token.clone().unwrap();
        if tok.ttype == TokenType::Keyword {
            if tok.value == "let" || tok.value == "const" {
                body.push(ASTNode::InitVariableStmt(stmt_init_variable(ast)));
            } else if tok.value == "return" {
                body.push(ASTNode::ReturnStmt(stmt_return(ast)));
            } else if tok.value == "interface" {
                body.push(ASTNode::InterfaceStmt(stmt_interface(ast)));
            } else if tok.value == "while" {
                body.push(ASTNode::WhileStmt(stmt_while(ast)));
            } else if tok.value == "if" {
                body.push(ASTNode::ConditionStmt(stmt_condition(ast)));
            } else if tok.value == "for" {
                body.push(ASTNode::ForStmt(stmt_for(ast)));
            } else if tok.value == "class" {
                body.push(ASTNode::ClassStmt(stmt_class(ast)));
            } else {
                // Keyword with no statement form: try it as a value.
                let val = ast.get_ast_value(false, vec![], None);
                if val.is_some() {
                    body.push(ASTNode::Value(val.unwrap()));
                }
            }
        } else {
            let val = ast.get_ast_value(false, vec![], None);
            if val.is_some() {
                body.push(ASTNode::Value(val.unwrap()));
            }
        }
        token = ast.get_token(0, false, true);
    }
    // Determine the end position: last peeked token, the closing `}`,
    // or a best-effort fallback.
    let mut end: Option<Position> = None;
    if token.is_some() {
        end = Some(token.unwrap().end);
    }
    if !global {
        end = Some(
            ast.check_one(TokenType::Braces, String::from("}"), true, true)
                .unwrap()
                .end,
        );
    }
    if end.is_none() {
        let prev_tok = ast.get_token(0, false, true);
        end = Some(if prev_tok.is_some() {
            prev_tok.unwrap().end
        } else {
            Position::new(0, 0)
        });
    }
    BlockStmt {
        ntype: String::from("BlockStmt"),
        span: Span {
            start,
            end: end.unwrap(),
        },
        body,
    }
}
/// Parses `interface Name { member: Type, ... }`.
pub fn stmt_interface(ast: &mut AST) -> InterfaceStmt {
    let token_start = ast
        .check_one(TokenType::Keyword, String::from("interface"), true, true)
        .unwrap();
    let name = expr_identifier(ast);
    let mut members: Vec<InterfaceMember> = vec![];
    ast.check_one(TokenType::Braces, String::from("{"), true, true);
    // Members: `name: Type` with optional trailing commas, until `}`.
    while ast
        .check_one(TokenType::Braces, String::from("}"), false, false)
        .is_none()
    {
        let el_name = expr_identifier(ast);
        ast.check_one(TokenType::Operator, String::from(":"), true, true);
        let el_type = ast.get_type_expr();
        ast.check_one(TokenType::Operator, String::from(","), false, true);
        members.push(InterfaceMember {
            span: Span {
                start: el_name.span.start,
                end: el_type.span.end,
            },
            name: el_name,
            etype: el_type,
        })
    }
    let token_end = ast
        .check_one(TokenType::Braces, String::from("}"), true, true)
        .unwrap();
    InterfaceStmt {
        ntype: String::from("InterfaceStmt"),
        name,
        members,
        span: Span {
            start: token_start.start,
            end: token_end.end,
        },
    }
}
/// Parses `while condition { ... }`.
pub fn stmt_while(ast: &mut AST) -> WhileStmt {
    let token_start = ast
        .check_one(TokenType::Keyword, String::from("while"), true, true)
        .unwrap();
    let condition = ast.get_ast_value(true, vec![], None).unwrap();
    let block = stmt_block(ast, false);
    WhileStmt {
        ntype: String::from("WhileStmt"),
        span: Span {
            start: token_start.start,
            end: block.span.end,
        },
        condition,
        block,
    }
}
/// Parses `if condition { ... } [else if ... | else { ... }]`.
///
/// A plain `else` branch is represented as a nested `ConditionStmt`
/// whose `condition` is `None`.
pub fn stmt_condition(ast: &mut AST) -> ConditionStmt {
    let token_start = ast
        .check_one(TokenType::Keyword, String::from("if"), true, true)
        .unwrap();
    // "DictParsed" flag: presumably disambiguates the block-opening `{`
    // from a dict literal in the condition — TODO confirm against
    // get_ast_value.
    let condition = ast
        .get_ast_value(true, vec![String::from("DictParsed")], None)
        .unwrap();
    let block = stmt_block(ast, false);
    let mut else_stmt: Option<Box<ConditionStmt>> = None;
    if ast
        .check_one(TokenType::Keyword, String::from("else"), false, false)
        .is_some()
    {
        // NOTE(review): `else` is checked twice with identical flags —
        // verify that one of these calls actually consumes the token.
        let else_token = ast
            .check_one(TokenType::Keyword, String::from("else"), false, false)
            .unwrap();
        if ast
            .check_one(TokenType::Keyword, String::from("if"), false, false)
            .is_some()
        {
            // `else if`: recurse into another full condition.
            else_stmt = Some(Box::new(stmt_condition(ast)));
        } else {
            let block = stmt_block(ast, false);
            else_stmt = Some(Box::new(ConditionStmt {
                ntype: String::from("ConditionStmt"),
                span: Span {
                    start: else_token.start,
                    end: block.span.end,
                },
                condition: None,
                else_stmt: None,
                block,
            }));
        }
    }
    ConditionStmt {
        ntype: String::from("ConditionStmt"),
        span: Span {
            start: token_start.start,
            end: if else_stmt.is_some() {
                else_stmt.clone().unwrap().span.end
            } else {
                block.span.end
            },
        },
        block,
        condition: Some(condition),
        else_stmt,
    }
}
/// Parses `for [item of] iterable [at index] { ... }`.
pub fn stmt_for(ast: &mut AST) -> ForStmt {
    let token_start = ast
        .check_one(TokenType::Keyword, String::from("for"), true, true)
        .unwrap();
    let mut iter_val: Option<Identifier> = None;
    let mut idx_val: Option<Identifier> = None;
    // Lookahead: a word followed by the `of` keyword introduces a loop
    // variable — NOTE(review): both check_token calls use offset 0;
    // confirm the second peeks past the word as intended.
    if ast
        .check_token(Some(vec![TokenType::Word]), None, false, false, 0, false)
        .is_some()
        && ast
            .check_token(
                Some(vec![TokenType::Keyword]),
                Some(vec![String::from("of")]),
                false,
                false,
                0,
                false,
            )
            .is_some()
    {
        iter_val = Some(expr_identifier(ast));
    }
    let iterable = ast.get_ast_value(true, vec![], None).unwrap();
    // Optional `at index` clause binding the element index.
    if ast
        .check_one(TokenType::Keyword, String::from("at"), false, true)
        .is_some()
    {
        idx_val = Some(expr_identifier(ast));
    }
    let block = stmt_block(ast, false);
    ForStmt {
        ntype: String::from("ForStmt"),
        span: Span {
            start: token_start.start,
            end: block.span.end,
        },
        iter_val,
        idx_val,
        block,
        iterable,
    }
}
/// Parses a class definition:
/// `class Name [extends Base] [implements I1, I2] { props / init / methods }`.
pub fn stmt_class(ast: &mut AST) -> ClassStmt {
    let token_start = ast
        .check_one(TokenType::Keyword, String::from("class"), true, true)
        .unwrap();
    let name = expr_identifier(ast);
    // Optional `extends` clause: only identifiers / dotted paths are
    // valid base classes; calls and index accesses are reported.
    let mut extends: Option<Extendable> = None;
    if ast
        .check_one(TokenType::Keyword, String::from("extends"), false, true)
        .is_some()
    {
        let extend = expr_access(ast, false, false, false);
        match extend.unwrap() {
            Accessable::CallFunctionExpr(e) => ast.error(
                String::from("Expected class, but found Function Call Expression"),
                e.span.start.line,
                e.span.start.col,
            ),
            Accessable::AccessIndexExpr(e) => ast.error(
                String::from("Expected class, but found Access Index Expression"),
                e.span.start.line,
                e.span.start.col,
            ),
            Accessable::Identifier(e) => {
                extends = Some(Extendable::Ident(e));
            }
            Accessable::AccessDotExpr(e) => {
                extends = Some(Extendable::DotExpr(e));
            }
        }
    }
    // Optional `implements` clause: comma-separated interface names.
    let mut implements: Vec<Extendable> = vec![];
    let mut to_impl_check = ast
        .check_one(TokenType::Keyword, String::from("implements"), false, true)
        .is_some();
    while to_impl_check {
        let implc = expr_access(ast, false, false, false);
        let mut impls: Option<Extendable> = None;
        match implc.unwrap() {
            Accessable::CallFunctionExpr(e) => ast.error(
                String::from("Expected interface, but found Function Call Expression"),
                e.span.start.line,
                e.span.start.col,
            ),
            Accessable::AccessIndexExpr(e) => ast.error(
                String::from("Expected interface, but found Access Index Expression"),
                e.span.start.line,
                e.span.start.col,
            ),
            Accessable::Identifier(e) => {
                impls = Some(Extendable::Ident(e));
            }
            Accessable::AccessDotExpr(e) => {
                impls = Some(Extendable::DotExpr(e));
            }
        }
        if impls.is_some() {
            implements.push(impls.unwrap());
        }
        // Continue while a comma is followed by another word.
        to_impl_check = ast
            .check_one(TokenType::Operator, String::from(","), false, true)
            .is_some()
            && ast
                .check_token(Some(vec![TokenType::Word]), None, false, false, 0, false)
                .is_some();
    }
    // Class body: `let`/`const` properties, a single `init` function,
    // and `func` methods, until the closing `}`.
    let mut initializer: Option<FunctionStmt> = None;
    let mut properties: Vec<InitVariableStmt> = vec![];
    let mut methods: Vec<FunctionStmt> = vec![];
    ast.check_one(TokenType::Braces, String::from("{"), true, true);
    while ast
        .check_one(TokenType::Braces, String::from("}"), false, false)
        .is_none()
    {
        if ast
            .check_mult(
                TokenType::Keyword,
                vec![String::from("let"), String::from("const")],
                false,
                false,
            )
            .is_some()
        {
            properties.push(stmt_init_variable(ast));
        } else {
            if ast
                .check_one(TokenType::Word, String::from("init"), false, false)
                .is_some()
            {
                // NOTE(review): a second `init` silently replaces the first.
                initializer = Some(stmt_function(ast, false, Some(String::from("init"))));
            } else if ast
                .check_one(TokenType::Keyword, String::from("func"), false, false)
                .is_some()
            {
                methods.push(stmt_function(ast, true, None));
            } else {
                // Unknown token inside the class body: report and skip it.
                let tok = ast.get_token(0, true, false);
                if tok.is_some() {
                    let tok = tok.unwrap();
                    ast.error(
                        format!("Unexpected token {}", tok.value),
                        tok.start.line,
                        tok.start.col,
                    );
                }
            }
        }
    }
    let token_end = ast
        .check_one(TokenType::Braces, String::from("}"), true, true)
        .unwrap();
    ClassStmt {
        ntype: String::from("ClassStmt"),
        span: Span {
            start: token_start.start,
            end: token_end.end,
        },
        name,
        extends,
        implements,
        initializer,
        properties,
        methods,
    }
}
/// Parses the whole token stream as the top level of a module:
/// `import` statements, `export`-prefixed items, and ordinary
/// statements (wrapped as `GlobalNode::Node`).
pub fn stmt_global_block(ast: &mut AST) -> GlobalBlockStmt {
    let mut body: Vec<GlobalNode> = vec![];
    let mut token: Option<Token> = ast.get_token(0, false, false);
    // Set by an `export` keyword; applies to subsequently parsed items.
    // NOTE(review): never reset to false after use — verify whether
    // `export` is meant to apply only to the next item.
    let mut is_export = false;
    while token.is_some() {
        let tok = token.unwrap();
        let mut other_node = false;
        if tok.ttype == TokenType::Comment || tok.ttype == TokenType::CommentMultiline {
            // Top-level comments are skipped entirely.
        } else if tok.ttype == TokenType::Keyword {
            if tok.value == "import" {
                // `import a, b, c` — at least one specifier required.
                let mut spec: Vec<Identifier> = vec![];
                while ast
                    .check_token(Some(vec![TokenType::Word]), None, false, false, 0, false)
                    .is_some()
                {
                    spec.push(expr_identifier(ast));
                    ast.check_one(TokenType::Operator, String::from(","), false, true);
                }
                if spec.len() == 0 {
                    ast.error(
                        String::from("Expected import specifier"),
                        tok.start.line,
                        tok.start.col,
                    );
                } else {
                    body.push(GlobalNode::ImportStmt(ImportStmt {
                        span: Span {
                            start: tok.start,
                            end: spec.last().unwrap().span.end,
                        },
                        spec,
                        ntype: String::from("ImportStmt"),
                    }));
                }
            } else if tok.value == "export" {
                // A doubled `export` keyword is rejected.
                if is_export {
                    ast.error(
                        String::from("Unexpected keyword export"),
                        tok.start.line,
                        tok.start.col,
                    );
                }
                is_export = true;
            } else {
                other_node = true;
            }
        } else {
            other_node = true;
        }
        if other_node {
            // Delegate to the regular block parser, then either wrap
            // each produced node in an ExportStmt or pass it through.
            let block = stmt_block(ast, true);
            for node in block.body {
                if is_export {
                    match node {
                        ASTNode::ClassStmt(e) => {
                            body.push(GlobalNode::ExportStmt(ExportStmt {
                                ntype: String::from("ExportStmt"),
                                item: Exportable::Class(e),
                            }));
                        }
                        ASTNode::InterfaceStmt(e) => {
                            body.push(GlobalNode::ExportStmt(ExportStmt {
                                ntype: String::from("ExportStmt"),
                                item: Exportable::Interface(e),
                            }));
                        }
                        ASTNode::FunctionStmt(e) => {
                            body.push(GlobalNode::ExportStmt(ExportStmt {
                                ntype: String::from("ExportStmt"),
                                item: Exportable::Function(e),
                            }));
                        }
                        ASTNode::InitVariableStmt(e) => {
                            body.push(GlobalNode::ExportStmt(ExportStmt {
                                ntype: String::from("ExportStmt"),
                                item: Exportable::Variable(e),
                            }));
                        }
                        _ => {
                            // Only classes, interfaces, functions and
                            // variables may be exported.
                            let span = node.get_span();
                            ast.error(
                                String::from("Invalid item to export"),
                                span.start.line,
                                span.start.col,
                            );
                        }
                    }
                } else {
                    body.push(GlobalNode::Node(node));
                }
            }
        }
        token = ast.get_token(0, true, false);
    }
    GlobalBlockStmt {
        ntype: String::from("GlobalBlockStmt"),
        span: Span {
            start: Position::new(0, 0),
            end: if token.is_some() {
                token.unwrap().end
            } else {
                Position::new(0, 0)
            },
        },
        body,
    }
}
|
// module sb::poker::rank
use std::cmp::Ordering;
//--- card ------------------------------------------------------------------------------------------
/// A single playing card as encoded by `make_card`: `rank` 2..=14
/// (11=J, 12=Q, 13=K, 14=A) and `suit` 1..=4 (C, D, H, S).
#[derive(Debug,Copy,Clone)]
pub struct Card {
    rank : u8,
    suit : u8,
}
impl PartialEq for Card
{
    // Cards compare by rank only — suit is ignored, consistent with the
    // rank-only `PartialOrd` implementation below.
    fn eq(&self, other: &Self) -> bool {
        self.rank == other.rank
    }
}
impl PartialOrd for Card {
    /// Orders cards by rank alone, mirroring the rank-only `PartialEq`.
    /// `u8` ordering is total, so this always returns `Some(..)` —
    /// exactly like the original explicit three-way comparison.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.rank.partial_cmp(&other.rank)
    }
}
#[allow(dead_code)]
/// Builds a [`Card`] from flexible encodings: `rank` may be an ASCII
/// character (`'2'`-`'9'`, `'T'`, `'J'`, `'Q'`, `'K'`, `'A'`) or an
/// already-normalised value 2..=14; `suit` may be `'C'`/`'D'`/`'H'`/`'S'`
/// or an already-normalised value 1..=4. Panics on anything else.
/// (Consider renaming to `Card::new` in a follow-up.)
pub fn make_card(rank : u8, suit : u8) -> Card {
    const TWO : char = 2 as char;
    const FOURTEEN : char = 14 as char;
    const ONE : char = 1 as char;
    const FOUR : char = 4 as char;
    let rank_value = match rank as char {
        // ASCII digit -> numeric rank.
        '2'..='9' => rank - b'0',
        'T' => 10,
        'J' => 11,
        'Q' => 12,
        'K' => 13,
        'A' => 14,
        // Raw value already in range: keep as-is.
        (TWO ..= FOURTEEN) => rank,
        _ => panic!("Unexpected rank: {}", rank),
    };
    let suit_value = match suit as char {
        'C' => 1,
        'D' => 2,
        'H' => 3,
        'S' => 4,
        // Raw value already in range: keep as-is.
        (ONE ..= FOUR) => suit,
        _ => panic!("Unexpected suit: {}", suit),
    };
    Card { rank: rank_value, suit: suit_value }
}
//--- rank ------------------------------------------------------------------------------------------
/// The category of a poker hand, with variants listed weakest to
/// strongest (the cross-category ordering lives in `PartialOrd`).
/// Payloads carry tie-break information for hands of equal category.
#[derive(Debug,Clone)]
#[allow(dead_code)]
pub enum Rank {
    HighCard{ rank: Vec::<u8> },    // rank[0] is high card, followed by remaining card ranks in descending order
    Pair{ rank: Vec::<u8> },        // rank[0] is pair cards, followed by remaining card ranks in descending order
    TwoPair{ rank: Vec::<u8> },     // rank[0] is high pair cards, rank[1] is low pair cards, rank[2] is remaining singleton (original comment said rank[3] — likely a typo; verify against the hand evaluator)
    Three{ rank : Vec::<u8> },      // rank[0] is 3 of a kind cards, followed by remaining card ranks in descending order
    Straight{ rank: u8 },           // rank is high card, no other values
    Flush{ rank: Vec::<u8> },       // rank[0] is high card, followed by remaining card ranks in descending order
    FullHouse{ rank: Vec::<u8> },   // rank[0] is three cards, rank[1] is pair cards
    Four{ rank: Vec::<u8> },        // rank[0] is four cards, rank[1] is remaining singleton
    StraightFlush{ rank: u8 },      // rank is high card, no other values
    RoyalFlush,                     // no values
}
/// Implemented by hand types that can evaluate themselves into a [`Rank`].
pub trait RankHand {
    fn rank(&self) -> Rank;
}
impl PartialEq for Rank {
    /// Two ranks are equal exactly when they are the same variant and
    /// carry an equal tie-break payload; different variants are never
    /// equal. Behaviour is identical to the original per-variant match
    /// chain, just written as a single tuple match.
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            // Variants carrying a Vec<u8> payload.
            (Rank::HighCard { rank: a }, Rank::HighCard { rank: b })
            | (Rank::Pair { rank: a }, Rank::Pair { rank: b })
            | (Rank::TwoPair { rank: a }, Rank::TwoPair { rank: b })
            | (Rank::Three { rank: a }, Rank::Three { rank: b })
            | (Rank::Flush { rank: a }, Rank::Flush { rank: b })
            | (Rank::FullHouse { rank: a }, Rank::FullHouse { rank: b })
            | (Rank::Four { rank: a }, Rank::Four { rank: b }) => a == b,
            // Variants carrying a single u8 high card.
            (Rank::Straight { rank: a }, Rank::Straight { rank: b })
            | (Rank::StraightFlush { rank: a }, Rank::StraightFlush { rank: b }) => a == b,
            // RoyalFlush has no payload.
            (Rank::RoyalFlush, Rank::RoyalFlush) => true,
            // Mismatched variants.
            _ => false,
        }
    }
}
impl PartialOrd for Rank {
    /// Orders hands first by category (high card < pair < ... < royal flush),
    /// then, within the same category, by the payload stored in the variant
    /// (kicker lists compare lexicographically, high card first).
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Numeric tier of each hand category; a higher tier always wins.
        fn tier(rank: &Rank) -> u8 {
            match rank {
                Rank::HighCard { .. } => 0,
                Rank::Pair { .. } => 1,
                Rank::TwoPair { .. } => 2,
                Rank::Three { .. } => 3,
                Rank::Straight { .. } => 4,
                Rank::Flush { .. } => 5,
                Rank::FullHouse { .. } => 6,
                Rank::Four { .. } => 7,
                Rank::StraightFlush { .. } => 8,
                Rank::RoyalFlush => 9,
            }
        }
        let by_tier = tier(self).cmp(&tier(other));
        if by_tier != Ordering::Equal {
            return Some(by_tier);
        }
        // Equal tiers imply equal variants; break the tie on the payloads.
        match (self, other) {
            (Rank::HighCard { rank: a }, Rank::HighCard { rank: b }) => a.partial_cmp(b),
            (Rank::Pair { rank: a }, Rank::Pair { rank: b }) => a.partial_cmp(b),
            (Rank::TwoPair { rank: a }, Rank::TwoPair { rank: b }) => a.partial_cmp(b),
            (Rank::Three { rank: a }, Rank::Three { rank: b }) => a.partial_cmp(b),
            (Rank::Straight { rank: a }, Rank::Straight { rank: b }) => a.partial_cmp(b),
            (Rank::Flush { rank: a }, Rank::Flush { rank: b }) => a.partial_cmp(b),
            (Rank::FullHouse { rank: a }, Rank::FullHouse { rank: b }) => a.partial_cmp(b),
            (Rank::Four { rank: a }, Rank::Four { rank: b }) => a.partial_cmp(b),
            (Rank::StraightFlush { rank: a }, Rank::StraightFlush { rank: b }) => {
                a.partial_cmp(b)
            }
            (Rank::RoyalFlush, Rank::RoyalFlush) => Some(Ordering::Equal),
            // Unreachable in practice: identical tiers mean identical variants.
            _ => None,
        }
    }
}
impl RankHand for Vec::<Card> {
    /// Ranks a poker hand (assumed non-empty, typically 5 cards; card ranks
    /// run 2..=14 with 14 = ace).
    ///
    /// Fixes relative to the previous version:
    /// * the four-of-a-kind / full-house branch returned from the FIRST
    ///   iteration of its counting loop (always a bogus FullHouse of [0, 0]);
    /// * `low` was clobbered by ranks with a count of zero;
    /// * a plain straight was mislabeled `StraightFlush`;
    /// * a plain flush fell through and ranked as `HighCard`.
    fn rank(&self) -> Rank {
        // Flush: every card shares the first card's suit.
        let is_flush = self.iter().all(|c| c.suit == self[0].suit);
        // Distinct card ranks in descending order (duplicates removed).
        let mut ranked: Vec<u8> = self.iter().map(|c| c.rank).collect();
        ranked.sort_by(|a, b| b.cmp(a));
        ranked.dedup();
        // Straight: all ranks distinct and consecutive when sorted descending.
        // NOTE(review): the ace-low straight (A-5-4-3-2) is not recognized —
        // confirm whether that is intended.
        let is_straight =
            ranked.len() == self.len() && ranked.windows(2).all(|w| w[0] == w[1] + 1);
        // Straight flush / royal flush.
        if is_straight && is_flush {
            if ranked[0] == 14_u8 {
                // Ace-high straight flush.
                return Rank::RoyalFlush;
            }
            return Rank::StraightFlush { rank: ranked[0] };
        }
        // Count how many cards carry each rank; indices 0 and 1 stay unused
        // because ranks run 2..=14 (J=11, Q=12, K=13, A=14).
        let mut multiples = [0_u32; 15];
        for c in self {
            multiples[c.rank as usize] += 1;
        }
        // Exactly two distinct ranks: four of a kind (4+1) or full house (3+2).
        if ranked.len() == 2 {
            let mut high: u8 = 0;
            let mut low: u8 = 0;
            let mut is_four = false;
            for (card, count) in multiples.iter().enumerate() {
                match count {
                    4 => {
                        is_four = true;
                        high = card as u8;
                    }
                    3 => high = card as u8,
                    // Only real minor groups (count 1 or 2) set `low`.
                    1 | 2 => low = card as u8,
                    _ => {}
                }
            }
            if is_four {
                return Rank::Four { rank: vec![high, low] };
            }
            return Rank::FullHouse { rank: vec![high, low] };
        }
        if is_straight {
            return Rank::Straight { rank: ranked[0] };
        }
        if is_flush {
            // All descending card ranks act as the tiebreaker.
            // (Assumes Flush stores a Vec<u8> like HighCard — TODO confirm.)
            return Rank::Flush { rank: ranked };
        }
        // Three of a kind, two pair, single pair.
        let mut first_pair = 0_u8;
        for (card, count) in multiples.iter().enumerate() {
            let card = card as u8;
            match count {
                3 => {
                    // Trips first, then the kickers in descending order.
                    let mut result = Vec::<u8>::with_capacity(ranked.len());
                    result.push(card);
                    result.extend(ranked.iter().copied().filter(|&c| c != card));
                    return Rank::Three { rank: result };
                }
                2 => {
                    if first_pair != 0 {
                        // Second pair found: both pairs high-first, then kicker.
                        let mut result = Vec::<u8>::with_capacity(ranked.len());
                        result.push(card);
                        result.push(first_pair);
                        result.sort_by(|a, b| b.cmp(a));
                        result.extend(
                            ranked
                                .iter()
                                .copied()
                                .filter(|&c| c != card && c != first_pair),
                        );
                        return Rank::TwoPair { rank: result };
                    }
                    first_pair = card;
                }
                _ => {}
            }
        }
        if first_pair != 0 {
            // Pair rank first, then the kickers in descending order.
            let mut result = Vec::<u8>::with_capacity(ranked.len());
            result.push(first_pair);
            result.extend(ranked.iter().copied().filter(|&c| c != first_pair));
            return Rank::Pair { rank: result };
        }
        Rank::HighCard { rank: ranked }
    }
}
//--- hand ------------------------------------------------------------------------------------------
/// A poker hand: a growable list of cards that is ranked and compared
/// through the `RankHand` impl on the underlying vector.
#[derive(Debug,Clone)]
pub struct Hand {
    pub cards : Vec::<Card>, // doesn't exactly need to be public...
}
impl Hand {
    /// Creates an empty hand with capacity reserved for five cards.
    pub fn new() -> Self {
        let cards = Vec::<Card>::with_capacity(5);
        Hand { cards }
    }
    /// Builds a card from `rank` and `suit` and appends it to the hand.
    pub fn add_card(&mut self, rank : u8, suit : u8) {
        let card = make_card(rank, suit);
        self.cards.push(card);
    }
}
impl RankHand for Hand {
    /// Delegates ranking to the `RankHand` impl on the card vector.
    fn rank(&self) -> Rank {
        self.cards.rank()
    }
}
// Hands are equal when their computed ranks are equal, regardless of suits.
impl PartialEq for Hand
{
    fn eq(&self, other: &Self) -> bool {
        self.rank() == other.rank()
    }
}
impl PartialOrd for Hand {
    /// Orders hands by comparing their computed `Rank`s.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.rank().partial_cmp(&other.rank())
    }
}
//--- tests ------------------------------------------------------------------------------------------
#[test]
pub fn test_rank() {
    // Two royal flushes in different suits must rank identically.
    let royal_suit1: Vec<Card> = vec![
        make_card(14, 1),
        make_card(13, 1),
        make_card(12, 1),
        make_card(11, 1),
        make_card(10, 1),
    ];
    let royal_suit2: Vec<Card> = vec![
        make_card(14, 2),
        make_card(13, 2),
        make_card(12, 2),
        make_card(11, 2),
        make_card(10, 2),
    ];
    // Ace-high straight in mixed suits: not a flush.
    let straight_ace: Vec<Card> = vec![
        make_card(14, 2),
        make_card(13, 1),
        make_card(12, 1),
        make_card(11, 1),
        make_card(10, 1),
    ];
    // King-high straight in mixed suits.
    let straight_king: Vec<Card> = vec![
        make_card(13, 1),
        make_card(12, 1),
        make_card(11, 1),
        make_card(10, 1),
        make_card(9, 3),
    ];
    assert!(royal_suit1.rank() == royal_suit2.rank());
    println!("{:?}", royal_suit1.rank());
    println!("{:?}", straight_ace.rank());
    assert!(royal_suit1.rank() != straight_ace.rank());
    assert!(straight_king.rank() != straight_ace.rank());
}
|
// https://zhuanlan.zhihu.com/p/78333162
// Demonstrates calling two same-named trait methods (`Thread::kill` and
// `Flock::kill`) on one value by coercing to the desired trait object.
fn main() {
    let mut flock = DuckFlock::new(33);
    {
        // As `dyn Thread`: signal 10 prints the duck count, no mutation.
        let thread: &mut dyn Thread = &mut flock;
        thread.kill(10);
    }
    {
        // As `dyn Flock`: removes 10 ducks from the flock.
        let flock: &mut dyn Flock = &mut flock;
        flock.kill(10);
    }
    {
        // As `dyn Thread` again: reports the reduced count (23).
        let thread: &mut dyn Thread = &mut flock;
        thread.kill(10);
    }
}
// Two unrelated traits that both declare a method named `kill`; the demo
// `main` disambiguates them via trait-object coercion.
trait Thread {
    fn kill(&mut self, signal: i32);
}
trait Flock {
    fn kill(&mut self, amount: i32);
}
// A flock simply tracks how many ducks it contains.
struct DuckFlock {
    ducks: i32,
}
impl DuckFlock {
    // Creates a flock containing `amount` ducks.
    pub fn new(amount: i32) -> DuckFlock {
        DuckFlock { ducks: amount }
    }
}
impl Thread for DuckFlock {
    // Thread-style `kill`: signal 10 reports the flock size; any other
    // signal is reported as unrecognized. Does not modify the flock.
    fn kill(&mut self, signal: i32) {
        if signal == 10 {
            println!("We have {} ducks", self.ducks);
        } else {
            // BUGFIX: typo "Unknow" -> "Unknown" in the user-facing message.
            println!("Unknown signal {}", signal);
        }
    }
}
impl Flock for DuckFlock {
    // Flock-style `kill`: removes `amount` ducks and logs the removal.
    // Note: `ducks` can go negative; there is no lower-bound check.
    fn kill(&mut self, amount: i32) {
        self.ducks -= amount;
        println!("{} ducks killed!", amount);
    }
}
|
use std::mem;
use std::ptr;
use messages::Message;
use libc::{c_char, size_t};
use assets::AssetBundle;
use capi::common::*;
use transactions::delete_assets::DeleteAssetsWrapper;
use error::{Error, ErrorKind};
ffi_fn! {
    // C ABI constructor: builds a DeleteAssets transaction wrapper from a
    // C-string public key and a seed. On parse failure the error is written
    // through `error` (when non-null) and null is returned.
    // Ownership: release the returned pointer with dmbc_tx_delete_assets_free.
    fn dmbc_tx_delete_assets_create(
        public_key: *const c_char,
        seed: u64,
        error: *mut Error,
    ) -> *mut DeleteAssetsWrapper {
        let public_key = match parse_public_key(public_key) {
            Ok(pk) => pk,
            Err(err) => {
                unsafe {
                    if !error.is_null() {
                        *error = err;
                    }
                    return ptr::null_mut();
                }
            }
        };
        // Heap-allocate the wrapper and hand the raw pointer to the caller.
        Box::into_raw(Box::new(DeleteAssetsWrapper::new(&public_key, seed)))
    }
}
ffi_fn! {
    // C ABI destructor: reclaims a wrapper previously returned by
    // dmbc_tx_delete_assets_create. Passing null is a safe no-op.
    fn dmbc_tx_delete_assets_free(wrapper: *const DeleteAssetsWrapper) {
        if !wrapper.is_null() {
            // Re-box the raw pointer so the wrapper is dropped here.
            unsafe { Box::from_raw(wrapper as *mut DeleteAssetsWrapper); }
        }
    }
}
ffi_fn! {
    // C ABI: appends a copy of `asset` to the wrapped transaction.
    // Returns true on success; on failure writes into `error` (when
    // non-null) and returns false.
    fn dmbc_tx_delete_assets_add_asset(
        wrapper: *mut DeleteAssetsWrapper,
        asset: *const AssetBundle,
        error: *mut Error,
    ) -> bool {
        // Validate the wrapper pointer first.
        let wrapper = match DeleteAssetsWrapper::from_ptr(wrapper) {
            Ok(wrapper) => wrapper,
            Err(err) => {
                unsafe {
                    if !error.is_null() {
                        *error = err;
                    }
                    return false
                }
            }
        };
        // Then the asset pointer.
        if asset.is_null() {
            unsafe {
                if !error.is_null() {
                    *error = Error::new(ErrorKind::Text("Invalid asset pointer.".to_string()));
                }
                return false;
            }
        }
        let asset = AssetBundle::from_ptr(asset);
        // The asset is cloned, so the caller retains ownership of its pointer.
        wrapper.add_asset(asset.clone());
        true
    }
}
ffi_fn! {
    // C ABI: serializes the wrapped transaction body into a heap buffer and
    // returns a pointer to it, writing the byte count through `length`.
    // NOTE(review): `length` is dereferenced without a null check, unlike
    // `error` — confirm callers are required to pass a valid pointer.
    fn dmbc_tx_delete_assets_into_bytes(
        wrapper: *mut DeleteAssetsWrapper,
        length: *mut size_t,
        error: *mut Error,
    ) -> *const u8 {
        let wrapper = match DeleteAssetsWrapper::from_ptr(wrapper) {
            Ok(wrapper) => wrapper,
            Err(err) => {
                unsafe {
                    if !error.is_null() {
                        *error = err;
                    }
                    return ptr::null();
                }
            }
        };
        let bytes = wrapper.unwrap().raw().body().to_vec();
        // len == capacity guarantees the Vec can be reconstructed exactly
        // for deallocation later.
        assert!(bytes.len() == bytes.capacity());
        let length = unsafe { &mut *length };
        let len = bytes.len() as size_t;
        *length = len;
        let ptr = bytes.as_ptr();
        // Intentional leak: ownership of the buffer passes to the caller.
        mem::forget(bytes);
        ptr
    }
}
|
pub mod cssom;
pub mod dom;
pub mod error;
pub mod font_list;
pub mod layout;
pub mod painter;
pub mod parser;
pub mod str;
pub mod style;
pub mod window;
|
// This is a comment, and is ignored by the compiler.
// You can test this code by clicking the "Run" button over there ->
// or if you prefer to use your keyboard, you can use the "Ctrl + Enter" shortcut
// This code is editable, feel free to hack it!
// You can always return to the original code by clicking the "Reset" button ->
// This is the main function
// This is the main function: execution of the compiled binary starts here.
fn main() {
    // Statements here are executed when the compiled binary is called
    // Print text to the console
    println!("Hello World!");
}
// In this file we will try to practice
// TODO
// Based on the practice Book
// Chapter 15. Scoping rules - https://doc.rust-lang.org/rust-by-example/scope.html
// RAII - Ownership and moves - Borrowing - Lifetimes
// Based on the Courses Book
// 4 - Understanding Ownership - 4.2 Borrowing
// 10.3 Lifetimes
// From https://doc.rust-lang.org/rust-by-example/scope.html
// Multiple solutions have been implemented in Rust to take care of memory safety.
// "Rust enforces RAII (Resource Acquisition Is Initialization), so whenever an object goes out of scope, its destructor is called and its owned resources are freed."
// "This behavior shields against resource leak bugs, so you'll never have to manually free memory or worry about memory leaks again!"
// RAII is a pattern that can also be used in C++
// Resources on the heap are owned.
// There are different notions:
// Owners / Scope / Drop / Move
// Clone / Copy
// Ownership / Functions
// Return Values and Scope
// Borrowing
// Mutable references
// Dangling reference example at 4.2
// Life time
|
use gl;
use std;
use std::ffi::CString;
use std::fs::File;
use std::io::Read;
use std::ops::Drop;
/// The GLSL shader stages this module knows how to create.
#[derive(Debug, Copy, Clone)]
pub enum ShaderType {
    Vertex,
    Fragment,
    Geometry,
    Compute,
}
/// One shader stage: its source path, GL object handle, and stage kind.
#[derive(Debug, Default)]
pub struct Shader {
    // Path of the GLSL source file on disk.
    source_file: String,
    // GL shader object name; 0 until `compile` is called.
    gl_handle: u32,
    shader_type: ShaderType,
}
/// A GL program object that compiled shader stages are attached to.
#[derive(Debug, Default)]
pub struct ShaderProgram {
    // GL program object name created in `new`.
    gl_handle: u32,
}
impl ShaderProgram {
    /// Creates a new GL program object (requires a current GL context).
    pub fn new() -> ShaderProgram {
        ShaderProgram {
            gl_handle: unsafe { gl::CreateProgram() },
        }
    }
    /// Attaches a compiled shader stage to this program.
    pub fn attach_shader(&mut self, shader: &Shader) {
        unsafe {
            let t = get_gl_shader_type(&shader.shader_type);
            println!(
                "Attaching shader of type {}, handle:{} to program {}",
                t,
                shader.gl_handle,
                self.gl_handle
            );
            gl::AttachShader(self.gl_handle, shader.gl_handle);
        }
    }
    /// Links the program and prints the info log on failure.
    pub fn link(&self) {
        unsafe {
            gl::LinkProgram(self.gl_handle);
            let mut success = gl::FALSE as gl::types::GLint;
            gl::GetProgramiv(self.gl_handle, gl::LINK_STATUS, &mut success);
            if success != gl::TRUE as gl::types::GLint {
                // BUGFIX: query the *program* info log — the original called
                // gl::GetShaderInfoLog on a program handle — and write into a
                // real byte buffer instead of the spare capacity of an empty
                // String (which printed nothing and was UB).
                let mut info_log = vec![0u8; 512];
                let mut error_size = 0i32;
                gl::GetProgramInfoLog(
                    self.gl_handle,
                    info_log.len() as i32,
                    &mut error_size,
                    info_log.as_mut_ptr() as _,
                );
                info_log.truncate(error_size.max(0) as usize);
                println!(
                    "Error link failed with error: {:?} for: {:?}",
                    String::from_utf8_lossy(&info_log),
                    self.gl_handle
                );
            } else {
                println!("Linked successfully {}", self.gl_handle);
            }
        }
    }
    /// Makes this program current for subsequent draw calls.
    pub fn bind(&self) {
        unsafe {
            gl::UseProgram(self.gl_handle);
        }
    }
    /// Unbinds any current program.
    pub fn unbind(&self) {
        unsafe {
            gl::UseProgram(0);
        }
    }
    /// Sets a vec4 uniform by name.
    pub fn set_uniform4f(&self, name: &str, values: &[f32; 4]) {
        let location = self.get_uniform_location(name);
        unsafe {
            gl::Uniform4f(location, values[0], values[1], values[2], values[3]);
        }
    }
    /// Sets a 4x4 matrix uniform by name (no transpose).
    pub fn set_uniform_matrix4(&self, name: &str, values: &[f32; 16]) {
        let location = self.get_uniform_location(name);
        unsafe {
            gl::UniformMatrix4fv(location, 1, gl::FALSE, values.as_ptr() as *const _);
        }
    }
    /// Sets an array of `count` vec3 uniforms by name.
    pub fn set_uniform_3fv(&self, name: &str, count: i32, values: &[f32]) {
        let location = self.get_uniform_location(name);
        unsafe {
            gl::Uniform3fv(location, count, values.as_ptr() as *const _);
        }
    }
    /// Sets an array of `count` float uniforms by name.
    pub fn set_uniform_1fv(&self, name: &str, count: i32, values: &[f32]) {
        let location = self.get_uniform_location(name);
        unsafe {
            gl::Uniform1fv(location, count, values.as_ptr() as *const _);
        }
    }
    /// Sets a single int uniform (e.g. a sampler unit) by name.
    pub fn set_uniform_1i(&self, name: &str, value: i32) {
        let location = self.get_uniform_location(name);
        unsafe {
            gl::Uniform1i(location, value);
        }
    }
    /// Sets a single float uniform by name.
    pub fn set_uniform_1f(&self, name: &str, value: f32) {
        let location = self.get_uniform_location(name);
        unsafe {
            gl::Uniform1f(location, value);
        }
    }
    // Looks up a uniform location; returns -1 if the name is not active.
    fn get_uniform_location(&self, name: &str) -> i32 {
        unsafe {
            let c_name = std::ffi::CString::new(name).unwrap();
            gl::GetUniformLocation(self.gl_handle, c_name.as_ptr())
        }
    }
}
impl Shader {
    /// Creates an unbuilt shader stage; call `compile` before attaching.
    pub fn new(shader_type: ShaderType, source_file: &str) -> Shader {
        Shader {
            source_file: source_file.to_string(),
            gl_handle: 0,
            shader_type,
        }
    }
    /// Creates the GL shader object, uploads the source read from
    /// `source_file`, and compiles it, printing the info log on failure.
    pub fn compile(&mut self) {
        unsafe {
            let shader_type = get_gl_shader_type(&self.shader_type);
            println!(
                "Creating shader of type: {} Vtx={} Fragment={}",
                shader_type,
                gl::VERTEX_SHADER,
                gl::FRAGMENT_SHADER
            );
            self.gl_handle = gl::CreateShader(shader_type);
            let file_buf = self.read_shader_file();
            let shader_str = CString::new(file_buf).unwrap();
            gl::ShaderSource(self.gl_handle, 1, &shader_str.as_ptr(), std::ptr::null());
            gl::CompileShader(self.gl_handle);
            let mut success = gl::FALSE as gl::types::GLint;
            gl::GetShaderiv(self.gl_handle, gl::COMPILE_STATUS, &mut success);
            if success != gl::TRUE as gl::types::GLint {
                // BUGFIX: the log was written into the spare capacity of an
                // empty String (len 0), so the printed message was always
                // empty and the write was UB. Use a real byte buffer and
                // truncate it to the length GL reports.
                let mut info_log = vec![0u8; 512];
                let mut error_size = 0i32;
                gl::GetShaderInfoLog(
                    self.gl_handle,
                    info_log.len() as i32,
                    &mut error_size,
                    info_log.as_mut_ptr() as _,
                );
                info_log.truncate(error_size.max(0) as usize);
                println!(
                    "Error compile failed with error: {:?} for: {:?}",
                    String::from_utf8_lossy(&info_log),
                    self.gl_handle
                );
            }
        }
    }
    // Reads the whole GLSL source file into memory; panics if it is missing.
    fn read_shader_file(&self) -> Vec<u8> {
        let mut file = File::open(&self.source_file).expect("ERROR: Shader file not found!");
        let mut file_buf = Vec::new();
        file.read_to_end(&mut file_buf).unwrap();
        file_buf
    }
}
/// Bundles a shader stage with the path of its GLSL source file.
pub struct ShaderInputData {
    shader_type: ShaderType,
    shader_source_file: String,
}
impl ShaderInputData {
    /// Creates a stage description; the source path is copied into owned storage.
    pub fn new(shader_type: ShaderType, shader_source_file: &str) -> ShaderInputData {
        let shader_source_file = shader_source_file.to_string();
        ShaderInputData {
            shader_type,
            shader_source_file,
        }
    }
}
/// Compiles every stage described by `input`, attaches each stage to a
/// freshly created program, links the program, and returns it.
/// Each `Shader` is dropped (and its GL object deleted) right after attach.
pub fn create_shader_from(input: &[ShaderInputData]) -> ShaderProgram {
    let mut program = ShaderProgram::new();
    for spec in input {
        let mut stage = Shader::new(spec.shader_type, &spec.shader_source_file);
        stage.compile();
        program.attach_shader(&stage);
    }
    program.link();
    program
}
impl Drop for Shader {
    // Frees the GL shader object when the wrapper goes out of scope.
    // glDeleteShader silently ignores handle 0, so dropping an uncompiled
    // Shader is safe; a shader still attached to a program is only flagged
    // for deletion by GL until it is detached.
    fn drop(&mut self) {
        unsafe {
            gl::DeleteShader(self.gl_handle);
        }
    }
}
// Maps the module's stage enum onto the matching OpenGL shader-type constant.
fn get_gl_shader_type(shader_type: &ShaderType) -> gl::types::GLenum {
    match *shader_type {
        ShaderType::Vertex => gl::VERTEX_SHADER,
        ShaderType::Fragment => gl::FRAGMENT_SHADER,
        ShaderType::Geometry => gl::GEOMETRY_SHADER,
        ShaderType::Compute => gl::COMPUTE_SHADER,
    }
}
impl Default for ShaderType {
    // Vertex is the default stage (used by `#[derive(Default)]` on `Shader`).
    fn default() -> Self {
        ShaderType::Vertex
    }
}
|
use std::io::Error;
use bincode::rustc_serialize::DecodingError;
use std::convert::From;
/// Errors that can occur while iterating serialized records: either the
/// underlying read failed or a record failed to decode.
#[derive(Debug)]
pub enum IterError {
    // Underlying I/O failure (std::io::Error).
    IO(Error),
    // bincode deserialization failure.
    Decode(DecodingError),
}
// Allow `?` on std::io results.
impl From<Error> for IterError {
    fn from(e: Error) -> Self {
        IterError::IO(e)
    }
}
// Allow `?` on bincode decode results.
impl From<DecodingError> for IterError {
    fn from(e: DecodingError) -> Self {
        IterError::Decode(e)
    }
}
|
mod nand;
mod not;
mod and;
mod or;
mod xor;
mod nor;
pub use self::nand::NANDGate;
pub use self::not::NOTGate;
pub use self::and::ANDGate;
pub use self::or::ORGate;
pub use self::xor::XORGate;
pub use self::nor::NORGate;
mod gates {}
|
use serde_bytes::{ByteBuf, Bytes};
use serde_derive::{Deserialize, Serialize};
use std::borrow::Cow;
/// Round-trip test struct covering every container type that
/// `#[serde(with = "serde_bytes")]` supports for efficient byte handling.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
struct Test<'a> {
    // Borrowed byte slice (zero-copy deserialization).
    #[serde(with = "serde_bytes")]
    slice: &'a [u8],
    // Owned byte vector.
    #[serde(with = "serde_bytes")]
    vec: Vec<u8>,
    // Borrowed serde_bytes wrapper type.
    #[serde(with = "serde_bytes")]
    bytes: &'a Bytes,
    // Owned serde_bytes wrapper type.
    #[serde(with = "serde_bytes")]
    byte_buf: ByteBuf,
    // Clone-on-write variants: borrow when possible, own when necessary.
    #[serde(with = "serde_bytes")]
    cow_slice: Cow<'a, [u8]>,
    #[serde(with = "serde_bytes")]
    cow_bytes: Cow<'a, Bytes>,
    // Boxed owned variants.
    #[serde(with = "serde_bytes")]
    boxed_slice: Box<[u8]>,
    #[serde(with = "serde_bytes")]
    boxed_bytes: Box<Bytes>,
}
// Serialize-only: the trailing unsized `[u8]` field makes `Dst` a
// dynamically sized type, so values exist only behind a pointer and the
// struct cannot implement Deserialize.
#[derive(Serialize)]
struct Dst {
    #[serde(with = "serde_bytes")]
    bytes: [u8],
}
|
//! Errors in leetcode-cli
use crate::cmds::{Command, DataCommand};
use colored::Colorize;
use std::{fmt, string::FromUtf8Error};
// fixme: use this_error
/// Error enum
pub enum Error {
    // No cached problem matched the query.
    MatchError,
    // Download of the named resource failed.
    DownloadError(String),
    // HTTP request failed (converted from reqwest::Error).
    NetworkError(String),
    // Parsing failed (json/toml/int conversions).
    ParseError(String),
    // Local cache / database failure (io and diesel errors).
    CacheError(String),
    FeatureError(String),
    // User script (e.g. Python plugin) failure.
    ScriptError(String),
    // Leetcode session cookies missing or expired.
    CookieError,
    // The problem requires a premium subscription.
    PremiumError,
    // openssl decryption failed.
    DecryptError,
    // Error already reported elsewhere; Debug prints nothing.
    SilentError,
    Utf8ParseError,
    NoneError,
    ChromeNotLogin,
    // Catch-all wrapper for anyhow errors.
    Anyhow(anyhow::Error),
}
impl std::fmt::Debug for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let e = "error:".bold().red();
match self {
Error::CacheError(s) => write!(f, "{} {}, please try again", e, s),
Error::CookieError => write!(
f,
"{} \
Your leetcode cookies seems expired, \
{} \
Either you can handwrite your `LEETCODE_SESSION` and `csrf` into `leetcode.toml`, \
more info please checkout this: \
https://github.com/clearloop/leetcode-cli/blob/master/README.md#cookies",
e,
"please make sure you have logined in leetcode.com with chrome. "
.yellow()
.bold(),
),
Error::PremiumError => write!(
f,
"{} \
Your leetcode account lacks a premium subscription, which the given problem requires.\n \
If this looks like a mistake, please open a new issue at: {}",
e,
"https://github.com/clearloop/leetcode-cli/".underline()),
Error::DownloadError(s) => write!(f, "{} Download {} failed, please try again", e, s),
Error::NetworkError(s) => write!(f, "{} {}, please try again", e, s),
Error::ParseError(s) => write!(f, "{} {}", e, s),
Error::FeatureError(s) => write!(f, "{} {}", e, s),
Error::MatchError => write!(f, "{} Nothing matches", e),
Error::DecryptError => write!(f, "{} openssl decrypt failed", e),
Error::ScriptError(s) => write!(f, "{} {}", e, s),
Error::SilentError => write!(f, ""),
Error::NoneError => write!(f,
"json from response parse failed, please open a new issue at: {}.",
"https://github.com/clearloop/leetcode-cli/".underline(),
),
Error::ChromeNotLogin => write!(f, "maybe you not login on the Chrome, you can login and retry."),
Error::Anyhow(e) => write!(f, "{} {}", e, e),
Error::Utf8ParseError => write!(f, "cannot parse utf8 from buff {}", e),
}
}
}
// network: HTTP failures become NetworkError with their message.
impl std::convert::From<reqwest::Error> for Error {
    fn from(err: reqwest::Error) -> Self {
        Error::NetworkError(err.to_string())
    }
}
// utf8 parse: the byte payload is discarded, only the fact is kept.
impl std::convert::From<FromUtf8Error> for Error {
    fn from(_err: FromUtf8Error) -> Self {
        Error::Utf8ParseError
    }
}
// nums: integer-parsing failures become ParseError.
impl std::convert::From<std::num::ParseIntError> for Error {
    fn from(err: std::num::ParseIntError) -> Self {
        Error::ParseError(err.to_string())
    }
}
// sql: a missing row usually means the local cache is stale, so print a
// hint plus the data-command usage and silence the error; any other diesel
// failure surfaces as CacheError.
impl std::convert::From<diesel::result::Error> for Error {
    fn from(err: diesel::result::Error) -> Self {
        match err {
            diesel::result::Error::NotFound => {
                println!("NotFound, you may update cache, and try it again\r\n");
                // Best effort: ignore failures while printing the usage text.
                DataCommand::usage().print_help().unwrap_or(());
                Error::SilentError
            }
            _ => Error::CacheError(err.to_string()),
        }
    }
}
// serde: JSON failures become ParseError.
impl std::convert::From<serde_json::error::Error> for Error {
    fn from(err: serde_json::Error) -> Self {
        Error::ParseError(err.to_string())
    }
}
// toml: deserialization failures get a long human-readable hint about the
// regenerated config file; debug builds additionally prepend the raw error.
impl std::convert::From<toml::de::Error> for Error {
    fn from(_err: toml::de::Error) -> Self {
        #[cfg(debug_assertions)]
        let err_msg = format!(
            "{}, {}{}{}{}{}{}",
            _err,
            "Parse config file failed, ",
            "leetcode-cli has just generated a new leetcode.toml at ",
            "~/.leetcode/leetcode.tmp.toml,".green().bold().underline(),
            " the current one at ",
            "~/.leetcode/leetcode.toml".yellow().bold().underline(),
            " seems missing some keys, Please compare the new file and add the missing keys.\n",
        );
        #[cfg(not(debug_assertions))]
        let err_msg = format!(
            "{}{}{}{}{}{}",
            "Parse config file failed, ",
            "leetcode-cli has just generated a new leetcode.toml at ",
            "~/.leetcode/leetcode_tmp.toml,".green().bold().underline(),
            " the current one at ",
            "~/.leetcode/leetcode.toml".yellow().bold().underline(),
            " seems missing some keys, Please compare the new file and add the missing keys.\n",
        );
        // NOTE(review): debug mentions "leetcode.tmp.toml" while release says
        // "leetcode_tmp.toml" — confirm which filename is actually generated.
        Error::ParseError(err_msg.trim_start().into())
    }
}
// toml serialization failures become ParseError too.
impl std::convert::From<toml::ser::Error> for Error {
    fn from(err: toml::ser::Error) -> Self {
        Error::ParseError(err.to_string())
    }
}
// io: filesystem failures are treated as cache failures.
impl std::convert::From<std::io::Error> for Error {
    fn from(err: std::io::Error) -> Self {
        Error::CacheError(err.to_string())
    }
}
// openssl: any decryption stack error collapses into DecryptError.
impl std::convert::From<openssl::error::ErrorStack> for Error {
    fn from(_: openssl::error::ErrorStack) -> Self {
        Error::DecryptError
    }
}
// Catch-all for anyhow-based call sites.
impl From<anyhow::Error> for Error {
    fn from(err: anyhow::Error) -> Self {
        Error::Anyhow(err)
    }
}
// pyo3: only compiled when the "pym" (Python module) feature is enabled.
#[cfg(feature = "pym")]
impl std::convert::From<pyo3::PyErr> for Error {
    fn from(_: pyo3::PyErr) -> Self {
        Error::ScriptError("Python script went Error".to_string())
    }
}
|
use crate::FifoWindow;
use alga::general::AbstractMonoid;
use alga::general::Operator;
use std::marker::PhantomData;
/// One stack slot: the element itself (`val`) plus the cached aggregate
/// (`agg`) of this element combined with everything beneath it in its stack.
#[derive(Debug)]
struct Item<Value: Clone> {
    agg: Value,
    val: Value,
}
impl<Value: Clone> Item<Value> {
    fn new(agg: Value, val: Value) -> Item<Value> {
        Item { agg, val }
    }
}
/// Sliding-window aggregator built from two stacks: `back` receives pushes,
/// `front` serves pops, and each slot caches a running aggregate so `query`
/// only has to combine the two stack tops.
#[derive(Debug)]
pub struct TwoStacks<Value, BinOp>
where
    Value: AbstractMonoid<BinOp> + Clone,
    BinOp: Operator,
{
    front: Vec<Item<Value>>,
    back: Vec<Item<Value>>,
    // Marks the binary-operator type parameter; holds no runtime data.
    op: PhantomData<BinOp>,
}
impl<Value, BinOp> FifoWindow<Value, BinOp> for TwoStacks<Value, BinOp>
where
    Value: AbstractMonoid<BinOp> + Clone,
    BinOp: Operator,
{
    fn new() -> TwoStacks<Value, BinOp> {
        TwoStacks {
            front: Vec::new(),
            back: Vec::new(),
            op: PhantomData,
        }
    }
    // Amortized O(1): stores v with the aggregate of (whole back stack, v).
    fn push(&mut self, v: Value) {
        self.back
            .push(Item::new(Self::agg(&self.back).operate(&v), v));
    }
    // Amortized O(1): each element is flipped from back to front at most once.
    fn pop(&mut self) {
        if self.front.is_empty() {
            // Flip: drain the back stack into the front stack, rebuilding the
            // cached aggregates in the reversed (FIFO) order.
            while let Some(top) = self.back.pop() {
                self.front
                    .push(Item::new(top.val.operate(&Self::agg(&self.front)), top.val))
            }
        }
        self.front.pop();
    }
    // Aggregate of the whole window: front combined with back.
    fn query(&self) -> Value {
        Self::agg(&self.front).operate(&Self::agg(&self.back))
    }
}
impl<T, O> TwoStacks<T, O>
where
    T: AbstractMonoid<O> + Clone,
    O: Operator,
{
    /// Returns the running aggregate cached at the top of `stack`, or the
    /// monoid identity when the stack is empty.
    // Takes a slice instead of `&Vec` (clippy::ptr_arg); existing callers
    // coerce via deref so no call sites change.
    #[inline(always)]
    fn agg(stack: &[Item<T>]) -> T {
        match stack.last() {
            Some(top) => top.agg.clone(),
            None => T::identity(),
        }
    }
}
|
use std::mem;
// Field-layout demo: with the default repr(Rust) the compiler may reorder
// fields, so this u8/u16/u8 struct packs into 4 bytes with no padding.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
struct TestRead {
    a: u8,
    b: u16,
    c: u8,
}
#[test]
fn mem_size_test() {
    assert_eq!(4, mem::size_of::<i32>());
    // Struct size: u8 + u16 + u8 = 1 + 2 + 1 = 4 (rustc may reorder fields,
    // so no alignment padding is needed here).
    assert_eq!(4, mem::size_of::<TestRead>());
    let orig = TestRead {
        a: 0x05,
        b: 0x1006,
        c: 0x07,
    };
    // size_of_val of a struct value equals the size of its type.
    assert_eq!(4, mem::size_of_val(&orig));
    let v = vec![1_i32, 2, 3, 4, 5];
    // For collections, `&*v` derefs to the slice so size_of_val measures the
    // element storage (5 * 4 bytes), not the Vec header.
    assert_eq!(20, mem::size_of_val(&*v));
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::schema::pending_transaction_schema::PendingTransactionSchema;
use crate::schema_db::SchemaDB;
use anyhow::Result;
use schemadb::SchemaBatch;
use sgtypes::pending_txn::PendingTransaction;
/// Persistence layer for the (single) pending transaction, backed by a
/// schema database handle of type `S`.
#[derive(Debug, Clone)]
pub struct PendingTxnStore<S> {
    db: S,
}
impl<S> PendingTxnStore<S> {
    /// Wraps a database handle; no schema operations happen here.
    pub fn new(db: S) -> Self {
        Self { db }
    }
}
impl<S> PendingTxnStore<S>
where
    S: SchemaDB,
{
    // Single well-known key: at most one pending transaction is stored.
    const PENDING_TXN_KEY: &'static str = "pending";
    /// Reads the pending transaction, if one is stored.
    pub fn get_pending_txn(&self) -> Result<Option<PendingTransaction>> {
        self.db
            .get::<PendingTransactionSchema>(&Self::PENDING_TXN_KEY.to_string())
    }
    /// Queues deletion of the pending transaction into `write_batch`;
    /// nothing is applied until the batch is committed.
    pub fn clear(&self, write_batch: &mut SchemaBatch) -> Result<()> {
        write_batch.delete::<PendingTransactionSchema>(&Self::PENDING_TXN_KEY.to_string())
    }
    /// Queues an upsert of `pending_txn` into `write_batch`.
    pub fn save_pending_txn(
        &self,
        pending_txn: &PendingTransaction,
        write_batch: &mut SchemaBatch,
    ) -> Result<()> {
        write_batch.put::<PendingTransactionSchema>(&Self::PENDING_TXN_KEY.to_string(), pending_txn)
    }
}
|
extern crate testing;
fn main() {
    // Smoke test for the external `testing` crate: doubling 2 yields 4.
    assert_eq!(4, testing::mult_two(2));
}
|
use regex::Regex;
use std::collections::{BTreeMap, HashSet};
// Puzzle input: the initial pot row, '#' = plant, '.' = empty pot.
const INITIAL_STATE: &str = "#.#..#..###.###.#..###.#####...########.#...#####...##.#....#.####.#.#..#..#.#..###...#..#.#....##";
//const INITIAL_STATE: &str = "#..#.#..##......###...###";
// 2^5: each growth rule covers a 5-pot neighborhood, encoded as 5 bits.
const FIVE_DIGITS: usize = 1 << 5;
// Part 2 target: the generation count to extrapolate to (fifty billion).
const FINAL_GENERATION: i64 = 50000000000;
// Renders one generation of the map-based state: one '#'/'.' per pot with
// pot 0 bracketed, plus the sum of plant-bearing pot numbers.
// Perf fix: iterates by reference instead of cloning the whole BTreeMap
// on every call; output is unchanged.
fn print_state(state: &BTreeMap<i32, bool>, generation: usize, leftmost: i32) {
    let mut result = 0;
    let mut debug = "".to_string();
    for (&n, &plant) in state {
        if n == 0 {
            debug.push('[');
        }
        if plant {
            debug.push('#');
            result += n;
        } else {
            debug.push('.');
        }
        if n == 0 {
            debug.push(']');
        }
    }
    println!("{} ({}/{}): {}", generation, leftmost, result, debug);
}
// Renders one generation of the Vec-based state (pot numbers start at
// `leftmost`), same format as `print_state`.
// Perf fix: takes `&[bool]` and iterates by copy instead of cloning the
// whole Vec each call; existing `&Vec<bool>` callers coerce via deref.
fn print_state_vec(state: &[bool], generation: usize, leftmost: i64) {
    let mut result = 0;
    let mut debug = "".to_string();
    for (n, plant) in (leftmost..).zip(state.iter().copied()) {
        if n == 0 {
            debug.push('[');
        }
        if plant {
            debug.push('#');
            result += n;
        } else {
            debug.push('.');
        }
        if n == 0 {
            debug.push(']');
        }
    }
    println!("{} ({}/{}): {}", generation, leftmost, result, debug);
}
// Prints `number` in base 2 using '#' for 1-bits and '.' for 0-bits,
// most significant bit first; an input of 0 prints an empty pattern.
fn print_digits(prefix: &str, number: u64) {
    let mut remaining = number;
    let mut rendered = String::new();
    while remaining > 0 {
        let glyph = if remaining % 2 == 1 { '#' } else { '.' };
        rendered.insert(0, glyph);
        remaining >>= 1;
    }
    println!("{}: {}", prefix, rendered);
}
// Part 1: simulate 20 generations over a BTreeMap of pot-number -> has-plant
// and print the sum of pot numbers that contain a plant.
pub fn run1(filename: &String) {
    let mut plants = BTreeMap::<i32, bool>::new();
    let mut generators = HashSet::<usize>::new();
    let mut leftmost: i32 = 0;
    let mut rightmost: i32 = 0;
    // Seed the map from the hard-coded initial state.
    for (n, ch) in INITIAL_STATE.chars().enumerate() {
        match ch {
            '#' => plants.insert(n as i32, true),
            _ => plants.insert(n as i32, false),
        };
        rightmost = n as i32;
    }
    lazy_static! {
        static ref line_re: Regex = Regex::new(r"([\.#]{5}) => ([\.#])").unwrap();
    };
    // Parse rule lines; keep only plant-producing rules, each encoded as the
    // 5-bit integer of its pattern.
    let lines = super::input::read_lines(filename.to_string());
    for line in lines {
        let captures = line_re.captures(line.as_str()).unwrap();
        let pattern = captures.get(1).unwrap().as_str();
        let result = captures.get(2).unwrap().as_str();
        if result == "." {
            continue;
        }
        let mut generator = 0;
        for ch in pattern.chars() {
            generator <<= 1;
            match ch {
                '#' => generator += 1,
                _ => {}
            }
        }
        generators.insert(generator);
    }
    print_state(&plants, 0, leftmost);
    let mut result = 0;
    for (n, plant) in plants.clone() {
        if plant {
            result += n;
        }
    }
    println!("current total: {}", result);
    for generation in 1..=20 {
        let mut new_plants = BTreeMap::<i32, bool>::new();
        // Slide a 5-bit window across the row, padded 3 pots beyond each edge.
        let mut last_five: usize = 0;
        for n in (leftmost as i32 - 3)..=(rightmost as i32 + 3) {
            last_five <<= 1;
            last_five %= FIVE_DIGITS;
            let mut plant = false;
            if n >= leftmost && n <= rightmost {
                plant = *plants.get(&n).unwrap();
            }
            if plant {
                last_five += 1;
            }
            // The produced pot sits two positions behind the scan position.
            let new_plant = n - 2;
            if generators.contains(&last_five) {
                new_plants.insert(new_plant, true);
                // Grow the tracked range as plants spread outward.
                if new_plant < leftmost {
                    leftmost = new_plant;
                } else if new_plant > rightmost {
                    rightmost = new_plant;
                }
            } else if new_plant >= leftmost && new_plant <= rightmost {
                new_plants.insert(new_plant, false);
            }
        }
        print_state(&new_plants, generation, leftmost);
        let mut result = 0;
        // NOTE(review): this per-generation total is computed from the
        // *previous* generation (`plants`), not `new_plants` — confirm that
        // is intended.
        for (n, plant) in plants {
            if plant {
                result += n;
            }
        }
        println!("current total: {}", result);
        plants = new_plants;
    }
    let mut result = 0;
    for (n, plant) in plants {
        if plant {
            result += n;
        }
    }
    println!("result: {}", result);
}
// Part 2: fifty billion generations is too many to simulate directly, but
// the pattern eventually stabilizes and only translates sideways each step.
// Simulate until the plant pattern repeats exactly, then extrapolate the
// final offset linearly from the per-generation shift.
pub fn run2(filename: &String) {
    let mut plants = Vec::<bool>::new();
    let mut generators = HashSet::<usize>::new();
    // Pot number of plants[0]; the vector's origin slides as plants spread.
    let mut leftmost: i64 = 0;
    for ch in INITIAL_STATE.chars() {
        match ch {
            '#' => plants.push(true),
            _ => plants.push(false),
        };
    }
    lazy_static! {
        static ref line_re: Regex = Regex::new(r"([\.#]{5}) => ([\.#])").unwrap();
    };
    // Keep only plant-producing rules, encoded as 5-bit integers.
    let lines = super::input::read_lines(filename.to_string());
    for line in lines {
        let captures = line_re.captures(line.as_str()).unwrap();
        let pattern = captures.get(1).unwrap().as_str();
        let result = captures.get(2).unwrap().as_str();
        if result == "." {
            continue;
        }
        let mut generator = 0;
        for ch in pattern.chars() {
            generator <<= 1;
            match ch {
                '#' => generator += 1,
                _ => {}
            }
        }
        generators.insert(generator);
    }
    let mut generation = 1;
    let mut leftmost_delta;
    loop {
        let mut last_five: usize = 0;
        let mut new_plants = Vec::<bool>::new();
        let rightmost = plants.len() as i64 + leftmost;
        let mut new_leftmost = leftmost;
        let mut found_first = false;
        // Slide the 5-bit rule window across the row, padded at both ends.
        for n in (leftmost - 3)..(rightmost + 3) {
            last_five <<= 1;
            last_five %= FIVE_DIGITS;
            let mut plant = false;
            if n >= leftmost && n < rightmost {
                plant = plants[(n - leftmost) as usize];
            }
            if plant {
                last_five += 1;
            }
            // The produced pot sits two positions behind the scan position.
            let i = n - 2;
            if generators.contains(&last_five) {
                new_plants.push(true);
                if !found_first {
                    // The first plant of the new generation fixes the origin.
                    new_leftmost = i;
                    found_first = true;
                }
            } else if found_first && i < rightmost {
                new_plants.push(false);
            }
        }
        leftmost_delta = new_leftmost - leftmost;
        leftmost = new_leftmost;
        // Steady state reached: every later generation is the same pattern
        // shifted by `leftmost_delta` pots.
        if new_plants == plants {
            break;
        }
        plants = new_plants;
        print_state_vec(&plants, generation, leftmost);
        generation += 1;
    }
    // Jump ahead: apply the constant shift for all remaining generations.
    let generation_delta: i64 = FINAL_GENERATION - generation as i64;
    leftmost = leftmost + (generation_delta * leftmost_delta);
    let mut result = 0;
    for (n, plant) in (leftmost..).zip(plants) {
        if plant {
            result += n;
        }
    }
    println!("result: {}", result);
}
|
pub const WORDLIST: &'static [&'static str] = &[
"abbinare",
"abbonato",
"abisso",
"abitare",
"abominio",
"accadere",
"accesso",
"acciaio",
"accordo",
"accumulo",
"acido",
"acqua",
"acrobata",
"acustico",
"adattare",
"addetto",
"addio",
"addome",
"adeguato",
"aderire",
"adorare",
"adottare",
"adozione",
"adulto",
"aereo",
"aerobica",
"affare",
"affetto",
"affidare",
"affogato",
"affronto",
"africano",
"afrodite",
"agenzia",
"aggancio",
"aggeggio",
"aggiunta",
"agio",
"agire",
"agitare",
"aglio",
"agnello",
"agosto",
"aiutare",
"albero",
"albo",
"alce",
"alchimia",
"alcool",
"alfabeto",
"algebra",
"alimento",
"allarme",
"alleanza",
"allievo",
"alloggio",
"alluce",
"alpi",
"alterare",
"altro",
"aluminio",
"amante",
"amarezza",
"ambiente",
"ambrosia",
"america",
"amico",
"ammalare",
"ammirare",
"amnesia",
"amnistia",
"amore",
"ampliare",
"amputare",
"analisi",
"anamnesi",
"ananas",
"anarchia",
"anatra",
"anca",
"ancorato",
"andare",
"androide",
"aneddoto",
"anello",
"angelo",
"angolino",
"anguilla",
"anidride",
"anima",
"annegare",
"anno",
"annuncio",
"anomalia",
"antenna",
"anticipo",
"aperto",
"apostolo",
"appalto",
"appello",
"appiglio",
"applauso",
"appoggio",
"appurare",
"aprile",
"aquila",
"arabo",
"arachidi",
"aragosta",
"arancia",
"arbitrio",
"archivio",
"arco",
"argento",
"argilla",
"aria",
"ariete",
"arma",
"armonia",
"aroma",
"arrivare",
"arrosto",
"arsenale",
"arte",
"artiglio",
"asfalto",
"asfissia",
"asino",
"asparagi",
"aspirina",
"assalire",
"assegno",
"assolto",
"assurdo",
"asta",
"astratto",
"atlante",
"atletica",
"atomo",
"atropina",
"attacco",
"attesa",
"attico",
"atto",
"attrarre",
"auguri",
"aula",
"aumento",
"aurora",
"auspicio",
"autista",
"auto",
"autunno",
"avanzare",
"avarizia",
"avere",
"aviatore",
"avido",
"avorio",
"avvenire",
"avviso",
"avvocato",
"azienda",
"azione",
"azzardo",
"azzurro",
"babbuino",
"bacio",
"badante",
"baffi",
"bagaglio",
"bagliore",
"bagno",
"balcone",
"balena",
"ballare",
"balordo",
"balsamo",
"bambola",
"bancomat",
"banda",
"barato",
"barba",
"barista",
"barriera",
"basette",
"basilico",
"bassista",
"bastare",
"battello",
"bavaglio",
"beccare",
"beduino",
"bellezza",
"bene",
"benzina",
"berretto",
"bestia",
"bevitore",
"bianco",
"bibbia",
"biberon",
"bibita",
"bici",
"bidone",
"bilancia",
"biliardo",
"binario",
"binocolo",
"biologia",
"biondina",
"biopsia",
"biossido",
"birbante",
"birra",
"biscotto",
"bisogno",
"bistecca",
"bivio",
"blindare",
"bloccare",
"bocca",
"bollire",
"bombola",
"bonifico",
"borghese",
"borsa",
"bottino",
"botulino",
"braccio",
"bradipo",
"branco",
"bravo",
"bresaola",
"bretelle",
"brevetto",
"briciola",
"brigante",
"brillare",
"brindare",
"brivido",
"broccoli",
"brontolo",
"bruciare",
"brufolo",
"bucare",
"buddista",
"budino",
"bufera",
"buffo",
"bugiardo",
"buio",
"buono",
"burrone",
"bussola",
"bustina",
"buttare",
"cabernet",
"cabina",
"cacao",
"cacciare",
"cactus",
"cadavere",
"caffe",
"calamari",
"calcio",
"caldaia",
"calmare",
"calunnia",
"calvario",
"calzone",
"cambiare",
"camera",
"camion",
"cammello",
"campana",
"canarino",
"cancello",
"candore",
"cane",
"canguro",
"cannone",
"canoa",
"cantare",
"canzone",
"caos",
"capanna",
"capello",
"capire",
"capo",
"capperi",
"capra",
"capsula",
"caraffa",
"carbone",
"carciofo",
"cardigan",
"carenza",
"caricare",
"carota",
"carrello",
"carta",
"casa",
"cascare",
"caserma",
"cashmere",
"casino",
"cassetta",
"castello",
"catalogo",
"catena",
"catorcio",
"cattivo",
"causa",
"cauzione",
"cavallo",
"caverna",
"caviglia",
"cavo",
"cazzotto",
"celibato",
"cemento",
"cenare",
"centrale",
"ceramica",
"cercare",
"ceretta",
"cerniera",
"certezza",
"cervello",
"cessione",
"cestino",
"cetriolo",
"chiave",
"chiedere",
"chilo",
"chimera",
"chiodo",
"chirurgo",
"chitarra",
"chiudere",
"ciabatta",
"ciao",
"cibo",
"ciccia",
"cicerone",
"ciclone",
"cicogna",
"cielo",
"cifra",
"cigno",
"ciliegia",
"cimitero",
"cinema",
"cinque",
"cintura",
"ciondolo",
"ciotola",
"cipolla",
"cippato",
"circuito",
"cisterna",
"citofono",
"ciuccio",
"civetta",
"civico",
"clausola",
"cliente",
"clima",
"clinica",
"cobra",
"coccole",
"cocktail",
"cocomero",
"codice",
"coesione",
"cogliere",
"cognome",
"colla",
"colomba",
"colpire",
"coltello",
"comando",
"comitato",
"commedia",
"comodino",
"compagna",
"comune",
"concerto",
"condotto",
"conforto",
"congiura",
"coniglio",
"consegna",
"conto",
"convegno",
"coperta",
"copia",
"coprire",
"corazza",
"corda",
"corleone",
"cornice",
"corona",
"corpo",
"corrente",
"corsa",
"cortesia",
"corvo",
"coso",
"costume",
"cotone",
"cottura",
"cozza",
"crampo",
"cratere",
"cravatta",
"creare",
"credere",
"crema",
"crescere",
"crimine",
"criterio",
"croce",
"crollare",
"cronaca",
"crostata",
"croupier",
"cubetto",
"cucciolo",
"cucina",
"cultura",
"cuoco",
"cuore",
"cupido",
"cupola",
"cura",
"curva",
"cuscino",
"custode",
"danzare",
"data",
"decennio",
"decidere",
"decollo",
"dedicare",
"dedurre",
"definire",
"delegare",
"delfino",
"delitto",
"demone",
"dentista",
"denuncia",
"deposito",
"derivare",
"deserto",
"designer",
"destino",
"detonare",
"dettagli",
"diagnosi",
"dialogo",
"diamante",
"diario",
"diavolo",
"dicembre",
"difesa",
"digerire",
"digitare",
"diluvio",
"dinamica",
"dipinto",
"diploma",
"diramare",
"dire",
"dirigere",
"dirupo",
"discesa",
"disdetta",
"disegno",
"disporre",
"dissenso",
"distacco",
"dito",
"ditta",
"diva",
"divenire",
"dividere",
"divorare",
"docente",
"dolcetto",
"dolore",
"domatore",
"domenica",
"dominare",
"donatore",
"donna",
"dorato",
"dormire",
"dorso",
"dosaggio",
"dottore",
"dovere",
"download",
"dragone",
"dramma",
"dubbio",
"dubitare",
"duetto",
"durata",
"ebbrezza",
"eccesso",
"eccitare",
"eclissi",
"economia",
"edera",
"edificio",
"editore",
"edizione",
"educare",
"effetto",
"egitto",
"egiziano",
"elastico",
"elefante",
"eleggere",
"elemento",
"elenco",
"elezione",
"elmetto",
"elogio",
"embrione",
"emergere",
"emettere",
"eminenza",
"emisfero",
"emozione",
"empatia",
"energia",
"enfasi",
"enigma",
"entrare",
"enzima",
"epidemia",
"epilogo",
"episodio",
"epoca",
"equivoco",
"erba",
"erede",
"eroe",
"erotico",
"errore",
"eruzione",
"esaltare",
"esame",
"esaudire",
"eseguire",
"esempio",
"esigere",
"esistere",
"esito",
"esperto",
"espresso",
"essere",
"estasi",
"esterno",
"estrarre",
"eterno",
"etica",
"euforico",
"europa",
"evacuare",
"evasione",
"evento",
"evidenza",
"evitare",
"evolvere",
"fabbrica",
"facciata",
"fagiano",
"fagotto",
"falco",
"fame",
"famiglia",
"fanale",
"fango",
"fantasia",
"farfalla",
"farmacia",
"faro",
"fase",
"fastidio",
"faticare",
"fatto",
"favola",
"febbre",
"femmina",
"femore",
"fenomeno",
"fermata",
"feromoni",
"ferrari",
"fessura",
"festa",
"fiaba",
"fiamma",
"fianco",
"fiat",
"fibbia",
"fidare",
"fieno",
"figa",
"figlio",
"figura",
"filetto",
"filmato",
"filosofo",
"filtrare",
"finanza",
"finestra",
"fingere",
"finire",
"finta",
"finzione",
"fiocco",
"fioraio",
"firewall",
"firmare",
"fisico",
"fissare",
"fittizio",
"fiume",
"flacone",
"flagello",
"flirtare",
"flusso",
"focaccia",
"foglio",
"fognario",
"follia",
"fonderia",
"fontana",
"forbici",
"forcella",
"foresta",
"forgiare",
"formare",
"fornace",
"foro",
"fortuna",
"forzare",
"fosforo",
"fotoni",
"fracasso",
"fragola",
"frantumi",
"fratello",
"frazione",
"freccia",
"freddo",
"frenare",
"fresco",
"friggere",
"frittata",
"frivolo",
"frizione",
"fronte",
"frullato",
"frumento",
"frusta",
"frutto",
"fucile",
"fuggire",
"fulmine",
"fumare",
"funzione",
"fuoco",
"furbizia",
"furgone",
"furia",
"furore",
"fusibile",
"fuso",
"futuro",
"gabbiano",
"galassia",
"gallina",
"gamba",
"gancio",
"garanzia",
"garofano",
"gasolio",
"gatto",
"gazebo",
"gazzetta",
"gelato",
"gemelli",
"generare",
"genitori",
"gennaio",
"geologia",
"germania",
"gestire",
"gettare",
"ghepardo",
"ghiaccio",
"giaccone",
"giaguaro",
"giallo",
"giappone",
"giardino",
"gigante",
"gioco",
"gioiello",
"giorno",
"giovane",
"giraffa",
"giudizio",
"giurare",
"giusto",
"globo",
"gloria",
"glucosio",
"gnocca",
"gocciola",
"godere",
"gomito",
"gomma",
"gonfiare",
"gorilla",
"governo",
"gradire",
"graffiti",
"granchio",
"grappolo",
"grasso",
"grattare",
"gridare",
"grissino",
"grondaia",
"grugnito",
"gruppo",
"guadagno",
"guaio",
"guancia",
"guardare",
"gufo",
"guidare",
"guscio",
"gusto",
"icona",
"idea",
"identico",
"idolo",
"idoneo",
"idrante",
"idrogeno",
"igiene",
"ignoto",
"imbarco",
"immagine",
"immobile",
"imparare",
"impedire",
"impianto",
"importo",
"impresa",
"impulso",
"incanto",
"incendio",
"incidere",
"incontro",
"incrocia",
"incubo",
"indagare",
"indice",
"indotto",
"infanzia",
"inferno",
"infinito",
"infranto",
"ingerire",
"inglese",
"ingoiare",
"ingresso",
"iniziare",
"innesco",
"insalata",
"inserire",
"insicuro",
"insonnia",
"insulto",
"interno",
"introiti",
"invasori",
"inverno",
"invito",
"invocare",
"ipnosi",
"ipocrita",
"ipotesi",
"ironia",
"irrigare",
"iscritto",
"isola",
"ispirare",
"isterico",
"istinto",
"istruire",
"italiano",
"jazz",
"labbra",
"labrador",
"ladro",
"lago",
"lamento",
"lampone",
"lancetta",
"lanterna",
"lapide",
"larva",
"lasagne",
"lasciare",
"lastra",
"latte",
"laurea",
"lavagna",
"lavorare",
"leccare",
"legare",
"leggere",
"lenzuolo",
"leone",
"lepre",
"letargo",
"lettera",
"levare",
"levitare",
"lezione",
"liberare",
"libidine",
"libro",
"licenza",
"lievito",
"limite",
"lince",
"lingua",
"liquore",
"lire",
"listino",
"litigare",
"litro",
"locale",
"lottare",
"lucciola",
"lucidare",
"luglio",
"luna",
"macchina",
"madama",
"madre",
"maestro",
"maggio",
"magico",
"maglione",
"magnolia",
"mago",
"maialino",
"maionese",
"malattia",
"male",
"malloppo",
"mancare",
"mandorla",
"mangiare",
"manico",
"manopola",
"mansarda",
"mantello",
"manubrio",
"manzo",
"mappa",
"mare",
"margine",
"marinaio",
"marmotta",
"marocco",
"martello",
"marzo",
"maschera",
"matrice",
"maturare",
"mazzetta",
"meandri",
"medaglia",
"medico",
"medusa",
"megafono",
"melone",
"membrana",
"menta",
"mercato",
"meritare",
"merluzzo",
"mese",
"mestiere",
"metafora",
"meteo",
"metodo",
"mettere",
"miele",
"miglio",
"miliardo",
"mimetica",
"minatore",
"minuto",
"miracolo",
"mirtillo",
"missile",
"mistero",
"misura",
"mito",
"mobile",
"moda",
"moderare",
"moglie",
"molecola",
"molle",
"momento",
"moneta",
"mongolia",
"monologo",
"montagna",
"morale",
"morbillo",
"mordere",
"mosaico",
"mosca",
"mostro",
"motivare",
"moto",
"mulino",
"mulo",
"muovere",
"muraglia",
"muscolo",
"museo",
"musica",
"mutande",
"nascere",
"nastro",
"natale",
"natura",
"nave",
"navigare",
"negare",
"negozio",
"nemico",
"nero",
"nervo",
"nessuno",
"nettare",
"neutroni",
"neve",
"nevicare",
"nicotina",
"nido",
"nipote",
"nocciola",
"noleggio",
"nome",
"nonno",
"norvegia",
"notare",
"notizia",
"nove",
"nucleo",
"nuda",
"nuotare",
"nutrire",
"obbligo",
"occhio",
"occupare",
"oceano",
"odissea",
"odore",
"offerta",
"officina",
"offrire",
"oggetto",
"oggi",
"olfatto",
"olio",
"oliva",
"ombelico",
"ombrello",
"omuncolo",
"ondata",
"onore",
"opera",
"opinione",
"opuscolo",
"opzione",
"orario",
"orbita",
"orchidea",
"ordine",
"orecchio",
"orgasmo",
"orgoglio",
"origine",
"orologio",
"oroscopo",
"orso",
"oscurare",
"ospedale",
"ospite",
"ossigeno",
"ostacolo",
"ostriche",
"ottenere",
"ottimo",
"ottobre",
"ovest",
"pacco",
"pace",
"pacifico",
"padella",
"pagare",
"pagina",
"pagnotta",
"palazzo",
"palestra",
"palpebre",
"pancetta",
"panfilo",
"panino",
"pannello",
"panorama",
"papa",
"paperino",
"paradiso",
"parcella",
"parente",
"parlare",
"parodia",
"parrucca",
"partire",
"passare",
"pasta",
"patata",
"patente",
"patogeno",
"patriota",
"pausa",
"pazienza",
"peccare",
"pecora",
"pedalare",
"pelare",
"pena",
"pendenza",
"penisola",
"pennello",
"pensare",
"pentirsi",
"percorso",
"perdono",
"perfetto",
"perizoma",
"perla",
"permesso",
"persona",
"pesare",
"pesce",
"peso",
"petardo",
"petrolio",
"pezzo",
"piacere",
"pianeta",
"piastra",
"piatto",
"piazza",
"piccolo",
"piede",
"piegare",
"pietra",
"pigiama",
"pigliare",
"pigrizia",
"pilastro",
"pilota",
"pinguino",
"pioggia",
"piombo",
"pionieri",
"piovra",
"pipa",
"pirata",
"pirolisi",
"piscina",
"pisolino",
"pista",
"pitone",
"piumino",
"pizza",
"plastica",
"platino",
"poesia",
"poiana",
"polaroid",
"polenta",
"polimero",
"pollo",
"polmone",
"polpetta",
"poltrona",
"pomodoro",
"pompa",
"popolo",
"porco",
"porta",
"porzione",
"possesso",
"postino",
"potassio",
"potere",
"poverino",
"pranzo",
"prato",
"prefisso",
"prelievo",
"premio",
"prendere",
"prestare",
"pretesa",
"prezzo",
"primario",
"privacy",
"problema",
"processo",
"prodotto",
"profeta",
"progetto",
"promessa",
"pronto",
"proposta",
"proroga",
"prossimo",
"proteina",
"prova",
"prudenza",
"pubblico",
"pudore",
"pugilato",
"pulire",
"pulsante",
"puntare",
"pupazzo",
"puzzle",
"quaderno",
"qualcuno",
"quarzo",
"quercia",
"quintale",
"rabbia",
"racconto",
"radice",
"raffica",
"ragazza",
"ragione",
"rammento",
"ramo",
"rana",
"randagio",
"rapace",
"rapinare",
"rapporto",
"rasatura",
"ravioli",
"reagire",
"realista",
"reattore",
"reazione",
"recitare",
"recluso",
"record",
"recupero",
"redigere",
"regalare",
"regina",
"regola",
"relatore",
"reliquia",
"remare",
"rendere",
"reparto",
"resina",
"resto",
"rete",
"retorica",
"rettile",
"revocare",
"riaprire",
"ribadire",
"ribelle",
"ricambio",
"ricetta",
"richiamo",
"ricordo",
"ridurre",
"riempire",
"riferire",
"riflesso",
"righello",
"rilancio",
"rilevare",
"rilievo",
"rimanere",
"rimborso",
"rinforzo",
"rinuncia",
"riparo",
"ripetere",
"riposare",
"ripulire",
"risalita",
"riscatto",
"riserva",
"riso",
"rispetto",
"ritaglio",
"ritmo",
"ritorno",
"ritratto",
"rituale",
"riunione",
"riuscire",
"riva",
"robotica",
"rondine",
"rosa",
"rospo",
"rosso",
"rotonda",
"rotta",
"roulotte",
"rubare",
"rubrica",
"ruffiano",
"rumore",
"ruota",
"ruscello",
"sabbia",
"sacco",
"saggio",
"sale",
"salire",
"salmone",
"salto",
"salutare",
"salvia",
"sangue",
"sanzioni",
"sapere",
"sapienza",
"sarcasmo",
"sardine",
"sartoria",
"sbalzo",
"sbarcare",
"sberla",
"sborsare",
"scadenza",
"scafo",
"scala",
"scambio",
"scappare",
"scarpa",
"scatola",
"scelta",
"scena",
"sceriffo",
"scheggia",
"schiuma",
"sciarpa",
"scienza",
"scimmia",
"sciopero",
"scivolo",
"sclerare",
"scolpire",
"sconto",
"scopa",
"scordare",
"scossa",
"scrivere",
"scrupolo",
"scuderia",
"scultore",
"scuola",
"scusare",
"sdraiare",
"secolo",
"sedativo",
"sedere",
"sedia",
"segare",
"segreto",
"seguire",
"semaforo",
"seme",
"senape",
"seno",
"sentiero",
"separare",
"sepolcro",
"sequenza",
"serata",
"serpente",
"servizio",
"sesso",
"seta",
"settore",
"sfamare",
"sfera",
"sfidare",
"sfiorare",
"sfogare",
"sgabello",
"sicuro",
"siepe",
"sigaro",
"silenzio",
"silicone",
"simbiosi",
"simpatia",
"simulare",
"sinapsi",
"sindrome",
"sinergia",
"sinonimo",
"sintonia",
"sirena",
"siringa",
"sistema",
"sito",
"smalto",
"smentire",
"smontare",
"soccorso",
"socio",
"soffitto",
"software",
"soggetto",
"sogliola",
"sognare",
"soldi",
"sole",
"sollievo",
"solo",
"sommario",
"sondare",
"sonno",
"sorpresa",
"sorriso",
"sospiro",
"sostegno",
"sovrano",
"spaccare",
"spada",
"spagnolo",
"spalla",
"sparire",
"spavento",
"spazio",
"specchio",
"spedire",
"spegnere",
"spendere",
"speranza",
"spessore",
"spezzare",
"spiaggia",
"spiccare",
"spiegare",
"spiffero",
"spingere",
"sponda",
"sporcare",
"spostare",
"spremuta",
"spugna",
"spumante",
"spuntare",
"squadra",
"squillo",
"staccare",
"stadio",
"stagione",
"stallone",
"stampa",
"stancare",
"starnuto",
"statura",
"stella",
"stendere",
"sterzo",
"stilista",
"stimolo",
"stinco",
"stiva",
"stoffa",
"storia",
"strada",
"stregone",
"striscia",
"studiare",
"stufa",
"stupendo",
"subire",
"successo",
"sudare",
"suono",
"superare",
"supporto",
"surfista",
"sussurro",
"svelto",
"svenire",
"sviluppo",
"svolta",
"svuotare",
"tabacco",
"tabella",
"tabu",
"tacchino",
"tacere",
"taglio",
"talento",
"tangente",
"tappeto",
"tartufo",
"tassello",
"tastiera",
"tavolo",
"tazza",
"teatro",
"tedesco",
"telaio",
"telefono",
"tema",
"temere",
"tempo",
"tendenza",
"tenebre",
"tensione",
"tentare",
"teologia",
"teorema",
"termica",
"terrazzo",
"teschio",
"tesi",
"tesoro",
"tessera",
"testa",
"thriller",
"tifoso",
"tigre",
"timbrare",
"timido",
"tinta",
"tirare",
"tisana",
"titano",
"titolo",
"toccare",
"togliere",
"topolino",
"torcia",
"torrente",
"tovaglia",
"traffico",
"tragitto",
"training",
"tramonto",
"transito",
"trapezio",
"trasloco",
"trattore",
"trazione",
"treccia",
"tregua",
"treno",
"triciclo",
"tridente",
"trilogia",
"tromba",
"troncare",
"trota",
"trovare",
"trucco",
"tubo",
"tulipano",
"tumulto",
"tunisia",
"tuono",
"turista",
"tuta",
"tutelare",
"tutore",
"ubriaco",
"uccello",
"udienza",
"udito",
"uffa",
"umanoide",
"umore",
"unghia",
"unguento",
"unicorno",
"unione",
"universo",
"uomo",
"uragano",
"uranio",
"urlare",
"uscire",
"utente",
"utilizzo",
"vacanza",
"vacca",
"vaglio",
"vagonata",
"valle",
"valore",
"valutare",
"valvola",
"vampiro",
"vaniglia",
"vanto",
"vapore",
"variante",
"vasca",
"vaselina",
"vassoio",
"vedere",
"vegetale",
"veglia",
"veicolo",
"vela",
"veleno",
"velivolo",
"velluto",
"vendere",
"venerare",
"venire",
"vento",
"veranda",
"verbo",
"verdura",
"vergine",
"verifica",
"vernice",
"vero",
"verruca",
"versare",
"vertebra",
"vescica",
"vespaio",
"vestito",
"vesuvio",
"veterano",
"vetro",
"vetta",
"viadotto",
"viaggio",
"vibrare",
"vicenda",
"vichingo",
"vietare",
"vigilare",
"vigneto",
"villa",
"vincere",
"violino",
"vipera",
"virgola",
"virtuoso",
"visita",
"vita",
"vitello",
"vittima",
"vivavoce",
"vivere",
"viziato",
"voglia",
"volare",
"volpe",
"volto",
"volume",
"vongole",
"voragine",
"vortice",
"votare",
"vulcano",
"vuotare",
"zabaione",
"zaffiro",
"zainetto",
"zampa",
"zanzara",
"zattera",
"zavorra",
"zenzero",
"zero",
"zingaro",
"zittire",
"zoccolo",
"zolfo",
"zombie",
"zucchero",
];
|
use std::fs::File;
use std::io::{BufReader, BufRead, Result};
use std::collections::{HashMap};
/// Reads one integer per line from `numbers.txt`, prints the total of all
/// entries, then prints the first running sum that is reached twice while
/// cycling through the list.
fn main() -> Result<()> {
    let path = "numbers.txt";
    let input = File::open(path)?;
    let buffered = BufReader::new(input);
    let mut vector: Vec<i32> = vec![];
    for line in buffered.lines() {
        // Propagate read errors via `?` instead of panicking — `main`
        // already returns an io::Result.
        let string = line?;
        // A malformed line is still fatal (same panic as before).
        let number = string
            .trim()
            .parse::<i32>()
            .unwrap_or_else(|why| panic!("{:?}", why));
        vector.push(number);
    }
    let res1: i32 = vector.iter().sum();
    println!("{}", res1);
    let res2: i32 = get_first_repeated_sum(vector);
    println!("{}", res2);
    Ok(())
}
/// Returns the first running sum that occurs twice while cycling endlessly
/// through `vector` (Advent of Code 2018, day 1, part 2).
///
/// The starting sum of 0 counts as already seen, so e.g. `[+1, -1]` yields 0.
///
/// # Panics
/// Panics (`unreachable!`) if `vector` is empty; the previous version also
/// panicked (out-of-bounds index) in that case.
fn get_first_repeated_sum(vector: Vec<i32>) -> i32 {
    // HashMap used as a set of sums seen so far. (Fixes the previous
    // mangled `contains_key(&current_sum)` call, which did not compile.)
    let mut seen: HashMap<i32, bool> = HashMap::new();
    // The device starts at frequency 0, which counts as reached once.
    seen.insert(0, true);
    let mut current_sum: i32 = 0;
    // `cycle()` repeats the list forever, replacing the manual index
    // wrap-around of the original loop.
    for num in vector.iter().cycle() {
        current_sum += num;
        // `insert` returns the previous value: `Some` means this sum was
        // already recorded, i.e. it is the first repeat.
        if seen.insert(current_sum, true).is_some() {
            return current_sum;
        }
    }
    // Only reachable when `vector` is empty (cycle of nothing yields nothing).
    unreachable!("input list was empty")
}
|
// Copyright (c) 2017 The Noise-rs Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
// or http://opensource.org/licenses/MIT>, at your option. All files in the
// project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
use modules::NoiseModule;
use num_traits::Float;
/// Noise module that clamps the output value from the source module to a
/// range of values.
pub struct Clamp<'a, T: 'a, U: 'a> {
    /// The module whose output value is clamped.
    pub source: &'a NoiseModule<T, U>,
    /// Lower bound of the clamping range. Default is -1.0.
    pub lower_bound: U,
    /// Upper bound of the clamping range. Default is 1.0.
    pub upper_bound: U,
}
impl<'a, T, U> Clamp<'a, T, U>
    where U: Float,
{
    /// Wraps `source` with the default clamping range of [-1.0, 1.0].
    pub fn new(source: &'a NoiseModule<T, U>) -> Clamp<'a, T, U> {
        let one = U::one();
        Clamp {
            source: source,
            lower_bound: -one,
            upper_bound: one,
        }
    }

    /// Returns a copy of this module with the lower bound replaced.
    pub fn set_lower_bound(self, lower_bound: U) -> Clamp<'a, T, U> {
        Clamp { lower_bound: lower_bound, ..self }
    }

    /// Returns a copy of this module with the upper bound replaced.
    pub fn set_upper_bound(self, upper_bound: U) -> Clamp<'a, T, U> {
        Clamp { upper_bound: upper_bound, ..self }
    }

    /// Returns a copy of this module with both bounds replaced at once.
    pub fn set_bounds(self, lower_bound: U, upper_bound: U) -> Clamp<'a, T, U> {
        self.set_lower_bound(lower_bound).set_upper_bound(upper_bound)
    }
}
impl<'a, T, U> NoiseModule<T, U> for Clamp<'a, T, U>
    where U: Float,
{
    /// Samples the wrapped module and clamps its result into
    /// [`lower_bound`, `upper_bound`].
    fn get(&self, point: T) -> U {
        let value = self.source.get(point);
        // Explicit comparisons (instead of Float::min/max) so a NaN value
        // falls through unchanged, exactly like the original match arms.
        if value < self.lower_bound {
            self.lower_bound
        } else if value > self.upper_bound {
            self.upper_bound
        } else {
            value
        }
    }
}
|
use sqlx::MySqlPool;
use crate::db::entities::{Candidate, Vote};
use serenity::prelude::TypeMapKey;
use std::sync::Arc;
/// Thin wrapper around the MySQL connection pool for the voting database.
pub struct Db {
    // Connection pool shared by all query methods.
    pool: MySqlPool,
}
impl Db {
    /// Connects a new pool to the MySQL server at `url`.
    pub async fn new(url: &str) -> sqlx::Result<Self> {
        Ok(Self {
            pool: MySqlPool::connect(url).await?,
        })
    }

    /// Returns every candidate, ordered by id.
    pub async fn list_candidates(&self) -> sqlx::Result<Vec<Candidate>> {
        sqlx::query_as!(Candidate, "SELECT id, name FROM candidates ORDER BY id ASC")
            .fetch_all(&self.pool)
            .await
    }

    /// Replaces `user`'s ballot with the given ranked candidate ids.
    ///
    /// Runs inside a transaction so the delete and the inserts are atomic.
    pub async fn set_vote(&self, user: u64, votes: Vec<u32>) -> sqlx::Result<()> {
        let mut transaction = self.pool.begin().await?;
        //first, remove existing vote if any
        sqlx::query!("DELETE FROM votes WHERE user=?", user)
            .execute(&mut transaction)
            .await?;
        //now, add new votes; choice_number is 1-based.
        for (i, vote) in votes.iter().enumerate() {
            sqlx::query!(
                "INSERT INTO votes(user, option, choice_number) VALUES(?, ?, ?)",
                user,
                vote,
                (i + 1) as u32
            )
            .execute(&mut transaction)
            .await?;
        }
        transaction.commit().await?;
        Ok(())
    }

    /// Returns the candidate id of `user`'s `n`-th ranked choice, if cast.
    pub async fn get_nth_vote(&self, user: u64, n: u32) -> sqlx::Result<Option<u32>> {
        Ok(
            sqlx::query!("SELECT option FROM votes WHERE user=? AND choice_number=?", user, n)
                .fetch_optional(&self.pool)
                .await?
                .map(|r| r.option),
        )
    }

    /// Returns every first-choice vote.
    pub async fn get_1st_votes(&self) -> sqlx::Result<Vec<Vote>> {
        sqlx::query_as!(Vote, "SELECT user, option FROM votes WHERE choice_number=1")
            .fetch_all(&self.pool)
            .await
    }
}
pub mod entities{
pub struct Candidate{
pub id: u32,
pub name: String
}
pub struct Vote{
pub user: u64,
pub option: u32
}
}
// Lets a shared `Arc<Db>` be stored in serenity's TypeMap context data.
impl TypeMapKey for Db {
    type Value = Arc<Db>;
}
/// # Description
/// Concatenates two unsigned 8-bit integers into an unsigned 16-bit integer,
/// with `first` as the high byte and `second` as the low byte
///
/// # Arguments
/// * `first` - an unsigned 8-bit integer (high byte)
/// * `second` - an unsigned 8-bit integer (low byte)
///
/// # Example
/// let first = 0xab;
/// let second = 0xcd;
/// let together = convert_types::to_u16_block(&first, &second);
/// assert!(together == 0xabcd);
pub fn to_u16_block(first: &u8, second: &u8) -> u16 {
    let high = *first as u16;
    let low = *second as u16;
    (high << 8) | low
}
/// # Description
/// Concatenates two unsigned 16-bit integers into an unsigned 32-bit integer,
/// with `first` as the high half and `second` as the low half
///
/// # Arguments
/// * `first` - an unsigned 16-bit integer (high half)
/// * `second` - an unsigned 16-bit integer (low half)
///
/// # Example
/// let first = 0xabcd;
/// let second = 0x0123;
/// let together = convert_types::to_u32_block(&first, &second);
/// assert!(together == 0xabcd0123);
pub fn to_u32_block(first: &u16, second: &u16) -> u32 {
    let high = *first as u32;
    let low = *second as u32;
    (high << 16) | low
}
/// # Description
/// Packs a vector of unsigned 8-bit integers into a vector of unsigned
/// 16-bit integers, two source bytes per output element (first byte high,
/// second byte low). Panics if the input length is odd.
///
/// # Arguments
/// * `key` - a vector of unsigned 8-bit integers
///
/// # Example
/// let key = vec![0xab, 0xcd, 0xef, 0x01];
/// let key = convert_types::to_u16_vec(&key);
/// assert!(key == vec![0xabcd, 0xef01]);
pub fn to_u16_vec(key: &Vec<u8>) -> Vec<u16> {
    key.chunks(2)
        .map(|pair| ((pair[0] as u16) << 8) | pair[1] as u16)
        .collect()
}
/// Packs a vector of unsigned 16-bit integers into a vector of unsigned
/// 32-bit integers, two source values per output element (first value high,
/// second value low). Panics if the input length is odd.
/// # Arguments
/// * `key` - a vector of unsigned 16-bit integers
/// # Example
/// let key = vec![0xabcd, 0xef01];
/// let key = convert_types::to_u32_vec(&key);
/// assert!(key == vec![0xabcdef01]);
pub fn to_u32_vec(key: &Vec<u16>) -> Vec<u32> {
    key.chunks(2)
        .map(|pair| ((pair[0] as u32) << 16) | pair[1] as u32)
        .collect()
}
/// Packs the first eight bytes of `key` into an unsigned 64-bit integer,
/// big-endian (the first byte becomes the most significant).
/// # Arguments
/// * `key` - a vector of unsigned 8-bit integers; panics if it holds fewer
///   than 8 bytes. Bytes beyond the eighth are ignored.
/// # Example
/// let key = vec![0xab, 0xcd, 0xef, 0x01, 0x23, 0x45, 0x67, 0x89];
/// let key = convert_types::create_key_block(&key);
/// assert!(key == 0xabcdef0123456789);
pub fn create_key_block(key: &Vec<u8>) -> u64 {
    // Fold the bytes directly instead of going u8 -> u16 -> u32 -> u64:
    // this drops two intermediate Vec allocations and no longer panics for
    // keys whose unused tail has odd length.
    key[..8].iter().fold(0u64, |acc, &byte| (acc << 8) | byte as u64)
}
|
#[doc = "Register `MTLISR` reader"]
pub type R = crate::R<MTLISR_SPEC>;
#[doc = "Field `Q0IS` reader - Queue interrupt status"]
pub type Q0IS_R = crate::BitReader;
// Machine-generated register reader accessors (svd2rust style) for MTLISR.
impl R {
    #[doc = "Bit 0 - Queue interrupt status"]
    #[inline(always)]
    pub fn q0is(&self) -> Q0IS_R {
        // Extract bit 0 of the raw register value.
        Q0IS_R::new((self.bits & 1) != 0)
    }
}
#[doc = "Interrupt status Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mtlisr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MTLISR_SPEC;
// The register is 32 bits wide.
impl crate::RegisterSpec for MTLISR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`mtlisr::R`](R) reader structure"]
impl crate::Readable for MTLISR_SPEC {}
#[doc = "`reset()` method sets MTLISR to value 0"]
impl crate::Resettable for MTLISR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Copyright 2020 Shift Cryptosecurity AG
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate log;
use byteorder::BigEndian;
use byteorder::ReadBytesExt;
use byteorder::WriteBytesExt;
use std::io;
use std::io::Cursor;
// Init-packet header: 4-byte cid + 1-byte cmd + 2-byte big-endian length.
const HEADER_INIT_LEN: usize = 7;
// Continuation-packet header: 4-byte cid + 1-byte sequence number.
const HEADER_CONT_LEN: usize = 5;
// U2F specs:
// With this approach, a message with a payload less or equal to (s - 7) may be sent as one packet.
// A larger message is then divided into one or more continuation packets, starting with sequence
// number 0, which then increments by one to a maximum of 127.
// With a packet size of 64 bytes (max for full-speed devices), this means that the maximum message
// payload length is 64 - 7 + 128 * (64 - 5) = 7609 bytes.
pub const MAX_PAYLOAD_LEN: usize = 64 - HEADER_INIT_LEN + 128 * (64 - HEADER_CONT_LEN);
// This is the buffer size needed to fit the largest possible u2f package with headers
// (1 init packet + 128 continuation packets, 64 bytes each).
pub const MAX_LEN: usize = 129 * 64;
// TODO: CID and CMD are verified in the decode method but should maybe be handled by the
// application?
// TODO: decode returns an owned type (Vec) should probably have an interface for decoding into a
// buffer.
/// Framing strategy for exchanging U2F messages over some transport.
pub trait U2FFraming {
    /// Encode function. Writes `message` into `buf` and returns the number
    /// of bytes used.
    fn encode(&mut self, message: &[u8], buf: &mut [u8]) -> io::Result<usize>;
    /// Decode function. Will fail in case CID and CMD doesn't match stored values.
    /// May return `Ok(None)` when more bytes are needed (implementation-specific).
    fn decode(&mut self, buf: &[u8]) -> io::Result<Option<Vec<u8>>>;
    /// Set the CMD field in case this struct didn't encode the packet
    fn set_cmd(&mut self, cmd: u8);
}
/// Parses a u2fhid initialization-packet header from the front of `buf`.
///
/// Returns `(cid, cmd, len)`: the big-endian channel id, the command byte
/// and the big-endian payload length.
///
/// # Errors
/// Returns `InvalidInput` if `buf` holds fewer than `HEADER_INIT_LEN` (7) bytes.
pub fn parse_header(buf: &[u8]) -> io::Result<(u32, u8, u16)> {
    if buf.len() < HEADER_INIT_LEN {
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidInput,
            // Fixed grammar in this message ("to short" -> "too short").
            "Buffer too short to contain header (7 bytes)",
        ));
    }
    let mut rdr = Cursor::new(buf);
    let cid = rdr.read_u32::<BigEndian>()?;
    let cmd = rdr.read_u8()?;
    let len = rdr.read_u16::<BigEndian>()?;
    Ok((cid, cmd, len))
}
/// Writes a u2fhid initialization-packet header (big-endian cid, cmd byte,
/// big-endian payload length) into `buf`; returns the header length (7).
///
/// # Errors
/// Returns `InvalidInput` if `buf` holds fewer than `HEADER_INIT_LEN` bytes.
pub fn encode_header_init(cid: u32, cmd: u8, len: u16, mut buf: &mut [u8]) -> io::Result<usize> {
    if buf.len() < HEADER_INIT_LEN {
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidInput,
            // Fixed grammar in this message ("to short" -> "too short").
            "Buffer too short to contain header (7 bytes)",
        ));
    }
    buf.write_u32::<BigEndian>(cid)?;
    buf.write_u8(cmd)?;
    buf.write_u16::<BigEndian>(len)?;
    // Return the named constant instead of the magic literal 7.
    Ok(HEADER_INIT_LEN)
}
/// Writes a u2fhid continuation-packet header (big-endian cid plus sequence
/// number) into `buf`; returns the header length (5).
///
/// # Errors
/// Returns `InvalidInput` if `buf` holds fewer than `HEADER_CONT_LEN` bytes.
pub fn encode_header_cont(cid: u32, seq: u8, mut buf: &mut [u8]) -> io::Result<usize> {
    if buf.len() < HEADER_CONT_LEN {
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidInput,
            // Fixed grammar in this message ("to short" -> "too short").
            "Buffer too short to contain header (5 bytes)",
        ));
    }
    buf.write_u32::<BigEndian>(cid)?;
    buf.write_u8(seq)?;
    // Return the named constant instead of the magic literal 5.
    Ok(HEADER_CONT_LEN)
}
// TODO: Add randomness to CID
/// Returns the channel id used for new codecs. Currently a fixed placeholder
/// rather than a random value (see TODO above).
pub fn generate_cid() -> u32 {
    0xff00ff00
}
// U2FWS (U2F WebSocket framing protocol) writes u2fhid header and payload as single package (up to
// 7+7609 bytes)
/// Codec for the U2F WebSocket framing: one init header plus the whole
/// payload in a single package.
pub struct U2fWs {
    // Channel id written into / expected in every packet header.
    cid: u32,
    // Command byte written into / expected in every packet header.
    cmd: u8,
}
impl U2fWs {
    /// Creates a codec with a freshly generated channel id.
    pub fn new(cmd: u8) -> Self {
        Self::with_cid(generate_cid(), cmd)
    }

    /// Creates a codec bound to a known channel id — required when decoding
    /// packets this instance did not encode.
    // TODO: Is this good?
    pub fn with_cid(cid: u32, cmd: u8) -> Self {
        U2fWs { cid, cmd }
    }
}
impl Default for U2fWs {
    /// Fresh channel id with command byte 0; callers typically set the real
    /// command later via `set_cmd`.
    fn default() -> Self {
        Self::new(0)
    }
}
impl U2FFraming for U2fWs {
    /// Writes the 7-byte init header followed by the entire payload into
    /// `buf`; returns the total number of bytes used.
    fn encode(&mut self, message: &[u8], mut buf: &mut [u8]) -> io::Result<usize> {
        // NOTE(review): the length is cast to u16; MAX_PAYLOAD_LEN (7609)
        // keeps real messages far below the truncation point — confirm
        // callers never exceed it.
        let len = encode_header_init(self.cid, self.cmd, message.len() as u16, buf)?;
        // Advance past the header before copying the payload.
        buf = &mut buf[len..];
        if buf.len() < message.len() {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidInput,
                "Message won't fit in buffer",
            ));
        }
        let buf_slice = &mut buf[..message.len()];
        buf_slice.copy_from_slice(message);
        Ok(len + message.len())
    }
    /// Reads one whole package back out of `buf`, validating the channel id
    /// and command byte against this codec's stored values.
    fn decode(&mut self, buf: &[u8]) -> io::Result<Option<Vec<u8>>> {
        let (cid, cmd, len) = parse_header(buf)?;
        if cid != self.cid {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Wrong CID",
            ));
        }
        if cmd != self.cmd {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Wrong CMD",
            ));
        }
        // Unlike the HID codec, a short buffer here is a hard error rather
        // than an Ok(None) "need more bytes" signal.
        if buf.len() < HEADER_INIT_LEN + len as usize {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Invalid length",
            ));
        }
        // Copy out exactly the advertised payload.
        Ok(Some(Vec::from(
            &buf[HEADER_INIT_LEN..HEADER_INIT_LEN + len as usize],
        )))
    }
    /// Overrides the command byte, e.g. before decoding traffic encoded
    /// elsewhere.
    fn set_cmd(&mut self, cmd: u8) {
        self.cmd = cmd;
    }
}
// U2fHid writes packets / usb reports. 64 bytes at a time
/// Codec for u2fhid framing: the payload is split across 64-byte packets
/// (usb reports) — one init packet plus continuation packets as needed.
pub struct U2fHid {
    // Channel id written into / expected in every packet header.
    cid: u32,
    // Command byte written into / expected in the init-packet header.
    cmd: u8,
}
impl U2fHid {
    /// Creates a codec with a freshly generated channel id.
    pub fn new(cmd: u8) -> Self {
        U2fHid {
            cid: generate_cid(),
            cmd,
        }
    }
    /// Creates a codec bound to a known channel id — required when decoding
    /// packets this instance did not encode.
    pub fn with_cid(cid: u32, cmd: u8) -> Self {
        U2fHid { cid, cmd }
    }
    /// Total size of the encoded packet stream for a `len`-byte payload:
    /// one 64-byte init packet carrying up to 57 payload bytes, plus one
    /// 64-byte continuation packet per additional 59 payload bytes.
    fn get_encoded_len(len: u16) -> usize {
        // Work in usize: the previous u16 expression `59 + len - 1`
        // overflowed (debug panic / release wrap-around) for payload
        // lengths above 65477.
        let len = len as usize;
        if len <= 57 {
            64
        } else {
            let rest = len - 57;
            // Ceiling division: number of continuation packets needed.
            64 + 64 * ((rest + 59 - 1) / 59)
        }
    }
}
impl Default for U2fHid {
    /// Fresh channel id with command byte 0; callers typically set the real
    /// command later via `set_cmd`.
    fn default() -> Self {
        Self::new(0)
    }
}
impl U2FFraming for U2fHid {
    /// Encodes `message` as one 64-byte init packet followed by as many
    /// 64-byte continuation packets as needed (sequence numbers 0..=127).
    /// Returns the total encoded length.
    fn encode(&mut self, mut message: &[u8], mut buf: &mut [u8]) -> io::Result<usize> {
        let enc_len = Self::get_encoded_len(message.len() as u16);
        debug!("Will encode {} in {}", message.len(), enc_len);
        if buf.len() < enc_len {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidInput,
                "Message won't fit in buffer",
            ));
        }
        // Init packet: 7-byte header plus up to 57 payload bytes.
        let len = encode_header_init(self.cid, self.cmd, message.len() as u16, buf)?;
        buf = &mut buf[len..];
        let len = usize::min(64 - len, message.len());
        let buf_slice = &mut buf[..len];
        buf_slice.copy_from_slice(&message[..len]);
        message = &message[len..];
        buf = &mut buf[len..];
        // Continuation packets: 5-byte header plus up to 59 payload bytes each.
        let mut seq = 0;
        while !message.is_empty() {
            let len = encode_header_cont(self.cid, seq as u8, buf)?;
            buf = &mut buf[len..];
            let len = usize::min(64 - len, message.len());
            let buf_slice = &mut buf[..len];
            buf_slice.copy_from_slice(&message[..len]);
            buf = &mut buf[len..];
            message = &message[len..];
            seq += 1;
            // The U2F spec allows sequence numbers up to 127 only.
            if seq > 127 {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::InvalidInput,
                    "More frames than allowed",
                ));
            }
        }
        Ok(enc_len)
    }
    /// Decodes a full packet stream. Returns `Ok(None)` when `buf` does not
    /// yet contain every packet announced by the init header.
    fn decode(&mut self, mut buf: &[u8]) -> io::Result<Option<Vec<u8>>> {
        debug!("decode: {}", buf.len());
        let (cid, cmd, len) = parse_header(buf)?;
        if cid != self.cid {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Wrong CID",
            ));
        }
        if cmd != self.cmd {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Wrong CMD",
            ));
        }
        if buf.len() < Self::get_encoded_len(len) {
            // Need more bytes. (Removed a stray `println!` debug leftover
            // that printed the expected length to stdout.)
            debug!("need more bytes");
            return Ok(None);
        }
        let mut res = Vec::with_capacity(len as usize);
        let mut left = len as usize;
        // Payload carried by the init packet (up to 57 bytes).
        let len = usize::min(57, len as usize);
        res.extend_from_slice(&buf[HEADER_INIT_LEN..HEADER_INIT_LEN + len]);
        buf = &buf[HEADER_INIT_LEN + len..];
        left -= len;
        // Payload carried by each continuation packet (up to 59 bytes).
        while left > 0 {
            let len = usize::min(59, left);
            res.extend_from_slice(&buf[HEADER_CONT_LEN..HEADER_CONT_LEN + len]);
            buf = &buf[HEADER_CONT_LEN + len..];
            left -= len;
        }
        Ok(Some(res))
    }
    /// Overrides the command byte, e.g. before decoding traffic encoded
    /// elsewhere.
    fn set_cmd(&mut self, cmd: u8) {
        self.cmd = cmd;
    }
}
#[cfg(test)]
mod tests {
    use crate::*;
    // All fixtures use CID 0xEEEEEEEE and CMD 0x55. The multi-frame payload
    // (0..65) spans one init frame (57 payload bytes) plus one continuation
    // frame (8 payload bytes), padded to 64-byte HID reports.
    #[test]
    fn test_u2fhid_encode_single() {
        let mut codec = U2fHid::with_cid(0xEEEEEEEE, 0x55);
        let mut data = [0u8; 8000];
        let len = codec.encode(b"\x01\x02\x03\x04", &mut data[..]).unwrap();
        assert_eq!(len, 64);
        let mut expect = [0u8; 64];
        expect[..11].copy_from_slice(b"\xEE\xEE\xEE\xEE\x55\x00\x04\x01\x02\x03\x04");
        assert_eq!(&data[..len], &expect[..]);
    }
    #[test]
    fn test_u2fhid_encode_multi() {
        let payload: Vec<u8> = (0..65u8).collect();
        let mut codec = U2fHid::with_cid(0xEEEEEEEE, 0x55);
        let mut data = [0u8; 8000];
        let len = codec.encode(&payload[..], &mut data[..]).unwrap();
        assert_eq!(len, 128);
        let mut expect = [0u8; 128];
        // Init frame header: CID, CMD, big-endian length 0x0041 (= 65).
        expect[..7].copy_from_slice(b"\xEE\xEE\xEE\xEE\x55\x00\x41");
        expect[7..64].copy_from_slice(&payload[..57]);
        // Continuation frame header: CID, sequence number 0.
        expect[64..69].copy_from_slice(b"\xEE\xEE\xEE\xEE\x00");
        expect[69..77].copy_from_slice(&payload[57..]);
        assert_eq!(&data[..len], &expect[..]);
    }
    #[test]
    fn test_u2fhid_decode_single() {
        let mut codec = U2fHid::with_cid(0xEEEEEEEE, 0x55);
        let mut raw = [0u8; 64];
        raw[..11].copy_from_slice(b"\xEE\xEE\xEE\xEE\x55\x00\x04\x01\x02\x03\x04");
        let data = codec.decode(&raw[..]).unwrap().unwrap();
        assert_eq!(&data[..], b"\x01\x02\x03\x04");
    }
    #[test]
    fn test_u2fhid_decode_multi() {
        let payload: Vec<u8> = (0..65u8).collect();
        let mut codec = U2fHid::with_cid(0xEEEEEEEE, 0x55);
        let mut raw = [0u8; 128];
        raw[..7].copy_from_slice(b"\xEE\xEE\xEE\xEE\x55\x00\x41");
        raw[7..64].copy_from_slice(&payload[..57]);
        raw[64..69].copy_from_slice(b"\xEE\xEE\xEE\xEE\x00");
        raw[69..77].copy_from_slice(&payload[57..]);
        let data = codec.decode(&raw[..]).unwrap().unwrap();
        assert_eq!(&data[..], &payload[..]);
    }
    // U2fWs frames are not padded to 64 bytes: a single frame is just
    // header + payload, and a multi-frame message is one unpadded stream.
    #[test]
    fn test_u2fws_encode_single() {
        let mut codec = U2fWs::with_cid(0xEEEEEEEE, 0x55);
        let mut data = [0u8; 8000];
        let len = codec.encode(b"\x01\x02\x03\x04", &mut data[..]).unwrap();
        assert_eq!(len, 11);
        assert_eq!(
            &data[..len],
            b"\xEE\xEE\xEE\xEE\x55\x00\x04\x01\x02\x03\x04"
        );
    }
    #[test]
    fn test_u2fws_encode_multi() {
        let payload: Vec<u8> = (0..65u8).collect();
        let mut codec = U2fWs::with_cid(0xEEEEEEEE, 0x55);
        let mut data = [0u8; 8000];
        let len = codec.encode(&payload[..], &mut data[..]).unwrap();
        assert_eq!(len, 72);
        let mut expect = [0u8; 72];
        expect[..7].copy_from_slice(b"\xEE\xEE\xEE\xEE\x55\x00\x41");
        expect[7..72].copy_from_slice(&payload[..]);
        assert_eq!(&data[..len], &expect[..]);
    }
    #[test]
    fn test_u2fws_decode_single() {
        let mut codec = U2fWs::with_cid(0xEEEEEEEE, 0x55);
        let data = codec
            .decode(b"\xEE\xEE\xEE\xEE\x55\x00\x04\x01\x02\x03\x04")
            .unwrap()
            .unwrap();
        assert_eq!(&data[..], b"\x01\x02\x03\x04");
    }
    #[test]
    fn test_u2fws_decode_multi() {
        let payload: Vec<u8> = (0..65u8).collect();
        let mut codec = U2fWs::with_cid(0xEEEEEEEE, 0x55);
        let mut raw = [0u8; 128];
        raw[..7].copy_from_slice(b"\xEE\xEE\xEE\xEE\x55\x00\x41");
        raw[7..72].copy_from_slice(&payload[..]);
        let data = codec.decode(&raw[..]).unwrap().unwrap();
        assert_eq!(&data[..], &payload[..]);
    }
}
|
use mongodb::{ Document};
use mongodb::{Client, ThreadedClient};
use mongodb::db::{ThreadedDatabase};
// use mongodb::coll::Collection;
use crate::analyze_result::AnalyzeResult;
use crate::config::DBConfig;
// Database access wrapper holding a shared MongoDB connection and its
// configuration.
#[derive(Clone)]
pub struct DBase {
    // MongoDB client (connection handle)
    pub client: Client,
    // // collection handle — kept for reference, currently unused
    // pub collection: Collection,
    pub db_config: DBConfig,
}
impl DBase {
    /// Creates a database handle backed by a standalone MongoDB connection.
    ///
    /// Panics if the server at `db_conf.server:db_conf.port` cannot be
    /// reached (same behaviour as before; callers rely on fail-fast startup).
    pub fn new(db_conf: DBConfig) -> Self {
        let client = Client::connect(&db_conf.server, db_conf.port)
            .expect("Failed to initialize standalone client.");
        DBase {
            db_config: db_conf,
            client,
        }
    }

    /// Converts each analysis result to a BSON document and bulk-inserts
    /// them into the `logs` collection of the configured database.
    pub fn insert(&self, data: Vec<AnalyzeResult>) {
        println!("正在对 {:?} 条数据进行入库处理.", data.len());
        // insert_many rejects an empty batch, so short-circuit here.
        if data.is_empty() {
            return;
        }
        let coll = self.client.db(&self.db_config.database).collection("logs");
        let docs: Vec<Document> = data.into_iter().map(|ar| ar.to_doc()).collect();
        // Fixes: pass `docs` by value instead of cloning the whole batch,
        // and report failures instead of silently discarding them with `.ok()`.
        if let Err(e) = coll.insert_many(docs, None) {
            eprintln!("Failed to insert documents into `logs`: {}", e);
        }
    }
}
|
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
use crate::store::{State, ObjectCache};
use crate::object;
// Node of the LRU chain. The doubly linked list is threaded through `Vec`
// indices; an endpoint marks itself by pointing at its own index.
struct Entry {
    // Index of the next (more recently used) entry, or self at the MRU end.
    next: usize,
    // Index of the previous (less recently used) entry, or self at the LRU end.
    prev: usize,
    state: Rc<RefCell<State>>
}
// Fixed-capacity object cache with least-recently-used eviction.
pub struct SimpleCache {
    // Capacity; once reached, inserts evict the least recently used entry.
    max_entries: usize,
    // Index of the LRU end of the chain.
    least_recently_used: usize,
    // Index of the MRU end of the chain.
    most_recently_used: usize,
    // Entry slots; indices here are what the chain links and `lookup` store.
    entries: Vec<Entry>,
    // Object id -> entry index.
    lookup: HashMap<object::Id, usize>
}
impl SimpleCache {
    /// Creates an empty cache that holds at most `max_entries` objects.
    pub fn new(max_entries: usize) -> SimpleCache {
        SimpleCache {
            entries: Vec::new(),
            lookup: HashMap::new(),
            least_recently_used: 0,
            most_recently_used: 0,
            max_entries,
        }
    }

    /// Convenience constructor returning the cache behind the
    /// `ObjectCache` trait object interface.
    pub fn new_trait_object(max_entries: usize) -> Box<dyn ObjectCache> {
        Box::new(Self::new(max_entries))
    }
}
impl ObjectCache for SimpleCache {
    /// Drops every cached object and resets both chain endpoints.
    fn clear(&mut self) {
        self.entries.clear();
        self.lookup.clear();
        self.least_recently_used = 0;
        self.most_recently_used = 0;
    }
    fn remove(&mut self, object_id: &object::Id) {
        // Just remove the entry from the lookup index. To keep ownership simple,
        // we'll leave it in the LRU chain and let it fall off the end naturally
        // since it can no longer be accessed.
        self.lookup.remove(object_id);
    }
    /// Looks up an object and promotes it toward the most-recently-used end
    /// of the chain.
    fn get(&mut self, object_id: &object::Id) -> Option<&Rc<RefCell<State>>> {
        if self.entries.is_empty() {
            None
        }
        else if let Some(idx) = self.lookup.get(object_id) {
            //let mut target = &self.entries[*idx];
            if *idx == self.least_recently_used {
                let new_lru = self.entries[self.least_recently_used].next;
                self.entries[new_lru].prev = new_lru; // point to self
                self.least_recently_used = new_lru;
            }
            // No adjustment required if we're accessing the already most-recently-used object
            // so only check for the negative here
            if *idx != self.most_recently_used {
                let mut mru = &mut self.entries[self.most_recently_used];
                mru.next = *idx;
                // NOTE(review): two things look off in this relink. (1) When
                // *idx was the LRU, the reads below see the stale
                // self-referential pointers set in the branch above, so a
                // neighbour's `prev` can be clobbered. (2) The entry is wired
                // up as the new MRU (`prev` = old MRU, `next` = self) but
                // `self.most_recently_used` is never updated to *idx. The
                // unit tests in this file (`two_element_increases_priority`)
                // codify the eviction order this produces, so confirm intent
                // before changing anything here.
                let prev = self.entries[*idx].prev;
                self.entries[prev].next = self.entries[*idx].next;
                let next = self.entries[*idx].next;
                self.entries[next].prev = self.entries[*idx].prev;
                self.entries[*idx].prev = self.most_recently_used;
                self.entries[*idx].next = *idx; // point to self
            }
            Some(&self.entries[*idx].state)
        }
        else {
            None
        }
    }
    /// Inserts a new object state; returns the evicted state when the cache
    /// was already full, `None` otherwise.
    fn insert(&mut self, state: Rc<RefCell<State>>) -> Option<Rc<RefCell<State>>> {
        if self.entries.is_empty() {
            // First entry: both chain endpoints point at slot 0.
            self.least_recently_used = 0;
            self.most_recently_used = 0;
            self.lookup.insert(state.borrow().id, 0);
            self.entries.push(Entry{
                prev: 0,
                next: 0,
                state
            });
            return None;
        }
        else if self.entries.len() < self.max_entries {
            // Room left: append a slot and link it in as the new MRU.
            let idx = self.entries.len();
            self.entries[self.most_recently_used].next = idx;
            self.lookup.insert(state.borrow().id, idx);
            self.entries.push(Entry{
                prev: self.most_recently_used,
                next: idx,
                state
            });
            self.most_recently_used = idx;
            None
        }
        else {
            // Index is full, need to pop an entry. However, we cannot pop objects locked
            // to transactions. So we'll use get() on them to put those at the head of
            // the list until a non-locked object occurs
            // NOTE(review): this loop has no exhaustion guard — if every
            // entry has transaction_references != 0 it never terminates.
            while self.entries[self.least_recently_used].state.borrow().transaction_references != 0 {
                let object_id = self.entries[self.least_recently_used].state.borrow().id.clone();
                self.get(&object_id);
            }
            // Reuse the LRU slot for the new entry and advance the LRU end.
            let lru_object_id = self.entries[self.least_recently_used].state.borrow().id.clone();
            let new_lru = self.entries[self.least_recently_used].next;
            let new_mru = self.entries[new_lru].prev;
            self.entries[new_lru].prev = new_lru;
            let new_object_id = state.borrow().id.clone();
            let mut e = Entry{
                prev: self.most_recently_used,
                next: self.least_recently_used,
                state
            };
            self.lookup.remove(&lru_object_id);
            self.lookup.insert(new_object_id, new_mru);
            // NOTE(review): the old MRU's `next` is not re-pointed at the
            // reused slot here — verify the chain stays traversable after
            // repeated evictions.
            std::mem::swap(&mut e, &mut self.entries[self.least_recently_used]);
            self.most_recently_used = self.least_recently_used;
            self.least_recently_used = new_lru;
            Some(e.state)
        }
    }
}
#[cfg(test)]
mod tests {
    use std::cell::RefCell;
    use super::*;
    use crate::object;
    use crate::store;
    use std::sync;
    use uuid;
    // Builds four distinct object states (ids differ only in the leading
    // bytes) sharing the same metadata, for use as cache fixtures.
    fn objs() -> (State, State, State, State) {
        let o1 = object::Id(uuid::Uuid::parse_str("1108d237-a26e-4735-b001-6782fb2eac38").unwrap());
        let o2 = object::Id(uuid::Uuid::parse_str("2208d237-a26e-4735-b001-6782fb2eac38").unwrap());
        let o3 = object::Id(uuid::Uuid::parse_str("3308d237-a26e-4735-b001-6782fb2eac38").unwrap());
        let o4 = object::Id(uuid::Uuid::parse_str("4408d237-a26e-4735-b001-6782fb2eac38").unwrap());
        let metadata = object::Metadata {
            revision: object::Revision(uuid::Uuid::parse_str("f308d237-a26e-4735-b001-6782fb2eac38").unwrap()),
            refcount: object::Refcount{update_serial: 1, count: 1},
            timestamp: crate::hlc::Timestamp::from(1)
        };
        (State {
            id: o1,
            store_pointer: store::Pointer::None{pool_index: 0},
            metadata,
            object_kind: object::Kind::Data,
            transaction_references: 0,
            locked_to_transaction: None,
            data: sync::Arc::new(vec![]),
            max_size: None,
            kv_state: None
        },
        State {
            id: o2,
            store_pointer: store::Pointer::None{pool_index: 0},
            metadata,
            object_kind: object::Kind::Data,
            transaction_references: 0,
            locked_to_transaction: None,
            data: sync::Arc::new(vec![]),
            max_size: None,
            kv_state: None
        },
        State {
            id: o3,
            store_pointer: store::Pointer::None{pool_index: 0},
            metadata,
            object_kind: object::Kind::Data,
            transaction_references: 0,
            locked_to_transaction: None,
            data: sync::Arc::new(vec![]),
            max_size: None,
            kv_state: None
        },
        State {
            id: o4,
            store_pointer: store::Pointer::None{pool_index: 0},
            metadata,
            object_kind: object::Kind::Data,
            transaction_references: 0,
            locked_to_transaction: None,
            data: sync::Arc::new(vec![]),
            max_size: None,
            kv_state: None
        },
        )
    }
    // Filling the cache then inserting a fourth object evicts the oldest.
    #[test]
    fn max_size() {
        let (o1, o2, o3, o4) = objs();
        let u1 = o1.id.clone();
        //let u2 = o2.id.clone();
        //let u3 = o3.id.clone();
        //let u4 = o3.id.clone();
        let o1 = Rc::new(RefCell::new(o1));
        let o2 = Rc::new(RefCell::new(o2));
        let o3 = Rc::new(RefCell::new(o3));
        let o4 = Rc::new(RefCell::new(o4));
        let mut c = SimpleCache::new(3);
        assert!(c.insert(o1).is_none());
        assert!(c.insert(o2).is_none());
        assert!(c.insert(o3).is_none());
        let x = c.insert(o4);
        assert!(x.is_some());
        let x = x.unwrap();
        assert_eq!(x.borrow().id, u1);
    }
    // Touching o1 and o2 via get() makes o3 the eviction candidate.
    #[test]
    fn get_increases_priority() {
        let (o1, o2, o3, o4) = objs();
        let u1 = o1.id.clone();
        let u2 = o2.id.clone();
        let u3 = o3.id.clone();
        //let u4 = o3.id.clone();
        let o1 = Rc::new(RefCell::new(o1));
        let o2 = Rc::new(RefCell::new(o2));
        let o3 = Rc::new(RefCell::new(o3));
        let o4 = Rc::new(RefCell::new(o4));
        let mut c = SimpleCache::new(3);
        assert!(c.insert(o1).is_none());
        assert!(c.insert(o2).is_none());
        assert!(c.insert(o3).is_none());
        assert_eq!(c.get(&u1).unwrap().borrow().id, u1);
        assert_eq!(c.get(&u2).unwrap().borrow().id, u2);
        let x = c.insert(o4);
        assert!(x.is_some());
        let x = x.unwrap();
        assert_eq!(x.borrow().id, u3);
    }
    // NOTE(review): this codifies the current implementation's eviction
    // order — o3 is inserted *after* o1's access yet is evicted first.
    #[test]
    fn two_element_increases_priority() {
        let (o1, o2, o3, o4) = objs();
        let u1 = o1.id.clone();
        let u2 = o2.id.clone();
        let u3 = o3.id.clone();
        //let u4 = o3.id.clone();
        let o1 = Rc::new(RefCell::new(o1));
        let o2 = Rc::new(RefCell::new(o2));
        let o3 = Rc::new(RefCell::new(o3));
        let o4 = Rc::new(RefCell::new(o4));
        let mut c = SimpleCache::new(3);
        assert!(c.insert(o1).is_none());
        assert!(c.insert(o2).is_none());
        assert_eq!(c.get(&u1).unwrap().borrow().id, u1);
        assert!(c.insert(o3).is_none());
        assert_eq!(c.get(&u2).unwrap().borrow().id, u2);
        let x = c.insert(o4);
        assert!(x.is_some());
        let x = x.unwrap();
        assert_eq!(x.borrow().id, u3);
    }
    // Entries referenced by transactions must not be evicted; the next
    // unlocked entry (o3) goes instead.
    #[test]
    fn skip_locked_transactions() {
        let (mut o1, mut o2, o3, o4) = objs();
        //let u1 = o1.id.clone();
        //let u2 = o2.id.clone();
        let u3 = o3.id.clone();
        //let u4 = o3.id.clone();
        o1.transaction_references = 1;
        o2.transaction_references = 1;
        let o1 = Rc::new(RefCell::new(o1));
        let o2 = Rc::new(RefCell::new(o2));
        let o3 = Rc::new(RefCell::new(o3));
        let o4 = Rc::new(RefCell::new(o4));
        let mut c = SimpleCache::new(3);
        assert!(c.insert(o1).is_none());
        assert!(c.insert(o2).is_none());
        assert!(c.insert(o3).is_none());
        let x = c.insert(o4);
        assert!(x.is_some());
        let x = x.unwrap();
        assert_eq!(x.borrow().id, u3);
    }
} |
use super::*;
/// Builds the `print` expression: wires up its compiler bootstrap,
/// typecheck and codegen hooks.
pub fn expression() -> Expression {
    Expression {
        boostrap_compiler,
        typecheck,
        codegen,
    }
}
fn boostrap_compiler(compiler: &mut Compiler) {
add_function_to_compiler(compiler, "print", Type::None, &vec![Type::Int], "print_int");
add_function_to_compiler(
compiler,
"print",
Type::None,
&vec![Type::Bool],
"print_bool",
);
add_function_to_compiler(
compiler,
"print",
Type::None,
&vec![Type::String],
"print_string",
);
add_function_to_compiler(
compiler,
"print",
Type::None,
&vec![Type::Array(Box::new(Type::Byte))],
"print_bytes",
);
add_function_to_compiler(
compiler,
"print",
Type::None,
&vec![Type::Map(Box::new(Type::String), Box::new(Type::Int))],
"print_map",
);
add_function_to_compiler(
compiler,
"print",
Type::None,
&vec![Type::Byte],
"print_byte",
);
}
/// Typechecks a `print` call: the call expression itself resolves to the
/// literal `None` type regardless of the argument types (overload
/// selection happens later, by name, during codegen).
fn typecheck(
    resolver: &mut TypeResolver<TypecheckType>,
    _function: &TypevarFunction,
    _args: &Vec<TypeVar>,
) -> GenericResult<TypeVar> {
    // Fresh type variable constrained to the `None` literal type.
    let type_var = resolver.create_type_var();
    resolver.add_constraint(Constraint::IsLiteral(type_var,
        Unresolved::Literal(TypecheckType::None)
    ))?;
    Ok(type_var)
}
/// Generates code for a `print` call by delegating to the registered
/// `print` overloads via ordinary function-call codegen.
pub fn codegen(context: &mut Context, args: &[Token]) -> CodegenResult<Object> {
    call_function(context, "print", args)
}
/// Native implementation backing `print` for map values; prints entries as
/// `{k: v, k: v, }` (note the trailing separator after each pair).
///
/// NOTE(review): registered in `boostrap_compiler` for `Map<String, Int>`
/// but typed here as `HashMap<String, bool>` — confirm which is intended.
#[no_mangle]
pub extern "C" fn print_map(map: *mut HashMap<String, bool>) {
    // Borrow the map without taking ownership: the raw pointer is only
    // dereferenced, never reconstructed with `from_raw`, so the caller
    // keeps responsibility for freeing it.
    let map_unpacked = unsafe { &*map };
    print!("{{");
    for (k, v) in &*map_unpacked {
        print!("{}: {}, ", k, v);
    }
    print!("}}");
}
/// Native implementation backing `print` for string values.
#[no_mangle]
pub extern "C" fn print_string(value: *const c_char) {
    // SAFETY expectation: `value` must be a valid, NUL-terminated pointer,
    // and the bytes must be valid UTF-8 or the `unwrap` panics.
    print!("{}", unsafe { CStr::from_ptr(value).to_str().unwrap() });
}
/// Native implementation backing `print` for boolean values.
#[no_mangle]
pub extern "C" fn print_bool(value: bool) {
    print!("{}", value);
}
/// Native implementation backing `print` for integer values.
#[no_mangle]
pub extern "C" fn print_int(value: i64) {
    print!("{}", value);
}
|
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use regex::Regex;
use std::collections::HashMap;
use std::collections::HashSet;
fn parse_line(s: &str) -> (String, Vec<(u32, String)>) {
let mut iter = s.split(" bags contain ");
let bag_color = iter.next().unwrap().to_string();
let re = Regex::new(r"([0-9]+) (.+?) bag").unwrap();
let mut contains: Vec<(u32, String)> = Vec::new();
for cap in re.captures_iter(iter.next().unwrap()) {
contains.push(((&cap[1]).parse().unwrap(), (&cap[2]).to_string()));
}
(bag_color, contains)
}
/// Counts how many bag colours can, directly or transitively, contain a
/// "shiny gold" bag (AoC 2020 day 7, part 1).
///
/// Breadth-first search upward through the containment relation: each
/// round finds the bags that directly hold any colour discovered in the
/// previous round.
fn calc_part_1(input: &HashMap<String, Vec<(u32, String)>>) -> usize {
    // Colours already known to (transitively) contain shiny gold.
    let mut may_have: HashSet<String> = HashSet::new();
    // Colours found last round whose holders still need to be located.
    let mut to_process: HashSet<String> = HashSet::new();
    to_process.insert("shiny gold".to_string());
    while !to_process.is_empty() {
        let mut next_batch: HashSet<String> = HashSet::new();
        for color in to_process.iter() {
            for (key, val) in input.iter() {
                if val.iter().any(|(_, inner)| inner == color) {
                    // Fix: only queue colours seen for the first time, so
                    // each colour is expanded at most once. The original
                    // re-queued already-known colours, repeating work (and
                    // never terminating on cyclic input).
                    if may_have.insert(key.clone()) {
                        next_batch.insert(key.clone());
                    }
                }
            }
        }
        to_process = next_batch;
    }
    may_have.len()
}
/// Counts the bags required inside one `color` bag (AoC 2020 day 7,
/// part 2): each inner bag counts itself plus its own contents, i.e.
/// `count * (1 + recursive total)`.
///
/// Fix: a colour missing from `input` now contributes 0 instead of
/// panicking on `unwrap()`; behaviour on well-formed input is unchanged.
fn calc_part_2(color: &String, input: &HashMap<String, Vec<(u32, String)>>) -> u32 {
    input.get(color).map_or(0, |contents| {
        contents
            .iter()
            .map(|(count, inner)| count * (1 + calc_part_2(inner, input)))
            .sum()
    })
}
/// Reads the puzzle input from `input`, builds the colour -> contents map,
/// and prints both answers. Silently does nothing if the file is missing.
fn main() {
    if let Ok(lines) = read_lines("input") {
        let mut data = HashMap::new();
        for line in lines {
            let (color, contents) = parse_line(&line.unwrap());
            data.insert(color, contents);
        }
        println!("Part 1: {}", calc_part_1(&data));
        println!("Part 2: {}", calc_part_2(&"shiny gold".to_string(), &data));
    }
}
/// Opens `filename` and returns a buffered iterator over its lines.
/// Open errors are returned eagerly; per-line read errors surface through
/// the iterator items.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    File::open(filename).map(|f| io::BufReader::new(f).lines())
}
#[cfg(test)]
mod tests {
    use super::*;
    // Covers both a line with contents and the "no other bags" case, which
    // must parse to an empty contents vector.
    #[test]
    fn test_line_parser() {
        assert_eq!(
            (
                "light red".to_string(),
                vec![
                    (1, "bright white".to_string()),
                    (2, r"muted yellow".to_string())
                ]
            ),
            parse_line("light red bags contain 1 bright white bag, 2 muted yellow bags.")
        );
        assert_eq!(
            ("faded blue".to_string(), vec![]),
            parse_line("faded blue bags contain no other bags.")
        );
    }
}
|
use num::BigInt;
// Wrapper around an arbitrary-precision index.
// NOTE(review): `index` is not read anywhere in this view — presumably
// consumed by code outside this chunk; confirm before removing.
pub struct PrimorialPi {
    index: BigInt,
}
impl PrimorialPi {
    /// Builds a `PrimorialPi` from any value convertible into a `BigInt`
    /// (e.g. the primitive integer types).
    pub fn new<T>(i: T) -> PrimorialPi
    where
        BigInt: From<T>,
    {
        PrimorialPi {
            index: BigInt::from(i),
        }
    }
}
|
use amethyst::{
core::{
cgmath::Vector3,
transform::{GlobalTransform, Transform},
},
ecs::prelude::{Component,VecStorage},
prelude::*,
renderer::{SpriteRender, SpriteSheetHandle,ScreenDimensions},
};
use rand;
use rand::Rng;
// Global game state for the snake, stored as an ECS resource
// (see `world.add_resource` in `initialise_snake`).
pub struct Snake {
    // World position of the head after its most recent move.
    pub last_head_pos: Vector3<f32>,
    // Direction the head moved in last.
    pub last_head_dir: SegmentDirection,
    // Whether food is currently present on the board.
    pub food_available: bool,
    pub score: u64,
}
impl Snake {
pub fn new(pos: Vector3<f32>,dir: SegmentDirection) -> Self {
Snake {
last_head_pos: pos,
last_head_dir: dir,
food_available: false,
score: 0,
}
}
}
// Role of a snake segment: exactly one head, the rest body.
#[derive(PartialEq,Eq,Debug)]
pub enum SegmentType {
    Head,
    Body,
}
// Movement direction of a segment; `Idle` means not yet moving.
#[derive(Debug,Clone,Copy)]
pub enum SegmentDirection {
    Left,
    Right,
    Up,
    Down,
    Idle,
}
// One segment of the snake, attached to an entity as an ECS component.
#[derive(Debug)]
pub struct Segment{
    // Head or body.
    pub t: SegmentType,
    pub direction: SegmentDirection,
    // Position of this segment in the snake, used for ordering.
    pub id: u64,
}
impl Segment {
    /// Creates a body segment moving in `direction` with ordinal `id`.
    pub fn body(direction: SegmentDirection, id: u64) -> Self {
        Self {
            t: SegmentType::Body,
            direction,
            id,
        }
    }
}
impl Default for Segment {
    /// A fresh head segment with id 0 that is not yet moving.
    fn default() -> Self {
        Self {
            id: 0,
            direction: SegmentDirection::Idle,
            t: SegmentType::Head,
        }
    }
}
// Dense vector storage: most entities in this game carry a Segment.
impl Component for Segment {
    type Storage = VecStorage<Self>;
}
/// Registers the `Segment` component, adds the `Snake` resource, and spawns
/// the head entity at the centre of the screen (snapped to the 8px grid)
/// with a randomly coloured sprite.
///
/// Panics if either screen dimension is not a multiple of 8.
pub fn initialise_snake(world: &mut World, sheet_handle: SpriteSheetHandle) {
    world.register::<Segment>();
    // Pick one of the snake colour sprites (indices 0..7) at random.
    let snake_color_id = rand::thread_rng().gen_range(0, 7);
    let snake_sprite = SpriteRender {
        sprite_sheet: sheet_handle,
        sprite_number: snake_color_id,
        flip_horizontal: false,
        flip_vertical: false,
    };
    let (width, height) = {
        let dimn = world.read_resource::<ScreenDimensions>();
        // Bug fix: the original wrote `assert!(w % 8.0 == 0.0, h % 8.0 == 0.0)`,
        // which passes the height condition as the panic *message*, so the
        // height was never actually validated. Check both dimensions.
        assert!(dimn.width() % 8.0 == 0.0);
        assert!(dimn.height() % 8.0 == 0.0);
        (dimn.width(), dimn.height())
    };
    // Centre of the board, snapped to the 8px grid.
    let (x, y) = ((width / 16.0).round() * 8.0, (height / 16.0).round() * 8.0);
    world.add_resource(Snake::new(Vector3::new(x, y, 0.0), SegmentDirection::Idle));
    world.create_entity()
        .with(snake_sprite)
        .with(GlobalTransform::default())
        .with(Transform::from(Vector3::new(x, y, 0.0)))
        .with(Segment::default())
        .build();
}
|
/// Build script: generates Rust types from the secret/v1 protobuf
/// definitions rooted at `src/protocol`.
fn main() {
    const PROTO_FILES: &[&str] = &[
        "secret/v1/secret.proto",
        "secret/v1/state.proto",
        "secret/v1/keys.proto",
        "secret/v1/encrypted.proto",
        "secret/v1/messages.proto",
    ];
    prost_build::Config::new()
        .compile_protos(PROTO_FILES, &["src/protocol"])
        .expect("Protobuf code generation failed");
}
|
//! Timetoken type.
use std::time::{SystemTime, SystemTimeError};
/// # PubNub Timetoken
///
/// This is the timetoken structure that PubNub uses as a stream index.
/// It allows clients to resume streaming from where they left off for added
/// resiliency.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
pub struct Timetoken {
    /// Timetoken: time since the UNIX epoch, in 100 ns intervals.
    pub t: u64,
    /// Origin region
    pub r: u32,
}
impl Timetoken {
    /// Create a `Timetoken`.
    ///
    /// # Arguments
    ///
    /// - `time` - A [`SystemTime`] representing when the message was received
    ///   by the PubNub global network.
    /// - `region` - An internal region identifier for the originating region.
    ///
    /// `region` may be set to `0` if you have nothing better to use. The
    /// combination of a time and region gives us a vector clock that represents
    /// the message origin in spacetime; when and where the message was created.
    /// Using an appropriate `region` is important for delivery semantics in a
    /// global distributed system.
    ///
    /// # Errors
    ///
    /// Returns an error when the input `time` argument cannot be transformed
    /// into a duration.
    ///
    /// # Example
    ///
    /// ```
    /// use pubnub_core::data::timetoken::Timetoken;
    /// use std::time::SystemTime;
    ///
    /// let now = SystemTime::now();
    /// let timetoken = Timetoken::new(now, 0)?;
    /// # Ok::<(), std::time::SystemTimeError>(())
    /// ```
    pub fn new(time: SystemTime, region: u32) -> Result<Self, SystemTimeError> {
        let time = time.duration_since(SystemTime::UNIX_EPOCH)?;
        let secs = time.as_secs();
        let nanos = time.subsec_nanos();
        // Express the timestamp in 100 ns units.
        //
        // Bug fix: the original combined the parts with a bitwise OR
        // (`secs * 10_000_000 | nanos / 100`). `secs * 10_000_000` is a
        // decimal scaling, not a power-of-two shift, so its low bits can
        // overlap with `nanos / 100` (< 10^7) and OR then corrupts the
        // value. Addition composes the two parts correctly.
        let t = secs * 10_000_000 + u64::from(nanos) / 100;
        Ok(Self { t, r: region })
    }
}
impl Default for Timetoken {
    /// The zero timetoken: the very start of the stream, region 0.
    #[must_use]
    fn default() -> Self {
        Self { t: 0, r: 0 }
    }
}
impl std::fmt::Display for Timetoken {
    /// Renders as `{ t: <timetoken>, r: <region> }` for logs/diagnostics.
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        write!(fmt, "{{ t: {}, r: {} }}", self.t, self.r)
    }
}
|
use eval::Expr;
use gc::Gc;
use rnix::types::Wrapper;
use scope::Scope;
use std::borrow::Borrow;
use value::NixValue;
use crate::static_analysis;
#[cfg(test)]
use maplit::hashmap;
#[cfg(test)]
use serde_json::json;
#[cfg(test)]
use std::time::Duration;
#[cfg(test)]
use stoppable_thread::*;
#[cfg(test)]
use crate::error::ValueError;
#[allow(dead_code)]
/// Evaluates a nix code snippet and returns its value.
///
/// Parses the snippet relative to the current working directory, asserts
/// the static analyzer reports no errors, then evaluates the expression.
/// Panics on parse/eval failure — intended for tests only.
fn eval(code: &str) -> NixValue {
    let ast = rnix::parse(&code);
    let root = ast.root().inner().unwrap();
    let path = std::env::current_dir().unwrap();
    let out = Expr::parse(root, Gc::new(Scope::Root(path))).unwrap();
    assert_eq!(static_analysis::check(&out), Vec::new());
    let tmp = out.eval();
    let val: &NixValue = tmp.as_ref().unwrap().borrow();
    val.clone()
}
#[allow(dead_code)]
/// Returns the errors found by static_analysis::check.
///
/// As dealing with ranges is cumbersome, returns a map "text matched by the range" => "error text"
/// (note: this helper function shadows the `static_analysis` *module* name
/// only in expression position; the `static_analysis::check` path below
/// still resolves to the module).
fn static_analysis(code: &str) -> HashMap<&str, String> {
    let ast = rnix::parse(&code);
    let root = ast.root().inner().unwrap();
    let path = std::env::current_dir().unwrap();
    let out = Expr::parse(root, Gc::new(Scope::Root(path))).unwrap();
    let errors = static_analysis::check(&out);
    let mut res = HashMap::new();
    for error in errors {
        // Map each error's source range back to the matched snippet text.
        let range = error.range.start().into()..error.range.end().into();
        let text = code.get(range).unwrap();
        res.insert(text, error.kind.to_string());
    }
    res
}
use super::*;
// Arithmetic semantics: `/` on integers truncates, on floats divides.
#[test]
fn integer_division() {
    let code = "1 / 2";
    assert_eq!(eval(code).as_int().unwrap(), 0);
}
#[test]
fn float_division() {
    let code = "1.0 / 2.0";
    assert_eq!(eval(code).as_float().unwrap(), 0.5);
}
#[test]
fn order_of_operations() {
    let code = "1 + 2 * 3";
    assert_eq!(eval(code).as_int().unwrap(), 7);
}
#[test]
fn div_int_by_float() {
    let code = "1 / 2.0";
    assert_eq!(eval(code).as_float().unwrap(), 0.5);
}
// Binding analysis: unbound identifiers are reported, but `with` and
// attrset selection are treated conservatively (no false positives).
#[test]
fn unbound_simple() {
    let code = "1+x";
    assert_eq!(static_analysis(code), hashmap!{ "x" => "identifier x is unbound".into() });
}
#[test]
fn with_conservative_binding_analysis() {
    let code = "1 + with import <nixpkgs> {}; some_attr";
    assert_eq!(static_analysis(code), hashmap!{});
}
#[test]
fn binding_analysis_ignores_attrset_selection() {
    let code = "1 + rec { x = 1; y = x; }.y";
    assert_eq!(static_analysis(code), hashmap!{});
}
#[test]
fn unbound_attrset() {
    let code = "1 + rec { x = 1; y = x; z = t; }.y";
    assert_eq!(static_analysis(code), hashmap!{"t" => "identifier t is unbound".into()});
}
#[cfg(test)]
/// Spins up the LSP app on an in-memory connection, opens `code` as
/// `filename` via `textDocument/didOpen`, and returns the client side of
/// the connection plus the stoppable server thread handle.
fn prepare_integration_test(code: &str, filename: &str) -> (Connection, StoppableHandle<()>) {
    let (server, client) = Connection::memory();
    // Manually handle LSP communications here. This is needed in order to not wait
    // indefinetely for a message to be able to exit as soon as the test is finished
    // and the thread is stopped.
    let h = spawn(move |stopped| {
        let mut app = App { files: HashMap::new(), conn: server };
        loop {
            // Poll with a timeout so the stop flag is re-checked regularly.
            if let Ok(msg) = app.conn.receiver.recv_timeout(Duration::from_millis(100)) {
                match msg {
                    Message::Request(req) => app.handle_request(req),
                    Message::Notification(notification) => {
                        let _ = app.handle_notification(notification);
                    }
                    Message::Response(_) => (),
                }
            }
            if stopped.get() {
                break;
            }
        }
    });
    let open = Notification {
        method: String::from("textDocument/didOpen"),
        params: json!({
            "textDocument": { "uri": filename, "text": code, "version": 1, "languageId": "nix" }
        })
    };
    client.sender.send(open.into()).expect("Cannot send didOpen!");
    (client, h)
}
#[cfg(test)]
/// Receives the next LSP message from the server, failing the test if
/// nothing arrives within five seconds.
fn recv_msg(client: &Connection) -> lsp_server::Message {
    client.receiver.recv_timeout(Duration::new(5, 0)).expect("No message within 5 secs!")
}
#[cfg(test)]
/// Consumes the `textDocument/publishDiagnostics` notification the server
/// emits after a `didOpen`, so later responses can be read in order.
fn expect_diagnostics(client: &Connection) {
    match recv_msg(client) {
        Message::Notification(x) => {
            assert_eq!("textDocument/publishDiagnostics", x.method)
        }
        _ => panic!("Expected diagnostics notification!"),
    }
}
#[cfg(test)]
/// Unwraps the response variant of an LSP message; any other variant is a
/// protocol error in the test.
fn coerce_response(msg: lsp_server::Message) -> lsp_server::Response {
    match msg {
        Message::Response(x) => x,
        _ => panic!("Expected LSP message to be a response!"),
    }
}
// End-to-end hover: open "(1 + 1)", hover over position (0, 7), and expect
// the evaluated value "2" on the second line of the hover markdown.
#[test]
fn test_hover_integration() {
    // Since we transmit content via `textDocument/didOpen`, we can
    // use made-up names for paths here that don't need to exist anywhere.
    let urlpath = "file:///code/default.nix";
    let (client, handle) = prepare_integration_test("(1 + 1)", urlpath);
    let r = Request {
        id: RequestId::from(23),
        method: String::from("textDocument/hover"),
        params: json!({
            "textDocument": {
                "uri": "file:///code/default.nix",
            },
            "position": {
                "line": 0,
                "character": 7
            }
        })
    };
    client.sender.send(r.into()).expect("Cannot send hover notification!");
    // Skip the diagnostics notification emitted for the didOpen first.
    expect_diagnostics(&client);
    let msg = recv_msg(&client);
    let hover_json = coerce_response(msg).result.expect("Expected hover response!");
    let hover_value = &hover_json.as_object().unwrap()["contents"]["value"];
    assert_eq!("2", *hover_value.to_string().split("\\n").collect::<Vec<_>>().get(1).unwrap());
    handle.stop().join().expect("Failed to gracefully terminate LSP worker thread!");
}
// End-to-end rename: renaming the final `a` in `let a = { b = a; }; in a`
// must produce edits for all three occurrences of `a` on line 0.
#[test]
fn test_rename() {
    let urlpath = "file:///code/default.nix";
    let (client, handle) = prepare_integration_test("let a = { b = a; }; in a", urlpath);
    let r = Request {
        id: RequestId::from(23),
        method: String::from("textDocument/rename"),
        params: json!({
            "textDocument": {
                "uri": urlpath
            },
            "position": {
                "line": 0,
                "character": 24,
            },
            "newName": "c",
        })
    };
    client.sender.send(r.into()).expect("Cannot send rename request!");
    // Skip the diagnostics notification emitted for the didOpen first.
    expect_diagnostics(&client);
    let msg = recv_msg(&client);
    let response = coerce_response(msg).result.expect("Expected rename response!");
    let changes = &response
        .as_object()
        .unwrap()["changes"]["file:///code/default.nix"]
        .as_array()
        .expect("Changes must be an array!");
    // `let a`, `{ b = a; }`, `in a` is where `a` should be replaced with `c`.
    assert_eq!(3, changes.len());
    // Occurrence 1: `let a` at characters 4..5.
    let first = changes
        .get(0)
        .expect("Array should have three elements!")
        .as_object()
        .expect("Changes should be objects!");
    assert_eq!("c", first["newText"]);
    assert_eq!(4, first["range"]["start"]["character"]);
    assert_eq!(5, first["range"]["end"]["character"]);
    assert_eq!(0, first["range"]["start"]["line"]);
    assert_eq!(0, first["range"]["end"]["line"]);
    // Occurrence 2: `b = a` at characters 14..15.
    let second = changes
        .get(1)
        .expect("Array should have three elements!")
        .as_object()
        .expect("Changes should be objects!");
    assert_eq!("c", second["newText"]);
    assert_eq!(14, second["range"]["start"]["character"]);
    assert_eq!(15, second["range"]["end"]["character"]);
    assert_eq!(0, second["range"]["start"]["line"]);
    assert_eq!(0, second["range"]["end"]["line"]);
    // Occurrence 3: `in a` at characters 23..24.
    let third = changes
        .get(2)
        .expect("Array should have three elements!")
        .as_object()
        .expect("Changes should be objects!");
    assert_eq!("c", third["newText"]);
    assert_eq!(23, third["range"]["start"]["character"]);
    assert_eq!(24, third["range"]["end"]["character"]);
    assert_eq!(0, third["range"]["start"]["line"]);
    assert_eq!(0, third["range"]["end"]["line"]);
    handle.stop().join().expect("Failed to gracefully terminate LSP worker thread!");
}
// Attribute-set evaluation: selection, nested paths, rec, and merging.
#[test]
fn attrs_simple() {
    let code = "{ x = 1; y = 2; }.x";
    assert_eq!(eval(code).as_int().unwrap(), 1);
}
#[test]
fn attrs_path() {
    let code = "{ x.y.z = 3; }.x.y.z";
    assert_eq!(eval(code).as_int().unwrap(), 3);
}
#[test]
fn attrs_rec() {
    let code = "rec { x = 4; y = x; }.y";
    assert_eq!(eval(code).as_int().unwrap(), 4);
}
#[test]
fn attrs_rec_nested() {
    let code = "rec { x = { b = 1; }; y = x; }.y.b";
    assert_eq!(eval(code).as_int().unwrap(), 1);
}
#[test]
fn attrs_merge() {
    let code = "{ a = { b = 1; }; a.c = 2; }".to_string();
    assert_eq!(eval(&format!("{}.a.b", code)).as_int().unwrap(), 1);
    assert_eq!(eval(&format!("{}.a.c", code)).as_int().unwrap(), 2);
}
#[test]
fn attrs_merge_3() {
    let code = "{ a.b = 1; a.c = 2; a.d = 3; }".to_string();
    assert_eq!(eval(&format!("{}.a.b", code)).as_int().unwrap(), 1);
    assert_eq!(eval(&format!("{}.a.c", code)).as_int().unwrap(), 2);
    assert_eq!(eval(&format!("{}.a.d", code)).as_int().unwrap(), 3);
}
// Merge conflicts (the same attribute defined twice) must be rejected at
// parse time, whichever way the first definition was introduced.
#[test]
fn attrs_merge_conflict() {
    let ast = rnix::parse("{ a = { b = 1; c = 3; }; a.c = 2; }");
    let root = ast.root().inner().unwrap();
    let path = std::env::current_dir().unwrap();
    let parse_result = Expr::parse(root, Gc::new(Scope::Root(path)));
    assert!(matches!(parse_result, Err(EvalError::Value(ValueError::AttrAlreadyDefined(_)))));
}
#[test]
fn attrs_merge_conflict_2() {
    let ast = rnix::parse("{ a = let y = { b = 1; }; in y; a.c = 2; }");
    let root = ast.root().inner().unwrap();
    let path = std::env::current_dir().unwrap();
    let parse_result = Expr::parse(root, Gc::new(Scope::Root(path)));
    assert!(parse_result.is_err());
    // assert!(matches!(parse_result, Err(EvalError::Value(ValueError::AttrAlreadyDefined(_)))));
}
#[test]
fn attrs_merge_conflict_rec() {
    let ast = rnix::parse("rec { x = { b = 1; }; a = x; a.c = 2; }");
    let root = ast.root().inner().unwrap();
    let path = std::env::current_dir().unwrap();
    let parse_result = Expr::parse(root, Gc::new(Scope::Root(path)));
    assert!(parse_result.is_err());
}
#[test]
fn attrs_merge_conflict_inherit() {
    let ast = rnix::parse("{ inherit ({ a = { b = 1; }; }) a; a.c = 2; }");
    let root = ast.root().inner().unwrap();
    let path = std::env::current_dir().unwrap();
    let parse_result = Expr::parse(root, Gc::new(Scope::Root(path)));
    assert!(parse_result.is_err());
}
#[test]
fn attrs_inherit_from() {
    let code = "{ inherit ({ b = 1; }) b; }.b";
    assert_eq!(eval(code).as_int().unwrap(), 1);
}
|
use super::{Context, Module, RootModuleConfig};
use crate::configs::nodejs::NodejsConfig;
use crate::formatter::StringFormatter;
use crate::utils;
/// Creates a module with the current Node.js version
///
/// Will display the Node.js version if any of the following criteria are met:
/// - Current directory contains a `.js`, `.mjs` or `.cjs` file
/// - Current directory contains a `.ts` file
/// - Current directory contains a `package.json` or `.node-version` file
/// - Current directory contains a `node_modules` directory
pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
    let has_node_files = context
        .try_begin_scan()?
        .set_files(&["package.json", ".node-version"])
        .set_extensions(&["js", "mjs", "cjs", "ts"])
        .set_folders(&["node_modules"])
        .is_match();
    // esy projects also carry a package.json, but are not plain Node.js.
    let has_esy_lock = context
        .try_begin_scan()?
        .set_folders(&["esy.lock"])
        .is_match();
    if !has_node_files || has_esy_lock {
        return None;
    }
    let mut module = context.new_module("nodejs");
    let config = NodejsConfig::try_load(module.config);
    let nodejs_version = utils::exec_cmd("node", &["--version"])?.stdout;
    let parsed = StringFormatter::new(config.format).and_then(|formatter| {
        formatter
            .map_meta(|var, _| if var == "symbol" { Some(config.symbol) } else { None })
            .map_style(|var| if var == "style" { Some(Ok(config.style)) } else { None })
            .map(|var| if var == "version" { Some(Ok(nodejs_version.trim())) } else { None })
            .parse(None)
    });
    match parsed {
        Ok(segments) => module.set_segments(segments),
        Err(error) => {
            log::warn!("Error in module `nodejs`:\n{}", error);
            return None;
        }
    }
    Some(module)
}
#[cfg(test)]
mod tests {
    use crate::test::ModuleRenderer;
    use ansi_term::Color;
    use std::fs::{self, File};
    use std::io;
    /// Renders the nodejs module against `dir`; shared by every test below.
    fn render_module(dir: &tempfile::TempDir) -> Option<String> {
        ModuleRenderer::new("nodejs").path(dir.path()).collect()
    }
    /// The segment expected whenever the module triggers (the test harness
    /// mocks `node --version` as `v12.0.0`). Previously this `format!` was
    /// duplicated in eight tests; any change to the expected output now
    /// happens in one place.
    fn expected_segment() -> Option<String> {
        Some(format!("via {} ", Color::Green.bold().paint("⬢ v12.0.0")))
    }
    #[test]
    fn folder_without_node_files() -> io::Result<()> {
        let dir = tempfile::tempdir()?;
        assert_eq!(None::<String>, render_module(&dir));
        dir.close()
    }
    #[test]
    fn folder_with_package_json() -> io::Result<()> {
        let dir = tempfile::tempdir()?;
        File::create(dir.path().join("package.json"))?.sync_all()?;
        assert_eq!(expected_segment(), render_module(&dir));
        dir.close()
    }
    #[test]
    fn folder_with_package_json_and_esy_lock() -> io::Result<()> {
        // esy projects carry a package.json too, but must not trigger nodejs.
        let dir = tempfile::tempdir()?;
        File::create(dir.path().join("package.json"))?.sync_all()?;
        fs::create_dir_all(dir.path().join("esy.lock"))?;
        assert_eq!(None::<String>, render_module(&dir));
        dir.close()
    }
    #[test]
    fn folder_with_node_version() -> io::Result<()> {
        let dir = tempfile::tempdir()?;
        File::create(dir.path().join(".node-version"))?.sync_all()?;
        assert_eq!(expected_segment(), render_module(&dir));
        dir.close()
    }
    #[test]
    fn folder_with_js_file() -> io::Result<()> {
        let dir = tempfile::tempdir()?;
        File::create(dir.path().join("index.js"))?.sync_all()?;
        assert_eq!(expected_segment(), render_module(&dir));
        dir.close()
    }
    #[test]
    fn folder_with_mjs_file() -> io::Result<()> {
        let dir = tempfile::tempdir()?;
        File::create(dir.path().join("index.mjs"))?.sync_all()?;
        assert_eq!(expected_segment(), render_module(&dir));
        dir.close()
    }
    #[test]
    fn folder_with_cjs_file() -> io::Result<()> {
        let dir = tempfile::tempdir()?;
        File::create(dir.path().join("index.cjs"))?.sync_all()?;
        assert_eq!(expected_segment(), render_module(&dir));
        dir.close()
    }
    #[test]
    fn folder_with_ts_file() -> io::Result<()> {
        let dir = tempfile::tempdir()?;
        File::create(dir.path().join("index.ts"))?.sync_all()?;
        assert_eq!(expected_segment(), render_module(&dir));
        dir.close()
    }
    #[test]
    fn folder_with_node_modules() -> io::Result<()> {
        let dir = tempfile::tempdir()?;
        fs::create_dir_all(dir.path().join("node_modules"))?;
        assert_eq!(expected_segment(), render_module(&dir));
        dir.close()
    }
}
|
use std::fmt::Display;
use serde::{Deserialize, Serialize};
/// Key to find RecordPos from a record / row.
///
/// Represented in a string either like "(prefix).(attr)" or "(attr)".
#[derive(
    Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default, Serialize, Deserialize, new,
)]
pub struct SchemaIndex {
    // Optional table/alias prefix (the part before the dot).
    prefix: Option<String>,
    // Attribute (column) name; `From<&str>` rejects empty attrs.
    attr: String,
}
impl SchemaIndex {
    /// Optional prefix part
    pub fn prefix(&self) -> Option<&str> {
        self.prefix.as_ref().map(String::as_str)
    }
    /// Attribute part
    pub fn attr(&self) -> &str {
        self.attr.as_str()
    }
}
impl Display for SchemaIndex {
    /// Formats as `"prefix.attr"` when a prefix is present, `"attr"` otherwise.
    ///
    /// Writes directly into the formatter; the previous version allocated an
    /// intermediate `String` via `format!` on every call for no benefit.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if let Some(p) = self.prefix() {
            write!(f, "{}.{}", p, self.attr())
        } else {
            write!(f, "{}", self.attr())
        }
    }
}
impl From<&str> for SchemaIndex {
    /// Parses `"attr"` or `"prefix.attr"`, trimming whitespace around each part.
    ///
    /// Panics when there is more than one dot, or when any part is empty.
    fn from(s: &str) -> Self {
        let parts: Vec<&str> = s.split('.').collect();
        debug_assert!(!parts.is_empty());
        assert!(parts.len() <= 2, "too many dots (.) !");
        for part in &parts {
            assert!(!part.is_empty(), "prefix nor attr must not be empty string");
        }
        match parts.as_slice() {
            [attr] => Self::new(None, attr.trim().to_string()),
            [prefix, attr] => Self::new(Some(prefix.trim().to_string()), attr.trim().to_string()),
            _ => unreachable!("length checked above"),
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::SchemaIndex;
    // Round-trips `From<&str>` through `Display`, checking that whitespace
    // around each part is trimmed.
    #[test]
    fn test_from_success() {
        let from_to_data: Vec<(&str, &str)> = vec![
            ("c", "c"),
            (" c ", "c"),
            ("t.c", "t.c"),
            (" t . c ", "t.c"),
        ];
        for (from, to) in from_to_data {
            assert_eq!(SchemaIndex::from(from).to_string(), to);
        }
    }
    // Empty input must panic (empty attr).
    #[test]
    #[should_panic]
    fn test_from_panic1() {
        SchemaIndex::from("");
    }
    // Missing prefix before the dot must panic.
    #[test]
    #[should_panic]
    fn test_from_panic2() {
        SchemaIndex::from(".c");
    }
    // Missing attr after the dot must panic.
    #[test]
    #[should_panic]
    fn test_from_panic3() {
        SchemaIndex::from("t.");
    }
    // More than one dot must panic.
    #[test]
    #[should_panic]
    fn test_from_panic4() {
        SchemaIndex::from("a.b.c");
    }
}
|
use std::collections::HashMap;
use std::pin::Pin;
use std::sync::Arc;
use futures::{
future::{poll_fn, select},
pin_mut, FutureExt, Sink,
};
use crate::{
client::{watch_for_client_state_change, Client, ClientState, ClientTaskTracker},
error::WampError,
proto::TxMessage,
transport::Transport,
uri::Uri,
MessageBuffer,
};
/// Sends a GOODBYE message to the peer, flushes it, then waits for the
/// answering GOODBYE to arrive before resolving.
async fn close_impl<T: Transport>(
    task_tracker: Arc<ClientTaskTracker<T>>,
    reason: Uri,
    received: Arc<MessageBuffer>,
) -> Result<(), WampError> {
    {
        // Scope the sender lock: wait for sink readiness, then enqueue GOODBYE.
        let mut sender = task_tracker.get_sender().lock().await;
        poll_fn(|cx| Pin::new(&mut *sender).poll_ready(cx))
            .await
            .map_err(|error| WampError::WaitForReadyToSendFailed {
                message_type: "GOODBYE",
                error,
            })?;
        Pin::new(&mut *sender)
            .start_send(TxMessage::Goodbye {
                details: HashMap::default(),
                reason,
            })
            .map_err(|error| WampError::MessageSendFailed {
                message_type: "GOODBYE",
                error,
            })?;
    }
    {
        // Re-acquire the lock and flush so the GOODBYE actually hits the wire.
        // NOTE(review): the lock is deliberately dropped and re-taken between
        // send and flush — presumably to avoid starving other senders; confirm
        // no conflicting message can be interleaved here.
        let mut sender = task_tracker.get_sender().lock().await;
        poll_fn(|cx| Pin::new(&mut *sender).poll_flush(cx))
            .await
            .map_err(|error| WampError::SinkFlushFailed {
                message_type: "GOODBYE (flush)",
                error,
            })?;
    }
    // Wait for a goodbye message to come in, confirming the disconnect.
    poll_fn(|cx| received.goodbye.poll_take_any(cx)).await;
    Ok(())
}
/// Asynchronous function to cleanly close a connection.
///
/// Races the GOODBYE handshake against a client state change: if the client
/// reaches `ShuttingDown`/`Closed` first, that branch's result wins.
pub(in crate::client) async fn close<T: Transport>(
    client: &Client<T>,
    reason: Uri,
) -> Result<(), WampError> {
    // Resolves once the client is already shutting down or closed.
    let wfcsc = watch_for_client_state_change(client.state.clone(), |state| {
        state == ClientState::ShuttingDown || state == ClientState::Closed
    })
    .fuse();
    // The actual GOODBYE round-trip.
    let ci = close_impl(client.task_tracker.clone(), reason, client.received.clone()).fuse();
    pin_mut!(wfcsc, ci);
    // `select` yields whichever future finishes first; `factor_first` extracts
    // that future's output and discards the still-pending one.
    select(wfcsc, ci).await.factor_first().0
}
|
use std::{io::Write, env};
use std::fs::File;
use std::path::Path;
use std::io::{self, prelude::*, BufReader};
use mp3_duration;
/// Reads an .osu beatmap (argv[1]) and its audio file (argv[2]), finds the
/// uninherited ("red") timing points, and writes one "m:s:ms NC." modding
/// entry per bar line to mods.txt.
fn main() {
    let args: Vec<String> = env::args().collect();
    let filename = &args[1];
    let audio = &args[2];
    print!("reading... ");
    let contents = File::open(filename).expect("fuck");
    println!("done.");
    print!("finding red lines... ");
    // Collect every line between the [TimingPoints] header and [Colours].
    let mut points: Vec<String> = Vec::new();
    let reader = BufReader::new(contents);
    let mut collect = false;
    for line in reader.lines() {
        let line = line.unwrap();
        if collect {
            points.push(line.clone());
        }
        if line == "[TimingPoints]" {
            collect = true;
        }
        if line == "[Colours]" {
            // Drop the trailing filler lines picked up before [Colours].
            // `truncate` + `saturating_sub` replaces the old
            // `resize(len - 3, ...)`, which both abused `resize` for shrinking
            // and panicked on underflow when fewer than 3 lines were collected.
            points.truncate(points.len().saturating_sub(3));
            break;
        }
    }
    // Parse red lines: (offset in ms, time per bar in ms).
    let mut reds: Vec<(i64, f32)> = Vec::new();
    for point in points {
        let elements: Vec<&str> = point.split(',').collect();
        // Field 6 == "1" marks an uninherited (red) timing point.
        if elements[6] == "1" {
            let offset: i64 = elements[0].parse().unwrap();
            let timesig: f32 = elements[2].parse().unwrap();
            // multiply beat time against time sig to get bar time
            let diff: f32 = elements[1].parse::<f32>().unwrap() * timesig;
            reds.push((offset, diff));
        }
    }
    println!("done.");
    // find the length of the song, probably wrong lol
    let songlength = mp3_duration::from_path(Path::new(audio)).expect("the fuck happened here").as_millis();
    // Sentinel entry so the last real red line has an end offset.
    reds.push((songlength as i64, 69420.0));
    print!("generating mods... ");
    let mut mods: String = String::new();
    // Appends one "m:s:ms NC." entry at every bar line in [start, end).
    fn mods_per_redline(start: i64, end: i64, bartime: f32, mods: &mut String) {
        for i in start..end {
            if i % (bartime.round() as i64) == 0 {
                let mins = i / 1000 / 60;
                let secs = i / 1000 - (mins * 60);
                let mils = i - (mins * 60 * 1000) - (secs * 1000);
                let entry = format!("{}:{}:{} NC.\n", mins, secs, mils);
                mods.push_str(entry.as_str());
            }
        }
    }
    // Each red line runs until the next one starts (the sentinel closes the
    // last). `windows(2)` replaces the old `reds.clone()` plus manual
    // next-index bookkeeping.
    for pair in reds.windows(2) {
        mods_per_redline(pair[0].0, pair[1].0, pair[0].1, &mut mods);
    }
    println!("done.");
    // write to file
    print!("writing mods... ");
    let mut file = File::create("mods.txt").expect("fuck me");
    file.write_all(mods.as_bytes()).expect("fuck you");
    println!("done.");
    println!("enjoy your kudosu lmao");
}
|
#![cfg_attr(not(feature = "std"), no_std)]
pub mod rewards;
pub mod inflation;
pub mod weights;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
#[cfg(feature = "runtime-benchmarks")]
pub mod benchmarking;
use frame_support::traits::{Currency, OnUnbalanced, Get, EnsureOrigin};
use frame_support::{
decl_event, decl_module, decl_storage,
PalletId, dispatch::DispatchResult,
};
use weights::WeightInfo;
use sp_runtime::traits::{AccountIdConversion};
use frame_system::{ensure_root};
use sp_std::prelude::*;
/// Balance type of the configured `Currency`.
pub type BalanceOf<T> =
    <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;
/// Imbalance produced when the currency issues new funds.
type PositiveImbalanceOf<T> = <<T as Config>::Currency as Currency<
    <T as frame_system::Config>::AccountId, >>::PositiveImbalance;
/// Imbalance produced when the currency burns funds.
type NegativeImbalanceOf<T> = <<T as Config>::Currency as Currency<
    <T as frame_system::Config>::AccountId, >>::NegativeImbalance;
/// Pallet configuration trait.
pub trait Config: frame_system::Config {
    /// The Event type.
    type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;
    /// The currency mechanism.
    type Currency: Currency<Self::AccountId>;
    /// Handler with which to retrieve total token custody
    type CustodianHandler: pallet_staking::CustodianHandler<Self::AccountId, BalanceOf<Self>>;
    //---------------- REWARDS POOL ----------------//
    /// The RewardsPool sub component id, used to derive its account ID.
    type RewardsPoolId: Get<PalletId>;
    /// The reward remainder handler (Treasury).
    type RewardRemainder: OnUnbalanced<NegativeImbalanceOf<Self>>;
    //---------------- INFLATION ----------------//
    /// Era duration needed for ideal inflation computation.
    type EraDuration: Get<Self::BlockNumber>;
    /// The admin origin for the pallet (Tech Committee unanimity).
    type AdminOrigin: EnsureOrigin<Self::Origin>;
    /// Weight information for extrinsics in this pallet.
    type WeightInfo: WeightInfo;
}
// On-chain storage for the pallet (legacy `decl_storage!` macro syntax).
decl_storage! {
    trait Store for Module<T: Config> as XXEconomics {
        //---------------- INFLATION ----------------//
        /// Inflation fixed parameters: minimum inflation, ideal stake and curve falloff
        pub InflationParams get(fn inflation_params) config():
            inflation::InflationFixedParams;
        /// List of ideal interest points, defined as a tuple of block number and idea interest
        pub InterestPoints get(fn interest_points) config() build(|config: &GenesisConfig<T>| {
            // Sort points when building from genesis
            let mut points = config.interest_points.clone();
            points.sort_by(|a, b| a.block.cmp(&b.block));
            points
        }): Vec<inflation::IdealInterestPoint<T::BlockNumber>>;
        /// Ideal liquidity rewards staked amount
        pub IdealLiquidityStake get(fn ideal_stake_rewards) config(): BalanceOf<T>;
        /// Liquidity rewards balance
        pub LiquidityRewards get(fn liquidity_rewards) config(): BalanceOf<T>;
    }
    add_extra_genesis {
        // Initial free balance of the rewards pool account.
        config(balance): BalanceOf<T>;
        build(|config| {
            //---------------- REWARDS POOL ----------------//
            // Create Rewards pool account and set the balance from genesis
            let account_id = <Module<T>>::rewards_account_id();
            let _ = <T as Config>::Currency::make_free_balance_be(&account_id, config.balance);
        });
    }
}
// Events emitted by this pallet; `Balance` is aliased for readability.
decl_event! {
    pub enum Event<T> where
        Balance = BalanceOf<T>,
    {
        //---------------- REWARDS POOL ----------------//
        /// Rewards were given from the pool
        RewardFromPool(Balance),
        /// Rewards were minted
        RewardMinted(Balance),
        //---------------- INFLATION ----------------//
        /// Inflation fixed parameters were changed
        InflationParamsChanged,
        /// Ideal interest points were changed
        InterestPointsChanged,
        /// Ideal liquidity rewards stake was changed
        IdealLiquidityStakeChanged,
        /// Liquidity rewards balance was changed
        LiquidityRewardsBalanceChanged,
    }
}
// Dispatchable calls and pallet constants (legacy `decl_module!` syntax).
// All extrinsics below are admin-gated via `Self::ensure_admin`.
decl_module! {
    pub struct Module<T: Config> for enum Call where origin: T::Origin {
        //---------------- REWARDS POOL ----------------//
        // Constants exposed in the pallet metadata.
        const RewardsPoolId: PalletId = T::RewardsPoolId::get();
        const RewardsPoolAccount: T::AccountId = T::RewardsPoolId::get().into_account();
        fn deposit_event() = default;
        //---------------- ADMIN ----------------//
        /// Set inflation fixed parameters
        ///
        /// The dispatch origin must be AdminOrigin.
        ///
        #[weight = <T as Config>::WeightInfo::set_inflation_params()]
        pub fn set_inflation_params(origin, params: inflation::InflationFixedParams) {
            Self::ensure_admin(origin)?;
            <InflationParams>::put(params);
            Self::deposit_event(RawEvent::InflationParamsChanged);
        }
        /// Set ideal interest points
        ///
        /// Overwrites the full list of points. Doesn't check if points are ordered per block.
        /// It's up to the caller to ensure the ordering, otherwise leads to unexpected behavior.
        ///
        /// The dispatch origin must be AdminOrigin.
        ///
        #[weight = <T as Config>::WeightInfo::set_interest_points()]
        pub fn set_interest_points(origin, points: Vec<inflation::IdealInterestPoint<T::BlockNumber>>) {
            Self::ensure_admin(origin)?;
            // Insert sorted vector of points
            let mut sorted_points = points.clone();
            sorted_points.sort_by(|a, b| a.block.cmp(&b.block));
            <InterestPoints<T>>::put(sorted_points);
            Self::deposit_event(RawEvent::InterestPointsChanged);
        }
        /// Set ideal liquidity rewards stake amount
        ///
        /// The dispatch origin must be AdminOrigin.
        /// This can be used to adjust the ideal liquidity reward stake
        ///
        #[weight = <T as Config>::WeightInfo::set_liquidity_rewards_stake()]
        pub fn set_liquidity_rewards_stake(origin, #[compact] amount: BalanceOf<T>) {
            Self::ensure_admin(origin)?;
            <IdealLiquidityStake<T>>::put(amount);
            Self::deposit_event(RawEvent::IdealLiquidityStakeChanged);
        }
        /// Set balance of liquidity rewards
        ///
        /// The dispatch origin must be AdminOrigin.
        /// This should only be used to make corrections to liquidity rewards balance
        /// according to data from ETH chain
        ///
        #[weight = <T as Config>::WeightInfo::set_liquidity_rewards_balance()]
        pub fn set_liquidity_rewards_balance(origin, #[compact] amount: BalanceOf<T>) {
            Self::ensure_admin(origin)?;
            <LiquidityRewards<T>>::put(amount);
            Self::deposit_event(RawEvent::LiquidityRewardsBalanceChanged);
        }
    }
}
impl<T: Config> Module<T> {
/// Check if origin is admin
fn ensure_admin(o: T::Origin) -> DispatchResult {
<T as Config>::AdminOrigin::try_origin(o)
.map(|_| ())
.or_else(ensure_root)?;
Ok(())
}
}
|
use iron::error::HttpError;
use iron::headers::{parsing, Header, HeaderFormat};
use iron::{Chain, Request, Response, IronResult, BeforeMiddleware, AfterMiddleware, typemap, status};
use router::Router;
use std::fmt;
use std::io::Read;
use std::sync::Mutex;
use std::ascii::AsciiExt;
use std::sync::mpsc::Sender;
use github_v3::types::comments::{
IssueCommentEvent,
PullRequestReviewCommentEvent,
};
use github_v3::types::PushEvent;
use github_v3::types::pull_requests::PullRequestEvent;
use rustc_serialize::{json};
use types::HandledGithubEvents;
// Middleware marker: decodes the webhook payload into `req.extensions`.
struct Deserialize;
// typemap key: which kind of event this request carries.
struct EventInfo;
impl typemap::Key for EventInfo { type Value = EventType; }
// The typemap keys below each hold the decoded payload for one event kind.
struct IsIssueComment;
impl typemap::Key for IsIssueComment { type Value = IssueCommentEvent; }
struct IsPullRequestReviewComment;
impl typemap::Key for IsPullRequestReviewComment { type Value = PullRequestReviewCommentEvent; }
struct IsPullRequest;
impl typemap::Key for IsPullRequest { type Value = PullRequestEvent; }
// GitHub event kinds recognized from the `X-Github-Event` header.
enum EventType {
    IssueComment,
    PullRequestReviewComment,
    PullRequest,
    Push,
    UnknownEvent,
}
// Typed wrapper for the `X-Github-Event` request header.
#[derive(Clone, PartialEq, Debug)]
pub struct GithubEventHeader {
    // Event name, lower-cased on parse (see `parse_header`).
    pub event_name: String
}
impl Header for GithubEventHeader {
    /// The header GitHub uses to label webhook deliveries.
    fn header_name() -> &'static str {
        "X-Github-Event"
    }
    /// Parses the raw header value, lower-casing it so event-name matching
    /// downstream is case-insensitive.
    fn parse_header(raw: &[Vec<u8>]) -> Result<GithubEventHeader, HttpError> {
        parsing::from_one_raw_str(raw).map(|name: String| GithubEventHeader {
            event_name: name.to_ascii_lowercase(),
        })
    }
}
impl HeaderFormat for GithubEventHeader {
    // Emits the stored (lower-cased) event name verbatim.
    fn fmt_header(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.event_name)
    }
}
// typemap key for the decoded `push` payload.
struct IsPush;
impl typemap::Key for IsPush { type Value = PushEvent; }
impl BeforeMiddleware for Deserialize {
    /// Reads the request body and, keyed on the `X-Github-Event` header,
    /// decodes it into the matching typed payload in `req.extensions`.
    fn before(&self, req: &mut Request) -> IronResult<()> {
        let mut payload = String::new();
        // NOTE(review): `unwrap` panics on a non-UTF-8 or aborted body — this
        // is untrusted network input; consider returning an error instead.
        req.body.read_to_string(&mut payload).unwrap();
        // Clone so the header borrow doesn't conflict with mutating extensions.
        let headers = req.headers.clone();
        let event_header = headers.get::<GithubEventHeader>();
        event_header.map(|header| {
            match header.event_name.as_ref() {
                "issue_comment" => {
                    req.extensions.insert::<EventInfo>(EventType::IssueComment);
                    // NOTE(review): `json::decode(...).unwrap()` panics on
                    // malformed JSON from the sender — same in the arms below.
                    req.extensions.insert::<IsIssueComment>(json::decode(&payload).unwrap());
                },
                "pull_request_review_comment" => {
                    req.extensions.insert::<EventInfo>(EventType::PullRequestReviewComment);
                    req.extensions.insert::<IsPullRequestReviewComment>(json::decode(&payload).unwrap());
                },
                "pull_request" => {
                    req.extensions.insert::<EventInfo>(EventType::PullRequest);
                    req.extensions.insert::<IsPullRequest>(json::decode(&payload).unwrap());
                },
                "push" => {
                    req.extensions.insert::<EventInfo>(EventType::Push);
                    req.extensions.insert::<IsPush>(json::decode(&payload).unwrap());
                },
                // Unknown events are still tagged so the handler can ack them.
                _ => {req.extensions.insert::<EventInfo>(EventType::UnknownEvent);}
            }
        });
        Ok(())
    }
}
// Forwards decoded webhook payloads to the application over a channel.
struct DeliverActionables {
    // `std::sync::mpsc::Sender` is not `Sync`, so it is wrapped in a `Mutex`
    // to be shared across Iron's worker threads.
    event_tx: Mutex<Sender<HandledGithubEvents>>,
}
impl AfterMiddleware for DeliverActionables {
    /// After the response is built, pulls the decoded payload (if any) out of
    /// the request extensions and forwards it over the channel.
    ///
    /// Delivery is best-effort: a poisoned mutex or a closed channel is
    /// silently ignored, exactly as before — but the intent is now explicit
    /// (`if let` + `let _ =`) instead of hidden inside discarded
    /// `Option::map`/`Result::map` chains used for side effects.
    fn after(&self, req: &mut Request, response: Response) -> IronResult<Response> {
        match req.extensions.remove::<EventInfo>() {
            Some(EventType::IssueComment) => {
                if let Some(payload) = req.extensions.remove::<IsIssueComment>() {
                    if let Ok(sender) = self.event_tx.lock() {
                        let _ = sender.send(HandledGithubEvents::IssueCommentEvent(payload));
                    }
                }
            },
            Some(EventType::PullRequestReviewComment) => {
                if let Some(payload) = req.extensions.remove::<IsPullRequestReviewComment>() {
                    if let Ok(sender) = self.event_tx.lock() {
                        let _ = sender.send(HandledGithubEvents::PullRequestReviewCommentEvent(payload));
                    }
                }
            },
            Some(EventType::PullRequest) => {
                if let Some(payload) = req.extensions.remove::<IsPullRequest>() {
                    if let Ok(sender) = self.event_tx.lock() {
                        let _ = sender.send(HandledGithubEvents::PullRequestEvent(payload));
                    }
                }
            },
            _ => ()
        }
        Ok(response)
    }
}
/// Terminal handler: acknowledges the webhook with a body matching whatever
/// event the `Deserialize` middleware recognized.
fn handle_webhooks(req: &mut Request) -> IronResult<Response> {
    let body = match req.extensions.get::<EventInfo>() {
        Some(&EventType::IssueComment) => "{\"body\":\"Successful recv of issue comment\"}",
        Some(&EventType::PullRequestReviewComment) => "{\"body\":\"Successful recv of pull request review comment\"}",
        Some(&EventType::PullRequest) => "{\"body\":\"Successful recv of pull request\"}",
        Some(&EventType::Push) => "{\"body\":\"Successful recv of push\"}",
        Some(&EventType::UnknownEvent) => "{\"body\":\"Recv an unhandled event\"}",
        None => "{\"body\":\"No event header provided\"}",
    };
    Ok(Response::with((status::Accepted, body)))
}
/// Builds the webhook router: deserialize payload -> respond -> forward the
/// payload to the application over `event_tx`.
pub fn get_webhook_handler(
    event_tx: Sender<HandledGithubEvents>
) -> Router {
    let mut chain = Chain::new(handle_webhooks);
    chain.link_before(Deserialize);
    chain.link_after(DeliverActionables {
        event_tx: Mutex::new(event_tx),
    });
    let mut router = Router::new();
    router.post("/", chain);
    router
}
|
use std::cmp;
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;
// One ingredient and its property scores, parsed from a line of input.txt.
struct Ingredient {
    name: String,
    capacity: i32,
    durability: i32,
    flavor: i32,
    texture: i32,
    calories: i32,
}
impl Ingredient {
    /// Returns a copy of this ingredient with every property scaled by
    /// `factor` (the number of teaspoons used in the recipe).
    fn mult(&self, factor: i32) -> Ingredient {
        Ingredient {
            name: self.name.clone(),
            capacity: factor * self.capacity,
            durability: factor * self.durability,
            flavor: factor * self.flavor,
            texture: factor * self.texture,
            calories: factor * self.calories,
        }
    }
}
/// Scores a recipe: the product of the per-property sums across all
/// ingredients, with each sum clamped at zero. Calories are excluded.
///
/// Takes `&[Ingredient]` rather than `&Vec<Ingredient>` (clippy `ptr_arg`);
/// existing `&Vec` call sites still work via deref coercion.
fn score(ingredients: &[Ingredient]) -> i32 {
    let totals = ingredients.iter().fold((0, 0, 0, 0), |acc, ingredient| (
        acc.0 + ingredient.capacity,
        acc.1 + ingredient.durability,
        acc.2 + ingredient.flavor,
        acc.3 + ingredient.texture,
    ));
    cmp::max(0, totals.0) * cmp::max(0, totals.1) * cmp::max(0, totals.2) * cmp::max(0, totals.3)
}
/// Sums the calories of all ingredients in the recipe.
///
/// Takes `&[Ingredient]` rather than `&Vec<Ingredient>` (clippy `ptr_arg`);
/// existing `&Vec` call sites still work via deref coercion.
fn total_calories(ingredients: &[Ingredient]) -> i32 {
    ingredients.iter().map(|ingredient| ingredient.calories).sum()
}
/// Parses one input line of the form
/// `Name: capacity N, durability N, flavor N, texture N, calories N`.
fn parse_line(line: &str) -> Ingredient {
    let tokens = line.split(' ').collect::<Vec<_>>();
    // Every token we need except the last carries one trailing ':' or ','.
    let drop_last_char = |token: &str| {
        let mut owned = token.to_string();
        owned.pop();
        owned
    };
    Ingredient {
        name: drop_last_char(tokens[0]),
        capacity: drop_last_char(tokens[2]).parse().unwrap(),
        durability: drop_last_char(tokens[4]).parse().unwrap(),
        flavor: drop_last_char(tokens[6]).parse().unwrap(),
        texture: drop_last_char(tokens[8]).parse().unwrap(),
        calories: tokens[10].parse().unwrap(),
    }
}
/// Reads input.txt (four ingredients expected), brute-forces all teaspoon
/// splits summing to 100, and prints the best score (A) and the best score
/// among 500-calorie recipes (B).
fn main() {
    let file = File::open("input.txt").expect("file not found");
    let mut reader = BufReader::new(file);
    let mut contents = String::new();
    reader.read_to_string(&mut contents).expect("could not read input file");
    // One ingredient per input line.
    let mut ingredients = Vec::new();
    for line in contents.lines() {
        ingredients.push(parse_line(line));
    }
    let mut max_score_a = 0;
    let mut max_score_b = 0;
    // Amounts are >= 1 each and sum to 100. Deriving `l` from the other three
    // turns the old accidental O(n^4) loop into O(n^3) with identical
    // coverage: for i, j, k >= 1 the derived l is automatically <= 97.
    for i in 1..98 {
        for j in 1..98 {
            for k in 1..98 {
                let l = 100 - i - j - k;
                if l >= 1 {
                    let recipe = vec![
                        ingredients[0].mult(i),
                        ingredients[1].mult(j),
                        ingredients[2].mult(k),
                        ingredients[3].mult(l),
                    ];
                    let s = score(&recipe);
                    if s > max_score_a {
                        max_score_a = s;
                    }
                    // Part B only counts recipes totalling exactly 500 calories.
                    if s > max_score_b && total_calories(&recipe) == 500 {
                        max_score_b = s;
                    }
                }
            }
        }
    }
    println!("A: {}", max_score_a);
    println!("B: {}", max_score_b);
}
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// https://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Basic floating-point number distributions
/// A distribution to sample floating point numbers uniformly in the open
/// interval `(0, 1)` (not including either endpoint).
///
/// See also: [`Closed01`] for the closed `[0, 1]`; [`Uniform`] for the
/// half-open `[0, 1)`.
///
/// Implemented for `f32` and `f64` by the `float_impls!` macro below, which
/// offsets the standard `[0, 1)` sample by half an ulp.
///
/// # Example
/// ```rust
/// use rand::{weak_rng, Rng};
/// use rand::distributions::Open01;
///
/// let val: f32 = weak_rng().sample(Open01);
/// println!("f32 from (0,1): {}", val);
/// ```
///
/// [`Uniform`]: struct.Uniform.html
/// [`Closed01`]: struct.Closed01.html
#[derive(Clone, Copy, Debug)]
pub struct Open01;
/// A distribution to sample floating point numbers uniformly in the closed
/// interval `[0, 1]` (including both endpoints).
///
/// See also: [`Open01`] for the open `(0, 1)`; [`Uniform`] for the half-open
/// `[0, 1)`.
///
/// Implemented for `f32` and `f64` by the `float_impls!` macro below, which
/// rescales the standard `[0, 1)` sample so the maximum maps to exactly 1.0.
///
/// # Example
/// ```rust
/// use rand::{weak_rng, Rng};
/// use rand::distributions::Closed01;
///
/// let val: f32 = weak_rng().sample(Closed01);
/// println!("f32 from [0,1]: {}", val);
/// ```
///
/// [`Uniform`]: struct.Uniform.html
/// [`Open01`]: struct.Open01.html
#[derive(Clone, Copy, Debug)]
pub struct Closed01;
// Generates `Uniform`/`Open01`/`Closed01` impls for one float type.
// `$mantissa_bits` is the mantissa width (52 for f64, 23 for f32);
// `$method_name` is the RNG method producing a `[0, 1)` sample.
macro_rules! float_impls {
    ($mod_name:ident, $ty:ty, $mantissa_bits:expr, $method_name:ident) => {
        mod $mod_name {
            use Rng;
            use distributions::{Distribution, Uniform};
            use super::{Open01, Closed01};
            // 2^mantissa_bits distinct sample values in [0, 1); 1/SCALE is
            // the spacing between adjacent values.
            const SCALE: $ty = (1u64 << $mantissa_bits) as $ty;
            impl Distribution<$ty> for Uniform {
                /// Generate a floating point number in the half-open
                /// interval `[0,1)`.
                ///
                /// See `Closed01` for the closed interval `[0,1]`,
                /// and `Open01` for the open interval `(0,1)`.
                #[inline]
                fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> $ty {
                    rng.$method_name()
                }
            }
            impl Distribution<$ty> for Open01 {
                #[inline]
                fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> $ty {
                    // add 0.5 * epsilon, so that smallest number is
                    // greater than 0, and largest number is still
                    // less than 1, specifically 1 - 0.5 * epsilon.
                    rng.$method_name() + 0.5 / SCALE
                }
            }
            impl Distribution<$ty> for Closed01 {
                #[inline]
                fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> $ty {
                    // rescale so that 1.0 - epsilon becomes 1.0
                    // precisely.
                    rng.$method_name() * SCALE / (SCALE - 1.0)
                }
            }
        }
    }
}
float_impls! { f64_rand_impls, f64, 52, next_f64 }
float_impls! { f32_rand_impls, f32, 23, next_f32 }
#[cfg(test)]
mod tests {
    use Rng;
    use mock::StepRng;
    use distributions::{Open01, Closed01};
    const EPSILON32: f32 = ::core::f32::EPSILON;
    const EPSILON64: f64 = ::core::f64::EPSILON;
    // StepRng::new(seed, 0) always yields `seed`, so each case below pins the
    // exact float produced for the all-zeros, one, and all-ones bit patterns.
    #[test]
    fn floating_point_edge_cases() {
        let mut zeros = StepRng::new(0, 0);
        assert_eq!(zeros.gen::<f32>(), 0.0);
        assert_eq!(zeros.gen::<f64>(), 0.0);
        let mut one = StepRng::new(1, 0);
        assert_eq!(one.gen::<f32>(), EPSILON32);
        assert_eq!(one.gen::<f64>(), EPSILON64);
        let mut max = StepRng::new(!0, 0);
        assert_eq!(max.gen::<f32>(), 1.0 - EPSILON32);
        assert_eq!(max.gen::<f64>(), 1.0 - EPSILON64);
    }
    // Closed01 rescales [0,1) so the maximum sample lands exactly on 1.0.
    #[test]
    fn fp_closed_edge_cases() {
        let mut zeros = StepRng::new(0, 0);
        assert_eq!(zeros.sample::<f32, _>(Closed01), 0.0);
        assert_eq!(zeros.sample::<f64, _>(Closed01), 0.0);
        let mut one = StepRng::new(1, 0);
        let one32 = one.sample::<f32, _>(Closed01);
        let one64 = one.sample::<f64, _>(Closed01);
        // The rescale slightly inflates the smallest nonzero sample.
        assert!(EPSILON32 < one32 && one32 < EPSILON32 * 1.01);
        assert!(EPSILON64 < one64 && one64 < EPSILON64 * 1.01);
        let mut max = StepRng::new(!0, 0);
        assert_eq!(max.sample::<f32, _>(Closed01), 1.0);
        assert_eq!(max.sample::<f64, _>(Closed01), 1.0);
    }
    // Open01 shifts [0,1) up by half an ulp, excluding both endpoints.
    #[test]
    fn fp_open_edge_cases() {
        let mut zeros = StepRng::new(0, 0);
        assert_eq!(zeros.sample::<f32, _>(Open01), 0.0 + EPSILON32 / 2.0);
        assert_eq!(zeros.sample::<f64, _>(Open01), 0.0 + EPSILON64 / 2.0);
        let mut one = StepRng::new(1, 0);
        let one32 = one.sample::<f32, _>(Open01);
        let one64 = one.sample::<f64, _>(Open01);
        assert!(EPSILON32 < one32 && one32 < EPSILON32 * 2.0);
        assert!(EPSILON64 < one64 && one64 < EPSILON64 * 2.0);
        let mut max = StepRng::new(!0, 0);
        assert_eq!(max.sample::<f32, _>(Open01), 1.0 - EPSILON32 / 2.0);
        assert_eq!(max.sample::<f64, _>(Open01), 1.0 - EPSILON64 / 2.0);
    }
    #[test]
    fn rand_open() {
        // this is unlikely to catch an incorrect implementation that
        // generates exactly 0 or 1, but it keeps it sane.
        let mut rng = ::test::rng(510);
        for _ in 0..1_000 {
            // strict inequalities
            let f: f64 = rng.sample(Open01);
            assert!(0.0 < f && f < 1.0);
            let f: f32 = rng.sample(Open01);
            assert!(0.0 < f && f < 1.0);
        }
    }
    #[test]
    fn rand_closed() {
        let mut rng = ::test::rng(511);
        for _ in 0..1_000 {
            // non-strict inequalities: endpoints are allowed
            let f: f64 = rng.sample(Closed01);
            assert!(0.0 <= f && f <= 1.0);
            let f: f32 = rng.sample(Closed01);
            assert!(0.0 <= f && f <= 1.0);
        }
    }
}
|
use std::cmp;
use nalgebra::{Scalar, Vector3};
/// Helper trait for computing minimum and maximum values for types. This is used in conjunction
/// with `PrimitiveType` to enable min/max computations even for vector types
///
/// Implementations exist for the integer primitives, `bool`, `f32`/`f64`
/// (hand-written because floats are only `PartialOrd`) and `Vector3<T>`.
pub trait MinMax {
    /// Computes the infimum of this value and `other`. For scalar types, the infimum is simply the
    /// minimum of the two types (as defined by `PartialOrd`), for vector types, this is the component-wise
    /// minimum
    ///
    /// # Example
    /// ```
    /// use pasture_core::math::MinMax;
    /// # use pasture_core::nalgebra::Vector3;
    ///
    /// assert_eq!(5i32.infimum(&3i32), 3i32);
    /// assert_eq!(Vector3::new(1.0, 2.0, 3.0).infimum(&Vector3::new(2.0, 1.0, 0.0)), Vector3::new(1.0, 1.0, 0.0));
    /// ```
    fn infimum(&self, other: &Self) -> Self;
    /// Computes the supremum of this value and `other`. For scalar types, the supremum is simply the
    /// maximum of the two types (as defined by `PartialOrd`), for vector types, this is the component-wise
    /// maximum
    ///
    /// # Example
    /// ```
    /// use pasture_core::math::MinMax;
    /// # use pasture_core::nalgebra::Vector3;
    ///
    /// assert_eq!(5i32.supremum(&3i32), 5i32);
    /// assert_eq!(Vector3::new(1.0, 2.0, 3.0).supremum(&Vector3::new(2.0, 1.0, 4.0)), Vector3::new(2.0, 2.0, 4.0));
    /// ```
    fn supremum(&self, other: &Self) -> Self;
}
// `Ord` scalar types get infimum/supremum directly from `cmp::min`/`cmp::max`.
// Floats are handled separately below because they are only `PartialOrd`.
macro_rules! impl_minmax_for_primitive_type {
    ($type:tt) => {
        impl MinMax for $type {
            fn infimum(&self, other: &Self) -> Self {
                cmp::min(*self, *other)
            }
            fn supremum(&self, other: &Self) -> Self {
                cmp::max(*self, *other)
            }
        }
    };
}
impl_minmax_for_primitive_type! {u8}
impl_minmax_for_primitive_type! {u16}
impl_minmax_for_primitive_type! {u32}
impl_minmax_for_primitive_type! {u64}
impl_minmax_for_primitive_type! {i8}
impl_minmax_for_primitive_type! {i16}
impl_minmax_for_primitive_type! {i32}
impl_minmax_for_primitive_type! {i64}
impl_minmax_for_primitive_type! {bool}
impl MinMax for f32 {
fn infimum(&self, other: &Self) -> Self {
if *self < *other {
*self
} else {
*other
}
}
fn supremum(&self, other: &Self) -> Self {
if *self > *other {
*self
} else {
*other
}
}
}
impl MinMax for f64 {
fn infimum(&self, other: &Self) -> Self {
if *self < *other {
*self
} else {
*other
}
}
fn supremum(&self, other: &Self) -> Self {
if *self > *other {
*self
} else {
*other
}
}
}
impl<T: MinMax + Scalar> MinMax for Vector3<T> {
    // Component-wise minimum, delegating to the scalar impls.
    fn infimum(&self, other: &Self) -> Self {
        Vector3::new(
            self.x.infimum(&other.x),
            self.y.infimum(&other.y),
            self.z.infimum(&other.z),
        )
    }
    // Component-wise maximum, delegating to the scalar impls.
    fn supremum(&self, other: &Self) -> Self {
        Vector3::new(
            self.x.supremum(&other.x),
            self.y.supremum(&other.y),
            self.z.supremum(&other.z),
        )
    }
}
|
pub mod title;
pub mod game;
pub mod game_end; |
use bevy::math::vec3;
use bevy::prelude::*;
use bevy::math::f32::Vec2;
use bevy::sprite::collide_aabb;
use bevy_prototype_debug_lines::*;
use crate::player::Player;
/// Which side an entity fights for; used to filter friendly fire.
pub enum Team {
    Player,
    Enemy,
}
/// Combat state for anything that can take damage.
pub struct Hurtbox {
    /// Team this entity belongs to.
    pub team: Team,
    /// AABB extents used when colliding against hitboxes.
    pub size: Vec2,
    /// Remaining hit points; saturating-subtracted in `take_damage`.
    pub health: u64,
    /// Set to true when a hitbox connects.
    pub is_hit: bool,
    /// While true, incoming hitboxes are ignored.
    pub invincible: bool,
    pub vel: Vec2, // TODO: maybe split it into a Physics component? not sure if its worth it.
}
/// Which team a hitbox is allowed to damage.
pub enum CanHitTeam {
    Player,
    Enemy,
    //All,
}
impl CanHitTeam {
    /// True when a hitbox with this filter may damage a hurtbox on `team`.
    fn can_hit(&self, team: &Team) -> bool {
        matches!(
            (self, team),
            (CanHitTeam::Enemy, Team::Enemy) | (CanHitTeam::Player, Team::Player)
        )
    }
}
/// A single attack volume, emitted as an event and consumed by `take_damage`.
pub struct HitBoxEvent {
    /// Centre of the hitbox (compared against entity translations).
    pub position: Vec2,
    /// AABB extents of the hitbox.
    pub size: Vec2,
    /// Hit points subtracted from any hurtbox hit.
    pub damage: u64,
    /// Magnitude of the velocity impulse applied away from the hitbox.
    pub knockback: f32,
    /// Team filter for who this hitbox may damage.
    pub can_hit: CanHitTeam,
}
/// Applies every pending hitbox to each overlapping, damageable hurtbox:
/// marks it hit, grants invincibility, subtracts damage (saturating at 0)
/// and knocks it back away from the hitbox centre.
pub fn take_damage(
    mut entities: Query<(&mut Hurtbox, &mut Transform)>,
    mut hitbox_events: EventReader<HitBoxEvent>,
) {
    for hitbox in hitbox_events.iter() {
        for (mut hurtbox, transform) in entities.iter_mut() {
            // Team filter and i-frames are checked before the AABB overlap test.
            if hitbox.can_hit.can_hit(&hurtbox.team) && !hurtbox.invincible &&
                collide_aabb::collide(
                    transform.translation,
                    hurtbox.size,
                    hitbox.position.extend(0.0),
                    hitbox.size,
                )
                .is_some()
            {
                hurtbox.is_hit = true;
                hurtbox.invincible = true;
                hurtbox.health = hurtbox.health.saturating_sub(hitbox.damage);
                // Knockback points from the hitbox centre towards the entity.
                // NOTE(review): if the two positions coincide, `normalize()`
                // produces NaN and poisons `vel` — confirm this can't happen.
                let direction = transform.translation.truncate() - hitbox.position;
                hurtbox.vel = direction.normalize() * hitbox.knockback;
            }
        }
    }
}
/// Integrates knockback velocity into each entity's transform, then damps
/// the velocity so the motion decays over subsequent frames.
pub fn physics_system(mut entities: Query<(&mut Hurtbox, &mut Transform)>) {
    for (mut body, mut transform) in entities.iter_mut() {
        transform.translation += body.vel.extend(0.0);
        // Exponential friction: 20% of the velocity is lost each tick.
        body.vel *= 0.8;
    }
}
/// Despawns any entity whose health has reached zero and credits the player
/// with exp and money for the kill.
pub fn die_system(
    mut commands: Commands,
    entities: Query<(Entity, &Hurtbox)>,
    mut player: Query<&mut Player>,
) {
    for (entity, Hurtbox { health, .. }) in entities.iter() {
        // `health` is u64, so the old `*health <= 0` could only ever mean
        // equality (clippy: comparison of unsigned value against zero).
        if *health == 0 {
            commands.entity(entity).despawn_recursive();
            if let Ok(mut player) = player.single_mut() {
                player.exp += 100;
                player.money += 200;
            }
        }
    }
}
// TODO(rukai): only include these systems in debug mode
/// Outlines every non-player hurtbox in yellow for visual debugging.
pub fn debug_hurtboxes(
    entities: Query<(&Hurtbox, &Transform), Without<Player>>,
    mut lines: ResMut<DebugLines>,
) {
    for (hurtbox, transform) in entities.iter() {
        draw_box(&mut lines, transform.translation, hurtbox.size, Color::YELLOW);
    }
}
/// Outlines every hitbox fired this frame in red for visual debugging.
pub fn debug_hitboxes(mut hitbox_events: EventReader<HitBoxEvent>, mut lines: ResMut<DebugLines>) {
    for event in hitbox_events.iter() {
        draw_box(&mut lines, event.position.extend(0.0), event.size, Color::RED);
    }
}
/// Draws an axis-aligned rectangle outline centred on `pos` with the given
/// `size`, using one debug line per edge (zero duration: redrawn each frame).
fn draw_box(lines: &mut DebugLines, pos: Vec3, size: Vec2, color: Color) {
    let extent = size.extend(0.0);
    let bottom_left = pos - extent / 2.0;
    let bottom_right = bottom_left + vec3(extent.x, 0.0, 0.0);
    let top_left = bottom_left + vec3(0.0, extent.y, 0.0);
    let top_right = bottom_left + extent;
    lines.line_colored(bottom_left, bottom_right, 0.0, color);
    lines.line_colored(bottom_right, top_right, 0.0, color);
    lines.line_colored(bottom_left, top_left, 0.0, color);
    lines.line_colored(top_left, top_right, 0.0, color);
}
|
mod identity;
pub use identity::*;
mod user;
pub use user::*;
mod post;
pub use post::*;
|
#[cfg(test)]
mod tests {
    /// Sanity check that the test harness itself runs.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
mod client {
    /// Client-side connection stub; `name` identifies the server to talk to.
    pub struct clt {
        name: String,
    }
    impl clt {
        /// Creates a client bound to the given server name.
        ///
        /// Previously this constructor dropped its argument and returned `()`;
        /// it now actually builds a `clt`, mirroring `server::srv::new`.
        fn new(srv_name: String) -> clt {
            clt { name: srv_name }
        }
        /// Sends `data`. Stub: not implemented yet (param underscored until then).
        fn send(_data: String) {
        }
        /// Presumably the non-blocking variant of `send`. Stub: not implemented yet.
        fn send_async(_data: String) {
        }
        /// Reads the next message. Stub: always returns an empty string.
        fn read() -> String {
            "".to_string()
        }
        /// Reports whether data is available. Stub: always `false`.
        fn poll() -> bool {
            false
        }
    }
}
mod server {
    /// Server-side endpoint stub; `name` is the identity it serves under.
    pub struct srv {
        name: String,
    }
    impl srv {
        /// Builds a server with the given name.
        fn new(srv_name: String) -> srv {
            srv { name: srv_name }
        }
        /// Begins serving. Stub: not implemented yet.
        fn start() {}
        /// Stops serving. Stub: not implemented yet.
        fn stop() {}
    }
}
|
#[doc = "Reader of register LUT_SEL[%s]"]
pub type R = crate::R<u32, super::LUT_SEL>;
#[doc = "Writer for register LUT_SEL[%s]"]
pub type W = crate::W<u32, super::LUT_SEL>;
#[doc = "Register LUT_SEL[%s] `reset()`'s with value 0"]
impl crate::ResetValue for super::LUT_SEL {
    type Type = u32;
    // All trigger-select fields reset to source 0.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `LUT_TR0_SEL`"]
pub type LUT_TR0_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `LUT_TR0_SEL`"]
pub struct LUT_TR0_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> LUT_TR0_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0:3; `value` is masked to 4 bits before insertion.
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
#[doc = "Reader of field `LUT_TR1_SEL`"]
pub type LUT_TR1_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `LUT_TR1_SEL`"]
pub struct LUT_TR1_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> LUT_TR1_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 8:11; `value` is masked to 4 bits, then shifted.
        self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);
        self.w
    }
}
#[doc = "Reader of field `LUT_TR2_SEL`"]
pub type LUT_TR2_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `LUT_TR2_SEL`"]
pub struct LUT_TR2_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> LUT_TR2_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 16:19; `value` is masked to 4 bits, then shifted.
        self.w.bits = (self.w.bits & !(0x0f << 16)) | (((value as u32) & 0x0f) << 16);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:3 - LUT input signal 'tr0_in' source selection. '0': data unit output; '1'..'7': LUT 1..7 outputs; '8'..'11': chip_data lines; '12'..'15': io_data_in lines (the exact line index depends on whether the LUT is in bank 0-3 or 4-7; see the reference manual)."]
    #[inline(always)]
    pub fn lut_tr0_sel(&self) -> LUT_TR0_SEL_R {
        let field = self.bits & 0x0f;
        LUT_TR0_SEL_R::new(field as u8)
    }
    #[doc = "Bits 8:11 - LUT input signal 'tr1_in' source selection. '0'..'7': LUT 0..7 outputs; '8'..'11': chip_data lines; '12'..'15': io_data_in lines (the exact line index depends on whether the LUT is in bank 0-3 or 4-7; see the reference manual)."]
    #[inline(always)]
    pub fn lut_tr1_sel(&self) -> LUT_TR1_SEL_R {
        let field = (self.bits >> 8) & 0x0f;
        LUT_TR1_SEL_R::new(field as u8)
    }
    #[doc = "Bits 16:19 - LUT input signal 'tr2_in' source selection. Encoding is the same as for LUT_TR1_SEL."]
    #[inline(always)]
    pub fn lut_tr2_sel(&self) -> LUT_TR2_SEL_R {
        let field = (self.bits >> 16) & 0x0f;
        LUT_TR2_SEL_R::new(field as u8)
    }
}
impl W {
    #[doc = "Bits 0:3 - LUT input signal 'tr0_in' source selection (see the LUT_TR0_SEL reader for the full source encoding)."]
    #[inline(always)]
    pub fn lut_tr0_sel(&mut self) -> LUT_TR0_SEL_W {
        LUT_TR0_SEL_W { w: self }
    }
    #[doc = "Bits 8:11 - LUT input signal 'tr1_in' source selection (see the LUT_TR1_SEL reader for the full source encoding)."]
    #[inline(always)]
    pub fn lut_tr1_sel(&mut self) -> LUT_TR1_SEL_W {
        LUT_TR1_SEL_W { w: self }
    }
    #[doc = "Bits 16:19 - LUT input signal 'tr2_in' source selection. Encoding is the same as for LUT_TR1_SEL."]
    #[inline(always)]
    pub fn lut_tr2_sel(&mut self) -> LUT_TR2_SEL_W {
        LUT_TR2_SEL_W { w: self }
    }
}
|
#[doc = "Register `LCKR` reader"]
pub type R = crate::R<LCKR_SPEC>;
#[doc = "Register `LCKR` writer"]
pub type W = crate::W<LCKR_SPEC>;
// svd2rust-generated reader/writer plumbing for the LCK0 bit; the LCK1..LCK15
// fields below alias these types via `pub use`.
#[doc = "Field `LCK0` reader - Port x lock bit y (y= 0..15)"]
pub type LCK0_R = crate::BitReader<LCK0_A>;
#[doc = "Port x lock bit y (y= 0..15)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LCK0_A {
    #[doc = "0: Port configuration not locked"]
    Unlocked = 0,
    #[doc = "1: Port configuration locked"]
    Locked = 1,
}
impl From<LCK0_A> for bool {
    #[inline(always)]
    fn from(variant: LCK0_A) -> Self {
        variant as u8 != 0
    }
}
impl LCK0_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LCK0_A {
        match self.bits {
            false => LCK0_A::Unlocked,
            true => LCK0_A::Locked,
        }
    }
    #[doc = "Port configuration not locked"]
    #[inline(always)]
    pub fn is_unlocked(&self) -> bool {
        *self == LCK0_A::Unlocked
    }
    #[doc = "Port configuration locked"]
    #[inline(always)]
    pub fn is_locked(&self) -> bool {
        *self == LCK0_A::Locked
    }
}
#[doc = "Field `LCK0` writer - Port x lock bit y (y= 0..15)"]
pub type LCK0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, LCK0_A>;
impl<'a, REG, const O: u8> LCK0_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Port configuration not locked"]
    #[inline(always)]
    pub fn unlocked(self) -> &'a mut crate::W<REG> {
        self.variant(LCK0_A::Unlocked)
    }
    #[doc = "Port configuration locked"]
    #[inline(always)]
    pub fn locked(self) -> &'a mut crate::W<REG> {
        self.variant(LCK0_A::Locked)
    }
}
#[doc = "Field `LCK1` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK1_R;
#[doc = "Field `LCK2` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK2_R;
#[doc = "Field `LCK3` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK3_R;
#[doc = "Field `LCK4` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK4_R;
#[doc = "Field `LCK5` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK5_R;
#[doc = "Field `LCK6` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK6_R;
#[doc = "Field `LCK7` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK7_R;
#[doc = "Field `LCK8` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK8_R;
#[doc = "Field `LCK9` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK9_R;
#[doc = "Field `LCK10` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK10_R;
#[doc = "Field `LCK11` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK11_R;
#[doc = "Field `LCK12` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK12_R;
#[doc = "Field `LCK13` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK13_R;
#[doc = "Field `LCK14` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK14_R;
#[doc = "Field `LCK15` reader - Port x lock bit y (y= 0..15)"]
pub use LCK0_R as LCK15_R;
#[doc = "Field `LCK1` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK1_W;
#[doc = "Field `LCK2` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK2_W;
#[doc = "Field `LCK3` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK3_W;
#[doc = "Field `LCK4` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK4_W;
#[doc = "Field `LCK5` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK5_W;
#[doc = "Field `LCK6` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK6_W;
#[doc = "Field `LCK7` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK7_W;
#[doc = "Field `LCK8` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK8_W;
#[doc = "Field `LCK9` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK9_W;
#[doc = "Field `LCK10` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK10_W;
#[doc = "Field `LCK11` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK11_W;
#[doc = "Field `LCK12` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK12_W;
#[doc = "Field `LCK13` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK13_W;
#[doc = "Field `LCK14` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK14_W;
#[doc = "Field `LCK15` writer - Port x lock bit y (y= 0..15)"]
pub use LCK0_W as LCK15_W;
// svd2rust-generated reader/writer plumbing for the LCKK (lock key) bit.
#[doc = "Field `LCKK` reader - Port x lock bit y (y= 0..15)"]
pub type LCKK_R = crate::BitReader<LCKK_A>;
#[doc = "Port x lock bit y (y= 0..15)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LCKK_A {
    #[doc = "0: Port configuration lock key not active"]
    NotActive = 0,
    #[doc = "1: Port configuration lock key active"]
    Active = 1,
}
impl From<LCKK_A> for bool {
    #[inline(always)]
    fn from(variant: LCKK_A) -> Self {
        variant as u8 != 0
    }
}
impl LCKK_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LCKK_A {
        match self.bits {
            false => LCKK_A::NotActive,
            true => LCKK_A::Active,
        }
    }
    #[doc = "Port configuration lock key not active"]
    #[inline(always)]
    pub fn is_not_active(&self) -> bool {
        *self == LCKK_A::NotActive
    }
    #[doc = "Port configuration lock key active"]
    #[inline(always)]
    pub fn is_active(&self) -> bool {
        *self == LCKK_A::Active
    }
}
#[doc = "Field `LCKK` writer - Port x lock bit y (y= 0..15)"]
pub type LCKK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, LCKK_A>;
impl<'a, REG, const O: u8> LCKK_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Port configuration lock key not active"]
    #[inline(always)]
    pub fn not_active(self) -> &'a mut crate::W<REG> {
        self.variant(LCKK_A::NotActive)
    }
    #[doc = "Port configuration lock key active"]
    #[inline(always)]
    pub fn active(self) -> &'a mut crate::W<REG> {
        self.variant(LCKK_A::Active)
    }
}
// svd2rust-generated read accessors: one getter per LCKR bit field.
impl R {
    #[doc = "Bit 0 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck0(&self) -> LCK0_R {
        LCK0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck1(&self) -> LCK1_R {
        LCK1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck2(&self) -> LCK2_R {
        LCK2_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck3(&self) -> LCK3_R {
        LCK3_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck4(&self) -> LCK4_R {
        LCK4_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck5(&self) -> LCK5_R {
        LCK5_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck6(&self) -> LCK6_R {
        LCK6_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck7(&self) -> LCK7_R {
        LCK7_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck8(&self) -> LCK8_R {
        LCK8_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck9(&self) -> LCK9_R {
        LCK9_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck10(&self) -> LCK10_R {
        LCK10_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck11(&self) -> LCK11_R {
        LCK11_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck12(&self) -> LCK12_R {
        LCK12_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck13(&self) -> LCK13_R {
        LCK13_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck14(&self) -> LCK14_R {
        LCK14_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lck15(&self) -> LCK15_R {
        LCK15_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    pub fn lckk(&self) -> LCKK_R {
        LCKK_R::new(((self.bits >> 16) & 1) != 0)
    }
}
// svd2rust-generated write accessors: one typed writer per LCKR bit field.
impl W {
    #[doc = "Bit 0 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck0(&mut self) -> LCK0_W<LCKR_SPEC, 0> {
        LCK0_W::new(self)
    }
    #[doc = "Bit 1 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck1(&mut self) -> LCK1_W<LCKR_SPEC, 1> {
        LCK1_W::new(self)
    }
    #[doc = "Bit 2 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck2(&mut self) -> LCK2_W<LCKR_SPEC, 2> {
        LCK2_W::new(self)
    }
    #[doc = "Bit 3 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck3(&mut self) -> LCK3_W<LCKR_SPEC, 3> {
        LCK3_W::new(self)
    }
    #[doc = "Bit 4 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck4(&mut self) -> LCK4_W<LCKR_SPEC, 4> {
        LCK4_W::new(self)
    }
    #[doc = "Bit 5 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck5(&mut self) -> LCK5_W<LCKR_SPEC, 5> {
        LCK5_W::new(self)
    }
    #[doc = "Bit 6 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck6(&mut self) -> LCK6_W<LCKR_SPEC, 6> {
        LCK6_W::new(self)
    }
    #[doc = "Bit 7 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck7(&mut self) -> LCK7_W<LCKR_SPEC, 7> {
        LCK7_W::new(self)
    }
    #[doc = "Bit 8 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck8(&mut self) -> LCK8_W<LCKR_SPEC, 8> {
        LCK8_W::new(self)
    }
    #[doc = "Bit 9 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck9(&mut self) -> LCK9_W<LCKR_SPEC, 9> {
        LCK9_W::new(self)
    }
    #[doc = "Bit 10 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck10(&mut self) -> LCK10_W<LCKR_SPEC, 10> {
        LCK10_W::new(self)
    }
    #[doc = "Bit 11 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck11(&mut self) -> LCK11_W<LCKR_SPEC, 11> {
        LCK11_W::new(self)
    }
    #[doc = "Bit 12 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck12(&mut self) -> LCK12_W<LCKR_SPEC, 12> {
        LCK12_W::new(self)
    }
    #[doc = "Bit 13 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck13(&mut self) -> LCK13_W<LCKR_SPEC, 13> {
        LCK13_W::new(self)
    }
    #[doc = "Bit 14 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck14(&mut self) -> LCK14_W<LCKR_SPEC, 14> {
        LCK14_W::new(self)
    }
    #[doc = "Bit 15 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lck15(&mut self) -> LCK15_W<LCKR_SPEC, 15> {
        LCK15_W::new(self)
    }
    #[doc = "Bit 16 - Port x lock bit y (y= 0..15)"]
    #[inline(always)]
    #[must_use]
    pub fn lckk(&mut self) -> LCKK_W<LCKR_SPEC, 16> {
        LCKK_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Bypasses the typed field writers; the caller is responsible for
        // providing a valid bit pattern.
        self.bits = bits;
        self
    }
}
#[doc = "GPIO port configuration lock register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`lckr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`lckr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct LCKR_SPEC;
impl crate::RegisterSpec for LCKR_SPEC {
    // LCKR is a 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`lckr::R`](R) reader structure"]
impl crate::Readable for LCKR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`lckr::W`](W) writer structure"]
impl crate::Writable for LCKR_SPEC {
    // Zero bitmaps: per svd2rust convention, no fields here are declared as
    // write-0-to-modify or write-1-to-modify.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets LCKR to value 0"]
impl crate::Resettable for LCKR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::io::{Result, Write};
use pulldown_cmark::{Tag, Event};
use crate::gen::{State, States, Generator, Document};
#[derive(Debug)]
pub struct InlineEmphasis;

impl<'a> State<'a> for InlineEmphasis {
    /// Opens a LaTeX `\emph{` group. `_tag` is required by the trait but unused
    /// here (underscored to silence the unused-variable warning).
    fn new(_tag: Tag<'a>, gen: &mut Generator<'a, impl Document<'a>, impl Write>) -> Result<Self> {
        write!(gen.get_out(), "\\emph{{")?;
        Ok(InlineEmphasis)
    }
    /// Closes the group opened by `new`. `_peek` (the next event) is unused.
    fn finish(self, gen: &mut Generator<'a, impl Document<'a>, impl Write>, _peek: Option<&Event<'a>>) -> Result<()> {
        write!(gen.get_out(), "}}")?;
        Ok(())
    }
}
#[derive(Debug)]
pub struct InlineStrong;

impl<'a> State<'a> for InlineStrong {
    /// Opens a LaTeX `\textbf{` group. `_tag` is required by the trait but
    /// unused here (underscored to silence the unused-variable warning).
    fn new(_tag: Tag<'a>, gen: &mut Generator<'a, impl Document<'a>, impl Write>) -> Result<Self> {
        write!(gen.get_out(), "\\textbf{{")?;
        Ok(InlineStrong)
    }
    /// Closes the group opened by `new`. `_peek` (the next event) is unused.
    fn finish(self, gen: &mut Generator<'a, impl Document<'a>, impl Write>, _peek: Option<&Event<'a>>) -> Result<()> {
        write!(gen.get_out(), "}}")?;
        Ok(())
    }
}
#[derive(Debug)]
pub struct InlineCode;

impl<'a> State<'a> for InlineCode {
    /// Opens a LaTeX `\texttt{` group. `_tag` is required by the trait but
    /// unused here (underscored to silence the unused-variable warning).
    fn new(_tag: Tag<'a>, gen: &mut Generator<'a, impl Document<'a>, impl Write>) -> Result<Self> {
        write!(gen.get_out(), "\\texttt{{")?;
        Ok(InlineCode)
    }
    /// Closes the group opened by `new`. `_peek` (the next event) is unused.
    fn finish(self, gen: &mut Generator<'a, impl Document<'a>, impl Write>, _peek: Option<&Event<'a>>) -> Result<()> {
        write!(gen.get_out(), "}}")?;
        Ok(())
    }
}
|
use x86_64::structures::idt::InterruptDescriptorTable;
pub fn interrupts_callback(table: &mut InterruptDescriptorTable) {
} |
extern crate bincode;
#[macro_use]
extern crate clap;
extern crate colored;
#[cfg(feature = "tensorflow")]
extern crate conform;
extern crate dot;
#[macro_use]
extern crate error_chain;
extern crate insideout;
extern crate itertools;
#[macro_use]
extern crate log;
extern crate ndarray;
#[macro_use]
extern crate prettytable;
extern crate rand;
extern crate simplelog;
extern crate terminal_size;
extern crate textwrap;
#[macro_use]
extern crate tfdeploy;
extern crate atty;
extern crate libc;
extern crate pbr;
#[macro_use]
extern crate rouille;
extern crate open;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
use std::fs::File;
use std::io::Read;
use std::process;
use std::str::FromStr;
use insideout::InsideOut;
use simplelog::Level::{Error, Trace};
use simplelog::{Config, LevelFilter, TermLogger};
use tfdeploy::tfpb;
use tfdeploy::{DataType, Tensor};
use tfpb::graph::GraphDef;
use errors::*;
#[allow(unused_imports)]
use format::Row;
mod analyse;
mod compare;
mod display_graph;
mod dump;
mod errors;
mod format;
mod graphviz;
mod profile;
mod prune;
mod rusage;
mod utils;
mod web;
/// Default cap on the number of profiling iterations per node.
const DEFAULT_MAX_ITERS: u64 = 100_000;
/// Default cap on per-node execution time, in milliseconds.
const DEFAULT_MAX_TIME: u64 = 200;
/// Entrypoint for the command-line interface.
fn main() {
    use clap::*;
    let mut app = clap_app!(("tfdeploy-cli") =>
        (version: "1.0")
        (author: "Romain Liautaud <romain.liautaud@snips.ai>")
        (about: "A set of tools to compare tfdeploy with tensorflow.")
        (@setting UnifiedHelpMessage)
        (@setting SubcommandRequired)
        (@setting DeriveDisplayOrder)
        (@arg model: +required +takes_value
            "Sets the TensorFlow model to use (in Protobuf format).")
        (@arg inputs: -i --input ... [input]
            "Sets the input nodes names (auto-detects otherwise).")
        (@arg output: -o --output [output]
            "Sets the output node name (auto-detects otherwise).")
        (@arg size: -s --size [size]
            "Generates random input of a given size, e.g. 32x64xf32.")
        (@arg data: -f --data [data]
            "Loads input data from a given file.")
        (@arg verbosity: -v ... "Sets the level of verbosity.")
    );
    // Subcommand descriptions belong in `about` (shown in the help listing);
    // `help` would replace the whole auto-generated help text.
    let compare = clap::SubCommand::with_name("compare")
        .about("Compares the output of tfdeploy and tensorflow on randomly generated input.");
    app = app.subcommand(output_options(compare));
    let dump = clap::SubCommand::with_name("dump")
        .about("Dumps the Tensorflow graph in human readable form.");
    app = app.subcommand(output_options(dump));
    let profile = clap::SubCommand::with_name("profile")
        .about("Benchmarks tfdeploy on randomly generated input.")
        .arg(
            Arg::with_name("max_iters")
                .short("n")
                // Without `takes_value`, `matches.value_of("max_iters")` could
                // never return a value, so -n was silently ignored.
                .takes_value(true)
                // Advertised default now matches DEFAULT_MAX_ITERS.
                .help("Sets the maximum number of iterations for each node [default: 100_000]."),
        )
        .arg(
            Arg::with_name("max_time")
                .short("t")
                .takes_value(true)
                // Advertised default now matches DEFAULT_MAX_TIME.
                .help("Sets the maximum execution time for each node (in ms) [default: 200]."),
        )
        .arg(
            // ProfilingMode::from_clap checks `is_present("bench")`, but the
            // flag was never registered — it could never be set before.
            Arg::with_name("bench")
                .long("bench")
                .help("Run the network as a whole benchmark"),
        )
        .arg(
            Arg::with_name("buffering")
                .short("b")
                .help("Run the stream network without inner instrumentations"),
        );
    app = app.subcommand(output_options(profile));
    let analyse = clap::SubCommand::with_name("analyse")
        .about("Analyses the graph to infer properties about tensors (experimental).");
    app = app.subcommand(output_options(analyse));
    let optimize = clap::SubCommand::with_name("optimize").about("Optimize the graph");
    app = app.subcommand(output_options(optimize));
    let matches = app.get_matches();
    if let Err(e) = handle(matches) {
        error!("{}", e.to_string());
        process::exit(1)
    }
}
/// Adds the output-related flags shared by every subcommand.
fn output_options<'a, 'b>(command: clap::App<'a, 'b>) -> clap::App<'a, 'b> {
    use clap::*;
    command
        .arg(
            Arg::with_name("web")
                .long("web")
                // Fixed user-facing typo: "Display int a web interface".
                .help("Display in a web interface"),
        )
        .arg(
            Arg::with_name("json")
                .long("json")
                .takes_value(true)
                .help("output to a json file"),
        )
        .arg(
            Arg::with_name("const")
                .long("const")
                .help("also display consts nodes"),
        )
}
/// Structure holding the parsed parameters.
pub struct Parameters {
    // Model path as given on the command line.
    name: String,
    // Raw TensorFlow GraphDef protobuf.
    graph: GraphDef,
    // The model as deserialized by tfdeploy.
    tfd_model: tfdeploy::Model,
    // Reference TensorFlow session, only built when comparing against TF.
    #[cfg(feature = "tensorflow")]
    tf_model: conform::tf::Tensorflow,
    // Parsed --size/--data input description, if any.
    input: Option<InputParameters>,
    // Resolved ids of the input nodes.
    input_node_ids: Vec<usize>,
    // Resolved id of the output node.
    output_node_id: usize,
}
impl Parameters {
    /// Parses the command-line arguments.
    ///
    /// Loads both the raw GraphDef and the tfdeploy model from the `model`
    /// path, resolves input/output node ids (auto-detecting them when the
    /// flags are absent) and parses any input-data options.
    pub fn from_clap(matches: &clap::ArgMatches) -> Result<Parameters> {
        // `model` is a required clap argument, so this unwrap cannot fail.
        let name = matches.value_of("model").unwrap();
        let graph = tfdeploy::Model::graphdef_for_path(&name)?;
        let tfd_model = tfdeploy::for_path(&name)?;
        // The TensorFlow reference model is only loaded when the feature is on.
        #[cfg(feature = "tensorflow")]
        let tf_model = conform::tf::for_path(&name)?;
        let input = InputParameters::from_clap(matches)?;
        // Explicit -i/--input names win; otherwise auto-detect placeholders.
        let input_node_ids = match matches.values_of("inputs") {
            Some(names) => names
                .map(|s| Ok(tfd_model.node_id_by_name(s)?))
                .collect::<Result<_>>()?,
            None => tfdeploy::analyser::detect_inputs(&tfd_model)?
                .ok_or("Impossible to auto-detect input nodes: no placeholder.")?,
        };
        let output_node_id = match matches.value_of("output") {
            Some(name) => tfd_model.node_id_by_name(name)?,
            None => tfdeploy::analyser::detect_output(&tfd_model)?
                .ok_or("Impossible to auto-detect output nodes.")?,
        };
        // The struct only has a `tf_model` field under the "tensorflow"
        // feature, hence the two cfg-gated constructor calls.
        #[cfg(feature = "tensorflow")]
        return Ok(Parameters {
            name: name.to_string(),
            graph,
            tfd_model,
            tf_model,
            input_node_ids,
            output_node_id,
            input,
        });
        #[cfg(not(feature = "tensorflow"))]
        return Ok(Parameters {
            name: name.to_string(),
            graph,
            tfd_model,
            input_node_ids,
            output_node_id,
            input,
        });
    }
}
/// Structure holding the input parameters (eventually containing data).
pub struct InputParameters {
    // Concrete tensor parsed from --data, when given.
    data: Option<Tensor>,
    // One entry per dimension; `None` marks the streaming dimension ("S").
    shape: Vec<Option<usize>>,
    // Element type of the input tensor.
    datatype: DataType,
}
impl InputParameters {
    /// Builds the optional input description from `--size`/`--data`;
    /// `--data` takes precedence when both flags are present.
    fn from_clap(matches: &clap::ArgMatches) -> Result<Option<InputParameters>> {
        let input = match (matches.value_of("size"), matches.value_of("data")) {
            (_, Some(filename)) => Some(Self::for_data(filename)?),
            (Some(size), _) => Some(Self::for_size(size)?),
            _ => None,
        };
        Ok(input)
    }
    /// Parses a `--size` spec of the form `{dim}x...x{type}` (e.g. `32x64xf32`).
    /// The literal `S` marks the (single allowed) streaming dimension.
    fn for_size(size: &str) -> std::result::Result<InputParameters, errors::Error> {
        let splits = size.split("x").collect::<Vec<_>>();
        // NOTE(review): `split` always yields at least one element, so this
        // guard can never fire; it presumably meant `splits.len() < 2`.
        if splits.len() < 1 {
            bail!("The <size> argument should be formatted as {size}x{...}x{type}.");
        }
        // The last `x`-separated component is the element type; the rest are
        // the dimensions.
        let (datatype, shape) = splits.split_last().unwrap();
        let shape = shape
            .iter()
            .map(|s| match *s {
                "S" => Ok(None), // Streaming dimension.
                _ => Ok(Some(s.parse()?)), // Regular dimension.
            })
            .collect::<Result<Vec<_>>>()?;
        if shape.iter().filter(|o| o.is_none()).count() > 1 {
            bail!("The <size> argument doesn't support more than one streaming dimension.");
        }
        let datatype = match datatype.to_lowercase().as_str() {
            "f64" => DataType::F64,
            "f32" => DataType::F32,
            "i32" => DataType::I32,
            "i8" => DataType::I8,
            "u8" => DataType::U8,
            _ => bail!("Type of the input should be f64, f32, i32, i8 or u8."),
        };
        Ok(InputParameters {
            data: None,
            shape,
            datatype,
        })
    }
    /// Parses the `data` command-line argument.
    ///
    /// The file's first line is a size spec (see `for_size`); the remaining
    /// lines hold whitespace-separated values.
    fn for_data(filename: &str) -> Result<InputParameters> {
        let mut file = File::open(filename)?;
        let mut data = String::new();
        file.read_to_string(&mut data)?;
        let mut lines = data.lines();
        let InputParameters {
            shape, datatype, ..
        } = InputParameters::for_size(lines.next().ok_or("Empty data file")?)?;
        let values = lines.flat_map(|l| l.split_whitespace()).collect::<Vec<_>>();
        // We know there is at most one streaming dimension, so we can deduce the
        // missing value with a simple division.
        let product: usize = shape.iter().map(|o| o.unwrap_or(1)).product();
        let missing = values.len() / product;
        let data_shape = shape
            .iter()
            .map(|o| o.unwrap_or(missing))
            .collect::<Vec<_>>();
        // Parse every value as the requested element type and reshape into the
        // resolved shape.
        macro_rules! for_type {
            ($t:ty) => {{
                let array = ndarray::Array::from_iter(values.iter().map(|v| v.parse::<$t>().unwrap()));
                array.into_shape(data_shape)?
            }};
        }
        let tensor = match datatype {
            DataType::F64 => for_type!(f64).into(),
            DataType::F32 => for_type!(f32).into(),
            DataType::I32 => for_type!(i32).into(),
            DataType::I8 => for_type!(i8).into(),
            DataType::U8 => for_type!(u8).into(),
            _ => unimplemented!(),
        };
        Ok(InputParameters {
            data: Some(tensor),
            shape,
            datatype,
        })
    }
    /// True when any dimension is the streaming dimension (`S`).
    fn streaming(&self) -> bool {
        self.shape.iter().any(|dim| dim.is_none())
    }
}
/// How the profiler should drive the network; the `max_iters`/`max_time`
/// variants carry the per-node iteration and time caps.
pub enum ProfilingMode {
    Regular { max_iters: u64, max_time: u64 },
    RegularBenching { max_iters: u64, max_time: u64 },
    // Streaming-input modes; `StreamBuffering` corresponds to the -b flag.
    StreamCruising,
    StreamBuffering,
    StreamBenching { max_iters: u64, max_time: u64 },
}
impl ProfilingMode {
    /// Derives the profiling mode from the CLI flags plus whether the input
    /// has a streaming dimension. `-n`/`-t` fall back to the defaults.
    pub fn from_clap(matches: &clap::ArgMatches, streaming: bool) -> Result<ProfilingMode> {
        let max_iters = matches
            .value_of("max_iters")
            .map(u64::from_str)
            .inside_out()?
            .unwrap_or(DEFAULT_MAX_ITERS);
        let max_time = matches
            .value_of("max_time")
            .map(u64::from_str)
            .inside_out()?
            .unwrap_or(DEFAULT_MAX_TIME);
        let buffering = matches.is_present("buffering");
        let bench = matches.is_present("bench");
        // Buffering only matters for streaming inputs; bench is ignored when
        // buffering is requested (same precedence as the original if-chain).
        let mode = match (streaming, buffering, bench) {
            (true, true, _) => ProfilingMode::StreamBuffering,
            (true, false, true) => ProfilingMode::StreamBenching { max_iters, max_time },
            (true, false, false) => ProfilingMode::StreamCruising,
            (false, _, true) => ProfilingMode::RegularBenching { max_iters, max_time },
            (false, _, false) => ProfilingMode::Regular { max_iters, max_time },
        };
        Ok(mode)
    }
}
/// Output flags shared by every subcommand.
pub struct OutputParameters {
    // Serve the results through the web interface (--web).
    web: bool,
    // Also display const nodes (--const; `const` is a keyword, hence `konst`).
    konst: bool,
    // Optional JSON output path (--json).
    json: Option<String>,
}
impl OutputParameters {
    /// Reads the shared output flags from a subcommand's matches.
    pub fn from_clap(matches: &clap::ArgMatches) -> Result<OutputParameters> {
        Ok(OutputParameters {
            web: matches.is_present("web"),
            konst: matches.is_present("const"),
            json: matches.value_of("json").map(String::from),
        })
    }
}
/// Handles the command-line input.
///
/// Configures logging from the `-v` occurrence count, parses the shared
/// parameters, then dispatches to the selected subcommand.
fn handle(matches: clap::ArgMatches) -> Result<()> {
    // Configure the logging level.
    let level = match matches.occurrences_of("verbosity") {
        0 => LevelFilter::Warn,
        1 => LevelFilter::Info,
        2 => LevelFilter::Debug,
        _ => LevelFilter::Trace,
    };
    let log_config = Config {
        time: None,
        time_format: None,
        level: Some(Error),
        target: None,
        location: Some(Trace),
    };
    // Prefer the terminal-aware logger; fall back to the plain one (e.g. when
    // stderr is not a tty).
    if TermLogger::init(level, log_config).is_err()
        && simplelog::SimpleLogger::init(level, log_config).is_err()
    {
        // Fixed typo in the panic message ("initiatize").
        panic!("Could not initialize logger")
    };
    let params = Parameters::from_clap(&matches)?;
    // Whether any input dimension is a streaming dimension ("S").
    let streaming = params
        .input
        .as_ref()
        .map(|i| i.streaming())
        .unwrap_or(false);
    match matches.subcommand() {
        ("compare", Some(m)) => compare::handle(params, OutputParameters::from_clap(m)?),
        ("dump", Some(m)) => dump::handle(params, OutputParameters::from_clap(m)?),
        ("profile", Some(m)) => profile::handle(
            params,
            ProfilingMode::from_clap(&m, streaming)?,
            OutputParameters::from_clap(m)?,
        ),
        // `optimize` reuses the analyser with optimization enabled.
        ("analyse", Some(m)) => analyse::handle(params, false, OutputParameters::from_clap(m)?),
        ("optimize", Some(m)) => analyse::handle(params, true, OutputParameters::from_clap(m)?),
        (s, _) => bail!("Unknown subcommand {}.", s),
    }
}
|
use std::any::Any;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{self, Hash};
use std::marker::PhantomData;
use std::mem::{self, MaybeUninit};
use std::ops;
use std::ptr;
use ::alloc::alloc::{self, Layout};
#[cfg(feature = "coerce")]
use std::marker::Unsize;
#[cfg(feature = "coerce")]
use std::ops::CoerceUnsized;
// Allow `SmallBox<T, _>` to coerce to `SmallBox<dyn Trait, _>` (and other
// unsizing coercions) when the unstable `coerce` feature is enabled.
#[cfg(feature = "coerce")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, Space> CoerceUnsized<SmallBox<U, Space>>
    for SmallBox<T, Space>
{
}
/// Box value on stack or on heap depending on its size
///
/// This macro is similar to `SmallBox::new`, which is used to create a new `Smallbox` instance,
/// but relaxing the constraint `T: Sized`.
/// In order to do that, this macro will check the coercion rules from type `T` to
/// the target type. This macro will invoke a compile-time error on any invalid type coercion.
///
/// You can think that it has the signature of `smallbox!<U: Sized, T: ?Sized>(val: U) -> SmallBox<T, Space>`
///
/// # Example
///
/// ```
/// #[macro_use]
/// extern crate smallbox;
///
/// # fn main() {
/// use smallbox::SmallBox;
/// use smallbox::space::*;
///
/// let small: SmallBox<[usize], S4> = smallbox!([0usize; 2]);
/// let large: SmallBox<[usize], S4> = smallbox!([1usize; 8]);
///
/// assert_eq!(small.len(), 2);
/// assert_eq!(large[7], 1);
///
/// assert!(large.is_heap() == true);
/// # }
/// ```
#[macro_export]
macro_rules! smallbox {
    ( $e: expr ) => {{
        let val = $e;
        // Taking this reference is what checks (and performs) the `U -> T`
        // unsizing coercion at compile time.
        let ptr = &val as *const _;
        #[allow(unsafe_code)]
        // SAFETY: `ptr` points at `val`, which is moved into `new_unchecked`.
        unsafe {
            $crate::SmallBox::new_unchecked(val, ptr)
        }
    }};
}
/// An optimized box that stores its value on stack or on heap depending on its size
pub struct SmallBox<T: ?Sized, Space> {
    // Inline storage; holds the value when it fits `Space`'s size/alignment.
    space: MaybeUninit<Space>,
    // Fat-pointer slot: the address part is null for inline (and zero-sized)
    // values and points to the heap allocation otherwise; any metadata
    // (slice length, vtable) is preserved for unsized `T`.
    ptr: *const T,
    // Marks logical ownership of a `T` for drop-check and variance purposes.
    _phantom: PhantomData<T>,
}
impl<T: ?Sized, Space> SmallBox<T, Space> {
    /// Box value on stack or on heap depending on its size.
    ///
    /// # Example
    ///
    /// ```
    /// use smallbox::SmallBox;
    /// use smallbox::space::*;
    ///
    /// let small: SmallBox<_, S4> = SmallBox::new([0usize; 2]);
    /// let large: SmallBox<_, S4> = SmallBox::new([1usize; 8]);
    ///
    /// assert_eq!(small.len(), 2);
    /// assert_eq!(large[7], 1);
    ///
    /// assert!(large.is_heap() == true);
    /// ```
    #[inline(always)]
    pub fn new(val: T) -> SmallBox<T, Space>
    where
        T: Sized,
    {
        smallbox!(val)
    }
    // Internal constructor used by the `smallbox!` macro.
    //
    // `ptr`'s address component is discarded by `new_copy`; only its (possibly
    // fat) pointer metadata for `T` — slice length or vtable — is retained.
    // The caller must guarantee that metadata actually describes `val`.
    #[doc(hidden)]
    #[inline]
    pub unsafe fn new_unchecked<U>(val: U, ptr: *const T) -> SmallBox<T, Space>
    where
        U: Sized,
    {
        let result = Self::new_copy(&val, ptr);
        // Ownership moved into the bitwise copy above; suppress `val`'s
        // destructor so it cannot run twice.
        mem::forget(val);
        result
    }
    /// Change the capacity of `SmallBox`.
    ///
    /// This method may move stack-allocated data from stack to heap
    /// when inline space is not sufficient. And once the data
    /// is moved to heap, it'll never be moved again.
    ///
    /// # Example
    ///
    /// ```
    /// use smallbox::SmallBox;
    /// use smallbox::space::{S2, S4};
    ///
    /// let s: SmallBox::<_, S4> = SmallBox::new([0usize; 4]);
    /// let m: SmallBox::<_, S2> = s.resize();
    /// ```
    pub fn resize<ToSpace>(self) -> SmallBox<T, ToSpace> {
        unsafe {
            let result = if self.is_heap() {
                // don't change anything if data is already on heap
                let space = MaybeUninit::<ToSpace>::uninit();
                SmallBox {
                    space,
                    ptr: self.ptr,
                    _phantom: PhantomData,
                }
            } else {
                // Inline data: re-copy into the new space, spilling to the
                // heap if `ToSpace` is too small or under-aligned.
                let val: &T = &*self;
                SmallBox::<T, ToSpace>::new_copy(val, val as *const T)
            };
            // The payload now lives in `result`; skip `self`'s destructor so
            // the value is neither dropped nor its heap block freed twice.
            mem::forget(self);
            result
        }
    }
    /// Returns true if data is allocated on heap.
    ///
    /// # Example
    ///
    /// ```
    /// use smallbox::SmallBox;
    /// use smallbox::space::S1;
    ///
    /// let stacked: SmallBox::<usize, S1> = SmallBox::new(0usize);
    /// assert!(!stacked.is_heap());
    ///
    /// let heaped: SmallBox::<(usize, usize), S1> = SmallBox::new((0usize, 1usize));
    /// assert!(heaped.is_heap());
    /// ```
    #[inline]
    pub fn is_heap(&self) -> bool {
        // Inline (and zero-sized) values store a null address in `ptr`; only
        // heap allocations store a real address (see `new_copy`).
        !self.ptr.is_null()
    }
    // Core constructor: bitwise-copies `*val` into inline space or a fresh
    // heap allocation and assembles the internal fat pointer.
    unsafe fn new_copy<U>(val: &U, ptr: *const T) -> SmallBox<T, Space>
    where
        U: ?Sized,
    {
        let size = mem::size_of_val::<U>(val);
        let align = mem::align_of_val::<U>(val);
        let mut space = MaybeUninit::<Space>::uninit();
        let (ptr_addr, ptr_copy): (*const u8, *mut u8) = if size == 0 {
            // Zero-sized value: no storage needed. Record a null address
            // ("inline") and use a dangling but aligned copy destination.
            (ptr::null(), align as *mut u8)
        } else if size > mem::size_of::<Space>() || align > mem::align_of::<Space>() {
            // Heap
            // NOTE(review): `alloc::alloc` returns null on allocation failure
            // and that null is not checked here — confirm intended OOM
            // behavior upstream.
            let layout = Layout::for_value::<U>(val);
            let heap_ptr = alloc::alloc(layout);
            (heap_ptr, heap_ptr)
        } else {
            // Stack
            (ptr::null(), space.as_mut_ptr() as *mut u8)
        };
        // Overwrite the pointer but retain any extra data inside the fat pointer.
        let mut ptr = ptr;
        let ptr_ptr = &mut ptr as *mut _ as *mut usize;
        ptr_ptr.write(ptr_addr as usize);
        ptr::copy_nonoverlapping(val as *const _ as *const u8, ptr_copy, size);
        SmallBox {
            space,
            ptr,
            _phantom: PhantomData,
        }
    }
    // Reinterprets the box as holding a `U`. The caller must already have
    // verified the runtime type (see `downcast`); only then is this sound.
    unsafe fn downcast_unchecked<U: Any>(self) -> SmallBox<U, Space> {
        let size = mem::size_of::<U>();
        let mut space = MaybeUninit::<Space>::uninit();
        if !self.is_heap() {
            // Inline payload: move the value's bytes into the new box's space.
            ptr::copy_nonoverlapping(
                self.space.as_ptr() as *const u8,
                space.as_mut_ptr() as *mut u8,
                size,
            );
        };
        let ptr = self.ptr as *const U;
        // Ownership transferred to the returned box; skip `self`'s destructor.
        mem::forget(self);
        SmallBox {
            space,
            ptr,
            _phantom: PhantomData,
        }
    }
    // Resolves the live payload address: `ptr` as-is for heap data, otherwise
    // `ptr` with its address component swapped for the inline space.
    #[inline]
    unsafe fn as_ptr(&self) -> *const T {
        let mut ptr = self.ptr;
        if !self.is_heap() {
            // Overwrite the pointer but retain any extra data inside the fat pointer.
            let ptr_ptr = &mut ptr as *mut _ as *mut usize;
            ptr_ptr.write(self.space.as_ptr() as *const () as usize);
        }
        ptr
    }
    // Mutable counterpart of `as_ptr`.
    #[inline]
    unsafe fn as_mut_ptr(&mut self) -> *mut T {
        let mut ptr = self.ptr;
        if !self.is_heap() {
            // Overwrite the pointer but retain any extra data inside the fat pointer.
            let ptr_ptr = &mut ptr as *mut _ as *mut usize;
            ptr_ptr.write(self.space.as_mut_ptr() as *mut () as usize);
        }
        ptr as *mut _
    }
    /// Consumes the SmallBox and returns ownership of the boxed value
    ///
    /// # Examples
    /// ```
    /// use smallbox::SmallBox;
    /// use smallbox::space::S1;
    ///
    /// let stacked : SmallBox<_, S1> = SmallBox::new([21usize]);
    /// let val = stacked.into_inner();
    /// assert_eq!(val[0], 21);
    ///
    /// let boxed : SmallBox<_, S1> = SmallBox::new(vec![21, 56, 420]);
    /// let val = boxed.into_inner();
    /// assert_eq!(val[1], 56);
    /// ```
    #[inline]
    pub fn into_inner(self) -> T
    where
        T: Sized,
    {
        // Move the value out with a bitwise read; `self` must not drop it.
        let ret_val: T = unsafe { self.as_ptr().read() };
        // Just drops the heap without dropping the boxed value
        if self.is_heap() {
            let layout = Layout::new::<T>();
            unsafe {
                alloc::dealloc(self.ptr as *mut u8, layout);
            }
        }
        // Suppress `Drop for SmallBox`, which would otherwise drop the value
        // and free the heap block a second time.
        mem::forget(self);
        ret_val
    }
}
impl<Space> SmallBox<dyn Any, Space> {
    /// Attempt to downcast the box to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// #[macro_use]
    /// extern crate smallbox;
    ///
    /// # fn main() {
    /// use std::any::Any;
    /// use smallbox::SmallBox;
    /// use smallbox::space::*;
    ///
    /// fn print_if_string(value: SmallBox<dyn Any, S1>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// fn main() {
    ///     let my_string = "Hello World".to_string();
    ///     print_if_string(smallbox!(my_string));
    ///     print_if_string(smallbox!(0i8));
    /// }
    /// # }
    /// ```
    #[inline]
    pub fn downcast<T: Any>(self) -> Result<SmallBox<T, Space>, Self> {
        // Runtime type check first; only then is the unchecked cast sound.
        if self.is::<T>() {
            unsafe { Ok(self.downcast_unchecked()) }
        } else {
            // Wrong type: hand the box back to the caller unchanged.
            Err(self)
        }
    }
}
impl<Space> SmallBox<dyn Any + Send, Space> {
    /// Attempt to downcast the box to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// #[macro_use]
    /// extern crate smallbox;
    ///
    /// # fn main() {
    /// use std::any::Any;
    /// use smallbox::SmallBox;
    /// use smallbox::space::*;
    ///
    /// fn print_if_string(value: SmallBox<dyn Any + Send, S1>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// fn main() {
    ///     let my_string = "Hello World".to_string();
    ///     print_if_string(smallbox!(my_string));
    ///     print_if_string(smallbox!(0i8));
    /// }
    /// # }
    /// ```
    #[inline]
    pub fn downcast<T: Any>(self) -> Result<SmallBox<T, Space>, Self> {
        // Runtime type check first; only then is the unchecked cast sound.
        if self.is::<T>() {
            unsafe { Ok(self.downcast_unchecked()) }
        } else {
            // Wrong type: hand the box back to the caller unchanged.
            Err(self)
        }
    }
}
impl<T: ?Sized, Space> ops::Deref for SmallBox<T, Space> {
    type Target = T;

    /// Borrows the boxed value, wherever it currently lives.
    fn deref(&self) -> &T {
        // `as_ptr` resolves the payload address (inline space or heap) and
        // re-attaches any fat-pointer metadata.
        let payload = unsafe { self.as_ptr() };
        // SAFETY: the pointer is valid and aligned for the lifetime of `&self`.
        unsafe { &*payload }
    }
}
impl<T: ?Sized, Space> ops::DerefMut for SmallBox<T, Space> {
    /// Mutably borrows the boxed value, wherever it currently lives.
    fn deref_mut(&mut self) -> &mut T {
        // Resolve the payload address first, then reborrow it mutably.
        let payload = unsafe { self.as_mut_ptr() };
        // SAFETY: the pointer is valid, aligned, and uniquely borrowed for
        // the lifetime of `&mut self`.
        unsafe { &mut *payload }
    }
}
impl<T: ?Sized, Space> ops::Drop for SmallBox<T, Space> {
    fn drop(&mut self) {
        unsafe {
            // Compute the layout before dropping: doing so requires a live
            // reference to the (possibly unsized) value.
            let layout = Layout::for_value::<T>(&*self);
            // Run the value's destructor in place (inline space or heap).
            ptr::drop_in_place::<T>(&mut **self);
            // Free only heap allocations; inline space needs no dealloc.
            if self.is_heap() {
                alloc::dealloc(self.ptr as *mut u8, layout);
            }
        }
    }
}
impl<T: Clone, Space> Clone for SmallBox<T, Space>
where
    T: Sized,
{
    /// Clones the boxed value into a brand-new `SmallBox`.
    fn clone(&self) -> Self {
        SmallBox::new(T::clone(&**self))
    }
}
impl<T: ?Sized + fmt::Display, Space> fmt::Display for SmallBox<T, Space> {
    /// Formats exactly like the boxed value itself.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let inner: &T = self;
        fmt::Display::fmt(inner, f)
    }
}
impl<T: ?Sized + fmt::Debug, Space> fmt::Debug for SmallBox<T, Space> {
    /// Debug-formats exactly like the boxed value itself.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let inner: &T = self;
        fmt::Debug::fmt(inner, f)
    }
}
impl<T: ?Sized, Space> fmt::Pointer for SmallBox<T, Space> {
    /// Prints the address of the payload (inline space or heap allocation),
    /// not the address of the `SmallBox` itself.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Coerce the inner reference to a raw pointer for formatting.
        let inner: &T = self;
        let address = inner as *const T;
        fmt::Pointer::fmt(&address, f)
    }
}
impl<T: ?Sized + PartialEq, Space> PartialEq for SmallBox<T, Space> {
    /// Two boxes are equal iff the boxed values are equal.
    fn eq(&self, other: &SmallBox<T, Space>) -> bool {
        let lhs: &T = self;
        let rhs: &T = other;
        lhs == rhs
    }
}
impl<T: ?Sized + PartialOrd, Space> PartialOrd for SmallBox<T, Space> {
    /// Orders boxes exactly as the boxed values order themselves.
    fn partial_cmp(&self, other: &SmallBox<T, Space>) -> Option<Ordering> {
        let a: &T = self;
        let b: &T = other;
        a.partial_cmp(b)
    }
    fn lt(&self, other: &SmallBox<T, Space>) -> bool {
        let a: &T = self;
        let b: &T = other;
        a < b
    }
    fn le(&self, other: &SmallBox<T, Space>) -> bool {
        let a: &T = self;
        let b: &T = other;
        a <= b
    }
    fn ge(&self, other: &SmallBox<T, Space>) -> bool {
        let a: &T = self;
        let b: &T = other;
        a >= b
    }
    fn gt(&self, other: &SmallBox<T, Space>) -> bool {
        let a: &T = self;
        let b: &T = other;
        a > b
    }
}
impl<T: ?Sized + Ord, Space> Ord for SmallBox<T, Space> {
    /// Totally orders boxes by their boxed values.
    fn cmp(&self, other: &SmallBox<T, Space>) -> Ordering {
        let a: &T = self;
        let b: &T = other;
        a.cmp(b)
    }
}
// Equality delegates to `T` (see `PartialEq` above in the original file), so
// `Eq` follows directly from `T: Eq`.
impl<T: ?Sized + Eq, Space> Eq for SmallBox<T, Space> {}
impl<T: ?Sized + Hash, Space> Hash for SmallBox<T, Space> {
    /// Hashes exactly like the boxed value itself.
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        let inner: &T = self;
        Hash::hash(inner, state);
    }
}
// SAFETY: a `SmallBox` owns its `T` exclusively (inline or behind a unique
// heap pointer), so sending the box across threads is equivalent to sending
// the `T` itself.
unsafe impl<T: ?Sized + Send, Space> Send for SmallBox<T, Space> {}
// SAFETY: shared access to the box only hands out `&T`, so sharing the box is
// equivalent to sharing the `T` itself.
unsafe impl<T: ?Sized + Sync, Space> Sync for SmallBox<T, Space> {}
#[cfg(test)]
mod tests {
    use super::SmallBox;
    use crate::space::*;
    use std::any::Any;
    // A value that fits `S1` stays inline; a larger one spills to the heap.
    #[test]
    fn test_basic() {
        let stacked: SmallBox<usize, S1> = SmallBox::new(1234usize);
        assert!(*stacked == 1234);
        let heaped: SmallBox<(usize, usize), S1> = SmallBox::new((0, 1));
        assert!(*heaped == (0, 1));
    }
    // Exercises the hidden unsized constructor directly: a 2-element array
    // fits `S2` inline, a 3-element one is heap-allocated.
    #[test]
    fn test_new_unchecked() {
        let val = [0usize, 1];
        let ptr = &val as *const _;
        unsafe {
            let stacked: SmallBox<[usize], S2> = SmallBox::new_unchecked(val, ptr);
            assert!(*stacked == [0, 1]);
            assert!(!stacked.is_heap());
        }
        let val = [0usize, 1, 2];
        let ptr = &val as *const _;
        unsafe {
            let heaped: SmallBox<dyn Any, S2> = SmallBox::new_unchecked(val, ptr);
            assert!(heaped.is_heap());
            if let Some(array) = heaped.downcast_ref::<[usize; 3]>() {
                assert_eq!(*array, [0, 1, 2]);
            } else {
                unreachable!();
            }
        }
    }
    // The macro must be callable from code that forbids `unsafe` — its
    // internal unsafety is self-contained (`#[deny(unsafe_code)]` checks it).
    #[test]
    #[deny(unsafe_code)]
    fn test_macro() {
        let stacked: SmallBox<dyn Any, S1> = smallbox!(1234usize);
        if let Some(num) = stacked.downcast_ref::<usize>() {
            assert_eq!(*num, 1234);
        } else {
            unreachable!();
        }
        let heaped: SmallBox<dyn Any, S1> = smallbox!([0usize, 1]);
        if let Some(array) = heaped.downcast_ref::<[usize; 2]>() {
            assert_eq!(*array, [0, 1]);
        } else {
            unreachable!();
        }
        // Closures coerce to boxed `dyn Fn` and stay callable through deref.
        let is_even: SmallBox<dyn Fn(u8) -> bool, S1> = smallbox!(|num: u8| num % 2 == 0);
        assert!(!is_even(5));
        assert!(is_even(6));
    }
    // With the `coerce` feature, `SmallBox::new` itself can produce unsized
    // boxes via `CoerceUnsized`, no macro needed.
    #[test]
    #[cfg(feature = "coerce")]
    fn test_coerce() {
        let stacked: SmallBox<dyn Any, S1> = SmallBox::new(1234usize);
        if let Some(num) = stacked.downcast_ref::<usize>() {
            assert_eq!(*num, 1234);
        } else {
            unreachable!();
        }
        let heaped: SmallBox<dyn Any, S1> = SmallBox::new([0usize, 1]);
        if let Some(array) = heaped.downcast_ref::<[usize; 2]>() {
            assert_eq!(*array, [0, 1]);
        } else {
            unreachable!();
        }
    }
    // The boxed value's destructor must run exactly once, for both inline
    // and heap-allocated storage.
    #[test]
    fn test_drop() {
        use std::cell::Cell;
        struct Struct<'a>(&'a Cell<bool>, u8);
        impl<'a> Drop for Struct<'a> {
            fn drop(&mut self) {
                self.0.set(true);
            }
        }
        let flag = Cell::new(false);
        let stacked: SmallBox<_, S2> = SmallBox::new(Struct(&flag, 0));
        assert!(!stacked.is_heap());
        assert!(flag.get() == false);
        drop(stacked);
        assert!(flag.get() == true);
        let flag = Cell::new(false);
        let heaped: SmallBox<_, S1> = SmallBox::new(Struct(&flag, 0));
        assert!(heaped.is_heap());
        assert!(flag.get() == false);
        drop(heaped);
        assert!(flag.get() == true);
    }
    // The `Space` type is only raw storage (`MaybeUninit`); its own `Drop`
    // must never be invoked.
    #[test]
    fn test_dont_drop_space() {
        struct NoDrop(S1);
        impl Drop for NoDrop {
            fn drop(&mut self) {
                unreachable!();
            }
        }
        drop(SmallBox::<_, NoDrop>::new([true]));
    }
    // A value exceeding the inline space must be heap-allocated.
    #[test]
    fn test_oversize() {
        let fit = SmallBox::<_, S1>::new([1usize]);
        let oversize = SmallBox::<_, S1>::new([1usize, 2]);
        assert!(!fit.is_heap());
        assert!(oversize.is_heap());
    }
    // `resize` keeps data inline while it fits, spills it once it does not,
    // and never moves it back off the heap afterwards.
    #[test]
    fn test_resize() {
        let m = SmallBox::<_, S4>::new([1usize, 2]);
        let l = m.resize::<S8>();
        assert!(!l.is_heap());
        let m = l.resize::<S4>();
        assert!(!m.is_heap());
        let s = m.resize::<S2>();
        assert!(!s.is_heap());
        let xs = s.resize::<S1>();
        assert!(xs.is_heap());
        let m = xs.resize::<S4>();
        assert!(m.is_heap());
        assert_eq!(*m, [1usize, 2]);
    }
    // `Clone` produces an equal, independent box.
    #[test]
    fn test_clone() {
        let stacked: SmallBox<[usize; 2], S2> = smallbox!([1usize, 2]);
        assert_eq!(stacked, stacked.clone())
    }
    // Zero-sized values and zero-sized spaces are both supported.
    #[test]
    fn test_zst() {
        struct ZSpace;
        let zst: SmallBox<[usize], S1> = smallbox!([1usize; 0]);
        assert_eq!(*zst, [1usize; 0]);
        let zst: SmallBox<[usize], ZSpace> = smallbox!([1usize; 0]);
        assert_eq!(*zst, [1usize; 0]);
        let zst: SmallBox<[usize], ZSpace> = smallbox!([1usize; 2]);
        assert_eq!(*zst, [1usize; 2]);
    }
    // `downcast` succeeds for the right type (inline and heap, with and
    // without `Send`) and returns the box unchanged for the wrong type.
    #[test]
    fn test_downcast() {
        let stacked: SmallBox<dyn Any, S1> = smallbox!(0x01u32);
        assert!(!stacked.is_heap());
        assert_eq!(SmallBox::new(0x01), stacked.downcast::<u32>().unwrap());
        let heaped: SmallBox<dyn Any, S1> = smallbox!([1usize, 2]);
        assert!(heaped.is_heap());
        assert_eq!(
            smallbox!([1usize, 2]),
            heaped.downcast::<[usize; 2]>().unwrap()
        );
        let stacked_send: SmallBox<dyn Any + Send, S1> = smallbox!(0x01u32);
        assert!(!stacked_send.is_heap());
        assert_eq!(SmallBox::new(0x01), stacked_send.downcast::<u32>().unwrap());
        let heaped_send: SmallBox<dyn Any + Send, S1> = smallbox!([1usize, 2]);
        assert!(heaped_send.is_heap());
        assert_eq!(
            SmallBox::new([1usize, 2]),
            heaped_send.downcast::<[usize; 2]>().unwrap()
        );
        let mismatched: SmallBox<dyn Any, S1> = smallbox!(0x01u32);
        assert!(mismatched.downcast::<u8>().is_err());
        let mismatched: SmallBox<dyn Any, S1> = smallbox!(0x01u32);
        assert!(mismatched.downcast::<u64>().is_err());
    }
    // `Option<SmallBox<..>>` must not confuse the inline/heap discriminant
    // encoding (the internal pointer doubles as a flag).
    #[test]
    fn test_option_encoding() {
        let tester: SmallBox<Box<()>, S2> = SmallBox::new(Box::new(()));
        assert!(Some(tester).is_some());
    }
    // `into_inner` returns the value for both inline and heap storage.
    #[test]
    fn test_into_inner() {
        let tester: SmallBox<_, S1> = SmallBox::new([21usize]);
        let val = tester.into_inner();
        assert_eq!(val[0], 21);
        let tester: SmallBox<_, S1> = SmallBox::new(vec![21, 56, 420]);
        let val = tester.into_inner();
        assert_eq!(val[1], 56);
    }
}
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate rocket;
pub mod utils;
pub mod api;
use api::deploy;
fn main() {
rocket::ignite().mount("/api", routes![deploy::create]).launch();
} |
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;
use clap::Parser;
use reverie::syscalls::Displayable;
use reverie::syscalls::Errno;
use reverie::syscalls::Syscall;
use reverie::Error;
use reverie::GlobalTool;
use reverie::Guest;
use reverie::Pid;
use reverie::Tool;
use reverie_util::CommonToolArguments;
use serde::Deserialize;
use serde::Serialize;
/// A tool to inject "chaos" into a running process. A pathological
/// kernel is simulated by forcing reads to only return one byte a time.
#[derive(Debug, Parser)]
struct Args {
    /// CLI options shared by all reverie tools (logging/tracing setup etc.).
    #[clap(flatten)]
    common_opts: CommonToolArguments,
    /// Options specific to the chaos interventions below.
    #[clap(flatten)]
    chaos_opts: ChaosOpts,
}
/// Tuning knobs controlling which chaos interventions are applied.
/// Serialized so the global tool can distribute it as its `Config`.
#[derive(Parser, Debug, Serialize, Deserialize, Clone, Default)]
struct ChaosOpts {
    /// Skips the first N syscalls of a process before doing any intervention.
    /// This is useful when you need to skip past an error caused by the tool.
    #[clap(long, value_name = "N", default_value = "0")]
    skip: u64,
    /// If set, does not intercept `read`-like system calls and modify them.
    #[clap(long)]
    no_read: bool,
    /// If set, does not intercept `recv`-like system calls and modify them.
    #[clap(long)]
    no_recv: bool,
    /// If set, does not inject random `EINTR` errors.
    #[clap(long)]
    no_interrupt: bool,
}
/// Per-process tool state (one instance is created per traced pid; see
/// `Tool::new` below).
#[derive(Debug, Default)]
struct ChaosTool {
    // Number of syscalls observed so far, shared across the process's threads.
    count: AtomicU64,
}
impl Clone for ChaosTool {
fn clone(&self) -> Self {
ChaosTool {
count: AtomicU64::new(self.count.load(Ordering::SeqCst)),
}
}
}
/// Global (cross-process) tool state; this tool keeps none — it exists only
/// to carry the `ChaosOpts` config to each process instance.
#[derive(Debug, Default, Clone)]
struct ChaosToolGlobal {}
#[reverie::global_tool]
impl GlobalTool for ChaosToolGlobal {
    // No cross-process RPC protocol is needed; requests and responses are unit.
    type Request = ();
    type Response = ();
    // The CLI options are distributed to every process as the tool config.
    type Config = ChaosOpts;
    // RPCs are accepted but ignored.
    async fn receive_rpc(&self, _from: Pid, _request: ()) {}
}
#[reverie::tool]
impl Tool for ChaosTool {
    type GlobalState = ChaosToolGlobal;
    // Per-thread flag: set once an EINTR has been injected for the current
    // read attempt so the immediate retry is allowed through (reset after a
    // syscall is actually injected).
    type ThreadState = bool;
    // One tool instance per traced process, starting its syscall counter at 0.
    fn new(_pid: Pid, _cfg: &ChaosOpts) -> Self {
        Self {
            count: AtomicU64::new(0),
        }
    }
    // Intercepts every guest syscall: skips the first `config.skip` calls,
    // shrinks read/recv buffers to at most 1 byte, and injects spurious
    // interrupt errors into reads.
    async fn handle_syscall_event<T: Guest<Self>>(
        &self,
        guest: &mut T,
        syscall: Syscall,
    ) -> Result<i64, Error> {
        // Sequence number of this syscall within the process.
        let count = self.count.fetch_add(1, Ordering::SeqCst);
        let config = guest.config().clone();
        let memory = guest.memory();
        // This provides a way to wait until the dynamic linker has done its job
        // before we start trying to create chaos. glibc's dynamic linker has a
        // bug where it doesn't retry `read` calls that don't return the
        // expected amount of data.
        if count < config.skip {
            eprintln!(
                "SKIPPED [pid={}, n={}] {}",
                guest.pid(),
                count,
                syscall.display(&memory),
            );
            // Run the syscall unmodified and make it the result of this handler.
            return guest.tail_inject(syscall).await;
        }
        // Transform the syscall arguments.
        let syscall = match syscall {
            Syscall::Read(read) => {
                if !config.no_interrupt && !*guest.thread_state() {
                    // Return an EINTR instead of running the syscall.
                    // Programs should always retry the read in this case.
                    *guest.thread_state_mut() = true;
                    // XXX: inject a signal like SIGINT?
                    let ret = Err(Errno::ERESTARTSYS);
                    eprintln!(
                        "[pid={}, n={}] {} = {}",
                        guest.pid(),
                        count,
                        syscall.display(&memory),
                        ret.unwrap_or_else(|errno| -errno.into_raw() as i64)
                    );
                    return Ok(ret?);
                } else if !config.no_read {
                    // Reduce read length to 1 byte at most.
                    Syscall::Read(read.with_len(1.min(read.len())))
                } else {
                    // Return syscall unmodified.
                    Syscall::Read(read)
                }
            }
            Syscall::Recvfrom(recv) if !config.no_recv => {
                // Reduce recv length to 1 byte at most.
                Syscall::Recvfrom(recv.with_len(1.min(recv.len())))
            }
            x => {
                // Any other syscall: log it and run it unmodified.
                eprintln!(
                    "[pid={}, n={}] {}",
                    guest.pid(),
                    count,
                    syscall.display(&memory),
                );
                return guest.tail_inject(x).await;
            }
        };
        // A real syscall is about to run; arm the interrupt flag again for
        // the next read.
        *guest.thread_state_mut() = false;
        let ret = guest.inject(syscall).await;
        eprintln!(
            "[pid={}, n={}] {} = {}",
            guest.pid(),
            count,
            syscall.display_with_outputs(&memory),
            ret.unwrap_or_else(|errno| -errno.into_raw() as i64)
        );
        Ok(ret?)
    }
}
/// Entry point: parse CLI arguments, trace the guest command under the chaos
/// tool, and propagate the guest's exit status.
#[tokio::main]
async fn main() -> Result<(), Error> {
    // `Parser::parse` is clap's entry point; `from_args` is the deprecated
    // StructOpt-era alias (removed in clap 4).
    let args = Args::parse();
    let log_guard = args.common_opts.init_tracing();
    let tracer = reverie_ptrace::TracerBuilder::<ChaosTool>::new(args.common_opts.into())
        .config(args.chaos_opts)
        .spawn()
        .await?;
    let (status, _) = tracer.wait().await?;
    drop(log_guard); // Flush logs before exiting.
    // Mirror the guest's exit: re-raise a fatal signal or exit with its code.
    status.raise_or_exit()
}
|
mod application;
mod queue;
pub mod shared_res;
mod vertex;
mod window;
pub use application::GliumApplication;
|
// Copyright 2015 The GeoRust Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use rustc_serialize::json::{self, Json, ToJson};
use ::{Bbox, Crs, Error, FromObject, Geometry, util};
/// Feature Objects
///
/// [GeoJSON Format Specification § 2.2]
/// (http://geojson.org/geojson-spec.html#feature-objects)
#[derive(Clone, Debug, PartialEq)]
pub struct Feature {
    /// Optional bounding box of the feature.
    pub bbox: Option<Bbox>,
    /// Optional coordinate reference system.
    pub crs: Option<Crs>,
    /// The feature's geometry (required by the spec).
    pub geometry: Geometry,
    /// Optional identifier; the spec allows any JSON value.
    pub id: Option<json::Json>,
    /// Optional `properties` member (arbitrary key/value JSON object).
    pub properties: Option<json::Object>,
}
impl<'a> From<&'a Feature> for json::Object {
fn from(feature: &'a Feature) -> json::Object {
let mut map = BTreeMap::new();
map.insert(String::from("type"), "Feature".to_json());
map.insert(String::from("geometry"), feature.geometry.to_json());
if let Some(ref properties) = feature.properties {
map.insert(String::from("properties"), properties.to_json());
}
if let Some(ref crs) = feature.crs {
map.insert(String::from("crs"), crs.to_json());
}
if let Some(ref bbox) = feature.bbox {
map.insert(String::from("bbox"), bbox.to_json());
}
if let Some(ref id) = feature.id {
map.insert(String::from("id"), id.to_json());
}
return map;
}
}
impl FromObject for Feature {
    /// Deserializes a `Feature` from a JSON object, failing on the first
    /// member that cannot be extracted.
    fn from_object(object: &json::Object) -> Result<Self, Error> {
        // Extract each member in turn; `try!` short-circuits on error in the
        // same order the original struct literal evaluated them.
        let geometry = try!(util::get_geometry(object));
        let properties = try!(util::get_properties(object));
        let id = try!(util::get_id(object));
        let crs = try!(util::get_crs(object));
        let bbox = try!(util::get_bbox(object));
        Ok(Feature {
            bbox: bbox,
            crs: crs,
            geometry: geometry,
            id: id,
            properties: properties,
        })
    }
}
impl ToJson for Feature {
    /// Converts the feature into a `Json::Object` via its `From` conversion.
    fn to_json(&self) -> json::Json {
        json::Json::Object(json::Object::from(self))
    }
}
#[cfg(test)]
mod tests {
    use rustc_serialize::json::{self, ToJson};
    use super::super::{Feature, GeoJson, Geometry, Value};
    // Round-trips a minimal point feature through JSON encoding and decoding
    // and checks both directions against fixed expectations.
    #[test]
    fn encode_decode_feature() {
        let feature_json_str = "{\"geometry\":{\"coordinates\":[1.0,2.0],\"type\":\"Point\"},\"properties\":{},\"type\":\"Feature\"}";
        let feature = Feature {
            geometry: Geometry {
                value: Value::Point(vec![1., 2.]),
                crs: None,
                bbox: None,
            },
            properties: Some(json::Object::new()),
            crs: None,
            bbox: None,
            id: None,
        };
        // Test encoding
        let json_string = json::encode(&feature.to_json()).unwrap();
        assert_eq!(json_string, feature_json_str);
        // Test decoding
        let decoded_feature = match json_string.parse() {
            Ok(GeoJson::Feature(f)) => f,
            _ => unreachable!(),
        };
        assert_eq!(decoded_feature, feature);
    }
}
|
// svd2rust-generated reader/writer wrappers for this 32-bit register.
#[doc = "Reader of register RSLV_LIST_PEER_RPA_BASE_ADDR"]
pub type R = crate::R<u32, super::RSLV_LIST_PEER_RPA_BASE_ADDR>;
#[doc = "Writer for register RSLV_LIST_PEER_RPA_BASE_ADDR"]
pub type W = crate::W<u32, super::RSLV_LIST_PEER_RPA_BASE_ADDR>;
#[doc = "Register RSLV_LIST_PEER_RPA_BASE_ADDR `reset()`'s with value 0"]
impl crate::ResetValue for super::RSLV_LIST_PEER_RPA_BASE_ADDR {
    type Type = u32;
    // Hardware reset value of the register (all bits cleared).
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `RSLV_LIST_PEER_RPA_BASE_ADDR`"]
pub type RSLV_LIST_PEER_RPA_BASE_ADDR_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `RSLV_LIST_PEER_RPA_BASE_ADDR`"]
pub struct RSLV_LIST_PEER_RPA_BASE_ADDR_W<'a> {
    // Borrow of the register writer this proxy updates.
    w: &'a mut W,
}
impl<'a> RSLV_LIST_PEER_RPA_BASE_ADDR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // Unsafe: arbitrary bit patterns are not validated against the values the
    // hardware accepts for this field.
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear bits 0..16 of the staged register value, then splice in the
        // new field value.
        self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:15 - Device address values written to the list are written as 16-bit wide address."]
    #[inline(always)]
    pub fn rslv_list_peer_rpa_base_addr(&self) -> RSLV_LIST_PEER_RPA_BASE_ADDR_R {
        // Extract the low 16 bits of the register as the field value.
        RSLV_LIST_PEER_RPA_BASE_ADDR_R::new((self.bits & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - Device address values written to the list are written as 16-bit wide address."]
    #[inline(always)]
    pub fn rslv_list_peer_rpa_base_addr(&mut self) -> RSLV_LIST_PEER_RPA_BASE_ADDR_W {
        // Hand out a write proxy that stages updates into this writer.
        RSLV_LIST_PEER_RPA_BASE_ADDR_W { w: self }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.