text stringlengths 8 4.13M |
|---|
// Copyright (c) The diem-devtools Contributors
// SPDX-License-Identifier: MIT OR Apache-2.0
//! Generate and read JUnit reports in Rust.
mod report;
mod serialize;
pub use report::*;
// Re-export `quick_xml::Result` so it can be used by downstream consumers.
pub use quick_xml::Result;
|
use crate::models::task::{Task, TaskState};
/// An ordered collection of [`Task`]s with index-based access.
pub struct TaskStack {
    tasks: Vec<Task>,
}
impl TaskStack {
    /// Create an empty stack.
    pub fn new() -> TaskStack {
        TaskStack { tasks: Vec::new() }
    }
    /// Append a task to the end of the stack.
    pub fn add(&mut self, task: Task) {
        self.tasks.push(task);
    }
    /// Remove and return the task at `task_index`.
    ///
    /// Panics if `task_index` is out of bounds (same as `Vec::remove`).
    pub fn remove(&mut self, task_index: usize) -> Task {
        self.tasks.remove(task_index)
    }
    /// Borrow the task at `task_index`.
    ///
    /// Panics if `task_index` is out of bounds.
    pub fn get(&self, task_index: usize) -> &Task {
        &self.tasks[task_index]
    }
    /// Render every task as org-mode text, one task per line.
    pub fn list(&self) -> String {
        self.tasks
            .iter()
            .map(|task| task.to_org())
            .collect::<Vec<String>>()
            .join("\n")
    }
    /// All tasks whose state is `DONE`.
    ///
    /// Takes `&self` (was `&mut self`): the method never mutates, and
    /// relaxing the receiver is backward compatible for every caller.
    pub fn get_completed(&self) -> Vec<&Task> {
        self.tasks
            .iter()
            .filter(|task| task.state == TaskState::DONE)
            .collect()
    }
    /// Number of tasks whose state is `DONE`.
    pub fn count_completed(&self) -> usize {
        self.get_completed().len()
    }
    /// Total number of tasks.
    pub fn length(&self) -> usize {
        self.tasks.len()
    }
}
impl Default for TaskStack {
    /// Same as [`TaskStack::new`]; satisfies clippy's `new_without_default`.
    fn default() -> Self {
        Self::new()
    }
}
|
use beryllium::*;
/// SDL event-loop smoke test: open a renderer window, grab every connected
/// controller, and log events until a quit event arrives.
fn main() {
    let sdl = Sdl::init(InitFlags::EVERYTHING).unwrap();
    let rend_win = sdl
        .new_renderer_window(
            "Event Test",
            None,
            [800, 600],
            WindowCreationFlags::default(),
        )
        .unwrap();
    // Open every joystick present at startup so controller events fire.
    let joystick_count = sdl.get_number_of_joysticks().unwrap();
    let mut controllers = Vec::new();
    for index in 0..joystick_count {
        controllers.push(sdl.open_controller(index).unwrap());
    }
    'main: loop {
        // Drain the pending event queue, logging each event with its timestamp.
        while let Some((event, time)) = sdl.poll_event() {
            println!("[{time}] {event:?}", time = time, event = event);
            if let Event::Quit = event {
                break 'main;
            }
        }
        rend_win.clear().unwrap();
        rend_win.present();
    }
}
|
use std::char;
#[aoc(day14, part1)]
fn solve_part1(input: &str) -> String {
let NUM_RECIPES: usize = input.trim().parse().unwrap();
let mut recipes = vec![3, 7];
let (mut elf_1, mut elf_2) = (0, 1);
loop {
let mut sum = recipes[elf_1] + recipes[elf_2];
if sum < 10 {
recipes.push(sum);
} else {
let second = sum % 10;
sum /= 10;
let first = sum % 10;
recipes.push(first);
recipes.push(second);
}
elf_1 += (1 + recipes[elf_1]);
elf_1 %= recipes.len();
elf_2 += (1 + recipes[elf_2]);
elf_2 %= recipes.len();
if recipes.len() > NUM_RECIPES + 10 {
break;
}
}
let mut result = String::new();
for r in recipes.iter().skip(NUM_RECIPES).take(10) {
result.push(char::from_digit(*r as u32, 10).unwrap());
}
result
}
#[aoc(day14, part2)]
fn solve_part2(input: &str) -> u32 {
let NUM_RECIPES: usize = input.trim().parse().unwrap();
let mut recipes = vec![3, 7];
let (mut elf_1, mut elf_2) = (0, 1);
let mut curr_digits = vec![];
let pattern = to_digits(NUM_RECIPES);
let mut p_i = 0;
let mut r_i = 0;
let mut i = 0;
loop {
let mut sum = recipes[elf_1] + recipes[elf_2];
curr_digits.push(recipes[i]);
if sum < 10 {
recipes.push(sum);
} else {
let second = sum % 10;
sum /= 10;
let first = sum % 10;
recipes.push(first);
recipes.push(second);
}
elf_1 += (1 + recipes[elf_1]);
elf_1 %= recipes.len();
elf_2 += (1 + recipes[elf_2]);
elf_2 %= recipes.len();
if curr_digits[r_i] == pattern[p_i] {
if curr_digits.len() == pattern.len() {
i -= pattern.len() - 1;
break;
}
r_i += 1;
p_i += 1;
} else {
curr_digits.clear();
p_i = 0;
r_i = 0;
if recipes[i] == pattern[p_i] {
curr_digits.push(recipes[i]);
r_i += 1;
p_i += 1;
}
}
i += 1;
}
i as u32
}
/// Split a non-negative integer into its base-10 digits, most significant
/// digit first (`0` yields `[0]`).
fn to_digits(n: usize) -> Vec<usize> {
    n.to_string()
        .bytes()
        .map(|b| usize::from(b - b'0'))
        .collect()
}
#[cfg(test)]
mod test {
    use super::*;
    // Expected values are the published AoC 2018 day 14 examples.
    #[test]
    fn test_part1() {
        assert_eq!(&solve_part1("18"), "9251071085");
        assert_eq!(&solve_part1("2018"), "5941429882");
    }
    #[test]
    fn test_part2() {
        assert_eq!(solve_part2("51589"), 9);
        assert_eq!(solve_part2("59414"), 2018);
    }
}
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
#![warn(missing_debug_implementations, missing_docs)]
//! Aligned allocator
use std::collections::VecDeque;
use std::ops::Deref;
use std::sync::{Arc, Condvar, Mutex, MutexGuard};
use std::time::Duration;
use crate::common::{ANNError, ANNResult};
#[derive(Debug)]
/// A thread-safe FIFO queue: a `VecDeque` behind a `Mutex`, plus a `Condvar`
/// (paired with its own flag mutex) used to signal pushes to waiting threads.
pub struct ConcurrentQueue<T> {
    /// The underlying queue.
    q: Mutex<VecDeque<T>>,
    /// Mutex paired with `push_cv` for `wait_timeout`; the boolean flag
    /// itself is never read or written.
    c: Mutex<bool>,
    /// Condition variable notified on every `push`/`insert`.
    push_cv: Condvar,
}
/// Default-construct an empty queue.
///
/// Generalized from the original `impl Default for ConcurrentQueue<usize>`:
/// a generic impl covers every element type and keeps
/// `ConcurrentQueue::<usize>::default()` working unchanged.
impl<T> Default for ConcurrentQueue<T> {
    fn default() -> Self {
        Self::new()
    }
}
impl<T> ConcurrentQueue<T> {
    /// Create an empty concurrent queue.
    pub fn new() -> Self {
        Self {
            q: Mutex::new(VecDeque::new()),
            c: Mutex::new(false),
            push_cv: Condvar::new(),
        }
    }
    /// Reserve capacity for at least `size` additional elements.
    /// Blocks until the queue mutex can be acquired.
    pub fn reserve(&self, size: usize) -> ANNResult<()> {
        let mut guard = lock(&self.q)?;
        guard.reserve(size);
        Ok(())
    }
    /// Number of elements currently in the queue.
    pub fn size(&self) -> ANNResult<usize> {
        let guard = lock(&self.q)?;
        Ok(guard.len())
    }
    /// Whether the queue is currently empty.
    pub fn is_empty(&self) -> ANNResult<bool> {
        Ok(self.size()? == 0)
    }
    /// Push an element onto the back of the queue and wake all waiters.
    pub fn push(&self, new_val: T) -> ANNResult<()> {
        let mut guard = lock(&self.q)?;
        self.push_internal(&mut guard, new_val);
        // NOTE(review): the notification is sent without holding `self.c`,
        // the mutex `wait_for_push_notify` waits on, so a waiter can miss a
        // notify between its queue check and its wait; the wait timeout
        // bounds the resulting delay. Confirm this is the intended contract.
        self.push_cv.notify_all();
        Ok(())
    }
    /// Append `new_val` while the queue lock is already held.
    fn push_internal(&self, guard: &mut MutexGuard<VecDeque<T>>, new_val: T) {
        guard.push_back(new_val);
    }
    /// Push every element of `iter` under a single lock acquisition, then
    /// wake all waiters once.
    pub fn insert<I>(&self, iter: I) -> ANNResult<()>
    where
        I: IntoIterator<Item = T>,
    {
        let mut guard = lock(&self.q)?;
        for item in iter {
            self.push_internal(&mut guard, item);
        }
        self.push_cv.notify_all();
        Ok(())
    }
    /// Pop the front element, or `None` if the queue is empty.
    pub fn pop(&self) -> ANNResult<Option<T>> {
        let mut guard = lock(&self.q)?;
        Ok(guard.pop_front())
    }
    /// Drop every element currently in the queue.
    pub fn empty_queue(&self) -> ANNResult<()> {
        let mut guard = lock(&self.q)?;
        while !guard.is_empty() {
            let _ = guard.pop_front();
        }
        Ok(())
    }
    /// Block for up to `wait_time` waiting for a push notification.
    ///
    /// Returns `Ok(())` on both notification and timeout; callers are
    /// expected to re-check the queue either way.
    pub fn wait_for_push_notify(&self, wait_time: Duration) -> ANNResult<()> {
        let guard_lock = lock(&self.c)?;
        let _ = self
            .push_cv
            .wait_timeout(guard_lock, wait_time)
            .map_err(|err| {
                ANNError::log_lock_poison_error(format!(
                    "ConcurrentQueue Lock is poisoned, err={}",
                    err
                ))
            })?;
        Ok(())
    }
}
/// Acquire `mutex`, converting a poisoned-lock error into an `ANNError`.
fn lock<T>(mutex: &Mutex<T>) -> ANNResult<MutexGuard<T>> {
    mutex.lock().map_err(|err| {
        ANNError::log_lock_poison_error(format!("ConcurrentQueue lock is poisoned, err={}", err))
    })
}
/// A thread-safe queue that holds instances of `T`.
/// Each instance is stored in a `Box` to keep the size of the queue node constant.
#[derive(Debug)]
pub struct ArcConcurrentBoxedQueue<T> {
    // Shared ownership: clones of this handle operate on the same queue.
    internal_queue: Arc<ConcurrentQueue<Box<T>>>,
}
impl<T> ArcConcurrentBoxedQueue<T> {
    /// Create a new `ArcConcurrentBoxedQueue` backed by its own empty queue.
    pub fn new() -> Self {
        Self {
            internal_queue: Arc::new(ConcurrentQueue::new()),
        }
    }
}
impl<T> Default for ArcConcurrentBoxedQueue<T> {
    /// Same as [`ArcConcurrentBoxedQueue::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<T> Clone for ArcConcurrentBoxedQueue<T> {
    /// Create a new `ArcConcurrentBoxedQueue` that shares the same internal queue
    /// with the existing one. This allows multiple `ArcConcurrentBoxedQueue` to
    /// operate on the same underlying queue.
    fn clone(&self) -> Self {
        Self {
            // O(1): bumps the `Arc` refcount; no elements are copied.
            internal_queue: Arc::clone(&self.internal_queue),
        }
    }
}
/// Deref to the ConcurrentQueue, so `push`/`pop`/etc. can be called directly
/// on the wrapper.
impl<T> Deref for ArcConcurrentBoxedQueue<T> {
    type Target = ConcurrentQueue<Box<T>>;
    fn deref(&self) -> &Self::Target {
        &self.internal_queue
    }
}
#[cfg(test)]
mod tests {
    use crate::model::ConcurrentQueue;
    use std::sync::Arc;
    use std::thread;
    use std::time::Duration;
    // FIFO ordering for sequential push/pop; empty queue pops `None`.
    #[test]
    fn test_push_pop() {
        let queue = ConcurrentQueue::<i32>::new();
        queue.push(1).unwrap();
        queue.push(2).unwrap();
        queue.push(3).unwrap();
        assert_eq!(queue.pop().unwrap(), Some(1));
        assert_eq!(queue.pop().unwrap(), Some(2));
        assert_eq!(queue.pop().unwrap(), Some(3));
        assert_eq!(queue.pop().unwrap(), None);
    }
    // `size`/`is_empty` track pushes and pops.
    #[test]
    fn test_size_empty() {
        let queue = ConcurrentQueue::new();
        assert_eq!(queue.size().unwrap(), 0);
        assert!(queue.is_empty().unwrap());
        queue.push(1).unwrap();
        queue.push(2).unwrap();
        assert_eq!(queue.size().unwrap(), 2);
        assert!(!queue.is_empty().unwrap());
        queue.pop().unwrap();
        queue.pop().unwrap();
        assert_eq!(queue.size().unwrap(), 0);
        assert!(queue.is_empty().unwrap());
    }
    // Bulk `insert` preserves iterator order.
    #[test]
    fn test_insert() {
        let queue = ConcurrentQueue::new();
        let data = vec![1, 2, 3];
        queue.insert(data.into_iter()).unwrap();
        assert_eq!(queue.pop().unwrap(), Some(1));
        assert_eq!(queue.pop().unwrap(), Some(2));
        assert_eq!(queue.pop().unwrap(), Some(3));
        assert_eq!(queue.pop().unwrap(), None);
    }
    // Producer pushes with delays; consumer polls via the timed wait.
    // `-1` is used as a "nothing popped yet" sentinel.
    #[test]
    fn test_notifications() {
        let queue = Arc::new(ConcurrentQueue::new());
        let queue_clone = Arc::clone(&queue);
        let producer = thread::spawn(move || {
            for i in 0..3 {
                thread::sleep(Duration::from_millis(50));
                queue_clone.push(i).unwrap();
            }
        });
        let consumer = thread::spawn(move || {
            let mut values = vec![];
            for _ in 0..3 {
                let mut val = -1;
                while val == -1 {
                    queue
                        .wait_for_push_notify(Duration::from_millis(10))
                        .unwrap();
                    val = queue.pop().unwrap().unwrap_or(-1);
                }
                values.push(val);
            }
            values
        });
        producer.join().unwrap();
        let consumer_results = consumer.join().unwrap();
        assert_eq!(consumer_results, vec![0, 1, 2]);
    }
    // Same producer/consumer shape, but the consumer busy-polls with sleeps
    // instead of using the condition variable.
    #[test]
    fn test_multithreaded_push_pop() {
        let queue = Arc::new(ConcurrentQueue::new());
        let queue_clone = Arc::clone(&queue);
        let producer = thread::spawn(move || {
            for i in 0..10 {
                queue_clone.push(i).unwrap();
                thread::sleep(Duration::from_millis(50));
            }
        });
        let consumer = thread::spawn(move || {
            let mut values = vec![];
            for _ in 0..10 {
                let mut val = -1;
                while val == -1 {
                    val = queue.pop().unwrap().unwrap_or(-1);
                    thread::sleep(Duration::from_millis(10));
                }
                values.push(val);
            }
            values
        });
        producer.join().unwrap();
        let consumer_results = consumer.join().unwrap();
        assert_eq!(consumer_results, (0..10).collect::<Vec<_>>());
    }
    /// This is a single-value test. It avoids the unlimited wait (until the collection got empty) of the previous test.
    /// It will make sure the signal mutex is matching the waiting mutex.
    #[test]
    fn test_wait_for_push_notify() {
        let queue = Arc::new(ConcurrentQueue::<usize>::new());
        let queue_clone = Arc::clone(&queue);
        let producer = thread::spawn(move || {
            thread::sleep(Duration::from_millis(100));
            queue_clone.push(1).unwrap();
        });
        let consumer = thread::spawn(move || {
            queue
                .wait_for_push_notify(Duration::from_millis(200))
                .unwrap();
            assert_eq!(queue.pop().unwrap(), Some(1));
        });
        producer.join().unwrap();
        consumer.join().unwrap();
    }
}
|
/*!
# HTML Escape
This library is for encoding/escaping special characters in HTML and decoding/unescaping HTML entities as well.
## Usage
### Encoding
This crate provides some `encode_*` functions to encode HTML text in different situations.
For example, to put a text between a start tag `<foo>` and an end tag `</foo>`, use the `encode_text` function to escape every `&`, `<`, and `>` in the text.
```rust
extern crate html_escape;
assert_eq!("a &gt; b &amp;&amp; a &lt; c", html_escape::encode_text("a > b && a < c"));
```
The functions suffixed with `_to_writer`, `_to_vec` or `_to_string` are useful to generate HTML.
```rust
extern crate html_escape;
let mut html = String::from("<input value=");
assert_eq!("Hello world!", html_escape::encode_unquoted_attribute_to_string("Hello world!", &mut html));
html.push_str(" placeholder=\"");
assert_eq!("The default value is &quot;Hello world!&quot;.", html_escape::encode_double_quoted_attribute_to_string("The default value is \"Hello world!\".", &mut html));
html.push_str("\"/><script>alert('");
assert_eq!(r"<script>\'s end tag is <\/script>", html_escape::encode_script_single_quoted_text_to_string("<script>'s end tag is </script>", &mut html));
html.push_str("');</script>");
assert_eq!("<input value=Hello world! placeholder=\"The default value is "Hello world!".\"/><script>alert(\'<script>\\\'s end tag is <\\/script>\');</script>", html);
```
### Decoding
```rust
extern crate html_escape;
assert_eq!("Hello world!", html_escape::decode_html_entities("Hello world!"));
```
```rust
extern crate html_escape;
assert_eq!("alert('<script></script>');", html_escape::decode_script(r"alert('<script><\/script>');"));
```
## No Std
Disable the default features to compile this crate without std.
```toml
[dependencies.html-escape]
version = "*"
default-features = false
```
## Benchmark
```bash
cargo bench
```
*/
#![cfg_attr(not(feature = "std"), no_std)]
extern crate alloc;
extern crate utf8_width;
mod decode;
mod encode;
mod functions;
pub use decode::*;
pub use encode::*;
|
use serde::{Deserialize, Serialize};
/// Response envelope for a Plaid bank-transfer request.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct BankResponse {
    /// The bank transfer described by this response.
    pub bank_transfer: BankTransfer,
    /// A unique identifier for the request, which can be used for
    /// troubleshooting. This identifier, like all Plaid identifiers, is case
    /// sensitive.
    pub request_id: String,
}
/// A single bank transfer record.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct BankTransfer {
    /// Plaid identifier of the transfer.
    pub id: String,
    /// ACH classification code — presumably per the Plaid API; confirm.
    pub ach_class: String,
    /// Identifier of the account involved in the transfer.
    pub account_id: String,
    // NOTE(review): non-snake-case field. Renaming it to `type_` with
    // `#[serde(rename = "Type")]` would silence the lint, but the field name
    // is public API (and serde currently serializes the key "Type"), so it
    // is left unchanged here.
    pub Type: String,
    pub user: String,
}
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt;
use once_cell::sync::Lazy;
use crate::error::RSocketError;
use crate::Result;
/// An RSocket MIME type: either a registered well-known type, carried as its
/// compact `u8` identifier, or an arbitrary textual type.
#[derive(PartialEq, Eq, Debug, Clone, Hash)]
pub enum MimeType {
    /// Arbitrary textual MIME type.
    Normal(String),
    /// Registered well-known MIME type identifier.
    WellKnown(u8),
}
/// Lookup table: well-known MIME identifier -> canonical string.
static U8_TO_STR: Lazy<HashMap<u8, &'static str>> =
    Lazy::new(|| list_all().into_iter().collect());
/// Reverse lookup table: canonical string -> well-known MIME identifier.
static STR_TO_U8: Lazy<HashMap<&'static str, u8>> =
    Lazy::new(|| list_all().into_iter().map(|(n, s)| (s, n)).collect());
impl MimeType {
    /// Resolve a well-known MIME identifier; `None` if `value` is not
    /// registered.
    pub fn parse(value: u8) -> Option<MimeType> {
        // Was `U8_TO_STR.get(&value).map(|it| ...)` with an unused `it`
        // binding (unused-variable lint); a containment test states the
        // intent directly.
        if U8_TO_STR.contains_key(&value) {
            Some(Self::WellKnown(value))
        } else {
            None
        }
    }
    /// The compact identifier, if this is a well-known type.
    pub fn as_u8(&self) -> Option<u8> {
        match self {
            Self::WellKnown(n) => Some(*n),
            Self::Normal(_) => None,
        }
    }
    /// The textual form; `None` for an unregistered well-known identifier.
    pub fn as_str(&self) -> Option<&str> {
        match self {
            Self::Normal(s) => Some(s.as_ref()),
            Self::WellKnown(n) => U8_TO_STR.get(n).copied(),
        }
    }
}
impl Into<String> for MimeType {
fn into(self) -> String {
match self {
Self::Normal(s) => s,
Self::WellKnown(n) => match U8_TO_STR.get(&n) {
Some(v) => v.to_string(),
None => "UNKNOWN".to_string(),
},
}
}
}
/// Parse a textual MIME type, collapsing registered names to their compact
/// well-known identifier.
impl From<&str> for MimeType {
    fn from(value: &str) -> MimeType {
        STR_TO_U8
            .get(value)
            .map(|&n| Self::WellKnown(n))
            .unwrap_or_else(|| Self::Normal(value.to_owned()))
    }
}
impl fmt::Display for MimeType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Self::Normal(s) => write!(f, "{}", s),
Self::WellKnown(n) => match U8_TO_STR.get(n) {
Some(v) => write!(f, "{}", v),
None => Err(fmt::Error),
},
}
}
}
/// Declare a well-known MIME type: a module-level `(id, name)` tuple constant
/// plus an associated `MimeType` constant of the same name.
macro_rules! mime {
    ($name:ident,$n:expr,$s:expr) => {
        const $name: (u8, &str) = ($n, $s);
        impl MimeType {
            pub const $name: Self = Self::WellKnown($n);
        }
    };
}
// Well-known MIME type registrations (RSocket well-known MIME types
// registry: identifiers 0x00..=0x7F).
mime!(APPLICATION_AVRO, 0x00, "application/avro");
// Bug fix: 0x01 was registered as "application/avro" (copy-paste from the
// line above); per the registry, 0x01 is application/cbor.
mime!(APPLICATION_CBOR, 0x01, "application/cbor");
mime!(APPLICATION_GRAPHQL, 0x02, "application/graphql");
mime!(APPLICATION_GZIP, 0x03, "application/gzip");
mime!(APPLICATION_JAVASCRIPT, 0x04, "application/javascript");
mime!(APPLICATION_JSON, 0x05, "application/json");
mime!(APPLICATION_OCTET_STREAM, 0x06, "application/octet-stream");
mime!(APPLICATION_PDF, 0x07, "application/pdf");
mime!(
    APPLICATION_VND_APACHE_THRIFT_BINARY,
    0x08,
    "application/vnd.apache.thrift.binary"
);
mime!(
    APPLICATION_VND_GOOGLE_PROTOBUF,
    0x09,
    "application/vnd.google.protobuf"
);
mime!(APPLICATION_XML, 0x0A, "application/xml");
mime!(APPLICATION_ZIP, 0x0B, "application/zip");
mime!(AUDIO_AAC, 0x0C, "audio/aac");
mime!(AUDIO_MP3, 0x0D, "audio/mp3");
mime!(AUDIO_MP4, 0x0E, "audio/mp4");
mime!(AUDIO_MPEG3, 0x0F, "audio/mpeg3");
mime!(AUDIO_MPEG, 0x10, "audio/mpeg");
mime!(AUDIO_OGG, 0x11, "audio/ogg");
mime!(AUDIO_OPUS, 0x12, "audio/opus");
mime!(AUDIO_VORBIS, 0x13, "audio/vorbis");
mime!(IMAGE_BMP, 0x14, "image/bmp");
mime!(IMAGE_GIF, 0x15, "image/gif");
mime!(IMAGE_HEIC_SEQUENCE, 0x16, "image/heic-sequence");
mime!(IMAGE_HEIC, 0x17, "image/heic");
mime!(IMAGE_HEIF_SEQUENCE, 0x18, "image/heif-sequence");
mime!(IMAGE_HEIF, 0x19, "image/heif");
mime!(IMAGE_JPEG, 0x1A, "image/jpeg");
mime!(IMAGE_PNG, 0x1B, "image/png");
mime!(IMAGE_TIFF, 0x1C, "image/tiff");
mime!(MULTIPART_MIXED, 0x1D, "multipart/mixed");
mime!(TEXT_CSS, 0x1E, "text/css");
mime!(TEXT_CSV, 0x1F, "text/csv");
mime!(TEXT_HTML, 0x20, "text/html");
mime!(TEXT_PLAIN, 0x21, "text/plain");
mime!(TEXT_XML, 0x22, "text/xml");
mime!(VIDEO_H264, 0x23, "video/H264");
mime!(VIDEO_H265, 0x24, "video/H265");
mime!(VIDEO_VP8, 0x25, "video/VP8");
mime!(APPLICATION_X_HESSIAN, 0x26, "application/x-hessian");
mime!(APPLICATION_X_JAVA_OBJECT, 0x27, "application/x-java-object");
mime!(
    APPLICATION_CLOUDEVENTS_JSON,
    0x28,
    "application/cloudevents+json"
);
mime!(
    MESSAGE_X_RSOCKET_MIME_TYPE_V0,
    0x7A,
    "message/x.rsocket.mime-type.v0"
);
// NOTE(review): constant name says TIME_TYPES but the string (correctly)
// says mime-types; renaming the constant would touch `list_all` too, so it
// is left as-is here.
mime!(
    MESSAGE_X_RSOCKET_ACCEPT_TIME_TYPES_V0,
    0x7B,
    "message/x.rsocket.accept-mime-types.v0"
);
mime!(
    MESSAGE_X_RSOCKET_AUTHENTICATION_V0,
    0x7C,
    "message/x.rsocket.authentication.v0"
);
mime!(
    MESSAGE_X_RSOCKET_TRACING_ZIPKIN_V0,
    0x7D,
    "message/x.rsocket.tracing-zipkin.v0"
);
mime!(
    MESSAGE_X_RSOCKET_ROUTING_V0,
    0x7E,
    "message/x.rsocket.routing.v0"
);
mime!(
    MESSAGE_X_RSOCKET_COMPOSITE_METADATA_V0,
    0x7F,
    "message/x.rsocket.composite-metadata.v0"
);
/// Every registered well-known `(id, name)` pair; the source of truth for
/// the `U8_TO_STR` / `STR_TO_U8` lookup tables.
fn list_all() -> Vec<(u8, &'static str)> {
    vec![
        APPLICATION_AVRO,
        APPLICATION_CBOR,
        APPLICATION_GRAPHQL,
        APPLICATION_GZIP,
        APPLICATION_JAVASCRIPT,
        APPLICATION_JSON,
        APPLICATION_OCTET_STREAM,
        APPLICATION_PDF,
        APPLICATION_VND_APACHE_THRIFT_BINARY,
        APPLICATION_VND_GOOGLE_PROTOBUF,
        APPLICATION_XML,
        APPLICATION_ZIP,
        AUDIO_AAC,
        AUDIO_MP3,
        AUDIO_MP4,
        AUDIO_MPEG3,
        AUDIO_MPEG,
        AUDIO_OGG,
        AUDIO_OPUS,
        AUDIO_VORBIS,
        IMAGE_BMP,
        IMAGE_GIF,
        IMAGE_HEIC_SEQUENCE,
        IMAGE_HEIC,
        IMAGE_HEIF_SEQUENCE,
        IMAGE_HEIF,
        IMAGE_JPEG,
        IMAGE_PNG,
        IMAGE_TIFF,
        MULTIPART_MIXED,
        TEXT_CSS,
        TEXT_CSV,
        TEXT_HTML,
        TEXT_PLAIN,
        TEXT_XML,
        VIDEO_H264,
        VIDEO_H265,
        VIDEO_VP8,
        APPLICATION_X_HESSIAN,
        APPLICATION_X_JAVA_OBJECT,
        APPLICATION_CLOUDEVENTS_JSON,
        MESSAGE_X_RSOCKET_MIME_TYPE_V0,
        MESSAGE_X_RSOCKET_ACCEPT_TIME_TYPES_V0,
        MESSAGE_X_RSOCKET_AUTHENTICATION_V0,
        MESSAGE_X_RSOCKET_TRACING_ZIPKIN_V0,
        MESSAGE_X_RSOCKET_ROUTING_V0,
        MESSAGE_X_RSOCKET_COMPOSITE_METADATA_V0,
    ]
}
|
use crate::prelude::*;
/// Count the triples that satisfy the triangle inequality on all three sides.
pub fn pt1(input: Vec<(u32, u32, u32)>) -> Result<usize> {
    let is_triangle =
        |&(a, b, c): &(u32, u32, u32)| a + b > c && a + c > b && b + c > a;
    Ok(input.into_iter().filter(is_triangle).count())
}
/// Like `pt1`, but triples are read column-wise from each group of three
/// consecutive rows.
pub fn pt2(input: Vec<(u32, u32, u32)>) -> Result<usize> {
    let mut columns = Vec::with_capacity(input.len());
    // `chunks_exact(3)` replaces the manual `i * 3 + k` indexing; like the
    // original `0..len/3` loop, any trailing partial group is ignored.
    for rows in input.chunks_exact(3) {
        let (a, b, c) = (rows[0], rows[1], rows[2]);
        columns.push((a.0, b.0, c.0));
        columns.push((a.1, b.1, c.1));
        columns.push((a.2, b.2, c.2));
    }
    pt1(columns)
}
/// Parse one triple of whitespace-separated side lengths per line
/// (leading whitespace on a line is allowed).
pub fn parse(s: &str) -> IResult<&str, Vec<(u32, u32, u32)>> {
    use parsers::*;
    separated_list1(
        tag("\n"),
        tuple((
            preceded(space0, u32_str),
            preceded(space1, u32_str),
            preceded(space1, u32_str),
        )),
    )(s)
}
// Parser smoke test over two sample rows.
#[test]
fn day03() -> Result<()> {
    test_parse!(parse, "\
541 588 421
827 272 126" => vec![(541, 588, 421), (827, 272, 126)]);
    Ok(())
}
|
#![allow(unused_imports)]
use std::cmp::{max, min};
use std::collections::{HashMap, HashSet};
use std::fs;
use std::io::{self, Write};
use std::process::{Command, Stdio};
use std::str;
/// Competitive-programming input scanner: buffers one line at a time and
/// hands out whitespace-separated tokens.
struct Scanner<R> {
    reader: R,
    /// Raw bytes of the most recently read line.
    buf_str: Vec<u8>,
    /// Token iterator over `buf_str`; the `'static` lifetime is a lie upheld
    /// by `token()` (see the SAFETY note there).
    buf_iter: str::SplitAsciiWhitespace<'static>,
}
impl<R: io::BufRead> Scanner<R> {
    /// Wrap a buffered reader with an empty token buffer.
    fn new(reader: R) -> Self {
        Self {
            reader,
            buf_str: Vec::new(),
            buf_iter: "".split_ascii_whitespace(),
        }
    }
    /// Read and parse the next whitespace-separated token, pulling in more
    /// input lines as needed. Panics on EOF or parse failure.
    fn token<T: str::FromStr>(&mut self) -> T {
        loop {
            if let Some(token) = self.buf_iter.next() {
                return token.parse().ok().expect("Failed parse");
            }
            self.buf_str.clear();
            self.reader
                .read_until(b'\n', &mut self.buf_str)
                .expect("Failed read");
            // SAFETY(review): the transmute extends the iterator's borrow of
            // `buf_str` to 'static. This only holds because `buf_iter` is
            // rebuilt immediately after every mutation of `buf_str` (just
            // above), so the iterator never observes a reallocated buffer.
            // `from_utf8_unchecked` additionally assumes the input is valid
            // UTF-8 — typical for judge input, but confirm for other uses.
            self.buf_iter = unsafe {
                let slice = str::from_utf8_unchecked(&self.buf_str);
                std::mem::transmute(slice.split_ascii_whitespace())
            }
        }
    }
}
/// Per test case: reads `n` values and a target `x`, then prints the minimum
/// of `x + 1`, `1` (when some `a == x`), and `max(2, ceil(x / a))` over all
/// `a` — problem-specific; verify against the original statement.
fn main() {
    let (stdin, stdout) = (io::stdin(), io::stdout());
    let mut scan = Scanner::new(stdin.lock());
    let mut out = io::BufWriter::new(stdout.lock());
    let t = scan.token::<usize>();
    for _ in 0..t {
        let n = scan.token::<usize>();
        let x = scan.token::<i64>();
        // Upper bound before any button is considered.
        let mut ans = x + 1;
        for _ in 0..n {
            let a = scan.token::<i64>();
            if a == x {
                ans = 1;
            }
            // (x + a - 1) / a is ceil(x / a) for positive a.
            ans = min(ans, max(2, (x + a - 1) / a));
        }
        writeln!(out, "{}", ans).ok();
    }
}
#[cfg(test)]
mod tests {
    use std::process::{Command, Stdio};
    use std::str;
    // NOTE(review): this test appears to be an unfinished harness — it runs
    // `cat` twice (the second with an unconnected piped stdin, so its output
    // is empty) rather than the actual solver binary, and then asserts on
    // that output. Expect it to fail as written; confirm intent before use.
    #[test]
    fn example1() {
        // let input = fs::read_to_string(b_input.txt).expect("Failed Read");
        let _cat_child = Command::new("cat")
            .arg("src/b_input.txt")
            .stdout(Stdio::piped())
            //.spawn()
            .output()
            .expect("Failed to Execute command");
        // let cat_out = cat_child.stdout;
        println!("{:?}", "HELLLOOO");
        let run_child = Command::new("cat")
            //.current_dir("./../../..")
            .stdin(Stdio::piped())
            .output()
            .expect("Failed to run program");
        let run_out = run_child.stdout;
        assert_eq!("2\n3\n1\n2", str::from_utf8(run_out.as_slice()).unwrap());
    }
}
|
use crate::schema::access_tokens;
use compat_uuid::Uuid;
use diesel::{
self, delete, insert_into, prelude::*, result::Error, update, Associations, FromSqlRow,
Identifiable, Insertable, Queryable,
};
use postgres_resource::*;
use rocket::{http::Status, response::status::Custom};
use rocket_contrib::json::JsonValue;
/// Access-token row backed by the `access_tokens` table; the `#[resource]`
/// proc-macro generates the Diesel/Rocket plumbing around it.
#[resource(schema = access_tokens, table = "access_tokens")]
struct AccessToken {
    /// The issued JWT string.
    jwt: String,
    /// Token lifetime — presumably seconds from issuance; confirm against
    /// the code that issues these rows.
    expires_in: i32,
    /// Owning user's id.
    user_id: i32,
}
|
use bevy::{prelude::*, render::render_graph::base::MainPass};
pub mod objects;
pub mod primitives;
pub use objects::*;
pub use primitives::*;
///////////////////////////////////////////////////////////////////////////////
/// Equivalent to [`PbrBundle`] but without the transforms, mesh and material components
#[derive(Bundle)]
struct PbrPrimitiveBundle {
    // Render-graph / visibility components, mirroring `PbrBundle`.
    main_pass: MainPass,
    draw: Draw,
    visible: Visible,
    render_pipelines: RenderPipelines,
}
impl Default for PbrPrimitiveBundle {
    fn default() -> Self {
        // Destructure a default `PbrBundle` and keep only the components this
        // bundle carries. Skipped fields are listed explicitly (instead of
        // `..`) so that a newly added `PbrBundle` field becomes a compile
        // error here and gets a conscious include/exclude decision.
        let PbrBundle {
            mesh: _,
            material: _,
            main_pass,
            draw,
            visible,
            render_pipelines,
            transform: _,
            global_transform: _,
        } = Default::default();
        Self {
            main_pass,
            draw,
            visible,
            render_pipelines,
        }
    }
}
|
#![cfg_attr(feature="nightly", feature(integer_atomics))]
#![allow(unused_imports)]
#![cfg_attr(feature="nightly", feature(test))]
#![deny(warnings)]
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[cfg(feature="nightly")]
extern crate test;
extern crate savefile;
#[macro_use]
extern crate savefile_derive;
extern crate bit_vec;
extern crate smallvec;
extern crate byteorder;
extern crate rand;
use std::fmt::Debug;
use std::io::Write;
use savefile::prelude::*;
extern crate arrayvec;
extern crate parking_lot;
mod test_versioning;
mod test_introspect;
mod test_nested_non_repr_c;
mod test_nested_repr_c;
mod test_arrayvec;
#[cfg(feature = "external_benchmarks")]
mod ext_benchmark;
/// Minimal non-`Copy` struct used to exercise by-value roundtrips.
#[derive(Debug, Savefile, PartialEq)]
struct NonCopy {
    ncfield: u8,
}
use std::io::Cursor;
use std::io::BufWriter;
/// Serialize `sample` at schema version 0, deserialize it back, and assert
/// the result equals the input.
pub fn assert_roundtrip<E: Serialize + Deserialize + Debug + PartialEq>(sample: E) {
    assert_roundtrip_version(sample, 0)
}
/// Serialize `sample` at `version`, deserialize it back, assert equality,
/// and verify the deserializer consumed every serialized byte.
pub fn assert_roundtrip_version<E: Serialize + Deserialize + Debug + PartialEq>(sample: E, version: u32) {
    let mut f = Cursor::new(Vec::new());
    {
        let mut bufw = BufWriter::new(&mut f);
        {
            Serializer::save(&mut bufw, version, &sample, false).unwrap();
        }
        // Flush before the cursor is read back.
        bufw.flush().unwrap();
    }
    f.set_position(0);
    {
        let roundtrip_result = Deserializer::load::<E>(&mut f, version).unwrap();
        assert_eq!(sample, roundtrip_result);
    }
    // The read position must land exactly at the end of the buffer.
    let f_internal_size = f.get_ref().len();
    assert_eq!(f.position() as usize, f_internal_size);
}
/// Serialize `sample` at version 0 and return the deserialized copy,
/// verifying that the deserializer consumed every serialized byte.
pub fn roundtrip<E: Serialize + Deserialize>(sample: E) -> E {
    let mut f = Cursor::new(Vec::new());
    {
        let mut bufw = BufWriter::new(&mut f);
        Serializer::save(&mut bufw, 0, &sample, false).unwrap();
        bufw.flush().unwrap();
    }
    f.set_position(0);
    // Idiom fix: bind directly instead of the original deferred
    // `let roundtrip_result;` + assignment-in-a-block.
    let roundtrip_result = Deserializer::load::<E>(&mut f, 0).unwrap();
    let f_internal_size = f.get_ref().len();
    assert_eq!(f.position() as usize, f_internal_size);
    roundtrip_result
}
// --- Enum roundtrip coverage: struct, tuple, and unit variants ---
#[derive(Debug, Savefile, PartialEq)]
pub enum TestStructEnum {
    Variant1 { a: u8, b: u8 },
    Variant2 { a: u8 },
}
#[test]
pub fn test_struct_enum() {
    assert_roundtrip(TestStructEnum::Variant1 { a: 42, b: 45 });
    assert_roundtrip(TestStructEnum::Variant2 { a: 47 });
}
#[derive(Debug, Savefile, PartialEq)]
pub enum TestTupleEnum {
    Variant1(u8),
}
#[test]
pub fn test_tuple_enum() {
    assert_roundtrip(TestTupleEnum::Variant1(37));
}
#[test]
pub fn test_unit_enum() {
    #[derive(Debug, Savefile, PartialEq)]
    pub enum TestUnitEnum {
        Variant1,
        Variant2,
    }
    assert_roundtrip(TestUnitEnum::Variant1);
    assert_roundtrip(TestUnitEnum::Variant2);
}
// Struct covering every primitive integer type plus f32 and bool.
#[derive(Debug, Savefile, PartialEq)]
pub struct TestStruct {
    x1: u8,
    x2: u16,
    x3: u32,
    x4: u64,
    x5: usize,
    x6: i8,
    x7: i16,
    x8: i32,
    x9: i64,
    x10: isize,
    x11: f32,
    x12: bool,
}
#[test]
pub fn test_struct_reg() {
    assert_roundtrip(TestStruct {
        x1: 1,
        x2: 2,
        x3: 3,
        x4: 4,
        x5: 5,
        x6: 6,
        x7: 7,
        x8: 8,
        x9: 9,
        x10: 10,
        x11: 11.5,
        x12: true
    });
}
#[test]
pub fn test_vec() {
    let mut v = Vec::new();
    v.push(43u8);
    assert_roundtrip(v);
}
// Generic payloads need the full savefile trait bundle on `T`.
#[derive(Savefile, Debug, PartialEq)]
struct GenericWrapper<T: Serialize + Deserialize + WithSchema + Debug + PartialEq + Introspect> {
    something: T
}
#[test]
pub fn test_generic() {
    assert_roundtrip(GenericWrapper {
        something: 42u32
    });
}
// `BinaryHeap` does not implement `PartialEq`, so compare the element
// vectors from before and after the roundtrip instead.
#[test]
pub fn test_bin_heap() {
    use std::collections::BinaryHeap;
    let mut v = BinaryHeap::new();
    v.push(43u8);
    let vv: Vec<u8> = v.iter().map(|x| *x).collect();
    let n = roundtrip(v);
    let nv: Vec<u8> = n.iter().map(|x| *x).collect();
    assert_eq!(nv, vv);
}
#[test]
pub fn test_vec_of_string() {
    let mut v = Vec::new();
    v.push("hejsan".to_string());
    assert_roundtrip(v);
}
#[test]
pub fn test_hashmap() {
    use std::collections::HashMap;
    let mut v = HashMap::new();
    v.insert(43, 45);
    v.insert(47, 49);
    assert_roundtrip(v);
}
#[test]
pub fn test_string() {
    // Both the empty string and a non-empty string.
    assert_roundtrip("".to_string());
    assert_roundtrip("test string".to_string());
}
/// Fixed-layout struct used by the serialization benchmarks; the explicit
/// pad fields fill the layout so `ReprC` fast-path copying applies.
#[derive(ReprC, Clone, Copy, Debug, Savefile, PartialEq)]
pub struct BenchStruct {
    x: usize,
    y: usize,
    z: u8,
    pad1: u8,
    pad2: u8,
    pad3: u8,
    pad4: u32,
}
#[cfg(feature="nightly")]
#[cfg(not(miri))]
use test::{Bencher, black_box};
#[cfg(feature="nightly")]
#[bench]
#[cfg(not(miri))]
fn bench_savefile_serialize(b: &mut Bencher) {
let mut f = Cursor::new(Vec::with_capacity(100));
let mut test=Vec::new();
for i in 0..1000 {
test.push(BenchStruct {
x:black_box(i),
y:black_box(i),
z:black_box(0),
pad1:0,
pad2:0,
pad3:0,
pad4:0,
})
}
b.iter(move || {
{
save_noschema(&mut f,0,&test).unwrap();
}
black_box(&mut f);
f.set_position(0);
{
let r = load_noschema::<Vec<BenchStruct>>(&mut f, 0).unwrap();
assert!(r.len()==1000);
}
f.set_position(0);
});
}
#[cfg(feature="nightly")]
#[test]
#[cfg(not(miri))]
pub fn test_bench_struct() {
assert_roundtrip(
vec![
BenchStruct {
x:black_box(1),
y:black_box(2),
z:black_box(3),
pad1:0,pad2:0,pad3:0,pad4:0,
},
BenchStruct {
x:black_box(4),
y:black_box(5),
z:black_box(6),
pad1:0,pad2:0,pad3:0,pad4:0,
},
BenchStruct {
x:black_box(7),
y:black_box(8),
z:black_box(9),
pad1:0,pad2:0,pad3:0,pad4:0,
},
BenchStruct {
x:black_box(1),
y:black_box(2),
z:black_box(3),
pad1:0,pad2:0,pad3:0,pad4:0,
}
]
);
}
#[test]
pub fn test_bench_struct_miri_compat() {
assert_roundtrip(
vec![
BenchStruct {
x:1,
y:2,
z:3,
pad1:0,pad2:0,pad3:0,pad4:0,
},
BenchStruct {
x:4,
y:5,
z:6,
pad1:0,pad2:0,pad3:0,pad4:0,
},
BenchStruct {
x:7,
y:8,
z:9,
pad1:0,pad2:0,pad3:0,pad4:0,
},
BenchStruct {
x:10,
y:11,
z:12,
pad1:0,pad2:0,pad3:0,pad4:0,
}
]
);
}
#[test]
pub fn test_u16_vec() {
assert_roundtrip(Vec::<u16>::new());
assert_roundtrip(vec![0u16,42u16]);
assert_roundtrip(vec![0u16,1,2,3,4,5,6,7,8,9]);
}
/// Version-0 shape of the small upgrade-test struct.
#[derive(Debug, PartialEq, Savefile)]
struct SmallStruct {
    x1: u32,
    x2: i32,
}
#[test]
pub fn test_small_struct() {
    assert_roundtrip(SmallStruct { x1: 123, x2: 321 });
}
/// Version-1 shape: two new fields with defaults for data saved at version 0.
#[derive(Debug, PartialEq, Savefile)]
struct SmallStruct2 {
    x1: u32,
    x2: i32,
    #[savefile_default_val = "100"]
    #[savefile_versions = "1.."]
    x3: String,
    #[savefile_default_val = "123"]
    #[savefile_versions = "1.."]
    x4: u64,
}
/// Serialize `sample_v1` as type `E1` at `version_number1`, deserialize the
/// bytes as type `E2` at `version_number2`, and assert the result equals
/// `expected_v2` — i.e. verify a schema upgrade path.
pub fn assert_roundtrip_to_new_version<
    E1: Serialize + Deserialize + Debug + PartialEq,
    E2: Serialize + Deserialize + Debug + PartialEq,
> (
    sample_v1: E1,
    version_number1: u32,
    expected_v2: E2,
    version_number2: u32,
) -> E2 {
    let mut f = Cursor::new(Vec::new());
    {
        let mut bufw = BufWriter::new(&mut f);
        {
            Serializer::save(&mut bufw, version_number1, &sample_v1, false).unwrap();
        }
        bufw.flush().unwrap();
    }
    f.set_position(0);
    let roundtrip_result = Deserializer::load::<E2>(&mut f, version_number2).unwrap();
    assert_eq!(expected_v2, roundtrip_result);
    roundtrip_result
}
#[test]
pub fn test_array_string() {
use arrayvec::ArrayString;
let arraystr:ArrayString<[u8;30]>=ArrayString::from("Hello everyone").unwrap();
assert_roundtrip(arraystr);
}
#[test]
pub fn test_smallvec0() {
let mut v = smallvec::SmallVec::<[u8;2]>::new();
v.push(1);
assert_roundtrip(v);
}
#[test]
pub fn test_smallvec1() {
let mut v = smallvec::SmallVec::<[u8;2]>::new();
v.push(1);
assert_roundtrip(v);
}
#[test]
pub fn test_smallvec2() {
let mut v = smallvec::SmallVec::<[u8;2]>::new();
v.push(1);
v.push(2);
assert_roundtrip(v);
}
#[test]
pub fn test_smallvec3() {
let mut v = smallvec::SmallVec::<[u8;2]>::new();
v.push(1);
v.push(2);
v.push(3);
assert_roundtrip(v);
}
#[test]
pub fn test_short_arrays() {
let empty:[u32;0]=[];
assert_roundtrip(empty);
assert_roundtrip([1]);
assert_roundtrip([1,2]);
assert_roundtrip([1,2,3]);
}
#[test]
pub fn test_short_array_with_drop_contents() {
let empty:[String;0]=[];
assert_roundtrip(empty);
assert_roundtrip(["Hej".to_string(),"Hello".to_string()]);
}
#[test]
pub fn test_short_array_with_drop_contents_leak_test() {
let mut i =0;
loop {
let test = [format!("Test {}",i),format!("Other {}",i)];
assert_roundtrip(test);
i+=1;
if i>23 {
break;
}
}
}
#[test]
pub fn test_string_leak_test() {
let mut i =0;
loop {
let test = format!("Test {}",i);
assert_roundtrip(test);
i+=1;
if i>23 {
break;
}
}
}
#[cfg(feature="nightly")]
#[test]
pub fn test_long_array() {
let arr=[47;32];
assert_roundtrip(arr);
}
#[cfg(feature="nightly")]
#[test]
pub fn test_very_long_array() {
#[derive(Savefile)]
struct LongArray([u32;1000]);
impl Debug for LongArray {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Long array")
}
}
impl PartialEq for LongArray {
fn eq(&self, other: &Self) -> bool {
for idx in 0..1000 {
if self.0[idx] != other.0[idx] {
return false;
}
}
true
}
}
let mut arr=LongArray([47;1000]);
arr.0[0]=0;
assert_roundtrip(arr);
}
#[test]
pub fn test_small_struct_upgrade() {
assert_roundtrip_to_new_version(
SmallStruct { x1: 123, x2: 321 },
0,
SmallStruct2 {
x1: 123,
x2: 321,
x3: "100".to_string(),
x4: 123,
},
1,
);
}
/// Version-0 shape for the field-removal upgrade test.
#[derive(Debug, PartialEq, Savefile)]
struct SmallStructRem1 {
    x1: u32,
    x2: i32,
    x3: String,
}
/// Version-1 shape: `x1` removed (placeholder type `Removed`), `x4` added.
#[derive(Debug, PartialEq, Savefile )]
struct SmallStructRem2 {
    #[savefile_versions = "..0"]
    x1: Removed<u32>,
    x2: i32,
    x3: String,
    #[savefile_default_val = "123"]
    #[savefile_versions = "1.."]
    x4: isize,
}
#[test]
pub fn test_small_struct_remove() {
    assert_roundtrip_to_new_version(
        SmallStructRem1 {
            x1: 123,
            x2: 321,
            x3: "hello".to_string(),
        },
        0,
        SmallStructRem2 {
            x1: Removed::new(),
            x2: 321,
            x3: "hello".to_string(),
            x4: 123,
        },
        1,
    );
}
/// Tuples of arity 0 through 3.
#[derive(Debug, PartialEq, Savefile )]
struct TupleCarrier {
    t0: (),
    t1: (u32,),
    t2: (u32, u32),
    t3: (u32, u32, u32),
}
#[test]
pub fn test_tuple() {
    assert_roundtrip(TupleCarrier {
        t0: (),
        t1: (42u32,),
        t2: (42u32, 43u32),
        t3: (42u32, 43u32, 44u32),
    });
}
/// `c` is skipped by serialization (`savefile_ignore`).
#[derive(Debug, PartialEq, Savefile )]
struct StructWithIgnored {
    a: u32,
    b: u32,
    #[savefile_ignore]
    c: u32,
}
#[test]
pub fn test_ignored() {
    // `c` must be its default (0) for the equality assert to hold after load.
    assert_roundtrip(StructWithIgnored { a: 42, b: 7, c: 0 });
}
#[test]
pub fn test_box() {
use std::rc::Rc;
use std::sync::Arc;
use std::cell::RefCell;
use std::cell::Cell;
assert_roundtrip(Box::new(37));
assert_roundtrip(Rc::new(38));
assert_roundtrip(Arc::new(39));
assert_roundtrip(RefCell::new(40));
assert_roundtrip(Cell::new(40));
}
#[test]
pub fn test_option() {
assert_roundtrip(Some(32));
let x:Option<u32> = None;
assert_roundtrip(x);
}
#[derive(Savefile,Debug,PartialEq)]
struct NewTypeSample(u32);
#[test]
pub fn test_newtype() {
assert_roundtrip(NewTypeSample(43));
}
#[derive(Savefile,Debug,PartialEq)]
struct NewTypeSample2(u32,i8);
#[test]
pub fn test_newtype2() {
assert_roundtrip(NewTypeSample2(43,127));
}
#[derive(Savefile,Debug,PartialEq)]
struct NoFields {
}
#[test]
pub fn test_struct_no_fields() {
assert_roundtrip(NoFields{});
}
#[derive(Savefile,Debug,PartialEq)]
struct OnlyRemoved {
#[savefile_versions="0..0"]
rem : Removed<u32>,
}
#[test]
pub fn test_struct_only_removed_fields() {
assert_roundtrip_version(OnlyRemoved{rem: Removed::new()},1);
}
// Round-trips bit vectors of assorted sizes: empty, one bit, a few bits,
// and one long enough (127 bits) to span multiple storage blocks.
#[test]
pub fn test_bitvec() {
    use bit_vec::BitVec;
    let empty = BitVec::new();

    let mut single = BitVec::new();
    single.push(false);

    let mut few = BitVec::new();
    for &bit in [false, true, false].iter() {
        few.push(bit);
    }

    // Alternating pattern, deliberately not a whole number of bytes.
    let mut long = BitVec::new();
    for i in 0..127 {
        long.push(i % 2 == 0);
    }

    assert_roundtrip(empty);
    assert_roundtrip(single);
    assert_roundtrip(few);
    assert_roundtrip(long);
}
#[repr(u8)]
#[derive(Savefile, ReprC, Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub enum TerrainType {
Wheat,
Forest,
Desert,
Rock,
Dirt,
Grass,
Water,
}
#[repr(C)]
#[derive(ReprC, Savefile, Clone, Copy, Debug,PartialEq)]
pub struct TerrainTile
{
pub curtype: TerrainType,
pub resource: u8, //logarithmic scale, base resource abundance
pub height: i16,
}
#[test]
pub fn test_terrain() {
assert_roundtrip(vec![TerrainTile {
curtype : TerrainType::Dirt,
resource:42,
height:2111
}]);
}
#[cfg(test)]
use std::sync::atomic::{AtomicU8,AtomicUsize,Ordering};
use std::string::ToString;
use savefile::save_compressed;
use std::sync::Arc;
use std::path::PathBuf;
use smallvec::alloc::collections::BTreeMap;
use std::collections::HashSet;
use std::borrow::Cow;
// Serializes an AtomicU8 through a buffered writer into an in-memory cursor,
// then deserializes it and compares the contained values. Atomics cannot be
// compared with assert_roundtrip directly (no PartialEq), hence the manual
// save/load and the load()-based comparison.
#[test]
pub fn test_atomic() {
    let atom = AtomicU8::new(43);
    let mut f = Cursor::new(Vec::new());
    {
        let mut bufw = BufWriter::new(&mut f);
        {
            Serializer::save(&mut bufw, 1, &atom, false).unwrap();
        }
        // Flush explicitly so the cursor holds the full serialized form.
        bufw.flush().unwrap();
    }
    // Rewind before reading back.
    f.set_position(0);
    {
        let roundtrip_result : AtomicU8 = Deserializer::load(&mut f, 1).unwrap();
        assert_eq!(atom.load(Ordering::SeqCst), roundtrip_result.load(Ordering::SeqCst));
    }
}
#[derive(Savefile,Debug,PartialEq)]
struct CanaryTest {
canary1: Canary1,
some_field: i32
}
#[test]
pub fn test_canary1() {
assert_roundtrip(CanaryTest{
canary1: Canary1::default(),
some_field : 43
});
}
#[test]
#[cfg(not(miri))]
pub fn test_crypto1() {
use byteorder::{LittleEndian};
use byteorder::WriteBytesExt;
use byteorder::ReadBytesExt;
let zerokey = [0u8;32];
let mut temp = Vec::new();
{
let mut writer = CryptoWriter::new(&mut temp,zerokey).unwrap();
writer.write_u32::<LittleEndian>(0x01020304).unwrap();
writer.flush().unwrap();
}
let zerokey = [0u8;32];
let mut bufr = std::io::BufReader::new(&temp[..]);
let mut reader = CryptoReader::new(&mut bufr, zerokey).unwrap();
let end = reader.read_u32::<LittleEndian>().unwrap();
assert_eq!(end,0x01020304);
}
#[test]
#[cfg(not(miri))]
pub fn test_compressed_big() {
let mut zeros = Vec::new();
for _ in 0..100_000 {
zeros.push(0);
}
let mut buf = Vec::new();
save_compressed(&mut buf, 0, &zeros).unwrap();
assert!(buf.len() < 100);
}
#[test]
#[cfg(not(miri))]
pub fn test_compressed_small() {
let input = 42u8;
let mut buf = Vec::new();
save_compressed(&mut buf, 0, &input).unwrap();
let mut bufp = &buf[..];
let roundtripped :u8 = load(&mut bufp, 0).unwrap();
assert_eq!(input,roundtripped);
}
#[test]
#[cfg(not(miri))]
pub fn test_compressed_smallish() {
let input = 42u64;
let mut buf = Vec::new();
save_compressed(&mut buf, 0, &input).unwrap();
let mut bufp = &buf[..];
let roundtripped :u64 = load(&mut bufp, 0).unwrap();
assert_eq!(input,roundtripped);
}
#[test]
#[cfg(not(miri))]
pub fn test_crypto_big1() {
use byteorder::{LittleEndian};
use byteorder::WriteBytesExt;
use byteorder::ReadBytesExt;
let zerokey = [0u8;32];
let mut temp = Vec::new();
let mut writer = CryptoWriter::new(&mut temp,zerokey).unwrap();
for i in 0..10000 {
writer.write_u64::<LittleEndian>(i).unwrap();
}
writer.flush_final().unwrap();
let zerokey = [0u8;32];
let mut bufr = std::io::BufReader::new(&temp[..]);
let mut reader = CryptoReader::new(&mut bufr, zerokey).unwrap();
for i in 0..10000 {
assert_eq!(reader.read_u64::<LittleEndian>().unwrap(),i);
}
}
#[test]
#[cfg(not(miri))]
pub fn test_crypto_big2() {
use byteorder::{LittleEndian};
use byteorder::WriteBytesExt;
use byteorder::ReadBytesExt;
let zerokey = [0u8;32];
let mut kb = [0u8;1024];
let mut temp = Vec::new();
{
let mut writer = CryptoWriter::new(&mut temp,zerokey).unwrap();
let kbl = kb.len();
for i in 0..kbl {
kb[i] = (i/4) as u8;
}
for _ in 0..1000 {
writer.write(&kb).unwrap();
}
writer.flush().unwrap();
}
let zerokey = [0u8;32];
let mut bufr = std::io::BufReader::new(&temp[..]);
let mut reader = CryptoReader::new(&mut bufr, zerokey).unwrap();
use std::io::Read;
let mut testkb= [0;1024];
for _ in 0..1000 {
reader.read_exact(&mut testkb).unwrap();
for j in 0..kb.len() {
assert_eq!(kb[j],testkb[j]);
}
}
}
#[test]
#[cfg(not(miri))]
pub fn test_crypto_big3() {
use byteorder::{LittleEndian};
use byteorder::WriteBytesExt;
use byteorder::ReadBytesExt;
let zerokey = [0u8;32];
let mut kb = [0u8;1024*128-17];
let mut temp = Vec::new();
{
let mut writer = CryptoWriter::new(&mut temp,zerokey).unwrap();
let kbl = kb.len();
for i in 0..kbl {
kb[i] = (i/4) as u8;
}
for _ in 0..10 {
writer.write(&kb).unwrap();
}
writer.flush().unwrap();
}
let zerokey = [0u8;32];
let mut bufr = std::io::BufReader::new(&temp[..]);
let mut reader = CryptoReader::new(&mut bufr, zerokey).unwrap();
use std::io::Read;
let mut testkb= [0;1024*128-17];
for _ in 0..10 {
reader.read_exact(&mut testkb).unwrap();
for j in 0..kb.len() {
assert_eq!(kb[j],testkb[j]);
}
}
}
#[test]
#[cfg(not(miri))]
pub fn test_crypto_big4() {
use byteorder::{LittleEndian};
use byteorder::WriteBytesExt;
use byteorder::ReadBytesExt;
let zerokey = [0u8;32];
let mut kb = [0u8;10000];
let mut temp = Vec::new();
{
let mut writer = CryptoWriter::new(&mut temp,zerokey).unwrap();
let kbl = kb.len();
for i in 0..kbl {
kb[i] = (i/4) as u8;
}
for _ in 0..1000 {
writer.write(&kb).unwrap();
}
writer.flush().unwrap();
}
let zerokey = [0u8;32];
let mut bufr = std::io::BufReader::new(&temp[..]);
let mut reader = CryptoReader::new(&mut bufr, zerokey).unwrap();
use std::io::Read;
let mut testkb= [0;10000];
for _ in 0..1000 {
reader.read_exact(&mut testkb).unwrap();
for j in 0..kb.len() {
assert_eq!(kb[j],testkb[j]);
}
}
}
#[test]
#[cfg(not(miri))]
pub fn test_crypto_big5() {
use byteorder::{LittleEndian};
use byteorder::WriteBytesExt;
use byteorder::ReadBytesExt;
let mut kb = Box::new([0u8;1024*302]);
let mut testkb = Box::new([0;1024*302]);
for i in 0..kb.len() {
kb[i] = (i%257) as u8;
}
use rand::Rng;
let mut rng = rand::thread_rng();
let zerokey = [0u8;32];
let mut temp = Vec::new();
{
let mut writer = CryptoWriter::new(&mut temp,zerokey).unwrap();
let _kbl = kb.len();
let mut offset = 0;
loop {
let mut delta:usize;
if rng.gen_range(0..10) == 0 {
delta = rng.gen_range(0..300_000);
} else {
delta = rng.gen_range(0..80000);
}
if delta + offset > kb.len() {
delta = kb.len() - offset;
}
if delta == 0 {
break;
}
writer.write(&kb[offset..offset+delta]).unwrap();
offset += delta;
}
writer.flush().unwrap();
}
let zerokey = [0u8;32];
let mut bufr = std::io::BufReader::new(&temp[..]);
let mut reader = CryptoReader::new(&mut bufr, zerokey).unwrap();
use std::io::Read;
{
let mut offset = 0;
loop {
let mut delta:usize;
if rng.gen_range(0..10) == 0 {
delta = rng.gen_range(0..300_000);
} else {
delta = rng.gen_range(0..80000);
}
if delta + offset > kb.len() {
delta = kb.len() - offset;
}
if delta == 0 {
break;
}
reader.read_exact(&mut testkb[offset..offset+delta]).unwrap();
for i in offset..offset+delta {
assert_eq!(testkb[i],kb[i]);
}
offset += delta;
}
}
}
#[test]
#[cfg(not(miri))]
pub fn test_encrypted_file1() {
save_encrypted_file("test.bin",1,&47usize,"mypassword").unwrap();
let result : usize = load_encrypted_file("test.bin",1,"mypassword").unwrap();
assert_eq!(result,47usize);
}
#[test]
#[cfg(not(miri))]
pub fn test_encrypted_file_bad_password() {
save_encrypted_file("test2.bin",1,&47usize,"mypassword").unwrap();
let result = load_encrypted_file::<usize>("test2.bin",1,"mypassword2");
assert!(result.is_err());
}
#[test]
#[cfg(not(miri))]
pub fn test_decrypt_junk_file() {
{
use std::fs::File;
use byteorder::WriteBytesExt;
use rand::Rng;
let mut f = File::create("test3.bin").unwrap();
let mut rng = rand::thread_rng();
for _ in 0..1000 {
f.write_u8(rng.gen()).unwrap();
}
}
let result = load_encrypted_file::<usize>("test3.bin",1,"mypassword2");
assert!(result.is_err());
}
#[derive(Savefile)]
struct MySimpleFuzz1 {
integer: i8,
strings: Vec<String>,
}
#[test]
pub fn fuzz_regression1() {
let mut data:&[u8] = &[0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 8, 3, 0, 3, 0, 64, 0, 0, 0];
let _ = load_noschema::<MySimpleFuzz1>(&mut data,0);
}
#[test]
pub fn fuzz_regression2() {
let mut data:&[u8] = &[0, 0, 0, 0, 3, 11, 0, 254, 2, 1, 252, 255, 254];
let _ = load_noschema::<MySimpleFuzz1>(&mut data,0);
}
#[test]
pub fn test_roundtrip_arc_array() {
let a1: Arc<[u32]> = vec![1,2,3,4].into();
assert_roundtrip(a1);
let a2: Arc<[String]> = vec!["Hello".to_string()].into();
assert_roundtrip(a2);
}
#[test]
pub fn test_serialize_btreemap() {
let mut bm = BTreeMap::new();
bm.insert(45,32u16);
assert_roundtrip(bm);
}
#[test]
pub fn test_serialize_hashset() {
let hs = HashSet::<i32>::new();
assert_roundtrip(hs);
let mut hs = HashSet::new();
hs.insert(32u16);
assert_roundtrip(hs);
let mut hs = HashSet::new();
hs.insert("hej".to_string());
hs.insert("san".to_string());
hs.insert("kompis".to_string());
assert_roundtrip(hs);
}
#[test]
pub fn test_pathbuf() {
let x: PathBuf = "/c/hello.txt".into();
assert_roundtrip(x);
}
#[test]
pub fn test_arc_str() {
let x:Arc<str> = "hej".into();
assert_roundtrip(x);
}
// Round-trips three Arc<str> values, two of which have equal contents.
// The deserialized Arcs must be fresh allocations (pointers differ from the
// originals) while still comparing equal/unequal by content as expected.
// NOTE(review): presumably the serializer deduplicates equal Arc<str>
// payloads; the assertions below only check content equality between nx and
// nz, not pointer identity — confirm intended dedup semantics.
#[test]
pub fn test_arc_str_dedup() {
    let x:Arc<str> = "hej".into();
    let y:Arc<str> = "hejsan".into();
    let z:Arc<str> = "hej".into();
    let (nx,ny,nz) = roundtrip((x.clone(),y.clone(),z.clone()));
    // Deserialized values never alias the originals' allocations.
    assert_ne!(nx.as_ptr(), x.as_ptr());
    assert_ne!(ny.as_ptr(), y.as_ptr());
    assert_ne!(nz.as_ptr(), z.as_ptr());
    // Content is preserved: "hej" == "hej", both != "hejsan".
    assert_eq!(nx,nz);
    assert_ne!(nx,ny);
    assert_ne!(ny,nz);
}
#[test]
pub fn test_cow_owned() {
let x:Cow<String> = Cow::Owned("hej".to_string());
assert_roundtrip(x);
}
#[test]
pub fn test_cow_borrowed() {
let borrow = "world".to_string();
let x:Cow<String> = Cow::Borrowed(&borrow);
assert_roundtrip(x);
}
#[derive(Savefile, Debug, PartialEq)]
struct SomethingWithPathbufIn {
my_pathbuf: PathBuf
}
#[test]
pub fn test_pathbuf2() {
let x = SomethingWithPathbufIn {
my_pathbuf: "/d/something.txt".into()
};
assert_roundtrip(x);
}
#[derive(SavefileNoIntrospect,Debug,PartialEq)]
struct ExampleWithoutAutomaticIntrospect {
x: u32
}
impl Introspect for ExampleWithoutAutomaticIntrospect {
fn introspect_value(&self) -> String {
"Example".into()
}
fn introspect_child<'a>(&'a self, _index: usize) -> Option<Box<dyn IntrospectItem<'a> + 'a>> {
None
}
} |
//! An expression that evaluates a sub-expression and rejects successful results.
//!
//! See [`crate::Parser::reject`].
use crate::parser::Parser;
use crate::span::Span;
/// The struct returned from [`crate::Parser::reject`].
pub struct Reject<P>(pub(crate) P);
impl<P: Parser> Parser for Reject<P> {
    type Value = ();
    type Error = ();

    /// Inverts the inner parser's outcome: a successful inner parse becomes
    /// an error, and an inner failure becomes a successful empty match.
    /// Either way the resulting span is `0..0` and carries no value.
    fn parse(&self, input: &'_ str) -> Result<Span<Self::Value>, Span<Self::Error>> {
        match self.0.parse(input) {
            Ok(_) => Err(Span::new(0..0, ())),
            Err(_) => Ok(Span::new(0..0, ())),
        }
    }
}
#[cfg(test)]
mod tests {
use quickcheck_macros::quickcheck;
use crate::expression::test_expr::*;
use crate::parser::Parser;
use crate::span::Span;
use super::Reject;
#[test]
fn p_match() {
assert_eq!(
Reject(TestExpr::ok(12..37)).parse("hello"),
Err(Span::new(0..0, ()))
);
}
#[test]
fn p_error() {
assert_eq!(
Reject(TestExpr::err(12..37)).parse("hello"),
Ok(Span::new(0..0, ()))
);
}
#[quickcheck]
fn parse(p: TestExpr, input: String) {
assert_eq!(
Reject(&p).parse(&input),
match p {
ParseMatch(_, _) => Err(Span::new(0..0, ())),
ParseError(_) => Ok(Span::new(0..0, ())),
}
);
}
}
|
extern crate cc;
extern crate fs_extra;
extern crate metadeps;
use std::env;
#[cfg(not(feature = "build-cmake"))]
use std::ffi::OsString;
use std::fs;
use std::path::PathBuf;
#[cfg(not(feature = "build-cmake"))]
use std::process::Command;
/// Build-script entry point: prefer a system-installed libcares (probed via
/// pkg-config through `metadeps`); otherwise copy the vendored c-ares source
/// tree into OUT_DIR and compile it there with `compile()`.
fn main() {
    // Rerun if the c-ares source code has changed.
    println!("cargo:rerun-if-changed=c-ares");
    // Use the installed libcares if it is available.
    if metadeps::probe().is_ok() {
        return;
    }
    // We'll compile from source. Clean up previous build, if any.
    let outdir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
    let build = outdir.join("build");
    // Removal may fail on first build (nothing to remove) — that's fine.
    let _ = fs::remove_dir_all(&build);
    fs::create_dir(&build).unwrap();
    // Copy the c-ares source code into $OUT_DIR, where it's safe for the build
    // process to modify it.
    let c_ares_dir = outdir.join("c-ares");
    let _ = fs::remove_dir_all(&c_ares_dir);
    let copy_options = fs_extra::dir::CopyOptions::new();
    let src = env::current_dir().unwrap().join("c-ares");
    fs_extra::dir::copy(src, &outdir, &copy_options).unwrap();
    // Export the include path for crates dependending on c-ares
    println!("cargo:include={}", c_ares_dir.join("include").display());
    // Need libresolv on macos.
    if cfg!(target_os = "macos") {
        println!("cargo:rustc-link-lib=resolv");
    }
    compile();
}
#[cfg(feature = "build-cmake")]
/// Compiles the vendored c-ares with CMake and emits link directives for the
/// resulting static library.
fn compile() {
    let outdir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
    let c_ares_dir = outdir.join("c-ares");
    // Static library only, no tools; pin the install libdir to "lib" so the
    // link-search path printed below is correct regardless of platform.
    let dst = cmake::Config::new(c_ares_dir)
        .define("CARES_STATIC", "ON")
        .define("CARES_SHARED", "OFF")
        .define("CARES_BUILD_TOOLS", "OFF")
        .define("CMAKE_INSTALL_LIBDIR", "lib")
        .build();
    println!("cargo:rustc-link-search={}/lib", dst.display());
    println!("cargo:rustc-link-lib=static=cares");
}
#[cfg(not(feature = "build-cmake"))]
/// Runs `cmd` to completion, panicking if it cannot be spawned or exits with
/// a non-zero status. Used for the buildconf/configure/make steps below.
fn run(cmd: &mut Command) {
    println!("running: {cmd:?}");
    match cmd.status() {
        Ok(status) => assert!(status.success(), "command {cmd:?} exited with {status}"),
        // Bug fix: the old message used `stringify!($e)`, a leftover from a
        // macro body, which printed the literal text "$e" instead of anything
        // useful. Report the failing command and the spawn error instead.
        Err(e) => panic!("{cmd:?} returned the error {e}"),
    }
}
#[cfg(not(feature = "build-cmake"))]
/// Name of the GNU make binary on the build host: FreeBSD ships BSD make as
/// `make`, so GNU make is `gmake` there; everywhere else it's plain `make`.
const fn make() -> &'static str {
    match cfg!(target_os = "freebsd") {
        true => "gmake",
        false => "make",
    }
}
#[cfg(not(feature = "build-cmake"))]
/// Locates `nmake.exe` for `target` via the MSVC registry helpers in `cc`,
/// with make-related environment variables scrubbed.
fn nmake(target: &str) -> Command {
    // cargo messes with the environment in a way that nmake does not like -
    // see https://github.com/rust-lang/cargo/issues/4156. Explicitly remove
    // the unwanted variables.
    let mut cmd = cc::windows_registry::find(target, "nmake.exe").unwrap();
    cmd.env_remove("MAKEFLAGS").env_remove("MFLAGS");
    cmd
}
#[cfg(not(feature = "build-cmake"))]
/// Compiles the vendored c-ares with autotools (buildconf/configure/make),
/// delegating to `build_msvc` for MSVC targets. Emits link directives for
/// the resulting static library.
fn compile() {
    // MSVC builds are different.
    let target = env::var("TARGET").unwrap();
    if target.contains("msvc") {
        build_msvc(&target);
        return;
    }
    // Prepare.
    let outdir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
    let c_ares_dir = outdir.join("c-ares");
    let build = outdir.join("build");
    // Generate the configure script from configure.ac.
    run(Command::new("sh").current_dir(&c_ares_dir).arg("buildconf"));
    // Configure. Forward cargo's chosen compiler and its flags so c-ares is
    // built with the same toolchain as the crate.
    let cfg = cc::Build::new();
    let compiler = cfg.get_compiler();
    let mut cflags = OsString::new();
    for arg in compiler.args() {
        cflags.push(arg);
        cflags.push(" ");
    }
    let mut cmd = Command::new("sh");
    cmd.env("CFLAGS", &cflags)
        .env("CC", compiler.path())
        .current_dir(&build)
        .arg(format!("{}", c_ares_dir.join("configure").display()))
        .arg("--enable-static")
        .arg("--disable-shared")
        .arg("--enable-optimize")
        .arg("--disable-debug")
        .arg("--disable-tests")
        .arg(format!("--prefix={}", outdir.display()));
    // This code fragment copied from curl-rust... c-ares and curl come from
    // the same developer so are usually pretty similar, and this seems to
    // work.
    //
    // NOTE GNU terminology
    // BUILD = machine where we are (cross) compiling
    // HOST = machine where the compiled binary will be used
    // TARGET = only relevant when compiling compilers
    let host = env::var("HOST").unwrap();
    if target != host && (!target.contains("windows") || !host.contains("windows")) {
        if target.contains("windows") {
            cmd.arg(format!("--host={host}"));
            cmd.arg(format!("--target={target}"));
        } else {
            cmd.arg(format!("--build={host}"));
            cmd.arg(format!("--host={target}"));
        }
    }
    run(&mut cmd);
    // Compile, with the parallelism cargo asked for.
    run(Command::new(make())
        .arg(format!("-j{}", env::var("NUM_JOBS").unwrap()))
        .current_dir(&build));
    // Link to compiled library.
    println!("cargo:rustc-link-search={}/src/lib/.libs", build.display());
    println!("cargo:rustc-link-lib=static=cares");
}
#[cfg(not(feature = "build-cmake"))]
/// Compiles c-ares with nmake on MSVC targets and emits link directives.
/// Assumes `main` has already copied the source tree into OUT_DIR.
fn build_msvc(target: &str) {
    // Prepare. We've already copied the c-ares source code into the output
    // directory.
    let outdir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
    let c_ares_dir = outdir.join("c-ares");
    // Generate ares_build.h etc. for the MSVC build.
    run(Command::new("cmd")
        .current_dir(&c_ares_dir)
        .arg("/c")
        .arg("buildconf.bat"));
    // Compile.
    let mut cmd = nmake(target);
    cmd.current_dir(&c_ares_dir);
    cmd.args(["/f", "Makefile.msvc", "CFG=lib-release", "c-ares"]);
    run(&mut cmd);
    // Install library into OUT_DIR/build so the search path below finds it.
    let build = outdir.join("build");
    let mut cmd = nmake(target);
    cmd.current_dir(&c_ares_dir);
    cmd.args(["/f", "Makefile.msvc", "/a", "CFG=lib-release", "install"]);
    cmd.env("INSTALL_DIR", format!("{}", build.display()));
    run(&mut cmd);
    // Link to compiled library. iphlpapi is a Windows system dependency of
    // c-ares; the static library is named libcares under MSVC.
    println!("cargo:rustc-link-search={}/lib", build.display());
    println!("cargo:rustc-link-lib=iphlpapi");
    println!("cargo:rustc-link-lib=static=libcares");
}
|
#![allow(unused_imports)]
#![allow(dead_code)]
use std::convert::TryFrom;
use std::fmt::{self, Formatter};
use std::str::FromStr;
use crate::quickfix_errors::*;
#[derive(Debug, Clone, Copy)]
pub enum FixType {
INT,
FLOAT,
STRING,
BOOL,
CHAR,
}
// seems unnecessary
// #[derive(Debug)]
// pub struct FixTypeField {
// pub field_type: FixType,
// pub data: String
// }
// impl fmt::Display for FixTypeField {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// write!(f, "{}", self.data)
// }
// }
#[derive(Debug, Clone, Copy)]
pub struct Int(i64);
impl Int {
pub fn new<T: Into<i64>>(value: T) -> Int {
Int(value.into())
}
}
impl fmt::Display for Int {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
// impl From<Int> for FixTypeField {
// fn from(value: Int) -> FixTypeField {
// FixTypeField {
// field_type: FixType::INT,
// data: value.to_string()
// }
// }
// }
impl<T: Into<i64>> From<T> for Int {
fn from(value: T) -> Int {
Int::new(value)
}
}
// impl TryFrom<FixTypeField> for Int {
// type Error = FixTypeFieldParseError;
// fn try_from(value: FixTypeField) -> Result<Self, Self::Error> {
// match value.field_type {
// FixType::INT => {
// match value.data.parse::<i64>() {
// Ok(i) => Ok(Int::new(i)),
// Err(_) => Err(FixTypeFieldParseError {
// kind: FixTypeFieldParseErrorKind::NotInt
// })
// }
// },
// _ => Err(FixTypeFieldParseError {
// kind: FixTypeFieldParseErrorKind::NotInt
// })
// }
// }
// }
impl FromStr for Int {
    type Err = SessionLevelRejectErr;

    /// Parses a decimal integer field value. Malformed input becomes a
    /// session-level parse reject carrying the underlying parse error.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        s.parse::<i64>()
            .map(Int::new)
            .map_err(|e| SessionLevelRejectErr::parse_err(Some(Box::new(e))))
    }
}
#[derive(Debug, Clone, Copy)]
pub struct Float(f64);
impl Float {
pub fn new<T: Into<f64>>(value: T) -> Float {
Float(value.into())
}
}
impl fmt::Display for Float {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
// impl From<Float> for FixTypeField {
// fn from(value: Float) -> FixTypeField {
// FixTypeField {
// field_type: FixType::FLOAT,
// data: value.to_string()
// }
// }
// }
impl<T: Into<f64>> From<T> for Float {
fn from(value: T) -> Float {
Float::new(value)
}
}
impl FromStr for Float {
type Err = SessionLevelRejectErr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.parse::<f64>() {
Ok(f) => Ok(Float::new(f)),
Err(e) => Err(SessionLevelRejectErr::parse_err(Some(Box::new(e)))),
}
}
}
// impl TryFrom<FixTypeField> for Float {
// type Error = FixTypeFieldParseError;
// fn try_from(value: FixTypeField) -> Result<Self, Self::Error> {
// match value.field_type {
// FixType::FLOAT => {
// match value.data.parse::<f64>() {
// Ok(i) => Ok(Float::new(i)),
// Err(_) => Err(FixTypeFieldParseError {
// kind: FixTypeFieldParseErrorKind::NotFloat
// })
// }
// },
// _ => Err(FixTypeFieldParseError {
// kind: FixTypeFieldParseErrorKind::NotFloat
// })
// }
// }
// }
// #[derive(Debug)]
// pub struct Str (String);
// impl Str {
// pub fn new<T: Into<String>>(value: T) -> Str {
// Str (value.into())
// }
// }
// impl fmt::Display for Str {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// write!(f, "{}", self.0)
// }
// }
// impl From<Str> for FixTypeField {
// fn from(value: Str) -> FixTypeField {
// FixTypeField {
// field_type: FixType::STRING,
// data: value.to_string()
// }
// }
// }
// impl From<String> for FixTypeField {
// fn from(value: String) -> FixTypeField {
// FixTypeField {
// field_type: FixType::STRING,
// data: value
// }
// }
// }
// impl From<FixTypeField> for Str {
// fn from(value: FixTypeField) -> Str {
// Str::new(value.data)
// }
// }
// impl From<FixTypeField> for String {
// fn from(value: FixTypeField) -> String {
// value.data.to_string()
// }
// }
#[derive(Debug, Clone, Copy)]
pub struct Char(char);
impl Char {
pub fn new<T: Into<char>>(value: T) -> Char {
Char(value.into())
}
}
impl fmt::Display for Char {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
// impl From<Char> for FixTypeField {
// fn from(value: Char) -> FixTypeField {
// FixTypeField {
// field_type: FixType::CHAR,
// data: value.to_string()
// }
// }
// }
impl<T: Into<char>> From<T> for Char {
fn from(value: T) -> Char {
Char::new(value)
}
}
impl FromStr for Char {
type Err = SessionLevelRejectErr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.parse::<char>() {
Ok(c) if c.is_ascii() => Ok(Char::new(c)),
Ok(_) => Err(SessionLevelRejectErr::parse_err(None)),
Err(e) => Err(SessionLevelRejectErr::parse_err(Some(Box::new(e)))),
}
}
}
// impl TryFrom<FixTypeField> for Char {
// type Error = FixTypeFieldParseError;
// fn try_from(value: FixTypeField) -> Result<Self, Self::Error> {
// match value.field_type {
// FixType::CHAR => {
// match value.data.parse::<char>() {
// Ok(i) => Ok(Char::new(i)),
// Err(_) => Err(FixTypeFieldParseError {
// kind: FixTypeFieldParseErrorKind::NotChar
// })
// }
// },
// _ => Err(FixTypeFieldParseError {
// kind: FixTypeFieldParseErrorKind::NotChar
// })
// }
// }
// }
#[derive(Debug, Clone, Copy)]
pub struct Bool(char);

impl Bool {
    /// Builds a FIX boolean, stored in wire form: 'Y' for true, 'N' for false.
    pub fn new<T: Into<bool>>(value: T) -> Bool {
        Bool(if value.into() { 'Y' } else { 'N' })
    }
}
impl fmt::Display for Bool {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
// impl From<Bool> for FixTypeField {
// fn from(value: Bool) -> FixTypeField {
// FixTypeField {
// field_type: FixType::BOOL,
// data: value.to_string()
// }
// }
// }
impl<T: Into<bool>> From<T> for Bool {
fn from(value: T) -> Bool {
Bool::new(value)
}
}
impl FromStr for Bool {
    type Err = SessionLevelRejectErr;

    /// Parses a FIX boolean field: 'Y'/'y' => true, 'N'/'n' => false.
    /// Any other character — or input that is not exactly one character —
    /// yields a session-level parse reject.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let ch = s
            .parse::<char>()
            .map_err(|e| SessionLevelRejectErr::parse_err(Some(Box::new(e))))?;
        match ch.to_ascii_lowercase() {
            'y' => Ok(Bool::new(true)),
            'n' => Ok(Bool::new(false)),
            _ => Err(SessionLevelRejectErr::parse_err(None)),
        }
    }
}
// impl TryFrom<FixTypeField> for Bool {
// type Error = FixTypeFieldParseError;
// fn try_from(value: FixTypeField) -> Result<Self, Self::Error> {
// match value.field_type {
// FixType::BOOL => {
// match value.data.parse::<bool>() {
// Ok(i) => Ok(Bool::new(i)),
// Err(_) => Err(FixTypeFieldParseError {
// kind: FixTypeFieldParseErrorKind::NotBool
// })
// }
// },
// _ => Err(FixTypeFieldParseError {
// kind: FixTypeFieldParseErrorKind::NotBool
// })
// }
// }
// }
#[cfg(test)]
mod types_tests {}
|
/*===============================================================================================*/
// Copyright 2016 Kyle Finlay
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*===============================================================================================*/
/*===============================================================================================*/
//! This crate is the core of ionEngine.
//!
//! It provides an easy to use framework for creating 2D / 3D games and multimedia applications.
/*===============================================================================================*/
// Crate attributes
#![deny (missing_copy_implementations)]
#![deny (missing_docs)]
#![feature (custom_derive)]
#![feature (plugin)]
#![plugin (serde_macros)]
#[macro_use]
extern crate log;
// Modules
pub mod engine;
pub mod resource;
pub mod util;
//pub mod window;
pub mod renderer;
|
use bytes::{Buf, BufMut, Bytes, BytesMut};
use super::{utils, Body, Frame, REQUEST_MAX};
use crate::error::RSocketError;
use crate::utils::Writeable;
#[derive(Debug, Eq, PartialEq)]
pub struct RequestN {
n: u32,
}
pub struct RequestNBuilder {
stream_id: u32,
flag: u16,
value: RequestN,
}
impl RequestNBuilder {
    /// Starts a builder whose request count defaults to `REQUEST_MAX`.
    fn new(stream_id: u32, flag: u16) -> RequestNBuilder {
        let value = RequestN { n: REQUEST_MAX };
        RequestNBuilder {
            stream_id,
            flag,
            value,
        }
    }

    /// Overrides the default request count.
    pub fn set_n(mut self, n: u32) -> Self {
        self.value.n = n;
        self
    }

    /// Consumes the builder, wrapping the payload into a complete `Frame`.
    pub fn build(self) -> Frame {
        let RequestNBuilder {
            stream_id,
            flag,
            value,
        } = self;
        Frame::new(stream_id, Body::RequestN(value), flag)
    }
}
impl RequestN {
    /// Decodes a REQUEST_N body: a single big-endian u32 request count read
    /// from `bf`. Fewer than 4 remaining bytes is an incomplete frame.
    /// NOTE(review): `flag` is unused here — presumably kept so all frame
    /// decoders share the same signature; confirm against the dispatch site.
    pub(crate) fn decode(flag: u16, bf: &mut BytesMut) -> crate::Result<RequestN> {
        if bf.len() < 4 {
            Err(RSocketError::InCompleteFrame.into())
        } else {
            let n = bf.get_u32();
            Ok(RequestN { n })
        }
    }
    /// Starts building a REQUEST_N frame; `n` defaults to `REQUEST_MAX`.
    pub fn builder(stream_id: u32, flag: u16) -> RequestNBuilder {
        RequestNBuilder::new(stream_id, flag)
    }
    /// The requested item count carried by this frame.
    pub fn get_n(&self) -> u32 {
        self.n
    }
}
impl Writeable for RequestN {
    /// Serializes the body: the request count as a big-endian u32.
    fn write_to(&self, bf: &mut BytesMut) {
        bf.put_u32(self.get_n())
    }
    /// Encoded body length is always exactly 4 bytes.
    fn len(&self) -> usize {
        4
    }
}
|
pub mod post;
pub mod settings;
pub mod typer;
use crate::application::App;
use crossterm::event::KeyEvent;
pub type KeyHandler = fn(KeyEvent, &mut App);
|
#![allow(warnings)]
extern crate failure;
extern crate rusqlite;
use failure::{Error, err_msg};
extern crate pretty_env_logger;
extern crate ctrlc;
extern crate chrono;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
use std::collections::HashMap;
use std::io;
extern crate fern;
extern crate clap;
extern crate rumqtt;
use std::{thread, time};
use std::any::Any;
use std::fmt;
use std::fmt::Debug;
use std::sync::Arc;
use rumqtt::{MqttOptions, MqttClient, MqttCallback, Message, QoS};
use rusqlite::Connection;
use clap::{Arg, App};
/// Builds and installs the global `fern` logger writing to stdout.
///
/// `verbosity` maps: 0 => Info (with the noisy target capped at Warn),
/// 1 => Debug (noisy target at Info), 2 => Debug everywhere,
/// 3+ => Trace everywhere. Fails if a global logger is already installed.
fn setup_logging(verbosity: u8) -> Result<(), fern::InitError> {
    // Build the dispatch as a single expression instead of mutate-and-
    // reassign; also spell the return type plainly rather than via
    // `std::prelude::v1::Result`.
    let base_config = match verbosity {
        0 => {
            // Let's say we depend on something which whose "info" level messages are too verbose
            // to include in end-user output. If we don't need them, let's not include them.
            fern::Dispatch::new()
                .level(log::LevelFilter::Info)
                .level_for("overly-verbose-target", log::LevelFilter::Warn)
        }
        1 => fern::Dispatch::new()
            .level(log::LevelFilter::Debug)
            .level_for("overly-verbose-target", log::LevelFilter::Info),
        2 => fern::Dispatch::new().level(log::LevelFilter::Debug),
        // 3 or more -v flags.
        _ => fern::Dispatch::new().level(log::LevelFilter::Trace),
    };
    base_config.chain(io::stdout()).apply()?;
    Ok(())
}
#[derive(Debug)]
struct Reading {
topic: String,
datetime: i32,
value: f64
}
fn run() -> Result<(), Error> {
let matches = App::new("Mqtt-Monitor")
.version("0.1")
.author("Glenn Pierce <glennpierce@gmail.com>")
.about("Mqtt-Monitor")
.arg(
Arg::with_name("verbose")
.short("v")
.long("verbose")
.multiple(true)
.help("Increases logging verbosity each use for up to 3 times"),
)
.get_matches();
let verbosity: u8 = matches.occurrences_of("verbose");
setup_logging(verbosity).expect("failed to initialize logging.");
info!("Mqtt-Monitor v0.0.1 starting up!");
debug!("DEBUG output enabled.");
trace!("TRACE output enabled.");
info!(target: "overly-verbose-target", "hey, another library here, we're starting.");
let conn = Connection::open_in_memory().unwrap();
conn.execute("CREATE TABLE IF NOT EXISTS topics(id INTEGER PRIMARY KEY AUTOINCREMENT,
topic TEXT NOT NULL,
CONSTRAINT unique_topic UNIQUE (topic)
)", &[]).unwrap();
conn.execute("CREATE TABLE IF NOT EXISTS reading (
topic INTEGER PRIMARY KEY,
datetime INTEGER,
value REAL,
FOREIGN KEY(topic) REFERENCES topics(id)
)", &[]).unwrap();
conn.close();
let client_options = MqttOptions::new()
.set_keep_alive(5)
.set_reconnect(3)
.set_client_id("pierce_house-mqtt")
.set_broker("192.168.1.7:1883");
let msg_current_callback_fn = move |message : Message| {
let topic = message.topic.to_string();
let payload = Arc::try_unwrap(message.payload).unwrap();
let s = String::from_utf8_lossy(&payload);
//print!("{}: ", topic);
//println!("{}", s.to_string());
// let reading = Reading {
// topic: 0,
// datetime: "Steven".to_string(),
// value: time::get_time()
// };
let conn = Connection::open_in_memory().unwrap();
conn.execute("INSERT OR IGNORE INTO topics(topic) VALUES('?1')", &[&topic]).unwrap();
//SELECT id FROM "Values" WHERE data = 'SOME_DATA';
// conn.execute("INSERT INTO person (name, time_created, data)
// VALUES (?1, ?2, ?3)",
// &[&me.name, &me.time_created, &me.data]).unwrap();
// let current: f32 = s.parse().unwrap();
// let power = current * 248.0;
// let watt_payload = format!("{}", power);
// request.publish("test/basic", QoS::Level0, watt_payload.into_bytes()).expect("Publish failure");
};
let msg_callback = MqttCallback::new().on_message(msg_current_callback_fn);
let mut request = MqttClient::start(client_options, Some(msg_callback)).expect("Coudn't start");
let topics = vec![("wemos/+/current/status", QoS::Level0)];
request.subscribe(topics).expect("Subcription failure");
while true {
thread::sleep(time::Duration::from_millis(100));
}
Ok(())
}
/// Entry point: delegates to `run`, printing the error's backtrace and
/// exiting non-zero on failure.
fn main() {
    if let Err(ref e) = run() {
        println!("{}", e.backtrace());
        std::process::exit(1);
    }
}
use std::collections::HashSet;
use utils;
/// Project Euler 32: sum of all products m such that some identity
/// a * b = m uses each of the digits 1..=9 exactly once (no zeros).
///
/// Only two digit-count splits can produce a 9-digit identity:
/// 2-digit * 3-digit = 4-digit, and 1-digit * 4-digit = 4-digit.
/// Products are collected into a set so a product reachable via more than
/// one identity is summed only once.
pub fn problem_032() -> u32 {
    let mut pandigital = HashSet::new();
    let zero: u32 = 0;
    {
        // Shared check (was duplicated verbatim across both loops): gather
        // the digits of a, b and a*b; if together they form nine distinct
        // non-zero digits, record the product.
        let mut consider = |a, b| {
            let m = a * b;
            let mut digits: HashSet<u32> = HashSet::new();
            for &d in utils::as_digit_array(a).iter() {
                digits.insert(d);
            }
            for &d in utils::as_digit_array(b).iter() {
                digits.insert(d);
            }
            for &d in utils::as_digit_array(m).iter() {
                digits.insert(d);
            }
            // `&&` replaces the accidental bitwise `&` on bools (same result,
            // idiomatic short-circuit form).
            if !digits.contains(&zero) && digits.len() == 9 {
                pandigital.insert(m as u32);
            }
        };
        // 2-digit * 3-digit candidates; b is bounded so the product stays
        // below 10000 (4 digits).
        for a in 12..99 {
            for b in 123..(10000 / a) {
                consider(a, b);
            }
        }
        // 1-digit * 4-digit candidates.
        for a in 2..10 {
            for b in 1234..(10000 / a) {
                consider(a, b);
            }
        }
    }
    pandigital.iter().sum()
}
#[cfg(test)]
mod test {
    use super::*;
    // `Bencher` comes from the unstable `test` crate, so the bench below
    // only builds on a nightly toolchain with `#![feature(test)]`.
    use test::Bencher;
    #[test]
    fn test_problem_032() {
        // 45228 is the known answer to Project Euler problem 32.
        let ans: u32 = problem_032();
        println!("Answer to Problem 32: {}", ans);
        assert!(ans == 45228)
    }
    #[bench]
    fn bench_problem_032(b: &mut Bencher) {
        b.iter(|| problem_032());
    }
}
|
// NOTE(review): this module appears to be svd2rust-generated (register block
// layout plus per-register accessor types). Prefer regenerating from the SVD
// over hand-editing.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - SYSCFG secure configuration register"]
    pub seccfgr: SECCFGR,
    #[doc = "0x04 - configuration register 1"]
    pub cfgr1: CFGR1,
    #[doc = "0x08 - FPU interrupt mask register"]
    pub fpuimr: FPUIMR,
    #[doc = "0x0c - SYSCFG CPU non-secure lock register"]
    pub cnslckr: CNSLCKR,
    #[doc = "0x10 - SYSCFG CPU secure lock register"]
    pub cslockr: CSLOCKR,
    #[doc = "0x14 - CFGR2"]
    pub cfgr2: CFGR2,
    #[doc = "0x18 - SCSR"]
    pub scsr: SCSR,
    #[doc = "0x1c - SKR"]
    pub skr: SKR,
    #[doc = "0x20 - SWPR"]
    pub swpr: SWPR,
    #[doc = "0x24 - SWPR2"]
    pub swpr2: SWPR2,
    // 0x28..0x2c - reserved gap so that RSSCMDR lands at offset 0x2c.
    _reserved10: [u8; 0x04],
    #[doc = "0x2c - RSSCMDR"]
    pub rsscmdr: RSSCMDR,
}
#[doc = "SECCFGR (rw) register accessor: SYSCFG secure configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`seccfgr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`seccfgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`seccfgr`]
module"]
pub type SECCFGR = crate::Reg<seccfgr::SECCFGR_SPEC>;
#[doc = "SYSCFG secure configuration register"]
pub mod seccfgr;
#[doc = "CFGR1 (rw) register accessor: configuration register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cfgr1`]
module"]
pub type CFGR1 = crate::Reg<cfgr1::CFGR1_SPEC>;
#[doc = "configuration register 1"]
pub mod cfgr1;
#[doc = "FPUIMR (rw) register accessor: FPU interrupt mask register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fpuimr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fpuimr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fpuimr`]
module"]
pub type FPUIMR = crate::Reg<fpuimr::FPUIMR_SPEC>;
#[doc = "FPU interrupt mask register"]
pub mod fpuimr;
#[doc = "CNSLCKR (rw) register accessor: SYSCFG CPU non-secure lock register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cnslckr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cnslckr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cnslckr`]
module"]
pub type CNSLCKR = crate::Reg<cnslckr::CNSLCKR_SPEC>;
#[doc = "SYSCFG CPU non-secure lock register"]
pub mod cnslckr;
#[doc = "CSLOCKR (rw) register accessor: SYSCFG CPU secure lock register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cslockr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cslockr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cslockr`]
module"]
pub type CSLOCKR = crate::Reg<cslockr::CSLOCKR_SPEC>;
#[doc = "SYSCFG CPU secure lock register"]
pub mod cslockr;
#[doc = "SCSR (rw) register accessor: SCSR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`scsr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`scsr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`scsr`]
module"]
pub type SCSR = crate::Reg<scsr::SCSR_SPEC>;
#[doc = "SCSR"]
pub mod scsr;
#[doc = "CFGR2 (rw) register accessor: CFGR2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cfgr2`]
module"]
pub type CFGR2 = crate::Reg<cfgr2::CFGR2_SPEC>;
#[doc = "CFGR2"]
pub mod cfgr2;
#[doc = "SWPR (w) register accessor: SWPR\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`swpr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`swpr`]
module"]
pub type SWPR = crate::Reg<swpr::SWPR_SPEC>;
#[doc = "SWPR"]
pub mod swpr;
#[doc = "SKR (w) register accessor: SKR\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`skr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`skr`]
module"]
pub type SKR = crate::Reg<skr::SKR_SPEC>;
#[doc = "SKR"]
pub mod skr;
#[doc = "SWPR2 (w) register accessor: SWPR2\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`swpr2::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`swpr2`]
module"]
pub type SWPR2 = crate::Reg<swpr2::SWPR2_SPEC>;
#[doc = "SWPR2"]
pub mod swpr2;
#[doc = "RSSCMDR (rw) register accessor: RSSCMDR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rsscmdr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rsscmdr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rsscmdr`]
module"]
pub type RSSCMDR = crate::Reg<rsscmdr::RSSCMDR_SPEC>;
#[doc = "RSSCMDR"]
pub mod rsscmdr;
|
// NOTE(review): svd2rust-style generated accessors for the "Own address
// register 2" (OAR2). Bit 0 is ENDUAL, bits 1..=7 are ADD2 (see `impl R` /
// `impl W` below). Prefer regenerating from the SVD over hand-editing.
#[doc = "Register `OAR2` reader"]
pub type R = crate::R<OAR2_SPEC>;
#[doc = "Register `OAR2` writer"]
pub type W = crate::W<OAR2_SPEC>;
#[doc = "Field `ENDUAL` reader - Dual addressing mode enable"]
pub type ENDUAL_R = crate::BitReader<ENDUAL_A>;
#[doc = "Dual addressing mode enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ENDUAL_A {
    #[doc = "0: Single addressing mode"]
    Single = 0,
    #[doc = "1: Dual addressing mode"]
    Dual = 1,
}
impl From<ENDUAL_A> for bool {
    #[inline(always)]
    fn from(variant: ENDUAL_A) -> Self {
        variant as u8 != 0
    }
}
impl ENDUAL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ENDUAL_A {
        // The field is a single bit, so the mapping is total.
        match self.bits {
            false => ENDUAL_A::Single,
            true => ENDUAL_A::Dual,
        }
    }
    #[doc = "Single addressing mode"]
    #[inline(always)]
    pub fn is_single(&self) -> bool {
        *self == ENDUAL_A::Single
    }
    #[doc = "Dual addressing mode"]
    #[inline(always)]
    pub fn is_dual(&self) -> bool {
        *self == ENDUAL_A::Dual
    }
}
#[doc = "Field `ENDUAL` writer - Dual addressing mode enable"]
pub type ENDUAL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ENDUAL_A>;
impl<'a, REG, const O: u8> ENDUAL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Single addressing mode"]
    #[inline(always)]
    pub fn single(self) -> &'a mut crate::W<REG> {
        self.variant(ENDUAL_A::Single)
    }
    #[doc = "Dual addressing mode"]
    #[inline(always)]
    pub fn dual(self) -> &'a mut crate::W<REG> {
        self.variant(ENDUAL_A::Dual)
    }
}
#[doc = "Field `ADD2` reader - Interface address"]
pub type ADD2_R = crate::FieldReader;
#[doc = "Field `ADD2` writer - Interface address"]
pub type ADD2_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 7, O>;
impl R {
    #[doc = "Bit 0 - Dual addressing mode enable"]
    #[inline(always)]
    pub fn endual(&self) -> ENDUAL_R {
        ENDUAL_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bits 1:7 - Interface address"]
    #[inline(always)]
    pub fn add2(&self) -> ADD2_R {
        ADD2_R::new(((self.bits >> 1) & 0x7f) as u8)
    }
}
impl W {
    #[doc = "Bit 0 - Dual addressing mode enable"]
    #[inline(always)]
    #[must_use]
    pub fn endual(&mut self) -> ENDUAL_W<OAR2_SPEC, 0> {
        ENDUAL_W::new(self)
    }
    #[doc = "Bits 1:7 - Interface address"]
    #[inline(always)]
    #[must_use]
    pub fn add2(&mut self) -> ADD2_W<OAR2_SPEC, 1> {
        ADD2_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Own address register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`oar2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`oar2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct OAR2_SPEC;
impl crate::RegisterSpec for OAR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`oar2::R`](R) reader structure"]
impl crate::Readable for OAR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`oar2::W`](W) writer structure"]
impl crate::Writable for OAR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets OAR2 to value 0"]
impl crate::Resettable for OAR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#[cfg(test)]
#[macro_use]
extern crate yaserde_derive;
pub mod discovery;
pub mod soap;
pub use schema;
mod utils;
|
#![allow(unused_macros)]
// Builds a `HashMap` from `key => value` pairs, pre-sizing the map with the
// exact number of entries so no rehashing happens during construction.
//
// `@single` and `@count` are internal helper rules: `@single` maps any token
// tree to `()`, and `@count` turns the key list into a `[(); N]` unit array
// whose compile-time length is the number of entries.
macro_rules! gen_hashmap {
    // Internal: discard one token tree, producing `()`.
    (@single $($e: tt)*) => (());
    // Internal: count the entries as the length of a unit array.
    (@count $($e: expr)*) => (<[()]>::len(&[$(gen_hashmap!(@single $e)),*]));
    // Accept a trailing comma by forwarding to the main rule below.
    ($($key: expr => $value: expr,)+) => (gen_hashmap!($($key => $value),+));
    {$($key: expr => $value: expr),*} => {
        {
            let count = gen_hashmap!(@count $($key)*);
            let mut hashmap = ::std::collections::HashMap::with_capacity(count);
            $(
                hashmap.insert($key, $value);
            )*
            hashmap
        }
    }
}
|
use ComponentWeak;
/// Error conditions reported for components.
#[derive(Debug)]
pub enum Error {
    /// Fallback for failures that have no dedicated variant yet.
    #[allow(dead_code)]
    Unknown,
    /// A component received a voltage outside its expected range.
    HighVoltage(HighVoltage),
}
impl Error {
pub fn is_high_voltage_error(&self) -> bool {
match *self {
Error::HighVoltage(_) => true,
_ => false,
}
}
}
/// Details of an over-voltage condition observed on a component.
#[derive(Debug)]
pub struct HighVoltage {
    /// The component that received the excessive voltage.
    pub component: ComponentWeak,
    /// The voltage observed at the component's input.
    pub input_voltage: f32,
    // Range the component expected — presumably `(min, max)`; confirm with
    // the code that constructs this error.
    pub expected_voltage: (f32, f32),
}
|
use std::iter::Iterator;
use std::collections::HashMap;
/// Sums every populated cell (including `(x, y)` itself, if present) in the
/// 3x3 window centred on `(x, y)`. Missing cells contribute 0.
fn sum(spiral: &HashMap<(isize, isize), usize>, x: isize, y: isize) -> usize {
    (x - 1..=x + 1)
        .flat_map(|i| (y - 1..=y + 1).map(move |j| (i, j)))
        .filter_map(|cell| spiral.get(&cell))
        .sum()
}
/// Walks the "sum spiral" (AoC 2017 day 3, part 2): starting from a single 1
/// at the origin, each visited cell stores the sum of its already-filled
/// neighbours. Returns the first stored value strictly greater than `needle`.
pub fn search(needle: usize) -> usize {
    let mut spiral: HashMap<(isize, isize), usize> = HashMap::new();
    spiral.insert((0, 0), 1);
    let (mut x, mut y) = (0isize, 0isize);
    // Each ring is traversed as four legs: `side` steps right, `side` steps
    // down, then `side + 1` steps left and `side + 1` steps up, with `side`
    // taking the odd values 1, 3, 5, ...
    let mut side = 1usize;
    loop {
        let legs = [
            (1isize, 0isize, side),
            (0, -1, side),
            (-1, 0, side + 1),
            (0, 1, side + 1),
        ];
        for &(dx, dy, steps) in legs.iter() {
            for _ in 0..steps {
                x += dx;
                y += dy;
                let value = sum(&spiral, x, y);
                spiral.insert((x, y), value);
                if value > needle {
                    return value;
                }
            }
        }
        side += 2;
    }
}
|
//! Deserializing CDR into Rust data types.
use std::{self, io::Read, marker::PhantomData};
use byteorder::{BigEndian, ByteOrder, LittleEndian, ReadBytesExt};
use serde::de::{self, IntoDeserializer};
use crate::{
error::{Error, Result},
size::{Infinite, SizeLimit},
};
/// A deserializer that reads bytes from a buffer.
///
/// `R` is the byte source, `S` enforces an optional bound on the number of
/// bytes consumed, and `E` fixes the byte order used for multi-byte values.
pub struct Deserializer<R, S, E> {
    reader: R,
    size_limit: S,
    // Bytes consumed so far; drives CDR alignment-padding calculations.
    pos: u64,
    // Zero-sized marker tying this deserializer to a `ByteOrder` type.
    phantom: PhantomData<E>,
}
impl<R, S, E> Deserializer<R, S, E>
where
    R: Read,
    S: SizeLimit,
    E: ByteOrder,
{
    /// Creates a deserializer reading from `reader`, starting at position 0
    /// and charging all reads against `size_limit`.
    pub fn new(reader: R, size_limit: S) -> Self {
        Self {
            reader,
            size_limit,
            pos: 0,
            phantom: PhantomData,
        }
    }
    /// Consumes the padding bytes needed to align the stream to
    /// `size_of::<T>()` (primitives are aligned to their own size).
    fn read_padding_of<T>(&mut self) -> Result<()> {
        // Calculate the required padding to align with 1-byte, 2-byte, 4-byte, 8-byte
        // boundaries Instead of using the slow modulo operation '%', the faster
        // bit-masking is used
        let alignment = std::mem::size_of::<T>();
        let rem_mask = alignment - 1; // mask like 0x0, 0x1, 0x3, 0x7
        let mut padding: [u8; 8] = [0; 8];
        match (self.pos as usize) & rem_mask {
            0 => Ok(()),
            n @ 1..=7 => {
                let amt = alignment - n;
                // Charge the padding against the limit, then discard it.
                self.read_size(amt as u64)?;
                self.reader
                    .read_exact(&mut padding[..amt])
                    .map_err(Into::into)
            }
            _ => unreachable!(),
        }
    }
    /// Advances the logical position by `size` bytes and charges them
    /// against the configured size limit.
    fn read_size(&mut self, size: u64) -> Result<()> {
        self.pos += size;
        self.size_limit.add(size)
    }
    /// Convenience wrapper: charges `size_of::<T>()` bytes.
    fn read_size_of<T>(&mut self) -> Result<()> {
        self.read_size(std::mem::size_of::<T>() as u64)
    }
    /// Reads a length-prefixed byte string, drops its trailing NUL, and
    /// validates the remainder as UTF-8.
    fn read_string(&mut self) -> Result<String> {
        String::from_utf8(self.read_bytes().map(|mut v| {
            v.pop(); // removes a terminating null character
            v
        })?)
        .map_err(|e| Error::InvalidUtf8Encoding(e.utf8_error()))
    }
    /// Reads a `u32` length prefix followed by that many raw bytes.
    fn read_bytes(&mut self) -> Result<Vec<u8>> {
        let len: u32 = de::Deserialize::deserialize(&mut *self)?;
        let mut buf = vec![0_u8; len as usize];
        self.read_size(u64::from(len))?;
        self.reader.read_exact(&mut buf[..])?;
        Ok(buf)
    }
    /// Resets the byte-position counter (and thus alignment) to zero.
    pub(crate) fn reset_pos(&mut self) {
        self.pos = 0;
    }
}
// Generates one fixed-size-primitive `de::Deserializer` method: skip the
// alignment padding for the type, charge its size against the limit, then
// read it with byte order `E` and hand it to the visitor.
macro_rules! impl_deserialize_value {
    ($de_method:ident<$ty:ty> = $visitor_method:ident ($reader_method:ident)) => {
        fn $de_method<V>(self, visitor: V) -> Result<V::Value>
        where
            V: de::Visitor<'de>,
        {
            self.read_padding_of::<$ty>()?;
            self.read_size_of::<$ty>()?;
            visitor.$visitor_method(self.reader.$reader_method::<E>()?)
        }
    };
}
impl<'de, 'a, R, S, E> de::Deserializer<'de> for &'a mut Deserializer<R, S, E>
where
    R: Read,
    S: SizeLimit,
    E: ByteOrder,
{
    type Error = Error;
    // Multi-byte numeric primitives: generated by `impl_deserialize_value!`
    // (align, account for size, read with byte order `E`).
    impl_deserialize_value!(deserialize_i16<i16> = visit_i16(read_i16));
    impl_deserialize_value!(deserialize_i32<i32> = visit_i32(read_i32));
    impl_deserialize_value!(deserialize_i64<i64> = visit_i64(read_i64));
    impl_deserialize_value!(deserialize_u16<u16> = visit_u16(read_u16));
    impl_deserialize_value!(deserialize_u32<u32> = visit_u32(read_u32));
    impl_deserialize_value!(deserialize_u64<u64> = visit_u64(read_u64));
    impl_deserialize_value!(deserialize_f32<f32> = visit_f32(read_f32));
    impl_deserialize_value!(deserialize_f64<f64> = visit_f64(read_f64));
    // The format is not self-describing, so `deserialize_any` cannot work.
    fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        Err(Error::DeserializeAnyNotSupported)
    }
    // Booleans are a single byte: 0 = false, 1 = true; anything else errors.
    fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        let value: u8 = de::Deserialize::deserialize(self)?;
        match value {
            1 => visitor.visit_bool(true),
            0 => visitor.visit_bool(false),
            value => Err(Error::InvalidBoolEncoding(value)),
        }
    }
    // Single bytes never need alignment padding, so only size is charged.
    fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        self.read_size_of::<i8>()?;
        visitor.visit_i8(self.reader.read_i8()?)
    }
    fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        self.read_size_of::<u8>()?;
        visitor.visit_u8(self.reader.read_u8()?)
    }
    // Only single-byte characters are accepted: a lead byte introducing a
    // multi-byte UTF-8 sequence is rejected as `InvalidCharEncoding`.
    fn deserialize_char<V>(self, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        let mut buf = [0u8; 4];
        self.reader.read_exact(&mut buf[..1])?;
        if utf8_char_width(buf[0]) != 1 {
            Err(Error::InvalidCharEncoding)
        } else {
            self.read_size(1)?;
            visitor.visit_char(buf[0] as char)
        }
    }
    fn deserialize_str<V>(self, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        visitor.visit_str(&self.read_string()?)
    }
    fn deserialize_string<V>(self, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        visitor.visit_string(self.read_string()?)
    }
    fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        visitor.visit_bytes(&self.read_bytes()?)
    }
    fn deserialize_byte_buf<V>(self, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        visitor.visit_byte_buf(self.read_bytes()?)
    }
    // `Option`, maps, and identifiers have no encoding in this format.
    fn deserialize_option<V>(self, _visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        Err(Error::TypeNotSupported)
    }
    fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        visitor.visit_unit()
    }
    fn deserialize_unit_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        visitor.visit_unit()
    }
    fn deserialize_newtype_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        visitor.visit_newtype_struct(self)
    }
    // Sequences are a u32 element count followed by the elements themselves.
    fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        let len: u32 = de::Deserialize::deserialize(&mut *self)?;
        self.deserialize_tuple(len as usize, visitor)
    }
    // Tuples (and structs, which reuse this) have no length prefix; the
    // `Access` adapter simply counts down a caller-supplied element count.
    fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        struct Access<'a, R, S, E>
        where
            R: Read + 'a,
            S: SizeLimit + 'a,
            E: ByteOrder + 'a,
        {
            deserializer: &'a mut Deserializer<R, S, E>,
            len: usize,
        }
        impl<'de, 'a, R, S, E> de::SeqAccess<'de> for Access<'a, R, S, E>
        where
            R: Read + 'a,
            S: SizeLimit,
            E: ByteOrder,
        {
            type Error = Error;
            fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>>
            where
                T: de::DeserializeSeed<'de>,
            {
                if self.len > 0 {
                    self.len -= 1;
                    let value = de::DeserializeSeed::deserialize(seed, &mut *self.deserializer)?;
                    Ok(Some(value))
                } else {
                    Ok(None)
                }
            }
            fn size_hint(&self) -> Option<usize> {
                Some(self.len)
            }
        }
        visitor.visit_seq(Access {
            deserializer: self,
            len,
        })
    }
    fn deserialize_tuple_struct<V>(
        self,
        _name: &'static str,
        len: usize,
        visitor: V,
    ) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        self.deserialize_tuple(len, visitor)
    }
    fn deserialize_map<V>(self, _visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        Err(Error::TypeNotSupported)
    }
    // Struct fields are encoded in declaration order with no names or tags.
    fn deserialize_struct<V>(
        self,
        _name: &'static str,
        fields: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        self.deserialize_tuple(fields.len(), visitor)
    }
    // Enums are a u32 variant index followed by the variant payload.
    fn deserialize_enum<V>(
        self,
        _name: &'static str,
        _variants: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        impl<'de, 'a, R, S, E> de::EnumAccess<'de> for &'a mut Deserializer<R, S, E>
        where
            R: Read + 'a,
            S: SizeLimit,
            E: ByteOrder,
        {
            type Error = Error;
            type Variant = Self;
            fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant)>
            where
                V: de::DeserializeSeed<'de>,
            {
                let idx: u32 = de::Deserialize::deserialize(&mut *self)?;
                let val: Result<_> = seed.deserialize(idx.into_deserializer());
                Ok((val?, self))
            }
        }
        visitor.visit_enum(self)
    }
    fn deserialize_identifier<V>(self, _visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        Err(Error::TypeNotSupported)
    }
    fn deserialize_ignored_any<V>(self, _visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        Err(Error::TypeNotSupported)
    }
    // Binary format: serde should not use human-readable representations.
    fn is_human_readable(&self) -> bool {
        false
    }
}
impl<'de, 'a, R, S, E> de::VariantAccess<'de> for &'a mut Deserializer<R, S, E>
where
    R: Read,
    S: SizeLimit,
    E: ByteOrder,
{
    type Error = Error;
    // Unit variants carry no payload beyond the already-read discriminant.
    fn unit_variant(self) -> Result<()> {
        Ok(())
    }
    // The remaining variant kinds read their payload in place, immediately
    // after the discriminant.
    fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value>
    where
        T: de::DeserializeSeed<'de>,
    {
        de::DeserializeSeed::deserialize(seed, self)
    }
    fn tuple_variant<V>(self, len: usize, visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        de::Deserializer::deserialize_tuple(self, len, visitor)
    }
    fn struct_variant<V>(self, fields: &'static [&'static str], visitor: V) -> Result<V::Value>
    where
        V: de::Visitor<'de>,
    {
        de::Deserializer::deserialize_tuple(self, fields.len(), visitor)
    }
}
impl<R, S> From<Deserializer<R, S, BigEndian>> for Deserializer<R, S, LittleEndian> {
fn from(t: Deserializer<R, S, BigEndian>) -> Self {
Self {
reader: t.reader,
size_limit: t.size_limit,
pos: t.pos,
phantom: PhantomData,
}
}
}
/// Returns the total byte length of a UTF-8 sequence whose lead byte is
/// `first_byte`, or 0 when that byte cannot start a sequence (continuation
/// bytes and the values forbidden by RFC 3629).
#[inline]
fn utf8_char_width(first_byte: u8) -> usize {
    usize::from(UTF8_CHAR_WIDTH[usize::from(first_byte)])
}
// Lead-byte width lookup table, per https://tools.ietf.org/html/rfc3629:
// ASCII (0x00-0x7F) is 1 byte, continuation/invalid bytes map to 0, and the
// multi-byte lead ranges map to 2, 3, or 4.
const UTF8_CHAR_WIDTH: &[u8; 256] = &[
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, //
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x1F
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, //
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x3F
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, //
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x5F
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, //
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x7F
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x9F
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, //
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0xBF
    0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, //
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // 0xDF
    3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // 0xEF
    4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0xFF
];
/// Deserializes a slice of bytes into an object.
///
/// No byte limit is enforced (`Infinite`); `E` selects the byte order used
/// for multi-byte values.
pub fn deserialize_data<'de, T, E>(bytes: &[u8]) -> Result<T>
where
    T: de::Deserialize<'de>,
    E: ByteOrder,
{
    deserialize_data_from::<_, _, _, E>(bytes, Infinite)
}
/// Deserializes an object directly from a `Read`.
///
/// `size_limit` bounds how many bytes may be consumed (see `SizeLimit::add`);
/// `E` selects the byte order used for multi-byte values.
pub fn deserialize_data_from<'de, R, T, S, E>(reader: R, size_limit: S) -> Result<T>
where
    R: Read,
    T: de::Deserialize<'de>,
    S: SizeLimit,
    E: ByteOrder,
{
    let mut deserializer = Deserializer::<_, S, E>::new(reader, size_limit);
    de::Deserialize::deserialize(&mut deserializer)
}
|
use crate::{Net, Spec, DEFAULT_TX_PROPOSAL_WINDOW};
use log::info;
/// Integration spec: a transaction committed on node0 must be returned to
/// node0's pending pool after node0 reorgs onto node1's longer chain.
pub struct PoolReconcile;
impl Spec for PoolReconcile {
    crate::name!("pool_reconcile");
    crate::setup!(connect_all: false, num_nodes: 2);
    fn run(&self, net: &mut Net) {
        let node0 = &net.nodes[0];
        let node1 = &net.nodes[1];
        info!("Generate DEFAULT_TX_PROPOSAL_WINDOW block on node0");
        node0.generate_blocks((DEFAULT_TX_PROPOSAL_WINDOW.1 + 2) as usize);
        info!("Use generated block's cellbase as tx input");
        let hash = node0.generate_transaction();
        info!("Generate 3 more blocks on node0");
        node0.generate_blocks(3);
        info!("Pool should be empty");
        // The tx is committed on node0's chain, so it carries a block hash.
        assert!(node0
            .rpc_client()
            .get_transaction(hash.clone())
            .unwrap()
            .tx_status
            .block_hash
            .is_some());
        // 20 blocks make node1's chain strictly longer, forcing node0 to reorg
        // when they connect. (Log message previously claimed 5 blocks.)
        info!("Generate 20 blocks on node1");
        node1.generate_blocks(20);
        info!("Connect node0 to node1");
        node0.connect(node1);
        net.waiting_for_sync(20);
        info!("Tx should be re-added to node0's pool");
        // After the reorg the tx is no longer committed: no block hash.
        assert!(node0
            .rpc_client()
            .get_transaction(hash)
            .unwrap()
            .tx_status
            .block_hash
            .is_none());
    }
}
|
extern crate rustc_serialize;
extern crate rand;
pub mod wrap_koh;
pub mod koh;
|
use std::str::FromStr;
use std::env;
use std::process::*;
/// Languages for which a language server can be launched.
#[derive(Debug)]
pub enum SupportedLanguage {
    Rust,
    Typescript,
}
impl FromStr for SupportedLanguage {
    type Err = ();
    /// Parses a lowercase language identifier ("rust" or "typescript");
    /// any other string yields `Err(())`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "rust" => Ok(SupportedLanguage::Rust),
            "typescript" => Ok(SupportedLanguage::Typescript),
            _ => Err(()),
        }
    }
}
impl SupportedLanguage {
    /// Spawns the language-server process for this language.
    ///
    /// Returns `Err(())` when the server binary cannot be spawned (e.g. it
    /// is not installed or not on `PATH`) instead of panicking, matching the
    /// `Result` return type.
    pub fn start_language_server(&self) -> Result<Child, ()> {
        let program = match *self {
            SupportedLanguage::Rust => "rls",
            SupportedLanguage::Typescript => "./node_modules/typescript/bin/tsserver",
        };
        Command::new(program).spawn().map_err(|_| ())
    }
}
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(cell_extras)]
#![feature(plugin)]
#![feature(plugin_registrar)]
#![feature(rustc_private)]
#![plugin(clippy)]
extern crate clippy;
extern crate env_logger;
extern crate js;
extern crate libc;
extern crate rustc_plugin;
mod error;
mod script;
use error::Error;
use rustc_plugin::Registry;
use std::env;
use std::ffi::OsString;
use std::path::Path;
use std::process;
// Compiler-plugin entry point (old nightly `plugin_registrar` API); forwards
// registration to clippy so its lint passes are installed.
#[plugin_registrar]
pub fn plugin_registrar(registry: &mut Registry) {
    clippy::plugin_registrar(registry);
}
/// Runs the script at `path`; a missing CLI argument (`None`) is reported
/// as `Error::MissingArgument`.
fn do_main(path: Option<OsString>) -> Result<(), Error> {
    let path = try!(path.ok_or(Error::MissingArgument));
    script::run_script(Path::new(&path))
}
fn main() {
env_logger::init().unwrap();
match do_main(env::args_os().nth(1)) {
Ok(()) => println!("Hello, world!"),
Err(error) => {
println!("Finished unsuccessfully: {}.", error);
process::exit(1);
}
}
}
// Passing no path must surface `Error::MissingArgument` — not succeed, and
// not fail with some other error.
#[test]
fn missing_argument() {
    match do_main(None) {
        Err(Error::MissingArgument) => (),
        Err(error) => panic!("Unexpected error: {}", error),
        Ok(()) => panic!("Unexpected ok"),
    }
}
|
//! # Async GraphQL Telemetry Extension
//!
//! The `Extensions` trait in [async-graphql](https://github.com/async-graphql/async-graphql) essentially mimics traditional
//! middleware in HTTP servers (although arguably more powerful due to the
//! ability to hook into various stages of the query resolution). This extension is an
//! attempt at adding in some of the Open Telemetry integrations in order
//! to handle metric and trace creation through this API, as opposed to manually
//! having to instrument every query.
//!
//! It is essentially a straight copy and paste from the [ApolloTracing](https://docs.rs/async-graphql/latest/async_graphql/extensions/struct.ApolloTracing.html) & [Tracing](https://docs.rs/async-graphql/latest/async_graphql/extensions/struct.Tracing.html) Extensions from
//! the core library, just modified to enable metric creation and a slightly
//! different span generation pattern.
//!
//! ## Features
//!
//! This extension includes
//! - Tracing (via [tracing](https://github.com/tokio-rs/tracing))
//! - High Level Metrics (via [OpenTelemetry](https://github.com/open-telemetry/opentelemetry-rust/tree/main/opentelemetry))
//!
//! ## Reason for combining the extensions
//!
//! The primary reason for combining these extensions is to minimise the amount of data required to
//! be stored per request. As an example, in order to generate the high level metrics, the Apollo Tracing data can
//! be used. So combining all 3 made the most sense for minimising the space and
//! computation done while processing requests.
//!
//! ## License
//!
//! Anything found within here falls under the same licenses as the main
//! repository, which can be found here <https://github.com/async-graphql/async-graphql>
//!
//! MIT or Apache version 2.0
use opentelemetry::metrics::{Counter, ValueRecorder};
use opentelemetry::{global, Key, Unit};
use lazy_static::lazy_static;
use futures_util::stream::BoxStream;
use futures_util::TryFutureExt;
use tokio::time::Instant;
use tracing::{span, Level};
use tracing_futures::Instrument;
use async_graphql::extensions::{
Extension, ExtensionContext, ExtensionFactory, NextExecute, NextParseQuery, NextRequest,
NextResolve, NextSubscribe, NextValidation, ResolveInfo,
};
use async_graphql::parser::types::ExecutableDocument;
use async_graphql::{Response, ServerError, ServerResult, ValidationResult, Value, Variables};
use std::sync::Arc;
// Process-wide OpenTelemetry instruments, created lazily on first use via
// the global meter registered under `NAME`.
lazy_static! {
    // Incremented once per incoming GraphQL request (see `request`).
    static ref REQUESTS: Counter<u64> = {
        let meter = global::meter(NAME);
        let counter = meter
            .u64_counter("graphql_requests")
            .with_description("total number of HTTP requests sent to the graphQL server")
            .init();
        counter
    };
    // Incremented once per subscription stream (see `subscribe`).
    static ref SUBSCRIPTIONS: Counter<u64> = {
        let meter = global::meter(NAME);
        let counter = meter
            .u64_counter("graphql_subscriptions")
            .with_description("total number of subscriptions sent to the graphQL server")
            .init();
        counter
    };
    // Recorded per field resolution with query/type labels (see `resolve`).
    static ref REQUEST_DURATION: ValueRecorder<u64> = {
        let meter = global::meter(NAME);
        let observer = meter
            .u64_value_recorder("graphql_request_duration")
            .with_description("duration of successful graphql queries in milliseconds")
            .with_unit(Unit::new("milliseconds"))
            .init();
        observer
    };
    // Incremented for every field resolution that returns an error.
    static ref REQUEST_ERRORS: Counter<u64> = {
        let meter = global::meter(NAME);
        let counter = meter
            .u64_counter("graphql_request_errors")
            .with_description(
                "total number of graphQL queries resulting in an error being returned",
            )
            .init();
        counter
    };
}
// Target attached to every tracing span/event emitted by this extension.
const TARGET: &str = "async_graphql::graphql";
// Name of the global meter the metric instruments are registered on.
const NAME: &str = "graphql";
// Metric label keys attached to error counts and duration records.
const QUERY_KEY: Key = Key::from_static_str("query_name");
const QUERY_TYPE_KEY: Key = Key::from_static_str("query_type");
const RETURN_TYPE_KEY: Key = Key::from_static_str("return_type");
/// Zero-sized extension factory; install with
/// `Schema::build(...).extension(OpenTelemetry)`.
pub struct OpenTelemetry;
/// Extension instance created by [`OpenTelemetry`].
pub struct OpenTelemetryExtension {
    // Captured when the instance is created; `resolve` measures elapsed
    // time against it for the duration metric.
    start: Instant,
}
impl Default for OpenTelemetryExtension {
fn default() -> Self {
Self {
start: Instant::now(),
}
}
}
impl ExtensionFactory for OpenTelemetry {
    // Creates a fresh extension instance (with a fresh `start` timestamp).
    fn create(&self) -> Arc<dyn Extension> {
        Arc::new(OpenTelemetryExtension::default())
    }
}
#[async_trait::async_trait]
impl Extension for OpenTelemetryExtension {
    // Counts the request and wraps the whole pipeline in an INFO span.
    async fn request(&self, ctx: &ExtensionContext<'_>, next: NextRequest<'_>) -> Response {
        REQUESTS.add(1, &[]);
        next.run(ctx)
            .instrument(span!(target: TARGET, Level::INFO, "request"))
            .await
    }
    // Counts the subscription and instruments the response stream.
    fn subscribe<'s>(
        &self,
        ctx: &ExtensionContext<'_>,
        stream: BoxStream<'s, Response>,
        next: NextSubscribe<'_>,
    ) -> BoxStream<'s, Response> {
        SUBSCRIPTIONS.add(1, &[]);
        Box::pin(
            next.run(ctx, stream)
                .instrument(span!(target: TARGET, Level::INFO, "subscribe")),
        )
    }
    // Traces query parsing, recording the raw query source on the span.
    async fn parse_query(
        &self,
        ctx: &ExtensionContext<'_>,
        query: &str,
        variables: &Variables,
        next: NextParseQuery<'_>,
    ) -> ServerResult<ExecutableDocument> {
        let span = span!(target: TARGET, Level::INFO, "parse", source = query);
        tracing::trace!(parent: &span, source = query, "parsing received query");
        next.run(ctx, query, variables).instrument(span).await
    }
    async fn validation(
        &self,
        ctx: &ExtensionContext<'_>,
        next: NextValidation<'_>,
    ) -> Result<ValidationResult, Vec<ServerError>> {
        let span = span!(target: TARGET, Level::INFO, "validation");
        next.run(ctx).instrument(span).await
    }
    async fn execute(
        &self,
        ctx: &ExtensionContext<'_>,
        operation_name: Option<&str>,
        next: NextExecute<'_>,
    ) -> Response {
        let span = span!(target: TARGET, Level::INFO, "execute");
        next.run(ctx, operation_name).instrument(span).await
    }
    // Per-field resolution: traces the field, counts errors, and records the
    // elapsed time since this extension instance was created, labelled by
    // field path and types.
    async fn resolve(
        &self,
        ctx: &ExtensionContext<'_>,
        info: ResolveInfo<'_>,
        next: NextResolve<'_>,
    ) -> ServerResult<Option<Value>> {
        let path = info.path_node.to_string();
        let parent_type = info.parent_type.to_string();
        let return_type = info.return_type.to_string();
        let span = span!(
            target: TARGET,
            Level::INFO,
            "field",
            %path,
            %parent_type,
            %return_type
        );
        let result = next.run(ctx, info)
            .instrument(span)
            .map_err(|err| {
                // Count the failure and log it before propagating the error.
                REQUEST_ERRORS.add(1, &[QUERY_KEY.string(path.clone()), QUERY_TYPE_KEY.string(parent_type.clone()), RETURN_TYPE_KEY.string(return_type.clone())]);
                tracing::error!(target: TARGET, error = %err.message, extensions = ?&err.extensions);
                err
            })
            .await;
        let duration = Instant::now() - self.start;
        // This cast should be fine, because if this request duration overflows an u64, we have
        // bigger issues
        REQUEST_DURATION.record(
            duration.as_millis() as u64,
            &[
                QUERY_KEY.string(path),
                QUERY_TYPE_KEY.string(parent_type),
                RETURN_TYPE_KEY.string(return_type),
            ],
        );
        result
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use async_graphql::*;
    struct QueryRoot;
    #[Object]
    impl QueryRoot {
        // Fixed test resolver: returns a nested object so `resolve` runs for
        // both a composite field and its scalar children.
        pub async fn get_jane(&self) -> Query {
            Query {
                id: 100,
                details: SubQuery {
                    name: "Jane".to_owned(),
                },
            }
        }
    }
    #[derive(SimpleObject)]
    struct Query {
        id: i32,
        details: SubQuery,
    }
    #[derive(SimpleObject)]
    struct SubQuery {
        name: String,
    }
    // Smoke test: executing a query with the extension installed must not
    // panic; no assertions are made about the emitted metrics/spans.
    #[tokio::test]
    async fn basic_test() {
        let schema = Schema::build(QueryRoot, EmptyMutation, EmptySubscription)
            .extension(OpenTelemetry)
            .finish();
        let query = r#"
        query {
            getJane {
                id
                details {
                    name
                }
            }
        }
        "#;
        let request = Request::new(query);
        schema.execute(request).await;
    }
}
|
use std::cell::Cell;
use std::cell::RefCell;
use std::f64;
use std::rc::Rc;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use web_sys::{CanvasRenderingContext2d, HtmlCanvasElement, MouseEvent};
use crate::state::State;
// setup mouse event listener for drawing and start
/// Wires up mousedown/mouseup/mousemove listeners on `canvas` so the user can
/// draw freehand strokes. Stroke color, pen width, canvas dimensions, and the
/// undo stack all come from `state`.
///
/// The event-listener closures are leaked via `Closure::forget`; this is the
/// usual wasm-bindgen pattern for listeners installed once for the lifetime of
/// the page.
pub fn canvas_draw_start(
canvas: &HtmlCanvasElement,
state: &Rc<RefCell<State>>,
) -> Result<(), JsValue> {
let context = canvas
.get_context("2d")
.expect("Could not get context")
.unwrap()
.dyn_into::<CanvasRenderingContext2d>()
.unwrap();
// Shared flag: true while the mouse button is held down (set by mousedown,
// cleared by mouseup, read by mousemove).
let pressed = Rc::new(Cell::new(false));
// mousedown: snapshot the canvas for undo, then begin a new stroke path.
{
let context = context.clone();
let state = state.clone();
let pressed = pressed.clone();
let mouse_down = Closure::wrap(Box::new(move |event: MouseEvent| {
pressed.set(true);
// Save the current pixels so this whole stroke can be undone later.
let image_data = context
.get_image_data(
0.0,
0.0,
state.borrow().get_width() as f64,
state.borrow().get_height() as f64,
)
.unwrap();
state.borrow_mut().add_undo(image_data);
let new_x = event.offset_x() as f64;
let new_y = event.offset_y() as f64;
context.begin_path();
context.set_stroke_style(&JsValue::from(state.borrow().get_color()));
context.set_line_width(state.borrow().get_pen_thin());
context.move_to(new_x, new_y);
}) as Box<dyn FnMut(_)>);
canvas
.add_event_listener_with_callback("mousedown", mouse_down.as_ref().unchecked_ref())?;
mouse_down.forget(); // intentional leak: listener must live for the page's lifetime
}
// mouseup: finish the stroke; the 1x1 fill_rect makes a lone click leave a dot.
{
let context = context.clone();
let pressed = pressed.clone();
let mouse_up = Closure::wrap(Box::new(move |event: MouseEvent| {
pressed.set(false);
let new_x = event.offset_x() as f64;
let new_y = event.offset_y() as f64;
context.fill_rect(new_x, new_y, 1.0, 1.0);
context.line_to(new_x, new_y);
context.stroke();
}) as Box<dyn FnMut(_)>);
canvas.add_event_listener_with_callback("mouseup", mouse_up.as_ref().unchecked_ref())?;
mouse_up.forget();
}
// mousemove: extend and re-stroke the current path while the button is held.
{
let context = context.clone();
let pressed = pressed.clone();
let mouse_move = Closure::wrap(Box::new(move |event: MouseEvent| {
if pressed.get() {
let new_x = event.offset_x() as f64;
let new_y = event.offset_y() as f64;
context.line_to(new_x, new_y);
context.stroke();
}
}) as Box<dyn FnMut(_)>);
canvas
.add_event_listener_with_callback("mousemove", mouse_move.as_ref().unchecked_ref())?;
mouse_move.forget();
}
Ok(())
}
|
#![allow(non_snake_case)]
extern crate bulletproofs;
extern crate curve25519_dalek;
extern crate merlin;
extern crate rand;
use bulletproofs::r1cs::*;
use bulletproofs::{BulletproofGens, PedersenGens};
use curve25519_dalek::ristretto::CompressedRistretto;
use curve25519_dalek::scalar::Scalar;
use merlin::Transcript;
use rand::seq::SliceRandom;
use rand::thread_rng;
// Shuffle gadget (documented in markdown file)
/// A proof-of-shuffle: an R1CS proof that one committed list of scalars is a
/// permutation of another.
struct ShuffleProof(R1CSProof);
impl ShuffleProof {
/// Encodes the shuffle relation into `cs`: constrains `y` to be a
/// permutation of `x`.
///
/// Uses a product-of-differences argument: for a challenge scalar `z`
/// sampled after the inputs are committed, prod(x_i - z) == prod(y_i - z)
/// holds (with overwhelming probability over `z`) iff `y` is a permutation
/// of `x`. The two products are built with chains of multiplier gates
/// inside the randomized-constraint phase.
fn gadget<CS: RandomizableConstraintSystem>(
cs: &mut CS,
x: Vec<Variable>,
y: Vec<Variable>,
) -> Result<(), R1CSError> {
assert_eq!(x.len(), y.len());
let k = x.len();
// Degenerate case: a 1-element shuffle is just equality, no challenge needed.
if k == 1 {
cs.constrain(y[0] - x[0]);
return Ok(());
}
cs.specify_randomized_constraints(move |cs| {
let z = cs.challenge_scalar(b"shuffle challenge");
// Make last x multiplier for i = k-1 and k-2
let (_, _, last_mulx_out) = cs.multiply(x[k - 1] - z, x[k - 2] - z);
// Make multipliers for x from i == [0, k-3]
let first_mulx_out = (0..k - 2).rev().fold(last_mulx_out, |prev_out, i| {
let (_, _, o) = cs.multiply(prev_out.into(), x[i] - z);
o
});
// Make last y multiplier for i = k-1 and k-2
let (_, _, last_muly_out) = cs.multiply(y[k - 1] - z, y[k - 2] - z);
// Make multipliers for y from i == [0, k-3]
let first_muly_out = (0..k - 2).rev().fold(last_muly_out, |prev_out, i| {
let (_, _, o) = cs.multiply(prev_out.into(), y[i] - z);
o
});
// Constrain last x mul output and last y mul output to be equal
cs.constrain(first_mulx_out - first_muly_out);
Ok(())
})
}
}
impl ShuffleProof {
    /// Attempt to construct a proof that `output` is a permutation of `input`.
    ///
    /// Returns a tuple `(proof, input_commitments, output_commitments)`.
    /// (The previous doc described a two-element tuple, which did not match
    /// the actual return type.)
    pub fn prove<'a, 'b>(
        pc_gens: &'b PedersenGens,
        bp_gens: &'b BulletproofGens,
        transcript: &'a mut Transcript,
        input: &[Scalar],
        output: &[Scalar],
    ) -> Result<
        (
            ShuffleProof,
            Vec<CompressedRistretto>,
            Vec<CompressedRistretto>,
        ),
        R1CSError,
    > {
        // Apply a domain separator with the shuffle parameters to the transcript
        // XXX should this be part of the gadget?
        let k = input.len();
        transcript.append_message(b"dom-sep", b"ShuffleProof");
        transcript.append_u64(b"k", k as u64);
        // `pc_gens` is already a reference; the previous `&pc_gens` created a
        // redundant `&&PedersenGens` that only worked via deref coercion.
        let mut prover = Prover::new(pc_gens, transcript);
        // Construct blinding factors using an RNG.
        // Note: a non-example implementation would want to operate on existing commitments.
        let mut blinding_rng = rand::thread_rng();
        // Commit to each input/output value with a fresh blinding factor.
        // `iter()` (rather than `into_iter()` on a slice) makes the
        // by-reference iteration explicit.
        let (input_commitments, input_vars): (Vec<_>, Vec<_>) = input
            .iter()
            .map(|v| prover.commit(*v, Scalar::random(&mut blinding_rng)))
            .unzip();
        let (output_commitments, output_vars): (Vec<_>, Vec<_>) = output
            .iter()
            .map(|v| prover.commit(*v, Scalar::random(&mut blinding_rng)))
            .unzip();
        // Build the shuffle constraint system and produce the proof.
        ShuffleProof::gadget(&mut prover, input_vars, output_vars)?;
        let proof = prover.prove(bp_gens)?;
        Ok((ShuffleProof(proof), input_commitments, output_commitments))
    }
}
impl ShuffleProof {
    /// Attempt to verify a `ShuffleProof`.
    ///
    /// `input_commitments` / `output_commitments` must be the commitment lists
    /// returned by [`ShuffleProof::prove`], in the same order.
    pub fn verify<'a, 'b>(
        &self,
        pc_gens: &'b PedersenGens,
        bp_gens: &'b BulletproofGens,
        transcript: &'a mut Transcript,
        // Accept slices instead of `&Vec<_>`; every existing `&Vec<T>` call
        // site coerces to `&[T]`, so this is backward-compatible.
        input_commitments: &[CompressedRistretto],
        output_commitments: &[CompressedRistretto],
    ) -> Result<(), R1CSError> {
        // Apply a domain separator with the shuffle parameters to the transcript
        // XXX should this be part of the gadget?
        let k = input_commitments.len();
        transcript.append_message(b"dom-sep", b"ShuffleProof");
        transcript.append_u64(b"k", k as u64);
        let mut verifier = Verifier::new(transcript);
        // Re-introduce the committed values as high-level variables.
        let input_vars: Vec<_> = input_commitments
            .iter()
            .map(|V| verifier.commit(*V))
            .collect();
        let output_vars: Vec<_> = output_commitments
            .iter()
            .map(|V| verifier.commit(*V))
            .collect();
        ShuffleProof::gadget(&mut verifier, input_vars, output_vars)?;
        // `pc_gens`/`bp_gens` are already references; no extra `&` needed.
        verifier.verify(&self.0, pc_gens, bp_gens)
    }
}
/// Proves and verifies a shuffle of `k` random scalars, panicking on failure.
fn kshuffle_helper(k: usize) {
    use rand::Rng;
    // Common code
    let pc_gens = PedersenGens::default();
    // The circuit uses roughly 2k multipliers; the generator capacity must be a
    // power of two at least that large.
    let bp_gens = BulletproofGens::new((2 * k).next_power_of_two(), 1);
    let (proof, input_commitments, output_commitments) = {
        // Randomly generate inputs and outputs to kshuffle
        let mut rng = rand::thread_rng();
        let (min, max) = (0u64, std::u64::MAX);
        let input: Vec<Scalar> = (0..k)
            .map(|_| Scalar::from(rng.gen_range(min, max)))
            .collect();
        let mut output = input.clone();
        // Reuse the RNG already in scope instead of constructing a second
        // thread-local handle just for the shuffle.
        output.shuffle(&mut rng);
        let mut prover_transcript = Transcript::new(b"ShuffleProofTest");
        ShuffleProof::prove(&pc_gens, &bp_gens, &mut prover_transcript, &input, &output).unwrap()
    };
    {
        let mut verifier_transcript = Transcript::new(b"ShuffleProofTest");
        assert!(proof
            .verify(
                &pc_gens,
                &bp_gens,
                &mut verifier_transcript,
                &input_commitments,
                &output_commitments
            )
            .is_ok());
    }
}
// Exercise the shuffle gadget across a range of sizes, including the k = 1
// degenerate case (handled without randomized constraints) and larger,
// non-power-of-two shuffles.
#[test]
fn shuffle_gadget_test_1() {
kshuffle_helper(1);
}
#[test]
fn shuffle_gadget_test_2() {
kshuffle_helper(2);
}
#[test]
fn shuffle_gadget_test_3() {
kshuffle_helper(3);
}
#[test]
fn shuffle_gadget_test_4() {
kshuffle_helper(4);
}
#[test]
fn shuffle_gadget_test_5() {
kshuffle_helper(5);
}
#[test]
fn shuffle_gadget_test_6() {
kshuffle_helper(6);
}
#[test]
fn shuffle_gadget_test_7() {
kshuffle_helper(7);
}
#[test]
fn shuffle_gadget_test_24() {
kshuffle_helper(24);
}
#[test]
fn shuffle_gadget_test_42() {
kshuffle_helper(42);
}
/// Constrains (a1 + a2) * (b1 + b2) = (c1 + c2)
fn example_gadget<CS: ConstraintSystem>(
    cs: &mut CS,
    a1: LinearCombination,
    a2: LinearCombination,
    b1: LinearCombination,
    b2: LinearCombination,
    c1: LinearCombination,
    c2: LinearCombination,
) {
    // Allocate a single multiplier gate for the product of the two sums...
    let left = a1 + a2;
    let right = b1 + b2;
    let (_, _, product) = cs.multiply(left, right);
    // ...and require that it equals c1 + c2 (i.e. c1 + c2 - product == 0).
    cs.constrain(c1 + c2 - product);
}
// Prover's scope
/// Builds a proof that (a1 + a2) * (b1 + b2) = (c1 + c2), committing to
/// `a1, a2, b1, b2, c1` and treating `c2` as a public constant.
///
/// Returns the proof together with the five commitments, in the order the
/// values were listed above.
fn example_gadget_proof(
    pc_gens: &PedersenGens,
    bp_gens: &BulletproofGens,
    a1: u64,
    a2: u64,
    b1: u64,
    b2: u64,
    c1: u64,
    c2: u64,
) -> Result<(R1CSProof, Vec<CompressedRistretto>), R1CSError> {
    let mut transcript = Transcript::new(b"R1CSExampleGadget");
    // 1. Create a prover
    let mut prover = Prover::new(pc_gens, &mut transcript);
    // 2. Commit high-level variables
    // `iter()` (instead of `into_iter()` on an array) keeps the by-reference
    // iteration explicit and behaves identically on all Rust editions, so the
    // `*x` deref below is always valid.
    let (commitments, vars): (Vec<_>, Vec<_>) = [a1, a2, b1, b2, c1]
        .iter()
        .map(|x| prover.commit(Scalar::from(*x), Scalar::random(&mut thread_rng())))
        .unzip();
    // 3. Build a CS
    example_gadget(
        &mut prover,
        vars[0].into(),
        vars[1].into(),
        vars[2].into(),
        vars[3].into(),
        vars[4].into(),
        Scalar::from(c2).into(),
    );
    // 4. Make a proof
    let proof = prover.prove(bp_gens)?;
    Ok((proof, commitments))
}
// Verifier logic
/// Verifies a proof produced by `example_gadget_proof` against the same public
/// `c2` and the prover's five commitments.
fn example_gadget_verify(
    pc_gens: &PedersenGens,
    bp_gens: &BulletproofGens,
    c2: u64,
    proof: R1CSProof,
    commitments: Vec<CompressedRistretto>,
) -> Result<(), R1CSError> {
    let mut transcript = Transcript::new(b"R1CSExampleGadget");
    // 1. Create a verifier
    let mut verifier = Verifier::new(&mut transcript);
    // 2. Commit high-level variables
    let vars: Vec<_> = commitments.iter().map(|V| verifier.commit(*V)).collect();
    // 3. Build a CS
    example_gadget(
        &mut verifier,
        vars[0].into(),
        vars[1].into(),
        vars[2].into(),
        vars[3].into(),
        vars[4].into(),
        Scalar::from(c2).into(),
    );
    // 4. Verify the proof
    // `pc_gens`/`bp_gens` are already references; the previous `&pc_gens`
    // relied on deref coercion from `&&PedersenGens`.
    verifier
        .verify(&proof, pc_gens, bp_gens)
        .map_err(|_| R1CSError::VerificationError)
}
/// Proves then verifies the example gadget end-to-end with fresh generators;
/// returns the verifier's result.
fn example_gadget_roundtrip_helper(
a1: u64,
a2: u64,
b1: u64,
b2: u64,
c1: u64,
c2: u64,
) -> Result<(), R1CSError> {
// Common
let pc_gens = PedersenGens::default();
let bp_gens = BulletproofGens::new(128, 1);
let (proof, commitments) = example_gadget_proof(&pc_gens, &bp_gens, a1, a2, b1, b2, c1, c2)?;
example_gadget_verify(&pc_gens, &bp_gens, c2, proof, commitments)
}
/// Same round-trip as `example_gadget_roundtrip_helper`, but additionally
/// serializes the proof to bytes and deserializes it before verifying.
fn example_gadget_roundtrip_serialization_helper(
a1: u64,
a2: u64,
b1: u64,
b2: u64,
c1: u64,
c2: u64,
) -> Result<(), R1CSError> {
// Common
let pc_gens = PedersenGens::default();
let bp_gens = BulletproofGens::new(128, 1);
let (proof, commitments) = example_gadget_proof(&pc_gens, &bp_gens, a1, a2, b1, b2, c1, c2)?;
// Round-trip the proof through its byte encoding.
let proof = proof.to_bytes();
let proof = R1CSProof::from_bytes(&proof)?;
example_gadget_verify(&pc_gens, &bp_gens, c2, proof, commitments)
}
// The first case satisfies (3 + 4) * (6 + 1) = (40 + 9); the second differs
// only in the public `c2`, so verification must fail.
#[test]
fn example_gadget_test() {
// (3 + 4) * (6 + 1) = (40 + 9)
assert!(example_gadget_roundtrip_helper(3, 4, 6, 1, 40, 9).is_ok());
// (3 + 4) * (6 + 1) != (40 + 10)
assert!(example_gadget_roundtrip_helper(3, 4, 6, 1, 40, 10).is_err());
}
#[test]
fn example_gadget_serialization_test() {
// (3 + 4) * (6 + 1) = (40 + 9)
assert!(example_gadget_roundtrip_serialization_helper(3, 4, 6, 1, 40, 9).is_ok());
// (3 + 4) * (6 + 1) != (40 + 10)
assert!(example_gadget_roundtrip_serialization_helper(3, 4, 6, 1, 40, 10).is_err());
}
// Range Proof gadget
/// Enforces that the quantity of v is in the range [0, 2^n).
///
/// Decomposes `v` into `n` allocated bits b_i (computed from `v_assignment`
/// on the prover side; `None` on the verifier side, leaving the wires
/// unassigned) and constrains each b_i to be boolean and
/// v = Sum(b_i * 2^i, i = 0..n-1).
pub fn range_proof<CS: ConstraintSystem>(
cs: &mut CS,
mut v: LinearCombination,
v_assignment: Option<u64>,
n: usize,
) -> Result<(), R1CSError> {
// exp_2 tracks 2^i across iterations.
let mut exp_2 = Scalar::one();
for i in 0..n {
// Create low-level variables and add them to constraints
// Prover supplies the pair (1 - b_i, b_i) for bit i of the assignment.
let (a, b, o) = cs.allocate_multiplier(v_assignment.map(|q| {
let bit: u64 = (q >> i) & 1;
((1 - bit).into(), bit.into())
}))?;
// Enforce a * b = 0, so one of (a,b) is zero
cs.constrain(o.into());
// Enforce that a = 1 - b, so they both are 1 or 0.
cs.constrain(a + (b - 1u64));
// Add `-b_i*2^i` to the linear combination
// in order to form the following constraint by the end of the loop:
// v = Sum(b_i * 2^i, i = 0..n-1)
v = v - b * exp_2;
exp_2 = exp_2 + exp_2;
}
// Enforce that v = Sum(b_i * 2^i, i = 0..n-1)
cs.constrain(v);
Ok(())
}
// Proves/verifies random in-range values for several bit-widths, and checks
// that the first out-of-range value (2^n) fails to verify.
#[test]
fn range_proof_gadget() {
use rand::thread_rng;
use rand::Rng;
let mut rng = thread_rng();
let m = 3; // number of values to test per `n`
for n in [2, 10, 32, 63].iter() {
let (min, max) = (0u64, ((1u128 << n) - 1) as u64);
// NOTE(review): `gen_range(min, max)` is exclusive of `max`, so the top
// in-range value 2^n - 1 is never sampled here — confirm whether the
// boundary was meant to be covered.
let values: Vec<u64> = (0..m).map(|_| rng.gen_range(min, max)).collect();
for v in values {
assert!(range_proof_helper(v.into(), *n).is_ok());
}
assert!(range_proof_helper((max + 1).into(), *n).is_err());
}
}
/// Proves and then verifies that `v_val` lies in `[0, 2^n)`.
fn range_proof_helper(v_val: u64, n: usize) -> Result<(), R1CSError> {
    // Common
    let pc_gens = PedersenGens::default();
    let bp_gens = BulletproofGens::new(128, 1);
    // Prover's scope
    let (proof, commitment) = {
        // Prover makes a `ConstraintSystem` instance representing a range proof gadget
        let mut prover_transcript = Transcript::new(b"RangeProofTest");
        let mut rng = rand::thread_rng();
        let mut prover = Prover::new(&pc_gens, &mut prover_transcript);
        // Commit to the value with a random blinding factor.
        let (com, var) = prover.commit(v_val.into(), Scalar::random(&mut rng));
        assert!(range_proof(&mut prover, var.into(), Some(v_val), n).is_ok());
        let proof = prover.prove(&bp_gens)?;
        (proof, com)
    };
    // Verifier makes a `ConstraintSystem` instance representing a range proof gadget
    // (the old comment said "merge gadget", which was a copy-paste leftover).
    let mut verifier_transcript = Transcript::new(b"RangeProofTest");
    let mut verifier = Verifier::new(&mut verifier_transcript);
    let var = verifier.commit(commitment);
    // Verifier adds constraints to the constraint system
    assert!(range_proof(&mut verifier, var.into(), None, n).is_ok());
    // Verifier verifies proof; returning the Result directly replaces the
    // redundant `Ok(... ?)` wrapping.
    verifier.verify(&proof, &pc_gens, &bp_gens)
}
|
use std::collections::HashMap;
use crate::{
ast::{Expr, FunctionStmt, Stmt},
interpreter::Interpreter,
token::Token,
};
/// Static resolution pass: walks the AST before execution and reports to the
/// interpreter, for each variable use, how many scopes away its declaration is.
pub struct Resolver<'interp> {
interpreter: &'interp mut Interpreter,
// Stack of lexical scopes; each maps a name to "fully initialized yet?".
scopes: Vec<HashMap<String, bool>>,
// Whether we are inside a function/method/initializer (validates `return`).
current_function: FunctionType,
// Whether we are inside a class/subclass (validates `this`/`super`).
current_class: ClassType,
}
impl<'interp> Resolver<'interp> {
    /// Creates a resolver that reports resolution depths to `interpreter`.
    pub fn new(interpreter: &'interp mut Interpreter) -> Self {
        Self {
            interpreter,
            scopes: vec![],
            current_function: FunctionType::None,
            current_class: ClassType::None,
        }
    }

    /// Resolves a list of statements in order.
    pub fn resolve(&mut self, statements: &[Stmt]) {
        for stmt in statements {
            self.resolve_stmt(stmt);
        }
    }

    /// Resolves a single statement, maintaining scope and context state.
    fn resolve_stmt(&mut self, statement: &Stmt) {
        match statement {
            Stmt::Block { statements } => {
                self.begin_scope();
                self.resolve(statements);
                self.end_scope();
            }
            Stmt::Var { name, initializer } => {
                // Declare before resolving the initializer so reading the
                // variable inside its own initializer can be detected.
                self.declare(name);
                if let Some(initializer) = initializer {
                    self.resolve_expr(initializer);
                }
                self.define(name);
            }
            Stmt::Function(fun) => {
                // Define eagerly so a function may refer to itself recursively.
                self.declare(&fun.name);
                self.define(&fun.name);
                self.resolve_function(fun, FunctionType::Function);
            }
            Stmt::Expression { expression } => {
                self.resolve_expr(expression);
            }
            Stmt::If {
                condition,
                then_branch,
                else_branch,
            } => {
                // Resolution is static: both branches are resolved regardless
                // of the runtime condition.
                self.resolve_expr(condition);
                self.resolve_stmt(then_branch);
                if let Some(branch) = else_branch {
                    self.resolve_stmt(branch);
                }
            }
            Stmt::Print { expression } => {
                self.resolve_expr(expression);
            }
            Stmt::Return { value, .. } => {
                if self.current_function == FunctionType::None {
                    todo!("Can't return from top-level code.");
                }
                if self.current_function == FunctionType::Initializer {
                    todo!("Can't return a value from an initializer.");
                }
                if let Some(value) = value {
                    self.resolve_expr(value);
                }
            }
            Stmt::While { condition, body } => {
                self.resolve_expr(condition);
                self.resolve_stmt(body);
            }
            Stmt::Class {
                name,
                superclass,
                methods,
            } => {
                let enclosing_class = self.current_class;
                self.current_class = ClassType::Class;
                self.declare(name);
                self.define(name);
                if let Some(superclass) = superclass {
                    self.current_class = ClassType::Subclass;
                    if name.lexeme == superclass.lexeme {
                        todo!("A class can't inherit from itself.");
                    }
                    self.resolve_expr(&Expr::Variable {
                        name: superclass.clone(),
                    });
                    // Extra scope holding `super` for the methods below.
                    self.begin_scope();
                    self.scopes.last_mut().unwrap().insert("super".into(), true);
                }
                // Scope holding `this` for the method bodies.
                self.begin_scope();
                self.scopes.last_mut().unwrap().insert("this".into(), true);
                for method in methods {
                    let declaration = if method.name.lexeme == "init" {
                        FunctionType::Initializer
                    } else {
                        FunctionType::Method
                    };
                    self.resolve_function(method, declaration);
                }
                self.end_scope();
                if superclass.is_some() {
                    self.end_scope();
                }
                self.current_class = enclosing_class;
            }
        }
    }

    /// Resolves a single expression.
    fn resolve_expr(&mut self, expression: &Expr) {
        match expression {
            Expr::Variable { name } => {
                // `false` means "declared but not yet defined": the variable is
                // being read inside its own initializer.
                if let Some(false) = self.scopes.last().and_then(|it| it.get(&name.lexeme)) {
                    todo!(
                        "Can't read local variable in its own initializer. {}",
                        name.line
                    )
                }
                self.resolve_local(expression, name);
            }
            Expr::Assign { name, value } => {
                self.resolve_expr(value);
                self.resolve_local(expression, name);
            }
            Expr::Call {
                callee, arguments, ..
            } => {
                self.resolve_expr(callee);
                for argument in arguments {
                    self.resolve_expr(argument);
                }
            }
            Expr::Get { object, .. } => {
                // Property names are looked up dynamically; only the object
                // expression needs static resolution.
                self.resolve_expr(object);
            }
            Expr::Set { object, value, .. } => {
                self.resolve_expr(value);
                self.resolve_expr(object);
            }
            Expr::Grouping { expression } => {
                self.resolve_expr(expression);
            }
            Expr::Literal { .. } => {}
            Expr::Logical { left, right, .. } => {
                self.resolve_expr(left);
                self.resolve_expr(right);
            }
            Expr::Binary { left, right, .. } => {
                self.resolve_expr(left);
                self.resolve_expr(right);
            }
            Expr::Unary { right, .. } => {
                self.resolve_expr(right);
            }
            Expr::This { keyword } => {
                if self.current_class == ClassType::None {
                    todo!("Can't use 'this' outside of a class. {}", keyword.line);
                }
                self.resolve_local(expression, keyword);
            }
            Expr::Super { keyword, .. } => {
                if self.current_class == ClassType::None {
                    todo!("Can't use 'super' outside of a class.");
                } else if self.current_class != ClassType::Subclass {
                    todo!("Can't use 'super' with no superclass.");
                }
                self.resolve_local(expression, keyword);
            }
        }
    }

    /// Records how many scopes away `name` resolves for `expression`.
    ///
    /// Bug fix: scopes must be searched from the innermost (last pushed)
    /// outward so that shadowing declarations win. The previous forward
    /// iteration matched the *outermost* scope containing the name, which
    /// resolved shadowed variables to the wrong declaration.
    fn resolve_local(&mut self, expression: &Expr, name: &Token) {
        for (i, scope) in self.scopes.iter().enumerate().rev() {
            if scope.contains_key(&name.lexeme) {
                self.interpreter
                    .resolve(expression, self.scopes.len() - 1 - i);
                return;
            }
        }
        // Not found in any local scope: leave unresolved and let the
        // interpreter treat the name as a global.
    }

    /// Resolves a function body in a fresh scope containing its parameters.
    fn resolve_function(&mut self, fun: &FunctionStmt, kind: FunctionType) {
        let enclosing_function = self.current_function;
        self.current_function = kind;
        self.begin_scope();
        for param in &fun.params {
            self.declare(param);
            self.define(param);
        }
        self.resolve(&fun.body);
        self.end_scope();
        self.current_function = enclosing_function;
    }

    fn begin_scope(&mut self) {
        self.scopes.push(HashMap::new());
    }

    fn end_scope(&mut self) {
        self.scopes.pop();
    }

    /// Marks `name` as declared (but not yet initialized) in the current scope.
    fn declare(&mut self, name: &Token) {
        if let Some(scope) = self.scopes.last_mut() {
            if scope.contains_key(&name.lexeme) {
                todo!("Already variable with this name in this scope.");
            }
            scope.insert(name.lexeme.clone(), false);
        }
    }

    /// Marks `name` as fully initialized in the current scope.
    fn define(&mut self, name: &Token) {
        if let Some(scope) = self.scopes.last_mut() {
            scope.insert(name.lexeme.clone(), true);
        }
    }
}
// Kind of function currently being resolved; used to reject invalid `return`s.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum FunctionType {
None,
Function,
Initializer,
Method,
}
// Kind of class currently being resolved; used to validate `this`/`super`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ClassType {
None,
Class,
Subclass,
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use async::Interval;
use futures::prelude::*;
use zx;
/// Polls `f` once per `retry_interval` tick until it resolves to `Some(value)`,
/// yielding that value; any error produced by `f` is propagated immediately.
///
/// NOTE(review): written against the pre-0.3 futures `Item`/`Error` API; the
/// `Interval` stream is assumed to never terminate (see the `expect` below).
pub fn retry_until<T, E, FUNC, FUT>(retry_interval: zx::Duration, mut f: FUNC)
    -> impl Future<Item = T, Error = E>
    where FUNC: FnMut() -> FUT,
          FUT: Future<Item = Option<T>, Error = E>
{
    Interval::new(retry_interval)
        // Run one attempt of `f` per interval tick.
        .and_then(move |()| f())
        // Keep only `Some` results (passed through as `Ok` per this stream API).
        .filter_map(|x| Ok(x))
        // Take the first success; `next()` resolves to (item, rest-of-stream).
        .next()
        .map_err(|(e, _stream)| e)
        .map(|(x, _stream)| {
            x.expect("Interval stream is not expected to ever end")
        })
}
|
use crate::http;
use crate::settings::global_user::GlobalUser;
use crate::settings::target::Target;
use crate::terminal::{emoji, message};
use serde::{Deserialize, Serialize};
// Request body for the Cloudflare Workers subdomain API.
#[derive(Serialize)]
pub struct Subdomain {
subdomain: String,
}
impl Subdomain {
/// Fetches the account's registered workers.dev subdomain, if any.
///
/// Returns `Ok(None)` when no subdomain is registered; bails with the API
/// status code and body on any non-success response.
pub fn get(account_id: &str, user: &GlobalUser) -> Result<Option<String>, failure::Error> {
let addr = subdomain_addr(account_id);
let client = http::auth_client(None, user);
let mut response = client.get(&addr).send()?;
if !response.status().is_success() {
failure::bail!(
"{} There was an error fetching your subdomain.\n Status Code: {}\n Msg: {}",
emoji::WARN,
response.status(),
response.text()?,
)
}
let response: Response = serde_json::from_str(&response.text()?)?;
Ok(response.result.map(|r| r.subdomain))
}
/// Registers `name` as the account's workers.dev subdomain.
///
/// A 409 response means the requested subdomain is taken; any other failure
/// is surfaced with its status code and body.
pub fn put(name: &str, account_id: &str, user: &GlobalUser) -> Result<(), failure::Error> {
let addr = subdomain_addr(account_id);
let subdomain = Subdomain {
subdomain: name.to_string(),
};
let subdomain_request = serde_json::to_string(&subdomain)?;
let client = http::auth_client(None, user);
let mut response = client.put(&addr).body(subdomain_request).send()?;
if !response.status().is_success() {
let response_text = response.text()?;
log::debug!("Status Code: {}", response.status());
log::debug!("Status Message: {}", response_text);
// 409 Conflict: the subdomain is already taken by someone else.
let msg = if response.status() == 409 {
format!(
"{} Your requested subdomain is not available. Please pick another one.",
emoji::WARN
)
} else {
format!(
"{} There was an error creating your requested subdomain.\n Status Code: {}\n Msg: {}",
emoji::WARN,
response.status(),
response_text
)
};
failure::bail!(msg)
}
message::success(&format!("Success! You've registered {}.", name));
Ok(())
}
}
// Envelope of the Cloudflare API response; `result` is absent/null when the
// account has no registered subdomain.
#[derive(Deserialize)]
struct Response {
result: Option<SubdomainResult>,
}
// Payload carrying the registered subdomain name.
#[derive(Deserialize)]
struct SubdomainResult {
subdomain: String,
}
/// Builds the Cloudflare API endpoint URL for the account's workers subdomain.
fn subdomain_addr(account_id: &str) -> String {
    let mut addr = String::from("https://api.cloudflare.com/client/v4/accounts/");
    addr.push_str(account_id);
    addr.push_str("/workers/subdomain");
    addr
}
/// Announces the registration attempt, then registers `name` via the API.
fn register_subdomain(
    name: &str,
    user: &GlobalUser,
    target: &Target,
) -> Result<(), failure::Error> {
    message::working(&format!(
        "Registering your subdomain, {}.workers.dev, this could take up to a minute.",
        name
    ));
    Subdomain::put(name, &target.account_id, user)
}
/// Registers `name` as the account's workers.dev subdomain, unless the account
/// already has one (Cloudflare allows exactly one subdomain per account).
///
/// Bails if the wrangler.toml has no `account_id`, or if a subdomain (whether
/// equal to `name` or not) is already registered.
pub fn set_subdomain(name: &str, user: &GlobalUser, target: &Target) -> Result<(), failure::Error> {
    if target.account_id.is_empty() {
        failure::bail!(format!(
            "{} You must provide an account_id in your wrangler.toml before creating a subdomain!",
            emoji::WARN
        ))
    }
    let subdomain = Subdomain::get(&target.account_id, user)?;
    if let Some(subdomain) = subdomain {
        let msg = if subdomain == name {
            format!("You have previously registered {}.workers.dev", subdomain)
        } else {
            format!("This account already has a registered subdomain. You can only register one subdomain per account. Your subdomain is {}.workers.dev", subdomain)
        };
        failure::bail!(msg)
    } else {
        // `name`, `user`, and `target` are already references; the previous
        // `&name`/`&user`/`&target` double-borrows only worked via coercion.
        register_subdomain(name, user, target)
    }
}
/// Prints the account's registered workers.dev subdomain, or a user error
/// explaining how to register one.
pub fn get_subdomain(user: &GlobalUser, target: &Target) -> Result<(), failure::Error> {
    match Subdomain::get(&target.account_id, user)? {
        Some(sub) => {
            let msg = format!("{}.workers.dev", sub);
            message::info(&msg);
        }
        None => {
            let msg =
                "No subdomain registered. Use `wrangler subdomain <name>` to register one.".to_string();
            message::user_error(&msg);
        }
    }
    Ok(())
}
|
// Copyright (C) 2019 Boyu Yang
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use jsonrpc_core;
pub use serde_json;
pub use jsonrpc_sdk_macros::jsonrpc_interfaces;
mod error;
mod kernel;
pub use error::{Error, Result};
pub use kernel::{CommonPart, JsonRpcRequest};
|
mod gen_network;
use actix_rt::System;
use bus::{Bus, BusActor};
use chain::ChainActor;
use config::{get_available_port, NodeConfig};
use consensus::dev::DevConsensus;
use crypto::{hash::PlainCryptoHash, keygen::KeyGen};
use futures_timer::Delay;
use gen_network::gen_network;
use libp2p::multiaddr::Multiaddr;
use logger::prelude::*;
use network_api::NetworkService;
use starcoin_genesis::Genesis;
use starcoin_storage::cache_storage::CacheStorage;
use starcoin_storage::storage::StorageInstance;
use starcoin_storage::Storage;
use starcoin_sync::SyncActor;
use starcoin_sync_api::StartSyncTxnEvent;
use starcoin_txpool_api::TxPoolSyncService;
use std::{sync::Arc, time::Duration};
use txpool::TxPool;
use types::{
account_address,
transaction::{authenticator::AuthenticationKey, SignedUserTransaction},
};
/// End-to-end txn-sync test: boots two in-process nodes (each with its own bus,
/// cache storage, genesis, txpool, network, chain, and sync actor), submits a
/// transaction to node 1, connects node 2 to node 1 as a seed peer, triggers a
/// txn sync, and asserts node 2's pool ends up with that same transaction.
#[test]
fn test_txn_sync_actor() {
::logger::init_for_test();
// Separate tokio runtime drives network tasks; the actix System drives the
// test future itself.
let rt = tokio::runtime::Runtime::new().unwrap();
let handle = rt.handle().clone();
let mut system = System::new("test");
let fut = async move {
// first chain
// bus
let bus_1 = BusActor::launch();
// storage
let storage_1 = Arc::new(
Storage::new(StorageInstance::new_cache_instance(CacheStorage::new())).unwrap(),
);
// node config
let mut config_1 = NodeConfig::random_for_test();
config_1.network.listen = format!("/ip4/127.0.0.1/tcp/{}", get_available_port())
.parse()
.unwrap();
let node_config_1 = Arc::new(config_1);
// genesis
let genesis_1 = Genesis::load(node_config_1.net()).unwrap();
let genesis_hash = genesis_1.block().header().id();
let startup_info_1 = genesis_1.execute(storage_1.clone()).unwrap();
let txpool_1 = {
let best_block_id = *startup_info_1.get_master();
TxPool::start(
node_config_1.tx_pool.clone(),
storage_1.clone(),
best_block_id,
bus_1.clone(),
)
};
// network
let (network_1, addr_1, rpc_rx) = gen_network(
node_config_1.clone(),
bus_1.clone(),
handle.clone(),
genesis_hash,
);
debug!("addr_1 : {:?}", addr_1);
// chain
let first_chain = ChainActor::<DevConsensus>::launch(
node_config_1.clone(),
startup_info_1.clone(),
storage_1.clone(),
bus_1.clone(),
txpool_1.get_service(),
)
.unwrap();
// sync
let first_p = Arc::new(network_1.identify().clone().into());
let _first_sync_actor = SyncActor::launch(
node_config_1.clone(),
bus_1.clone(),
first_p,
first_chain.clone(),
txpool_1.get_service(),
network_1.clone(),
storage_1.clone(),
rpc_rx,
)
.unwrap();
// add txn to node1
let user_txn = gen_user_txn();
let import_result = txpool_1
.get_service()
.add_txns(vec![user_txn.clone()])
.pop();
assert!(import_result.unwrap().is_ok());
////////////////////////
// second chain
// bus
let bus_2 = BusActor::launch();
// storage
let storage_2 = Arc::new(
Storage::new(StorageInstance::new_cache_instance(CacheStorage::new())).unwrap(),
);
// node config
let mut config_2 = NodeConfig::random_for_test();
// Point node 2 at node 1's listen address as its seed peer.
let addr_1_hex = network_1.identify().to_base58();
let seed: Multiaddr = format!("{}/p2p/{}", &node_config_1.network.listen, addr_1_hex)
.parse()
.unwrap();
config_2.network.listen = format!("/ip4/127.0.0.1/tcp/{}", config::get_available_port())
.parse()
.unwrap();
config_2.network.seeds = vec![seed];
let node_config_2 = Arc::new(config_2);
let genesis_2 = Genesis::load(node_config_2.net()).unwrap();
let genesis_hash = genesis_2.block().header().id();
let startup_info_2 = genesis_2.execute(storage_2.clone()).unwrap();
// txpool
let txpool_2 = {
let best_block_id = *startup_info_2.get_master();
TxPool::start(
node_config_2.tx_pool.clone(),
storage_2.clone(),
best_block_id,
bus_2.clone(),
)
};
// network
let (network_2, addr_2, rpc_rx_2) = gen_network(
node_config_2.clone(),
bus_2.clone(),
handle.clone(),
genesis_hash,
);
debug!("addr_2 : {:?}", addr_2);
// chain
let second_chain = ChainActor::<DevConsensus>::launch(
node_config_2.clone(),
startup_info_2.clone(),
storage_2.clone(),
bus_2.clone(),
txpool_2.get_service(),
)
.unwrap();
// sync
let second_p = Arc::new(network_2.identify().clone().into());
let _second_sync_actor = SyncActor::<DevConsensus>::launch(
node_config_2.clone(),
bus_2.clone(),
Arc::clone(&second_p),
second_chain.clone(),
txpool_2.get_service(),
network_2.clone(),
storage_2.clone(),
rpc_rx_2,
)
.unwrap();
// give the two nodes time to establish their p2p connection
Delay::new(Duration::from_secs(10)).await;
// make node2 to sync txn
bus_2.clone().broadcast(StartSyncTxnEvent).await.unwrap();
// wait 10s to sync done
Delay::new(Duration::from_secs(10)).await;
// check txn
// Node 2's pool should now hold exactly the transaction submitted to node 1.
let mut txns = txpool_2.get_service().get_pending_txns(None);
assert!(txns.len() == 1);
let txn = txns.pop().unwrap();
assert_eq!(user_txn.crypto_hash(), txn.crypto_hash());
};
system.block_on(fut);
drop(rt);
}
/// Builds a signed transfer-from-association transaction paying a freshly
/// generated random account.
fn gen_user_txn() -> SignedUserTransaction {
let (_private_key, public_key) = KeyGen::from_os_rng().generate_keypair();
let account_address = account_address::from_public_key(&public_key);
let auth_prefix = AuthenticationKey::ed25519(&public_key).prefix().to_vec();
// seq_num 0, amount 10000 — NOTE(review): presumably consistent with the
// genesis association account state; confirm against `executor`'s contract.
let txn = executor::build_transfer_from_association(account_address, auth_prefix, 0, 10000);
txn.as_signed_user_txn().unwrap().clone()
}
|
use crate as mongodb;
// begin lambda connection example 2
use async_once::AsyncOnce;
use lambda_runtime::{service_fn, LambdaEvent};
use lazy_static::lazy_static;
use mongodb::{
bson::doc,
options::{AuthMechanism, ClientOptions, Credential},
Client,
};
use serde_json::Value;
// Initialize a global static MongoDB Client with AWS authentication. The following environment
// variables should also be set: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and, optionally,
// AWS_SESSION_TOKEN.
//
// The client can be accessed as follows:
// let client = MONGODB_CLIENT.get().await;
lazy_static! {
// Global client: built once on first `.get().await` and then shared across
// all (warm) Lambda invocations to avoid reconnecting per event.
static ref MONGODB_CLIENT: AsyncOnce<Client> = AsyncOnce::new(async {
let uri = std::env::var("MONGODB_URI")
.expect("MONGODB_URI must be set to the URI of the MongoDB deployment");
let mut options = ClientOptions::parse(uri)
.await
.expect("Failed to parse options from URI");
// Authenticate via AWS IAM (MONGODB-AWS); credentials are read from the
// environment variables listed in the comment above.
let credential = Credential::builder()
.mechanism(AuthMechanism::MongoDbAws)
.build();
options.credential = Some(credential);
Client::with_options(options).expect("Failed to create MongoDB Client")
});
}
// Runs a ping operation on the "db" database and returns the response.
async fn handler(_: LambdaEvent<Value>) -> Result<Value, lambda_runtime::Error> {
    // Reuse the lazily-initialized global client across warm invocations.
    let client = MONGODB_CLIENT.get().await;
    let db = client.database("db");
    let response = db.run_command(doc! { "ping": 1 }, None).await?;
    Ok(serde_json::to_value(response)?)
}
#[tokio::main]
async fn main() -> Result<(), lambda_runtime::Error> {
    // Hand the handler to the Lambda runtime and serve events until shutdown.
    lambda_runtime::run(service_fn(handler)).await?;
    Ok(())
}
// end lambda connection example 2
// Runs a ping operation on the "db" database and returns the response.
/// Variant of `handler` that creates a fresh `Client` per invocation instead
/// of using the shared global client.
async fn handler_create_client(_: LambdaEvent<Value>) -> Result<Value, lambda_runtime::Error> {
    // Propagate missing-env-var and connection errors through the Lambda error
    // type instead of panicking inside the handler (previously `.unwrap()`).
    let uri = std::env::var("MONGODB_URI")?;
    let client = Client::with_uri_str(uri).await?;
    let response = client
        .database("db")
        .run_command(doc! { "ping": 1 }, None)
        .await?;
    let json = serde_json::to_value(response)?;
    Ok(json)
}
#[cfg_attr(feature = "tokio-runtime", tokio::test(flavor = "multi_thread"))]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
// Integration smoke test: requires MONGODB_URI to point at a reachable deployment.
async fn test_handler() {
let event = LambdaEvent::new(Value::Null, Default::default());
handler_create_client(event).await.unwrap();
}
|
// NOTE(review): this register API appears to be svd2rust-generated output
// (write-only "clear flag" fields of the LPTIM ICR register). Prefer
// regenerating from the SVD over hand-editing; kept byte-identical here,
// review comments only. Each field follows the same pattern: a one-variant
// enum (writing 1 clears the flag), a `From<_> for bool` impl, a
// `BitWriter` alias, and a convenience `clear()` method.
#[doc = "Register `ICR` writer"]
pub type W = crate::W<ICR_SPEC>;
#[doc = "compare match Clear Flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CMPMCFW_AW {
    #[doc = "1: Compare match Clear Flag"]
    Clear = 1,
}
impl From<CMPMCFW_AW> for bool {
    #[inline(always)]
    fn from(variant: CMPMCFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `CMPMCF` writer - compare match Clear Flag"]
pub type CMPMCF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CMPMCFW_AW>;
impl<'a, REG, const O: u8> CMPMCF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Compare match Clear Flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(CMPMCFW_AW::Clear)
    }
}
#[doc = "Autoreload match Clear Flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ARRMCFW_AW {
    #[doc = "1: Autoreload match Clear Flag"]
    Clear = 1,
}
impl From<ARRMCFW_AW> for bool {
    #[inline(always)]
    fn from(variant: ARRMCFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `ARRMCF` writer - Autoreload match Clear Flag"]
pub type ARRMCF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ARRMCFW_AW>;
impl<'a, REG, const O: u8> ARRMCF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Autoreload match Clear Flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(ARRMCFW_AW::Clear)
    }
}
#[doc = "External trigger valid edge Clear Flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EXTTRIGCFW_AW {
    #[doc = "1: External trigger valid edge Clear Flag"]
    Clear = 1,
}
impl From<EXTTRIGCFW_AW> for bool {
    #[inline(always)]
    fn from(variant: EXTTRIGCFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EXTTRIGCF` writer - External trigger valid edge Clear Flag"]
pub type EXTTRIGCF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, EXTTRIGCFW_AW>;
impl<'a, REG, const O: u8> EXTTRIGCF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "External trigger valid edge Clear Flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(EXTTRIGCFW_AW::Clear)
    }
}
#[doc = "Compare register update OK Clear Flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CMPOKCFW_AW {
    #[doc = "1: Compare register update OK Clear Flag"]
    Clear = 1,
}
impl From<CMPOKCFW_AW> for bool {
    #[inline(always)]
    fn from(variant: CMPOKCFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `CMPOKCF` writer - Compare register update OK Clear Flag"]
pub type CMPOKCF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CMPOKCFW_AW>;
impl<'a, REG, const O: u8> CMPOKCF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Compare register update OK Clear Flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(CMPOKCFW_AW::Clear)
    }
}
#[doc = "Autoreload register update OK Clear Flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ARROKCFW_AW {
    #[doc = "1: Autoreload register update OK Clear Flag"]
    Clear = 1,
}
impl From<ARROKCFW_AW> for bool {
    #[inline(always)]
    fn from(variant: ARROKCFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `ARROKCF` writer - Autoreload register update OK Clear Flag"]
pub type ARROKCF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ARROKCFW_AW>;
impl<'a, REG, const O: u8> ARROKCF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Autoreload register update OK Clear Flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(ARROKCFW_AW::Clear)
    }
}
#[doc = "Direction change to UP Clear Flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum UPCFW_AW {
    #[doc = "1: Direction change to up Clear Flag"]
    Clear = 1,
}
impl From<UPCFW_AW> for bool {
    #[inline(always)]
    fn from(variant: UPCFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `UPCF` writer - Direction change to UP Clear Flag"]
pub type UPCF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, UPCFW_AW>;
impl<'a, REG, const O: u8> UPCF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Direction change to up Clear Flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(UPCFW_AW::Clear)
    }
}
#[doc = "Direction change to down Clear Flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DOWNCFW_AW {
    #[doc = "1: Direction change to down Clear Flag"]
    Clear = 1,
}
impl From<DOWNCFW_AW> for bool {
    #[inline(always)]
    fn from(variant: DOWNCFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `DOWNCF` writer - Direction change to down Clear Flag"]
pub type DOWNCF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DOWNCFW_AW>;
impl<'a, REG, const O: u8> DOWNCF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Direction change to down Clear Flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(DOWNCFW_AW::Clear)
    }
}
// Per-field accessors on the ICR writer proxy. Bit positions (0..=6) are
// encoded in the const generic parameter of each `_W` alias.
impl W {
    #[doc = "Bit 0 - compare match Clear Flag"]
    #[inline(always)]
    #[must_use]
    pub fn cmpmcf(&mut self) -> CMPMCF_W<ICR_SPEC, 0> {
        CMPMCF_W::new(self)
    }
    #[doc = "Bit 1 - Autoreload match Clear Flag"]
    #[inline(always)]
    #[must_use]
    pub fn arrmcf(&mut self) -> ARRMCF_W<ICR_SPEC, 1> {
        ARRMCF_W::new(self)
    }
    #[doc = "Bit 2 - External trigger valid edge Clear Flag"]
    #[inline(always)]
    #[must_use]
    pub fn exttrigcf(&mut self) -> EXTTRIGCF_W<ICR_SPEC, 2> {
        EXTTRIGCF_W::new(self)
    }
    #[doc = "Bit 3 - Compare register update OK Clear Flag"]
    #[inline(always)]
    #[must_use]
    pub fn cmpokcf(&mut self) -> CMPOKCF_W<ICR_SPEC, 3> {
        CMPOKCF_W::new(self)
    }
    #[doc = "Bit 4 - Autoreload register update OK Clear Flag"]
    #[inline(always)]
    #[must_use]
    pub fn arrokcf(&mut self) -> ARROKCF_W<ICR_SPEC, 4> {
        ARROKCF_W::new(self)
    }
    #[doc = "Bit 5 - Direction change to UP Clear Flag"]
    #[inline(always)]
    #[must_use]
    pub fn upcf(&mut self) -> UPCF_W<ICR_SPEC, 5> {
        UPCF_W::new(self)
    }
    #[doc = "Bit 6 - Direction change to down Clear Flag"]
    #[inline(always)]
    #[must_use]
    pub fn downcf(&mut self) -> DOWNCF_W<ICR_SPEC, 6> {
        DOWNCF_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Safety (svd2rust convention): caller must ensure the raw value is
    // valid for this register's layout — presumed; confirm against the SVD.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Marker type tying the generic register machinery to the ICR register:
// 32-bit, write-only in practice (only `Writable`/`Resettable` are
// implemented here), reset value 0.
#[doc = "Interrupt Clear Register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`icr::W`](W). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ICR_SPEC;
impl crate::RegisterSpec for ICR_SPEC {
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [`icr::W`](W) writer structure"]
impl crate::Writable for ICR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets ICR to value 0"]
impl crate::Resettable for ICR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use super::*;
use math::*;
use std::marker::PhantomData;
/// One entry of a menu.
pub enum Item<'a, T: Clone + 'a> {
    /// A clickable row: payload returned on click, label text, shortcut text.
    Text(T, &'a str, &'a str),
    /// A thin horizontal divider.
    Separator,
    /// A nested submenu (not yet supported by `Menu::run`).
    Menu(&'a [Item<'a, T>]),
}
/// Colors for one visual state of a text item.
pub struct ItemStyle<D: ?Sized + Graphics> {
    pub label: D::Color,
    pub shortcut: D::Color,
    pub bg: D::Color,
}
/// Visual parameters for a whole menu.
pub struct MenuStyle<D: ?Sized + Graphics> {
    // normal vs. hovered item colors
    pub normal: ItemStyle<D>,
    pub hovered: ItemStyle<D>,
    pub separator: D::Color,
    // layout metrics, in the same units as the draw context
    pub width: f32,
    pub text_height: f32,
    pub text_inset: f32,
    pub sep_height: f32,
    pub sep_inset: f32,
}
/// Outcome of one `Menu::run` frame.
pub enum MenuEvent<T> {
    /// An item was clicked; carries the item's payload.
    Clicked(T),
    /// Menu stays open, nothing happened this frame.
    Nothing,
    /// Cursor left the menu; it should be closed.
    Exit,
}
/// Immediate-mode drop-down menu widget. `T` is the payload type carried by
/// `Item::Text` entries (defaults to `usize`).
pub struct Menu<D: ?Sized + Graphics, T = usize> {
    pub style: MenuStyle<D>,
    // zero-sized: ties the payload type to the widget without storing one
    pub marker: PhantomData<T>,
}
impl<T: Clone, D: ?Sized + Graphics> Menu<D, T> {
    /// Draw the menu below `base_rect` and process one frame of input.
    ///
    /// Items are laid out vertically starting at the bottom-left corner of
    /// `base_rect`. Returns `Clicked` if a text item was released on this
    /// frame, `Exit` if the cursor is outside both the menu and `base_rect`,
    /// otherwise `Nothing`. Also updates `state.active_widget` accordingly.
    pub fn run<'a, 'b, 'c>(&self,
        ctx: &Context<'a, D>, state: &mut UiState,
        id: Id, base_rect: Rect<f32>, items: &'b [Item<'c, T>],
    ) -> MenuEvent<T> {
        // Top-left corner of the next item to lay out.
        let mut min = Point2::new(base_rect.min.x, base_rect.max.y);
        let mut any_hovering = false;
        // Labels left-aligned, shortcuts right-aligned, both vertically centered.
        let label_align = Vector2::new(0.0, 0.5);
        let shortcut_align = Vector2::new(1.0, 0.5);
        let mut event = None;
        for item in items.iter() {
            let rect = match item {
                &Item::Text(ref id, name, shortcut) => {
                    let rect = Rect { min, max: Point2::new(min.x + self.style.width, min.y + self.style.text_height) };
                    // Hover decides the color set; release-while-hovered is a click.
                    let style = if ctx.is_cursor_in_rect(rect) {
                        if ctx.was_released() {
                            event = Some(id.clone());
                        }
                        &self.style.hovered
                    } else {
                        &self.style.normal
                    };
                    ctx.quad(style.bg, rect);
                    let inset = rect.pad_x(self.style.text_inset);
                    ctx.label_rect(inset, label_align, style.label, name);
                    ctx.label_rect(inset, shortcut_align, style.shortcut, shortcut);
                    rect
                }
                &Item::Separator => {
                    let rect = Rect { min, max: Point2::new(min.x + self.style.width, min.y + self.style.sep_height) };
                    ctx.quad(self.style.normal.bg, rect);
                    ctx.quad(self.style.separator, rect.pad_y(self.style.sep_inset));
                    rect
                }
                // Nested submenus are not implemented yet.
                &Item::Menu(_) => unimplemented!(),
            };
            // Advance the layout cursor past the item just drawn.
            min.y += rect.dy();
            any_hovering = any_hovering || ctx.is_cursor_in_rect(rect);
        }
        if let Some(item) = event {
            // A click closes the menu and reports the payload.
            state.active_widget = None;
            MenuEvent::Clicked(item)
        } else if !any_hovering && !ctx.is_cursor_in_rect(base_rect) {
            // Cursor left both the menu body and its anchor: close.
            state.active_widget = None;
            MenuEvent::Exit
        } else {
            // Keep the menu open and claim the active-widget slot.
            state.active_widget = Some(id);
            MenuEvent::Nothing
        }
    }
}
|
use imgui::*;
use crate::{element::{EleAddr, Element}, entity::*};
/// Collect handles to every entity whose name contains `name` as a substring.
/// An empty `name` therefore matches every entity.
fn find_entities(man: &mut Manager, name: &str) -> Vec<EntAddr> {
    man.all_entities()
        .iter()
        .filter(|addr| addr.get_ref().unwrap().name.contains(name))
        .cloned()
        .collect()
}
/// Draw a combo box listing all entities (plus a "(Null)" sentinel) and let
/// the user pick one. On selection, `res` is overwritten with the chosen
/// handle. Returns `true` iff the user changed the selection this frame.
pub fn select_entity(res: &mut EntAddr, label: &str, ui: &Ui, man: &mut Manager) -> bool {
    //let mut search_str = String::new();
    //ui.input_text("Search Entities", &mut search_str).build();
    // Empty filter: list everything. Index 0 is the null ("no entity") slot.
    let mut entities = find_entities(man, "");//&search_str);
    entities.insert(0, EntAddr::new());
    // Display strings: `"name",<first 8 chars of id>` or "(Null)".
    let ents_names: Vec<String>
        =entities.iter()
        .map(|ent| {
            if ent.valid() {
                let r = ent.get_ref().unwrap();
                format!("\"{}\",{}",
                    r.name,
                    r.get_id().to_string().chars().take(8).collect::<String>()
                )
            } else {
                "(Null)".to_string()
            }
        })
        .collect();
    // Pre-select the entry matching the current `res` (by id), or slot 0.
    let mut val: usize
        =entities.iter()
        .position(|e| {
            if !e.valid() && !res.valid() {
                true
            } else if e.valid() && res.valid() {
                e.get_ref().unwrap().get_id() == res.get_ref().unwrap().get_id()
            } else {
                false
            }
        }).unwrap_or(0);
    let selected = ui.combo_simple_string(label, &mut val, ents_names.as_slice());
    if selected {
        *res = entities[val].clone();
    }
    selected
}
/// Like `select_entity`, but the selection resolves to the entity's element
/// of type `T`. Entities lacking a `T` element are still listed, prefixed
/// with "[!]"; picking one stores whatever `query_element_addr` returns for
/// it (presumably a null element handle — confirm against EleAddr semantics).
pub fn select_element<T: Element>(res: &mut EleAddr<T>, label: &str, ui: &Ui, man: &mut Manager) -> bool {
    //let mut search_str = String::new();
    //ui.input_text("Search Entities", &mut search_str).build();
    // Empty filter: list everything. Index 0 is the null ("no entity") slot.
    let mut entities = find_entities(man, "");//&search_str);
    entities.insert(0, EntAddr::new());
    let ents_names: Vec<String>
        =entities.iter()
        .map(|ent| {
            if ent.valid() {
                // Mutable borrow because `query_element` requires it here.
                let mut r = ent.get_ref_mut().unwrap();
                // Warn marker for entities that do not carry a T element.
                let warn = match r.query_element::<T>() {
                    Some(_) => "",
                    None => "[!]"
                };
                format!("{}\"{}\",{}",
                    warn,
                    r.name,
                    r.get_id().to_string().chars().take(8).collect::<String>()
                )
            } else {
                "(Null)".to_string()
            }
        })
        .collect();
    // Pre-select the entry whose entity owns the current element handle.
    let mut val: usize
        =entities.iter()
        .position(|e| {
            if !e.valid() && !res.valid() {
                true
            } else if e.valid() && res.valid() {
                e.get_ref().unwrap().get_id() == res.get_owner().get_ref().unwrap().get_id()
            } else {
                false
            }
        }).unwrap_or(0);
    let selected = ui.combo_simple_string(label, &mut val, ents_names.as_slice());
    if selected {
        *res = entities[val].get_ref_mut().unwrap().query_element_addr::<T>();
    }
    selected
}
#[macro_use] extern crate buildinfo;
/// Demo: print every field the `buildinfo!` macro captured at compile time.
fn main() {
    let info = buildinfo!();
    println!("Target triple: {}", info.target_triple());
    println!("Host triple: {}", info.host_triple());
    println!("Opt level: {}", info.opt_level());
    println!("Debug: {}", info.debug());
    println!("Profile: {}", info.profile());
    println!("Rustc version: {}", info.rustc_version());
    // The remaining fields are optional, hence the Debug (`{:?}`) formatting.
    println!("Compiled at: {:?}", info.compiled_at());
    println!("Git commit: {:?}", info.git_commit());
    println!("Hostname: {:?}", info.hostname());
    println!("Username: {:?}", info.username());
    println!("{:?}", info)
}
|
// NOTE(review): svd2rust-style generated reader for the MBIST_STAT register;
// comments only, code kept byte-identical.
#[doc = "Reader of register MBIST_STAT"]
pub type R = crate::R<u32, super::MBIST_STAT>;
#[doc = "Reader of field `SFP_READY`"]
pub type SFP_READY_R = crate::R<bool, bool>;
#[doc = "Reader of field `SFP_FAIL`"]
pub type SFP_FAIL_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bit 0 - Flag indicating the BIST run is done. Note that after starting a BIST run this flag must be set before a new run can be started. For the first BIST run this will be 0."]
    #[inline(always)]
    pub fn sfp_ready(&self) -> SFP_READY_R {
        // bit 0 of the raw register value
        SFP_READY_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Report status of the BIST run, only valid if SFP_READY=1"]
    #[inline(always)]
    pub fn sfp_fail(&self) -> SFP_FAIL_R {
        // bit 1 of the raw register value
        SFP_FAIL_R::new(((self.bits >> 1) & 0x01) != 0)
    }
}
|
use std::path::PathBuf;
/// Download the vanilla assets on first run.
///
/// NOTE(review): the existence check looks at "./minecraft" while the
/// download target is "../" — presumably the download creates a `minecraft`
/// directory relative to the parent; confirm against `download_vanilla_assets`.
fn main() -> anyhow::Result<()> {
    if !PathBuf::from("./minecraft").exists() {
        vanilla_assets::download_vanilla_assets(&PathBuf::from("../"))?;
    }
    Ok(())
}
|
use std::io::Cursor;
use bitcoinrs_bytes::decode::ReadBuffer;
use bitcoinrs_bytes::encode::WriteBuffer;
use bitcoinrs_bytes::endian::{u32_b, u64_b};
/// SHA-256 operates on 32-bit words.
type Word = u32;
/// The eight-word intermediate/final hash state.
type HashValue = [Word; 8];
/// One 512-bit (16-word) input block.
type MsgBlock = [Word; 16];
type ExpandedMsgBlock = [Word; 64]; // aka message schedule.
/// Compute the SHA-256 digest of `msg`.
///
/// Pipeline: pad the message to a multiple of 64 bytes, split it into
/// 16-word blocks, fold each block through the compression function, then
/// serialize the final 8-word state as 32 big-endian bytes.
pub fn sha256(msg: &[u8]) -> [u8; 32] {
    // Preprocessing
    let bytes = get_padded_bytes(msg);
    let msg_block_iter = MsgBlockIter::new(bytes.as_slice());
    // Computation
    let hash_val = compute_hash(msg_block_iter);
    // Finalize
    parse_into_result(hash_val)
}
/* ===================================== */
/* Preprocessing */
/* ===================================== */
/// Padded messages are a whole number of 512-bit (64-byte) blocks.
const BYTE_SIZE_PADD_BASE: usize = 512 / 8;
/// The trailing message-length field is 64 bits (8 bytes).
const BYTE_SIZE_DATA_LEN: usize = 64 / 8;
/// Build the padded message: `msg || 0x80 || zeros || bit-length (u64 BE)`,
/// sized to a multiple of the 64-byte block size.
fn get_padded_bytes(msg: &[u8]) -> Vec<u8> {
    // Calc after padded size
    let size_zero_padding = size_zero_padding(msg.len());
    let padded_size = msg.len() + 1 + size_zero_padding + 8;
    // Prepare buffer
    let mut bytes = Vec::with_capacity(padded_size);
    bytes.write_bytes(msg);
    // Mandatory single '1' bit immediately after the message.
    bytes.write(0b_1000_0000_u8);
    write_zeros(&mut bytes, size_zero_padding);
    bytes.write(u64_b::new(msg.len() as u64 * 8)); // Length in bits.
    bytes
}
/// Number of zero bytes needed between the 0x80 marker byte and the 8-byte
/// length field so that the padded message is a multiple of the 64-byte
/// block size.
fn size_zero_padding(l: usize) -> usize {
    let used = (l + 1 + BYTE_SIZE_DATA_LEN) % BYTE_SIZE_PADD_BASE;
    // The final `% BYTE_SIZE_PADD_BASE` maps the already-aligned case to 0.
    // The previous code returned a full extra 64-byte zero block whenever
    // `l % 64 == 55`, producing non-standard padding and a wrong digest.
    (BYTE_SIZE_PADD_BASE - used) % BYTE_SIZE_PADD_BASE
}
/// Append `zeros` zero bytes to `vec`.
///
/// The previous implementation called `set_len` over reserved-but-
/// uninitialized capacity, which is undefined behavior and leaves the
/// "zero" padding holding arbitrary bytes; `resize` actually writes zeros.
fn write_zeros(vec: &mut Vec<u8>, zeros: usize) {
    vec.resize(vec.len() + zeros, 0);
}
/// Streams a padded byte slice as successive 16-word big-endian blocks.
struct MsgBlockIter<'a> {
    // Cursor over the padded message bytes.
    msg: Cursor<&'a [u8]>,
}
impl<'a> MsgBlockIter<'a> {
    /// Wrap a (padded) byte slice; `msg.len()` is expected to be a multiple
    /// of 64 so every block is complete.
    pub fn new(msg: &'a [u8]) -> MsgBlockIter<'a> {
        MsgBlockIter {
            msg: Cursor::new(msg),
        }
    }
}
impl<'a> Iterator for MsgBlockIter<'a> {
    type Item = MsgBlock;
    /// Read the next 16 big-endian u32 words; `None` once input is exhausted
    /// (a partial trailing block is silently dropped via `ok()?`).
    fn next(&mut self) -> Option<MsgBlock> {
        let mut msg_block = [0; 16];
        for i in 0..16 {
            msg_block[i] = self.msg.read::<u32_b>().ok()?.value();
        }
        Some(msg_block)
    }
}
/* ===================================== */
/* Computation */
/* ===================================== */
/// Fold every message block through the compression function, starting from
/// the standard SHA-256 initial state.
fn compute_hash(msg_blocks: MsgBlockIter) -> HashValue {
    msg_blocks.fold(INIT_HASH_VAL, |hash, msg_block| {
        compute_next_hash_val(msg_block, hash)
    })
}
/// Sha-256 initial hash value.
const INIT_HASH_VAL: HashValue = [
    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
];
/// The 64 SHA-256 round constants (K) from FIPS 180-4.
const SHA256_CONST_WORDS: [Word; 64] = [
    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
    0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
    0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
    0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
    0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
    0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
    0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
    0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
];
/// One application of the SHA-256 compression function: absorb `msg_block`
/// into `prev_hash` and return the new eight-word state.
fn compute_next_hash_val(msg_block: MsgBlock, prev_hash: HashValue) -> HashValue {
    use self::word_ops::*;
    // Initialize working variables.
    let (mut a, mut b, mut c, mut d, mut e, mut f, mut g, mut h) = (
        prev_hash[0],
        prev_hash[1],
        prev_hash[2],
        prev_hash[3],
        prev_hash[4],
        prev_hash[5],
        prev_hash[6],
        prev_hash[7],
    );
    // Prepare the expanded message block (aka message schedule).
    let expanded_msg_block = get_expand_msg_block(msg_block);
    // Compute working variables: 64 rounds, all arithmetic mod 2^32
    // (hence the `wrapping_add`s).
    for t in 0..64 {
        let t1 = h.wrapping_add(big_sigma_1(e))
            .wrapping_add(choose(e, f, g))
            .wrapping_add(SHA256_CONST_WORDS[t])
            .wrapping_add(expanded_msg_block[t]);
        let t2 = big_sigma_0(a).wrapping_add(majority(a, b, c));
        // Rotate the eight working variables down one slot.
        h = g;
        g = f;
        f = e;
        e = d.wrapping_add(t1);
        d = c;
        c = b;
        b = a;
        a = t1.wrapping_add(t2);
    }
    // Compute next hash value: add the working variables back into the
    // previous state (Davies–Meyer style feed-forward).
    let mut new_hash = [0; 8];
    new_hash[0] = a.wrapping_add(prev_hash[0]);
    new_hash[1] = b.wrapping_add(prev_hash[1]);
    new_hash[2] = c.wrapping_add(prev_hash[2]);
    new_hash[3] = d.wrapping_add(prev_hash[3]);
    new_hash[4] = e.wrapping_add(prev_hash[4]);
    new_hash[5] = f.wrapping_add(prev_hash[5]);
    new_hash[6] = g.wrapping_add(prev_hash[6]);
    new_hash[7] = h.wrapping_add(prev_hash[7]);
    new_hash
}
/// Expand a 16-word block into the 64-word message schedule: the first 16
/// words are the block itself, the rest are derived with the small sigmas.
fn get_expand_msg_block(msg_block: MsgBlock) -> ExpandedMsgBlock {
    let mut expanded = [0; 64];
    expanded[..16].copy_from_slice(&msg_block);
    use self::word_ops::*;
    for t in 16..64 {
        // W[t] = σ1(W[t-2]) + W[t-7] + σ0(W[t-15]) + W[t-16]  (mod 2^32)
        let w = small_sigma_1(expanded[t - 2])
            .wrapping_add(expanded[t - 7])
            .wrapping_add(small_sigma_0(expanded[t - 15]))
            .wrapping_add(expanded[t - 16]);
        expanded[t] = w;
    }
    expanded
}
/// The primitive bitwise functions of FIPS 180-4, specialized to 32-bit words.
mod word_ops {
    use super::Word;
    /// Ch(x, y, z): per bit, take from `y` where `sel` is 1, else from `z`.
    pub fn choose(sel: Word, y: Word, z: Word) -> Word {
        (sel & y) ^ (!sel & z)
    }
    /// Maj(x, y, z): per bit, the majority value of the three inputs.
    pub fn majority(x: Word, y: Word, z: Word) -> Word {
        (x & y) ^ (x & z) ^ (y & z)
    }
    /// Represented as large sigma 0 to 256.
    pub fn big_sigma_0(w: Word) -> Word {
        w.rotate_right(2) ^ w.rotate_right(13) ^ w.rotate_right(22)
    }
    /// Σ1: rotations by 6, 11 and 25 XOR-ed together.
    pub fn big_sigma_1(w: Word) -> Word {
        w.rotate_right(6) ^ w.rotate_right(11) ^ w.rotate_right(25)
    }
    /// σ0 (message schedule): rotations by 7 and 18, shift by 3.
    pub fn small_sigma_0(w: Word) -> Word {
        w.rotate_right(7) ^ w.rotate_right(18) ^ (w >> 3)
    }
    /// σ1 (message schedule): rotations by 17 and 19, shift by 10.
    pub fn small_sigma_1(w: Word) -> Word {
        w.rotate_right(17) ^ w.rotate_right(19) ^ (w >> 10)
    }
}
/* ===================================== */
/*               Finalize                */
/* ===================================== */
fn parse_into_result(hash_val: HashValue) -> [u8; 32] {
let mut res = [0; 32];
for i in 0..8 {
let bytes = u32_to_bytes(hash_val[i]);
(&mut res[i * 4..(i + 1) * 4]).copy_from_slice(&bytes);
}
res
}
/// Big-endian byte serialization of a 32-bit word.
///
/// `u32::to_be_bytes` replaces the previous `unsafe transmute` of
/// `n.to_be()` — the standard library provides this conversion directly,
/// so no unsafe code is needed.
fn u32_to_bytes(n: u32) -> [u8; 4] {
    n.to_be_bytes()
}
|
/// Try to downcast a `JsValue`-like expression via `dyn_into()`, mapping a
/// failed cast to a `JsValue` error string. Expands to a `Result`; the
/// original value is dropped on failure.
#[macro_export]
macro_rules! cast {
    ($e:expr) => {
        match $e.dyn_into() {
            Ok(casted) => Ok(casted),
            Err(_original) => Err(JsValue::from_str("failed to cast JsValue to given type")),
        }
    };
}
/// Convert a `Result<_, JsValue>`-style expression into an `anyhow` result,
/// using the JS error's string form (or a placeholder) as the message.
#[macro_export]
macro_rules! map_err_to_anyhow {
    ($e:expr) => {
        match $e {
            Ok(i) => Ok(i),
            Err(e) => Err(::anyhow::anyhow!(
                "{}",
                e.as_string()
                    .unwrap_or_else(|| "no error message".to_string())
            )),
        }
    };
}
/// Convert an `anyhow`-style result into a `Result<_, JsValue>`, formatting
/// the full error chain (`{:#}`) into the JS error string.
#[macro_export]
macro_rules! map_err_from_anyhow {
    ($e:expr) => {
        match $e {
            Ok(i) => Ok(i),
            Err(e) => Err(JsValue::from_str(&format!("{:#}", e))),
        }
    };
}
|
use aoc::read_data_space_saperator2;
use std::collections::HashMap;
use std::convert::Infallible;
use std::error::Error;
use std::str::FromStr;
/// One cell of a tile image.
#[derive(Clone, Debug, PartialEq)]
enum D {
    /// .
    D,
    /// #
    H,
}
impl std::fmt::Display for D {
    /// Render the cell as the same single character the puzzle input uses.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let glyph = match self {
            D::D => ".",
            D::H => "#",
        };
        write!(f, "{}", glyph)
    }
}
impl D {
    /// Parse one input character; panics on anything but '.' or '#'.
    fn from_char(c: char) -> Self {
        if c == '.' {
            D::D
        } else if c == '#' {
            D::H
        } else {
            panic!("wc")
        }
    }
}
/// One horizontal line of cells in a tile.
#[derive(Clone, Debug, PartialEq)]
struct Row {
    row: Vec<D>,
}
impl Row {
    /// Reverse this row in place.
    fn rotate(&mut self) {
        self.row.reverse();
    }
    /// Return a reversed copy, leaving `self` untouched.
    fn rot(&self) -> Self {
        Self {
            row: self.row.iter().rev().cloned().collect(),
        }
    }
}
impl FromStr for Row {
    type Err = Infallible;
    /// Parse one trimmed input line of '.'/'#' characters into a row.
    /// Cannot fail (invalid characters panic inside `D::from_char`).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Self {
            row: s.trim().chars().map(D::from_char).collect(),
        })
    }
}
impl std::iter::FromIterator<D> for Row {
    /// Collect cells directly into the backing vector.
    fn from_iter<I: IntoIterator<Item = D>>(iter: I) -> Self {
        Self {
            row: iter.into_iter().collect(),
        }
    }
}
impl std::fmt::Display for Row {
    /// Print the row as one '\n'-terminated line of '.'/'#' characters.
    ///
    /// Streams each cell straight to the formatter; the previous version
    /// rebuilt the whole string with `format!` for every cell, which is
    /// O(n²) in the row length.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for d in &self.row {
            write!(f, "{}", d)?;
        }
        writeln!(f)
    }
}
#[derive(Debug, Clone)]
struct Tile {
    /// id
    id: usize,
    /// all data
    data: Vec<Row>,
    // edges (cached copies of the four border rows/columns)
    /// long edge 1 (top row)
    le1: Row,
    /// long edge 2 (bottom row)
    le2: Row,
    /// short edge 1 (left column)
    se1: Row,
    /// short edge 2 (right column)
    se2: Row,
}
impl std::fmt::Display for Tile {
    /// Print `Tile <id>:` followed by every row (rows already end in '\n')
    /// and a trailing blank line, matching the puzzle-input layout.
    ///
    /// Streams rows to the formatter instead of the previous repeated
    /// `format!` concatenation, which was O(n²) in the tile size.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        writeln!(f, "Tile {}:", self.id)?;
        for row in &self.data {
            write!(f, "{}", row)?;
        }
        writeln!(f)
    }
}
impl Tile {
    /// Return a transposed copy of the tile (rows become columns).
    ///
    /// NOTE(review): despite the name this is a transpose, not a mirror
    /// flip; together with `Row::rot` it covers the tile orientations the
    /// neighbour search needs. The cached edge fields are recomputed from
    /// the transposed data.
    fn flip(&self) -> Self {
        let mut res = self.clone();
        res.data = Vec::new();
        // Column i of the original becomes row i of the result.
        for i in 0..self.data[0].row.len() {
            let mut row = Vec::new();
            for d in &self.data {
                row.push(d.row[i].clone());
            }
            res.data.push(Row { row });
        }
        // Rebuild the cached edges from the new orientation.
        res.se1 = res.data.get(0).unwrap().clone();
        res.se2 = res.data.last().unwrap().clone();
        res.le1 = res.data.iter().map(|x| x.row[0].clone()).collect();
        res.le2 = res
            .data
            .iter()
            .map(|x| x.row.last().unwrap().clone())
            .collect();
        res
    }
}
/// Parse the '|'-separated raw records into tiles.
///
/// Each record looks like `|Tile NNNN:|row|row|...`; the id is sliced out
/// of the "Tile NNNN:" segment and the remaining segments become rows.
/// The four edge caches are extracted while building each tile.
fn trans(dataa: Vec<String>) -> Vec<Tile> {
    let mut res = Vec::new();
    for d in dataa {
        let mut id = 0;
        let mut data: Vec<Row> = Vec::new();
        for s in d.split('|') {
            if s == "" {
                continue;
            } else if s.contains("Tile") {
                // "Tile NNNN:" — the id is always the 4 digits at [5..9].
                id = s[5..9].parse().unwrap();
            } else {
                data.push(s.parse().unwrap());
            }
        }
        res.push(Tile {
            id,
            data: data.clone(),
            se1: data.get(0).unwrap().clone(),
            se2: data.last().unwrap().clone(),
            le1: data.iter().map(|x| x.row[0].clone()).collect(),
            le2: data.iter().map(|x| x.row.last().unwrap().clone()).collect(),
        });
    }
    res
}
#[derive(Debug)]
/// Neighbours of a tile: the id of the matching tile on each edge, if any.
struct Ne {
    /// long edge 1
    le1: Option<usize>,
    /// long edge 2
    le2: Option<usize>,
    /// short edge 1
    se1: Option<usize>,
    /// short edge 2
    se2: Option<usize>,
}
impl Ne {
    /// Number of edges that have a matching neighbour.
    fn len_some(&self) -> usize {
        [&self.le1, &self.le2, &self.se1, &self.se2]
            .iter()
            .filter(|slot| slot.is_some())
            .count()
    }
}
/// Record tile `e` as a neighbour of `d` on whichever edge matches, either
/// directly or with the candidate edge reversed (`rot`).
///
/// NOTE(review): the `else if` chain records at most ONE edge match per
/// call, and direct matches shadow reversed ones — presumably fine for
/// this puzzle's unique-edge input; confirm if reused elsewhere.
fn check(ne: &mut Ne, d: &Tile, e: &Tile) {
    if e.le1 == d.le1 || e.le2 == d.le1 {
        ne.le1 = Some(e.id);
    } else if e.le1 == d.le2 || e.le2 == d.le2 {
        ne.le2 = Some(e.id);
    } else if e.se1 == d.se1 || e.se2 == d.se1 {
        ne.se1 = Some(e.id);
    } else if e.se1 == d.se2 || e.se2 == d.se2 {
        ne.se2 = Some(e.id);
    } else if e.le1.rot() == d.le1 || e.le2.rot() == d.le1 {
        ne.le1 = Some(e.id);
    } else if e.le1.rot() == d.le2 || e.le2.rot() == d.le2 {
        ne.le2 = Some(e.id);
    } else if e.se1.rot() == d.se1 || e.se2.rot() == d.se1 {
        ne.se1 = Some(e.id);
    } else if e.se1.rot() == d.se2 || e.se2.rot() == d.se2 {
        ne.se2 = Some(e.id);
    }
}
/// Part 1: multiply the ids of the four corner tiles.
///
/// For every tile, count how many other tiles (in either orientation) share
/// one of its edges; corner tiles are exactly those with two neighbours.
fn p1(data: &[Tile]) -> usize {
    let mut h: HashMap<usize, Ne> = HashMap::new();
    for d in data {
        let mut ne = Ne {
            le1: None,
            le2: None,
            se1: None,
            se2: None,
        };
        for e in data {
            if d.id == e.id {
                continue;
            };
            // Try the candidate as-is and transposed.
            check(&mut ne, d, e);
            let e = e.flip();
            check(&mut ne, d, &e);
        }
        h.insert(d.id, ne);
    }
    // Corners have exactly 2 matched edges; multiply their ids.
    let mut m = 1;
    for (k, v) in h {
        if v.len_some() == 2 {
            m *= k
        }
    }
    m
}
/// Part 2 — not implemented yet; returns a placeholder value.
fn p2(data: &[Tile]) -> i32 {
    5
}
/// Read the day-20 input ('|'-separated tile records) and print both parts.
fn main() -> Result<(), Box<dyn Error>> {
    println!("Hello, Advent Of Code 2020!");
    let data: Vec<String> = read_data_space_saperator2::<String>("./data/data20", '|').unwrap();
    let data = trans(data);
    // part 1
    println!(
        "What do you get if you multiply together the IDs of the four corner tiles? {}",
        p1(&data)
    );
    // part 2
    // NOTE(review): "Fuel needed" looks copy-pasted from another day's
    // puzzle; user-visible string left unchanged here.
    println!("Fuel needed (part 2): {}", p2(&data));
    Ok(())
}
// Manual smoke test: dumps the raw records read from the input file.
#[test]
fn data_read20() {
    println!(
        "{:?}",
        read_data_space_saperator2::<String>("./data/data20", '|').unwrap()
    );
}
// Part 1 against the worked example from the puzzle statement
// (expected corner-id product 20899048083289).
#[test]
fn calc20() {
    let data: Vec<String> = vec![
        "|Tile 2311:|..##.#..#.|##..#.....|#...##..#.|####.#...#|##.##.###.|##...#.###|.#.#.#..##|..#....#..|###...#.#.|..###..###".to_string(),
        "|Tile 1951:|#.##...##.|#.####...#|.....#..##|#...######|.##.#....#|.###.#####|###.##.##.|.###....#.|..#.#..#.#|#...##.#..".to_string(),
        "|Tile 1171:|####...##.|#..##.#..#|##.#..#.#.|.###.####.|..###.####|.##....##.|.#...####.|#.##.####.|####..#...|.....##...".to_string(),
        "|Tile 1427:|###.##.#..|.#..#.##..|.#.##.#..#|#.#.#.##.#|....#...##|...##..##.|...#.#####|.#.####.#.|..#..###.#|..##.#..#.".to_string(),
        "|Tile 1489:|##.#.#....|..##...#..|.##..##...|..#...#...|#####...#.|#..#.#.#.#|...#.#.#..|##.#...##.|..##.##.##|###.##.#..".to_string(),
        "|Tile 2473:|#....####.|#..#.##...|#.##..#...|######.#.#|.#...#.#.#|.#########|.###.#..#.|########.#|##...##.#.|..###.#.#.".to_string(),
        "|Tile 2971:|..#.#....#|#...###...|#.#.###...|##.##..#..|.#####..##|.#..####.#|#..#.#..#.|..####.###|..#.#.###.|...#.#.#.#".to_string(),
        "|Tile 2729:|...#.#.#.#|####.#....|..#.#.....|....#..#.#|.##..##.#.|.#.####...|####.#.#..|##.####...|##..#.##..|#.##...##.".to_string(),
        "|Tile 3079:|#.#.#####.|.#..######|..#.......|######....|####.#..#.|.#...#.##.|#.#####.##|..#.###...|..#.......|..#.###...".to_string()
    ];
    let data = trans(data);
    println!("{:?}", data);
    // part 1
    assert_eq!(p1(&data), 20899048083289);
}
|
use crate::cast_slice::cast_slice;
use gl;
use gl::types::{GLuint, GLint, GLchar, GLenum, GLsizeiptr, GLsizei};
use std;
use std::ffi::CString;
use std::error::Error;
/// RAII wrapper around a GL shader object; deleted on drop.
pub struct GlShader {
    pub handle: GLuint,
}
impl GlShader {
    /// Wrap a freshly created GL shader object of the given type.
    pub fn new(shader_type: GLuint) -> GlShader {
        unsafe {
            GlShader { handle: gl::CreateShader(shader_type) }
        }
    }
    /// Compile `code` as a shader of `shader_type`.
    ///
    /// Returns the shader on success, or the driver's info log as the
    /// error string on compilation failure.
    pub fn compile(code: &str, shader_type: GLenum) -> Result<GlShader, String> {
        let shader;
        unsafe {
            shader = GlShader::new(shader_type);
            // `Error::description` is deprecated; use the Display form.
            let c_code = CString::new(code.as_bytes()).map_err(|e| e.to_string())?;
            gl::ShaderSource(shader.handle, 1, &c_code.as_ptr(), std::ptr::null());
            gl::CompileShader(shader.handle);
            let mut status = gl::FALSE as GLint;
            gl::GetShaderiv(shader.handle, gl::COMPILE_STATUS, &mut status);
            if status != gl::TRUE as GLint {
                // Compilation failed: fetch the info log.
                let mut info_len = 0;
                gl::GetShaderiv(shader.handle, gl::INFO_LOG_LENGTH, &mut info_len);
                // There is always a null character at the end of the info string, this ignores it:
                if info_len > 0 {
                    info_len -= 1;
                }
                // Zero-initialize the buffer; the old `set_len` over
                // uninitialized capacity was undefined behavior.
                let mut buf = vec![0u8; info_len as usize];
                gl::GetShaderInfoLog(
                    shader.handle,
                    info_len,
                    std::ptr::null_mut(),
                    buf.as_mut_ptr() as *mut GLchar,
                );
                return Err(String::from_utf8(buf)
                    .expect("ShaderInfoLog is not a valid UTF8 string"));
            }
        }
        Ok(shader)
    }
}
impl Drop for GlShader {
    /// Release the GL shader object when the wrapper is dropped.
    fn drop(&mut self) {
        unsafe {
            gl::DeleteShader(self.handle);
        }
    }
}
/// RAII wrapper around a linked GL program; keeps its two shaders alive
/// for the program's lifetime and deletes the program on drop.
pub struct GlProgram {
    pub vert_shader: GlShader,
    pub frag_shader: GlShader,
    pub handle: GLuint,
}
impl GlProgram {
    /// Link a vertex + fragment shader pair into a program.
    ///
    /// `attrib_bindings` assigns explicit attribute locations before the
    /// link. On failure the driver's info log is returned as the error.
    /// The program takes ownership of both shaders.
    pub fn link(vert_shader: GlShader, frag_shader: GlShader, attrib_bindings: &[(CString, GLuint)]) -> Result<GlProgram, String> {
        let program_handle = unsafe { gl::CreateProgram() };
        let program = GlProgram {
            vert_shader, frag_shader,
            handle: program_handle,
        };
        unsafe {
            gl::AttachShader(program.handle, program.vert_shader.handle);
            gl::AttachShader(program.handle, program.frag_shader.handle);
            for binding in attrib_bindings {
                gl::BindAttribLocation(program.handle, binding.1, binding.0.as_ptr());
            }
            gl::LinkProgram(program.handle);
            let mut status = gl::FALSE as GLint;
            gl::GetProgramiv(program.handle, gl::LINK_STATUS, &mut status);
            if status != gl::TRUE as GLint {
                // Linking failed: fetch the info log.
                let mut info_len = 0;
                gl::GetProgramiv(program.handle, gl::INFO_LOG_LENGTH, &mut info_len);
                // There is always a null character at the end of the info string, this ignores it:
                if info_len > 0 {
                    info_len -= 1;
                }
                // Zero-initialize the buffer; the old `set_len` over
                // uninitialized capacity was undefined behavior.
                let mut buf = vec![0u8; info_len as usize];
                gl::GetProgramInfoLog(
                    program.handle,
                    info_len,
                    std::ptr::null_mut(),
                    buf.as_mut_ptr() as *mut GLchar,
                );
                return Err(String::from_utf8(buf)
                    .expect("ProgramInfoLog is not a valid UTF8 string"));
            }
        }
        Ok(program)
    }
    /// Make this program current for subsequent draw calls.
    pub fn use_program(&self) {
        unsafe {
            gl::UseProgram(self.handle);
        }
    }
}
impl Drop for GlProgram {
    /// Release the GL program object (attached shaders are released by
    /// their own wrappers' Drop impls).
    fn drop(&mut self) {
        unsafe {
            gl::DeleteProgram(self.handle);
        }
    }
}
/// RAII wrapper around a GL vertex array object; deleted on drop.
pub struct GlVertexArray {
    pub handle: GLuint,
}
impl GlVertexArray {
    /// Generate a new (unbound) vertex array object.
    pub fn new() -> GlVertexArray {
        let mut handle = 0;
        unsafe {
            gl::GenVertexArrays(1, &mut handle);
        }
        GlVertexArray {
            handle
        }
    }
    /// Bind this VAO as the current vertex array.
    pub fn bind(&self) {
        unsafe {
            gl::BindVertexArray(self.handle);
        }
    }
}
impl Drop for GlVertexArray {
    fn drop(&mut self) {
        unsafe {
            gl::DeleteVertexArrays(1, &self.handle);
        }
    }
}
/// RAII wrapper around a GL buffer object plus the target it binds to;
/// deleted on drop.
pub struct GlArrayBuffer {
    pub handle: GLuint,
    pub target: GLenum,
}
impl GlArrayBuffer {
    /// Create a buffer object bound to `target` and upload `data` with the
    /// given usage hint.
    pub fn new(data: &[u8], target: GLenum, usage: GLenum) -> GlArrayBuffer {
        let mut handle = 0;
        unsafe {
            gl::GenBuffers(1, &mut handle);
        }
        let buffer = GlArrayBuffer {
            handle,
            target,
        };
        buffer.bind();
        unsafe {
            gl::BufferData(
                buffer.target,
                data.len() as GLsizeiptr,
                // Plain pointer cast; the old `transmute` of a pointer was
                // an unnecessarily strong tool for a *const u8 -> *const
                // c_void conversion.
                data.as_ptr() as *const _,
                usage,
            );
        }
        buffer
    }
    /// Bind this buffer to its target.
    pub fn bind(&self) {
        unsafe {
            gl::BindBuffer(self.target, self.handle);
        }
    }
}
impl Drop for GlArrayBuffer {
    /// Release the GL buffer object when the wrapper is dropped.
    fn drop(&mut self) {
        unsafe {
            gl::DeleteBuffers(1, &self.handle);
        }
    }
}
/// An ELEMENT_ARRAY buffer of u32 indices plus the index count needed by
/// `glDrawElements`.
pub struct GlIndexBuffer {
    pub array_buffer: GlArrayBuffer,
    pub count: GLsizei,
}
impl GlIndexBuffer {
    /// Upload `indices` as a static element array buffer.
    pub fn new(indices: &[u32]) -> GlIndexBuffer {
        // Reinterpret the u32 slice as raw bytes for the upload.
        let raw_indices = unsafe { cast_slice(indices) };
        let array_buffer = GlArrayBuffer::new(raw_indices, gl::ELEMENT_ARRAY_BUFFER, gl::STATIC_DRAW);
        GlIndexBuffer {
            array_buffer,
            count: indices.len() as GLsizei,
        }
    }
    /// Bind the index buffer and issue an indexed triangle draw.
    pub fn draw(&self) {
        self.array_buffer.bind();
        unsafe {
            gl::DrawElements(gl::TRIANGLES, self.count, gl::UNSIGNED_INT, std::ptr::null());
        }
    }
}
|
use std::process::Command;
use std::env;
use std::path::Path;
fn main() {
let out_dir = env::var("OUT_DIR").ok().expect("can't find out_dir");
Command::new("windres").args(&["src/hello.rc", "-o"])
.arg(&format!("{}/hello.rc.o", out_dir))
.status().unwrap();
Command::new("ar").args(&["crus", "libhello_rc.a", "hello.rc.o"])
.current_dir(&Path::new(&out_dir))
.status().unwrap();
println!("cargo:rustc-link-search=native={}", out_dir);
println!("cargo:rustc-link-lib=static=hello_rc");
} |
use std::collections::HashSet;
use std::fs::{self, File};
use std::io::{self, Read, Write};
use std::net::{SocketAddr, UdpSocket};
use std::path::Path;
use std::str::{self, FromStr};
use std::sync::mpsc;
use std::thread;
use client::*;
use wa_fsp::*;
/// Peer-to-peer file-sharing client: registers local files with a central
/// server and exchanges files with other peers over UDP.
struct FspClient {
    // bound local socket, shared (via try_clone) with the receive thread
    socket: UdpSocket,
    // address of the central registry server (from config.yaml)
    server: SocketAddr,
    // files: HashMap<String, fs::File>,
    // names of files currently present in ./files/
    files: HashSet<String>,
}
impl FspClient {
    /// Build a client: scan ./files/ for shareable files, read the server
    /// address from config.yaml, and bind an ephemeral UDP socket.
    fn new() -> FspClient {
        let mut files = HashSet::new();
        for entry in fs::read_dir(Path::new("./files/")).unwrap() {
            let entry = entry.unwrap();
            files.insert(entry.file_name().into_string().unwrap());
        }
        let server = Server::from_file("config.yaml");
        let server = SocketAddr::new(server.address, server.port);
        let socket =
            UdpSocket::bind("0.0.0.0:0").expect("Could not bind client socket");
        FspClient {
            socket,
            server,
            files,
        }
    }
    /// Main loop: spawn a receiver thread for incoming messages, then read
    /// commands from stdin (":l" lists server files, anything else requests
    /// that file). The mpsc channel signals request completion back to the
    /// input loop (empty string = file not found).
    fn run(&mut self) {
        // register files with server
        self.send_reg();
        let (tx, rx) = mpsc::channel();
        // create a separate thread listening to incomming messages
        let mut buffer: Vec<u8> = Vec::new();
        buffer.resize(BUF_SIZE, 0);
        let c_socket = self.socket.try_clone().unwrap();
        thread::spawn(move || loop {
            if let Ok((bytes_read, src)) = c_socket.recv_from(&mut buffer) {
                let msg: Message = serde_json::from_str(
                    str::from_utf8(&buffer[..bytes_read]).unwrap(),
                )
                .expect("Error parsing message");
                match msg.msg_type {
                    MsgType::List => {
                        // Server's answer to a ":l" listing request.
                        println!("Files registered with the server: ");
                        let filenames: Vec<String> =
                            serde_json::from_str(&msg.content)
                                .expect("Error parsing filenames");
                        for filename in filenames {
                            println!("{}", filename);
                        }
                    }
                    MsgType::FileResp => {
                        // Server's answer to a file request: list of peers.
                        println!("\nReceived list from the server");
                        if !FspClient::handle_file_resp(&c_socket, &msg) {
                            // No peer has the file: unblock the input loop.
                            tx.send(String::new()).unwrap();
                        }
                    }
                    MsgType::FileReq => {
                        // Another peer wants one of our files.
                        println!("\nProcessing request from peer");
                        FspClient::handle_file_req(
                            &c_socket,
                            &msg.content,
                            src,
                        );
                    }
                    MsgType::FileTrans => {
                        // A peer sent us file contents; save and report.
                        println!("\nProcessing file transmission");
                        let filename = FspClient::handle_file_trans(&msg);
                        tx.send(filename).unwrap()
                    }
                    _ => {}
                }
            }
        });
        loop {
            print!("> ");
            io::stdout().flush().unwrap();
            let mut msg = String::new();
            io::stdin().read_line(&mut msg).unwrap();
            let msg = msg.trim();
            if msg.len() > 0 {
                if msg == ":l" {
                    self.req_list();
                } else {
                    if self.req_file(&String::from_str(msg).unwrap()) {
                        // Block until the receiver thread reports back.
                        let filename = rx.recv().unwrap();
                        if filename.len() > 0 {
                            // Got the file: track it and re-register.
                            self.files.insert(filename);
                            self.send_reg();
                        }
                    }
                }
            }
        }
    }
    /// Register the current local file set with the server.
    fn send_reg(&self) {
        // let filenames = self
        //     .files
        //     // .keys()
        //     // .map(|k| k.clone())
        //     .collect::<Vec<String>>();
        let msg = serde_json::to_string(&Message {
            msg_type: MsgType::Register,
            content: serde_json::to_string(&self.files).unwrap(),
        })
        .unwrap();
        self.socket
            .send_to(msg.as_bytes(), self.server)
            .expect("Could not send to server");
    }
    /// Ask the server for the list of all registered files.
    fn req_list(&self) {
        let msg = serde_json::to_string(&Message {
            msg_type: MsgType::List,
            content: String::new(),
        })
        .unwrap();
        self.socket
            .send_to(msg.as_bytes(), self.server)
            .expect("Could not send to server");
    }
    /// Ask the server which peers hold `filename`. Returns false (and does
    /// nothing) if we already have the file locally.
    fn req_file(&self, filename: &String) -> bool {
        if self.files.contains(filename) {
            println!("File already exists locally!");
            return false;
        }
        println!("Requesting file");
        let msg = serde_json::to_string(&Message {
            msg_type: MsgType::FileReq,
            content: filename.clone(),
        })
        .unwrap();
        self.socket
            .send_to(msg.as_bytes(), self.server)
            .expect("Could not send to server");
        true
    }
    /// Handle the server's peer list for a requested file: forward the
    /// request to the first reachable peer. Returns false if no peer holds
    /// the file or none could be contacted.
    fn handle_file_resp(socket: &UdpSocket, msg: &Message) -> bool {
        let (filename, clients): (String, HashSet<SocketAddr>) =
            serde_json::from_str(&msg.content)
                .expect("Cannot parse server response");
        if clients.is_empty() {
            println!("File not found!");
            return false;
        }
        let msg = serde_json::to_string(&Message {
            msg_type: MsgType::FileReq,
            content: filename,
        })
        .unwrap();
        for client in clients {
            match socket.send_to(msg.as_bytes(), client) {
                Ok(_) => {
                    // First successful send wins; that peer will reply
                    // with a FileTrans message.
                    println!("Sending request to {}", client);
                    return true;
                }
                Err(_) => {}
            }
        }
        false
    }
    /// Serve a peer's file request: read ./files/<filename> and send its
    /// contents back as a FileTrans message.
    fn handle_file_req(socket: &UdpSocket, filename: &String, src: SocketAddr) {
        let path = format!("{}{}", "./files/", filename);
        let path = Path::new(&path);
        let mut file = File::open(path).expect("Unable to open file");
        let mut buffer = String::new();
        file.read_to_string(&mut buffer).unwrap();
        let msg = serde_json::to_string(&Message {
            msg_type: MsgType::FileTrans,
            content: serde_json::to_string(&(filename.clone(), buffer))
                .unwrap(),
        })
        .unwrap();
        socket
            .send_to(msg.as_bytes(), src)
            .expect("Unable to send to requesting client");
    }
    /// Store a received file under ./files/ and return its name.
    fn handle_file_trans(msg: &Message) -> String {
        let (filename, content): (String, String) =
            serde_json::from_str(&msg.content)
                .expect("Unable to parse file transmission");
        let path = format!("{}{}", "./files/", filename);
        let mut file =
            File::create(Path::new(&path)).expect("Unable to create file");
        file.write_all(content.as_bytes())
            .expect("Unable to write to files");
        println!("Files successfuly transmitted");
        filename
    }
}
// Entry point: build the file-sharing client (server config + local file
// set) and run its interactive send/receive loop.
fn main() {
    // create client with server config and files
    let mut client = FspClient::new();
    client.run();
}
|
use std::sync::Arc;
use eyre::Report;
use rosu_v2::prelude::GameMode;
use twilight_model::application::{
command::CommandOptionChoice,
interaction::{
application_command::{CommandDataOption, CommandOptionValue},
ApplicationCommand,
},
};
use crate::{
commands::{MyCommand, MyCommandOption},
database::UserStatsColumn,
embeds::{EmbedData, RankingEmbed, RankingKindData},
pagination::{Pagination, RankingPagination},
util::{
constants::{
common_literals::{ACCURACY, CTB, MANIA, OSU, TAIKO},
GENERAL_ISSUE,
},
numbers, ApplicationCommandExt, InteractionExt, MessageExt,
},
BotResult, Context, Error,
};
/// Handles the `/serverleaderboard` slash command.
///
/// Ranks the cached osu! stats of all linked members of the invoking guild
/// by the requested column and responds with a leaderboard embed,
/// paginated when there are more than 20 entries.
pub async fn slash_serverleaderboard(
    ctx: Arc<Context>,
    mut command: ApplicationCommand,
) -> BotResult<()> {
    // Which stat column to rank by, parsed from the command's options.
    let kind = UserStatsColumn::slash(&mut command)?;
    let owner = command.user_id()?;
    let guild_id = command.guild_id.unwrap(); // command is only processed in guilds
    // All member ids of the guild, converted to i64 for the database query.
    let members: Vec<_> = ctx.cache.members(guild_id, |id| id.get() as i64);
    let guild_icon = ctx
        .cache
        .guild(guild_id, |g| g.icon().copied())
        .ok()
        .flatten()
        .map(|icon| (guild_id, icon));
    // The invoker's linked osu! username (if any), used to highlight their
    // row in the embed; a failed config lookup is logged and ignored.
    let name = match ctx.user_config(owner).await {
        Ok(config) => config.into_username(),
        Err(err) => {
            let report = Report::new(err).wrap_err("failed to retrieve user config");
            warn!("{report:?}");
            None
        }
    };
    let leaderboard = match ctx.psql().get_osu_users_stats(kind, &members).await {
        Ok(values) => values,
        Err(why) => {
            let _ = command.error(&ctx, GENERAL_ISSUE).await;
            return Err(why);
        }
    };
    // Position of the command invoker within the leaderboard, if present.
    let author_idx = name.and_then(|name| {
        leaderboard
            .iter()
            .find(|(_, entry)| entry.name == name)
            .map(|(idx, _)| *idx)
    });
    if leaderboard.is_empty() {
        let content = "No user data found for members of this server :(\n\
            There could be three reasons:\n\
            - Members of this server are not linked through the `/link` command\n\
            - Their osu! user stats have not been cached yet. \
            Try using any command that retrieves an osu! user, e.g. `/profile`, in order to cache them.\n\
            - Members of this server are not stored as such. Maybe let bade know :eyes:";
        command.error(&ctx, content).await?;
        return Ok(());
    }
    let data = RankingKindData::UserStats { guild_icon, kind };
    let total = leaderboard.len();
    // 20 entries per page.
    let pages = numbers::div_euclid(20, total);
    // Creating the embed
    let embed_data = RankingEmbed::new(&leaderboard, &data, author_idx, (1, pages));
    let builder = embed_data.into_builder().build().into();
    let response_raw = command.create_message(&ctx, builder).await?;
    // A single page holds everything; no pagination needed.
    if total <= 20 {
        return Ok(());
    }
    let response = response_raw.model().await?;
    // Pagination
    let pagination = RankingPagination::new(
        response,
        Arc::clone(&ctx),
        total,
        leaderboard,
        author_idx,
        data,
    );
    // Drive the pagination in the background (the `60` is presumably a
    // timeout in seconds — confirm against RankingPagination::start).
    tokio::spawn(async move {
        if let Err(err) = pagination.start(&ctx, owner, 60).await {
            warn!("{:?}", Report::new(err));
        }
    });
    Ok(())
}
impl UserStatsColumn {
    /// Parses the slash command's options into the stat column to rank by.
    ///
    /// Expects exactly one subcommand: either `all_modes` or one of the
    /// four game-mode names, each carrying a required `type` choice.
    fn slash(command: &mut ApplicationCommand) -> BotResult<Self> {
        let mut kind = None;
        for option in command.yoink_options() {
            match option.value {
                // Only subcommands are valid at the top level.
                CommandOptionValue::String(_) => return Err(Error::InvalidCommandOptions),
                CommandOptionValue::Integer(_) => return Err(Error::InvalidCommandOptions),
                CommandOptionValue::Boolean(_) => return Err(Error::InvalidCommandOptions),
                CommandOptionValue::SubCommand(options) => match option.name.as_str() {
                    "all_modes" => kind = Some(Self::parse_all_modes_subcommand(options)?),
                    OSU | TAIKO | CTB | MANIA => {
                        // Map the subcommand name to its game mode.
                        let mode = match option.name.as_str() {
                            OSU => GameMode::STD,
                            TAIKO => GameMode::TKO,
                            CTB => GameMode::CTB,
                            MANIA => GameMode::MNA,
                            _ => unreachable!(),
                        };
                        kind = Some(Self::parse_mode_subcommand(mode, options)?);
                    }
                    _ => return Err(Error::InvalidCommandOptions),
                },
                _ => return Err(Error::InvalidCommandOptions),
            }
        }
        kind.ok_or(Error::InvalidCommandOptions)
    }
    /// Parses the `type` option of the `all_modes` subcommand into a
    /// cross-mode stat column.
    fn parse_all_modes_subcommand(options: Vec<CommandDataOption>) -> BotResult<Self> {
        let mut kind = None;
        for option in options {
            kind = match option.value {
                CommandOptionValue::String(value) => match option.name.as_str() {
                    "type" => match value.as_str() {
                        "badges" => Some(UserStatsColumn::Badges),
                        "comments" => Some(UserStatsColumn::Comments),
                        "followers" => Some(UserStatsColumn::Followers),
                        "forum_posts" => Some(UserStatsColumn::ForumPosts),
                        "graveyard_mapsets" => Some(UserStatsColumn::GraveyardMapsets),
                        "join_date" => Some(UserStatsColumn::JoinDate),
                        "loved_mapsets" => Some(UserStatsColumn::LovedMapsets),
                        "mapping_followers" => Some(UserStatsColumn::MappingFollowers),
                        "medals" => Some(UserStatsColumn::Medals),
                        "namechanges" => Some(UserStatsColumn::Usernames),
                        "played_maps" => Some(UserStatsColumn::PlayedMaps),
                        "ranked_mapsets" => Some(UserStatsColumn::RankedMapsets),
                        _ => return Err(Error::InvalidCommandOptions),
                    },
                    _ => return Err(Error::InvalidCommandOptions),
                },
                _ => return Err(Error::InvalidCommandOptions),
            };
        }
        kind.ok_or(Error::InvalidCommandOptions)
    }
    /// Parses the `type` option of a per-mode subcommand into a stat column
    /// bound to `mode`.
    fn parse_mode_subcommand(mode: GameMode, options: Vec<CommandDataOption>) -> BotResult<Self> {
        let mut kind = None;
        for option in options {
            kind = match option.value {
                CommandOptionValue::String(value) => match option.name.as_str() {
                    "type" => match value.as_str() {
                        ACCURACY => Some(UserStatsColumn::Accuracy { mode }),
                        "avg_hits" => Some(UserStatsColumn::AverageHits { mode }),
                        "count_ssh" => Some(UserStatsColumn::CountSsh { mode }),
                        "count_ss" => Some(UserStatsColumn::CountSs { mode }),
                        "count_sh" => Some(UserStatsColumn::CountSh { mode }),
                        "count_s" => Some(UserStatsColumn::CountS { mode }),
                        "count_a" => Some(UserStatsColumn::CountA { mode }),
                        "level" => Some(UserStatsColumn::Level { mode }),
                        "max_combo" => Some(UserStatsColumn::MaxCombo { mode }),
                        "playcount" => Some(UserStatsColumn::Playcount { mode }),
                        "playtime" => Some(UserStatsColumn::Playtime { mode }),
                        "pp" => Some(UserStatsColumn::Pp { mode }),
                        "rank_country" => Some(UserStatsColumn::RankCountry { mode }),
                        "rank_global" => Some(UserStatsColumn::RankGlobal { mode }),
                        "replays" => Some(UserStatsColumn::Replays { mode }),
                        "score_ranked" => Some(UserStatsColumn::ScoreRanked { mode }),
                        "score_total" => Some(UserStatsColumn::ScoreTotal { mode }),
                        "scores_first" => Some(UserStatsColumn::ScoresFirst { mode }),
                        "total_hits" => Some(UserStatsColumn::TotalHits { mode }),
                        _ => return Err(Error::InvalidCommandOptions),
                    },
                    _ => return Err(Error::InvalidCommandOptions),
                },
                _ => return Err(Error::InvalidCommandOptions),
            };
        }
        kind.ok_or(Error::InvalidCommandOptions)
    }
}
/// Shared description text for the `type` option of every subcommand below.
const SPECIFY_KIND: &str = "Specify what kind of leaderboard to show";
/// Builds the options of a single-mode subcommand: one required `type`
/// choice listing every per-mode stat that can be ranked.
fn mode_option() -> Vec<MyCommandOption> {
    // (display name, internal value) pairs, in the order they are shown.
    let entries = [
        ("Accuracy", ACCURACY),
        ("Average hits per play", "avg_hits"),
        ("Count SSH", "count_ssh"),
        ("Count SS", "count_ss"),
        ("Count SH", "count_sh"),
        ("Count S", "count_s"),
        ("Count A", "count_a"),
        ("Country rank", "rank_country"),
        ("Global numbers 1s", "scores_first"),
        ("Global rank", "rank_global"),
        ("Level", "level"),
        ("Max combo", "max_combo"),
        ("Playcount", "playcount"),
        ("Playtime", "playtime"),
        ("PP", "pp"),
        ("Ranked score", "score_ranked"),
        ("Replays watched", "replays"),
        ("Total hits", "total_hits"),
        ("Total score", "score_total"),
    ];
    let choices: Vec<_> = entries
        .iter()
        .map(|&(name, value)| CommandOptionChoice::String {
            name: name.to_owned(),
            value: value.to_owned(),
        })
        .collect();
    vec![MyCommandOption::builder("type", SPECIFY_KIND).string(choices, true)]
}
/// Builds the options of the `all_modes` subcommand: one required `type`
/// choice listing every cross-mode stat that can be ranked.
fn all_modes_option() -> Vec<MyCommandOption> {
    // (display name, internal value) pairs, in the order they are shown.
    let entries = [
        ("Badges", "badges"),
        ("Comments", "comments"),
        ("Followers", "followers"),
        ("Forum posts", "forum_posts"),
        ("Graveyard mapsets", "graveyard_mapsets"),
        ("Join date", "join_date"),
        ("Loved mapsets", "loved_mapsets"),
        ("Mapping followers", "mapping_followers"),
        ("Medals", "medals"),
        ("Namechanges", "namechanges"),
        ("Played maps", "played_maps"),
        ("Ranked mapsets", "ranked_mapsets"),
    ];
    let choices: Vec<_> = entries
        .iter()
        .map(|&(name, value)| CommandOptionChoice::String {
            name: name.to_owned(),
            value: value.to_owned(),
        })
        .collect();
    // Bug fix: the original string read "…to show.Notably:" because the `\`
    // line continuation swallowed the newline and all leading whitespace;
    // a separating space is restored after "show.".
    let help = "Specify what kind of leaderboard to show. \
        Notably:\n\
        - `Comments`: Considers comments on things like osu! articles or mapsets\n\
        - `Played maps`: Only maps with leaderboards count i.e. ranked, loved, or approved maps";
    let kind = MyCommandOption::builder("type", SPECIFY_KIND)
        .help(help)
        .string(choices, true);
    vec![kind]
}
/// Defines the `/serverleaderboard` command: an `all_modes` subcommand plus
/// one subcommand per game mode, each exposing a required `type` choice.
pub fn define_serverleaderboard() -> MyCommand {
    let all_modes = MyCommandOption::builder(
        "all_modes",
        "Various leaderboards across all modes for linked server members",
    )
    .subcommand(all_modes_option());
    let osu = MyCommandOption::builder(OSU, "Various osu!standard leaderboards for linked server members")
        .subcommand(mode_option());
    let taiko = MyCommandOption::builder(TAIKO, "Various osu!taiko leaderboards for linked server members")
        .subcommand(mode_option());
    let ctb = MyCommandOption::builder(CTB, "Various osu!ctb leaderboards for linked server members")
        .subcommand(mode_option());
    let mania = MyCommandOption::builder(MANIA, "Various osu!mania leaderboards for linked server members")
        .subcommand(mode_option());
    let help = "Various osu! leaderboards for linked server members.\n\
        Whenever any command is used that requests an osu! user, the retrieved user will be cached.\n\
        The leaderboards will contain all members of this server that are linked to an osu! username \
        which was cached through some command beforehand.\n\
        Since only the cached data is used, no values are guaranteed to be up-to-date. \
        They're just snapshots from the last time the user was retrieved through a command.\n\n\
        There are three reasons why a user might be missing from the leaderboard:\n\
        - They are not linked through the `/link` command\n\
        - Their osu! user stats have not been cached yet. \
        Try using any command that retrieves the user, e.g. `/profile`, in order to cache them.\n\
        - Members of this server are not stored as such. Maybe let bade know :eyes:";
    MyCommand::new(
        "serverleaderboard",
        "Various osu! leaderboards for linked server members.",
    )
    .help(help)
    .options(vec![all_modes, osu, taiko, ctb, mania])
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::story_manager::StoryManager,
failure::{Error, ResultExt},
fidl_fuchsia_app_discover::{
SessionDiscoverContextRequest, SessionDiscoverContextRequestStream,
StoryDiscoverContextRequest, StoryDiscoverContextRequestStream, SurfaceData,
},
fuchsia_async as fasync,
fuchsia_syslog::macros::*,
futures::prelude::*,
parking_lot::Mutex,
std::sync::Arc,
};
/// Serves the `SessionDiscoverContext` protocol on `stream`.
///
/// Each `GetStoryContext` request spawns a per-story `StoryContextService`
/// backed by the shared `StoryManager`. Returns when the stream closes or
/// errors.
pub async fn run_server(
    mut stream: SessionDiscoverContextRequestStream,
    story_manager: Arc<Mutex<StoryManager>>,
) -> Result<(), Error> {
    while let Some(request) = stream.try_next().await.context("Error running session context")? {
        match request {
            SessionDiscoverContextRequest::GetStoryContext { story_id, request, .. } => {
                // Turn the server end into a request stream and serve it on
                // its own local task; the Arc clone shares the story state.
                let story_context_stream = request.into_stream()?;
                StoryContextService::new(story_id, story_manager.clone())
                    .spawn(story_context_stream);
            }
        }
    }
    Ok(())
}
/// The StoryDiscoverContext protocol implementation.
pub struct StoryContextService {
    /// The story id to which the module belongs.
    story_id: String,
    /// Shared handle to the session-wide story state.
    story_manager: Arc<Mutex<StoryManager>>,
}
impl StoryContextService {
    /// Creates a service instance bound to the story with id `story_id`.
    pub fn new(story_id: impl Into<String>, story_manager: Arc<Mutex<StoryManager>>) -> Self {
        StoryContextService { story_id: story_id.into(), story_manager }
    }
    /// Serves `StoryDiscoverContext` requests on a local executor task.
    ///
    /// Runs until the stream closes; any error is logged rather than
    /// propagated to the caller.
    pub fn spawn(self, mut stream: StoryDiscoverContextRequestStream) {
        fasync::spawn_local(
            async move {
                while let Some(request) = stream
                    .try_next()
                    .await
                    .context(format!("Error running story context for {:?}", self.story_id))?
                {
                    match request {
                        StoryDiscoverContextRequest::GetSurfaceData { surface_id, responder } => {
                            // TODO: actually return the proper data.
                            // NOTE(review): the mutex guard is held across the
                            // awaits below — confirm this cannot stall other
                            // users of the story manager.
                            let manager_lock = self.story_manager.lock();
                            let graph_result = manager_lock.get_story_graph(&self.story_id).await;
                            // Module data recorded for the requested surface.
                            let result = graph_result
                                .as_ref()
                                .and_then(|result| result.get_module_data(&surface_id));
                            match result {
                                None => {
                                    // Unknown surface: respond with empty data.
                                    responder.send(SurfaceData {
                                        action: None,
                                        parameter_types: None,
                                    })?;
                                }
                                Some(ref module_data) => {
                                    responder.send(SurfaceData {
                                        action: module_data.last_intent.action.clone(),
                                        // TODO: story_manager still doesn't contain the outputs
                                        parameter_types: Some(vec![]),
                                    })?;
                                }
                            }
                        }
                        StoryDiscoverContextRequest::SetProperty { key, value, responder } => {
                            let mut story_manager = self.story_manager.lock();
                            story_manager.set_property(&self.story_id, &key, value).await?;
                            // TODO: handle the errors properly in a followup CL.
                            responder.send(&mut Ok(()))?;
                        }
                        StoryDiscoverContextRequest::GetProperty { key, responder } => {
                            let story_manager = self.story_manager.lock();
                            let property = story_manager.get_property(&self.story_id, key).await?;
                            // TODO: handle the errors properly in a followup CL.
                            responder.send(&mut Ok(property))?;
                        }
                    }
                }
                Ok(())
            }
            .unwrap_or_else(|e: Error| fx_log_err!("error serving story context {}", e)),
        )
    }
}
#[cfg(test)]
mod tests {
    use {
        super::*,
        crate::{
            constants::TITLE_KEY, models::AddModInfo, story_manager::StoryManager,
            story_storage::MemoryStorage, utils,
        },
        fidl_fuchsia_app_discover::{SessionDiscoverContextMarker, StoryDiscoverContextMarker},
        fidl_fuchsia_mem::Buffer,
        fuchsia_async as fasync, fuchsia_zircon as zx,
    };
    // GetSurfaceData should return the intent action recorded in the story
    // graph for a known module, with empty parameter types.
    #[fasync::run_singlethreaded(test)]
    async fn story_context_get_surface_data() -> Result<(), Error> {
        // Initialize some fake state.
        let story_id = "my-story".to_string();
        let mod_name = "my-mod".to_string();
        let action_name = "my-action".to_string();
        let story_manager = Arc::new(Mutex::new(StoryManager::new(Box::new(MemoryStorage::new()))));
        let mut action = AddModInfo::new_raw(
            "some-component-url",
            Some(story_id.clone()),
            Some(mod_name.clone()),
        );
        action.intent.action = Some(action_name.clone());
        {
            // Scoped so the lock is released before the server is spawned.
            let mut manager_lock = story_manager.lock();
            manager_lock.add_to_story_graph(&action).await?;
        }
        // Initialize service client and server.
        let (client, request_stream) =
            fidl::endpoints::create_proxy_and_stream::<SessionDiscoverContextMarker>().unwrap();
        fasync::spawn_local(
            async move { run_server(request_stream, story_manager).await }
                .unwrap_or_else(|e: Error| eprintln!("error running server {}", e)),
        );
        // Get the story context
        let (story_context_proxy, server_end) =
            fidl::endpoints::create_proxy::<StoryDiscoverContextMarker>()?;
        assert!(client.get_story_context(&story_id, server_end).is_ok());
        let surface_data = story_context_proxy.get_surface_data(&mod_name).await?;
        assert_eq!(surface_data.action, Some(action_name));
        assert_eq!(surface_data.parameter_types, Some(vec![]));
        Ok(())
    }
    // SetProperty followed by GetProperty should round-trip a value (the
    // story title) through the story manager's storage.
    #[fasync::run_until_stalled(test)]
    async fn test_get_set_property() -> Result<(), Error> {
        let (client, request_stream) =
            fidl::endpoints::create_proxy_and_stream::<SessionDiscoverContextMarker>().unwrap();
        let story_manager_arc =
            Arc::new(Mutex::new(StoryManager::new(Box::new(MemoryStorage::new()))));
        let cloned_story_manager_arc = story_manager_arc.clone();
        fasync::spawn_local(
            async move { run_server(request_stream, cloned_story_manager_arc).await }
                .unwrap_or_else(|e: Error| eprintln!("error running server {}", e)),
        );
        // Get the StoryDiscoverContext connection.
        let (story_discover_context_proxy, server_end) =
            fidl::endpoints::create_proxy::<StoryDiscoverContextMarker>()?;
        assert!(client.get_story_context("story_name", server_end).is_ok());
        // Set the title of the story via SetProperty service
        let data_to_write = "new_title".as_bytes();
        let vmo = zx::Vmo::create(data_to_write.len() as u64)?;
        vmo.write(&data_to_write, 0)?;
        assert!(story_discover_context_proxy
            .set_property(TITLE_KEY, &mut Buffer { vmo, size: data_to_write.len() as u64 })
            .await
            .is_ok());
        // Get the title of the story via GetProperty service
        let returned_title = utils::vmo_buffer_to_string(Box::new(
            story_discover_context_proxy.get_property(TITLE_KEY).await?.unwrap(),
        ))?;
        // Ensure that set & get all succeed
        assert_eq!(returned_title, "new_title".to_string());
        Ok(())
    }
}
|
#![feature(duration_as_u128)]
#![feature(fn_traits)]
#![feature(unboxed_closures)]
extern crate piston_window;
extern crate piston;
extern crate rand;
extern crate euclid;
extern crate conrod;
extern crate rosrust;
#[macro_use]
extern crate rosrust_codegen;
rosmsg_include!();
extern crate roadsim2dlib;
use roadsim2dlib::*;
// mod camera;
// mod car;
// mod simulation;
// mod primitives;
// mod color_utils;
// mod ibeo;
// mod sim_id;
// mod grid;
// mod vehicle_manager;
// mod debouncer;
// mod roads;
use std::time;
use piston_window::*;
// Entry point of the 2D road simulator: sets up the Piston window, camera,
// grid and simulation, then runs the input/update/render loop capped at
// 30 FPS.
fn main() {
    let mut window: PistonWindow =
        WindowSettings::new("carsim2D - ROAD", [640, 480])
        .exit_on_esc(true).build().expect("Unable to create piston application");
    // NOTE(review): id_provider is created but never used in this function —
    // confirm whether it is still needed.
    let id_provider = Box::new(IdProvider::new());
    let mut previous_frame_end_timestamp = time::Instant::now();
    let previous_msg_stamp = time::Instant::now();
    let mut grid = Grid{ enabled: false};
    let mut camera = Camera::new( Vec2f64{x: 0.0, y: 0.0}, 40.0);
    // for e in window.events().ups(60).max_fps(60) {
    let mut simulation = Simulation::new();
    let mut fps_window = window.max_fps(30);
    while let Some(e) = fps_window.next() {
        if let Some(args) = e.press_args() {
            simulation.key_press(args);
        }
        if let Some(args) = e.release_args() {
            simulation.key_release(args);
        }
        if let Some(args) = e.update_args() {
            // Feed current input state to the grid and move the camera.
            grid.update(simulation.get_buttons());
            simulation.update_camera(&mut camera, args.dt, fps_window.draw_size());
        }
        if let Some(_args) = e.render_args() {
            let now = time::Instant::now();
            let dt = now-previous_frame_end_timestamp;
            // NOTE(review): dt_s is computed but unused here — confirm intent.
            let dt_s = (dt.as_millis() as f32)/1000.0f32;
            fps_window.draw_2d(&e, |context, graphics| {
                // Clear to white, apply the camera transform, draw the grid.
                clear([1.0; 4], graphics);
                let mut context = context;
                let new_trans = camera.apply(context.transform);
                context.transform = new_trans;
                grid.draw(context, graphics);
            });
            if (now-previous_msg_stamp).as_secs() >= 1 {
                // let mut msg = msg::ibeo_msgs::ObjectListEcu::default();
            }
            previous_frame_end_timestamp = now;
        }
        // Event::Update(args) => {
        //     //game.update(&args);
        //     ;
        // }
    }
} |
//! Defines data structures of command line arguments.
use clap;
/// Parsed command-line configuration.
#[derive(Debug)]
pub struct Config {
    /// Value of the `src-path` argument, if given.
    pub src_path: Option<String>,
    /// Value of the `main-path` argument, if given.
    pub main_path: Option<String>,
    /// Mod names collected from the `install` subcommand (empty otherwise).
    pub install_mod_names: Vec<String>,
}
impl Config {
    /// Builds a `Config` from the top-level clap matches.
    ///
    /// Recognizes the `install` subcommand (collecting its `mod-name`
    /// values) plus the `src-path` / `main-path` arguments. An unknown or
    /// missing subcommand is logged and yields an empty install list.
    pub fn from_matches(gm: &clap::ArgMatches) -> Self {
        let install_mod_names = match gm.subcommand() {
            // `install` with no argument matches: nothing to do.
            ("install", None) => {
                warn!("Nothing to install");
                Vec::new()
            }
            ("install", Some(sm)) => {
                let mod_names = sm
                    .values_of("mod-name")
                    .into_iter()
                    // `Option<Values>` -> flat iterator of &str
                    // (replaces the non-idiomatic `flat_map(|names| names)`).
                    .flatten()
                    .map(|name| name.to_owned())
                    .collect::<Vec<_>>();
                trace!("install {:?}", mod_names);
                mod_names
            }
            _ => {
                error!("unknown subcommand");
                vec![]
            }
        };
        let src_path = gm.value_of("src-path").map(|s| s.to_owned());
        let main_path = gm.value_of("main-path").map(|s| s.to_owned());
        Config {
            src_path,
            main_path,
            install_mod_names,
        }
    }
}
|
use crate::{
atlas::SpriteSheetInfo,
tiled::{
map::{TiledMap, TiledMapLayerType},
tileset::TiledTileset,
},
};
use serde::Serialize;
use std::{
collections::HashMap,
fs::{read_to_string, write},
io::{Error, ErrorKind},
path::Path,
};
pub mod map;
pub mod tileset;
/// An object from a Tiled object layer, reduced to the fields the output
/// format keeps.
#[derive(Debug, Clone, Serialize)]
struct LayerObject {
    pub name: String,
    /// Tiled "type" string of the object.
    pub object_type: String,
    pub visible: bool,
    /// Position as exported by Tiled (may be negative).
    pub x: isize,
    pub y: isize,
    /// Size as exported by Tiled.
    pub width: usize,
    pub height: usize,
}
/// Payload of a single map layer: either tile ids or objects.
#[derive(Debug, Clone, Serialize)]
enum LayerData {
    /// Flattened global tile ids of a tile layer.
    Tiles(Vec<usize>),
    /// Objects of an object-group layer.
    Objects(Vec<LayerObject>),
}
/// A named map layer together with its data.
#[derive(Debug, Clone, Serialize)]
struct Layer {
    pub name: String,
    pub data: LayerData,
}
/// The serialized output map produced by `build_map`.
#[derive(Debug, Clone, Serialize)]
struct Map {
    /// Grid size in tiles (from the Tiled map's width/height).
    pub cols: usize,
    pub rows: usize,
    /// Tile dimensions (from the Tiled map's tilewidth/tileheight).
    pub tile_width: usize,
    pub tile_height: usize,
    /// Names of all sprite sheets referenced by the map.
    pub sprite_sheets: Vec<String>,
    /// Global tile id -> (sprite sheet name, image/frame name).
    pub tiles_mapping: HashMap<usize, (String, String)>,
    pub layers: Vec<Layer>,
}
/// Builds a binary map blob from a Tiled JSON map file.
///
/// Pipeline: parse the Tiled map, resolve its tilesets to image names,
/// match the used tile images against the given sprite sheets, convert the
/// layers, and bincode-serialize the resulting `Map`.
///
/// Returns an `ErrorKind::Other` error when any file cannot be read or
/// parsed, a layer lacks its expected data, or a referenced image is not
/// found in any sprite sheet.
pub fn build_map<P: AsRef<Path>>(
    input: P,
    spritesheets: &[P],
    full_names: bool,
    quiet: bool,
) -> Result<Vec<u8>, Error> {
    if !quiet {
        println!("* Load Tiled map file: {:?}", input.as_ref());
    }
    let input_json = read_to_string(input.as_ref())?;
    let input_data = match serde_json::from_str::<TiledMap>(&input_json) {
        Ok(data) => data,
        Err(error) => {
            return Err(Error::new(
                ErrorKind::Other,
                format!(
                    "Could not parse JSON input file: {:?}. Error: {:?}",
                    input.as_ref(),
                    error
                ),
            ))
        }
    };
    // Per tileset: (firstgid, [(local tile id, image name)]). Tileset paths
    // are resolved relative to the map file's directory.
    let tilesets_image_map = input_data
        .tilesets
        .iter()
        .map(|t| {
            if !quiet {
                println!("* Load Tiled tileset file: {:?}", &t.source);
            }
            let mut path = input.as_ref().to_path_buf();
            path.pop();
            let path = path.join(&t.source);
            let json = read_to_string(&path)?;
            match serde_json::from_str::<TiledTileset>(&json) {
                Ok(data) => Ok((
                    t.firstgid,
                    data.tiles
                        .iter()
                        .map(|t| {
                            // Either the full image path or just the file name.
                            let name = if full_names {
                                t.image.to_str().unwrap().to_owned()
                            } else {
                                t.image.file_name().unwrap().to_str().unwrap().to_owned()
                            };
                            (t.id, name)
                        })
                        .collect::<Vec<_>>(),
                )),
                Err(error) => Err(Error::new(
                    ErrorKind::Other,
                    format!(
                        "Could not parse JSON tileset file: {:?}. Error: {:?}",
                        &path, error
                    ),
                )),
            }
        })
        .collect::<Result<Vec<_>, _>>()?;
    // Flatten to: global tile id (firstgid + local id) -> image name.
    let image_id_map = tilesets_image_map
        .into_iter()
        .flat_map(|(fid, ts)| {
            ts.into_iter()
                .map(|(id, img)| (fid + id, img))
                .collect::<Vec<_>>()
        })
        .collect::<HashMap<_, _>>();
    // Parse every sprite sheet, keyed by its (full or file) name.
    let spritesheets_data = spritesheets
        .iter()
        .map(|s| {
            if !quiet {
                println!("* Load sprite sheet file: {:?}", s.as_ref());
            }
            let name = if full_names {
                s.as_ref().to_str().unwrap().to_owned()
            } else {
                s.as_ref().file_name().unwrap().to_str().unwrap().to_owned()
            };
            let json = read_to_string(s.as_ref())?;
            match serde_json::from_str::<SpriteSheetInfo>(&json) {
                Ok(data) => Ok((name, data)),
                Err(error) => Err(Error::new(
                    ErrorKind::Other,
                    format!(
                        "Could not parse JSON spritesheet file: {:?}. Error: {:?}",
                        s.as_ref(),
                        error
                    ),
                )),
            }
        })
        .collect::<Result<HashMap<_, _>, _>>()?;
    // Convert each Tiled layer into the output Layer representation; a
    // layer missing its expected data/objects field is an error.
    let layers = input_data
        .layers
        .iter()
        .enumerate()
        .map(|(i, layer)| {
            let data = match layer.layer_type {
                TiledMapLayerType::TileLayer => {
                    if let Some(data) = &layer.data {
                        Ok(LayerData::Tiles(data.clone()))
                    } else {
                        Err(Error::new(
                            ErrorKind::Other,
                            format!("There is no tiles data for layer: {}", i),
                        ))
                    }
                }
                TiledMapLayerType::ObjectGroup => {
                    if let Some(objects) = &layer.objects {
                        Ok(LayerData::Objects(
                            objects
                                .iter()
                                .map(|o| LayerObject {
                                    name: o.name.clone(),
                                    object_type: o.object_type.clone(),
                                    visible: o.visible,
                                    x: o.x,
                                    y: o.y,
                                    width: o.width,
                                    height: o.height,
                                })
                                .collect::<Vec<_>>(),
                        ))
                    } else {
                        Err(Error::new(
                            ErrorKind::Other,
                            format!("There is no objects data for layer: {}", i),
                        ))
                    }
                }
            }?;
            Ok(Layer {
                name: layer.name.clone(),
                data,
            })
        })
        .collect::<Result<Vec<Layer>, Error>>()?;
    // Keep only tile ids actually used by some tile layer, and resolve each
    // image to the sprite sheet whose frames contain it.
    let tiles_mapping = image_id_map
        .into_iter()
        .filter(|(id, _)| {
            layers.iter().any(|layer| {
                if let LayerData::Tiles(data) = &layer.data {
                    data.iter().any(|i| i == id)
                } else {
                    false
                }
            })
        })
        .map(|(id, img)| {
            if let Some((atl, _)) = spritesheets_data
                .iter()
                .find(|(_, s)| s.frames.keys().any(|k| k == &img))
            {
                Ok((id, (atl.clone(), img)))
            } else {
                Err(Error::new(
                    ErrorKind::Other,
                    format!("Could not find image in spritesheets: {:?}", img),
                ))
            }
        })
        .collect::<Result<HashMap<_, _>, _>>()?;
    let map = Map {
        cols: input_data.width,
        rows: input_data.height,
        tile_width: input_data.tilewidth,
        tile_height: input_data.tileheight,
        sprite_sheets: spritesheets_data.keys().cloned().collect::<Vec<_>>(),
        tiles_mapping,
        layers,
    };
    // bincode errors are mapped onto std::io::Error like everything else.
    match bincode::serialize(&map) {
        Ok(bytes) => Ok(bytes),
        Err(error) => Err(Error::new(
            ErrorKind::Other,
            format!("Could not serialize map data: {:?}", error),
        )),
    }
}
/// Builds the binary map from `input` and the given sprite sheets, then
/// writes the resulting bytes to `output`.
pub fn build_map_and_write_to_file<P: AsRef<Path>>(
    input: P,
    output: P,
    spritesheets: &[P],
    full_names: bool,
    quiet: bool,
) -> Result<(), Error> {
    let bytes = build_map(input, spritesheets, full_names, quiet)?;
    write(output.as_ref(), &bytes)?;
    if !quiet {
        println!("  Done! map built to file: {:?}", output.as_ref());
    }
    Ok(())
}
|
#![cfg_attr(not(feature = "with-syntex"), feature(rustc_private, plugin))]
#![cfg_attr(not(feature = "with-syntex"), plugin(quasi_macros))]
#![deny(missing_docs)]
//! nue derive syntax extension.
//!
//! Provides the `#[derive(PodPacked, Pod, NueEncode, NueDecode)]` extensions documented in `nue-macros`.
//!
//! ## Stable
//!
//! See the [syntex documentation](https://github.com/erickt/rust-syntex/blob/master/README.md)
//! for instructions on how to set up your project to use these macros in stable Rust.
//!
//! ## Nightly / Unstable
//!
//! See the example in `nue-macros` for usage as a normal syntax extension.
extern crate aster;
extern crate quasi;
#[cfg(feature = "with-syntex")]
extern crate syntex;
#[cfg(feature = "with-syntex")]
extern crate syntex_syntax as syntax;
#[cfg(not(feature = "with-syntex"))]
extern crate syntax;
#[cfg(not(feature = "with-syntex"))]
extern crate rustc;
#[cfg(feature = "with-syntex")]
include!(concat!(env!("OUT_DIR"), "/lib.rs"));
#[cfg(not(feature = "with-syntex"))]
include!("lib.rs");
/// Registers the plugin for expansion with syntex.
#[cfg(feature = "with-syntex")]
pub fn register(reg: &mut syntex::Registry) {
    use syntax::{ast, fold};
    // Let the custom derive/attribute names appear in user code.
    reg.add_attr("feature(custom_derive)");
    reg.add_attr("feature(custom_attribute)");
    // Modifiers rewrite the annotated item; decorators append derived items.
    reg.add_modifier("packed", expand_packed);
    reg.add_modifier("derive_PodPacked", expand_derive_pod_packed);
    reg.add_decorator("derive_Packed", expand_derive_packed);
    reg.add_decorator("derive_Pod", expand_derive_pod);
    reg.add_decorator("derive_NueEncode", expand_derive_encode);
    reg.add_decorator("derive_NueDecode", expand_derive_decode);
    reg.add_post_expansion_pass(strip_attributes);
    // Removes the internal helper attributes after expansion so the
    // generated crate compiles without them.
    #[cfg(feature = "with-syntex")]
    fn strip_attributes(krate: ast::Crate) -> ast::Crate {
        struct StripAttributeFolder;
        impl fold::Folder for StripAttributeFolder {
            fn fold_attribute(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {
                match attr.node.value.node {
                    // Drop the `__nue_packed` marker and `nue*` config lists.
                    ast::MetaWord(ref n) if *n == "__nue_packed" => { return None; },
                    ast::MetaList(ref n, _) if *n == "nue" || *n == "nue_enc" || *n == "nue_dec" => { return None; },
                    _ => {}
                }
                Some(attr)
            }
            fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
                fold::noop_fold_mac(mac, self)
            }
        }
        fold::Folder::fold_crate(&mut StripAttributeFolder, krate)
    }
}
#[doc(hidden)]
#[cfg(not(feature = "with-syntex"))]
pub fn register(reg: &mut rustc::plugin::Registry) {
    // Nightly path: register the same extensions directly with the compiler
    // plugin registry instead of syntex.
    reg.register_syntax_extension(
        syntax::parse::token::intern("packed"),
        syntax::ext::base::MultiModifier(
            Box::new(expand_packed)
        )
    );
    reg.register_syntax_extension(
        syntax::parse::token::intern("derive_Packed"),
        syntax::ext::base::MultiDecorator(
            Box::new(expand_derive_packed)
        )
    );
    reg.register_syntax_extension(
        syntax::parse::token::intern("derive_PodPacked"),
        syntax::ext::base::MultiModifier(
            Box::new(expand_derive_pod_packed)
        )
    );
    reg.register_syntax_extension(
        syntax::parse::token::intern("derive_Pod"),
        syntax::ext::base::MultiDecorator(
            Box::new(expand_derive_pod)
        )
    );
    reg.register_syntax_extension(
        syntax::parse::token::intern("derive_NueEncode"),
        syntax::ext::base::MultiDecorator(
            Box::new(expand_derive_encode)
        )
    );
    reg.register_syntax_extension(
        syntax::parse::token::intern("derive_NueDecode"),
        syntax::ext::base::MultiDecorator(
            Box::new(expand_derive_decode)
        )
    );
}
|
pub use {
any_bin::*, bin::*, bin_builder::*, bin_segment::*, excess_shrink::*, factory::*, into_iter::*,
s_bin::*,
};
mod any_bin;
mod bin;
mod bin_builder;
mod bin_segment;
mod excess_shrink;
mod factory;
mod into_iter;
mod s_bin;
|
use std::collections::HashMap;
/// Plays the "memory game" (Van Eck sequence): after the seed numbers,
/// each number spoken is the gap since the previous number was last said,
/// or 0 if it is new. Returns the number spoken on turn `final_turn`
/// (1-indexed).
fn play(input: &[i32], final_turn: usize) -> i32 {
    // Map every seed number except the last to the 0-indexed turn on which
    // it was spoken.
    let mut seen: HashMap<i32, usize> = input
        .iter()
        .take(input.len() - 1)
        .enumerate()
        .map(|(turn, &n)| (n, turn))
        .collect();
    let mut last_spoken = *input.last().unwrap();
    for turn in input.len()..final_turn {
        // `insert` records that `last_spoken` was said on turn - 1 and
        // returns the turn it was previously said on, if any.
        last_spoken = match seen.insert(last_spoken, turn - 1) {
            Some(earlier) => (turn - 1 - earlier) as i32,
            None => 0,
        };
    }
    last_spoken
}
/// Part 1: print the number spoken on turn 2020.
fn part1(input: &[i32]) {
    println!("{}", play(input, 2020));
}
/// Part 2: print the number spoken on turn 30,000,000.
fn part2(input: &[i32]) {
    println!("{}", play(input, 30_000_000));
}
/// Entry point: run both puzzle parts against the fixed input.
fn main() {
    let starting_numbers = [7, 14, 0, 17, 11, 1, 2];
    part1(&starting_numbers);
    part2(&starting_numbers);
}
|
use structopt::StructOpt;
use std::{fs::File, io::prelude::*, io::BufReader, io::Write};
use failure::ResultExt;
use exitfailure::ExitFailure;
// Command-line arguments. The `///` field comments double as --help text
// via StructOpt, so they are left exactly as-is.
#[derive(StructOpt, Debug)]
struct Cli {
    /// The pattern to look for
    pattern: String,
    /// The path to the file to read
    #[structopt(parse(from_os_str))]
    path: std::path::PathBuf,
}
fn main() -> Result<(), ExitFailure> {
let args = Cli::from_args();
let content = File::open(&args.path).with_context(|_| format!("Could not read the file {:?}", &args.path))?;
let reader = BufReader::new(content);
let stdout = std::io::stdout();
let mut handle = std::io::BufWriter::new(stdout);
for line in reader.lines() {
writeln!(handle, "{}", line.unwrap())?;
}
handle.flush().unwrap();
Ok(())
}
|
use {data::semantics::Semantics, proc_macro2::TokenStream, quote::quote};
impl Semantics {
	/// Emits the client-side (wasm) render functions as a token stream to be
	/// spliced into generated code. Names used inside `quote!` (Group,
	/// HtmlElement, Property, CLASSES, STATE, ...) must exist at the
	/// expansion site. Plain `//` comments inside `quote!` are discarded by
	/// tokenization, so they do not change the generated code.
	pub fn runtime_render_functions() -> TokenStream {
		quote! {
			// Creates one DOM node per element of `group` and recurses into
			// each child group.
			fn render_elements(
				group: &Group,
				parent: &mut HtmlElement,
				classes: &mut HashMap<&'static str, Group>,
			) {
				let window = web_sys::window().unwrap();
				let document = &window.document().unwrap();
				for element in &group.elements {
					// Elements with a Link property render as anchors.
					let tag = if element.properties.get(&Property::Link).is_some() {
						"a"
					} else {
						"div"
					};
					let mut child = &mut document
						.create_element(tag)
						.unwrap()
						.dyn_into::<HtmlElement>()
						.unwrap();
					child.set_class_name(&*element.class_names.join(" "));
					parent.append_child(&child).unwrap();
					render_classes(element, classes);
					render_listeners(element, child);
					render_properties(element, child);
					// TODO: verify if tail recursion is a thing
					render_elements(element, child, classes);
				}
			}
			// Registers `group`'s classes, then (re)renders every DOM node
			// currently carrying each class selector.
			fn render_classes(
				group: &Group,
				classes: &mut HashMap<&'static str, Group>,
			) {
				let window = web_sys::window().unwrap();
				let document = &window.document().unwrap();
				register_classes(group, classes);
				for class in &group.classes {
					let elements = document.get_elements_by_class_name(class.selector);
					for i in 0..elements.length() {
						let element = &mut elements
							.item(i)
							.unwrap()
							.dyn_into::<HtmlElement>()
							.unwrap();
						render_elements(class, element, classes);
						render_listeners(class, element);
						render_properties(class, element);
					}
					// TODO: verify if tail recursion is a thing
					render_classes(class, classes);
				}
			}
			// Attaches event listeners; each closure re-renders its group
			// (elements, listeners, properties) when the event fires.
			fn render_listeners(
				group: &Group,
				target: &mut HtmlElement
			) {
				let window = web_sys::window().unwrap();
				let document = &window.document().unwrap();
				for listener in &group.listeners {
					let closure = {
						let mut element = target.clone();
						let group = listener.clone();
						Closure::wrap(Box::new(move |e: Event| {
							e.stop_propagation();
							let window = web_sys::window().unwrap();
							let document = window.document().unwrap();
							CLASSES.with(|classes| {
								let mut classes = classes.borrow_mut();
								// TODO: does this make sense if something else changes the group?
								render_classes(&group, &mut classes);
								render_elements(&group, &mut element, &mut classes);
								render_listeners(&group, &mut element);
								render_properties(&group, &mut element);
							});
						}) as Box<dyn FnMut(Event)>)
					};
					target
						.add_event_listener_with_callback(
							listener.selector,
							closure.as_ref().unchecked_ref()
						)
						.unwrap();
					// Leak the closure so the JS side can keep invoking it.
					closure.forget();
				}
			}
			// Applies the group's properties (CSS, text, ...) to one node.
			fn render_properties(group: &Group, element: &mut HtmlElement) {
				for (property, value) in &group.properties {
					match property {
						Property::Css(property) => element.css(property, value),
						Property::Link => (),
						Property::Text => element.text(value),
						Property::Tooltip => (),
						Property::Image => (),
					}
				}
			}
			// Copies the group's variable values into the global STATE.
			fn render_variables(group: &Group, element: &mut HtmlElement) {
				STATE.with(|state| {
					let mut state = state.borrow_mut();
					for (id, value) in &group.variables {
						state[*id] = value.clone();
					}
				})
			}
		}
	}
}
|
impl Solution {
pub fn first_bad_version(&self, n: i32) -> i32 {
let (mut l,mut r) = (1,n);
while l < r {
let mid = ((l as i64 + r as i64) >> 1) as i32;
if self.isBadVersion(mid){
r = mid;
}else{
l = mid + 1;
}
}
l
}
} |
// svd2rust-style generated reader/writer aliases for the ADC OFR2 register.
#[doc = "Register `OFR2` reader"]
pub type R = crate::R<OFR2_SPEC>;
#[doc = "Register `OFR2` writer"]
pub type W = crate::W<OFR2_SPEC>;
#[doc = "Field `OFFSET2` reader - Data offset 2 for the channel programmed into bits OFFSET2_CH"]
pub type OFFSET2_R = crate::FieldReader<u16>;
#[doc = "Field `OFFSET2` writer - Data offset 2 for the channel programmed into bits OFFSET2_CH"]
pub type OFFSET2_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 12, O, u16>;
#[doc = "Field `OFFSET2_CH` reader - Channel selection for the Data offset 2"]
pub type OFFSET2_CH_R = crate::FieldReader;
#[doc = "Field `OFFSET2_CH` writer - Channel selection for the Data offset 2"]
pub type OFFSET2_CH_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 5, O>;
#[doc = "Field `OFFSET2_EN` reader - Offset 2 Enable"]
pub type OFFSET2_EN_R = crate::BitReader<OFFSET2_EN_A>;
#[doc = "Offset 2 Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OFFSET2_EN_A {
    #[doc = "0: Offset disabled"]
    Disabled = 0,
    #[doc = "1: Offset enabled"]
    Enabled = 1,
}
// Lets the generic BitReader/BitWriter machinery treat the enum as one bit.
impl From<OFFSET2_EN_A> for bool {
    #[inline(always)]
    fn from(variant: OFFSET2_EN_A) -> Self {
        variant as u8 != 0
    }
}
impl OFFSET2_EN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> OFFSET2_EN_A {
        // The field is a single bit: set means enabled.
        if self.bits {
            OFFSET2_EN_A::Enabled
        } else {
            OFFSET2_EN_A::Disabled
        }
    }
    #[doc = "Offset disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        matches!(self.variant(), OFFSET2_EN_A::Disabled)
    }
    #[doc = "Offset enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        matches!(self.variant(), OFFSET2_EN_A::Enabled)
    }
}
#[doc = "Field `OFFSET2_EN` writer - Offset 2 Enable"]
pub type OFFSET2_EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, OFFSET2_EN_A>;
// Convenience setters so callers can write `w.offset2_en().enabled()` rather
// than passing the enum variant explicitly.
impl<'a, REG, const O: u8> OFFSET2_EN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Offset disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(OFFSET2_EN_A::Disabled)
    }
    #[doc = "Offset enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(OFFSET2_EN_A::Enabled)
    }
}
// Field accessors extracting each field from the raw 32-bit register value.
impl R {
    #[doc = "Bits 0:11 - Data offset 2 for the channel programmed into bits OFFSET2_CH"]
    #[inline(always)]
    pub fn offset2(&self) -> OFFSET2_R {
        // 12-bit field in the low bits.
        OFFSET2_R::new((self.bits & 0x0fff) as u16)
    }
    #[doc = "Bits 26:30 - Channel selection for the Data offset 2"]
    #[inline(always)]
    pub fn offset2_ch(&self) -> OFFSET2_CH_R {
        // 5-bit field starting at bit 26.
        OFFSET2_CH_R::new(((self.bits >> 26) & 0x1f) as u8)
    }
    #[doc = "Bit 31 - Offset 2 Enable"]
    #[inline(always)]
    pub fn offset2_en(&self) -> OFFSET2_EN_R {
        OFFSET2_EN_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Field writer proxies; the const generic is the field's bit offset.
impl W {
    #[doc = "Bits 0:11 - Data offset 2 for the channel programmed into bits OFFSET2_CH"]
    #[inline(always)]
    #[must_use]
    pub fn offset2(&mut self) -> OFFSET2_W<OFR2_SPEC, 0> {
        OFFSET2_W::new(self)
    }
    #[doc = "Bits 26:30 - Channel selection for the Data offset 2"]
    #[inline(always)]
    #[must_use]
    pub fn offset2_ch(&mut self) -> OFFSET2_CH_W<OFR2_SPEC, 26> {
        OFFSET2_CH_W::new(self)
    }
    #[doc = "Bit 31 - Offset 2 Enable"]
    #[inline(always)]
    #[must_use]
    pub fn offset2_en(&mut self) -> OFFSET2_EN_W<OFR2_SPEC, 31> {
        OFFSET2_EN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must supply a value valid for this register.
        // NOTE(review): bits 12:25 have no declared field here — presumably
        // reserved; confirm against the device datasheet before setting them.
        self.bits = bits;
        self
    }
}
#[doc = "ADC offset register2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ofr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ofr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct OFR2_SPEC;
// The register is 32 bits wide.
impl crate::RegisterSpec for OFR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ofr2::R`](R) reader structure"]
impl crate::Readable for OFR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ofr2::W`](W) writer structure"]
impl crate::Writable for OFR2_SPEC {
    // No write-1-to-clear / write-0-to-set fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets OFR2 to value 0"]
impl crate::Resettable for OFR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::circular_buffer::CircularBuffer;
use std::fmt;
// Cursor state for an in-progress match.
struct Indexes {
    i: usize,           // next index in `pattern` expected to match
    match_count: usize, // consecutive full-pattern matches seen so far
    min_match: usize,   // repeats required before reporting (0 = every match)
}
// Streaming byte-pattern matcher with an optional lookback buffer of
// non-matching bytes and an optional repeat-count threshold (both configured
// via the builder methods `lookback` / `repeats`).
pub struct Pattern<'a> {
    pattern: &'a [u8],
    lookback: CircularBuffer,
    idx: Indexes,
}
impl<'a> Pattern<'a> {
    /// Creates a matcher for `pattern` with no lookback buffer and no
    /// minimum repeat count.
    pub fn new(pattern: &'a [u8]) -> Self {
        Pattern {
            pattern,
            lookback: CircularBuffer::new(0),
            idx: Indexes {
                i: 0,
                match_count: 0,
                min_match: 0,
            },
        }
    }
    /// Feeds one byte into the matcher.
    ///
    /// Returns `Some(total_matched_len)` when a full pattern — and, if
    /// `repeats` was configured, at least `min_match` consecutive copies —
    /// has just been consumed; `None` otherwise. Non-matching bytes go into
    /// the lookback buffer.
    pub fn push(&mut self, c: u8) -> Option<isize> {
        if self.pattern.is_empty() {
            return None;
        }
        let l = self.pattern.len();
        let i = &mut self.idx.i;
        let looking_back = !self.lookback.is_empty();
        // println!(">>> {} ({:#?})", c as char, self.lookback);
        // no match, reset and return
        if c != self.pattern[*i] as u8 {
            if looking_back {
                // Replay the fully matched copies that fell short of
                // `min_match`, so the lookback reflects the raw input.
                for _ in 0..self.idx.match_count {
                    for p in self.pattern.iter() {
                        self.lookback.push(*p);
                    }
                }
                // push the half-backed pattern we were matching
                for j in 0..(*i) {
                    self.lookback.push(self.pattern[j]);
                }
            }
            self.idx.match_count = 0;
            if c != self.pattern[0] as u8 {
                *i = 0;
                self.lookback.push(c);
                return None;
            }
            // `c` restarts a match from the pattern's first byte.
            *i = 0;
        }
        // matching, but still didn't consume pattern
        if (*i + 1) < (l) {
            *i += 1;
            return None;
        }
        *i = 0;
        // matched, we don't look for repeats
        if self.idx.min_match == 0 {
            return Some(l as isize);
        }
        // repeats handling
        self.idx.match_count += 1;
        // println!("MATCH COUNT: {}", self.idx.match_count);
        if self.idx.match_count < self.idx.min_match {
            return None;
        }
        Some((l * (self.idx.match_count)) as isize)
    }
    /// Read-only view of the lookback buffer; warns on stdout when a partial
    /// match is still pending (and therefore missing from the buffer).
    pub fn _ro_get(&'a self) -> &'a [u8] {
        let buf = &self.lookback;
        if !(0..(self.idx.i)).is_empty() {
            println!("WE ARE MISSING DATA ! you should use .get()");
        }
        &buf[0..buf.len()]
    }
    /// Flushes any half-matched pattern prefix into the lookback buffer and
    /// returns the buffer's contents.
    pub fn get(&'a mut self) -> &'a [u8] {
        let buf = &self.lookback;
        if buf.is_empty() {
            return &[];
        }
        for j in 0..(self.idx.i) {
            self.lookback.push(self.pattern[j]);
        }
        /* println!(
            "GET self.buffer: {:#?}, self.pattern: {:#?}",
            self.buffer, self.pattern
        );*/
        self._ro_get()
    }
    /// Builder: keep the last `p` non-matching bytes in a lookback window.
    pub fn lookback(mut self, p: usize) -> Self {
        self.lookback = CircularBuffer::new(p);
        self
    }
    /// Builder: only report a match after `r` consecutive repeats.
    pub fn repeats(mut self, r: usize) -> Self {
        self.idx.min_match = r;
        self
    }
}
/// Renders a byte slice as space-terminated lowercase hex pairs, e.g. "0a ff ".
fn to_str(a: &[u8]) -> String {
    // Each byte renders as exactly three characters ("xx ").
    let mut out = String::with_capacity(a.len() * 3);
    for byte in a {
        out.push_str(&format!("{:02x} ", byte));
    }
    out
}
impl fmt::Debug for Pattern<'_> {
    /// Formats as `[ <lookback hex> ] ( <pattern hex> ) * <count>`, printing
    /// the brackets only when a lookback buffer has content and the parens /
    /// repeat count only when `repeats` is configured.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let buffer = self._ro_get();
        if !buffer.is_empty() {
            f.write_str("[ ")?;
            let s = to_str(&buffer[0..buffer.len()]);
            f.write_str(&s)?;
            f.write_str("] ")?;
        }
        if self.idx.min_match > 0 {
            f.write_str("( ")?;
        }
        let s = to_str(self.pattern);
        f.write_str(&s)?;
        if self.idx.min_match > 0 {
            f.write_str(&format!(") * {} ", self.idx.match_count))?;
        };
        std::result::Result::Ok(())
    }
}
// Test names encode the scenario: `<pattern len>in<input len>` plus suffixes
// for the `lookback` / `repeats` builders in play.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn empty() {
        let mut p = Pattern::new(&[]);
        assert_eq!(p.push(0), None);
    }
    #[test]
    fn oneinone() {
        let mut p = Pattern::new(b"a");
        assert_eq!(p.push(b'0'), None);
        assert_eq!(p.push(b'a'), Some(1));
        assert_eq!(p.get(), []);
    }
    #[test]
    fn oneintwo() {
        let mut p = Pattern::new(b"b");
        assert_eq!(p.push(0), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), Some(1));
        assert_eq!(p.get(), []);
    }
    #[test]
    fn many() {
        let mut p = Pattern::new(b"a");
        assert_eq!(p.push(b'0'), None);
        assert_eq!(p.push(b'a'), Some(1));
        assert_eq!(p.push(b'a'), Some(1));
        assert_eq!(p.get(), []);
    }
    #[test]
    fn manytwo() {
        let mut p = Pattern::new(b"ab");
        assert_eq!(p.push(b'0'), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), Some(2));
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), Some(2));
        assert_eq!(p.get(), []);
    }
    #[test]
    fn twoinfour() {
        let mut p = Pattern::new(b"bc");
        assert_eq!(p.push(0), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), None);
        assert_eq!(p.push(b'c'), Some(2));
        assert_eq!(p.get(), []);
    }
    #[test]
    fn threeinsix() {
        let mut p = Pattern::new(b"cde");
        assert_eq!(p.push(b'0'), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), None);
        assert_eq!(p.push(b'c'), None);
        assert_eq!(p.push(b'd'), None);
        assert_eq!(p.push(b'e'), Some(3));
        assert_eq!(p.get(), []);
    }
    #[test]
    fn empty_lookback() {
        let mut p = Pattern::new(&[]).lookback(0);
        assert_eq!(p.push(0), None)
    }
    #[test]
    fn oneinone_lookback() {
        let mut p = Pattern::new(b"a").lookback(1);
        assert_eq!(p.push(b'0'), None);
        assert_eq!(p.push(b'a'), Some(1));
        println!("DEBUG {:#?}", p);
        assert_eq!(p.get(), b"0");
    }
    #[test]
    fn oneintwo_lookback() {
        let mut p = Pattern::new(b"b").lookback(2);
        assert_eq!(p.push(0), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), Some(1));
    }
    #[test]
    fn twoinfour_lookback() {
        let mut p = Pattern::new(b"bc").lookback(2);
        assert_eq!(p.push(0), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), None);
        assert_eq!(p.push(b'c'), Some(2));
        assert_eq!(p.push(b'd'), None);
    }
    #[test]
    fn empty_repeats() {
        let mut p = Pattern::new(&[]).repeats(0);
        assert_eq!(p.push(0), None);
    }
    #[test]
    fn oneinone_repeats() {
        let mut p = Pattern::new(b"a").repeats(2);
        assert_eq!(p.push(b'0'), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'a'), Some(2));
        assert_eq!(p.get(), []);
    }
    #[test]
    fn oneintwo_repeats() {
        let mut p = Pattern::new(b"b").repeats(2);
        assert_eq!(p.push(b'0'), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), None);
        assert_eq!(p.push(b'b'), Some(2));
        assert_eq!(p.get(), []);
    }
    #[test]
    fn twoinfour_repeats() {
        let mut p = Pattern::new(b"bc").repeats(2);
        assert_eq!(p.push(0), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), None);
        assert_eq!(p.push(b'c'), None);
        assert_eq!(p.push(b'b'), None);
        assert_eq!(p.push(b'c'), Some(4));
        assert_eq!(p.get(), []);
    }
    #[test]
    fn twoinfour_repeats_lookback() {
        let mut p = Pattern::new(b"bc").repeats(2).lookback(2);
        assert_eq!(p.push(b'0'), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), None);
        assert_eq!(p.push(b'c'), None);
        assert_eq!(p.push(b'b'), None);
        assert_eq!(p.push(b'c'), Some(4));
        assert_eq!(p.get(), b"0a");
    }
    #[test]
    fn twoinfour_repeats_lookback_overflow() {
        let mut p = Pattern::new(b"cd").repeats(2).lookback(2);
        assert_eq!(p.push(b'0'), None);
        assert_eq!(p.push(b'a'), None);
        assert_eq!(p.push(b'b'), None);
        assert_eq!(p.push(b'c'), None);
        assert_eq!(p.push(b'd'), None);
        assert_eq!(p.push(b'c'), None);
        assert_eq!(p.push(b'd'), Some(4));
        assert_eq!(p.get(), b"ab");
    }
    #[test]
    fn many_almost_many() {
        let mut p = Pattern::new(b"cd").repeats(2).lookback(2);
        let res = b"bcdecdcdf"
            .iter()
            .map(|c| p.push(*c))
            .collect::<Vec<Option<isize>>>();
        assert_eq!(p.get(), b"df");
        assert_eq!(
            res,
            //b c d e c d c d f
            [None, None, None, None, None, None, None, Some(4), None]
        );
    }
    #[test]
    fn many_many() {
        let mut p = Pattern::new(b"cd").repeats(2).lookback(2);
        let res = b"abccdcdefghcdcdcd"
            .iter()
            .map(|c| p.push(*c))
            .collect::<Vec<Option<isize>>>();
        assert_eq!(p.get(), b"gh");
        assert_eq!(
            res,
            [
                None,
                None,
                None,
                None,
                None,
                None,
                Some(4),
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                Some(4),
                None,
                Some(6)
            ]
        );
    }
    // Once `min_match` is reached, each further repeat reports a growing total.
    #[test]
    fn more_than_you_asked() {
        let mut p = Pattern::new(b"01").repeats(2).lookback(2);
        let res = b"abc0101010101010101010"
            .iter()
            .map(|c| p.push(*c))
            .collect::<Vec<Option<isize>>>();
        assert_eq!(p.get(), b"c0");
        assert_eq!(
            res,
            [
                None,
                None,
                None,
                None,
                None,
                None,
                Some(4),
                None,
                Some(6),
                None,
                Some(8),
                None,
                Some(10),
                None,
                Some(12),
                None,
                Some(14),
                None,
                Some(16),
                None,
                Some(18),
                None
            ]
        );
    }
    #[test]
    fn many_many_prefix() {
        let mut p = Pattern::new(b"cd").repeats(2).lookback(2);
        let res = b"abccdcd"
            .iter()
            .map(|c| p.push(*c))
            .collect::<Vec<Option<isize>>>();
        assert_eq!(p.get(), b"bc");
        assert_eq!(res, [None, None, None, None, None, None, Some(4)]);
    }
    #[test]
    fn many_in_many() {
        let mut p = Pattern::new(b"cd").repeats(2).lookback(2);
        let res = b"hello darkness my old friend cdcdcdcdcdcdcdcd, I've come to talk to you again ! cdcdcdcdcdcdcd".iter().map(|c| p.push(*c)).collect::<Vec<Option<isize>>>();
        assert_eq!(
            res,
            [
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                Some(4),
                None,
                Some(6),
                None,
                Some(8),
                None,
                Some(10),
                None,
                Some(12),
                None,
                Some(14),
                None,
                Some(16),
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                Some(4),
                None,
                Some(6),
                None,
                Some(8),
                None,
                Some(10),
                None,
                Some(12),
                None,
                Some(14)
            ]
        )
    }
}
|
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_end_to_end_length(number_of_links: u8, link_length: f64, well_width: f64, force: f64, temperature: f64) -> f64
{
super::end_to_end_length(&number_of_links, &link_length, &well_width, &force, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_end_to_end_length_per_link(link_length: f64, well_width: f64, force: f64, temperature: f64) -> f64
{
super::end_to_end_length_per_link(&link_length, &well_width, &force, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_nondimensional_end_to_end_length(number_of_links: u8, link_length: f64, well_width: f64, nondimensional_force: f64) -> f64
{
super::nondimensional_end_to_end_length(&number_of_links, &link_length, &well_width, &nondimensional_force)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_nondimensional_end_to_end_length_per_link(link_length: f64, well_width: f64, nondimensional_force: f64) -> f64
{
super::nondimensional_end_to_end_length_per_link(&link_length, &well_width, &nondimensional_force)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_gibbs_free_energy(number_of_links: u8, link_length: f64, hinge_mass: f64, well_width: f64, force: f64, temperature: f64) -> f64
{
super::gibbs_free_energy(&number_of_links, &link_length, &hinge_mass, &well_width, &force, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_gibbs_free_energy_per_link(link_length: f64, hinge_mass: f64, well_width: f64, force: f64, temperature: f64) -> f64
{
super::gibbs_free_energy_per_link(&link_length, &hinge_mass, &well_width, &force, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_relative_gibbs_free_energy(number_of_links: u8, link_length: f64, well_width: f64, force: f64, temperature: f64) -> f64
{
super::relative_gibbs_free_energy(&number_of_links, &link_length, &well_width, &force, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_relative_gibbs_free_energy_per_link(link_length: f64, well_width: f64, force: f64, temperature: f64) -> f64
{
super::relative_gibbs_free_energy_per_link(&link_length, &well_width, &force, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_nondimensional_gibbs_free_energy(number_of_links: u8, link_length: f64, hinge_mass: f64, well_width: f64, nondimensional_force: f64, temperature: f64) -> f64
{
super::nondimensional_gibbs_free_energy(&number_of_links, &link_length, &hinge_mass, &well_width, &nondimensional_force, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_nondimensional_gibbs_free_energy_per_link(link_length: f64, hinge_mass: f64, well_width: f64, nondimensional_force: f64, temperature: f64) -> f64
{
super::nondimensional_gibbs_free_energy_per_link(&link_length, &hinge_mass, &well_width, &nondimensional_force, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_nondimensional_relative_gibbs_free_energy(number_of_links: u8, link_length: f64, well_width: f64, nondimensional_force: f64) -> f64
{
super::nondimensional_relative_gibbs_free_energy(&number_of_links, &link_length, &well_width, &nondimensional_force)
}
#[no_mangle]
pub extern fn physics_single_chain_swfjc_thermodynamics_isotensional_nondimensional_relative_gibbs_free_energy_per_link(link_length: f64, well_width: f64, nondimensional_force: f64) -> f64
{
super::nondimensional_relative_gibbs_free_energy_per_link(&link_length, &well_width, &nondimensional_force)
}
|
use crate::rtb_type_strict;
// Strict OpenRTB-style enum: only the listed numeric codes are valid values.
rtb_type_strict! {
	SizeUnit,
	Dips=1;
	Inches = 2;
	Centimeters = 3
}
// Dips (code 1) is the default size unit.
impl Default for SizeUnit {
	fn default() -> Self {
		Self::Dips
	}
}
|
//! src/models.rs
use serde::{Deserialize, Serialize};
use uuid::Uuid;
// Full recipe row (fields private — internal representation).
#[derive(Debug, Serialize, Deserialize)]
pub struct Recipe {
    id: uuid::Uuid,
    title: String,
    content: String,
    author: User,
    published: bool,
}
// Single-recipe output shape (no author, no published flag).
#[derive(Debug, Serialize, Deserialize)]
pub struct RecipeOut {
    pub id: uuid::Uuid,
    pub title: String,
    pub content: String,
}
// Recipe creation input shape.
#[derive(Debug, Serialize, Deserialize)]
pub struct RecipeIn {
    pub title: String,
    pub content: String,
    pub published: bool,
}
// List-item shape when returning many recipes (id + title only).
#[derive(Debug, Serialize, Deserialize)]
pub struct RecipesOut {
    pub id: Uuid,
    pub title: String,
}
// Recipe update input shape.
#[derive(Debug, Serialize, Deserialize)]
pub struct RecipeUpdate {
    pub title: String,
    pub content: String,
    pub published: bool
}
// Stored user row. NOTE(review): whether `password` holds a hash or plaintext
// is not visible here — confirm against the auth layer; this type derives
// Serialize, so take care it is never sent to clients.
#[derive(Debug, Serialize, Deserialize)]
pub struct User {
    id: uuid::Uuid,
    username: String,
    password: String,
}
// DB insert shape for users; password is hashed before this point.
#[derive(Serialize, Debug)]
pub struct UserDbIn {
    username: String,
    hashed_password: String,
}
// Public user shape — deliberately excludes credentials.
#[derive(Debug, Deserialize, Serialize)]
pub struct UserOut {
    id: uuid::Uuid,
    username: String,
}
|
use dashmap::DashMap;
use serenity::prelude::TypeMapKey;
use std::sync::Arc;
// Shared per-channel game flags; cheap to clone (Arc-backed map).
#[derive(Clone, Default)]
pub struct GameState {
    // Keyed by Discord channel id. NOTE(review): the meaning of the bool
    // (game in progress?) is not visible here — confirm at call sites.
    pub channel: Arc<DashMap<u64, bool>>,
}
impl GameState {
    // Equivalent to `GameState::default()`; kept as a conventional constructor.
    pub fn new() -> Self {
        Self::default()
    }
}
// Stores a GameState directly in serenity's TypeMap (shared bot state).
impl TypeMapKey for GameState {
    type Value = GameState;
}
|
/// Prints the HR -> Salary module banner.
pub fn salary() {
    // Message text (including the trailing space) kept byte-identical.
    println!("{}", "Welcome in HR -> Salary ");
}
|
use std::ops::ControlFlow;
use std::time::Instant;
const INPUT: &str = include_str!("../input.txt");
/// Part 1: final floor — '(' goes up one, ')' goes down one.
/// Panics on any other character (the input contains only parens).
fn part1() -> i64 {
    INPUT
        .chars()
        .map(|c| match c {
            '(' => 1,
            ')' => -1,
            _ => unreachable!(),
        })
        .sum()
}
fn part2() -> usize {
let result: ControlFlow<usize, i64> = INPUT.chars().enumerate().try_fold(0, |acc, (pos, c)| {
let new_floor = match c {
')' => acc - 1,
'(' => acc + 1,
_ => unreachable!(),
};
if new_floor == -1 {
ControlFlow::Break(pos)
} else {
ControlFlow::Continue(new_floor)
}
});
match result {
ControlFlow::Break(pos) => pos + 1,
_ => unreachable!(),
}
}
/// Runs both parts, timing each with a monotonic clock.
fn main() {
    let start = Instant::now();
    println!("part 1: {}", part1());
    println!("part 1 took {}ms", start.elapsed().as_millis());
    let start = Instant::now();
    println!("part 2: {}", part2());
    println!("part 2 took {}ms", start.elapsed().as_millis());
}
#[cfg(test)]
mod tests {
    use super::*;
    // Expected answers are for this repository's committed input file.
    #[test]
    fn test_part1() {
        assert_eq!(part1(), 232);
    }
    #[test]
    fn test_part2() {
        assert_eq!(part2(), 1783);
    }
}
|
/// Returns the sum of the squares of all elements in `vec`.
fn square_sum(vec: Vec<i32>) -> i32 {
    // Consuming iterator + sum replaces the manual accumulator loop.
    vec.into_iter().map(|i| i * i).sum()
}
|
use smol::channel::{Receiver, Sender};
use std::fmt::{Debug, Formatter, Result as FmtResult};
use std::net::SocketAddr;
/// Messages the endpoint can receive on its control channel.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum EndpointSendMessage {
    /// Connect to a socket address.
    /// Params are `socket_addr`, `remote_pk_info`, `is_stable_data`.
    Connect(SocketAddr, Vec<u8>, Option<Vec<u8>>),
    /// Close a connection.
    /// Param is `socket_addr`.
    Close(SocketAddr),
}
/// Sent outward when the endpoint receives an incoming connection.
/// Params are `socket_addr`, `endpoint_stream_receiver`,
/// `endpoint_stream_sender` and `is_start_connect_by_self`.
#[derive(Clone)]
pub struct EndpointIncomingMessage(
    pub SocketAddr,
    pub Receiver<EndpointStreamMessage>,
    pub Sender<EndpointStreamMessage>,
    pub Option<Vec<u8>>,
);
/// Stream messages exchanged between the endpoint and the outside.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum EndpointStreamMessage {
    /// Transfer raw bytes.
    Bytes(Vec<u8>),
    /// The stream was closed.
    Close,
}
// Manual Debug: only the peer address is printed — the channel halves have no
// meaningful Debug representation.
impl Debug for EndpointIncomingMessage {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        // Fix: the emitted text previously misspelled "Endpoint" as "Ednpoint".
        write!(f, "Endpoint incoming: {}.", self.0)
    }
}
|
// #![crate_type="lib"]
/// Minimal hangman word state: correctly guessed letters are removed from
/// `word`; the game is won once `word` is empty.
#[allow(non_snake_case)] // public module name kept for existing callers
pub mod Hangman {
    pub struct Hangman {
        // Letters still to be guessed.
        word: String,
    }
    /// Creates a game over `word`.
    pub fn new(word: String) -> Hangman {
        // Field-init shorthand replaces the redundant `word: word`.
        Hangman { word }
    }
    impl Hangman {
        /// Checks `guess` against the remaining letters; removes every
        /// occurrence of a correct guess and returns whether it matched.
        ///
        /// NOTE(review): `pop` strips exactly one trailing character — this
        /// assumes the guess arrives with a trailing newline (e.g. straight
        /// from `read_line`); a bare single-letter guess would lose its letter.
        pub fn is_correct(&mut self, mut guess: String) -> bool {
            println!("in is_correct {}", self.word);
            guess.pop();
            if self.word.contains(&guess) {
                self.word = self.word.replace(&guess, "");
                return true;
            }
            false
        }
        /// True once every letter has been guessed.
        pub fn empty(&mut self) -> bool {
            self.word.is_empty()
        }
    }
}
use crate::error::{from_protobuf_error, NiaServerError, NiaServerResult};
use crate::protocol::Serializable;
use protobuf::Message;
// A mouse "move to absolute coordinates" action (x/y pair).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ActionMouseAbsoluteMove {
    x: i32,
    y: i32,
}
impl ActionMouseAbsoluteMove {
    /// Constructs an absolute-move action targeting `(x, y)`.
    pub fn new(x: i32, y: i32) -> ActionMouseAbsoluteMove {
        ActionMouseAbsoluteMove { x, y }
    }
    pub fn get_x(&self) -> i32 {
        self.x
    }
    pub fn get_y(&self) -> i32 {
        self.y
    }
}
// Protobuf round-trip for ActionMouseAbsoluteMove.
impl
    Serializable<
        ActionMouseAbsoluteMove,
        nia_protocol_rust::ActionMouseAbsoluteMove,
    > for ActionMouseAbsoluteMove
{
    /// Copies x/y into a fresh protobuf message.
    fn to_pb(&self) -> nia_protocol_rust::ActionMouseAbsoluteMove {
        let mut action_mouse_absolute_move_pb =
            nia_protocol_rust::ActionMouseAbsoluteMove::new();
        action_mouse_absolute_move_pb.set_x(self.x);
        action_mouse_absolute_move_pb.set_y(self.y);
        action_mouse_absolute_move_pb
    }
    /// Rebuilds the action from a protobuf message; infallible in practice.
    fn from_pb(
        object_pb: nia_protocol_rust::ActionMouseAbsoluteMove,
    ) -> NiaServerResult<ActionMouseAbsoluteMove> {
        let action_mouse_absolute_move =
            ActionMouseAbsoluteMove::new(object_pb.get_x(), object_pb.get_y());
        Ok(action_mouse_absolute_move)
    }
}
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;
    // Round-trips the action through bytes and checks x/y survive intact.
    #[test]
    fn serializable_and_deserializable() {
        let x_expected = 100;
        let y_expected = 200;
        let action_mouse_absolute_move =
            ActionMouseAbsoluteMove::new(x_expected, y_expected);
        let bytes = action_mouse_absolute_move.to_bytes().unwrap();
        let action_mouse_absolute_move =
            ActionMouseAbsoluteMove::from_bytes(bytes).unwrap();
        let x_actual = action_mouse_absolute_move.x;
        let y_actual = action_mouse_absolute_move.y;
        assert_eq!(x_expected, x_actual);
        assert_eq!(y_expected, y_actual);
    }
}
|
// Handles config related to the workspace
// Current use case is setting up a git repo
// for the workspace
use crate::er::{self, Result};
use crate::git;
use crate::utils::{self, CliEnv};
use futures::{
future::{self, Either},
Future,
};
/// Initializes a git repo for the workspace, using
/// defined git account.
///
/// Old-style (futures 0.1) future: `Either::A` carries errors from the
/// synchronous setup steps, `Either::B` the actual async git setup.
pub fn init_git<'a>(env: &'a CliEnv) -> impl Future<Item = (), Error = failure::Error> + 'a {
    let git_config = match git::select_account(env, None) {
        Ok(git_account) => git_account,
        Err(e) => return Either::A(future::err(failure::Error::from(e))),
    };
    // Prompt for the repo name, defaulting to "workspace".
    let repo_name = match env.get_input("Repo name", Some("workspace".into())) {
        Ok(repo_name) => repo_name,
        Err(e) => return Either::A(future::err(failure::Error::from(e))),
    };
    let dir_git = match git::inspect_git(env.config_dirs.config_root.clone()) {
        Ok(dir_git) => dir_git,
        Err(e) => return Either::A(future::err(failure::Error::from(e))),
    };
    let git_uri = git_config.repo_uri(repo_name);
    Either::B(
        git::setup_git_dir(env, dir_git, git_config, git_uri).map_err(|e| failure::Error::from(e)),
    )
}
/// Given git username and password, can recreate
/// the workspace (with todos).
///
/// Refuses to clone over an existing repo or over a non-empty directory.
pub fn clone_workspace(env: &CliEnv) -> Result<()> {
    let dir_git = git::inspect_git(env.config_dirs.config_root.clone())?;
    if dir_git.repo.is_some() {
        println!("Found existing repo, not proceeding with clone");
        return Ok(());
    } else if dir_git.has_files {
        println!("Found no repo, but existing files. Not proceeding with clone");
        println!("Could initialize repository instead");
        return er::err("No repo, but files");
    }
    // Collect credentials interactively; the password prompt does not echo.
    let git_user = env.get_input("Git user", None)?;
    let repo_name = env.get_input("Repo name", Some("workspace".into()))?;
    let git_pass = env.get_pass("Git password")?;
    let repo_uri = format!("https://github.com/{}/{}", git_user, repo_name);
    Ok(git::clone_repo(
        git_user,
        git_pass,
        &repo_uri,
        &env.config_dirs.config_root,
    ))
}
// todo: Could run this automatically after config files have changed
// when repo is initialized, could propose to add if not
/// Stages everything in the workspace repo, commits with a prompted (or
/// timestamped default) message, and pushes to origin/master.
pub fn push_workspace(env: &CliEnv) -> Result<()> {
    let dir_git = git::inspect_git(env.config_dirs.config_root.clone())?;
    let repo = match dir_git.repo {
        Some(repo) => {
            println!("Found repo");
            repo
        }
        None => {
            // todo: propose run init
            println!("Please try workspace init-git, or setup repository manually");
            return er::err("No repo");
        }
    };
    // todo: Possibly not necessary, could test,
    // also good to err on side of doing it
    std::env::set_current_dir(&dir_git.dir)?;
    let tree = git::add_all(&repo)?;
    let default_msg = format!("Workspace commit: {}", utils::now_formatted());
    let message = env.get_input("Message", Some(default_msg))?;
    git::commit(&repo, tree, &message)?;
    git::push_origin_master(&repo)?;
    println!("Commit and push to origin successful");
    Ok(())
}
|
use byteorder::{ByteOrder, LittleEndian};
use ckb_chain_spec::consensus::Consensus;
use ckb_dao_utils::{extract_dao_data, pack_dao_data, DaoError};
use ckb_error::Error;
use ckb_script_data_loader::DataLoader;
use ckb_store::{data_loader_wrapper::DataLoaderWrapper, ChainStore};
use ckb_types::{
bytes::Bytes,
core::{
cell::{CellMeta, ResolvedTransaction},
Capacity, CapacityResult, HeaderView, ScriptHashType,
},
packed::{Byte32, CellOutput, OutPoint, Script, WitnessArgs},
prelude::*,
};
use std::collections::HashSet;
use std::convert::TryFrom;
// Computes Nervos DAO quantities (issuance, occupied capacity, accumulated
// rate) against a chain store and its data loader.
pub struct DaoCalculator<'a, CS, DL> {
    pub consensus: &'a Consensus,
    pub store: &'a CS,
    pub data_loader: DL,
}
impl<'a, CS: ChainStore<'a>> DaoCalculator<'a, CS, DataLoaderWrapper<'a, CS>> {
    /// Builds a calculator whose data loader wraps the given store.
    pub fn new(consensus: &'a Consensus, store: &'a CS) -> Self {
        let data_loader = DataLoaderWrapper::new(store);
        DaoCalculator {
            consensus,
            store,
            data_loader,
        }
    }
    /// Primary issuance for block `target`, taken from its epoch schedule.
    /// Errors with `InvalidHeader` when the epoch cannot be resolved.
    pub fn primary_block_reward(&self, target: &HeaderView) -> Result<Capacity, Error> {
        let target_epoch = self
            .store
            .get_block_epoch_index(&target.hash())
            .and_then(|index| self.store.get_epoch_ext(&index))
            .ok_or(DaoError::InvalidHeader)?;
        target_epoch.block_reward(target.number())
    }
    /// Miner's share of the secondary issuance for `target`:
    /// `g2 * u / c`, where `u`/`c` come from the parent's DAO field.
    /// The genesis block gets zero.
    pub fn secondary_block_reward(&self, target: &HeaderView) -> Result<Capacity, Error> {
        if target.number() == 0 {
            return Ok(Capacity::zero());
        }
        let target_parent_hash = target.data().raw().parent_hash();
        let target_parent = self
            .store
            .get_block_header(&target_parent_hash)
            .ok_or(DaoError::InvalidHeader)?;
        let target_epoch = self
            .store
            .get_block_epoch_index(&target.hash())
            .and_then(|index| self.store.get_epoch_ext(&index))
            .ok_or(DaoError::InvalidHeader)?;
        let target_g2 = target_epoch
            .secondary_block_issuance(target.number(), self.consensus.secondary_epoch_reward())?;
        let (_, target_parent_c, _, target_parent_u) = extract_dao_data(target_parent.dao())?;
        // Widen to u128 so the multiply cannot overflow before the divide.
        let reward128 = u128::from(target_g2.as_u64()) * u128::from(target_parent_u.as_u64())
            / u128::from(target_parent_c.as_u64());
        let reward = u64::try_from(reward128).map_err(|_| DaoError::Overflow)?;
        Ok(Capacity::shannons(reward))
    }
    // Used for testing only.
    //
    // Notice unlike primary_block_reward and secondary_epoch_reward above,
    // this starts calculating from parent, not target header.
    /// Sum of primary and secondary rewards for the finalize target of
    /// `parent.number() + 1`.
    pub fn base_block_reward(&self, parent: &HeaderView) -> Result<Capacity, Error> {
        let target_number = self
            .consensus
            .finalize_target(parent.number() + 1)
            .ok_or(DaoError::InvalidHeader)?;
        let target = self
            .store
            .get_block_hash(target_number)
            .and_then(|hash| self.store.get_block_header(&hash))
            .ok_or(DaoError::InvalidHeader)?;
        let primary_block_reward = self.primary_block_reward(&target)?;
        let secondary_block_reward = self.secondary_block_reward(&target)?;
        Ok(primary_block_reward.safe_add(secondary_block_reward)?)
    }
    /// Computes the DAO field for a new block on top of `parent`, folding in
    /// the occupied-capacity and interest effects of `rtxs`.
    /// All arithmetic uses checked/safe operations; overflow is an error.
    pub fn dao_field(
        &self,
        rtxs: &[ResolvedTransaction],
        parent: &HeaderView,
    ) -> Result<Byte32, Error> {
        // Freed occupied capacities from consumed inputs
        let freed_occupied_capacities =
            rtxs.iter().try_fold(Capacity::zero(), |capacities, rtx| {
                self.input_occupied_capacities(rtx)
                    .and_then(|c| capacities.safe_add(c).map_err(Into::into))
            })?;
        let added_occupied_capacities = self.added_occupied_capacities(rtxs)?;
        let withdrawed_interests = self.withdrawed_interests(rtxs)?;
        let (parent_ar, parent_c, parent_s, parent_u) = extract_dao_data(parent.dao())?;
        // g contains both primary issuance and secondary issuance,
        // g2 is the secondary issuance for the block, which consists of
        // issuance for the miner, NervosDAO and treasury.
        // When calculating issuance in NervosDAO, we use the real
        // issuance for each block(which will only be issued on chain
        // after the finalization delay), not the capacities generated
        // in the cellbase of current block.
        let parent_block_epoch = self
            .store
            .get_block_epoch_index(&parent.hash())
            .and_then(|index| self.store.get_epoch_ext(&index))
            .ok_or(DaoError::InvalidHeader)?;
        // Fall back to the parent's epoch when no boundary is crossed.
        let current_block_epoch = self
            .store
            .next_epoch_ext(&self.consensus, &parent_block_epoch, &parent)
            .unwrap_or(parent_block_epoch);
        let current_block_number = parent.number() + 1;
        let current_g2 = current_block_epoch.secondary_block_issuance(
            current_block_number,
            self.consensus.secondary_epoch_reward(),
        )?;
        let current_g = current_block_epoch
            .block_reward(current_block_number)
            .and_then(|c| c.safe_add(current_g2).map_err(Into::into))?;
        // Widen to u128 so the multiply cannot overflow before the divide.
        let miner_issuance128 = u128::from(current_g2.as_u64()) * u128::from(parent_u.as_u64())
            / u128::from(parent_c.as_u64());
        let miner_issuance =
            Capacity::shannons(u64::try_from(miner_issuance128).map_err(|_| DaoError::Overflow)?);
        let nervosdao_issuance = current_g2.safe_sub(miner_issuance)?;
        let current_c = parent_c.safe_add(current_g)?;
        let current_u = parent_u
            .safe_add(added_occupied_capacities)
            .and_then(|u| u.safe_sub(freed_occupied_capacities))?;
        let current_s = parent_s
            .safe_add(nervosdao_issuance)
            .and_then(|s| s.safe_sub(withdrawed_interests))?;
        let ar_increase128 =
            u128::from(parent_ar) * u128::from(current_g2.as_u64()) / u128::from(parent_c.as_u64());
        let ar_increase = u64::try_from(ar_increase128).map_err(|_| DaoError::Overflow)?;
        let current_ar = parent_ar
            .checked_add(ar_increase)
            .ok_or(DaoError::Overflow)?;
        Ok(pack_dao_data(current_ar, current_c, current_s, current_u))
    }
/// Calculates the maximum capacity (deposit plus NervosDAO interest) that
/// can be withdrawn for the deposit cell at `out_point`, as of the block
/// identified by `withdrawing_header_hash`.
///
/// Fails with `DaoError::InvalidOutPoint` when the transaction or the
/// indexed output/data cannot be found in the store.
pub fn maximum_withdraw(
    &self,
    out_point: &OutPoint,
    withdrawing_header_hash: &Byte32,
) -> Result<Capacity, Error> {
    // `block_hash` is the block that committed the deposit transaction.
    let (tx, block_hash) = self
        .store
        .get_transaction(&out_point.tx_hash())
        .ok_or(DaoError::InvalidOutPoint)?;
    let output = tx
        .outputs()
        .get(out_point.index().unpack())
        .ok_or(DaoError::InvalidOutPoint)?;
    let output_data = tx
        .outputs_data()
        .get(out_point.index().unpack())
        .ok_or(DaoError::InvalidOutPoint)?;
    self.calculate_maximum_withdraw(
        &output,
        Capacity::bytes(output_data.len())?,
        &block_hash,
        withdrawing_header_hash,
    )
}
/// Returns the fee of `rtx`: the total value its inputs can release
/// (including any NervosDAO interest from withdrawing inputs) minus the
/// total capacity of its outputs. Underflow (outputs exceed inputs)
/// surfaces as a capacity error.
pub fn transaction_fee(&self, rtx: &ResolvedTransaction) -> Result<Capacity, Error> {
    let total_inputs = self.transaction_maximum_withdraw(rtx)?;
    let total_outputs = rtx.transaction.outputs_capacity()?;
    Ok(total_inputs.safe_sub(total_outputs)?)
}
/// Sums the occupied capacity (cell storage cost including output data) of
/// every output across `rtxs`, i.e. the capacity that becomes occupied if
/// these transactions are committed.
fn added_occupied_capacities(&self, rtxs: &[ResolvedTransaction]) -> Result<Capacity, Error> {
    // Newly added occupied capacities from outputs
    let added_occupied_capacities =
        rtxs.iter().try_fold(Capacity::zero(), |capacities, rtx| {
            rtx.transaction
                .outputs_with_data_iter()
                .enumerate()
                .try_fold(Capacity::zero(), |tx_capacities, (_, (output, data))| {
                    // occupied = fixed cell overhead + data length.
                    Capacity::bytes(data.len())
                        .and_then(|c| output.occupied_capacity(c))
                        .and_then(|c| tx_capacities.safe_add(c))
                })
                .and_then(|c| capacities.safe_add(c))
        })?;
    Ok(added_occupied_capacities)
}
/// Sums the occupied capacity of every resolved input of `rtx`, using
/// `modified_occupied_capacity` so satoshi-gift genesis cells get their
/// special ratio-adjusted value.
fn input_occupied_capacities(&self, rtx: &ResolvedTransaction) -> Result<Capacity, Error> {
    let mut total = Capacity::zero();
    for cell_meta in &rtx.resolved_inputs {
        let occupied = modified_occupied_capacity(cell_meta, &self.consensus)?;
        total = total.safe_add(occupied)?;
    }
    Ok(total)
}
/// Total NervosDAO interest claimed by `rtxs`: the sum of every
/// transaction's maximum withdraw minus the plain capacity of its inputs.
/// Non-DAO inputs contribute zero, since their withdraw equals their
/// capacity.
fn withdrawed_interests(&self, rtxs: &[ResolvedTransaction]) -> Result<Capacity, Error> {
    let maximum_withdraws = rtxs.iter().try_fold(Capacity::zero(), |capacities, rtx| {
        self.transaction_maximum_withdraw(rtx)
            .and_then(|c| capacities.safe_add(c).map_err(Into::into))
    })?;
    // Plain (interest-free) capacity of every input across all rtxs.
    let input_capacities = rtxs.iter().try_fold(Capacity::zero(), |capacities, rtx| {
        let tx_input_capacities = rtx.resolved_inputs.iter().try_fold(
            Capacity::zero(),
            |tx_capacities, cell_meta| {
                let output_capacity: Capacity = cell_meta.cell_output.capacity().unpack();
                tx_capacities.safe_add(output_capacity)
            },
        )?;
        capacities.safe_add(tx_input_capacities)
    })?;
    maximum_withdraws
        .safe_sub(input_capacities)
        .map_err(Into::into)
}
/// Sums, over every input of `rtx`, the maximum capacity that input can
/// release. A NervosDAO withdrawing input contributes its deposit plus
/// accrued interest; every other input contributes its plain capacity.
fn transaction_maximum_withdraw(&self, rtx: &ResolvedTransaction) -> Result<Capacity, Error> {
    // Header deps declared by the transaction; both the withdrawing cell's
    // commit block and the deposit block must be listed here.
    let header_deps: HashSet<Byte32> = rtx.transaction.header_deps_iter().collect();
    rtx.resolved_inputs.iter().enumerate().try_fold(
        Capacity::zero(),
        |capacities, (i, cell_meta)| {
            let capacity: Result<Capacity, Error> = {
                let output = &cell_meta.cell_output;
                // A DAO cell is identified by the DAO system type script,
                // referenced via `type` hash.
                let is_dao_type_script = |type_script: Script| {
                    Into::<u8>::into(type_script.hash_type())
                        == Into::<u8>::into(ScriptHashType::Type)
                        && type_script.code_hash()
                            == self.consensus.dao_type_hash().expect("No dao system cell")
                };
                // A withdrawing cell stores the deposit block number as a
                // little-endian u64 (> 0) in its 8-byte cell data; a deposit
                // cell stores zero.
                let is_withdrawing_input =
                    |cell_meta: &CellMeta| match self.data_loader.load_cell_data(&cell_meta) {
                        Some((data, _)) => data.len() == 8 && LittleEndian::read_u64(&data) > 0,
                        None => false,
                    };
                if output
                    .type_()
                    .to_opt()
                    .map(is_dao_type_script)
                    .unwrap_or(false)
                    && is_withdrawing_input(&cell_meta)
                {
                    // The block that committed the withdrawing cell; it must
                    // be listed among the header deps.
                    let withdrawing_header_hash = cell_meta
                        .transaction_info
                        .as_ref()
                        .map(|info| &info.block_hash)
                        .filter(|hash| header_deps.contains(&hash))
                        .ok_or(DaoError::InvalidOutPoint)?;
                    // The deposit header is located through the witness: the
                    // matching witness carries an index into header_deps.
                    let deposit_header_hash = rtx
                        .transaction
                        .witnesses()
                        .get(i)
                        .ok_or(DaoError::InvalidOutPoint)
                        .and_then(|witness_data| {
                            // dao contract stores header deps index as u64 in the input_type field of WitnessArgs
                            let witness = WitnessArgs::from_slice(&Unpack::<Bytes>::unpack(
                                &witness_data,
                            ))
                            .map_err(|_| DaoError::InvalidDaoFormat)?;
                            let header_deps_index_data: Option<Bytes> = witness
                                .input_type()
                                .to_opt()
                                .map(|witness| witness.unpack());
                            // The index must be present and exactly 8 bytes.
                            if header_deps_index_data.is_none()
                                || header_deps_index_data.clone().map(|data| data.len())
                                    != Some(8)
                            {
                                return Err(DaoError::InvalidDaoFormat);
                            }
                            Ok(LittleEndian::read_u64(&header_deps_index_data.unwrap()))
                        })
                        .and_then(|header_dep_index| {
                            rtx.transaction
                                .header_deps()
                                .get(header_dep_index as usize)
                                .and_then(|hash| header_deps.get(&hash))
                                .ok_or(DaoError::InvalidOutPoint)
                        })?;
                    self.calculate_maximum_withdraw(
                        &output,
                        Capacity::bytes(cell_meta.data_bytes as usize)?,
                        &deposit_header_hash,
                        &withdrawing_header_hash,
                    )
                } else {
                    // Non-DAO (or deposit-phase) input: plain capacity only.
                    Ok(output.capacity().unpack())
                }
            };
            capacity.and_then(|c| c.safe_add(capacities).map_err(Into::into))
        },
    )
}
/// Core NervosDAO withdraw formula: scales the cell's free capacity
/// (total minus occupied) by the accumulated-rate (AR) growth between the
/// deposit header and the withdrawing header, then adds back the occupied
/// part, which earns no interest.
///
/// Errors with `DaoError::InvalidHeader` if either header is missing from
/// the store, and `DaoError::InvalidOutPoint` if the withdraw does not
/// happen strictly after the deposit.
fn calculate_maximum_withdraw(
    &self,
    output: &CellOutput,
    output_data_capacity: Capacity,
    deposit_header_hash: &Byte32,
    withdrawing_header_hash: &Byte32,
) -> Result<Capacity, Error> {
    let deposit_header = self
        .store
        .get_block_header(deposit_header_hash)
        .ok_or(DaoError::InvalidHeader)?;
    let withdrawing_header = self
        .store
        .get_block_header(withdrawing_header_hash)
        .ok_or(DaoError::InvalidHeader)?;
    if deposit_header.number() >= withdrawing_header.number() {
        return Err(DaoError::InvalidOutPoint.into());
    }
    // Only the AR component of each DAO field is needed here.
    let (deposit_ar, _, _, _) = extract_dao_data(deposit_header.dao())?;
    let (withdrawing_ar, _, _, _) = extract_dao_data(withdrawing_header.dao())?;
    let occupied_capacity = output.occupied_capacity(output_data_capacity)?;
    let output_capacity: Capacity = output.capacity().unpack();
    let counted_capacity = output_capacity.safe_sub(occupied_capacity)?;
    // u128 math so counted_capacity * AR cannot overflow u64.
    let withdraw_counted_capacity = u128::from(counted_capacity.as_u64())
        * u128::from(withdrawing_ar)
        / u128::from(deposit_ar);
    // NOTE(review): the `as u64` cast truncates silently if the scaled value
    // exceeds u64::MAX; presumably safe_add below is relied on to surface
    // overflow in practice — confirm.
    let withdraw_capacity =
        Capacity::shannons(withdraw_counted_capacity as u64).safe_add(occupied_capacity)?;
    Ok(withdraw_capacity)
}
}
/// return special occupied capacity if cell is satoshi's gift
/// otherwise return cell occupied capacity
pub fn modified_occupied_capacity(
    cell_meta: &CellMeta,
    consensus: &Consensus,
) -> CapacityResult<Capacity> {
    // The satoshi gift is the genesis cellbase output locked to the
    // configured satoshi pubkey hash.
    let is_satoshi_gift = cell_meta
        .transaction_info
        .as_ref()
        .map(|tx_info| {
            tx_info.is_genesis()
                && tx_info.is_cellbase()
                && cell_meta.cell_output.lock().args().raw_data()
                    == consensus.satoshi_pubkey_hash.0[..]
        })
        .unwrap_or(false);
    if is_satoshi_gift {
        // The gift counts only a configured ratio of its capacity as occupied.
        Unpack::<Capacity>::unpack(&cell_meta.cell_output.capacity())
            .safe_mul_ratio(consensus.satoshi_cell_occupied_ratio)
    } else {
        cell_meta.occupied_capacity()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use ckb_db::RocksDB;
    use ckb_store::{ChainDB, COLUMNS};
    use ckb_types::{
        bytes::Bytes,
        core::{
            capacity_bytes, cell::CellMetaBuilder, BlockBuilder, BlockNumber, EpochExt,
            HeaderBuilder, TransactionBuilder,
        },
        h256,
        utilities::DIFF_TWO,
        H256, U256,
    };

    // Opens a throw-away RocksDB-backed chain store for a single test.
    fn new_store() -> ChainDB {
        ChainDB::new(RocksDB::open_tmp(COLUMNS), Default::default())
    }

    // Persists `parent` plus a synthetic epoch ext so that DaoCalculator can
    // resolve the parent block and its epoch through the store.
    // `epoch_start` overrides the epoch's start number (defaults to
    // parent.number() - 1000).
    fn prepare_store(
        parent: &HeaderView,
        epoch_start: Option<BlockNumber>,
    ) -> (ChainDB, HeaderView) {
        let store = new_store();
        let txn = store.begin_transaction();
        let parent_block = BlockBuilder::default().header(parent.clone()).build();
        txn.insert_block(&parent_block).unwrap();
        txn.attach_block(&parent_block).unwrap();
        let epoch_ext = EpochExt::new_builder()
            .number(parent.number())
            .base_block_reward(Capacity::shannons(50_000_000_000))
            .remainder_reward(Capacity::shannons(1_000_128))
            .previous_epoch_hash_rate(U256::one())
            .last_block_hash_in_previous_epoch(h256!("0x1").pack())
            .start_number(epoch_start.unwrap_or_else(|| parent.number() - 1000))
            .length(2091)
            .compact_target(DIFF_TWO)
            .build();
        let epoch_hash = h256!("0x123455").pack();
        txn.insert_block_epoch_index(&parent.hash(), &epoch_hash)
            .unwrap();
        txn.insert_epoch_ext(&epoch_hash, &epoch_ext).unwrap();
        txn.commit().unwrap();
        (store, parent.clone())
    }

    // dao_field with no transactions: only issuance moves the accumulators.
    // Expected tuples below are (AR, C, S, U).
    #[test]
    fn check_dao_data_calculation() {
        let consensus = Consensus::default();
        let parent_number = 12345;
        let parent_header = HeaderBuilder::default()
            .number(parent_number.pack())
            .dao(pack_dao_data(
                10_000_000_000_123_456,
                Capacity::shannons(500_000_000_123_000),
                Capacity::shannons(400_000_000_123),
                Capacity::shannons(600_000_000_000),
            ))
            .build();
        let (store, parent_header) = prepare_store(&parent_header, None);
        let result = DaoCalculator::new(&consensus, &store)
            .dao_field(&[], &parent_header)
            .unwrap();
        let dao_data = extract_dao_data(result).unwrap();
        assert_eq!(
            dao_data,
            (
                10_000_586_990_682_998,
                Capacity::shannons(500_079_349_650_985),
                Capacity::shannons(429_314_308_674),
                Capacity::shannons(600_000_000_000)
            )
        );
    }

    // Same calculation starting from the genesis parent (number 0, epoch
    // starting at 0).
    #[test]
    fn check_initial_dao_data_calculation() {
        let consensus = Consensus::default();
        let parent_number = 0;
        let parent_header = HeaderBuilder::default()
            .number(parent_number.pack())
            .dao(pack_dao_data(
                10_000_000_000_000_000,
                Capacity::shannons(500_000_000_000_000),
                Capacity::shannons(400_000_000_000),
                Capacity::shannons(600_000_000_000),
            ))
            .build();
        let (store, parent_header) = prepare_store(&parent_header, Some(0));
        let result = DaoCalculator::new(&consensus, &store)
            .dao_field(&[], &parent_header)
            .unwrap();
        let dao_data = extract_dao_data(result).unwrap();
        assert_eq!(
            dao_data,
            (
                10_000_586_990_559_680,
                Capacity::shannons(500_079_349_527_985),
                Capacity::shannons(429_314_308_551),
                Capacity::shannons(600_000_000_000)
            )
        );
    }

    // Parent is the first block of its epoch (epoch_start == parent number);
    // the result must match the mid-epoch case.
    #[test]
    fn check_first_epoch_block_dao_data_calculation() {
        let consensus = Consensus::default();
        let parent_number = 12340;
        let parent_header = HeaderBuilder::default()
            .number(parent_number.pack())
            .dao(pack_dao_data(
                10_000_000_000_123_456,
                Capacity::shannons(500_000_000_123_000),
                Capacity::shannons(400_000_000_123),
                Capacity::shannons(600_000_000_000),
            ))
            .build();
        let (store, parent_header) = prepare_store(&parent_header, Some(12340));
        let result = DaoCalculator::new(&consensus, &store)
            .dao_field(&[], &parent_header)
            .unwrap();
        let dao_data = extract_dao_data(result).unwrap();
        assert_eq!(
            dao_data,
            (
                10_000_586_990_682_998,
                Capacity::shannons(500_079_349_650_985),
                Capacity::shannons(429_314_308_674),
                Capacity::shannons(600_000_000_000)
            )
        );
    }

    // A parent C near u64::MAX must surface CapacityOverflow instead of
    // wrapping.
    #[test]
    fn check_dao_data_calculation_overflows() {
        let consensus = Consensus::default();
        let parent_number = 12345;
        let parent_header = HeaderBuilder::default()
            .number(parent_number.pack())
            .dao(pack_dao_data(
                10_000_000_000_123_456,
                Capacity::shannons(18_446_744_073_709_000_000),
                Capacity::shannons(446_744_073_709),
                Capacity::shannons(600_000_000_000),
            ))
            .build();
        let (store, parent_header) = prepare_store(&parent_header, None);
        let result = DaoCalculator::new(&consensus, &store).dao_field(&[], &parent_header);
        assert!(result
            .unwrap_err()
            .to_string()
            .contains("Internal(CapacityOverflow)"));
    }

    // With one transaction, U additionally grows by the net occupied
    // capacity change of its outputs vs inputs (note the larger final U).
    #[test]
    fn check_dao_data_calculation_with_transactions() {
        let consensus = Consensus::default();
        let parent_number = 12345;
        let parent_header = HeaderBuilder::default()
            .number(parent_number.pack())
            .dao(pack_dao_data(
                10_000_000_000_123_456,
                Capacity::shannons(500_000_000_123_000),
                Capacity::shannons(400_000_000_123),
                Capacity::shannons(600_000_000_000),
            ))
            .build();
        let (store, parent_header) = prepare_store(&parent_header, None);
        let input_cell_data = Bytes::from("abcde");
        let input_cell = CellOutput::new_builder()
            .capacity(capacity_bytes!(10000).pack())
            .build();
        let output_cell_data = Bytes::from("abcde12345");
        let output_cell = CellOutput::new_builder()
            .capacity(capacity_bytes!(20000).pack())
            .build();
        let tx = TransactionBuilder::default()
            .output(output_cell)
            .output_data(output_cell_data.pack())
            .build();
        let rtx = ResolvedTransaction {
            transaction: tx,
            resolved_cell_deps: vec![],
            resolved_inputs: vec![
                CellMetaBuilder::from_cell_output(input_cell, input_cell_data).build(),
            ],
            resolved_dep_groups: vec![],
        };
        let result = DaoCalculator::new(&consensus, &store)
            .dao_field(&[rtx], &parent_header)
            .unwrap();
        let dao_data = extract_dao_data(result).unwrap();
        assert_eq!(
            dao_data,
            (
                10_000_586_990_682_998,
                Capacity::shannons(500_079_349_650_985),
                Capacity::shannons(429_314_308_674),
                Capacity::shannons(600_500_000_000)
            )
        );
    }

    // End-to-end withdraw: deposit at block 100, withdraw at block 200; the
    // free capacity is scaled by the AR ratio between the two headers.
    #[test]
    fn check_withdraw_calculation() {
        let data = Bytes::from(vec![1; 10]);
        let output = CellOutput::new_builder()
            .capacity(capacity_bytes!(1000000).pack())
            .build();
        let tx = TransactionBuilder::default()
            .output(output)
            .output_data(data.pack())
            .build();
        let deposit_header = HeaderBuilder::default()
            .number(100.pack())
            .dao(pack_dao_data(
                10_000_000_000_123_456,
                Default::default(),
                Default::default(),
                Default::default(),
            ))
            .build();
        let deposit_block = BlockBuilder::default()
            .header(deposit_header)
            .transaction(tx.clone())
            .build();
        let out_point = OutPoint::new(tx.hash(), 0);
        let withdrawing_header = HeaderBuilder::default()
            .number(200.pack())
            .dao(pack_dao_data(
                10_000_000_001_123_456,
                Default::default(),
                Default::default(),
                Default::default(),
            ))
            .build();
        let withdrawing_block = BlockBuilder::default().header(withdrawing_header).build();
        let store = new_store();
        let txn = store.begin_transaction();
        txn.insert_block(&deposit_block).unwrap();
        txn.attach_block(&deposit_block).unwrap();
        txn.insert_block(&withdrawing_block).unwrap();
        txn.attach_block(&withdrawing_block).unwrap();
        txn.commit().unwrap();
        let consensus = Consensus::default();
        let calculator = DaoCalculator::new(&consensus, &store);
        let result = calculator.maximum_withdraw(&out_point, &withdrawing_block.hash());
        assert_eq!(result.unwrap(), Capacity::shannons(100_000_000_009_999));
    }

    // A deposit capacity near u64::MAX must make the withdraw calculation
    // fail instead of wrapping.
    #[test]
    fn check_withdraw_calculation_overflows() {
        let output = CellOutput::new_builder()
            .capacity(Capacity::shannons(18_446_744_073_709_550_000).pack())
            .build();
        let tx = TransactionBuilder::default().output(output).build();
        let deposit_header = HeaderBuilder::default()
            .number(100.pack())
            .dao(pack_dao_data(
                10_000_000_000_123_456,
                Default::default(),
                Default::default(),
                Default::default(),
            ))
            .build();
        let deposit_block = BlockBuilder::default()
            .header(deposit_header)
            .transaction(tx.clone())
            .build();
        let out_point = OutPoint::new(tx.hash(), 0);
        let withdrawing_header = HeaderBuilder::default()
            .number(200.pack())
            .dao(pack_dao_data(
                10_000_000_001_123_456,
                Default::default(),
                Default::default(),
                Default::default(),
            ))
            .build();
        let withdrawing_block = BlockBuilder::default()
            .header(withdrawing_header.clone())
            .build();
        let store = new_store();
        let txn = store.begin_transaction();
        txn.insert_block(&deposit_block).unwrap();
        txn.attach_block(&deposit_block).unwrap();
        txn.insert_block(&withdrawing_block).unwrap();
        txn.attach_block(&withdrawing_block).unwrap();
        txn.commit().unwrap();
        let consensus = Consensus::default();
        let calculator = DaoCalculator::new(&consensus, &store);
        let result = calculator.maximum_withdraw(&out_point, &withdrawing_header.hash());
        assert!(result.is_err());
    }
}
|
mod commands;
mod events;
// mod server;
use commands::math::*;
use commands::music::*;
use commands::reply::*;
use serenity::{framework::standard::StandardFramework, http::Http, Client};
use songbird::SerenityInit;
use std::{collections::HashSet, thread};
// Start ////////////////////////////////////////////////////////////////////
/// Discord bot entry point: reads the token from the environment, resolves
/// the application owner, wires the command framework + songbird voice
/// support, and runs three shards until the client stops.
///
/// Fix: `unwrap_or("none".to_string())` allocated the fallback string even
/// when DISCORD_TOKEN was set (clippy `or_fun_call`); use the lazy
/// `unwrap_or_else` form instead.
#[tokio::main]
async fn main() {
    // Fall back to a placeholder token so the failure surfaces as a clear
    // connect-time error from serenity rather than a panic here.
    let token = std::env::var("DISCORD_TOKEN").unwrap_or_else(|_| "none".to_string());
    // thread::spawn(|| server::server());
    let http = Http::new_with_token(&token);
    // Resolve the application owner so owner-only framework checks work.
    let (owners, _bot_id) = match http.get_current_application_info().await {
        Ok(info) => {
            let mut owners = HashSet::new();
            owners.insert(info.owner.id);
            (owners, info.id)
        }
        Err(why) => panic!("{:?}", why),
    };
    // Commands are grouped by module; all use the "!" prefix.
    let framework = StandardFramework::new()
        .configure(|c| c.owners(owners).prefix("!"))
        .group(&MATH_GROUP)
        .help(&HELP)
        .group(&MUSIC_GROUP)
        .group(&GENERAL_GROUP);
    let mut client = Client::builder(&token)
        .framework(framework)
        .event_handler(events::Handler)
        .register_songbird()
        .application_id(897082403003187210)
        .await
        .expect("error creating client");
    if let Err(why) = client.start_shards(3).await {
        println!("Client error: {:?}", why);
    }
}
// https://discord.com/api/oauth2/authorize?client_id=897082403003187210&permissions=536870387447&scope=bot
|
/*
chapter 4
syntax and semantics
*/
/// Demonstrates that `Vec` indexing requires a `usize` index: indexing with
/// an `i32` (the commented-out variant) is rejected by the compiler.
fn main() {
    let numbers = vec![1, 2, 3, 4, 5];
    // A `usize` index compiles and runs.
    let index: usize = 0;
    println!("the first element of n is {}", numbers[index]);
    // An `i32` index does not compile:
    /*
    let c: i32 = 0;
    a[c];
    */
}
// output should be:
/*
the first element of n is 1
*/
|
/// Demonstrates a basic closure: binds a one-argument closure to a variable
/// and invokes it once.
pub fn run() {
    let print_year = |year: i32| println!("{}", year);
    print_year(2019);
}
#[doc = r"Register block"]
// svd2rust-generated DMAMUX register block. The `_reserved*` padding fields
// pin every register at the hardware offset stated in its doc comment, so
// the field order and sizes must not change.
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00..0x40 - DMAMux - DMA request line multiplexer channel x control register"]
    pub ccr: [CCR; 16],
    _reserved1: [u8; 0x40],
    #[doc = "0x80 - DMAMUX request line multiplexer interrupt channel status register"]
    pub csr: CSR,
    #[doc = "0x84 - DMAMUX request line multiplexer interrupt clear flag register"]
    pub cfr: CFR,
    _reserved3: [u8; 0x78],
    #[doc = "0x100..0x120 - DMAMux - DMA request generator channel x control register"]
    pub rgcr: [RGCR; 8],
    _reserved4: [u8; 0x20],
    #[doc = "0x140 - DMAMux - DMA request generator status register"]
    pub rgsr: RGSR,
    #[doc = "0x144 - DMAMux - DMA request generator clear flag register"]
    pub rgcfr: RGCFR,
}
// svd2rust-generated register accessor type aliases and their submodules.
// Each `Reg<..._SPEC>` alias pairs a register with the field reader/writer
// types defined in the module of the same name.
#[doc = "CCR (rw) register accessor: DMAMux - DMA request line multiplexer channel x control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ccr`]
module"]
pub type CCR = crate::Reg<ccr::CCR_SPEC>;
#[doc = "DMAMux - DMA request line multiplexer channel x control register"]
pub mod ccr;
#[doc = "RGCR (rw) register accessor: DMAMux - DMA request generator channel x control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rgcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rgcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rgcr`]
module"]
pub type RGCR = crate::Reg<rgcr::RGCR_SPEC>;
#[doc = "DMAMux - DMA request generator channel x control register"]
pub mod rgcr;
#[doc = "RGSR (r) register accessor: DMAMux - DMA request generator status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rgsr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rgsr`]
module"]
pub type RGSR = crate::Reg<rgsr::RGSR_SPEC>;
#[doc = "DMAMux - DMA request generator status register"]
pub mod rgsr;
#[doc = "RGCFR (w) register accessor: DMAMux - DMA request generator clear flag register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rgcfr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rgcfr`]
module"]
pub type RGCFR = crate::Reg<rgcfr::RGCFR_SPEC>;
#[doc = "DMAMux - DMA request generator clear flag register"]
pub mod rgcfr;
#[doc = "CSR (r) register accessor: DMAMUX request line multiplexer interrupt channel status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`csr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`csr`]
module"]
pub type CSR = crate::Reg<csr::CSR_SPEC>;
#[doc = "DMAMUX request line multiplexer interrupt channel status register"]
pub mod csr;
#[doc = "CFR (w) register accessor: DMAMUX request line multiplexer interrupt clear flag register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cfr`]
module"]
pub type CFR = crate::Reg<cfr::CFR_SPEC>;
#[doc = "DMAMUX request line multiplexer interrupt clear flag register"]
pub mod cfr;
|
// svd2rust-generated writer types for the CFR (clear flag) register. Each
// CSOFx bit is write-one-to-clear for the matching SOFx flag in DMAMUX_CSR.
#[doc = "Register `CFR` writer"]
pub type W = crate::W<CFR_SPEC>;
#[doc = "Field `CSOF0` writer - Clear synchronization overrun event flag Writing 1 in each bit clears the corresponding overrun flag SOFx in the DMAMUX_CSR register."]
pub type CSOF0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CSOF1` writer - Clear synchronization overrun event flag Writing 1 in each bit clears the corresponding overrun flag SOFx in the DMAMUX_CSR register."]
pub type CSOF1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CSOF2` writer - Clear synchronization overrun event flag Writing 1 in each bit clears the corresponding overrun flag SOFx in the DMAMUX_CSR register."]
pub type CSOF2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Field-level write accessors for CFR; the const generic is the bit offset.
impl W {
    #[doc = "Bit 0 - Clear synchronization overrun event flag Writing 1 in each bit clears the corresponding overrun flag SOFx in the DMAMUX_CSR register."]
    #[inline(always)]
    #[must_use]
    pub fn csof0(&mut self) -> CSOF0_W<CFR_SPEC, 0> {
        CSOF0_W::new(self)
    }
    #[doc = "Bit 1 - Clear synchronization overrun event flag Writing 1 in each bit clears the corresponding overrun flag SOFx in the DMAMUX_CSR register."]
    #[inline(always)]
    #[must_use]
    pub fn csof1(&mut self) -> CSOF1_W<CFR_SPEC, 1> {
        CSOF1_W::new(self)
    }
    #[doc = "Bit 2 - Clear synchronization overrun event flag Writing 1 in each bit clears the corresponding overrun flag SOFx in the DMAMUX_CSR register."]
    #[inline(always)]
    #[must_use]
    pub fn csof2(&mut self) -> CSOF2_W<CFR_SPEC, 2> {
        CSOF2_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe: the caller must ensure the raw value is valid for the
    // hardware register (no field-level checking is performed).
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DMAMUX request line multiplexer interrupt clear flag register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfr::W`](W). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CFR_SPEC;
// CFR is a 32-bit register.
impl crate::RegisterSpec for CFR_SPEC {
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [`cfr::W`](W) writer structure"]
// Write-only register: no bits need special preservation on modify.
impl crate::Writable for CFR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CFR to value 0"]
impl crate::Resettable for CFR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
extern crate reqwest;
extern crate web3;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
use std::env;
use web3::futures::Future;
use web3::types::BlockId;
// Number of trailing blocks to sample when NUM_BLOCKS is not set.
const DEFAULT_NUM_BLOCKS: usize = 5;
// JSON-RPC response envelope for a raw `eth_blockNumber` call.
// NOTE(review): not referenced anywhere in the code visible here —
// presumably kept for a raw-`reqwest` code path; confirm before removing.
#[derive(Deserialize, Debug)]
struct BlockNumber {
    jsonrpc: String,
    id: String,
    result: String
}
/// Samples the last `NUM_BLOCKS` block timestamps from an Ethereum node and
/// prints the per-block intervals plus max/min/avg/total statistics.
///
/// Fixes:
/// - `0..num_blocks - 1` underflowed (usize) and panicked when
///   NUM_BLOCKS=0; use `saturating_sub`.
/// - `max()`/`min().unwrap()` panicked and the average divided by zero when
///   fewer than two blocks were sampled; the stats are now guarded.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let num_blocks = match env::var("NUM_BLOCKS") {
        Ok(val) => val.parse().unwrap(),
        Err(_) => DEFAULT_NUM_BLOCKS,
    };
    let mut block_timestamps: Vec<usize> = vec![];
    let mut blocktimes: Vec<usize> = vec![];
    let request_url = match env::var("ETH_CLIENT_URL") {
        Ok(val) => val,
        Err(_) => "http://localhost:8545".to_string(),
    };
    let (_eloop, transport) = web3::transports::Http::new(&request_url).unwrap();
    let web3 = web3::Web3::new(transport);
    let latest_blocknum = web3.eth().block_number().wait().unwrap();
    println!("Latest height: {:?}\n", latest_blocknum);
    // Walk backwards from the chain tip, collecting timestamps.
    for n in 0..num_blocks {
        let blocknum = latest_blocknum - n;
        let block_data = web3.eth().block(BlockId::from(blocknum)).wait().unwrap();
        let timestamp = block_data.unwrap().timestamp.as_usize();
        block_timestamps.push(timestamp);
        println!("Block #{:?} has timestamp {:?}", blocknum, timestamp);
    }
    // saturating_sub guards the underflow `num_blocks - 1` hits when
    // NUM_BLOCKS=0 (there are no intervals to compute).
    for n in 0..num_blocks.saturating_sub(1) {
        let later_block = block_timestamps[n];
        let earlier_block = block_timestamps[n + 1];
        // NOTE(review): assumes timestamps are monotonic walking back from
        // the tip; a non-monotonic chain would underflow here — confirm.
        blocktimes.push(later_block - earlier_block);
    }
    println!("\nBlock times: {:?}\n", blocktimes);
    let total_blocktime = blocktimes.iter().sum::<usize>();
    // With no intervals max()/min() return None and the average would be
    // 0/0; only print the per-interval stats when there is data.
    if let (Some(max), Some(min)) = (blocktimes.iter().max(), blocktimes.iter().min()) {
        println!("Max blocktime: {:?}s", max);
        println!("Min blocktime: {:?}s", min);
        println!("Avg blocktime: {:?}s", total_blocktime as f32 / blocktimes.len() as f32);
    }
    println!("Total blocktime: {:?}s ({:?}mins)", total_blocktime, total_blocktime as f32 / 60.0);
    Ok(())
}
|
mod actions;
use yew::prelude::*;
/// Top-level page body component (legacy yew 0.x `Component` API).
pub struct Body;
impl Component for Body {
    // Stateless: no messages and no properties.
    type Message = ();
    type Properties = ();
    fn create(_: Self::Properties, _: ComponentLink<Self>) -> Self {
        Body
    }
    // Never re-renders in response to messages.
    fn update(&mut self, _: Self::Message) -> ShouldRender {
        false
    }
}
// Renders the page body: the Actions child component above a chart
// placeholder div. Uses the legacy yew `html!` syntax (trailing commas
// after attributes, `<Child: />` component tags).
impl Renderable<Body> for Body {
    fn view(&self) -> Html<Self> {
        html! {
            <div class="PageBody",>
                <actions::Actions: />
                <div class="LoadChart", />
            </div>
        }
    }
}
|
/// An enum to represent all characters in the Sharada block.
///
/// Covers U+11180..=U+111DE (signs, independent vowels, consonants, vowel
/// signs, punctuation, and digits). NOTE: the glyphs quoted in the variant
/// docs are astral-plane characters and may render as mojibake in editors
/// without a Sharada font; the `\u{...}` codepoint is authoritative.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Sharada {
    /// \u{11180}
    SignCandrabindu,
    /// \u{11181}
    SignAnusvara,
    /// \u{11182}
    SignVisarga,
    /// \u{11183}
    LetterA,
    /// \u{11184}
    LetterAa,
    /// \u{11185}
    LetterI,
    /// \u{11186}
    LetterIi,
    /// \u{11187}
    LetterU,
    /// \u{11188}
    LetterUu,
    /// \u{11189}
    LetterVocalicR,
    /// \u{1118a}
    LetterVocalicRr,
    /// \u{1118b}
    LetterVocalicL,
    /// \u{1118c}
    LetterVocalicLl,
    /// \u{1118d}
    LetterE,
    /// \u{1118e}
    LetterAi,
    /// \u{1118f}
    LetterO,
    /// \u{11190}
    LetterAu,
    /// \u{11191}
    LetterKa,
    /// \u{11192}
    LetterKha,
    /// \u{11193}
    LetterGa,
    /// \u{11194}
    LetterGha,
    /// \u{11195}
    LetterNga,
    /// \u{11196}
    LetterCa,
    /// \u{11197}
    LetterCha,
    /// \u{11198}
    LetterJa,
    /// \u{11199}
    LetterJha,
    /// \u{1119a}
    LetterNya,
    /// \u{1119b}
    LetterTta,
    /// \u{1119c}
    LetterTtha,
    /// \u{1119d}
    LetterDda,
    /// \u{1119e}
    LetterDdha,
    /// \u{1119f}
    LetterNna,
    /// \u{111a0}
    LetterTa,
    /// \u{111a1}
    LetterTha,
    /// \u{111a2}
    LetterDa,
    /// \u{111a3}
    LetterDha,
    /// \u{111a4}
    LetterNa,
    /// \u{111a5}
    LetterPa,
    /// \u{111a6}
    LetterPha,
    /// \u{111a7}
    LetterBa,
    /// \u{111a8}
    LetterBha,
    /// \u{111a9}
    LetterMa,
    /// \u{111aa}
    LetterYa,
    /// \u{111ab}
    LetterRa,
    /// \u{111ac}
    LetterLa,
    /// \u{111ad}
    LetterLla,
    /// \u{111ae}
    LetterVa,
    /// \u{111af}
    LetterSha,
    /// \u{111b0}
    LetterSsa,
    /// \u{111b1}
    LetterSa,
    /// \u{111b2}
    LetterHa,
    /// \u{111b3}
    VowelSignAa,
    /// \u{111b4}
    VowelSignI,
    /// \u{111b5}
    VowelSignIi,
    /// \u{111b6}
    VowelSignU,
    /// \u{111b7}
    VowelSignUu,
    /// \u{111b8}
    VowelSignVocalicR,
    /// \u{111b9}
    VowelSignVocalicRr,
    /// \u{111ba}
    VowelSignVocalicL,
    /// \u{111bb}
    VowelSignVocalicLl,
    /// \u{111bc}
    VowelSignE,
    /// \u{111bd}
    VowelSignAi,
    /// \u{111be}
    VowelSignO,
    /// \u{111bf}
    VowelSignAu,
    /// \u{111c0}
    SignVirama,
    /// \u{111c1}
    SignAvagraha,
    /// \u{111c2}
    SignJihvamuliya,
    /// \u{111c3}
    SignUpadhmaniya,
    /// \u{111c4}
    Om,
    /// \u{111c5}
    Danda,
    /// \u{111c6}
    DoubleDanda,
    /// \u{111c7}
    AbbreviationSign,
    /// \u{111c8}
    Separator,
    /// \u{111c9}
    SandhiMark,
    /// \u{111ca}
    SignNukta,
    /// \u{111cb}
    VowelModifierMark,
    /// \u{111cc}
    ExtraShortVowelMark,
    /// \u{111cd}
    SutraMark,
    /// \u{111d0}
    DigitZero,
    /// \u{111d1}
    DigitOne,
    /// \u{111d2}
    DigitTwo,
    /// \u{111d3}
    DigitThree,
    /// \u{111d4}
    DigitFour,
    /// \u{111d5}
    DigitFive,
    /// \u{111d6}
    DigitSix,
    /// \u{111d7}
    DigitSeven,
    /// \u{111d8}
    DigitEight,
    /// \u{111d9}
    DigitNine,
    /// \u{111da}
    Ekam,
    /// \u{111db}
    SignSiddham,
    /// \u{111dc}
    Headstroke,
    /// \u{111dd}
    ContinuationSign,
    /// \u{111de}
    SectionMarkDash1,
}
impl Into<char> for Sharada {
fn into(self) -> char {
match self {
Sharada::SignCandrabindu => '๐',
Sharada::SignAnusvara => '๐',
Sharada::SignVisarga => '๐',
Sharada::LetterA => '๐',
Sharada::LetterAa => '๐',
Sharada::LetterI => '๐
',
Sharada::LetterIi => '๐',
Sharada::LetterU => '๐',
Sharada::LetterUu => '๐',
Sharada::LetterVocalicR => '๐',
Sharada::LetterVocalicRr => '๐',
Sharada::LetterVocalicL => '๐',
Sharada::LetterVocalicLl => '๐',
Sharada::LetterE => '๐',
Sharada::LetterAi => '๐',
Sharada::LetterO => '๐',
Sharada::LetterAu => '๐',
Sharada::LetterKa => '๐',
Sharada::LetterKha => '๐',
Sharada::LetterGa => '๐',
Sharada::LetterGha => '๐',
Sharada::LetterNga => '๐',
Sharada::LetterCa => '๐',
Sharada::LetterCha => '๐',
Sharada::LetterJa => '๐',
Sharada::LetterJha => '๐',
Sharada::LetterNya => '๐',
Sharada::LetterTta => '๐',
Sharada::LetterTtha => '๐',
Sharada::LetterDda => '๐',
Sharada::LetterDdha => '๐',
Sharada::LetterNna => '๐',
Sharada::LetterTa => '๐ ',
Sharada::LetterTha => '๐ก',
Sharada::LetterDa => '๐ข',
Sharada::LetterDha => '๐ฃ',
Sharada::LetterNa => '๐ค',
Sharada::LetterPa => '๐ฅ',
Sharada::LetterPha => '๐ฆ',
Sharada::LetterBa => '๐ง',
Sharada::LetterBha => '๐จ',
Sharada::LetterMa => '๐ฉ',
Sharada::LetterYa => '๐ช',
Sharada::LetterRa => '๐ซ',
Sharada::LetterLa => '๐ฌ',
Sharada::LetterLla => '๐ญ',
Sharada::LetterVa => '๐ฎ',
Sharada::LetterSha => '๐ฏ',
Sharada::LetterSsa => '๐ฐ',
Sharada::LetterSa => '๐ฑ',
Sharada::LetterHa => '๐ฒ',
Sharada::VowelSignAa => '๐ณ',
Sharada::VowelSignI => '๐ด',
Sharada::VowelSignIi => '๐ต',
Sharada::VowelSignU => '๐ถ',
Sharada::VowelSignUu => '๐ท',
Sharada::VowelSignVocalicR => '๐ธ',
Sharada::VowelSignVocalicRr => '๐น',
Sharada::VowelSignVocalicL => '๐บ',
Sharada::VowelSignVocalicLl => '๐ป',
Sharada::VowelSignE => '๐ผ',
Sharada::VowelSignAi => '๐ฝ',
Sharada::VowelSignO => '๐พ',
Sharada::VowelSignAu => '๐ฟ',
Sharada::SignVirama => '๐',
Sharada::SignAvagraha => '๐',
Sharada::SignJihvamuliya => '๐',
Sharada::SignUpadhmaniya => '๐',
Sharada::Om => '๐',
Sharada::Danda => '๐
',
Sharada::DoubleDanda => '๐',
Sharada::AbbreviationSign => '๐',
Sharada::Separator => '๐',
Sharada::SandhiMark => '๐',
Sharada::SignNukta => '๐',
Sharada::VowelModifierMark => '๐',
Sharada::ExtraShortVowelMark => '๐',
Sharada::SutraMark => '๐',
Sharada::DigitZero => '๐',
Sharada::DigitOne => '๐',
Sharada::DigitTwo => '๐',
Sharada::DigitThree => '๐',
Sharada::DigitFour => '๐',
Sharada::DigitFive => '๐',
Sharada::DigitSix => '๐',
Sharada::DigitSeven => '๐',
Sharada::DigitEight => '๐',
Sharada::DigitNine => '๐',
Sharada::Ekam => '๐',
Sharada::SignSiddham => '๐',
Sharada::Headstroke => '๐',
Sharada::ContinuationSign => '๐',
Sharada::SectionMarkDash1 => '๐',
}
}
}
// NOTE(review): the character literals below appear mojibake-corrupted --
// many arms share the same literal (making later arms unreachable) and two
// literals are split across lines (invalid char syntax). The intended values
// are presumably the Sharada Unicode block U+11180..U+111DF; regenerate this
// table from the Unicode data files rather than hand-fixing it.
impl std::convert::TryFrom<char> for Sharada {
    type Error = ();
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            '๐' => Ok(Sharada::SignCandrabindu),
            '๐' => Ok(Sharada::SignAnusvara),
            '๐' => Ok(Sharada::SignVisarga),
            '๐' => Ok(Sharada::LetterA),
            '๐' => Ok(Sharada::LetterAa),
            '๐
' => Ok(Sharada::LetterI),
            '๐' => Ok(Sharada::LetterIi),
            '๐' => Ok(Sharada::LetterU),
            '๐' => Ok(Sharada::LetterUu),
            '๐' => Ok(Sharada::LetterVocalicR),
            '๐' => Ok(Sharada::LetterVocalicRr),
            '๐' => Ok(Sharada::LetterVocalicL),
            '๐' => Ok(Sharada::LetterVocalicLl),
            '๐' => Ok(Sharada::LetterE),
            '๐' => Ok(Sharada::LetterAi),
            '๐' => Ok(Sharada::LetterO),
            '๐' => Ok(Sharada::LetterAu),
            '๐' => Ok(Sharada::LetterKa),
            '๐' => Ok(Sharada::LetterKha),
            '๐' => Ok(Sharada::LetterGa),
            '๐' => Ok(Sharada::LetterGha),
            '๐' => Ok(Sharada::LetterNga),
            '๐' => Ok(Sharada::LetterCa),
            '๐' => Ok(Sharada::LetterCha),
            '๐' => Ok(Sharada::LetterJa),
            '๐' => Ok(Sharada::LetterJha),
            '๐' => Ok(Sharada::LetterNya),
            '๐' => Ok(Sharada::LetterTta),
            '๐' => Ok(Sharada::LetterTtha),
            '๐' => Ok(Sharada::LetterDda),
            '๐' => Ok(Sharada::LetterDdha),
            '๐' => Ok(Sharada::LetterNna),
            '๐ ' => Ok(Sharada::LetterTa),
            '๐ก' => Ok(Sharada::LetterTha),
            '๐ข' => Ok(Sharada::LetterDa),
            '๐ฃ' => Ok(Sharada::LetterDha),
            '๐ค' => Ok(Sharada::LetterNa),
            '๐ฅ' => Ok(Sharada::LetterPa),
            '๐ฆ' => Ok(Sharada::LetterPha),
            '๐ง' => Ok(Sharada::LetterBa),
            '๐จ' => Ok(Sharada::LetterBha),
            '๐ฉ' => Ok(Sharada::LetterMa),
            '๐ช' => Ok(Sharada::LetterYa),
            '๐ซ' => Ok(Sharada::LetterRa),
            '๐ฌ' => Ok(Sharada::LetterLa),
            '๐ญ' => Ok(Sharada::LetterLla),
            '๐ฎ' => Ok(Sharada::LetterVa),
            '๐ฏ' => Ok(Sharada::LetterSha),
            '๐ฐ' => Ok(Sharada::LetterSsa),
            '๐ฑ' => Ok(Sharada::LetterSa),
            '๐ฒ' => Ok(Sharada::LetterHa),
            '๐ณ' => Ok(Sharada::VowelSignAa),
            '๐ด' => Ok(Sharada::VowelSignI),
            '๐ต' => Ok(Sharada::VowelSignIi),
            '๐ถ' => Ok(Sharada::VowelSignU),
            '๐ท' => Ok(Sharada::VowelSignUu),
            '๐ธ' => Ok(Sharada::VowelSignVocalicR),
            '๐น' => Ok(Sharada::VowelSignVocalicRr),
            '๐บ' => Ok(Sharada::VowelSignVocalicL),
            '๐ป' => Ok(Sharada::VowelSignVocalicLl),
            '๐ผ' => Ok(Sharada::VowelSignE),
            '๐ฝ' => Ok(Sharada::VowelSignAi),
            '๐พ' => Ok(Sharada::VowelSignO),
            '๐ฟ' => Ok(Sharada::VowelSignAu),
            '๐' => Ok(Sharada::SignVirama),
            '๐' => Ok(Sharada::SignAvagraha),
            '๐' => Ok(Sharada::SignJihvamuliya),
            '๐' => Ok(Sharada::SignUpadhmaniya),
            '๐' => Ok(Sharada::Om),
            '๐
' => Ok(Sharada::Danda),
            '๐' => Ok(Sharada::DoubleDanda),
            '๐' => Ok(Sharada::AbbreviationSign),
            '๐' => Ok(Sharada::Separator),
            '๐' => Ok(Sharada::SandhiMark),
            '๐' => Ok(Sharada::SignNukta),
            '๐' => Ok(Sharada::VowelModifierMark),
            '๐' => Ok(Sharada::ExtraShortVowelMark),
            '๐' => Ok(Sharada::SutraMark),
            '๐' => Ok(Sharada::DigitZero),
            '๐' => Ok(Sharada::DigitOne),
            '๐' => Ok(Sharada::DigitTwo),
            '๐' => Ok(Sharada::DigitThree),
            '๐' => Ok(Sharada::DigitFour),
            '๐' => Ok(Sharada::DigitFive),
            '๐' => Ok(Sharada::DigitSix),
            '๐' => Ok(Sharada::DigitSeven),
            '๐' => Ok(Sharada::DigitEight),
            '๐' => Ok(Sharada::DigitNine),
            '๐' => Ok(Sharada::Ekam),
            '๐' => Ok(Sharada::SignSiddham),
            '๐' => Ok(Sharada::Headstroke),
            '๐' => Ok(Sharada::ContinuationSign),
            '๐' => Ok(Sharada::SectionMarkDash1),
            _ => Err(()),
        }
    }
}
impl Into<u32> for Sharada {
    /// Returns the Unicode scalar value (code point) of this character.
    ///
    /// Fix: the previous implementation round-tripped through
    /// `escape_unicode` string formatting and hex re-parsing; a `char`
    /// converts to its scalar value directly with an `as` cast.
    fn into(self) -> u32 {
        let c: char = self.into();
        c as u32
    }
}
impl std::convert::TryFrom<u32> for Sharada {
    type Error = ();
    /// Attempts to map a Unicode scalar value to a `Sharada` variant.
    ///
    /// Invalid code points and characters outside this block yield `Err(())`.
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        match char::try_from(u) {
            Ok(c) => Self::try_from(c),
            Err(_) => Err(()),
        }
    }
}
impl Iterator for Sharada {
    type Item = Self;
    /// Advances to, and returns, the next character in the block; `None`
    /// once the end of the block is reached.
    ///
    /// Fix: the previous implementation computed the successor but never
    /// updated `self`, so iteration yielded the same value forever.
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        let index: u32 = (*self).into();
        let next = Self::try_from(index + 1).ok()?;
        *self = next;
        Some(next)
    }
}
impl Sharada {
    /// Returns the first character of this Unicode block
    /// (the one with the lowest code point).
    pub fn new() -> Self {
        Self::SignCandrabindu
    }
    /// Returns the character's name, in sentence case.
    pub fn name(&self) -> String {
        string_morph::to_sentence_case(&std::format!("Sharada{:#?}", self))
    }
}
|
use lexer::{Input, State, Reader};
use super::super::utils;
use super::super::token::{Token, TokenKind};
/// Reader that consumes a maximal run of whitespace into one WHITESPACE token.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct WhitespaceReader;
impl Reader<TokenKind> for WhitespaceReader {
    #[inline(always)]
    fn priority(&self) -> usize { 0usize }
    /// Consumes one character; if it is whitespace, keeps consuming until a
    /// non-whitespace character (peeked, not consumed) or end of input, and
    /// returns the accumulated text as a WHITESPACE token. Returns `None`
    /// when the first character is not whitespace.
    fn read(&self, input: &Input, state: &mut State) -> Option<Token> {
        let first = input.read(state);
        if !utils::is_whitespace(first) {
            return None;
        }
        let mut text = String::new();
        text.push(first);
        while !input.done(state) {
            // Peek before consuming so the non-whitespace char stays unread.
            let next = input.char_at(state, 0);
            if !utils::is_whitespace(next) {
                break;
            }
            input.read(state);
            text.push(next);
        }
        Some(Token::new(
            input.new_state_meta(state),
            TokenKind::WHITESPACE,
            text
        ))
    }
}
|
use super::*;
/// Wave-channel control register, stored as a raw byte.
///
/// Bit meanings below are inferred from the accessor names; confirm against
/// the hardware documentation for this sound channel.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[repr(transparent)]
pub struct WaveControl(u8);
impl WaveControl {
    const_new!();
    // Bit 5: presumably selects double-banked wave RAM -- TODO confirm.
    bitfield_bool!(u8; 5, two_banks, with_two_banks, set_two_banks);
    // Bit 6: presumably selects which bank is used/played -- TODO confirm.
    bitfield_bool!(u8; 6, use_bank1, with_use_bank1, set_use_bank1);
    // Bit 7: presumably the playback-enable flag -- TODO confirm.
    bitfield_bool!(u8; 7, playing, with_playing, set_playing);
}
|
use std::fs::File;
use std::io::{BufRead, BufReader};
/// Reads one jump offset per line from `input.txt` and prints the step
/// counts for both puzzle variants.
fn main() {
    let input = File::open("input.txt").expect("Unable to open file!");
    let mut offsets: Vec<i32> = BufReader::new(input)
        .lines()
        .map(|line| line.unwrap().parse().unwrap())
        .collect();
    // `count_steps` mutates its slice, so version two needs a fresh copy.
    let mut offsets_two = offsets.clone();
    println!(
        "Steps version one: {:?}",
        count_steps(&mut offsets, Version::One)
    );
    println!(
        "Steps version two: {:?}",
        count_steps(&mut offsets_two, Version::Two)
    );
}
/// Selects the offset-update rule applied after each jump.
#[derive(Debug, PartialEq)]
enum Version {
    One,
    Two,
}
/// Follows the jump offsets starting at index 0 and returns the number of
/// steps taken until a jump leaves the list (the exiting jump included).
///
/// After each in-bounds jump the departed cell is incremented, except in
/// `Version::Two` where offsets of three or more are decremented instead.
fn count_steps(list: &mut [i32], ver: Version) -> i32 {
    let bound = list.len() as i32;
    let mut pos: i32 = 0;
    // Starts at 1 so the final, out-of-bounds jump is counted.
    let mut steps = 1;
    loop {
        let target = pos + list[pos as usize];
        if target < 0 || target >= bound {
            break;
        }
        let departed = pos as usize;
        pos = target;
        if ver == Version::Two && list[departed] >= 3 {
            list[departed] -= 1;
        } else {
            list[departed] += 1;
        }
        steps += 1;
    }
    steps
}
|
extern crate rand;
extern crate sdl2;
mod mmu;
mod cpu;
mod gfx;
mod term_gfx;
use std::time::Duration;
use std::thread;
use mmu::Mmu;
use cpu::Cpu;
use gfx::Gfx;
fn main() {
    // Hard-coded ROM path; the emulator always boots the UFO ROM.
    let filename = String::from("./roms/UFO");
    let mut mmu = Mmu::new();
    mmu.load_rom(filename);
    let mut cpu = Cpu::new(mmu);
    // The `1` is presumably a display scale factor -- TODO confirm. The
    // `sdl` binding is unused but likely keeps the SDL context alive.
    let (mut gfx, sdl) = Gfx::new(1);
    loop {
        cpu.step();
        gfx.composite(cpu.video_buffer());
        // 1.2ms pause per instruction: crude pacing, not cycle-accurate.
        thread::sleep(Duration::new(0, 1200000));
    }
}
|
#![feature(test)]
extern crate funnel;
#[macro_use]
extern crate lazy_static;
extern crate rand;
extern crate test;
use rand::{
distributions::{Alphanumeric, Standard},
thread_rng, Rng,
};
use test::Bencher;
use funnel::{
bit,
signs::{self, Signs},
};
const MAX: u64 = 10_000_000;
lazy_static! {
    // Signature set built once over MAX synthetic log records so the
    // benchmarks below measure probing, not construction.
    static ref SIGNS: Signs = {
        let mut signs = signs::optimal(8, 0.05);
        for i in 0..MAX {
            let log = Log::gen();
            let mut sign = signs.sign_mut(i);
            sign.add(&("uuid", log.uuid));
            sign.add(&("pref", log.pref));
            sign.add(&("unum", log.unum));
            sign.add(&("inum", log.inum));
            for tag in &log.tags {
                sign.add(&("tags", tag));
            }
        }
        signs
    };
}
/// A synthetic log record used to populate the signature set.
#[derive(Debug, Clone, Hash)]
struct Log {
    uuid: String,
    pref: usize,
    unum: usize,
    inum: isize,
    tags: Vec<u64>,
}
impl Log {
    /// Generates a random record: a 10-char alphanumeric `uuid`, `pref`
    /// in 0..50, `inum` in 0..1000, `unum` in 0..10000, and four random
    /// `u64` tags.
    fn gen() -> Self {
        let mut rng = thread_rng();
        let uuid = rng.sample_iter(&Alphanumeric).take(10).collect::<String>();
        let pref = rng.gen_range(0, 50);
        let inum = rng.gen_range(0, 1000);
        let unum = rng.gen_range(0, 10000);
        let tags = rng.sample_iter(&Standard).take(4).collect::<Vec<u64>>();
        Log {
            uuid,
            pref,
            unum,
            inum,
            tags,
        }
    }
}
use funnel::bit::ops::*;
// Folds a chain of bit-vector ANDs: fold![a, b, c] == a.and(b).and(c).
// The second arm only tolerates a trailing comma.
macro_rules! fold {
    ($i:expr, $($bvs:expr),*) => {
        $i $( .and($bvs) )*
    };
    ($i:expr, $($bvs:expr),* ,) => {
        $i $( .and($bvs) )*
    };
}
// #[bench]
// fn signatures_filter(bench: &mut Bencher) {
// let mut accum = 0;
// let n = 4_000_000;
// let m = 9_000_000;
// bench.iter(|| {
// accum = 0;
// let selected1 = fold![
// SIGNS.bits(0).get(n..m),
// SIGNS.bits(2).get(n..m),
// SIGNS.bits(3).get(n..m),
// SIGNS.bits(6).get(n..m),
// SIGNS.bits(7).get(n..m),
// SIGNS.bits(8).get(n..m),
// SIGNS.bits(10).get(n..m),
// SIGNS.bits(11).get(n..m),
// ];
// let selected2 = fold![
// SIGNS.bits(19).get(n..m),
// SIGNS.bits(21).get(n..m),
// SIGNS.bits(26).get(n..m),
// SIGNS.bits(28).get(n..m),
// SIGNS.bits(29).get(n..m),
// SIGNS.bits(31).get(n..m),
// SIGNS.bits(33).get(n..m),
// SIGNS.bits(34).get(n..m),
// ];
// for page in selected1.and(selected2) {
// accum += bit::count1(&page.value);
// }
// });
// }
#[bench]
fn add(bench: &mut Bencher) {
    // Measures adding 8 fresh ("data", i) entries per iteration to one sign;
    // `n` advances so every iteration inserts previously unseen keys.
    let mut signs = Signs::default();
    let mut n = 0;
    bench.iter(|| {
        for i in n..n + 8 {
            signs.sign_mut(0).add(&("data", i));
        }
        n += 8;
    });
}
#[bench]
fn test(bench: &mut Bencher) {
    // Measures a single membership probe against the prebuilt SIGNS set.
    bench.iter(|| SIGNS.sign(0).test(&("score", 0)));
}
#[bench]
#[ignore]
fn test_all(bench: &mut Bencher) {
    // Probes every sign for two keys; ignored by default because each
    // iteration walks all MAX records.
    bench.iter(|| {
        let mut r = false;
        for i in 0..MAX {
            let a = SIGNS.sign(i).test(&("data", 1));
            let b = SIGNS.sign(i).test(&("data", 2));
            r = a && b;
        }
        r
    })
}
|
use config_rs;
use std::collections::HashMap;
type Config = HashMap<String, String>;
/// Loads configuration into a flat `HashMap<String, String>`.
///
/// Values from the optional `config_file` are merged first; environment
/// variables prefixed with `::CONFIG_PREFIX` are merged on top.
///
/// # Panics
/// Panics if the file or environment cannot be read/parsed, or if the
/// merged settings cannot be deserialized into `Config`.
pub fn load(config_file: String) -> Config {
    // create config object
    let mut settings = config_rs::Config::default();
    // Only merge the file when a path was actually supplied.
    if !config_file.is_empty() {
        settings.merge(config_rs::File::with_name(config_file.as_str())).unwrap();
    }
    // Environment variables override file-provided values.
    settings.merge(config_rs::Environment::with_prefix(::CONFIG_PREFIX)).unwrap();
    settings.try_into::<Config>().unwrap()
}
|
use rustc_serialize::base64;
use rustc_serialize::base64::{ToBase64, FromBase64, FromBase64Error};
use std::convert::AsRef;
/// Shared base64 configuration: standard alphabet, LF newlines, no padding,
/// no line wrapping.
fn config_base64() -> base64::Config {
    base64::Config {
        char_set: base64::CharacterSet::Standard,
        newline: base64::Newline::LF,
        pad: false,
        line_length: None,
    }
}
pub fn to_base64(string: &str) -> String {
let slice: &[u8] = string.as_ref();
(*slice).to_base64(config_base64())
}
pub fn from_base64(string: &str) -> Result<Vec<u8>, FromBase64Error> {
let slice: &[u8] = string.as_ref();
slice.from_base64()
} |
// NOTE(review): design pseudocode for the LSP "window/showMessage" types;
// this is not valid Rust as written -- `type` is a reserved keyword (a real
// field would need `r#type` or a rename), and `impl Trait { method = ... }`
// is not Rust syntax for associated constants.
enum MessageType {
    Error, // 1
    Warning, // 2
    Info, // 3
    Log, // 4
}
// Parameters of the `window/showMessage` notification.
struct ShowMessageNotificationParams {
    type: MessageType,
    message: String,
}
impl Notification for ShowMessageNotificationParams {
    method = "window/showMessage"
}
// A clickable action offered alongside a message request.
struct MessageActionItem {
    title: String,
}
// Parameters of the `window/showMessageRequest` request.
struct ShowMessageRequestParams {
    type: i32,
    message: String,
    actions: Option<Vec<MessageActionItem>>
}
impl ServerRequest for ShowMessageRequestParams {
    method = "window/showMessageRequest"
}
// Response: a MessageActionItem
|
use aes_soft::Aes256;
use block_modes::block_padding::Pkcs7;
use block_modes::{BlockMode, Cbc};
use ed25519_dalek::{
Keypair as Ed25519_Keypair, PublicKey as Ed25519_PublicKey, Signature as Ed25519_Signature,
Signer, Verifier, KEYPAIR_LENGTH, PUBLIC_KEY_LENGTH, SECRET_KEY_LENGTH, SIGNATURE_LENGTH,
};
use postcard::{from_bytes, to_allocvec};
use serde::{Deserialize, Serialize};
use sha3::{Digest, Sha3_256, Sha3_512};
use std::convert::TryFrom;
use std::fmt::{Debug, Formatter, Result as FmtResult};
use std::ops::Rem;
use x25519_dalek::{PublicKey as Ed25519_DH_Public, StaticSecret as Ed25519_DH_Secret};
use chamomile_types::types::PeerId;
// create an alias for convenience
type Aes256Cbc = Cbc<Aes256, Pkcs7>;
/// Supported asymmetric key algorithms.
#[derive(Copy, Clone, Serialize, Deserialize, Debug)]
pub enum KeyType {
    Ed25519, // Ed25519 = 0
    Lattice, // Lattice-based = 1
    None, // None 255
}
impl Default for KeyType {
    /// Defaults to `None` (no algorithm selected).
    fn default() -> Self {
        KeyType::None
    }
}
impl KeyType {
    /// Byte length of a public key for this key type.
    fn pk_len(&self) -> usize {
        match self {
            KeyType::Ed25519 => PUBLIC_KEY_LENGTH,
            _ => 0,
        }
    }
    /// Byte length of a private (secret) key for this key type.
    fn psk_len(&self) -> usize {
        match self {
            KeyType::Ed25519 => SECRET_KEY_LENGTH,
            _ => 0,
        }
    }
    /// Byte length of a signature for this key type.
    fn sign_len(&self) -> usize {
        match self {
            KeyType::Ed25519 => SIGNATURE_LENGTH,
            _ => 0,
        }
    }
    /// Byte length of an ephemeral Diffie-Hellman secret key.
    fn dh_sk_len(&self) -> usize {
        match self {
            KeyType::Ed25519 => 32,
            _ => 0,
        }
    }
    /// Byte length of an ephemeral Diffie-Hellman public key.
    fn dh_pk_len(&self) -> usize {
        match self {
            KeyType::Ed25519 => 32,
            _ => 0,
        }
    }
    /// Generates a fresh keypair for this key type.
    ///
    /// NOTE(review): the name keeps the historical "kepair" typo because it
    /// is a public API; add a correctly spelled alias before renaming.
    pub fn generate_kepair(&self) -> Keypair {
        match self {
            KeyType::Ed25519 => {
                let keypair = Ed25519_Keypair::generate(&mut rand::thread_rng());
                Keypair {
                    key: *self,
                    sk: keypair.secret.as_bytes().to_vec(),
                    pk: keypair.public.as_bytes().to_vec(),
                }
            }
            _ => Default::default(),
        }
    }
    /// Signs `msg` with `keypair`'s secret key.
    ///
    /// Fix: returns an error (instead of panicking, as before) when the
    /// keypair's byte lengths are wrong or the key material is invalid.
    fn sign(&self, keypair: &Keypair, msg: &[u8]) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
        match self {
            KeyType::Ed25519 => {
                if keypair.sk.len() != SECRET_KEY_LENGTH || keypair.pk.len() != PUBLIC_KEY_LENGTH {
                    return Err("invalid keypair length".into());
                }
                let mut keypair_bytes: [u8; KEYPAIR_LENGTH] = [0u8; KEYPAIR_LENGTH];
                keypair_bytes[..SECRET_KEY_LENGTH].copy_from_slice(&keypair.sk);
                keypair_bytes[SECRET_KEY_LENGTH..].copy_from_slice(&keypair.pk);
                let keypair = Ed25519_Keypair::from_bytes(&keypair_bytes)?;
                Ok(keypair.sign(msg).to_bytes().to_vec())
            }
            _ => Ok(Default::default()),
        }
    }
    /// Verifies `sign` over `msg` with the raw public key `pk`.
    ///
    /// Fix: malformed keys or signatures (untrusted network input) now
    /// yield `false` instead of panicking via `unwrap`.
    fn verify(&self, pk: &[u8], msg: &[u8], sign: &[u8]) -> bool {
        match self {
            KeyType::Ed25519 => {
                let ed_pk = match Ed25519_PublicKey::from_bytes(pk) {
                    Ok(k) => k,
                    Err(_) => return false,
                };
                let ed_sign = match Ed25519_Signature::try_from(sign) {
                    Ok(s) => s,
                    Err(_) => return false,
                };
                ed_pk.verify(msg, &ed_sign).is_ok()
            }
            _ => true,
        }
    }
    /// Starts a DH handshake: creates an ephemeral X25519 keypair and
    /// signs its public half with our long-term identity key.
    ///
    /// # Panics
    /// Panics for key types other than Ed25519, and if signing fails.
    pub fn session_key(&self, self_keypair: &Keypair, remote_keypair: &Keypair) -> SessionKey {
        match self {
            KeyType::Ed25519 => {
                let alice_secret = Ed25519_DH_Secret::new(&mut rand::thread_rng());
                let alice_public = Ed25519_DH_Public::from(&alice_secret).as_bytes().to_vec();
                let sign = self_keypair.sign(&alice_public[..]).unwrap();
                SessionKey {
                    key: *self,
                    sk: alice_secret.to_bytes().to_vec(),
                    pk: alice_public,
                    sign: sign,
                    remote: remote_keypair.pk.clone(),
                    is_ok: false,
                    ss: [0u8; 32],
                    iv: [0u8; 16],
                }
            }
            _ => panic!("Not Support"),
        }
    }
    /// X25519 Diffie-Hellman: combines our ephemeral secret `sk` with the
    /// remote ephemeral public key `pk` into the raw shared secret.
    ///
    /// Fix: returns `Err(())` (instead of panicking in `copy_from_slice`)
    /// when either slice is not exactly 32 bytes.
    fn dh(&self, sk: &[u8], pk: &[u8]) -> Result<Vec<u8>, ()> {
        match self {
            KeyType::Ed25519 => {
                if sk.len() != 32 || pk.len() != 32 {
                    return Err(());
                }
                let mut sk_bytes = [0u8; 32];
                sk_bytes.copy_from_slice(sk);
                let mut pk_bytes = [0u8; 32];
                pk_bytes.copy_from_slice(pk);
                let alice_secret: Ed25519_DH_Secret = sk_bytes.into();
                let bob_public: Ed25519_DH_Public = pk_bytes.into();
                Ok(alice_secret.diffie_hellman(&bob_public).as_bytes().to_vec())
            }
            _ => Ok(vec![0u8; 32]),
        }
    }
}
/// A long-term identity keypair; `sk` is empty for public-only copies.
#[derive(Default, Clone, Serialize, Deserialize, Debug)]
pub struct Keypair {
    /// The algorithm these key bytes belong to.
    pub key: KeyType,
    /// Raw secret-key bytes (may be empty).
    pub sk: Vec<u8>,
    /// Raw public-key bytes.
    pub pk: Vec<u8>,
}
/// In-progress or established Diffie-Hellman session state.
#[derive(Clone)]
pub struct SessionKey {
    /// Algorithm of the underlying identity keys.
    key: KeyType,
    /// Our ephemeral DH secret-key bytes.
    sk: Vec<u8>,
    /// Our ephemeral DH public-key bytes.
    pk: Vec<u8>,
    /// Signature over `pk` made with our long-term identity key.
    sign: Vec<u8>,
    /// Remote party's long-term public-key bytes.
    remote: Vec<u8>,
    /// True once the shared secret has been derived (see `in_bytes`).
    is_ok: bool,
    /// Derived shared secret, used as the AES-256 key.
    ss: [u8; 32],
    /// CBC initialization vector derived from `ss`.
    iv: [u8; 16],
}
impl Keypair {
    /// Derives the peer id: SHA3-256 over the raw public-key bytes.
    pub fn peer_id(&self) -> PeerId {
        let mut sha = Sha3_256::new();
        sha.update(&self.pk);
        let mut peer_bytes = [0u8; 32];
        peer_bytes.copy_from_slice(&sha.finalize()[..]);
        PeerId(peer_bytes)
    }
    /// Returns a copy with the secret key stripped (public half only).
    pub fn public(&self) -> Self {
        Keypair {
            key: self.key,
            sk: vec![],
            pk: self.pk.clone(),
        }
    }
    /// Signs `msg` with this keypair's secret key.
    pub fn sign(&self, msg: &[u8]) -> Result<Vec<u8>, ()> {
        // `self` (not `&self`, which is a needless double reference).
        self.key.sign(self, msg).map_err(|_e| ())
    }
    /// Verifies `sign` over `msg` with this keypair's public key.
    pub fn verify(&self, msg: &[u8], sign: &[u8]) -> bool {
        self.key.verify(&self.pk, msg, sign)
    }
    /// Serializes with postcard; returns an empty vector on failure.
    pub fn to_bytes(&self) -> Vec<u8> {
        to_allocvec(self).unwrap_or_default()
    }
    /// Deserializes a keypair previously produced by `to_bytes`.
    pub fn from_bytes(bytes: Vec<u8>) -> Result<Self, ()> {
        from_bytes(&bytes).map_err(|_e| ())
    }
    /// Builds a public-only keypair, validating the key length for `key`.
    pub fn from_pk(key: KeyType, bytes: Vec<u8>) -> Result<Self, ()> {
        if bytes.len() == key.pk_len() {
            Ok(Keypair {
                key,
                sk: vec![],
                pk: bytes,
            })
        } else {
            Err(())
        }
    }
}
/// Simple DH on 25519 to get AES-256 session key.
/// 1. new a tmp public_key and sign it.
/// 2. send tmp public key and signature to remote.
/// 3. receive remote tmp public_key and signature, verify it.
/// 4. use remote public_key and self tmp private key to compute.
/// 5. get session key, and encrypt / decrypt message.
impl SessionKey {
    /// True once `in_bytes` has successfully derived the shared secret.
    pub fn is_ok(&self) -> bool {
        self.is_ok
    }
    /// Builds the AES-256-CBC cipher from the derived key and IV.
    ///
    /// # Panics
    /// Panics via `unwrap` if the key/IV are rejected -- call only after
    /// the handshake has completed (`is_ok() == true`).
    fn cipher(&self) -> Aes256Cbc {
        Aes256Cbc::new_var(&self.ss, &self.iv)
            .map_err(|e| debug!("{:?}", e))
            .unwrap()
    }
    /// Consumes the remote handshake message (tmp public key || signature).
    /// On success derives `ss` (SHA3-256 of the DH output) and `iv` (first
    /// 16 bytes of SHA3-256 of `ss`), then marks the session ready.
    pub fn in_bytes(&mut self, bytes: Vec<u8>) -> bool {
        if bytes.len() < self.key.dh_pk_len() {
            return false;
        }
        let (tmp_pk, tmp_sign) = bytes.split_at(self.key.dh_pk_len());
        if self.key.verify(&self.remote, tmp_pk, tmp_sign) {
            self.key
                .dh(&self.sk, tmp_pk)
                .map(|session_key| {
                    let mut sha = Sha3_256::new();
                    sha.update(session_key);
                    let result = sha.finalize();
                    self.ss.copy_from_slice(&result[..]);
                    let mut n_sha = Sha3_256::new();
                    n_sha.update(&result[..]);
                    self.iv.copy_from_slice(&n_sha.finalize()[..16]);
                    self.is_ok = true;
                    // NOTE(review): this logs the derived shared secret via
                    // Debug -- consider removing for production builds.
                    debug!("{:?}", self);
                })
                .is_ok()
        } else {
            false
        }
    }
    /// Serializes our handshake message: tmp public key || signature.
    pub fn out_bytes(&self) -> Vec<u8> {
        let mut vec = self.pk.clone();
        vec.append(&mut self.sign.clone());
        vec
    }
    /// Encrypts `msg` with the session key (PKCS7-padded AES-256-CBC).
    pub fn encrypt(&self, msg: Vec<u8>) -> Vec<u8> {
        self.cipher().encrypt_vec(&msg)
    }
    /// Decrypts a ciphertext produced by `encrypt`.
    pub fn decrypt(&self, msg: Vec<u8>) -> Result<Vec<u8>, ()> {
        self.cipher().decrypt_vec(&msg).map_err(|_e| ())
    }
}
impl Debug for SessionKey {
    /// Formats the shared secret as a lowercase hex string.
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        let hex: String = self
            .ss
            .iter()
            .map(|byte| format!("{:02x?}", byte))
            .collect();
        write!(f, "Shared Secret: 0x{}", hex)
    }
}
|
//! Timers
// TODO: on the h7x3 at least, only TIM2, TIM3, TIM4, TIM5 can support 32 bits.
// TIM1 is 16 bit.
use crate::hal::timer::{CountDown, Periodic};
use crate::stm32::{LPTIM1, LPTIM2, LPTIM3, LPTIM4, LPTIM5};
use crate::stm32::{
TIM1, TIM12, TIM13, TIM14, TIM15, TIM16, TIM17, TIM2, TIM3, TIM4, TIM5,
TIM6, TIM7, TIM8,
};
use cast::{u16, u32};
use nb;
use void::Void;
use crate::rcc::Ccdr;
use crate::stm32::rcc::{d2ccip2r, d3ccipr};
use crate::time::Hertz;
use stm32h7::Variant::Val;
/// Associate clocks with timers
pub trait GetClk {
    /// Returns the timer's current kernel clock, or `None` if it is not running.
    fn get_clk(ccdr: &Ccdr) -> Option<Hertz>;
}
/// Timers with CK_INT derived from rcc_tim[xy]_ker_ck
macro_rules! impl_tim_ker_ck {
    ($($ckX:ident: $($TIMX:ident),+)+) => {
        $(
            $(
                impl GetClk for $TIMX {
                    // These kernel clocks are always available, hence `Some`.
                    fn get_clk(ccdr: &Ccdr) -> Option<Hertz> {
                        Some(ccdr.clocks.$ckX())
                    }
                }
            )+
        )+
    }
}
// Advanced/general-purpose timers split across the two timer kernel clocks.
impl_tim_ker_ck! {
    timx_ker_ck: TIM2, TIM3, TIM4, TIM5, TIM6, TIM7, TIM12, TIM13, TIM14
    timy_ker_ck: TIM1, TIM8, TIM15, TIM16, TIM17
}
/// LPTIM1 Kernel Clock
impl GetClk for LPTIM1 {
    /// Current kernel clock, per the D2CCIP2R LPTIM1SEL mux.
    ///
    /// NOTE: selecting LSE or LSI currently panics via `unimplemented!`.
    fn get_clk(ccdr: &Ccdr) -> Option<Hertz> {
        match ccdr.rb.d2ccip2r.read().lptim1sel().variant() {
            Val(d2ccip2r::LPTIM1SEL_A::RCC_PCLK1) => Some(ccdr.clocks.pclk1()),
            Val(d2ccip2r::LPTIM1SEL_A::PLL2_P) => ccdr.clocks.pll2_p_ck(),
            Val(d2ccip2r::LPTIM1SEL_A::PLL3_R) => ccdr.clocks.pll3_r_ck(),
            Val(d2ccip2r::LPTIM1SEL_A::LSE) => unimplemented!(),
            Val(d2ccip2r::LPTIM1SEL_A::LSI) => unimplemented!(),
            Val(d2ccip2r::LPTIM1SEL_A::PER) => ccdr.clocks.per_ck(),
            _ => unreachable!(),
        }
    }
}
/// LPTIM2 Kernel Clock
impl GetClk for LPTIM2 {
    /// Current kernel clock, per the D3CCIPR LPTIM2SEL mux.
    ///
    /// NOTE: selecting LSE or LSI currently panics via `unimplemented!`.
    fn get_clk(ccdr: &Ccdr) -> Option<Hertz> {
        match ccdr.rb.d3ccipr.read().lptim2sel().variant() {
            Val(d3ccipr::LPTIM2SEL_A::RCC_PCLK4) => Some(ccdr.clocks.pclk4()),
            Val(d3ccipr::LPTIM2SEL_A::PLL2_P) => ccdr.clocks.pll2_p_ck(),
            Val(d3ccipr::LPTIM2SEL_A::PLL3_R) => ccdr.clocks.pll3_r_ck(),
            Val(d3ccipr::LPTIM2SEL_A::LSE) => unimplemented!(),
            Val(d3ccipr::LPTIM2SEL_A::LSI) => unimplemented!(),
            Val(d3ccipr::LPTIM2SEL_A::PER) => ccdr.clocks.per_ck(),
            _ => unreachable!(),
        }
    }
}
/// LPTIM345 Kernel Clock
macro_rules! impl_clk_lptim345 {
    ($($TIMX:ident),+) => {
        $(
            impl GetClk for $TIMX {
                /// Current kernel clock, per the shared D3CCIPR LPTIM345SEL mux.
                ///
                /// NOTE: selecting LSE or LSI currently panics via `unimplemented!`.
                fn get_clk(ccdr: &Ccdr) -> Option<Hertz> {
                    match ccdr.rb.d3ccipr.read().lptim345sel().variant() {
                        Val(d3ccipr::LPTIM345SEL_A::RCC_PCLK4) => Some(ccdr.clocks.pclk4()),
                        Val(d3ccipr::LPTIM345SEL_A::PLL2_P) => ccdr.clocks.pll2_p_ck(),
                        Val(d3ccipr::LPTIM345SEL_A::PLL3_R) => ccdr.clocks.pll3_r_ck(),
                        Val(d3ccipr::LPTIM345SEL_A::LSE) => unimplemented!(),
                        Val(d3ccipr::LPTIM345SEL_A::LSI) => unimplemented!(),
                        Val(d3ccipr::LPTIM345SEL_A::PER) => ccdr.clocks.per_ck(),
                        _ => unreachable!(),
                    }
                }
            }
        )+
    }
}
// LPTIM3/4/5 all share the same kernel clock mux.
impl_clk_lptim345! { LPTIM3, LPTIM4, LPTIM5 }
/// External trait for hardware timers
pub trait TimerExt<TIM> {
    /// Consumes the raw peripheral and returns a configured periodic timer.
    fn timer<T>(self, timeout: T, ccdr: &mut Ccdr) -> Timer<TIM>
    where
        T: Into<Hertz>;
}
/// Hardware timers
pub struct Timer<TIM> {
    /// Kernel clock frequency (Hz) captured at construction.
    clk: u32,
    /// The owned timer peripheral.
    tim: TIM,
    /// Most recently requested count-down frequency.
    timeout: Hertz,
}
/// Interrupt events
pub enum Event {
    /// Timer timed out / count down ended
    // Currently the only interrupt source exposed by this driver.
    TimeOut,
}
// Generates the CountDown/Periodic/TimerExt implementations for each timer:
// hal! { TIMX: (constructor_name, apb_bus, enable_bit, reset_bit), ... }
macro_rules! hal {
    ($($TIMX:ident: ($timX:ident, $apb:ident, $timXen:ident, $timXrst:ident),)+) => {
        $(
            impl Periodic for Timer<$TIMX> {}
            impl CountDown for Timer<$TIMX> {
                type Time = Hertz;
                #[allow(unused_unsafe)]
                fn start<T>(&mut self, timeout: T)
                where
                    T: Into<Hertz>,
                {
                    // Pause
                    self.pause();
                    // Reset counter
                    self.tim.cnt.reset();
                    // UEV event occurs on next overflow only (URS), so the
                    // register writes below do not themselves set UIF
                    self.tim.cr1.modify(|_, w| w.urs().counter_only());
                    self.clear_uif_bit();
                    // Set PSC and ARR
                    self.set_freq(timeout);
                    // Start counter
                    self.resume()
                }
                fn wait(&mut self) -> nb::Result<(), Void> {
                    if self.tim.sr.read().uif().bit_is_clear() {
                        Err(nb::Error::WouldBlock)
                    } else {
                        // Acknowledge the update flag so the next period is observed.
                        self.clear_uif_bit();
                        Ok(())
                    }
                }
            }
            impl TimerExt<$TIMX> for $TIMX {
                fn timer<T>(self, timeout: T, ccdr: &mut Ccdr) -> Timer<$TIMX>
                where
                    T: Into<Hertz>,
                {
                    Timer::$timX(self, timeout, ccdr)
                }
            }
            impl Timer<$TIMX> {
                /// Configures a TIM peripheral as a periodic count down timer
                pub fn $timX<T>(tim: $TIMX, timeout: T, ccdr: &mut Ccdr) -> Self
                where
                    T: Into<Hertz>,
                {
                    // enable and reset peripheral to a clean slate state
                    ccdr.$apb.enr().modify(|_, w| w.$timXen().set_bit());
                    ccdr.$apb.rstr().modify(|_, w| w.$timXrst().set_bit());
                    ccdr.$apb.rstr().modify(|_, w| w.$timXrst().clear_bit());
                    let clk = $TIMX::get_clk(&ccdr)
                        .expect("Timer input clock not running!").0;
                    let mut timer = Timer {
                        clk,
                        tim,
                        timeout: Hertz(0),
                    };
                    timer.start(timeout);
                    timer
                }
                /// Sets the prescaler and auto-reload for the requested frequency.
                ///
                /// NOTE(review): divides by the requested frequency and casts
                /// through u16 -- a zero frequency, or one outside what
                /// clk / (PSC+1) / ARR can express, will panic here.
                pub fn set_freq<T>(&mut self, timeout: T)
                where
                    T: Into<Hertz>,
                {
                    self.timeout = timeout.into();
                    let clk = self.clk;
                    let frequency = self.timeout.0;
                    let ticks = clk / frequency;
                    let psc = u16((ticks - 1) / (1 << 16)).unwrap();
                    self.tim.psc.write(|w| { w.psc().bits(psc) });
                    let arr = u16(ticks / u32(psc + 1)).unwrap();
                    // `bits` is unsafe in the PAC; arr fits in 16 bits by the
                    // u16 conversion above.
                    self.tim.arr.write(|w| unsafe { w.bits(u32(arr)) });
                }
                /// Clear uif bit
                pub fn clear_uif_bit(&mut self) {
                    self.tim.sr.modify(|_, w| w.uif().clear_bit());
                }
                /// Pauses the TIM peripheral
                pub fn pause(&mut self) {
                    self.tim.cr1.modify(|_, w| w.cen().clear_bit());
                }
                /// Resume (unpause) the TIM peripheral
                pub fn resume(&mut self) {
                    self.tim.cr1.modify(|_, w| w.cen().set_bit());
                }
                /// Reset the counter of the TIM peripheral
                pub fn reset_counter(&mut self) {
                    self.tim.cnt.reset();
                }
                /// Read the counter of the TIM peripheral
                pub fn counter(&self) -> u32 {
                    self.tim.cnt.read().bits()
                }
                /// Starts listening for an `event`
                pub fn listen(&mut self, event: Event) {
                    match event {
                        Event::TimeOut => {
                            // Enable update event interrupt
                            self.tim.dier.write(|w| w.uie().set_bit());
                        }
                    }
                }
                /// Stops listening for an `event`
                pub fn unlisten(&mut self, event: Event) {
                    match event {
                        Event::TimeOut => {
                            // Disable update event interrupt
                            self.tim.dier.write(|w| w.uie().clear_bit());
                        }
                    }
                }
                /// Releases the TIM peripheral
                pub fn free(mut self) -> $TIMX {
                    // pause counter
                    self.pause();
                    self.tim
                }
            }
        )+
    }
}
hal! {
    TIM1: (tim1, apb2, tim1en, tim1rst),
    TIM2: (tim2, apb1l, tim2en, tim2rst),
    // TIM3: (tim3, APB1, tim3en, tim3rst),
    // TIM4: (tim4, APB1, tim4en, tim4rst),
    // NOTE(review): the commented-out TIM5 entry references tim7en/tim7rst --
    // likely a copy-paste slip to correct before enabling it.
    // TIM5: (tim5, APB1, tim7en, tim7rst),
}
|
//! Defines how the chess board is represented in memory.
use std::fmt;
use utils::parse_fen;
/// `WHITE` or `BLACK`.
pub type Color = usize;
pub const WHITE: Color = 0;
pub const BLACK: Color = 1;
/// `KING`, `QUEEN`, `ROOK`, `BISHOP`, `KNIGHT`, `PAWN` or `PIECE_NONE`.
pub type PieceType = usize;
pub const KING: PieceType = 0;
pub const QUEEN: PieceType = 1;
pub const ROOK: PieceType = 2;
pub const BISHOP: PieceType = 3;
pub const KNIGHT: PieceType = 4;
pub const PAWN: PieceType = 5;
pub const PIECE_NONE: PieceType = 6;
/// From 0 to 63 (0 is A1, 1 is B1, .. , 62 is G8, 63 is H8).
pub type Square = usize;
/// A set of squares on the chessboard.
///
/// `u64` bit-sets called *bitboards* can be used to represent a set
/// of squares on the chessboard. For example, the set of squares that
/// are occupied by white rooks in the beginning of the game is: `1 <<
/// A1 | 1 << H1`. `0` represents the empty set, `0xffffffffffffffff`
/// represents the set of all 64 squares on the board.
pub type Bitboard = u64;
/// Describes how the pieces are placed on the board.
///
/// NOTE(review): presumably the `piece_type` bitboards are pairwise
/// disjoint and their union equals `color[WHITE] | color[BLACK]` -- this
/// invariant is not enforced here; confirm where instances are built.
#[derive(Clone, Debug)]
pub struct PiecesPlacement {
    /// An array of occupation bitboards indexed by piece type. For
    /// example, `pieces_placement.piece_type[PAWN]` gives the set of
    /// all pawns on the board (white and black).
    pub piece_type: [Bitboard; 6],
    /// An array of occupation bitboards indexed by color. For
    /// example, `pieces_placement.color[WHITE]` gives the set of all
    /// white pieces and pawns on the board.
    pub color: [Bitboard; 2],
}
impl fmt::Display for PiecesPlacement {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut s = String::new();
for rank in (0..8).rev() {
s.push('\n');
for file in 0..8 {
let square = Board::square(file, rank);
let bb = 1 << square;
let piece = match bb {
x if x & self.piece_type[KING] != 0 => 'k',
x if x & self.piece_type[QUEEN] != 0 => 'q',
x if x & self.piece_type[ROOK] != 0 => 'r',
x if x & self.piece_type[BISHOP] != 0 => 'b',
x if x & self.piece_type[KNIGHT] != 0 => 'n',
x if x & self.piece_type[PAWN] != 0 => 'p',
_ => '.',
};
if bb & self.color[WHITE] != 0 {
s.push(piece.to_uppercase().next().unwrap());
} else {
s.push(piece);
}
}
}
writeln!(f, "{}", s)
}
}
/// `QUEENSIDE` or `KINGSIDE`.
///
/// The values double as bit offsets when packing castling flags.
pub type CastlingSide = usize;
pub const QUEENSIDE: CastlingSide = 0;
pub const KINGSIDE: CastlingSide = 1;
/// Holds information about which player can castle on which side.
///
/// The castling rights are held in a `usize` value. The lowest 4 bits
/// of the value contain the whole needed information. It is laid out
/// in the following way:
///
/// ```text
///  usize                    3   2   1   0
///  +----------------------+---+---+---+---+
///  |                      |   |   |   |   |
///  |    Unused (zeros)    |Castling flags |
///  |                      |   |   |   |   |
///  +----------------------+---+---+---+---+
///
///  bit 0 -- if set, white can castle on queen-side;
///  bit 1 -- if set, white can castle on king-side;
///  bit 2 -- if set, black can castle on queen-side;
///  bit 3 -- if set, black can castle on king-side.
/// ```
#[derive(Clone, Copy, Debug)]
pub struct CastlingRights(usize);
impl CastlingRights {
    /// Creates a new instance.
    ///
    /// The least significant 4 bits of `value` are used as a raw
    /// value for the new instance.
    #[inline]
    pub fn new(value: usize) -> CastlingRights {
        CastlingRights(value & 0b1111)
    }
    /// Returns the contained raw value (between 0 and 15).
    #[inline]
    pub fn value(&self) -> usize {
        self.0
    }
    /// Grants a given player the right to castle on a given side.
    ///
    /// This method returns `true` if the player did not have the
    /// right to castle on the given side before this method was
    /// called, and `false` otherwise.
    //
    // NOTE(review): uses `assert!` while the rest of this impl uses
    // `debug_assert!` -- confirm whether release-mode checking is intended.
    pub fn grant(&mut self, player: Color, side: CastlingSide) -> bool {
        assert!(player <= 1);
        assert!(side <= 1);
        let rights_before = self.0;
        // Each player owns two adjacent bits: shift by 2*player, then by side.
        let granted = 1 << (player << 1) << side;
        self.0 |= granted;
        granted & !rights_before != 0
    }
    /// Updates the castling rights after played move.
    ///
    /// `orig_square` and `dest_square` describe the played move.
    #[inline]
    pub fn update(&mut self, orig_square: Square, dest_square: Square) {
        debug_assert!(orig_square <= 63);
        debug_assert!(dest_square <= 63);
        const WQ: usize = (1 << (WHITE << 1) << QUEENSIDE);
        const WK: usize = (1 << (WHITE << 1) << KINGSIDE);
        const W: usize = WQ | WK;
        const BQ: usize = (1 << (BLACK << 1) << QUEENSIDE);
        const BK: usize = (1 << (BLACK << 1) << KINGSIDE);
        const B: usize = BQ | BK;
        // On each move, the value of `CASTLING_RELATION` for the
        // origin and destination squares should be AND-ed with the
        // castling rights value, to derive the updated castling
        // rights.
        //
        // Rank 1 (A1..H1) is the first row, rank 8 (A8..H8) the last:
        // rook-home squares clear one right, king-home squares clear both.
        const CASTLING_RELATION: [usize; 64] = [
            !WQ, !0, !0, !0, !W, !0, !0, !WK,
            !0, !0, !0, !0, !0, !0, !0, !0,
            !0, !0, !0, !0, !0, !0, !0, !0,
            !0, !0, !0, !0, !0, !0, !0, !0,
            !0, !0, !0, !0, !0, !0, !0, !0,
            !0, !0, !0, !0, !0, !0, !0, !0,
            !0, !0, !0, !0, !0, !0, !0, !0,
            !BQ, !0, !0, !0, !B, !0, !0, !BK
        ];
        self.0 &= CASTLING_RELATION[orig_square] & CASTLING_RELATION[dest_square];
    }
    /// Returns if a given player has the rights to castle on a given
    /// side.
    #[inline]
    pub fn can_castle(&self, player: Color, side: CastlingSide) -> bool {
        debug_assert!(player <= 1);
        debug_assert!(side <= 1);
        (1 << (player << 1) << side) & self.0 != 0
    }
}
impl fmt::Display for CastlingRights {
    /// Writes the FEN-style castling field: one of "Q", "K", "q", "k"
    /// for each right currently held, in that fixed order.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut value = self.value();
        for s in ["Q", "K", "q", "k"].iter() {
            if value & 1 == 1 {
                // `?` replaces the deprecated `try!` macro.
                f.write_str(s)?;
            }
            value >>= 1;
        }
        Ok(())
    }
}
/// Represents an illegal position error.
// NOTE(review): a unit struct without Debug; deriving Debug would let
// callers `unwrap`/`expect` a `Result<_, IllegalBoard>` in tests.
pub struct IllegalBoard;
/// Holds a chess position.
#[derive(Clone, Debug)]
pub struct Board {
    /// The placement of the pieces on the board.
    pub pieces: PiecesPlacement,
    /// The side to move.
    pub to_move: Color,
    /// The castling rights for both players.
    pub castling_rights: CastlingRights,
    /// If the previous move was a double pawn push, contains pushed
    /// pawn's file (a value between 0 and 7). Otherwise contains `8`.
    // NOTE(review): an `Option<usize>` would make the `8` sentinel explicit.
    pub enpassant_file: usize,
    /// The set of all occupied squares on the board.
    ///
    /// Always equals `self.pieces.color[WHITE] |
    /// self.pieces.color[BLACK]`. Deserves a field on its own because
    /// it is very frequently needed.
    pub occupied: Bitboard,
}
impl Board {
    /// Creates a new instance from Forsyth-Edwards Notation (FEN).
    pub fn from_fen(fen: &str) -> Result<Board, IllegalBoard> {
        parse_fen(fen).map(|x| x.0)
    }
    /// Returns the square on given file and rank.
    ///
    /// * `file` should be a number between 0 and 7 (0 is file A, 7 is file H).
    /// * `rank` should be a number between 0 and 7 (0 is rank 1, 7 is rank 8).
    #[inline]
    pub fn square(file: usize, rank: usize) -> Square {
        debug_assert!(file < 8);
        debug_assert!(rank < 8);
        file + (rank << 3)
    }
    /// Returns the file of a given square.
    ///
    /// The returned number will be between 0 and 7 (0 is file A, 7 is file H).
    #[inline]
    pub fn file(square: Square) -> usize {
        debug_assert!(square <= 63);
        square & 7
    }
    /// Returns the rank of a given square.
    ///
    /// The returned number will be between 0 and 7 (0 is rank 1, 7 is rank 8).
    #[inline]
    pub fn rank(square: Square) -> usize {
        debug_assert!(square <= 63);
        square / 8
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use squares::*;
    // Exercises can_castle/update/grant round-trips: a rook leaving H8
    // must strip black's king-side right; grant restores it exactly once.
    #[test]
    fn castling_rights() {
        let mut c = CastlingRights::new(0b1110);
        assert_eq!(c.can_castle(WHITE, QUEENSIDE), false);
        assert_eq!(c.can_castle(WHITE, KINGSIDE), true);
        assert_eq!(c.can_castle(BLACK, QUEENSIDE), true);
        assert_eq!(c.can_castle(BLACK, KINGSIDE), true);
        c.update(H8, H7);
        assert_eq!(c.can_castle(WHITE, QUEENSIDE), false);
        assert_eq!(c.can_castle(WHITE, KINGSIDE), true);
        assert_eq!(c.can_castle(BLACK, QUEENSIDE), true);
        assert_eq!(c.can_castle(BLACK, KINGSIDE), false);
        assert_eq!(c.value(), 0b0110);
        assert_eq!(c.grant(BLACK, KINGSIDE), true);
        assert_eq!(c.grant(BLACK, KINGSIDE), false);
        assert_eq!(c.value(), 0b1110);
    }
}
|
use criterion::{criterion_group, criterion_main, Bencher, Criterion};
use rand::{
distributions::{Distribution, Standard, Uniform},
Rng, SeedableRng,
};
use rand_distr::Alphanumeric;
use rand_regex::Regex;
use rand_xorshift::XorShiftRng;
/// Baseline: sample 10..=20 alphanumeric bytes directly from `rand`.
fn alphanumeric_baseline(b: &mut Bencher<'_>) {
    let mut rng = XorShiftRng::seed_from_u64(0);
    let count_distr = Uniform::new_inclusive(10, 20);
    b.iter(|| {
        // Pick the length first, mirroring what the regex {10,20} does.
        let count = count_distr.sample(&mut rng);
        Alphanumeric
            .sample_iter(&mut rng)
            .take(count)
            .collect::<Vec<u8>>()
    });
}
/// Same distribution as the baseline, generated via a compiled regex.
fn alphanumeric_rand_regex(b: &mut Bencher<'_>) {
    let regex = Regex::compile("[0-9a-zA-Z]{10,20}", 100).unwrap();
    let mut rng = XorShiftRng::seed_from_u64(0);
    b.iter(|| -> Vec<u8> { rng.sample(&regex) })
}
/// Baseline: sample 10 arbitrary chars directly from the Standard distribution.
fn all_char_baseline(b: &mut Bencher<'_>) {
    let mut rng = XorShiftRng::seed_from_u64(0);
    b.iter(|| {
        Distribution::<char>::sample_iter(Standard, &mut rng)
            .take(10)
            .collect::<String>()
    });
}
/// Same workload via a regex; `(?s:.)` lets `.` match any character.
fn all_char_rand_regex(b: &mut Bencher<'_>) {
    let regex = Regex::compile("(?s:.{10})", 100).unwrap();
    let mut rng = XorShiftRng::seed_from_u64(0);
    b.iter(|| -> Vec<u8> { rng.sample(&regex) })
}
/// Registers both benchmark groups (baseline vs rand_regex) with criterion.
fn run_benchmark(c: &mut Criterion) {
    let mut group = c.benchmark_group("alphanumeric");
    group.bench_function("baseline", alphanumeric_baseline);
    group.bench_function("rand_regex", alphanumeric_rand_regex);
    group.finish();
    let mut group = c.benchmark_group("all_char");
    group.bench_function("baseline", all_char_baseline);
    group.bench_function("rand_regex", all_char_rand_regex);
    group.finish();
}
criterion_group!(benches, run_benchmark);
criterion_main!(benches);
|
/// Returns row `r` (0-indexed) of Pascal's triangle.
///
/// Uses the multiplicative identity C(r, k+1) = C(r, k) * (r - k) / (k + 1),
/// which stays exact because each intermediate product is divisible.
pub fn get_row(r: i32) -> Vec<i32> {
    let n = r as usize;
    let mut row = Vec::with_capacity(n + 1);
    let mut entry: usize = 1;
    row.push(entry as i32);
    for k in 0..n {
        entry = entry * (n - k) / (k + 1);
        row.push(entry as i32);
    }
    row
}
use crate::system::System;
use imgui::{im_str, Condition, Slider, Window};
use std::f32::consts::PI;
mod simulation;
mod system;
fn main() {
    println!("Hello, world!");
    // Initialize the windowing/graphics host (see the system module).
    let system = System::init("Slime Simulation");
    // ---- Computing to an image buffer ----
    let sim = simulation::Simulation::init(system.device.clone(), system.queue.clone());
    // ---- Window imgui loop ----
    // Per-frame UI closure: exposes simulation and fade parameters as widgets.
    system.main_loop(sim, move |_, sim_parameters, fade_parameters, ui| {
        Window::new(im_str!("Hello World!"))
            .size([300.0, 200.0], Condition::FirstUseEver)
            .build(ui, || {
                ui.push_item_width(100.0);
                ui.text(im_str!("Hello World!"));
                ui.input_float(im_str!("Speed (px/s)"), &mut sim_parameters.agent_speed)
                    .build();
                ui.input_float(
                    im_str!("Turn speed (rad/s)"),
                    &mut sim_parameters.agent_turn_speed,
                )
                .build();
                ui.input_int(im_str!("Sensor radius"), &mut sim_parameters.sensor_radius)
                    .build();
                // Sensor angle is clamped to [0, pi] radians by the slider.
                Slider::new(im_str!("Sensor angles"))
                    .range(0.0..=PI)
                    .build(&ui, &mut sim_parameters.sensor_angle_spacing);
                ui.input_float(im_str!("Fade speed"), &mut fade_parameters.evaporate_speed)
                    .build();
            });
    })
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.