text stringlengths 8 4.13M |
|---|
pub mod request;
pub mod response;
pub mod session;
pub mod user;
|
use std::collections::HashMap;
use anyhow::{anyhow, Result};
use itertools::Itertools as _;
use petgraph::graph::{DiGraph, NodeIndex};
use crate::Challenge;
/// Advent of Code 2020, day 7 ("Handy Haversacks") challenge marker type.
pub struct Day07;
// Bag-containment graph: nodes are bag colours (names are kept in a separate
// `HashMap<String, NodeIndex>`), edge weights are how many of the target bag
// the source bag directly contains.
type Rules = DiGraph<(), usize>;
impl Challenge for Day07 {
    const DAY_NUMBER: u32 = 7;

    type InputType = (Rules, HashMap<String, NodeIndex>);
    type OutputType = usize;

    /// Counts the bag colours that can (transitively) contain a shiny gold
    /// bag: walk the containment edges backwards from "shiny gold" and count
    /// every reachable node except the start node itself.
    fn part1(input: &Self::InputType) -> Result<Self::OutputType> {
        use petgraph::visit::{Dfs, Reversed};

        let (graph, indexes) = input;
        let start = indexes["shiny gold"];
        let reversed = Reversed(graph);
        let mut search = Dfs::new(&reversed, start);
        let mut reachable = 0;
        while search.next(&reversed).is_some() {
            reachable += 1;
        }
        // The DFS also visits the start node, which does not contain itself.
        Ok(reachable - 1)
    }

    /// Counts how many bags one shiny gold bag must contain in total,
    /// excluding the shiny gold bag itself.
    fn part2(input: &Self::InputType) -> Result<Self::OutputType> {
        let (graph, indexes) = input;
        let mut memo = HashMap::new();
        Ok(get_number_of_bags(indexes["shiny gold"], graph, &mut memo) - 1)
    }

    /// Parses one rule per line into a directed graph whose edges point from
    /// a container bag to each bag it directly holds, weighted by the count.
    fn parse(content: &str) -> Result<Self::InputType> {
        let parsed = content
            .lines()
            .map(parse_rule)
            .collect::<Result<Vec<_>>>()?;

        let mut graph = Rules::new();
        let mut indexes = HashMap::new();
        for (container, contents) in parsed {
            // Nodes are created lazily the first time a colour is seen.
            let from = *indexes
                .entry(container)
                .or_insert_with(|| graph.add_node(()));
            for (contained, count) in contents {
                let to = *indexes
                    .entry(contained)
                    .or_insert_with(|| graph.add_node(()));
                graph.add_edge(from, to, count);
            }
        }
        Ok((graph, indexes))
    }
}
/// Returns the total number of bags in the subtree rooted at `node`,
/// including `node` itself (hence the trailing `+ 1`).
///
/// Results for already-expanded nodes are memoized in `cache`, so each node
/// is computed at most once even when bags share sub-bags.
fn get_number_of_bags(
    node: NodeIndex,
    rules: &Rules,
    cache: &mut HashMap<NodeIndex, usize>,
) -> usize {
    use petgraph::Direction;
    rules
        .neighbors_directed(node, Direction::Outgoing)
        .map(|next_node| {
            // Edge weight = how many copies of `next_node` this bag holds.
            let mult = rules
                .edge_weight(rules.find_edge(node, next_node).unwrap())
                .unwrap();
            // Single `cache.get` lookup instead of `contains_key` + index,
            // which hashed the key twice.
            let bag_size = match cache.get(&next_node) {
                Some(&n) => n,
                None => {
                    let n = get_number_of_bags(next_node, rules, cache);
                    cache.insert(next_node, n);
                    n
                }
            };
            mult * bag_size
        })
        .sum::<usize>()
        + 1
}
fn parse_rule(rule: &str) -> Result<(String, Vec<(String, usize)>)> {
let (name, contained) = rule
.split(" bags contain ")
.collect_tuple()
.ok_or_else(|| anyhow!("could not parse rule"))?;
let contained = if contained == "no other bags." {
Vec::new()
} else {
contained
.split(", ")
.map(|c| {
let (n_str, name) = c
.splitn(2, ' ')
.collect_tuple()
.ok_or_else(|| anyhow!("could not parse rule"))?;
let n = n_str.parse()?;
Ok((
name.trim_end_matches('.')
.trim_end_matches(" bags")
.trim_end_matches(" bag")
.to_owned(),
n,
))
})
.collect::<Result<Vec<(String, usize)>>>()?
};
Ok((name.to_owned(), contained))
}
#[cfg(test)]
mod tests {
    use super::*;

    // Example input from the AoC 2020 day 7 puzzle statement.
    const EXAMPLE1: &str = "light red bags contain 1 bright white bag, 2 muted yellow bags.
dark orange bags contain 3 bright white bags, 4 muted yellow bags.
bright white bags contain 1 shiny gold bag.
muted yellow bags contain 2 shiny gold bags, 9 faded blue bags.
shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags.
dark olive bags contain 3 faded blue bags, 4 dotted black bags.
vibrant plum bags contain 5 faded blue bags, 6 dotted black bags.
faded blue bags contain no other bags.
dotted black bags contain no other bags.";

    // Second example, used only for part 2 in the puzzle statement.
    const EXAMPLE2: &str = "shiny gold bags contain 2 dark red bags.
dark red bags contain 2 dark orange bags.
dark orange bags contain 2 dark yellow bags.
dark yellow bags contain 2 dark green bags.
dark green bags contain 2 dark blue bags.
dark blue bags contain 2 dark violet bags.
dark violet bags contain no other bags.";

    // Spot-checks the parsed graph: known colours are present and the
    // "light red -> muted yellow" edge carries the expected count of 2.
    #[test]
    fn test_parse() {
        let (rules, indexes) = Day07::parse(EXAMPLE1).expect("should parse");
        assert!(indexes.contains_key::<str>("dark orange"));
        assert!(indexes.contains_key::<str>("bright white"));
        let light_red = indexes["light red"];
        let muted_yellow = indexes["muted yellow"];
        assert_eq!(
            rules
                .edge_weight(rules.find_edge(light_red, muted_yellow).unwrap())
                .copied(),
            Some(2)
        );
    }

    // Expected answers are the ones given in the puzzle statement.
    #[test]
    fn test_part1() {
        assert_eq!(Day07::solve1(EXAMPLE1).unwrap(), 4);
    }

    #[test]
    fn test_part2() {
        assert_eq!(Day07::solve2(EXAMPLE1).unwrap(), 32);
        assert_eq!(Day07::solve2(EXAMPLE2).unwrap(), 126);
    }
}
crate::benchmark_challenge!(crate::day07::Day07);
|
//! # PubNub Core
//!
//! Provides the common high-level logic for PubNub clients.
//!
//! - Fully `async`/`await` ready.
//! - Modular, bring your own [`Transport`] and [`Runtime`].
//! - Multiplexes subscription polling for multiple logical streams over a
//! single transport invocation to optimize kernel network subsystem pressure.
//!
//! Build your own client, or use preconfigured [`pubnub-hyper`](pubnub-hyper).
#![deny(
clippy::all,
clippy::pedantic,
missing_docs,
missing_debug_implementations,
missing_copy_implementations,
intra_doc_link_resolution_failure
)]
#![allow(clippy::doc_markdown)]
#![forbid(unsafe_code)]
pub use crate::builder::Builder;
pub use crate::pubnub::PubNub;
pub use crate::runtime::Runtime;
pub use crate::subscription::Subscription;
pub use crate::transport::{Service as TransportService, Transport};
pub use json;
pub use async_trait::async_trait;
mod builder;
pub mod data;
mod pubnub;
mod runtime;
mod subscription;
mod transport;
#[cfg(feature = "mock")]
pub mod mock;
|
use actix_web::get;
use actix_web::web::HttpResponse;
use serde::Serialize;
/// JSON body returned by the `/_matrix/client/versions` endpoint.
#[derive(Serialize)]
struct VersionResponseBody<'a> {
    // The Matrix specification version string.
    version: &'a str,
}
/// The current Matrix version of the server.
const MATRIX_VERSION: &str = "1.0";
/// The response body returned by versions endpoint.
const VERSION_RESPONSE: VersionResponseBody = VersionResponseBody {
    version: MATRIX_VERSION,
};
/// `GET /_matrix/client/versions` — advertises the Matrix version this
/// server implements.
#[get("/_matrix/client/versions")]
pub async fn version() -> HttpResponse {
    HttpResponse::Ok().json(VERSION_RESPONSE)
}
|
mod board;
mod color;
mod direction;
mod file;
pub mod game;
mod name;
mod piece;
mod rank;
mod square;
|
use std::iter::FromIterator;
use crate::{
grid::dimension::CompleteDimensionVecRecords,
grid::records::{ExactRecords, Records},
settings::TableOption,
};
/// A structure used to set [`Table`] height via a list of rows heights.
///
/// [`Table`]: crate::Table
#[derive(Debug)]
pub struct HeightList {
    // One height per table row, in row order.
    list: Vec<usize>,
}
impl HeightList {
/// Creates a new object.
pub fn new(list: Vec<usize>) -> Self {
Self { list }
}
}
impl From<Vec<usize>> for HeightList {
fn from(list: Vec<usize>) -> Self {
Self::new(list)
}
}
impl FromIterator<usize> for HeightList {
fn from_iter<T: IntoIterator<Item = usize>>(iter: T) -> Self {
Self::new(iter.into_iter().collect())
}
}
impl<R, C> TableOption<R, CompleteDimensionVecRecords<'_>, C> for HeightList
where
    R: ExactRecords + Records,
{
    fn change(self, records: &mut R, _: &mut C, dims: &mut CompleteDimensionVecRecords<'_>) {
        // Ignore the request when fewer heights were supplied than the table
        // has rows; otherwise some rows would be left without a height.
        if self.list.len() < records.count_rows() {
            return;
        }
        dims.set_heights(self.list);
    }
}
|
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;
/// Generates `From` impls (written to `$OUT_DIR/conv.rs`) converting between
/// every pair of distinct tensor types in `supported_types`.
///
/// Each generated impl deep-clones the source storage while casting every
/// element to the destination element type.
///
/// # Panics
/// Panics if `OUT_DIR` is unset or the generated file cannot be created
/// or written.
fn implement_type_conversion_for_trait() {
    let out_dir = env::var("OUT_DIR").unwrap();
    let dest_path = Path::new(&out_dir).join("conv.rs");
    let mut f = File::create(&dest_path).unwrap();
    // (tensor type, backing storage type, element type)
    let supported_types = [
        ("ByteTensor", "ByteStorage", "u8"),
        ("CharTensor", "CharStorage", "i8"),
        ("FloatTensor", "FloatStorage", "f32"),
        ("DoubleTensor", "DoubleStorage", "f64"),
        ("IntTensor", "IntStorage", "i32"),
        ("LongTensor", "LongStorage", "i64"),
        ("ShortTensor", "ShortStorage", "i16")
    ];
    for src in &supported_types {
        for dest in &supported_types {
            if src != dest {
                // Fix: the generated doc comment previously claimed every
                // conversion casts "from `u8` to `i8`" regardless of the
                // actual pair; it now names the real element types.
                f.write_all(format!("
impl<'a> From<&'a {src}> for {dest}
{{
    /// Perform type casting from {src} to {dest}.
    /// This is done by deep cloning on entire storage while also
    /// casting each element in storage from `{src_ty}` to `{dest_ty}` at the same time.
    /// The return tensor is completely independent from original tensor.
    fn from(src: &'a {src}) -> {dest} {{
        let (size, stride) = src.shape();
        let src_data = src.data();
        let mut storage = {dest_storage}::new_with_size(src_data.len());
        let data = storage.data_mut();
        data.iter_mut().zip(src_data.iter()).for_each(|(dest, src)| *dest = *src as {dest_ty});
        {dest}::new_with_storage_nd(storage, 0, size, stride)
    }}
}}
",
                    src=src.0,
                    src_ty=src.2,
                    dest=dest.0,
                    dest_storage=dest.1,
                    dest_ty=dest.2
                ).as_bytes()).expect("Auto implementation error on implementing From for each tensor");
            }
        }
    }
}
/// Build-script entry point: generates the tensor conversion impls and tells
/// Cargo where to search for the native library.
fn main() {
    // implement auto conversion
    implement_type_conversion_for_trait();
    // Link search path for the bundled C library.
    println!(r"cargo:rustc-link-search=clib");
}
use crate::utils::translators;
use std::char;
pub fn decode_str(val: &str) -> String {
let mut decoded: String = String::new();
let mut byte_buf: u32 = 0;
let mut buf_size: usize = 0;
let padding = val.len() - val.trim_end_matches('=').len();
let val = val.trim_end_matches('=');
//cw==
for c in val.chars() {
buf_size += 1;
// dbg!(c);
byte_buf |= translators::from_base64(c) as u32;
// println!("1: {:b} buf_size: {}", byte_buf, buf_size);
if buf_size == 4 {
let bytes = decode_match(byte_buf, buf_size);
// println!("inside: {}", bytes);
decoded.push_str(&bytes.iter().map(|x| *x as char).collect::<String>());
buf_size = 0;
}
byte_buf <<= 6;
}
// println!("2: {:b} buf_size: {}", byte_buf, buf_size);
if padding == 2 {
let bytes = decode_match(byte_buf >> 10, buf_size);
decoded.push_str(&bytes.iter().map(|x| *x as char).collect::<String>());
}
if padding == 1 {
let bytes = decode_match(byte_buf >> 8, buf_size);
decoded.push_str(&bytes.iter().map(|x| *x as char).collect::<String>());
}
decoded
}
/// Decodes a base64 string into its raw bytes.
///
/// Sextets are OR-ed into a 32-bit accumulator and flushed to bytes after
/// every four input characters; the stripped `=` padding determines how the
/// final partial group is shifted down before flushing.
pub fn decode_str_to_u8(val: &str) -> Vec<u8> {
    let padding = val.len() - val.trim_end_matches('=').len();
    let val = val.trim_end_matches('=');

    let mut out: Vec<u8> = Vec::new();
    let mut acc: u32 = 0;
    let mut group_len: usize = 0;
    for c in val.chars() {
        group_len += 1;
        acc |= translators::from_base64(c) as u32;
        if group_len == 4 {
            // A full 24-bit group is ready: emit its three bytes.
            out.extend(decode_match(acc, group_len));
            group_len = 0;
        }
        // Make room for the next sextet.
        acc <<= 6;
    }
    // Flush the trailing partial group, if any. The shift realigns the bits
    // according to how many padding characters were stripped.
    match padding {
        2 => out.extend(decode_match(acc >> 10, group_len)),
        1 => out.extend(decode_match(acc >> 8, group_len)),
        _ => {}
    }
    out
}
/// Extracts up to three bytes from the low 24 bits of `byte_buf`.
///
/// `buf_size` is the number of base64 characters that contributed to the
/// buffer: 4 yields three bytes, 3 yields two, 2 yields one, and 0 or 1
/// yields nothing (a lone sextet cannot form a byte).
///
/// # Panics
/// Panics if `buf_size` is greater than 4.
fn decode_match(byte_buf: u32, mut buf_size: usize) -> Vec<u8> {
    let mut buf: Vec<u8> = Vec::new();
    while buf_size > 1 {
        // Map the remaining character count to the bit offset of the next byte.
        let shift = match buf_size {
            4 => 16,
            3 => 8,
            2 => 0,
            _ => panic!("Error in decode_match, unknown value found"),
        };
        buf.push(((byte_buf >> shift) & 0xFF) as u8);
        buf_size -= 1;
    }
    buf
}
#[cfg(test)]
mod tests {
    use super::*;

    // Wikipedia's canonical base64 example sentence and its encoding.
    const ASCII: &'static str = "Man is distinguished, not only by his reason, but by this singular passion from other animals, which is a lust of the mind, that by a perseverance of delight in the continued and indefatigable generation of knowledge, exceeds the short vehemence of any carnal pleasure.";
    const BASE64: &'static str = "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=";

    // Exploratory: prints the alphabet only, makes no assertions.
    #[test]
    pub fn test_alpha_to_hex() {
        let alpha = "abcdefghijklmnopqrstuvwxyz";
        for a in alpha.chars() {
            println!("{}", a);
        }
    }

    // End-to-end decode of the long reference string.
    #[test]
    pub fn test_base64_to_ascii() {
        assert_eq!(decode_str(BASE64), ASCII);
    }

    // Exploratory: demonstrates XOR round-tripping (a ^ b ^ b == a) by
    // printing intermediate values; makes no assertions.
    #[test]
    pub fn test_xor_values() {
        let val = "abcdefg"; // a
        let alpha = 'a'; // b
        dbg!(alpha as u32);
        println!("alpha as u32 as hex {:x}", alpha as u32);
        println!("alpha to_digit as hex {:x}", alpha.to_digit(16).unwrap());
        println!("alpha to_digit as hex {:b}", alpha.to_digit(16).unwrap());
        dbg!(translators::ascii_to_hex(&alpha.to_string()));
        let hex_enc_val: Vec<u32> = val.chars().map(|x| x as u32).collect(); // convert to hex
        dbg!(&hex_enc_val);
        let xor_env_val: Vec<u32> = val.chars().map(|x| x as u32 ^ alpha as u32).collect(); // xor a ^ b = c
        dbg!(&xor_env_val);
        let re_xor_env_val: Vec<u32> = xor_env_val.iter().map(|x| *x ^ alpha as u32).collect(); // xor c ^ b = a
        dbg!(&re_xor_env_val);
        let decoded: Vec<&str> = re_xor_env_val.iter().map(|x| {
            translators::hex_val_to_ascii(*x).1
        }).collect();
        dbg!(decoded);
    }

    // Exploratory: prints char / u32 / hex-digit views of a hex string.
    #[test]
    pub fn test_xor_single() {
        let val = "1b37";
        for v in val.chars() {
            print!("|char {} ", v);
            print!("|as u32 {} ", v as u32);
            print!("|to_digit(16) {} ", v.to_digit(16).unwrap());
            println!();
        }
    }

    // Decoding across all padding cases (2, 1 and 0 trailing '=').
    #[test]
    pub fn test_run_2() {
        let test_val = ("s", "cw==");
        dbg!(test_val);
        assert_eq!(decode_str(test_val.1),test_val.0);
        let test_val = ("Ma", "TWE=");
        dbg!(test_val);
        assert_eq!(decode_str(test_val.1),test_val.0);
        let test_val = ("Man", "TWFu");
        dbg!(test_val);
        assert_eq!(decode_str(test_val.1),test_val.0);
        let test_val = ("Man is", "TWFuIGlz");
        dbg!(test_val);
        assert_eq!(decode_str(test_val.1),test_val.0);
        let test_val = ("Man is d", "TWFuIGlzIGQ=");
        dbg!(test_val);
        assert_eq!(decode_str(test_val.1),test_val.0);
        let test_val = ("Man is di", "TWFuIGlzIGRp");
        dbg!(test_val);
        assert_eq!(decode_str(test_val.1),test_val.0);
        let test_val = ("Man is dis", "TWFuIGlzIGRpcw==");
        dbg!(test_val);
        assert_eq!(decode_str(test_val.1),test_val.0);
    }

    // Smallest double-padded input.
    #[test]
    pub fn test_decode() {
        let test_val = ("s", "cw==");
        dbg!(test_val);
        assert_eq!(decode_str(test_val.1),test_val.0);
    }

    // Confirms how padding length is derived via `trim_end_matches`.
    #[test]
    pub fn test_trim_count() {
        let x = "cw==";
        let y = x.trim_end_matches('=');
        assert_eq!(y.len(), x.len() -2);
    }
}
extern crate cc;
use std::env;
use std::fs::{self, File};
use std::io::{self, Read, Write};
use std::path::{Path, PathBuf};
const ENV_NAME: &str = "NATIVE_VERSIONING_VERSION";
/// Error enum.
///
/// Aggregates the failure modes of this build helper: filesystem access,
/// missing Cargo environment variables, and string formatting.
#[derive(Debug)]
pub enum Error {
    /// I/O failure (e.g. while reading git metadata or writing the header).
    Io(::std::io::Error),
    /// A required environment variable (such as `CARGO_PKG_VERSION_*`) was
    /// absent or not valid Unicode.
    EnvVar(::std::env::VarError),
    /// Formatting into the version string failed.
    Fmt(::std::fmt::Error)
}
impl From<::std::io::Error> for Error {
    fn from(error: ::std::io::Error) -> Self {
        Error::Io(error)
    }
}
impl From<::std::env::VarError> for Error {
    fn from(error: ::std::env::VarError) -> Self {
        Error::EnvVar(error)
    }
}
impl From<::std::fmt::Error> for Error {
    fn from(error: ::std::fmt::Error) -> Self {
        Error::Fmt(error)
    }
}
/// Returns the first eight characters of the current git commit hash, or
/// `None` when no `.git` directory exists (e.g. building from a tarball).
///
/// A symbolic `ref: ` HEAD is followed one level to the file it names.
///
/// # Errors
/// Returns any I/O error other than the `.git` directory being absent, and
/// `InvalidData` when the resolved ref is shorter than eight characters.
fn git_shorthash() -> io::Result<Option<String>> {
    let git_base = Path::new(".git");
    if let Err(e) = fs::metadata(git_base) {
        match e.kind() {
            // Not a git checkout — report "no hash" rather than an error.
            io::ErrorKind::NotFound => return Ok(None),
            _ => return Err(e)
        }
    }
    let mut contents = String::new();
    let mut file = File::open(git_base.join("HEAD"))?;
    file.read_to_string(&mut contents)?;
    if contents.starts_with("ref: ") {
        // HEAD is symbolic: read the ref file it points at.
        // `trim_end` replaces the deprecated `trim_right`.
        let ref_path = git_base.join(contents[5..].trim_end());
        let mut ref_file = File::open(ref_path)?;
        contents.truncate(0);
        ref_file.read_to_string(&mut contents)?;
    }
    if contents.len() < 8 {
        Err(io::Error::new(io::ErrorKind::InvalidData, "invalid git ref"))
    } else {
        Ok(Some(contents[..8].into()))
    }
}
/// Builds the crate version string `v<major>_<minor>_<patch>[_<pre>]` from
/// the `CARGO_PKG_VERSION_*` environment variables Cargo sets during builds.
fn crate_version() -> Result<String, Error> {
    let major = env::var("CARGO_PKG_VERSION_MAJOR")?;
    let minor = env::var("CARGO_PKG_VERSION_MINOR")?;
    let patch = env::var("CARGO_PKG_VERSION_PATCH")?;
    let pre = env::var("CARGO_PKG_VERSION_PRE")?;

    let mut version = format!("v{}_{}_{}", major, minor, patch);
    // Pre-release tags (e.g. "beta.1") are appended only when present.
    if !pre.is_empty() {
        version.push('_');
        version.push_str(&pre);
    }
    Ok(version)
}
/// Full version string: the crate version, plus `_<shorthash>` when
/// building inside a git checkout.
fn version() -> Result<String, Error> {
    let mut version = crate_version()?;
    if let Some(shorthash) = git_shorthash()? {
        version.push('_');
        version.push_str(&shorthash);
    }
    Ok(version)
}
/// Trait that provides the `include_header()` method for `cc::Build`.
pub trait HeaderInclude {
    /// Adds a header file to this compilation.
    ///
    /// Adding a header via this method has the same effect as specifying the
    /// file with double quotation marks in an `#include` directive on the first
    /// line of every source file being compiled by `self`. If you use this
    /// method multiple times, files are included in the order this method is
    /// called.
    ///
    /// (The description above paraphrases MSDN's documentation of the
    /// force-include compiler option.)
    ///
    /// # Panics
    ///
    /// Panics if an error occurred while determining the compiler that will be
    /// used or if the determined compiler is unknown.
    fn include_header<P: AsRef<Path>>(&mut self, header: P) -> &mut Self;
}
impl HeaderInclude for cc::Build {
    fn include_header<P: AsRef<Path>>(&mut self, header: P) -> &mut Self {
        let compiler = self.get_compiler();
        // GCC/Clang and MSVC spell the force-include flag differently;
        // anything else is unsupported and aborts the build script.
        if compiler.is_like_gnu() || compiler.is_like_clang() {
            self.flag("-include").flag(&header.as_ref().display().to_string())
        } else if compiler.is_like_msvc() {
            self.flag("/FI").flag(&header.as_ref().display().to_string())
        } else {
            panic!("determined compiler is unknown")
        }
    }
}
/// Generates the versioned header file with the version mangling CPP macro and
/// exports an environment variable with the current project's version.
///
/// The header is generated in a file named `header_filename` in the path
/// `include_dir`. The versioned macro will be named `macro_name`. The
/// environment variable is exported by printing
/// `cargo:rustc-env=NATIVE_VERSIONING_VERSION=$value` to `stdout`.
///
/// # Errors
///
/// Returns an error if the version cannot be determined or if the include
/// directory or header file cannot be created or written.
pub fn write_versioned_header<I, H>(
    include_dir: I,
    header_filename: H,
    macro_name: &str,
) -> Result<PathBuf, Error>
    where I: AsRef<Path>, H: AsRef<Path>
{
    let include_dir = include_dir.as_ref();
    let versioned_h = include_dir.join(header_filename.as_ref());
    let version = version()?;
    fs::create_dir_all(include_dir)?;
    let mut file = File::create(&versioned_h)?;
    // `writeln!` instead of `write!` with a hand-written trailing "\n".
    writeln!(file, "#define {}(sym) sym ## _{}", macro_name, version)?;
    println!("cargo:rustc-env={}={}", ENV_NAME, version);
    Ok(versioned_h)
}
|
use std::thread;
use std::sync::mpsc::{Receiver, sync_channel};
use websocket::{ClientBuilder, Message};
use websocket::result::WebSocketResult;
use websocket::stream::Stream;
use websocket::client::sync::Client;
use websocket::message::OwnedMessage;
use serde_json;
/// A trade message received from the GDAX websocket feed.
///
/// Numeric-looking fields (`size`, `price`, `time`) are kept as strings
/// exactly as the exchange sends them; callers parse them as needed.
/// NOTE(review): the field set looks like GDAX "match" messages — confirm
/// against the exchange's feed documentation.
#[derive(Serialize, Deserialize)]
pub struct GDAXMessage {
    // Renamed because `type` is a Rust keyword.
    #[serde(rename = "type")]
    pub type_name: String,
    pub trade_id: i64,
    pub sequence: i64,
    pub maker_order_id: String,
    pub taker_order_id: String,
    pub time: String,
    pub product_id: String,
    pub size: String,
    pub price: String,
    pub side: String
}
/// Connects to the exchange websocket at `url`, subscribes to `products`,
/// and spawns a background thread that forwards every parsed message through
/// the returned channel. If the incoming-message stream ends, the thread
/// re-subscribes and keeps reading forever.
///
/// # Panics
/// Panics if `url` cannot be parsed or the connection cannot be established;
/// the background thread panics if a (re)subscription or a channel send fails.
pub fn start_receiver_thread(url: &str, products: Vec<String>) -> Receiver<GDAXMessage> {
    // Construct a websocket client to connect to the exchange server.
    let mut client = match ClientBuilder::new(url) {
        Ok(mut client_builder) => {
            match client_builder.connect(None) {
                Ok(client) => client,
                Err(err) => panic!("Failed to connect: {:?}", err),
            }
        },
        Err(err) => panic!("Failed to parse url: {:?}", err),
    };
    start_stream(&mut client, &products).expect("Failed to start stream");
    /*
     * Push the receiver off into a separate thread, queueing each message out
     * to the main thread.
     */
    // Bounded channel: at most 500 undelivered messages before the reader
    // thread blocks on `send`.
    let (tx, rx) = sync_channel(500);
    thread::spawn(move || {
        let prod = products;
        loop {
            for message in client.incoming_messages() {
                match message {
                    Ok(message) => {
                        // Frames that don't parse into a GDAXMessage are dropped.
                        match parse_message(message) {
                            Some(value) => tx.send(value).unwrap(),
                            None => (),
                        };
                    },
                    Err(_) => {
                        println!("Error receiving message, restarting stream.");
                    },
                };
            }
            // The message iterator ended: issue the subscribe request again.
            start_stream(&mut client, &prod).expect("Failed to restart stream");
        }
    });
    rx
}
/// Sends the subscription request for `products` over the websocket.
///
/// Takes `&[String]` rather than `&Vec<String>` (clippy `ptr_arg`): any slice
/// of product ids works, and existing `&Vec<String>` callers coerce implicitly.
fn start_stream<S>(client: &mut Client<S>, products: &[String]) -> WebSocketResult<()>
    where S: Stream
{
    // Issue the subscribe request to the exchange.
    let msg = json!({"type": "subscribe", "product_ids": products});
    client.send_message(&Message::text(msg.to_string()))
}
/// Attempts to deserialize a text websocket frame into a [`GDAXMessage`].
///
/// Non-text frames and frames that fail to deserialize both yield `None`.
fn parse_message(message: OwnedMessage) -> Option<GDAXMessage> {
    if let OwnedMessage::Text(data) = message {
        serde_json::from_str(data.as_str()).ok()
    } else {
        None
    }
}
|
/// AoC 2018 day 2 part 1: reads box ids from stdin and prints the checksum —
/// (ids with a letter appearing exactly twice) × (ids with a letter
/// appearing exactly three times).
fn main() {
    use std::collections::HashMap;
    use std::io::{self, BufRead};

    let stdin = io::stdin();
    let mut pairs = 0;
    let mut triples = 0;
    for line in stdin.lock().lines() {
        // Tally how often each letter occurs in this id.
        let mut counts: HashMap<char, i32> = HashMap::new();
        for ch in line.unwrap().chars() {
            *counts.entry(ch).or_insert(0) += 1;
        }
        // An id contributes at most once to each counter.
        if counts.values().any(|&n| n == 2) {
            pairs += 1;
        }
        if counts.values().any(|&n| n == 3) {
            triples += 1;
        }
    }
    println!("{}", pairs * triples);
}
|
#[doc = r"Register block"]
// Memory map of one MDMA channel. Field order and the `_reserved*` padding
// follow the register offsets stated in the per-field `#[doc]` attributes.
// NOTE(review): this block appears svd2rust-generated — do not edit by hand.
#[repr(C)]
pub struct CH {
    #[doc = "0x00 - MDMA channel x interrupt/status register"]
    pub isr: ISR,
    #[doc = "0x04 - MDMA channel x interrupt flag clear register"]
    pub ifcr: IFCR,
    #[doc = "0x08 - MDMA Channel x error status register"]
    pub esr: ESR,
    #[doc = "0x0c - This register is used to control the concerned channel."]
    pub cr: CR,
    #[doc = "0x10 - This register is used to configure the concerned channel."]
    pub tcr: TCR,
    #[doc = "0x14 - MDMA Channel x block number of data register"]
    pub bndtr: BNDTR,
    #[doc = "0x18 - MDMA channel x source address register"]
    pub sar: SAR,
    #[doc = "0x1c - MDMA channel x destination address register"]
    pub dar: DAR,
    #[doc = "0x20 - MDMA channel x Block Repeat address Update register"]
    pub brur: BRUR,
    #[doc = "0x24 - MDMA channel x Link Address register"]
    pub lar: LAR,
    #[doc = "0x28 - MDMA channel x Trigger and Bus selection Register"]
    pub tbr: TBR,
    // Pads the undocumented gap at offsets 0x2c..0x30.
    _reserved11: [u8; 0x04],
    #[doc = "0x30 - MDMA channel x Mask address register"]
    pub mar: MAR,
    #[doc = "0x34 - MDMA channel x Mask Data register"]
    pub mdr: MDR,
    // Trailing padding (0x38..0x40) — presumably aligns to the next
    // channel's register block; confirm against the reference manual.
    _reserved_end: [u8; 0x08],
}
#[doc = "ISR (r) register accessor: MDMA channel x interrupt/status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`isr`]
module"]
pub type ISR = crate::Reg<isr::ISR_SPEC>;
#[doc = "MDMA channel x interrupt/status register"]
pub mod isr;
#[doc = "IFCR (w) register accessor: MDMA channel x interrupt flag clear register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ifcr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ifcr`]
module"]
pub type IFCR = crate::Reg<ifcr::IFCR_SPEC>;
#[doc = "MDMA channel x interrupt flag clear register"]
pub mod ifcr;
#[doc = "ESR (r) register accessor: MDMA Channel x error status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`esr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`esr`]
module"]
pub type ESR = crate::Reg<esr::ESR_SPEC>;
#[doc = "MDMA Channel x error status register"]
pub mod esr;
#[doc = "CR (rw) register accessor: This register is used to control the concerned channel.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr`]
module"]
pub type CR = crate::Reg<cr::CR_SPEC>;
#[doc = "This register is used to control the concerned channel."]
pub mod cr;
#[doc = "TCR (rw) register accessor: This register is used to configure the concerned channel.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tcr`]
module"]
pub type TCR = crate::Reg<tcr::TCR_SPEC>;
#[doc = "This register is used to configure the concerned channel."]
pub mod tcr;
#[doc = "BNDTR (rw) register accessor: MDMA Channel x block number of data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bndtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bndtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bndtr`]
module"]
pub type BNDTR = crate::Reg<bndtr::BNDTR_SPEC>;
#[doc = "MDMA Channel x block number of data register"]
pub mod bndtr;
#[doc = "SAR (rw) register accessor: MDMA channel x source address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`sar`]
module"]
pub type SAR = crate::Reg<sar::SAR_SPEC>;
#[doc = "MDMA channel x source address register"]
pub mod sar;
#[doc = "DAR (rw) register accessor: MDMA channel x destination address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dar`]
module"]
pub type DAR = crate::Reg<dar::DAR_SPEC>;
#[doc = "MDMA channel x destination address register"]
pub mod dar;
#[doc = "BRUR (rw) register accessor: MDMA channel x Block Repeat address Update register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`brur::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`brur::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`brur`]
module"]
pub type BRUR = crate::Reg<brur::BRUR_SPEC>;
#[doc = "MDMA channel x Block Repeat address Update register"]
pub mod brur;
#[doc = "LAR (rw) register accessor: MDMA channel x Link Address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`lar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`lar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`lar`]
module"]
pub type LAR = crate::Reg<lar::LAR_SPEC>;
#[doc = "MDMA channel x Link Address register"]
pub mod lar;
#[doc = "TBR (rw) register accessor: MDMA channel x Trigger and Bus selection Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tbr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tbr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tbr`]
module"]
pub type TBR = crate::Reg<tbr::TBR_SPEC>;
#[doc = "MDMA channel x Trigger and Bus selection Register"]
pub mod tbr;
#[doc = "MAR (rw) register accessor: MDMA channel x Mask address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mar`]
module"]
pub type MAR = crate::Reg<mar::MAR_SPEC>;
#[doc = "MDMA channel x Mask address register"]
pub mod mar;
#[doc = "MDR (rw) register accessor: MDMA channel x Mask Data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mdr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdr`]
module"]
pub type MDR = crate::Reg<mdr::MDR_SPEC>;
#[doc = "MDMA channel x Mask Data register"]
pub mod mdr;
|
// Ownership and move semantics
// C++ improved on C's manual memory management by expressing the concept of
// "ownership" through smart pointers, which reduced memory bugs to some
// extent and made memory management "semi-automatic".
// Rust goes a step further and builds the idea of ownership directly into
// the language. Ownership means:
// 1. Every value in Rust has a variable that manages it; that variable is
//    the owner of the value and of its memory.
// 2. At any point in time a value has exactly one owner.
// 3. When the owner's scope ends, the variable and the value it represents
//    are destroyed.
/// Demonstrates ownership basics: mutation by the owner, moves, and `clone`.
pub fn first() {
    fn test1() {
        // `s` owns the string; `mut` makes the binding mutable.
        let mut s = String::from("hello");
        // Mutate the value through one of String's methods.
        s.push_str(" world");
        println!("{}", s);
        // When the function ends, `s` is dropped (both its stack part and
        // its heap buffer) and the memory is released.
    }
    test1();
    /*
    // Does not compile
    fn test2() {
        let s = String::from("hello");
        let s1 = s;
        // The compiler reports that in `let s1 = s;` the string originally
        // owned by `s` was moved to `s1`, so using `s` afterwards is an
        // error — every value has exactly one owner.
        // The lifetime of `s` runs from its declaration to the move into
        // `s1`; the lifetime of `s1` runs from its declaration to the end
        // of the function.
        // The string created by `String::from` is destroyed once, when the
        // function ends. Transferring ownership does not destroy and
        // recreate the string; at any moment it has a single owner, either
        // `s` or `s1`.
        println!("{}", s);
    }
    test2();
    */
    fn test3() {
        let s = String::from("hello");
        // Explicitly call `clone` to make a deep copy.
        let s1 = s.clone();
        println!("{} {}", s, s1);
    }
    test3();
}
pub mod websocket;
|
use crate::renderer::{App, AppControl, EventChannel, EventLoop, UserEvent};
use crate::wry::menu::MenuIds;
use anyhow::Error;
use wry::application::event::{Event, StartCause, WindowEvent};
use wry::application::event_loop::{ControlFlow, EventLoopProxy};
pub type WryEventLoop = wry::application::event_loop::EventLoop<UserEvent>;
impl EventChannel for EventLoopProxy<UserEvent> {
    /// Forwards `event` into the wry event loop; delivery failures (e.g.
    /// the event loop has already shut down) are logged, not propagated.
    fn send_event(&self, event: UserEvent) {
        if let Err(err) = self.send_event(event) {
            log::error!("Could not send user event for message from WebView: {}", err);
        }
    }
}
impl EventLoop for WryEventLoop {
    type Channel = EventLoopProxy<UserEvent>;
    type Menu = MenuIds;

    /// Creates a proxy that can inject `UserEvent`s into this loop from
    /// other threads.
    fn create_channel(&self) -> Self::Channel {
        self.create_proxy()
    }

    /// Runs the wry event loop, dispatching window, menu and user events to
    /// `app`. Never returns; the process terminates from inside `run` after
    /// the control flow is set to `Exit`.
    fn start<A>(self, mut app: A) -> !
    where
        A: App<Self::Menu> + 'static,
    {
        // Logs each error in an `anyhow` cause chain on its own line.
        fn log_causes(err: Error) {
            for err in err.chain() {
                log::error!(" Caused by: {}", err);
            }
        }
        self.run(move |event, _, control_flow| {
            // First translate the raw event into an application-level decision.
            let control = match event {
                Event::NewEvents(StartCause::Init) => {
                    log::debug!("Application has started");
                    AppControl::Continue
                }
                Event::WindowEvent { event: WindowEvent::CloseRequested, .. } => {
                    log::debug!("Closing window was requested");
                    AppControl::Exit
                }
                Event::UserEvent(event) => {
                    log::debug!("Handling user event {:?}", event);
                    match app.handle_user_event(event) {
                        Ok(control) => control,
                        Err(err) => {
                            // A failing handler is logged but never stops the app.
                            log::error!("Could not handle user event");
                            log_causes(err);
                            AppControl::Continue
                        }
                    }
                }
                Event::MenuEvent { menu_id, .. } => match app.handle_menu_event(menu_id) {
                    Ok(control) => control,
                    Err(err) => {
                        log::error!("Could not handle menu event: {}", err);
                        AppControl::Continue
                    }
                },
                _ => AppControl::Continue,
            };
            // Then apply the decision: keep waiting for events, or shut down.
            match control {
                AppControl::Continue => *control_flow = ControlFlow::Wait,
                AppControl::Exit => {
                    // Give the app a chance to clean up before exiting.
                    if let Err(err) = app.handle_exit() {
                        log::error!("Could not handle application exit correctly");
                        log_causes(err);
                    }
                    *control_flow = ControlFlow::Exit;
                }
            }
        })
    }
}
|
/// Include generated proto server and client items.
///
/// You must specify the gRPC package name.
///
/// ```rust,ignore
/// mod pb {
///     tonic::include_proto!("helloworld");
/// }
/// ```
#[macro_export]
macro_rules! include_proto {
    ($package: tt) => {
        // Expands to the file `build.rs`/tonic-build generated for the
        // package, e.g. `$OUT_DIR/helloworld.rs`.
        include!(concat!(env!("OUT_DIR"), concat!("/", $package, ".rs")));
    };
}
|
mod node;
mod problem;
mod solver;
mod cover;
mod iter;
pub mod instances;
pub use problem::Problem;
pub use solver::Solver;
|
use std::convert::TryFrom;
use std::error::Error;
use std::ffi::{CStr, CString};
use std::path::PathBuf;
use std::sync::{Arc, Mutex, RwLock};
use cffi::{FromForeign, InputType, ReturnType, ToForeign};
use once_cell::sync::Lazy;
use pahkat_types::payload::{
macos::InstallTarget as MacOSInstallTarget, windows::InstallTarget as WindowsInstallTarget,
};
use serde::de::DeserializeOwned;
use serde::Serialize;
use url::Url;
use crate::config::ConfigPath;
use crate::ffi::BoxError;
use crate::repo::PayloadError;
use crate::transaction::{PackageStatus, PackageStatusError};
use crate::{Config, PackageKey};
// A lazily-built, single-threaded ("basic scheduler") tokio runtime shared
// behind a mutex so the synchronous FFI layer can drive futures from any
// thread. (The builder API here is the tokio 0.2-era one.)
static BASIC_RUNTIME: Lazy<Mutex<tokio::runtime::Runtime>> = Lazy::new(|| {
    Mutex::new(
        tokio::runtime::Builder::new()
            .basic_scheduler()
            .enable_all()
            .build()
            .expect("failed to build tokio runtime"),
    )
});
/// Blocks the current thread until `future` completes on the shared runtime.
///
/// Panics if the runtime mutex has been poisoned by a previous panic.
pub fn block_on<F: std::future::Future>(future: F) -> F::Output {
    BASIC_RUNTIME.lock().unwrap().block_on(future)
}
|
use crate::msbuild;
use nom::{
branch::alt,
bytes::complete::{is_not, tag, take_until},
character::complete::{self, char},
combinator::{self, recognize},
error::{ParseError, VerboseError},
sequence::{self, pair},
IResult,
};
use petgraph::prelude::*;
/// AST node produced while parsing a Visual Studio solution (`.sln`) file.
#[derive(Debug)]
pub enum Expr<'input> {
    /// A comment line.
    Comment(&'input str),
    /// A run of digits and dots (a version-number literal).
    DigitOrDot(&'input str),
    /// A brace-delimited GUID, braces included.
    Guid(&'input str),
    /// A bare identifier.
    Identifier(&'input str),
    /// A string value.
    Str(&'input str),
    /// A named version pair: (identifier, digit-or-dot value).
    Version(Box<Expr<'input>>, Box<Expr<'input>>),
    /// The solution file's first (format) line.
    FirstLine(Box<Expr<'input>>),
    /// The `Global` block with its contained sections.
    Global(Vec<Expr<'input>>),
    /// A project: its `ProjectBegin` header plus contained expressions.
    Project(Box<Expr<'input>>, Vec<Expr<'input>>),
    /// Project header: (type GUID, name, path, project GUID) — see
    /// `Project::from`, which reads them in that order.
    ProjectBegin(
        Box<Expr<'input>>,
        Box<Expr<'input>>,
        Box<Expr<'input>>,
        Box<Expr<'input>>,
    ),
    /// A section: its `SectionBegin` header plus content rows.
    Section(Box<Expr<'input>>, Vec<Expr<'input>>),
    /// Section header: (section name identifiers, stage value).
    SectionBegin(Vec<Expr<'input>>, Box<Expr<'input>>),
    /// One key/value row inside a section.
    SectionContent(Box<Expr<'input>>, Box<Expr<'input>>),
    /// A section row key.
    SectionKey(Box<Expr<'input>>),
    /// A section row value.
    SectionValue(Box<Expr<'input>>),
}
/// Generates simple &str getters from Expr variants
macro_rules! impl_str_getters {
($(($name:ident, $variant:ident)),*) => {
$(
#[must_use] pub fn $name(&self) -> &'input str {
if let Expr::$variant(s) = self {
return *s;
}
""
}
)*
};
}
impl<'input> Expr<'input> {
    impl_str_getters!(
        (identifier, Identifier),
        (digit_or_dot, DigitOrDot),
        (string, Str),
        (guid, Guid)
    );
    /// Returns `true` when this node is a `SectionBegin` whose name list
    /// contains `name`; `false` for every other node kind.
    #[must_use]
    pub fn is_section(&self, name: &str) -> bool {
        match self {
            Expr::SectionBegin(section_names, _) => section_names
                .iter()
                .any(|candidate| candidate.identifier() == name),
            _ => false,
        }
    }
}
/// Parsed representation of a Visual Studio solution file.
#[derive(Debug, Clone)]
pub struct Solution<'input> {
    /// Solution file format version string.
    pub format: &'input str,
    /// Product description string from the header.
    pub product: &'input str,
    /// All projects declared in the solution.
    pub projects: Vec<Project<'input>>,
    /// Named version entries (e.g. tool version lines).
    pub versions: Vec<Version<'input>>,
    /// Solution-level configuration/platform pairs.
    pub solution_configs: Vec<Conf<'input>>,
    /// Per-project configuration/platform pairs.
    pub project_configs: Vec<ProjectConfigs<'input>>,
    /// Directed dependency graph over string ids.
    pub dependencies: DiGraphMap<&'input str, i32>,
}
/// A named version entry (name plus its dotted version string).
#[derive(Debug, Copy, Clone)]
pub struct Version<'input> {
    pub name: &'input str,
    pub ver: &'input str,
}
/// All configurations associated with a single project id.
#[derive(Debug, Clone)]
pub struct ProjectConfigs<'input> {
    pub project_id: &'input str,
    pub configs: Vec<Conf<'input>>,
}
/// A configuration/platform pair such as `Release|Any CPU`.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Default)]
pub struct Conf<'input> {
    pub config: &'input str,
    pub platform: &'input str,
}
/// A single project declaration from the solution file.
#[derive(Debug, Copy, Clone, Default)]
pub struct Project<'input> {
    /// Project type GUID.
    pub type_id: &'input str,
    /// Human-readable description derived from the type GUID.
    pub type_descr: &'input str,
    /// Project GUID.
    pub id: &'input str,
    pub name: &'input str,
    pub path: &'input str,
}
impl<'input> Default for Solution<'input> {
    /// An empty solution: empty strings, no projects/versions/configs, and an
    /// empty dependency graph.
    fn default() -> Self {
        Self {
            format: "",
            product: "",
            projects: Vec::new(),
            versions: Vec::new(),
            solution_configs: Vec::new(),
            project_configs: Vec::new(),
            dependencies: DiGraphMap::new(),
        }
    }
}
impl<'input> Project<'input> {
    /// Creates a project with only its id and type set; the type description
    /// is derived from the type GUID and the remaining fields default.
    #[must_use]
    pub fn new(id: &'input str, type_id: &'input str) -> Self {
        Self {
            type_id,
            type_descr: msbuild::describe_project(type_id),
            id,
            ..Default::default()
        }
    }
    /// Builds a project from a `ProjectBegin` header node; returns `None`
    /// for any other node kind.
    #[must_use]
    pub fn from_begin(head: &Expr<'input>) -> Option<Self> {
        match head {
            Expr::ProjectBegin(project_type, name, path, id) => {
                Some(Project::from(project_type, name, path, id))
            }
            _ => None,
        }
    }
    /// Assembles a project from the four `ProjectBegin` sub-expressions.
    #[must_use]
    pub fn from(
        project_type: &Expr<'input>,
        name: &Expr<'input>,
        path: &Expr<'input>,
        id: &Expr<'input>,
    ) -> Self {
        let mut project = Project::new(id.guid(), project_type.guid());
        project.name = name.string();
        project.path = path.string();
        project
    }
}
impl<'input> Version<'input> {
#[must_use]
pub fn new(name: &'input str, ver: &'input str) -> Self {
Self { name, ver }
}
#[must_use]
pub fn from(name: &Expr<'input>, val: &Expr<'input>) -> Self {
let n = name.identifier();
let v = val.digit_or_dot();
Version::new(n, v)
}
}
impl<'input> From<&'input str> for Conf<'input> {
    /// Parses a `"Config|Platform"` string. The text before the first pipe
    /// becomes the configuration and the remainder the platform; malformed
    /// input (no pipe) yields the default, empty pair.
    fn from(s: &'input str) -> Self {
        match pipe_terminated::<VerboseError<&str>>(s) {
            Ok((platform, config)) => Self { config, platform },
            Err(_) => Self::default(),
        }
    }
}
impl<'input> Conf<'input> {
    /// Creates a configuration/platform pair.
    #[must_use]
    pub fn new(configuration: &'input str, platform: &'input str) -> Self {
        Self {
            config: configuration,
            platform,
        }
    }
    /// Parses the configuration out of a `SectionContent` row's key;
    /// returns `None` for any other node kind.
    #[must_use]
    pub fn from_expr(expr: &Expr<'input>) -> Option<Self> {
        match expr {
            Expr::SectionContent(left, _) => Some(Conf::from(left.string())),
            _ => None,
        }
    }
}
/// Intermediate parse result: one project id with a single
/// configuration/platform pair, produced by the nom parsers below.
#[derive(Default, PartialEq, Debug)]
struct ProjectConfig<'input> {
    id: &'input str,
    configuration: &'input str,
    platform: &'input str,
}
impl<'input> ProjectConfigs<'input> {
    /// Creates a record directly from a project id and its configurations.
    #[must_use]
    pub fn from_id_and_configs(project_id: &'input str, configs: Vec<Conf<'input>>) -> Self {
        // Store the vector as-is; the original copied it through a temporary
        // `Vec` via `extend`, which allocated and moved for no benefit.
        Self {
            project_id,
            configs,
        }
    }
    /// Parses `{GUID}.Config|Platform.<tag>` out of a `SectionContent` key.
    /// Returns `None` for other node kinds or on parse failure.
    #[must_use]
    pub fn from_section_content_key(expr: &Expr<'input>) -> Option<Self> {
        if let Expr::SectionContent(left, _) = expr {
            ProjectConfigs::from_project_configuration_platform(left.string())
        } else {
            None
        }
    }
    /// Parses `{GUID}.Config.<tag>` from the key and `Config|Platform` from
    /// the value of a `SectionContent` row.
    #[must_use]
    pub fn from_section_content(expr: &Expr<'input>) -> Option<Self> {
        if let Expr::SectionContent(left, right) = expr {
            ProjectConfigs::from_project_configuration(left.string(), right.string())
        } else {
            None
        }
    }
    fn from_project_configuration_platform(k: &'input str) -> Option<Self> {
        let r = ProjectConfigs::parse_project_configuration_platform::<VerboseError<&str>>(k);
        Self::new(r)
    }
    fn from_project_configuration(k: &'input str, v: &'input str) -> Option<Self> {
        let r = ProjectConfigs::parse_project_configuration::<VerboseError<&str>>(k, v);
        Self::new(r)
    }
    /// Converts a parse result into a one-config record; parse errors
    /// collapse to `None`.
    fn new(
        r: IResult<&'input str, ProjectConfig<'input>, VerboseError<&'input str>>,
    ) -> Option<Self> {
        // `ok().map(..)` replaces the original `map(Some).unwrap_or(None)`
        // detour with the idiomatic equivalent.
        r.ok().map(|(_, pc)| Self {
            project_id: pc.id,
            configs: vec![Conf::new(pc.configuration, pc.platform)],
        })
    }
    /// Parses keys of the form `{GUID}.Config|Platform.ActiveCfg` (or the
    /// `.Build.0` / `.Deploy.0` tags).
    fn parse_project_configuration_platform<'a, E>(
        key: &'a str,
    ) -> IResult<&'a str, ProjectConfig<'a>, E>
    where
        E: ParseError<&'a str> + std::fmt::Debug,
    {
        let parser =
            sequence::separated_pair(guid, char('.'), pair(pipe_terminated, tag_terminated));
        combinator::map(parser, |(project_id, (config, platform))| ProjectConfig {
            id: project_id,
            configuration: config,
            platform,
        })(key)
    }
    /// Parses keys of the form `{GUID}.Config.ActiveCfg`, taking the platform
    /// from the row's `Config|Platform` value instead.
    fn parse_project_configuration<'a, E>(
        key: &'a str,
        value: &'a str,
    ) -> IResult<&'a str, ProjectConfig<'a>, E>
    where
        E: ParseError<&'a str> + std::fmt::Debug,
    {
        let parser = sequence::separated_pair(guid, char('.'), tag_terminated);
        let conf = Conf::from(value);
        combinator::map(parser, |(project_id, config)| ProjectConfig {
            id: project_id,
            configuration: config,
            platform: conf.platform,
        })(key)
    }
}
/// Recognizes a brace-delimited GUID (e.g. `{27060CA7-...}`) at the start of
/// `input`, returning it *including* the braces and leaving the rest of the
/// input unconsumed.
fn guid<'a, E>(input: &'a str) -> IResult<&'a str, &'a str, E>
where
    E: ParseError<&'a str> + std::fmt::Debug,
{
    recognize(sequence::delimited(
        complete::char('{'),
        is_not("{}"),
        complete::char('}'),
    ))(input)
}
/// Consumes text up to and including one of the MSBuild suffix tags
/// (`.ActiveCfg`, `.Build.0`, `.Deploy.0`) and returns the text before the
/// tag. `alt` tries the tags in declaration order, so the first tag actually
/// present in the input decides the split.
fn tag_terminated<'a, E>(input: &'a str) -> IResult<&'a str, &'a str, E>
where
    E: ParseError<&'a str> + std::fmt::Debug,
{
    const ACTIVE_CFG_TAG: &str = ".ActiveCfg";
    const BUILD_TAG: &str = ".Build.0";
    const DEPLOY_TAG: &str = ".Deploy.0";
    // `take_until` finds the first occurrence of its tag anywhere ahead, which
    // is how platforms containing dots (e.g. ".NET") survive the split.
    sequence::terminated(
        alt((
            take_until(ACTIVE_CFG_TAG),
            take_until(BUILD_TAG),
            take_until(DEPLOY_TAG),
        )),
        alt((tag(ACTIVE_CFG_TAG), tag(BUILD_TAG), tag(DEPLOY_TAG))),
    )(input)
}
/// Consumes text up to and including the first `|`, returning the text before
/// it (the configuration); the remaining input is the platform part.
fn pipe_terminated<'a, E>(input: &'a str) -> IResult<&'a str, &'a str, E>
where
    E: ParseError<&'a str> + std::fmt::Debug,
{
    sequence::terminated(is_not("|"), char('|'))(input)
}
#[cfg(test)]
mod tests {
    use super::*;
    use rstest::rstest;
    // `Conf::from` splits on the FIRST pipe only and falls back to the empty
    // default pair when no pipe is present.
    #[rstest]
    #[case("Release|Any CPU", Conf { config: "Release", platform: "Any CPU" })]
    #[case("", Conf { config: "", platform: "" })]
    #[case("Release Any CPU", Conf { config: "", platform: "" })]
    #[case("Release|Any CPU|test", Conf { config: "Release", platform: "Any CPU|test" })]
    #[trace]
    fn from_configuration_tests(#[case] i: &str, #[case] expected: Conf) {
        // Arrange
        // Act
        let c = Conf::from(i);
        // Assert
        assert_eq!(c, expected);
    }
    #[test]
    fn from_project_configurations_correct() {
        // Arrange
        let s = "{27060CA7-FB29-42BC-BA66-7FC80D498354}.Debug|Any CPU.ActiveCfg";
        // Act
        let c = ProjectConfigs::from_project_configuration_platform(s);
        // Assert
        assert!(c.is_some());
        let c = c.unwrap();
        assert_eq!(c.project_id, "{27060CA7-FB29-42BC-BA66-7FC80D498354}");
        assert_eq!(c.configs.len(), 1);
        assert_eq!(c.configs[0].config, "Debug");
        assert_eq!(c.configs[0].platform, "Any CPU");
    }
    // Dots inside the configuration name must not confuse the tag split.
    #[test]
    fn from_project_configurations_config_with_dot() {
        // Arrange
        let s = "{27060CA7-FB29-42BC-BA66-7FC80D498354}.Debug .NET 4.0|Any CPU.ActiveCfg";
        // Act
        let c = ProjectConfigs::from_project_configuration_platform(s);
        // Assert
        assert!(c.is_some());
        let c = c.unwrap();
        assert_eq!(c.project_id, "{27060CA7-FB29-42BC-BA66-7FC80D498354}");
        assert_eq!(c.configs.len(), 1);
        assert_eq!(c.configs[0].config, "Debug .NET 4.0");
        assert_eq!(c.configs[0].platform, "Any CPU");
    }
    // Dots inside the platform name (".NET") must also survive.
    #[test]
    fn from_project_configurations_platform_with_dot_active() {
        // Arrange
        let s = "{7C2EF610-BCA0-4D1F-898A-DE9908E4970C}.Release|.NET.ActiveCfg";
        // Act
        let c = ProjectConfigs::from_project_configuration_platform(s);
        // Assert
        assert!(c.is_some());
        let c = c.unwrap();
        assert_eq!(c.project_id, "{7C2EF610-BCA0-4D1F-898A-DE9908E4970C}");
        assert_eq!(c.configs.len(), 1);
        assert_eq!(c.configs[0].config, "Release");
        assert_eq!(c.configs[0].platform, ".NET");
    }
    // Keys without a pipe cannot be parsed by the platform-in-key variant.
    #[test]
    fn from_project_configurations_without_platform() {
        // Arrange
        let s = "{5228E9CE-A216-422F-A5E6-58E95E2DD71D}.DLL Debug.ActiveCfg";
        // Act
        let c = ProjectConfigs::from_project_configuration_platform(s);
        // Assert
        assert!(c.is_none());
    }
    // `guid` keeps the braces and leaves the trailing text unconsumed.
    #[test]
    fn guid_test() {
        // Arrange
        let s = "{7C2EF610-BCA0-4D1F-898A-DE9908E4970C}.Release|.NET.Build.0";
        // Act
        let result = guid::<VerboseError<&str>>(s);
        // Assert
        assert_eq!(
            result,
            Ok((
                ".Release|.NET.Build.0",
                "{7C2EF610-BCA0-4D1F-898A-DE9908E4970C}",
            ))
        );
    }
    #[rstest]
    #[case(".NET.Build.0", ".NET")]
    #[case(".NET.ActiveCfg", ".NET")]
    #[trace]
    fn tag_terminated_tests(#[case] i: &str, #[case] expected: &str) {
        // Arrange
        // Act
        let result = tag_terminated::<VerboseError<&str>>(i);
        // Assert
        assert_eq!(result, Ok(("", expected)));
    }
    #[rstest]
    #[case("{7C2EF610-BCA0-4D1F-898A-DE9908E4970C}.Release|.NET.Build.0", ProjectConfig { id: "{7C2EF610-BCA0-4D1F-898A-DE9908E4970C}", configuration: "Release", platform: ".NET" })]
    #[case("{60BB14A5-0871-4656-BC38-4F0958230F9A}.Debug|ARM.Deploy.0", ProjectConfig { id: "{60BB14A5-0871-4656-BC38-4F0958230F9A}", configuration: "Debug", platform: "ARM" })]
    #[case("{7C2EF610-BCA0-4D1F-898A-DE9908E4970C}.Release|.NET.ActiveCfg", ProjectConfig { id: "{7C2EF610-BCA0-4D1F-898A-DE9908E4970C}", configuration: "Release", platform: ".NET" })]
    #[trace]
    fn project_configs_parse_project_configuration_platform_tests(
        #[case] i: &str,
        #[case] expected: ProjectConfig,
    ) {
        // Arrange
        // Act
        let result = ProjectConfigs::parse_project_configuration_platform::<VerboseError<&str>>(i);
        // Assert
        assert_eq!(result, Ok(("", expected)));
    }
    // The platform comes from the VALUE in this variant, not from the key.
    #[rstest]
    #[case("{5228E9CE-A216-422F-A5E6-58E95E2DD71D}.DLL Debug.ActiveCfg", "Release|x64", ProjectConfig { id: "{5228E9CE-A216-422F-A5E6-58E95E2DD71D}", configuration: "DLL Debug", platform: "x64" })]
    #[trace]
    fn project_configs_parse_project_configuration_tests(
        #[case] k: &str,
        #[case] v: &str,
        #[case] expected: ProjectConfig,
    ) {
        // Arrange
        // Act
        let result = ProjectConfigs::parse_project_configuration::<VerboseError<&str>>(k, v);
        // Assert
        assert_eq!(result, Ok(("", expected)));
    }
}
|
fn main() -> () {
let list: Vec<u32> = vec![1, 2, 3, 4, 5];
fn f(v: &u32) -> bool { 3.eq(v) }
let func: fn(&u32) -> bool = f
let result = find::<u32>(&list, func).unwrap();
assert!(3.eq(result));
}
/// Returns a reference to the first element of `xs` satisfying `f`, or
/// `None` when no element matches.
///
/// Takes a slice (`&[T]`) instead of `&Vec<T>` so any contiguous sequence
/// works; existing callers passing `&vec` still compile via deref coercion.
/// (The commented-out manual loop duplicating `Iterator::find` was removed.)
fn find<T>(xs: &[T], f: fn(&T) -> bool) -> Option<&T> {
    xs.iter().find(|&x| f(x))
}
|
use log::*;
use sphinx::SphinxPacket;
use std::net::SocketAddr;
use tokio::prelude::*;
/// Minimal TCP client for delivering Sphinx packets to mixnodes.
pub struct MixClient {}
impl MixClient {
    /// Creates a new (stateless) client.
    pub fn new() -> MixClient {
        MixClient {}
    }
    /// Serializes `packet` and writes it over a freshly opened TCP
    /// connection to `mix_addr`. A new connection is opened per call and
    /// dropped when the function returns.
    ///
    /// # Errors
    /// Returns any connect or write error, boxed.
    pub async fn send(
        &self,
        packet: SphinxPacket,
        mix_addr: SocketAddr,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let bytes = packet.to_bytes();
        info!("socket addr: {:?}", mix_addr);
        let mut stream = tokio::net::TcpStream::connect(mix_addr).await?;
        stream.write_all(&bytes[..]).await?;
        Ok(())
    }
}
#[cfg(test)]
mod sending_a_sphinx_packet {
    // use super::*;
    // use sphinx::SphinxPacket;
    // Placeholder test: the sketched end-to-end flow below is disabled
    // because it needs a live directory server and mixnode, so this test
    // currently asserts nothing.
    #[test]
    fn works() {
        // arrange
        // let directory = Client::new();
        // let message = "Hello, Sphinx!".as_bytes().to_vec();
        // let mixes = directory.get_mixes();
        // let destination = directory.get_destination();
        // let delays = sphinx::header::delays::generate(2);
        // let packet = SphinxPacket::new(message, &mixes, &destination, &delays).unwrap();
        // let mix_client = MixClient::new();
        // let first_hop = mixes.first().unwrap();
        //
        // // act
        // mix_client.send(packet, first_hop);
        // assert
        // wtf are we supposed to assert here?
    }
}
|
use cgmath::{self, InnerSpace, Vector2, Zero};
use midgar::{Button, KeyCode, MouseButton};
use midgar::Input;
/// Firing intent for the current frame.
#[derive(Debug, Clone)]
pub enum FireInput {
    /// Not firing.
    Idle,
    /// Fire in the given direction.
    Fire(Vector2<f32>),
}
/// Aggregated player input sampled once per frame by `check_input`.
#[derive(Clone, Debug)]
pub struct PlayerInput {
    /// Movement direction; normalised when non-zero.
    pub move_dir: Vector2<f32>,
    /// Firing state.
    pub fire: FireInput,
    /// Whether the bomb button was pressed this frame.
    pub bomb: bool,
}
impl Default for PlayerInput {
    /// No movement, not firing, no bomb.
    fn default() -> Self {
        Self {
            move_dir: Vector2::zero(),
            fire: FireInput::Idle,
            bomb: false,
        }
    }
}
/// Samples keyboard state and (when present) the first game controller, and
/// folds them into a single `PlayerInput` for this frame.
///
/// Opposing directions held together cancel to zero, and the final movement
/// vector is normalised so diagonal movement is not faster than axial.
pub fn check_input(input: &Input) -> PlayerInput {
    // Only the first connected controller is consulted.
    let controller = input.controllers().first();
    // -1.0 for left, 1.0 for right, 0.0 when neither or both are held.
    let x = match
        (input.is_key_held(KeyCode::Left) || input.is_key_held(KeyCode::A) || controller.map(|c| c.is_button_held(Button::DPadLeft)).unwrap_or(false),
        input.is_key_held(KeyCode::Right) || input.is_key_held(KeyCode::D) || controller.map(|c| c.is_button_held(Button::DPadRight)).unwrap_or(false)) {
        (true, false) => -1.0,
        (false, true) => 1.0,
        _ => 0.0,
    };
    // -1.0 for up, 1.0 for down (screen-space y), same cancellation rule.
    let y = match
        (input.is_key_held(KeyCode::Up) || input.is_key_held(KeyCode::W) || controller.map(|c| c.is_button_held(Button::DPadUp)).unwrap_or(false),
        input.is_key_held(KeyCode::Down) || input.is_key_held(KeyCode::S) || controller.map(|c| c.is_button_held(Button::DPadDown)).unwrap_or(false)) {
        (true, false) => -1.0,
        (false, true) => 1.0,
        _ => 0.0,
    };
    let move_dir = cgmath::vec2(x, y);
    // Firing is held (continuous); the fire direction is fixed straight up.
    let fire = if input.is_key_held(KeyCode::Space) || controller.map(|c| c.is_button_held(Button::RightShoulder)).unwrap_or(false) {
        FireInput::Fire(cgmath::vec2(0.0, -1.0))
    } else {
        FireInput::Idle
    };
    // Bomb is edge-triggered (pressed this frame), not held.
    let bomb = input.was_key_pressed(KeyCode::Q) || controller.map(|c| c.was_button_pressed(Button::LeftShoulder)).unwrap_or(false);
    PlayerInput {
        move_dir: if !move_dir.is_zero() {move_dir.normalize()} else {Vector2::zero()},
        fire,
        bomb,
    }
}
|
use arkecosystem_client::connection::Connection;
// Base URL used by the connection test; no server needs to be listening,
// since only the stored host string is asserted.
const MOCK_HOST: &str = "http://127.0.0.1:1234/api/";
// Creating a connection stores the host verbatim on the underlying client.
#[test]
fn test_connection() {
    let connection = Connection::new(MOCK_HOST);
    assert_eq!(connection.client.host, MOCK_HOST);
}
|
//! *col* is an esoteric programming language inspired by classical architectural columns and the
//! syntax of other esolangs like [Befunge](https://esolangs.org/wiki/Befunge) and
//! [Brainfuck](https://esolangs.org/wiki/Brainfuck).
//!
//! Learn more in the [project repository](https://github.com/cassaundra/col).
//!
//! To interpret col in your own program, see the [interpreter](interpreter)
//! documentation.
pub mod parser;
pub mod interpreter;
pub mod program;
#[cfg(test)]
mod test; |
/// Serde adapter (for `#[serde(with = "date_format")]`) that (de)serializes
/// `chrono::DateTime<Local>` as `"%Y-%m-%d %H:%M:%S"` strings.
pub mod date_format {
    use chrono::{DateTime, Local, TimeZone};
    use serde::{self, Deserialize, Deserializer, Serializer};
    const FORMAT: &str = "%Y-%m-%d %H:%M:%S";
    /// Serializes `date` as a string in `FORMAT`.
    ///
    /// # Errors
    /// Propagates any error from the underlying serializer.
    pub fn serialize<S>(date: &DateTime<Local>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // `.to_string()` on the Display adapter replaces the original
        // `format!("{}", ...)` round-trip — same output, clearer intent.
        serializer.serialize_str(&date.format(FORMAT).to_string())
    }
    /// Deserializes a string in `FORMAT` into a local date-time.
    ///
    /// # Errors
    /// Returns a custom deserialization error when the string does not match
    /// the expected format.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Local>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Local
            .datetime_from_str(&s, FORMAT)
            .map_err(serde::de::Error::custom)
    }
}
|
use std::{collections::HashMap, fs::{self, File}, io::{prelude::*, BufReader}, net::TcpStream, sync::mpsc::{self, Sender}, thread};
use serde::{Serialize};
use chrono::{Utc};
use mysql::{Opts, Pool, prelude::Queryable};
use crate::settings::Settings;
mod settings;
/// Reads one AMI protocol message from `stream`.
///
/// AMI messages are HTTP-header-like `Name: value` lines terminated by a line
/// containing only `\r\n`. Header lines are split on the first `:` and
/// collected into `headers`; every other line is appended verbatim to `rest`.
/// When `first` is true only a single line (the server banner) is consumed.
/// Read errors are printed and end the message early.
///
/// NOTE(review): a fresh `BufReader` is constructed per call, so any bytes it
/// buffers beyond the returned message are discarded when it drops — this
/// relies on reads stopping exactly at the message boundary; worth verifying
/// under load.
fn read_ami(stream: &mut TcpStream, first: bool) -> AMIResponse {
    let mut ami_response = AMIResponse {
        headers: HashMap::new(),
        rest: String::from(""),
    };
    let mut line = String::new();
    let mut reader = BufReader::new(stream);
    loop {
        line.clear();
        match reader.read_line(&mut line) {
            // EOF: the peer closed the connection.
            Ok(0) => break,
            Ok(_) => {
                // A lone CRLF marks the end of the message.
                if line == "\r\n" {
                    break;
                }
                // `Name: value` lines become headers (split on the first
                // colon only); everything else is body text.
                if line.contains(':') {
                    let mut split = line.splitn(2, ':');
                    let name = split.next().unwrap();
                    let value = split.next().unwrap();
                    ami_response.headers.insert(
                        name.trim().to_owned(),
                        value.trim().to_owned()
                    );
                }
                else {
                    ami_response.rest.push_str(&line);
                }
                // The banner is a single line, so stop right after it.
                if first {
                    break;
                }
            },
            Err(e) => {
                println!("Error: {}", e);
                break;
            }
        }
    }
    ami_response
}
/// One parsed AMI message: its `Name: value` headers plus any non-header
/// body text (`rest`), kept verbatim.
#[derive(Debug, Serialize)]
struct AMIResponse {
    headers: HashMap<String, String>,
    rest: String,
}
/// Connects to one AMI server, authenticates, then forwards every message
/// carrying an "Event" header through `sender` as `(server_name, response)`.
///
/// Runs forever on success; on connect or login failure it logs the problem
/// and returns, ending this listener thread.
fn listener(server: settings::Server, sender: Sender<(String, AMIResponse)>) {
    // Lets start a TCP connection to the AMI server.
    let mut stream = match TcpStream::connect(format!("{}:{}", server.host, server.port)) {
        Ok(stream) => stream,
        Err(e) => {
            println!("Unable to connect to TCP of server {}, {}:{}, with error: {}.", server.name, server.host, server.port, e);
            return;
        }
    };
    let first_response = read_ami(&mut stream, true);
    // Lets check if the first response contains the correct rest data.
    // @TODO implement better error handling.
    assert_eq!(first_response.rest, "Asterisk Call Manager/1.1\r\n");
    // Send the login command.
    // BUG FIX: `write` may perform a short write and silently drop bytes;
    // `write_all` guarantees the whole buffer is sent.
    stream.write_all(b"Action: Login\r\n").unwrap();
    write!(stream, "Username: {}\r\n", server.username).unwrap();
    write!(stream, "Secret: {}\r\n", server.password).unwrap();
    stream.write_all(b"\r\n").unwrap();
    // Lets get the login response.
    let login_response = read_ami(&mut stream, false);
    match login_response.headers.get("Response") {
        Some(response) => {
            if response != "Success" {
                println!("Login failed for server {}, with response: {}.", server.name, response);
                return;
            }
        },
        None => {
            println!("Unable to get login response while connecting to server {}.", server.name);
            return;
        }
    }
    loop {
        let ami_response = read_ami(&mut stream, false);
        // Only forward actual events: non-empty responses with an "Event"
        // header. (`!is_empty()` replaces the `len() > 0` comparison.)
        if !ami_response.headers.is_empty() && ami_response.headers.contains_key("Event") {
            sender.send(
                (server.name.clone(),
                ami_response
                )
            ).unwrap();
        }
    }
}
/// Builds the daily log file name from the current UTC date,
/// `events_<date>.log`.
fn get_current_file_name() -> String {
    // A single `format!` replaces the original push_str-into-empty-String
    // construction — same output, one less allocation and less code.
    format!("events_{}.log", Utc::now().date())
}
/// Opens `path` for appending, creating the file if it does not exist.
///
/// Panics when the file cannot be opened (missing directory, permissions);
/// acceptable here since the process cannot log events without it.
fn open_file(path: String) -> File {
    std::fs::OpenOptions::new()
        // `.append(true)` already implies write access, so the original
        // redundant `.write(true)` call was dropped.
        .create(true)
        .append(true)
        .open(&path)
        .unwrap()
}
/// Entry point: loads settings, spawns one AMI listener thread per server,
/// builds the configured MySQL pools, then loops forever writing received
/// events to daily log files and inserting matching events into databases.
fn main() {
    // Lets get the settings from the settings module.
    let mut settings = match Settings::init() {
        Ok(settings) => settings,
        Err(e) => {
            println!("Error: {}", e);
            return;
        }
    };
    // Lets check if the file path end with a /.
    // If it does lets remove it.
    if settings.basic.target_directory.ends_with("/") {
        settings.basic.target_directory = settings.basic.target_directory[..(settings.basic.target_directory.len() - 1)].to_string();
    }
    // Unmutable the settings.
    let settings = settings;
    let mut handles = vec![];
    let (sender, receiver) = mpsc::channel::<(String, AMIResponse)>();
    // Lets loop the server list and connect to each one on different threads.
    for server in &settings.servers {
        println!("Connecting to {}", server.host);
        let sender1 = sender.clone();
        let server1 = server.clone();
        handles.push(thread::spawn(move || {
            listener(server1, sender1);
        }));
    }
    // Lets make sure we have a path to our settings.basic.target_directory:
    let target_directory = settings.basic.target_directory.clone();
    if target_directory.len() == 0 {
        println!("Error: No target directory specified.");
        return;
    }
    else {
        fs::create_dir_all(target_directory).unwrap();
    }
    // This hashmap will hold all mysql pools.
    let mut mysql_pool = HashMap::new();
    // Lets loop settings.databases and create a connection for each one.
    for database in &settings.databases {
        println!("Connecting to MySQL database {}.", database.host);
        // Lets first check if this database is already in the hashmap, if it is, it means there are duplicates in the settings, so we will error out.
        // NOTE(review): duplicates are detected by `host` here but pools are
        // inserted keyed by `id` below — confirm that is intentional.
        if mysql_pool.contains_key(&database.host) {
            println!("Database {} is already connected.", database.host);
            return;
        }
        let url = format!("mysql://{}:{}@{}:{}/{}", database.user, database.password, database.host, database.port, database.database);
        let opts = match Opts::from_url(&url) {
            Ok(opts) => opts,
            Err(e) => {
                println!("Unable to connect to MySQL database {} with error: {}", database.host, e);
                continue;
            }
        };
        let pool = match Pool::new(opts) {
            Ok(pool) => pool,
            Err(e) => {
                println!("Unable to connect to MySQL database {} with error: {}", database.host, e);
                continue;
            }
        };
        mysql_pool.insert(database.id.clone(), pool);
        println!("Connected successfully to database {}.", database.host);
    }
    let mut server_paths: HashMap<String, String> = HashMap::new();
    // We want to check if directory_per_server is true in the settings if so we will create a directory for each server, and create a hashmap holding the file.
    if settings.basic.directory_per_server {
        for server in &settings.servers {
            let dir = format!("{}/{}", &settings.basic.target_directory, server.name);
            println!("Creating directory {}", dir);
            fs::create_dir_all(&dir).unwrap();
            server_paths.insert(server.name.clone(), dir);
        }
    }
    let mut files: HashMap<String, File> = HashMap::new();
    let mut event_file_name = String::from("");
    let all = String::from("all");
    loop {
        // Blocks until a listener thread sends an event; `recv` only errors
        // once every sender (listener thread) has gone away.
        let (server_name, ami_response) = match receiver.recv() {
            Ok((server_name, ami_response)) => (server_name, ami_response),
            Err(e) => {
                println!("Error: {}", e);
                break;
            }
        };
        // Now lets check if the event name matches any in the settings.event_clauses[event_name]
        // If it does we will write the event to the database.
        for event_clause in &settings.event_clauses {
            // Listener threads only forward responses that contain an
            // "Event" header, so this unwrap cannot fail.
            if &event_clause.event_name == ami_response.headers.get("Event").unwrap() {
                // So now we have a match, so we get the db pool from the db_connection_id, and target table from db_table.
                let pool = mysql_pool.get(&event_clause.db_connection_id).unwrap();
                let table = event_clause.db_table.clone();
                // Now inside the event_clause we have a HashMap named event_data_link that will match the headers of the event to the database columns.
                // So now we need to prepare the SQL statement, and the vector that will hold the values.
                let mut columns = vec![];
                let mut values = vec![];
                for (event_key, mysql_column) in &event_clause.event_data_link {
                    // Lets check if the event_key is in the ami_response.headers.
                    if ami_response.headers.contains_key(event_key) {
                        // If it is we will add the value to the values hashmap.
                        values.push(mysql::Value::from(ami_response.headers.get(event_key)));
                    } else {
                        match event_key.as_str() {
                            "%SERVER_NAME%" => {
                                // If the event_key is %SERVER_NAME% we will add the server_name to the values hashmap.
                                values.push(mysql::Value::from(&server_name));
                            },
                            _ => {
                                // Unknown keys map to SQL NULL.
                                values.push(mysql::Value::from(None::<String>));
                            }
                        }
                    }
                    // And add the column name to the columns.
                    columns.push(mysql_column.clone());
                }
                // Now we have the columns and values, lets prepare the SQL statement.
                // Values are bound as `?` placeholders, so no SQL injection
                // through event payloads; only column/table names from the
                // trusted settings file are interpolated directly.
                let sql = format!(
                    "INSERT INTO {} ({}) VALUES ({})",
                    table,
                    // We want all columns separated by commas.
                    &columns.join(","),
                    // Now we want ? for each column or value.
                    vec!["?"; columns.len()].join(",")
                );
                let mut conn = pool.get_conn().unwrap();
                let _s: Vec<mysql::Row> = match conn.exec(sql, values) {
                    Ok(s) => {
                        println!("Successfully inserted row into database {} table {}.", &event_clause.db_connection_id, &event_clause.db_table);
                        s
                    },
                    Err(e) => {
                        println!("Unable to insert row into database {} table {} with error: {}", &event_clause.db_connection_id, &event_clause.db_table, e);
                        continue;
                    }
                };
            }
        }
        let mut file: &File;
        // Lets check if the file name changed.
        // (The name embeds the UTC date, so this rolls the log files daily.)
        if event_file_name != get_current_file_name() {
            event_file_name = get_current_file_name();
            // We need to update all the files for each server, or not depending on the settings.
            if settings.basic.directory_per_server {
                for server in &settings.servers {
                    files.insert(server.name.clone(),
                        open_file(format!("{}/{}", &server_paths.get(&server.name).unwrap(), event_file_name))
                    );
                }
            }
            else {
                files.insert(all.clone(),
                    open_file(format!("{}/{}", &settings.basic.target_directory, event_file_name))
                );
            }
        }
        // Now lets get the target file for the current server.
        if settings.basic.directory_per_server {
            file = files.get(&server_name).unwrap();
        } else {
            file = files.get(&all).unwrap();
        }
        let time = Utc::now();
        // Log line format: <server>::<millis timestamp>::<JSON response>.
        let msg =
            format!(
                "{}::{}::{}\r\n",
                server_name,
                time.timestamp_millis(),
                serde_json::to_string(&ami_response).unwrap()
            );
        // Lets write the message to the events file.
        file.write_all(msg.as_bytes()).unwrap();
    }
    // Lets wait for all the threads to finish.
    for handle in handles {
        handle.join().unwrap();
    }
}
|
use iron::prelude::*;
use common::http::*;
/// Renders the "resource" view for the current request.
pub fn render_resource(req: &mut Request) -> IronResult<Response> {
    respond_view("resource", &ViewData::new(req))
}
/// Renders the "about-site" view for the current request.
pub fn render_about_site(req: &mut Request) -> IronResult<Response> {
    respond_view("about-site", &ViewData::new(req))
}
/// Default chain spec name.
pub const DEFAULT_SPEC: &str = "mainnet";
/// Chain spec names accepted by the CLI.
pub const AVAILABLE_SPECS: &[&str] = &["mainnet", "testnet", "staging", "dev"];
/// Default JSON-RPC listen port.
pub const DEFAULT_RPC_PORT: &str = "8114";
/// Default P2P listen port.
pub const DEFAULT_P2P_PORT: &str = "8115";
/// Suffix that opens a template branch block (the line's prefix is emitted).
const START_MARKER: &str = " # {{";
/// Line that closes a branch block. NOTE(review): the name has a typo
/// ("MAKER" for "MARKER"); kept as-is because it is referenced below.
const END_MAKER: &str = "# }}";
/// Prefix of the fallback branch used when no spec-specific branch matches.
const WILDCARD_BRANCH: &str = "# _ => ";
use std::io;
/// A config-file template; the wrapped value is the template text.
pub struct Template<T>(T);
/// Substitution values for rendering a [`Template`].
pub struct TemplateContext<'a> {
    /// Chain spec name used to select branch lines.
    pub spec: &'a str,
    /// Replacement for the `{spec_source}` placeholder.
    pub spec_source: &'a str,
    /// Replacement for the `{rpc_port}` placeholder.
    pub rpc_port: &'a str,
    /// Replacement for the `{p2p_port}` placeholder.
    pub p2p_port: &'a str,
    /// Replacement for `{log_to_file}` (rendered as `true`/`false`).
    pub log_to_file: bool,
    /// Replacement for `{log_to_stdout}` (rendered as `true`/`false`).
    pub log_to_stdout: bool,
    /// Replacement for the `{block_assembler}` placeholder.
    pub block_assembler: &'a str,
}
impl<T> Template<T> {
    /// Wraps the given template content.
    pub fn new(content: T) -> Self {
        Template(content)
    }
}
/// Writes one template line to `w` after expanding `\n` escapes and all
/// `{placeholder}` substitutions from `context`.
fn writeln<W: io::Write>(w: &mut W, s: &str, context: &TemplateContext) -> io::Result<()> {
    // Only when built with the custom `docker` cfg flag: bind the RPC server
    // on all interfaces instead of loopback.
    #[cfg(docker)]
    let s = s.replace("127.0.0.1:{rpc_port}", "0.0.0.0:{rpc_port}");
    writeln!(
        w,
        "{}",
        s.replace("\\n", "\n")
            .replace("{rpc_port}", context.rpc_port)
            .replace("{p2p_port}", context.p2p_port)
            .replace("{log_to_file}", &format!("{}", context.log_to_file))
            .replace("{log_to_stdout}", &format!("{}", context.log_to_stdout))
            .replace("{block_assembler}", context.block_assembler)
            .replace("{spec_source}", context.spec_source)
    )
}
/// Parser state while scanning template lines.
#[derive(Debug)]
pub enum TemplateState<'a> {
    /// Outside any branch block; lines pass through unchanged.
    SearchStartMarker,
    /// Inside a block (holding its opening line), looking for a matching branch.
    MatchBranch(&'a str),
    /// A branch already matched; skipping until the block's end marker.
    SearchEndMarker,
}
impl<T> Template<T>
where
    T: AsRef<str>,
{
    /// Renders the template into `w`, expanding branch blocks.
    ///
    /// A branch block has the shape:
    /// `<prefix> # {{` / `# <spec> => <line>` / `# _ => <line>` / `# }}`.
    /// The line of the branch matching `context.spec` (or the `_` wildcard)
    /// is emitted through `writeln` (placeholder substitution); the opening
    /// line's prefix is emitted with the start marker stripped when no
    /// branch matches. All other lines pass through verbatim.
    pub fn write_to<'c, W: io::Write>(
        &self,
        w: &mut W,
        context: &TemplateContext<'c>,
    ) -> io::Result<()> {
        let spec_branch = format!("# {} => ", context.spec);
        let mut state = TemplateState::SearchStartMarker;
        for line in self.0.as_ref().lines() {
            // dbg!((line, &state));
            match state {
                TemplateState::SearchStartMarker => {
                    if line.ends_with(START_MARKER) {
                        state = TemplateState::MatchBranch(line);
                    } else {
                        writeln!(w, "{}", line)?;
                    }
                }
                TemplateState::MatchBranch(start_line) => {
                    if line == END_MAKER {
                        // Block ended with no matching branch: emit the
                        // opening line minus the start marker.
                        writeln!(
                            w,
                            "{}",
                            &start_line[..(start_line.len() - START_MARKER.len())],
                        )?;
                        state = TemplateState::SearchStartMarker;
                    } else if line.starts_with(&spec_branch) {
                        writeln(w, &line[spec_branch.len()..], context)?;
                        state = TemplateState::SearchEndMarker;
                    } else if line.starts_with(WILDCARD_BRANCH) {
                        writeln(w, &line[WILDCARD_BRANCH.len()..], context)?;
                        state = TemplateState::SearchEndMarker;
                    }
                }
                TemplateState::SearchEndMarker => {
                    if line == END_MAKER {
                        state = TemplateState::SearchStartMarker;
                    }
                }
            }
        }
        // A block left unterminated at EOF still emits its prefix line.
        if let TemplateState::MatchBranch(start_line) = state {
            writeln!(
                w,
                "{}",
                &start_line[..(start_line.len() - START_MARKER.len())],
            )?;
        }
        Ok(())
    }
}
|
pub mod base64;
pub mod command;
pub mod command_runtime;
pub mod error;
pub mod message;
pub mod project;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
/// One project set: the configured projects plus their command and event
/// handler lists.
#[derive(Clone)]
pub struct WxWorkProjectSet {
    /// Map of configured projects.
    pub projs: project::WxWorkProjectMap,
    /// Commands parsed from the `cmds` config section.
    pub cmds: Rc<command::WxWorkCommandList>,
    /// Commands parsed from the `events` config section.
    pub events: Rc<command::WxWorkCommandList>,
}
/// Thread-safe shared handle to a project set.
pub type WxWorkProjectSetShared = Arc<Mutex<WxWorkProjectSet>>;
lazy_static! {
    /// Shared JSON `null` used as the fallback for missing optional sections.
    pub static ref GLOBAL_EMPTY_JSON_NULL: serde_json::Value = serde_json::Value::Null;
}
/// Builds a [`WxWorkProjectSet`] from a JSON object containing a mandatory
/// `projects` field and optional `cmds`/`events` fields.
///
/// Returns `None` (after logging the reason) when the input is not an object
/// or has no `projects` field; missing `cmds`/`events` fall back to JSON null.
pub fn build_project_set(json: &serde_json::Value) -> Option<WxWorkProjectSet> {
    let kvs = match json.as_object() {
        Some(obj) => obj,
        None => {
            error!(
                "project set configure must be a json object, but real is {}",
                json
            );
            return None;
        }
    };
    let projs_json_conf = match kvs.get("projects") {
        Some(conf) => conf,
        None => {
            error!("project set configure must has projects field {}", json);
            return None;
        }
    };
    // Optional sections: absent keys are parsed as JSON null.
    let cmds_json_conf = kvs.get("cmds").unwrap_or(&GLOBAL_EMPTY_JSON_NULL);
    let events_json_conf = kvs.get("events").unwrap_or(&GLOBAL_EMPTY_JSON_NULL);
    Some(WxWorkProjectSet {
        projs: project::WxWorkProject::parse(projs_json_conf),
        cmds: Rc::new(command::WxWorkCommand::parse(cmds_json_conf)),
        events: Rc::new(command::WxWorkCommand::parse(events_json_conf)),
    })
}
/// Like [`build_project_set`], but wraps the result for shared mutable
/// access across threads.
pub fn build_project_set_shared(json: &serde_json::Value) -> Option<WxWorkProjectSetShared> {
    // `map` replaces the original manual
    // `if let Some(x) { Some(...) } else { None }` expansion.
    build_project_set(json).map(|x| Arc::new(Mutex::new(x)))
}
|
// Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/// Implements the `synthesizer::InputDevice` trait, and the server side of the
/// `fuchsia.input.report.InputDevice` FIDL protocol. Used by
/// `modern_backend::InputDeviceRegistry`.
///
/// Currently a stateless unit struct; fields/behavior are added elsewhere.
#[allow(dead_code)] // TODO(fxbug.dev/63985) remove `allow`
pub(super) struct InputDevice;
|
//! Shader settings.
use arctk::{access, clone, img::Gradient, math::Pos3};
/// Colouring settings.
/// Colouring settings.
pub struct Shader<'a> {
    /// Sun position used for lighting calculations [m].
    sun_pos: Pos3,
    /// Ambient, diffuse, and occlusion lighting fractions.
    light: [f64; 3],
    /// Ambient, diffuse, and occlusion shadowing fractions.
    shadow: [f64; 2],
    /// Specular power exponent. (NOTE(review): the original comment read
    /// "Ambient lighting fraction", which appears copy-pasted — confirm.)
    spec_pow: i32,
    /// Lighting and shadowing occlusion testing distances.
    occ_dist: [f64; 2],
    /// Effect fall-off rate.
    fall_off: f64,
    /// Optional number of soft shadowing samples, and angular radius [rad].
    soft_shadow_samples: Option<(i32, f64)>,
    /// Optional number of ambient shadowing samples and the scaling power.
    ambient_shadow_samples: Option<(i32, i32)>,
    /// Sky colour gradient.
    sky_grad: &'a Gradient,
    /// Data colouring gradient.
    data_grad: &'a Gradient,
}
impl<'a> Shader<'a> {
    // Getter macros from arctk — assumed from the macro names: access! exposes
    // a reference, clone! a copy. TODO confirm against arctk docs.
    access!(sun_pos, Pos3);
    access!(light, [f64; 3]);
    access!(shadow, [f64; 2]);
    clone!(spec_pow, i32);
    access!(occ_dist, [f64; 2]);
    clone!(fall_off, f64);
    clone!(soft_shadow_samples, Option<(i32, f64)>);
    clone!(ambient_shadow_samples, Option<(i32, i32)>);
    access!(sky_grad, Gradient);
    access!(data_grad, Gradient);
    /// Construct a new instance.
    ///
    /// The `light` and `shadow` fractions are normalised so that each set
    /// sums to one; all other arguments are stored as given.
    /// Argument validity is only checked in debug builds (`debug_assert!`).
    #[allow(clippy::cognitive_complexity)]
    #[allow(clippy::too_many_arguments)]
    #[inline]
    #[must_use]
    pub fn new(
        sun_pos: Pos3,
        light: [f64; 3],
        shadow: [f64; 2],
        spec_pow: i32,
        occ_dist: [f64; 2],
        fall_off: f64,
        soft_shadow_samples: Option<(i32, f64)>,
        ambient_shadow_samples: Option<(i32, i32)>,
        sky_grad: &'a Gradient,
        data_grad: &'a Gradient,
    ) -> Self {
        // Debug-only sanity checks: all fractions, powers, and distances must
        // be positive; optional sampling settings, when present, must have at
        // least two samples and a positive radius/power.
        debug_assert!(light[0] > 0.0);
        debug_assert!(light[1] > 0.0);
        debug_assert!(light[2] > 0.0);
        debug_assert!(shadow[0] > 0.0);
        debug_assert!(shadow[1] > 0.0);
        debug_assert!(spec_pow > 0);
        debug_assert!(occ_dist[0] > 0.0);
        debug_assert!(occ_dist[1] > 0.0);
        debug_assert!(fall_off > 0.0);
        debug_assert!(soft_shadow_samples.is_none() || soft_shadow_samples.unwrap().0 > 1);
        debug_assert!(soft_shadow_samples.is_none() || soft_shadow_samples.unwrap().1 > 0.0);
        debug_assert!(ambient_shadow_samples.is_none() || ambient_shadow_samples.unwrap().0 > 1);
        debug_assert!(ambient_shadow_samples.is_none() || ambient_shadow_samples.unwrap().1 > 0);
        // Normalise each fraction set so it sums to one.
        let light_total = light[0] + light[1] + light[2];
        let shadow_total = shadow[0] + shadow[1];
        Self {
            sun_pos,
            light: [
                light[0] / light_total,
                light[1] / light_total,
                light[2] / light_total,
            ],
            shadow: [shadow[0] / shadow_total, shadow[1] / shadow_total],
            spec_pow,
            occ_dist,
            fall_off,
            soft_shadow_samples,
            ambient_shadow_samples,
            sky_grad,
            data_grad,
        }
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// TimeseriesWidgetRequest : Updated timeseries widget.
// NOTE: OpenAPI-generated model (see file header). Nested query definitions
// are boxed to keep the struct itself small; every field is optional and
// skipped during serialization when `None`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TimeseriesWidgetRequest {
    #[serde(rename = "apm_query", skip_serializing_if = "Option::is_none")]
    pub apm_query: Option<Box<crate::models::LogQueryDefinition>>,
    #[serde(rename = "display_type", skip_serializing_if = "Option::is_none")]
    pub display_type: Option<crate::models::WidgetDisplayType>,
    #[serde(rename = "event_query", skip_serializing_if = "Option::is_none")]
    pub event_query: Option<Box<crate::models::LogQueryDefinition>>,
    /// List of formulas that operate on queries. **This feature is currently in beta.**
    #[serde(rename = "formulas", skip_serializing_if = "Option::is_none")]
    pub formulas: Option<Vec<crate::models::WidgetFormula>>,
    #[serde(rename = "log_query", skip_serializing_if = "Option::is_none")]
    pub log_query: Option<Box<crate::models::LogQueryDefinition>>,
    /// Used to define expression aliases.
    #[serde(rename = "metadata", skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Vec<crate::models::TimeseriesWidgetExpressionAlias>>,
    #[serde(rename = "network_query", skip_serializing_if = "Option::is_none")]
    pub network_query: Option<Box<crate::models::LogQueryDefinition>>,
    /// Whether or not to display a second y-axis on the right.
    #[serde(rename = "on_right_yaxis", skip_serializing_if = "Option::is_none")]
    pub on_right_yaxis: Option<bool>,
    #[serde(rename = "process_query", skip_serializing_if = "Option::is_none")]
    pub process_query: Option<Box<crate::models::ProcessQueryDefinition>>,
    #[serde(rename = "profile_metrics_query", skip_serializing_if = "Option::is_none")]
    pub profile_metrics_query: Option<Box<crate::models::LogQueryDefinition>>,
    /// Widget query.
    #[serde(rename = "q", skip_serializing_if = "Option::is_none")]
    pub q: Option<String>,
    /// List of queries that can be returned directly or used in formulas. **This feature is currently in beta.**
    #[serde(rename = "queries", skip_serializing_if = "Option::is_none")]
    pub queries: Option<Vec<crate::models::FormulaAndFunctionQueryDefinition>>,
    #[serde(rename = "response_format", skip_serializing_if = "Option::is_none")]
    pub response_format: Option<crate::models::FormulaAndFunctionResponseFormat>,
    #[serde(rename = "rum_query", skip_serializing_if = "Option::is_none")]
    pub rum_query: Option<Box<crate::models::LogQueryDefinition>>,
    #[serde(rename = "security_query", skip_serializing_if = "Option::is_none")]
    pub security_query: Option<Box<crate::models::LogQueryDefinition>>,
    #[serde(rename = "style", skip_serializing_if = "Option::is_none")]
    pub style: Option<Box<crate::models::WidgetRequestStyle>>,
}
impl TimeseriesWidgetRequest {
/// Updated timeseries widget.
pub fn new() -> TimeseriesWidgetRequest {
TimeseriesWidgetRequest {
apm_query: None,
display_type: None,
event_query: None,
formulas: None,
log_query: None,
metadata: None,
network_query: None,
on_right_yaxis: None,
process_query: None,
profile_metrics_query: None,
q: None,
queries: None,
response_format: None,
rum_query: None,
security_query: None,
style: None,
}
}
}
|
// svd2rust-generated writer types for the FIR0 register: the register-level
// writer `W` plus one single-bit writer alias per field (FAE0..FAE15,
// FPE0..FPE4). The const parameter `O` is the field's bit offset.
#[doc = "Register `FIR0` writer"]
pub type W = crate::W<FIR0_SPEC>;
#[doc = "Field `FAE0` writer - FAE0"]
pub type FAE0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE1` writer - FAE1"]
pub type FAE1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE2` writer - FAE2"]
pub type FAE2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE3` writer - FAE3"]
pub type FAE3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE4` writer - FAE4"]
pub type FAE4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE5` writer - FAE5"]
pub type FAE5_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE6` writer - FAE6"]
pub type FAE6_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE7` writer - FAE7"]
pub type FAE7_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE8` writer - FAE8"]
pub type FAE8_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE9` writer - FAE9"]
pub type FAE9_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE10` writer - FAE10"]
pub type FAE10_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE11` writer - FAE11"]
pub type FAE11_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE12` writer - FAE12"]
pub type FAE12_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE13` writer - FAE13"]
pub type FAE13_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE14` writer - FAE14"]
pub type FAE14_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FAE15` writer - FAE15"]
pub type FAE15_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FPE0` writer - FPE0"]
pub type FPE0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FPE1` writer - FPE1"]
pub type FPE1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FPE2` writer - FPE2"]
pub type FPE2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FPE3` writer - FPE3"]
pub type FPE3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FPE4` writer - FPE4"]
pub type FPE4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// svd2rust-generated field accessors: one method per FIR0 bit, each returning
// a `BitWriter` positioned at that field's offset (the second const generic).
impl W {
    #[doc = "Bit 0 - FAE0"]
    #[inline(always)]
    #[must_use]
    pub fn fae0(&mut self) -> FAE0_W<FIR0_SPEC, 0> {
        FAE0_W::new(self)
    }
    #[doc = "Bit 1 - FAE1"]
    #[inline(always)]
    #[must_use]
    pub fn fae1(&mut self) -> FAE1_W<FIR0_SPEC, 1> {
        FAE1_W::new(self)
    }
    #[doc = "Bit 2 - FAE2"]
    #[inline(always)]
    #[must_use]
    pub fn fae2(&mut self) -> FAE2_W<FIR0_SPEC, 2> {
        FAE2_W::new(self)
    }
    #[doc = "Bit 3 - FAE3"]
    #[inline(always)]
    #[must_use]
    pub fn fae3(&mut self) -> FAE3_W<FIR0_SPEC, 3> {
        FAE3_W::new(self)
    }
    #[doc = "Bit 4 - FAE4"]
    #[inline(always)]
    #[must_use]
    pub fn fae4(&mut self) -> FAE4_W<FIR0_SPEC, 4> {
        FAE4_W::new(self)
    }
    #[doc = "Bit 5 - FAE5"]
    #[inline(always)]
    #[must_use]
    pub fn fae5(&mut self) -> FAE5_W<FIR0_SPEC, 5> {
        FAE5_W::new(self)
    }
    #[doc = "Bit 6 - FAE6"]
    #[inline(always)]
    #[must_use]
    pub fn fae6(&mut self) -> FAE6_W<FIR0_SPEC, 6> {
        FAE6_W::new(self)
    }
    #[doc = "Bit 7 - FAE7"]
    #[inline(always)]
    #[must_use]
    pub fn fae7(&mut self) -> FAE7_W<FIR0_SPEC, 7> {
        FAE7_W::new(self)
    }
    #[doc = "Bit 8 - FAE8"]
    #[inline(always)]
    #[must_use]
    pub fn fae8(&mut self) -> FAE8_W<FIR0_SPEC, 8> {
        FAE8_W::new(self)
    }
    #[doc = "Bit 9 - FAE9"]
    #[inline(always)]
    #[must_use]
    pub fn fae9(&mut self) -> FAE9_W<FIR0_SPEC, 9> {
        FAE9_W::new(self)
    }
    #[doc = "Bit 10 - FAE10"]
    #[inline(always)]
    #[must_use]
    pub fn fae10(&mut self) -> FAE10_W<FIR0_SPEC, 10> {
        FAE10_W::new(self)
    }
    #[doc = "Bit 11 - FAE11"]
    #[inline(always)]
    #[must_use]
    pub fn fae11(&mut self) -> FAE11_W<FIR0_SPEC, 11> {
        FAE11_W::new(self)
    }
    #[doc = "Bit 12 - FAE12"]
    #[inline(always)]
    #[must_use]
    pub fn fae12(&mut self) -> FAE12_W<FIR0_SPEC, 12> {
        FAE12_W::new(self)
    }
    #[doc = "Bit 13 - FAE13"]
    #[inline(always)]
    #[must_use]
    pub fn fae13(&mut self) -> FAE13_W<FIR0_SPEC, 13> {
        FAE13_W::new(self)
    }
    #[doc = "Bit 14 - FAE14"]
    #[inline(always)]
    #[must_use]
    pub fn fae14(&mut self) -> FAE14_W<FIR0_SPEC, 14> {
        FAE14_W::new(self)
    }
    #[doc = "Bit 15 - FAE15"]
    #[inline(always)]
    #[must_use]
    pub fn fae15(&mut self) -> FAE15_W<FIR0_SPEC, 15> {
        FAE15_W::new(self)
    }
    #[doc = "Bit 16 - FPE0"]
    #[inline(always)]
    #[must_use]
    pub fn fpe0(&mut self) -> FPE0_W<FIR0_SPEC, 16> {
        FPE0_W::new(self)
    }
    #[doc = "Bit 17 - FPE1"]
    #[inline(always)]
    #[must_use]
    pub fn fpe1(&mut self) -> FPE1_W<FIR0_SPEC, 17> {
        FPE1_W::new(self)
    }
    #[doc = "Bit 18 - FPE2"]
    #[inline(always)]
    #[must_use]
    pub fn fpe2(&mut self) -> FPE2_W<FIR0_SPEC, 18> {
        FPE2_W::new(self)
    }
    #[doc = "Bit 19 - FPE3"]
    #[inline(always)]
    #[must_use]
    pub fn fpe3(&mut self) -> FPE3_W<FIR0_SPEC, 19> {
        FPE3_W::new(self)
    }
    #[doc = "Bit 20 - FPE4"]
    #[inline(always)]
    #[must_use]
    pub fn fpe4(&mut self) -> FPE4_W<FIR0_SPEC, 20> {
        FPE4_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Bypasses the per-field writers; the caller is responsible for
        // supplying a bit pattern that is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "DSI Host force interrupt register 0\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fir0::W`](W). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FIR0_SPEC;
impl crate::RegisterSpec for FIR0_SPEC {
    // 32-bit underlying register width.
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [`fir0::W`](W) writer structure"]
impl crate::Writable for FIR0_SPEC {
    // NOTE(review): both modify-bitmaps are zero — per the svd2rust convention
    // no bits appear to be auto-set/cleared on modify; confirm against the
    // generator docs if this matters.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets FIR0 to value 0"]
impl crate::Resettable for FIR0_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/// Pretty-prints a point via its `Debug` implementation.
fn print_point(point: &Point) {
    println!("{:#?}", point);
}
/// Moves the point one unit along the x axis, in place.
fn inc_x(point: &mut Point) {
    point.x += 1;
}
/// Moves the point one unit along the y axis, in place.
fn inc_y(point: &mut Point) {
    point.y += 1;
}
// A 2-D point with integer coordinates. Clone allows independent copies;
// Debug enables the `{:?}`/`{:#?}` printing used throughout `main`.
#[derive(Clone,Debug)]
struct Point {
    x: i32,
    y: i32
}
/// Returns the larger of `a` and `b`.
fn max(a: i32, b: i32) -> i32 {
    // Delegate to the standard library's `Ord::max` instead of a
    // hand-rolled comparison.
    a.max(b)
}
/// Tutorial walkthrough: printing, branching, a gcd loop, struct literals,
/// cloning, and mutable references.
fn main() {
    let hellostr: &str = "Hello";
    let name = "world";
    let booltest: bool = true;
    println!("{}: {}, {}!", booltest, hellostr, name);
    let num1: i32 = 24;
    let num2: i32 = 42;
    if num1 > num2 {
        println!("{} is greater than {}", num1, num2);
    } else {
        println!("{} is less than {}", num1, num2);
    }
    // Euclid's algorithm for the greatest common divisor.
    let mut a = 30;
    let mut b = 45;
    print!("The gcd of {} and {} is ", a, b);
    while b != 0 {
        let temp = b;
        // BUG FIX: was `b = b % a;`, which is not Euclid's step and produced a
        // wrong result whenever a > b (e.g. a = 6, b = 4 yielded 4 instead of
        // gcd 2). For the demo values 30/45 the printed result (15) is unchanged.
        b = a % b;
        a = temp;
    }
    println!("{}", a);
    // NOTE: `b` is always 0 after the loop, so this demonstrates max(a, 0).
    println!("The maximum of {} and {} is {}", a, b, max(a, b));
    let the_point = Point { x: 24, y: 42 };
    println!("the_point: x={},y={}", the_point.x, the_point.y);
    // Field order in a struct literal does not matter.
    let another_point = Point { y: 1, x: 2 };
    println!("another_point: x={},y={}", another_point.x, another_point.y);
    println!("{:#?}", another_point); // pretty-print value
    let p1 = Point { x: 1, y: 2 };
    println!("{:#?}", p1);
    let p2 = &p1;
    println!("{:#?}", p1);
    println!("{:#?}", p2);
    print_point(p2);
    // `clone` gives an independent copy: mutating p3 leaves p1 untouched.
    let mut p3 = p1.clone();
    println!("==============");
    print_point(&p1);
    print_point(&p3);
    p3.x = 14;
    print_point(&p1);
    print_point(&p3);
    println!("==============");
    let mut p4 = Point { x: 0, y: 0 };
    print_point(&p4);
    inc_x(&mut p4);
    inc_y(&mut p4);
    print_point(&p4);
    // Mutation through a mutable reference.
    let p5 = &mut p4;
    p5.x = 2;
    p5.y = 3;
    print_point(&p5);
}
|
#![feature(generic_associated_types)]
use crystalorb::{
client::stage::{Stage, StageMut},
timestamp::Timestamp,
world::Tweened,
Config, TweeningMethod,
};
use pretty_assertions::assert_eq;
use test_env_log::test;
mod common;
use common::{MockClientServer, MockCommand, MockWorld};
#[test]
fn while_all_commands_originate_from_single_client_then_that_client_should_match_server_exactly() {
    const TIMESTEP_SECONDS: f64 = 1.0 / 60.0;
    // Exercise update cadences both faster and slower than one frame per update.
    for frames_per_update in &[1.0, 0.5, 1.0 / 3.0, 1.5, 2.0, 3.0, 4.0, 6.0] {
        // GIVEN a server and multiple clients in a perfect network.
        const FRAMES_TO_LAG_BEHIND: i32 = 12;
        assert_eq!(
            (FRAMES_TO_LAG_BEHIND as f64 / frames_per_update).fract(),
            0.0,
            "lag needs to be multiple of frames_per_update so the display states line up.",
        );
        let mut mock_client_server = MockClientServer::new(Config {
            lag_compensation_latency: FRAMES_TO_LAG_BEHIND as f64 * TIMESTEP_SECONDS,
            blend_latency: 0.2,
            timestep_seconds: TIMESTEP_SECONDS,
            clock_sync_needed_sample_count: 8,
            clock_sync_request_period: 0.0,
            clock_sync_assumed_outlier_rate: 0.2,
            max_tolerable_clock_deviation: 0.1,
            snapshot_send_period: 0.1,
            update_delta_seconds_max: 0.5,
            timestamp_skip_threshold_seconds: 1.0,
            fastforward_max_per_step: 10,
            tweening_method: TweeningMethod::MostRecentlyPassed,
        });
        mock_client_server.client_1_net.connect();
        mock_client_server.client_2_net.connect();
        // GIVEN that the clients are ready.
        mock_client_server.update_until_clients_ready(TIMESTEP_SECONDS * frames_per_update);
        // WHEN a single chosen client issue commands.
        // Each inner vec is the batch of commands for one timestep.
        let mut commands = [
            vec![0, 1, 2],
            vec![3, 4],
            vec![5],
            vec![6, 7],
            vec![],
            vec![8, 9, 10, 11, 12],
        ];
        let start_timestamp = match mock_client_server.client_1.stage() {
            Stage::Ready(client) => client.simulating_timestamp(),
            _ => unreachable!(),
        };
        let target_timestamp =
            start_timestamp + (commands.len() as i16).max(*frames_per_update as i16);
        let mut client_state_history: Vec<Tweened<MockWorld>> = Vec::new();
        let mut server_state_history: Vec<Tweened<MockWorld>> = Vec::new();
        while mock_client_server.server.display_state().timestamp() < target_timestamp {
            let current_client_timestamp = match mock_client_server.client_1.stage() {
                Stage::Ready(client) => {
                    Timestamp::default() + client.display_state().float_timestamp() as i16
                }
                _ => unreachable!(),
            };
            // Stop issuing/recording once the client reaches the target; the
            // server keeps running until it catches up.
            let update_client = current_client_timestamp < target_timestamp;
            if update_client {
                match mock_client_server.client_1.stage_mut() {
                    StageMut::Ready(mut client) => {
                        let current_index = (i16::from(current_client_timestamp - start_timestamp))
                            .clamp(0, commands.len() as i16 - 1)
                            as usize;
                        // Drain every batch scheduled up to and including the
                        // current timestep so large steps skip none of them.
                        for commands_for_single_timestep in commands[0..=current_index].iter_mut() {
                            for command in commands_for_single_timestep.drain(..) {
                                client.issue_command(
                                    MockCommand(command),
                                    &mut mock_client_server.client_1_net,
                                );
                            }
                        }
                    }
                    _ => unreachable!(),
                }
            }
            mock_client_server.update(TIMESTEP_SECONDS * frames_per_update);
            server_state_history.push(mock_client_server.server.display_state().into());
            if update_client {
                match mock_client_server.client_1.stage_mut() {
                    StageMut::Ready(client) => {
                        client_state_history.push(client.display_state().clone())
                    }
                    _ => unreachable!(),
                }
            }
        }
        // THEN the recorded server states should perfectly match the chosen client's states.
        assert_eq!(
            server_state_history[server_state_history.len() - client_state_history.len()..],
            client_state_history[..],
            "frames per update: {}",
            frames_per_update
        );
    }
}
#[test]
fn while_no_commands_are_issued_then_all_clients_should_match_server_exactly() {
    const TIMESTEP_SECONDS: f64 = 1.0 / 60.0;
    // Exercise update cadences both faster and slower than one frame per update.
    for frames_per_update in &[1.0, 0.5, 1.0 / 3.0, 1.5, 2.0, 3.0, 4.0, 6.0] {
        // GIVEN a server and multiple clients in a perfect network.
        const FRAMES_TO_LAG_BEHIND: i32 = 12;
        assert_eq!(
            (FRAMES_TO_LAG_BEHIND as f64 / frames_per_update).fract(),
            0.0,
            "lag needs to be multiple of frames_per_update so the display states line up.",
        );
        let mut mock_client_server = MockClientServer::new(Config {
            lag_compensation_latency: FRAMES_TO_LAG_BEHIND as f64 * TIMESTEP_SECONDS,
            blend_latency: 0.2,
            timestep_seconds: TIMESTEP_SECONDS,
            clock_sync_needed_sample_count: 8,
            clock_sync_request_period: 0.0,
            clock_sync_assumed_outlier_rate: 0.2,
            max_tolerable_clock_deviation: 0.1,
            snapshot_send_period: 0.1,
            update_delta_seconds_max: 0.5,
            timestamp_skip_threshold_seconds: 1.0,
            fastforward_max_per_step: 10,
            tweening_method: TweeningMethod::MostRecentlyPassed,
        });
        mock_client_server.client_1_net.connect();
        mock_client_server.client_2_net.connect();
        // The only command in this scenario comes from the server itself,
        // issued before the clients become ready.
        mock_client_server
            .server
            .issue_command(MockCommand(123), &mut mock_client_server.server_net);
        // WHEN no commands are issued.
        mock_client_server.update_until_clients_ready(TIMESTEP_SECONDS * frames_per_update);
        // WHEN a single chosen client issue commands.
        let start_timestamp = match mock_client_server.client_1.stage() {
            Stage::Ready(client) => client.simulating_timestamp(),
            _ => unreachable!(),
        };
        let target_timestamp = start_timestamp + 100;
        let mut client_1_state_history: Vec<Tweened<MockWorld>> = Vec::new();
        let mut client_2_state_history: Vec<Tweened<MockWorld>> = Vec::new();
        let mut server_state_history: Vec<Tweened<MockWorld>> = Vec::new();
        while mock_client_server.server.display_state().timestamp() < target_timestamp {
            let current_client_timestamp = match mock_client_server.client_1.stage() {
                Stage::Ready(client) => {
                    Timestamp::default() + client.display_state().float_timestamp() as i16
                }
                _ => unreachable!(),
            };
            // Record client histories only until the client reaches the target;
            // the server keeps running until it catches up.
            let update_client = current_client_timestamp < target_timestamp;
            mock_client_server.update(TIMESTEP_SECONDS * frames_per_update);
            server_state_history.push(mock_client_server.server.display_state().into());
            if update_client {
                // TODO: Mut seems unnecessary.
                match mock_client_server.client_1.stage_mut() {
                    StageMut::Ready(client) => {
                        client_1_state_history.push(client.display_state().clone())
                    }
                    _ => unreachable!(),
                }
                match mock_client_server.client_2.stage_mut() {
                    StageMut::Ready(client) => {
                        client_2_state_history.push(client.display_state().clone())
                    }
                    _ => unreachable!(),
                }
            }
        }
        // THEN the recorded server states should perfectly match every client's states.
        assert_eq!(
            server_state_history[server_state_history.len() - client_1_state_history.len()..],
            client_1_state_history[..],
            "client_1, frames per update: {}",
            frames_per_update
        );
        assert_eq!(
            server_state_history[server_state_history.len() - client_2_state_history.len()..],
            client_2_state_history[..],
            "client_2, frames per update: {}",
            frames_per_update
        );
    }
}
|
use std::fs;
use std::io::prelude::*;
use std::io::Cursor;
use std::net::{TcpListener, TcpStream};
/// Parses the request line of an HTTP request, returning `(method, path)`.
///
/// # Errors
///
/// Returns `Err(())` when the request line cannot be read or contains fewer
/// than two whitespace-separated tokens.
fn handle_header(buffer: &[u8]) -> Result<(String, String), ()> {
    let mut cursor = Cursor::new(buffer);
    let mut request_line = String::new();
    // Map read failures (e.g. invalid UTF-8) to the function's error type.
    cursor.read_line(&mut request_line).map_err(|_| ())?;
    let mut parts = request_line.split_whitespace();
    // BUG FIX: was `.unwrap()`, which panicked on an empty/blank request
    // instead of returning the Err the signature promises.
    let method: String = parts.next().ok_or(())?.into();
    let path: String = parts.next().ok_or(())?.into();
    Ok((method, path))
}
/// Reads one HTTP request from `stream` and serves the file named by the
/// request path, resolved relative to the working directory.
/// Malformed requests fall back to serving `README.md`; missing files drop
/// the connection without a response (preserving the original behaviour).
fn handle_client(mut stream: TcpStream) {
    let mut buffer = [0; 1024];
    // was `.unwrap()`: a failed read used to panic and kill the (serial)
    // server loop; now the connection is simply dropped.
    if stream.read(&mut buffer).is_err() {
        return;
    }
    let (status_line, filename): (&str, String) =
        if let Ok((method, path)) = handle_header(&buffer) {
            println!("{} {}", method, path);
            // SECURITY: the path comes straight off the socket; reject any
            // traversal component so clients cannot read files outside the
            // served directory (e.g. GET /../../etc/passwd).
            if path.contains("..") {
                ("HTTP/1.1 404 NOT FOUND\r\n\r\n", "README.md".to_owned())
            } else {
                // `get(1..)` instead of `[1..]`: slicing could panic if byte 1
                // is not a char boundary in a hostile request line.
                (
                    "HTTP/1.1 200 OK\r\n\r\n",
                    path.get(1..).unwrap_or("").to_owned(),
                )
            }
        } else {
            ("HTTP/1.1 404 NOT FOUND\r\n\r\n", "README.md".to_owned())
        };
    let contents = match fs::read_to_string(filename) {
        Ok(contents) => contents,
        Err(_) => return,
    };
    let response = format!("{}{}", status_line, contents);
    // `write_all` retries partial writes; bare `write` could truncate the
    // response. A write/flush failure just drops the connection.
    if stream.write_all(response.as_bytes()).is_err() {
        return;
    }
    let _ = stream.flush();
}
/// Binds to localhost:8080 and serves connections one at a time.
fn main() -> std::io::Result<()> {
    let listener = TcpListener::bind("127.0.0.1:8080")?;
    // Accept connections and process them serially; an accept error aborts.
    for stream in listener.incoming() {
        match stream {
            Ok(stream) => handle_client(stream),
            Err(e) => panic!("encountered IO error: {}", e),
        }
    }
    Ok(())
}
|
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg};
use subspace_core_primitives::crypto::Scalar;
use subspace_core_primitives::RawRecord;
/// Benchmarks the KZG pipeline: polynomial creation, commitment, witness
/// creation, and verification over one raw record's worth of random scalars.
fn criterion_benchmark(c: &mut Criterion) {
    // One scalar per safe-byte chunk of a raw record, filled with random data.
    let values = (0..RawRecord::SIZE / Scalar::SAFE_BYTES)
        .map(|_| Scalar::from(rand::random::<[u8; Scalar::SAFE_BYTES]>()))
        .collect::<Vec<_>>();
    let kzg = Kzg::new(embedded_kzg_settings());
    c.bench_function("create-polynomial", |b| {
        b.iter(|| {
            kzg.poly(black_box(&values)).unwrap();
        })
    });
    c.bench_function("commit", |b| {
        // Setup outside `iter` so only the commitment itself is timed.
        let polynomial = kzg.poly(&values).unwrap();
        b.iter(|| {
            kzg.commit(black_box(&polynomial)).unwrap();
        })
    });
    let num_values = values.len();
    c.bench_function("create-witness", |b| {
        let polynomial = kzg.poly(&values).unwrap();
        b.iter(|| {
            kzg.create_witness(black_box(&polynomial), black_box(num_values), black_box(0))
                .unwrap();
        })
    });
    c.bench_function("verify", |b| {
        // Build a real commitment/witness pair for index 0, then time
        // verification alone.
        let polynomial = kzg.poly(&values).unwrap();
        let commitment = kzg.commit(&polynomial).unwrap();
        let index = 0;
        let witness = kzg.create_witness(&polynomial, num_values, index).unwrap();
        let value = values.first().unwrap();
        b.iter(|| {
            kzg.verify(
                black_box(&commitment),
                black_box(num_values),
                black_box(index),
                black_box(value),
                black_box(&witness),
            );
        })
    });
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
|
/// Module-nesting example: school members.
pub mod school_member {
    /// Student-specific helpers.
    pub mod student {
        /// Prints the literal string "Age" to stdout.
        pub fn get_age() {
            println!("Age");
        }
    }
}
|
fn main() {
    // Fixed-size array: every element shares one type, and the length can
    // never change after creation.
    let aa = [1, 2, 3, 4, 5];
    let first = aa[0];
    println!("The value of aa[0] is: {}", first);
}
|
#[cfg(test)]
mod tests {
    use super::super::nock::*;

    /// Runs `input` through the Nock interpreter and asserts that the
    /// `Display`-formatted product equals `expected`.
    /// Replaces the parse/eval/format triple previously duplicated in
    /// every test below.
    fn assert_nock(input: &str, expected: &str) {
        let product = parse_and_nock(input.to_string());
        assert_eq!(format!("{}", product), expected);
    }

    #[test]
    fn it_works() {
        assert!(1 == 1)
    }

    #[test]
    fn op_0() {
        // Operator 0: tree addressing (slot lookup) into the subject.
        assert_nock("[57 [0 1]]", "57");
        assert_nock("[[132 19] [0 3]]", "19");
        assert_nock("[[[4 5] [6 14 15]] [0 7]]", "[ 14 15 ]");
        assert_nock("[[[4 5] [6 14 15]] [0 2]]", "[ 4 5 ]");
    }

    #[test]
    fn op_1() {
        // Operator 1: constant — the argument is returned unchanged.
        assert_nock("[42 [1 153 218]]", "[ 153 218 ]");
        assert_nock("[42 [1 153]]", "153");
    }

    #[test]
    fn op_2() {
        // Operator 2: evaluate with a computed subject and formula.
        assert_nock("[77 [2 [1 42] [1 1 153 218]]]", "[ 153 218 ]");
    }

    #[test]
    fn op_3() {
        // Operator 3: cell test (0 = cell, 1 = atom).
        assert_nock("[42 [3 0 1]]", "1");
        assert_nock("[42 [[4 0 1] [3 0 1]]]", "[ 43 1 ]");
    }

    #[test]
    fn op_4() {
        // Operator 4: increment.
        assert_nock("[42 4 0 1]", "43");
        assert_nock("[42 [4 0 1]]", "43");
        assert_nock("[57 [4 0 1]]", "58");
        assert_nock("[[132 19] [4 0 3]]", "20");
    }

    #[test]
    fn op_5() {
        // Operator 5: equality test (0 = equal, 1 = unequal).
        assert_nock("[42 [5 [0 1] [0 1]]]", "0");
        assert_nock("[42 [5 [0 1] [1 42]]]", "0");
        assert_nock("[42 [5 [0 1] [1 43]]]", "1");
    }

    #[test]
    fn op_6() {
        // Operator 6: if/else on a 0 (true) / 1 (false) condition.
        assert_nock("[42 [6 [1 0] [4 0 1] [1 233]]]", "43");
        assert_nock("[42 [6 [1 1] [4 0 1] [1 233]]]", "233");
    }

    #[test]
    fn op_7() {
        // Operator 7: composition of two formulas.
        assert_nock("[42 [7 [4 0 1] [4 0 1]]]", "44");
    }

    #[test]
    fn op_8() {
        // Operator 8: push — extend the subject with a computed value.
        assert_nock("[42 [8 [4 0 1] [0 1]]]", "[ 43 42 ]");
        assert_nock("[42 [8 [4 0 1] [4 0 3]]]", "43");
    }

    // TODO: 9
    // [[42 [1 1 1]] [2 [0 1] [0 1]]]
    // TODO: 10

    #[test]
    fn op_11() {
        // Operator 11: hint — the hint argument is discarded.
        assert_nock("[[132 19] [11 37 [4 0 3]]]", "20");
        assert_nock("[[132 19] [11 [1 1 1] [4 0 3]]]", "20");
    }
}
|
pub mod matrix;
pub mod neuralnetwork;
pub mod xorshift;
use crate::matrix::Matrix;
use crate::neuralnetwork::NeuralNetwork;
use std::fs;
use std::time::{SystemTime, UNIX_EPOCH};
/// Milliseconds elapsed since the Unix epoch.
///
/// Panics if the system clock is set before the epoch.
pub fn current_millis() -> u128 {
    let since_epoch = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
    since_epoch.as_millis()
}
/// Reads a CSV file where each row holds `input_size` feature values followed
/// by `output_size` label values, all parseable as `f32`.
///
/// Returns `(inputs, outputs)` as parallel vectors of column matrices.
/// Panics if the file cannot be read or a value fails to parse.
pub fn parse_csv(
    filename: &str,
    input_size: usize,
    output_size: usize,
) -> (Vec<Matrix>, Vec<Matrix>) {
    let mut inputs = Vec::new();
    let mut outputs = Vec::new();
    let content = fs::read_to_string(filename).expect("Error: Can't open file!");
    // Iterate lines directly instead of collecting into an intermediate Vec
    // and indexing (needless_collect + index-loop anti-idiom).
    for line in content.lines() {
        let mut input_vector = Matrix::new(input_size, 1);
        let mut output_vector = Matrix::new(output_size, 1);
        for (value_index, value) in line.split(',').enumerate() {
            let parsed = value.parse::<f32>().unwrap();
            // The first `input_size` columns are features, the rest labels.
            if value_index < input_size {
                input_vector[value_index][0] = parsed;
            } else {
                output_vector[value_index - input_size][0] = parsed;
            }
        }
        inputs.push(input_vector);
        outputs.push(output_vector);
    }
    (inputs, outputs)
}
/// Fraction of rows in `filename` for which the network's strongest output
/// matches the labelled class.
pub fn get_accuracy(nn: &NeuralNetwork, filename: &str) -> f32 {
    let (inputs, outputs) = parse_csv(filename, nn.input_nodes, nn.output_nodes);
    // Count predictions whose arg-max agrees with the expected arg-max.
    let num_right = inputs
        .iter()
        .zip(outputs.iter())
        .filter(|(input, expected)| nn.predict(input).index_of_max() == expected.index_of_max())
        .count();
    num_right as f32 / inputs.len() as f32
}
/// Trains the network on every row of `filename` for `epochs` passes,
/// reporting progress and total wall-clock time.
pub fn train_on_dataset(nn: &mut NeuralNetwork, filename: &str, epochs: u32) {
    let (inputs, outputs) = parse_csv(filename, nn.input_nodes, nn.output_nodes);
    let start_time = current_millis();
    for epoch in 0..epochs {
        // One full pass over the dataset.
        for (input, output) in inputs.iter().zip(outputs.iter()) {
            nn.train(input, output);
        }
        print!("{} of {} epochs done\n", epoch + 1, epochs);
    }
    let elapsed_seconds = (current_millis() - start_time) as f32 / 1000.0;
    print!("Training took {}s\n", elapsed_seconds);
}
|
use super::PubNub;
use crate::data::channel;
use crate::data::object::Object;
use crate::data::request;
use crate::data::timetoken::Timetoken;
use crate::runtime::Runtime;
use crate::transport::Transport;
impl<TTransport, TRuntime> PubNub<TTransport, TRuntime>
where
    TTransport: Transport + 'static,
    TRuntime: Runtime + 'static,
{
    /// Publish a message over the PubNub network.
    ///
    /// # Errors
    ///
    /// Returns transport-specific errors.
    ///
    /// # Example
    ///
    /// ```
    /// # use pubnub_core::mock::{transport::MockTransport, runtime::MockRuntime};
    /// # let transport = MockTransport::new();
    /// # let runtime = MockRuntime::new();
    /// use pubnub_core::{data::channel, json::object, Builder};
    ///
    /// # async {
    /// let pubnub = Builder::with_components(transport, runtime).build();
    ///
    /// let channel_name: channel::Name = "my-channel".parse().unwrap();
    /// let timetoken = pubnub
    ///     .publish(
    ///         channel_name,
    ///         object! {
    ///             "username" => "JoeBob",
    ///             "content" => "Hello, world!",
    ///         },
    ///     )
    ///     .await?;
    ///
    /// println!("Timetoken: {}", timetoken);
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// # };
    /// ```
    pub async fn publish(
        &self,
        channel: channel::Name,
        message: Object,
    ) -> Result<Timetoken, <TTransport as Transport>::Error> {
        // Plain publish carries no metadata; otherwise identical to
        // `publish_with_metadata`.
        let request = request::Publish {
            channel,
            meta: None,
            payload: message,
        };
        self.transport.call(request).await
    }
    /// Publish a message over the PubNub network with an extra metadata payload.
    ///
    /// # Errors
    ///
    /// Returns transport-specific errors.
    ///
    /// # Example
    ///
    /// ```
    /// # use pubnub_core::mock::{transport::MockTransport, runtime::MockRuntime};
    /// # let transport = MockTransport::new();
    /// # let runtime = MockRuntime::new();
    /// use pubnub_core::{data::channel, json::object, Builder};
    ///
    /// # async {
    /// let pubnub = Builder::with_components(transport, runtime).build();
    ///
    /// let message = object! {
    ///     "username" => "JoeBob",
    ///     "content" => "Hello, world!",
    /// };
    /// let metadata = object! {
    ///     "uuid" => "JoeBob",
    /// };
    ///
    /// let channel_name: channel::Name = "my-channel".parse().unwrap();
    /// let timetoken = pubnub
    ///     .publish_with_metadata(channel_name, message, metadata)
    ///     .await?;
    ///
    /// println!("Timetoken: {}", timetoken);
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// # };
    /// ```
    pub async fn publish_with_metadata(
        &self,
        channel: channel::Name,
        message: Object,
        metadata: Object,
    ) -> Result<Timetoken, <TTransport as Transport>::Error> {
        // Delegate the prepared request to the configured transport.
        let request = request::Publish {
            channel,
            meta: Some(metadata),
            payload: message,
        };
        self.transport.call(request).await
    }
}
|
use std::sync::Arc;
use rosu_v2::prelude::{Beatmap, GameMode, OsuError, Score, Username};
use twilight_model::application::interaction::{
application_command::{CommandDataOption, CommandOptionValue},
ApplicationCommand,
};
use crate::{
commands::{
osu::{get_user_and_scores, unchoke_pp, ScoreArgs, UserArgs},
parse_discord, parse_mode_option, DoubleResultCow,
},
core::{commands::CommandData, Context},
embeds::{EmbedData, FixScoreEmbed},
error::Error,
tracking::process_osu_tracking,
util::{
constants::{
common_literals::{DISCORD, INDEX, MODE, NAME},
GENERAL_ISSUE, OSU_API_ISSUE,
},
InteractionExt, MessageExt,
},
BotResult,
};
/// Shared handler for the `fix` command: fetches the user's `index`-th most
/// recent score and computes its unchoked pp (see `needs_unchoking`), then
/// replies with a `FixScoreEmbed`. Mania is rejected up front.
pub(super) async fn _fix(ctx: Arc<Context>, data: CommandData<'_>, args: FixArgs) -> BotResult<()> {
    let FixArgs { mode, name, index } = args;
    // Mania has no notion of unchoking here (see needs_unchoking's panic arm).
    if mode == GameMode::MNA {
        return data.error(&ctx, "Can't fix mania scores \\:(").await;
    }
    let name = match name {
        Some(name) => name,
        None => return super::require_link(&ctx, &data).await,
    };
    // Retrieve the user and their recent scores
    let user_args = UserArgs::new(name.as_str(), mode);
    let score_args = ScoreArgs::recent(100).include_fails(true);
    let (mut user, scores) = match get_user_and_scores(&ctx, user_args, &score_args).await {
        Ok((_, scores)) if scores.is_empty() => {
            let content = format!(
                "No recent {}plays found for user `{name}`",
                match mode {
                    GameMode::STD => "",
                    GameMode::TKO => "taiko ",
                    GameMode::CTB => "ctb ",
                    GameMode::MNA => "mania ",
                },
            );
            return data.error(&ctx, content).await;
        }
        Ok((user, scores)) => (user, scores),
        Err(OsuError::NotFound) => {
            let content = format!("User `{name}` was not found");
            return data.error(&ctx, content).await;
        }
        Err(why) => {
            let _ = data.error(&ctx, OSU_API_ISSUE).await;
            return Err(why.into());
        }
    };
    // Overwrite default mode
    user.mode = mode;
    // `index` is 1-based from the user's perspective; default to the most
    // recent score.
    let num = index.unwrap_or(1).saturating_sub(1);
    let scores_len = scores.len();
    let (mut score, map, user, mut scores) = match scores.into_iter().nth(num) {
        Some(mut score) => {
            // Fetch the cached mapset, the user's top plays, the user, and the
            // fully-prepared score concurrently.
            let mapset_fut = ctx
                .psql()
                .get_beatmapset(score.map.as_ref().unwrap().mapset_id);
            let best_fut = ctx
                .osu()
                .user_scores(score.user_id)
                .mode(mode)
                .limit(100)
                .best();
            let user_fut = ctx.osu().user(score.user_id).mode(mode);
            let score_fut = super::prepare_score(&ctx, &mut score);
            match tokio::join!(mapset_fut, score_fut, user_fut, best_fut) {
                // Any osu! API failure (score, user, or best plays) aborts.
                (_, Err(why), ..) | (.., Err(why), _) | (.., Err(why)) => {
                    let _ = data.error(&ctx, OSU_API_ISSUE).await;
                    return Err(why.into());
                }
                (Ok(mapset), Ok(_), Ok(user), Ok(best)) => {
                    let mut map = score.map.take().unwrap();
                    map.mapset = Some(mapset);
                    (score, map, user, best)
                }
                // Database cache miss for the mapset: fall back to the osu! API.
                (Err(_), Ok(_), Ok(user), Ok(best)) => {
                    let mut map = score.map.take().unwrap();
                    let mapset = match ctx.osu().beatmapset(map.mapset_id).await {
                        Ok(mapset) => mapset,
                        Err(why) => {
                            let _ = data.error(&ctx, OSU_API_ISSUE).await;
                            return Err(why.into());
                        }
                    };
                    map.mapset = Some(mapset);
                    (score, map, user, best)
                }
            }
        }
        None => {
            let content = format!(
                "There {verb} only {num} score{plural} in `{name}`'{genitive} recent history.",
                verb = if scores_len != 1 { "are" } else { "is" },
                num = scores_len,
                plural = if scores_len != 1 { "s" } else { "" },
                name = name,
                genitive = if name.ends_with('s') { "" } else { "s" }
            );
            return data.error(&ctx, content).await;
        }
    };
    // Only recalculate pp when the score is missing pp or actually choked.
    let unchoked_pp = if score.pp.is_some() && !needs_unchoking(&score, &map) {
        None
    } else {
        match unchoke_pp(&ctx, &mut score, &map).await {
            Ok(pp) => pp,
            Err(why) => {
                let _ = data.error(&ctx, GENERAL_ISSUE).await;
                return Err(why);
            }
        }
    };
    // Process tracking
    process_osu_tracking(&ctx, &mut scores, Some(&user)).await;
    let gb = ctx.map_garbage_collector(&map);
    let embed_data = FixScoreEmbed::new(user, map, Some((score, scores)), unchoked_pp, None);
    let builder = embed_data.into_builder().build().into();
    data.create_message(&ctx, builder).await?;
    // Set map on garbage collection list if unranked
    gb.execute(&ctx);
    Ok(())
}
/// Decide whether a score counts as "choked" and therefore needs its pp
/// recalculated as if it had been completed cleanly.
///
/// Panics when called with a mania map; callers filter that mode out first.
fn needs_unchoking(score: &Score, map: &Beatmap) -> bool {
    let misses = score.statistics.count_miss;

    match map.mode {
        GameMode::STD => {
            // Allow a small tolerance of 5 combo below the map maximum.
            let combo_cutoff = map.max_combo.map_or(0, |c| c.saturating_sub(5));

            misses > 0 || score.max_combo < combo_cutoff
        }
        GameMode::TKO => misses > 0,
        GameMode::CTB => score.max_combo != map.max_combo.unwrap_or(0),
        GameMode::MNA => panic!("can not unchoke mania scores"),
    }
}
/// Parsed arguments for the `fix` command.
pub(super) struct FixArgs {
    // Game mode the score lookup should use.
    mode: GameMode,
    // Username to look up; `None` falls back to the caller's linked account.
    name: Option<Username>,
    // 1-based index into the user's recent plays (defaults to the most recent).
    index: Option<usize>,
}
impl FixArgs {
    /// Extract `fix` arguments from slash-command options, merging in the
    /// caller's stored user config (mode and linked name).
    ///
    /// Returns `Ok(Err(content))` when a discord-user option cannot be
    /// resolved to an osu! name; propagates config/option errors otherwise.
    pub(super) async fn slash(
        ctx: &Context,
        command: &ApplicationCommand,
        options: Vec<CommandDataOption>,
    ) -> DoubleResultCow<Self> {
        let mut config = ctx.user_config(command.user_id()?).await?;
        let mut name = None;
        let mut index = None;

        for option in options {
            match option.value {
                CommandOptionValue::Integer(value) => match option.name.as_str() {
                    // Clamp to the 1..=100 window of fetchable recent scores
                    // (clearer than the previous `max(1).min(100)` chain).
                    INDEX => index = Some(value.clamp(1, 100) as usize),
                    _ => return Err(Error::InvalidCommandOptions),
                },
                CommandOptionValue::String(value) => match option.name.as_str() {
                    NAME => name = Some(value.into()),
                    MODE => config.mode = parse_mode_option(&value),
                    _ => return Err(Error::InvalidCommandOptions),
                },
                CommandOptionValue::User(value) => match option.name.as_str() {
                    DISCORD => match parse_discord(ctx, value).await? {
                        Ok(osu) => name = Some(osu.into_username()),
                        Err(content) => return Ok(Err(content)),
                    },
                    _ => return Err(Error::InvalidCommandOptions),
                },
                _ => return Err(Error::InvalidCommandOptions),
            }
        }

        // An explicit name option wins over the stored account link.
        let mode = config.mode.unwrap_or_default();
        let name = name.or_else(|| config.into_username());

        Ok(Ok(Self { mode, name, index }))
    }
}
|
/// Entry point for the enums-chapter demos: runs each example in order.
pub fn test() {
    println!("Hello, world!");
    demo_ip_addr_kind();
    demo_ip_addr();
    demo_message();
    demo_option();
    demo_match();
    demo_if_let();
}
/// Demo: an enum used as a plain tag, with the payload kept in a struct.
fn demo_ip_addr_kind() {
    #[derive(Debug)]
    enum IpAddrKind {
        V4,
        V6,
    }

    #[derive(Debug)]
    struct IpAddr {
        kind: IpAddrKind,
        address: String,
    }

    let home = IpAddr {
        kind: IpAddrKind::V4,
        address: "127.0.0.1".to_string(),
    };

    let loopback = IpAddr {
        kind: IpAddrKind::V6,
        address: "::1".to_string(),
    };

    println!("home: {:?}", home);
    println!("loopback: {:?}", loopback);
}
/// Demo: enum variants that carry their payload directly.
fn demo_ip_addr() {
    #[derive(Debug)]
    enum IpAddr {
        V4(String),
        V6(String),
    }

    let home = IpAddr::V4("127.0.0.1".to_string());
    let loopback = IpAddr::V6("::1".to_string());

    println!("home: {:?}", home);
    println!("loopback: {:?}", loopback);
}
/// Demo: one enum with four differently-shaped variants plus a method on it.
fn demo_message() {
    enum Message {
        Quit,
        Move { x: i32, y: i32 },
        Write(String),
        ChangeColor(i32, i32, i32),
    }

    impl Message {
        // Print a description of the message, dispatching on the variant.
        fn call(&self) {
            match self {
                Self::Quit => println!("Quitting!"),
                Self::Move { x, y } => println!("Moving to ({}, {})!", x, y),
                Self::Write(text) => println!("{}", text),
                Self::ChangeColor(r, g, b) => {
                    println!("Changing color to ({}, {}, {})", r, g, b)
                }
            }
        }
    }

    let messages = [
        Message::Quit,
        Message::Move { x: 8, y: 0 },
        Message::Write("String to print".to_string()),
        Message::ChangeColor(1, 2, 3),
    ];

    for message in &messages {
        message.call();
    }
}
/// Demo: a plain value versus the same value wrapped in `Option`.
fn demo_option() {
    let _plain: i8 = 5;
    let _maybe: Option<i8> = Some(5);
}
/// Demo: exhaustive `match` over an enum, exposed as a method on the enum.
fn demo_match() {
    enum Coin {
        Penny,
        Nickel,
        Dime,
        Quarter,
        Dollar,
    }

    impl Coin {
        // Cent value of each coin denomination.
        fn value_in_cents(&self) -> u32 {
            match self {
                Coin::Penny => 1,
                Coin::Nickel => 5,
                Coin::Dime => 10,
                Coin::Quarter => 25,
                Coin::Dollar => 100,
            }
        }
    }

    println!("Value of a Penny: {}", Coin::Penny.value_in_cents());
    println!("Value of a Nickel: {}", Coin::Nickel.value_in_cents());
    println!("Value of a Dime: {}", Coin::Dime.value_in_cents());
    println!("Value of a Quarter: {}", Coin::Quarter.value_in_cents());
    println!("Value of a Dollar: {}", Coin::Dollar.value_in_cents());
}
/// Demo: `match` versus `if let` for single-pattern checks, plus an `if let`
/// that binds data out of an enum variant.
fn demo_if_let() {
    let some_u8_value = Some(0u8);

    // Exhaustive `match` needs a catch-all arm even when only one pattern matters.
    match some_u8_value {
        Some(3) => println!("three"),
        _ => (),
    }

    // `if let` expresses the same single-pattern check without the boilerplate.
    if let Some(3) = some_u8_value {
        println!("three");
    }

    #[derive(Debug)]
    enum State {
        Alabama,
        Alaska,
        Arizona,
        Arkansas,
    }

    enum Coin {
        Penny,
        Nickel,
        Dime,
        Quarter(State),
        Dollar,
    }

    // Count coins that are not quarters, announcing each quarter's state.
    // Takes a slice instead of `&Vec<_>` (clippy::ptr_arg); `&coins` coerces.
    fn count_non_quarters(coins: &[Coin]) -> u32 {
        let mut count = 0;

        for coin in coins {
            if let Coin::Quarter(state) = coin {
                println!("State quarter from {:?}!", state);
            } else {
                count += 1;
            }
        }

        count
    }

    let coins = vec![
        Coin::Penny,
        Coin::Quarter(State::Alabama),
        Coin::Penny,
        Coin::Dime,
        Coin::Nickel,
        Coin::Dollar,
        Coin::Quarter(State::Alaska),
        Coin::Quarter(State::Arizona),
        Coin::Quarter(State::Arkansas),
    ];

    println!("There are {} non-quarters", count_non_quarters(&coins));
}
use std::str::FromStr;
use chrono::{Duration, Utc};
use futures::future::join_all;
use lazy_static::lazy_static;
use reqwest;
use tokio::sync::{Mutex, MutexGuard};
use url::Url;
use thetvdb::{
error::{Error, Result},
params::*,
Client,
};
mod data;
use data::*;
// Name of the environment variable that must hold the TheTVDB API key.
const ENV_APIKEY: &str = "THETVDB_APIKEY";

lazy_static! {
    // Shared client slot, authenticated lazily on first use (see `get_client`).
    static ref CLIENT: Mutex<Option<Client>> = Mutex::new(None);
}
/// Searching by every supported key should surface the expected series.
#[tokio::test]
async fn search() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let cases = vec![
        ("name", SearchBy::Name(&PEII.series_name)),
        ("IMDb ID", SearchBy::IMDbID(&PEII.imdb_id)),
        ("Zap2it ID", SearchBy::Zap2itID(&PEII.zap2it_id)),
        ("slug", SearchBy::Slug(&PEII.slug)),
    ];

    for (case_name, search_by) in cases {
        let found = api
            .search(search_by)
            .await
            .unwrap()
            .into_iter()
            .any(|s| s == *PEII);

        assert!(
            found,
            "Expected series missing from {} search results",
            case_name
        );
    }
}
/// URL helper methods on search results should yield fetchable URLs.
#[tokio::test]
async fn search_urls() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let results = api
        .search(SearchBy::Name(&PEII.series_name))
        .await
        .expect("Error searching for series to test url methods");

    let first = results.first().unwrap();

    assert_get_urls_ok(vec![/*first.banner_url(),*/ first.website_url()]).await;
}
#[tokio::test]
async fn series() {
let guard = get_client().await;
let client = guard.as_ref().unwrap();
let peii = client.series(PEII.id).await.expect("Error fetching series");
assert_eq!(peii, *PEII);
}
/// All image/website URL helpers on a full series record should resolve.
#[tokio::test]
async fn series_urls() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let series = api
        .series(PEII.id)
        .await
        .expect("Error fetching series to test url methods");

    let urls = vec![
        series.banner_url(),
        series.poster_url(),
        series.fanart_url(),
        series.website_url(),
    ];

    assert_get_urls_ok(urls).await;
}
/// The last-modified endpoint should answer without error (ignored by default).
#[tokio::test]
#[ignore]
async fn series_last_modified() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    api.series_last_modified(PEII.id)
        .await
        .expect("Error fetching series last modified");
}
/// The actor list for the series should contain the expected narrator entry.
#[tokio::test]
async fn series_actors() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let found = api
        .series_actors(PEII.id)
        .await
        .expect("Error fetching series actors")
        .into_iter()
        .any(|a| a == *NARRATOR);

    assert!(found, "Expected actor missing from series actor list");
}
/// Actor image URLs should be fetchable.
#[tokio::test]
async fn series_actors_urls() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let actors = api
        .series_actors(PEII.id)
        .await
        .expect("Error fetching series actors to test url methods");

    assert_get_url_ok(actors.first().unwrap().image_url()).await;
}
/// The full episode list should have the known size and contain ISLANDS.
#[tokio::test]
async fn series_episodes() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let page = api
        .series_episodes(&EpisodeParams::new(PEII.id))
        .await
        .expect("Error fetching series episodes");

    assert_eq!(page.episodes.len(), EPISODE_COUNT);

    let found = page.episodes.into_iter().any(|e| e == *ISLANDS);
    assert!(found, "Expected episode missing from series episode list");
}
/// Episode thumbnail URLs should be fetchable.
#[tokio::test]
async fn series_episodes_urls() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let page = api
        .series_episodes(&EpisodeParams::new(PEII.id))
        .await
        .expect("Error fetching series episodes to test url methods");

    assert_get_url_ok(page.episodes.first().unwrap().filename_url()).await;
}
/// Querying by season + episode number should return exactly the ISLANDS episode.
#[tokio::test]
async fn series_episodes_query() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let params = EpisodeQueryParams::new(PEII.id)
        .aired_season(ISLANDS.aired_season)
        .aired_episode(ISLANDS.aired_episode_number);

    let page = api
        .series_episodes_query(&params)
        .await
        .expect("Error fetching series episodes query");

    assert_eq!(page.episodes.len(), 1);
    assert_eq!(page.episodes[0], *ISLANDS);
}
/// The episode summary endpoint should match the known snapshot.
#[tokio::test]
async fn series_episode_summary() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let summary = api
        .series_episodes_summary(PEII.id)
        .await
        .expect("Error fetching series episode summary");

    assert_eq!(summary, *EPISODE_SUMMARY);
}
/// Filtering to the full key set should reconstruct the reference series.
#[tokio::test]
async fn series_filter() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let keys = SeriesFilterKeys::new()
        .id()
        .series_name()
        .first_aired()
        .network()
        .slug()
        .status()
        .imdb_id()
        .zap2it_id();

    let filtered = api
        .series_filter(PEII.id, &keys)
        .await
        .expect("Error fetching filtered series");

    assert_eq!(filtered, *PEII);
}
/// URL helpers should work on a series fetched with only image/slug keys.
#[tokio::test]
async fn series_filter_urls() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let keys = SeriesFilterKeys::new().banner().poster().fanart().slug();

    let filtered = api
        .series_filter(PEII.id, &keys)
        .await
        .expect("Error fetching filtered series to test url methods");

    let urls = vec![
        filtered.banner_url(),
        filtered.poster_url(),
        filtered.fanart_url(),
        filtered.website_url(),
    ];

    assert_get_urls_ok(urls).await;
}
/// The image summary endpoint should answer without error.
#[tokio::test]
async fn series_images() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    api.series_images(PEII.id)
        .await
        .expect("Error fetching series images");
}
/// Querying images by key type should answer without error.
#[tokio::test]
async fn series_images_query() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let params = ImageQueryParams::with_key_type("series");

    api.series_images_query(PEII.id, &params)
        .await
        .expect("Error fetching series images query");
}
/// Full-size and thumbnail image URLs should be fetchable.
#[tokio::test]
async fn series_images_query_urls() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let params = ImageQueryParams::with_key_type("series");

    let images = api
        .series_images_query(PEII.id, &params)
        .await
        .expect("Error fetching series images query to test url methods");

    let first = images.first().unwrap();

    assert_get_urls_ok(vec![first.file_name_url(), first.thumbnail_url()]).await;
}
/// The image query-params endpoint should answer without error.
#[tokio::test]
async fn series_images_query_params() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    api.series_images_query_params(PEII.id)
        .await
        .expect("Error fetching series images query params");
}
/// Fetching a single episode by id should match the reference episode.
#[tokio::test]
async fn episode() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let fetched = api
        .episode(ISLANDS.id)
        .await
        .expect("Error fetching episode");

    assert_eq!(fetched, *ISLANDS);
}
/// The language list should contain the Samoan reference entry.
#[tokio::test]
async fn languages() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let found = api
        .languages()
        .await
        .expect("Error fetching languages")
        .into_iter()
        .any(|l| l == *SAMOAN);

    assert!(found, "Expected language missing from languages list");
}
/// Fetching a single language by id should match the reference entry.
#[tokio::test]
async fn language() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let fetched = api
        .language(SAMOAN.id)
        .await
        .expect("Error fetching language");

    assert_eq!(fetched, *SAMOAN);
}
/// The updated-series endpoint should answer for a one-day window.
#[tokio::test]
async fn updated() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let params = UpdatedParams::new(Utc::now() - Duration::days(1));

    api.updated(&params)
        .await
        .expect("Error fetching updated series");
}
#[tokio::test]
async fn movie() {
let guard = get_client().await;
let client = guard.as_ref().unwrap();
let movie = client.movie(TSR.id).await.expect("Error fetching movie");
assert_eq!(movie, *TSR);
let genre = movie.genres.iter().find(|g| *g == &*DRAMA);
if genre.is_none() {
panic!("Expected genre missing from movie genre list");
}
let translation = movie.translations.iter().find(|t| *t == &*TSR_ENG);
if translation.is_none() {
panic!("Expected translation missing from movie translation list");
}
let release_date = movie.release_dates.iter().find(|r| *r == &*RELEASE);
if release_date.is_none() {
panic!("Expected release date missing from movie release dates");
}
let remote_id = movie.remoteids.iter().find(|r| *r == &*TSR_IMDB);
if remote_id.is_none() {
panic!("Expected remote id missing from movie remote id list");
}
let actor = movie.people.actors.iter().find(|a| *a == &*ANDY);
if actor.is_none() {
panic!("Expected actor missing from movie actor list");
}
}
/// Every URL-ish field on a movie should parse and be fetchable.
#[tokio::test]
async fn movie_urls() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    let movie = api
        .movie(TSR.id)
        .await
        .expect("Error fetching movie to test url methods");

    let genre = movie.genres.first().unwrap();
    let artwork = movie.artworks.first().unwrap();
    let trailer = movie.trailers.first().unwrap();
    let person = movie.people.actors.first().unwrap();
    let remote_id = movie.remoteids.first().unwrap();

    // person.role_image_url left out because most role images are missing
    let urls = vec![
        genre.full_url(),
        artwork.full_url(),
        artwork.full_thumb_url(),
        Url::from_str(&trailer.url).map_err(Error::from),
        Url::from_str(&remote_id.url).map_err(Error::from),
        person.people_image_url(),
    ];

    assert_get_urls_ok(urls).await;
}
/// The movie-updates endpoint should answer for a one-day window.
#[tokio::test]
async fn movie_updates() {
    let lock = get_client().await;
    let api = lock.as_ref().unwrap();

    api.movie_updates(Utc::now() - Duration::days(1))
        .await
        .expect("Error fetching movie updates");
}
/// GET the given URL and require a 2xx response.
async fn assert_get_url_ok(url: Result<Url>) {
    let response = reqwest::get(url.unwrap()).await.unwrap();

    assert!(response.status().is_success());
}
/// Concurrently GET every URL in `urls`, requiring a 2xx response for each.
async fn assert_get_urls_ok<I>(urls: I)
where
    I: IntoIterator<Item = Result<Url>>,
{
    let checks = urls.into_iter().map(assert_get_url_ok);

    join_all(checks).await;
}
// Because there is no way to use async in lazy_static blocks
// CLIENT will be created here.
/// Return the shared, lazily-authenticated API client, creating it on first use.
///
/// Panics when the `THETVDB_APIKEY` env var is missing or authentication fails.
async fn get_client() -> MutexGuard<'static, Option<Client>> {
    let mut client = CLIENT.lock().await;

    if client.is_none() {
        // `unwrap_or_else` avoids building the panic message on the happy
        // path (clippy::expect_fun_call).
        let api_key = std::env::var(ENV_APIKEY)
            .unwrap_or_else(|_| panic!("Missing or invalid {} env var", ENV_APIKEY));

        *client = Some(
            Client::new(api_key)
                .await
                .expect("Could not authenticate test client"),
        );
    }

    client
}
|
use std::fs::File;
use std::io::{BufRead, BufReader};
/// Advent of Code 2019 day 16 part 2: repeat the input signal 10_000 times,
/// run 100 FFT phases, and print the 8 digits at the message offset.
fn main() {
    // let filename = "src/input0";
    let filename = "../part1/src/input";

    // Open the file in read-only mode (ignoring errors).
    let file = File::open(filename).unwrap();
    let reader = BufReader::new(file);

    // Collect every digit of the (effectively single-line) input signal.
    let mut digits = Vec::new();

    for line in reader.lines() {
        let line = line.unwrap(); // Ignore errors.

        if line.trim().is_empty() {
            break;
        }

        digits.extend(line.as_bytes().iter().map(|&b| b - b'0'));
    }

    // The first 7 digits of the signal encode the message offset.
    let offset = digits[..7]
        .iter()
        .fold(0usize, |acc, &d| acc * 10 + d as usize);

    let digits_offset = offset % digits.len();
    let total_len = 10_000 * digits.len();

    println!(
        "Offset: {}, Digits len {}, Digits Offset: {}, Total len: {}",
        offset,
        digits.len(),
        digits_offset,
        total_len
    );

    // Keep only the tail of the repeated signal, starting at `offset`:
    // the partial first copy, then `total_repeat` whole copies.
    let total_repeat = (total_len - offset) / digits.len();
    let mut old_digits = Vec::with_capacity(total_len - offset);
    old_digits.extend_from_slice(&digits[digits_offset..]);

    println!("Old digits: {:?}, Repeat: {}", old_digits, total_repeat);

    for _ in 0..total_repeat {
        old_digits.extend_from_slice(&digits);
    }

    println!("New Digits Len: {:?}", old_digits.len());

    // Each phase: digit[j] becomes the suffix sum of digits[j..] modulo 10.
    // NOTE(review): this shortcut is only valid when the offset lies in the
    // second half of the repeated signal (true for AoC inputs) — confirm.
    for _phase in 1..=100 {
        let mut new_digits = vec![0; old_digits.len()];
        let mut total_sum: i64 = old_digits.iter().map(|&a| a as i64).sum();

        for j in 0..old_digits.len() {
            new_digits[j] = (total_sum.abs() % 10) as u8;
            total_sum -= old_digits[j] as i64;
        }

        // println!("{} -> {:?}", _phase, new_digits);
        old_digits = new_digits;
    }

    println!("{} -> {:?}", 100, &old_digits[0..8]);
}
|
// Auto-generated (svd2rust-style) read-only view of the LPTIM `ISR_output`
// status register: one `BitReader` alias per status flag.
#[doc = "Register `ISR_output` reader"]
pub type R = crate::R<ISR_OUTPUT_SPEC>;
#[doc = "Field `CC1IF` reader - Compare 1 interrupt flag The CC1IF flag is set by hardware to inform application that LPTIM_CNT register value matches the compare register's value. The CC1IF flag can be cleared by writing 1 to the CC1CF bit in the LPTIM_ICR register."]
pub type CC1IF_R = crate::BitReader;
#[doc = "Field `ARRM` reader - Autoreload match ARRM is set by hardware to inform application that LPTIM_CNT register’s value reached the LPTIM_ARR register’s value. ARRM flag can be cleared by writing 1 to the ARRMCF bit in the LPTIM_ICR register."]
pub type ARRM_R = crate::BitReader;
#[doc = "Field `EXTTRIG` reader - External trigger edge event EXTTRIG is set by hardware to inform application that a valid edge on the selected external trigger input has occurred. If the trigger is ignored because the timer has already started, then this flag is not set. EXTTRIG flag can be cleared by writing 1 to the EXTTRIGCF bit in the LPTIM_ICR register."]
pub type EXTTRIG_R = crate::BitReader;
#[doc = "Field `CMP1OK` reader - Compare register 1 update OK CMP1OK is set by hardware to inform application that the APB bus write operation to the LPTIM_CCR1 register has been successfully completed. CMP1OK flag can be cleared by writing 1 to the CMP1OKCF bit in the LPTIM_ICR register."]
pub type CMP1OK_R = crate::BitReader;
#[doc = "Field `ARROK` reader - Autoreload register update OK ARROK is set by hardware to inform application that the APB bus write operation to the LPTIM_ARR register has been successfully completed. ARROK flag can be cleared by writing 1 to the ARROKCF bit in the LPTIM_ICR register."]
pub type ARROK_R = crate::BitReader;
#[doc = "Field `UP` reader - Counter direction change down to up In Encoder mode, UP bit is set by hardware to inform application that the counter direction has changed from down to up. UP flag can be cleared by writing 1 to the UPCF bit in the LPTIM_ICR register. Note: If the LPTIM does not support encoder mode feature, this bit is reserved. Please refer to ."]
pub type UP_R = crate::BitReader;
#[doc = "Field `DOWN` reader - Counter direction change up to down In Encoder mode, DOWN bit is set by hardware to inform application that the counter direction has changed from up to down. DOWN flag can be cleared by writing 1 to the DOWNCF bit in the LPTIM_ICR register. Note: If the LPTIM does not support encoder mode feature, this bit is reserved. Please refer to ."]
pub type DOWN_R = crate::BitReader;
#[doc = "Field `UE` reader - LPTIM update event occurred UE is set by hardware to inform application that an update event was generated. UE flag can be cleared by writing 1 to the UECF bit in the LPTIM_ICR register."]
pub type UE_R = crate::BitReader;
#[doc = "Field `REPOK` reader - Repetition register update OK REPOK is set by hardware to inform application that the APB bus write operation to the LPTIM_RCR register has been successfully completed. REPOK flag can be cleared by writing 1 to the REPOKCF bit in the LPTIM_ICR register."]
pub type REPOK_R = crate::BitReader;
#[doc = "Field `DIEROK` reader - Interrupt enable register update OK DIEROK is set by hardware to inform application that the APB bus write operation to the LPTIM_DIER register has been successfully completed. DIEROK flag can be cleared by writing 1 to the DIEROKCF bit in the LPTIM_ICR register."]
pub type DIEROK_R = crate::BitReader;
impl R {
    // One accessor per defined status bit. Note the layout: bits 0..=8 are
    // contiguous, bits 9..=23 are not exposed here, and DIEROK sits at bit 24.
    #[doc = "Bit 0 - Compare 1 interrupt flag The CC1IF flag is set by hardware to inform application that LPTIM_CNT register value matches the compare register's value. The CC1IF flag can be cleared by writing 1 to the CC1CF bit in the LPTIM_ICR register."]
    #[inline(always)]
    pub fn cc1if(&self) -> CC1IF_R {
        CC1IF_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Autoreload match ARRM is set by hardware to inform application that LPTIM_CNT register’s value reached the LPTIM_ARR register’s value. ARRM flag can be cleared by writing 1 to the ARRMCF bit in the LPTIM_ICR register."]
    #[inline(always)]
    pub fn arrm(&self) -> ARRM_R {
        ARRM_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - External trigger edge event EXTTRIG is set by hardware to inform application that a valid edge on the selected external trigger input has occurred. If the trigger is ignored because the timer has already started, then this flag is not set. EXTTRIG flag can be cleared by writing 1 to the EXTTRIGCF bit in the LPTIM_ICR register."]
    #[inline(always)]
    pub fn exttrig(&self) -> EXTTRIG_R {
        EXTTRIG_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Compare register 1 update OK CMP1OK is set by hardware to inform application that the APB bus write operation to the LPTIM_CCR1 register has been successfully completed. CMP1OK flag can be cleared by writing 1 to the CMP1OKCF bit in the LPTIM_ICR register."]
    #[inline(always)]
    pub fn cmp1ok(&self) -> CMP1OK_R {
        CMP1OK_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Autoreload register update OK ARROK is set by hardware to inform application that the APB bus write operation to the LPTIM_ARR register has been successfully completed. ARROK flag can be cleared by writing 1 to the ARROKCF bit in the LPTIM_ICR register."]
    #[inline(always)]
    pub fn arrok(&self) -> ARROK_R {
        ARROK_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Counter direction change down to up In Encoder mode, UP bit is set by hardware to inform application that the counter direction has changed from down to up. UP flag can be cleared by writing 1 to the UPCF bit in the LPTIM_ICR register. Note: If the LPTIM does not support encoder mode feature, this bit is reserved. Please refer to ."]
    #[inline(always)]
    pub fn up(&self) -> UP_R {
        UP_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Counter direction change up to down In Encoder mode, DOWN bit is set by hardware to inform application that the counter direction has changed from up to down. DOWN flag can be cleared by writing 1 to the DOWNCF bit in the LPTIM_ICR register. Note: If the LPTIM does not support encoder mode feature, this bit is reserved. Please refer to ."]
    #[inline(always)]
    pub fn down(&self) -> DOWN_R {
        DOWN_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - LPTIM update event occurred UE is set by hardware to inform application that an update event was generated. UE flag can be cleared by writing 1 to the UECF bit in the LPTIM_ICR register."]
    #[inline(always)]
    pub fn ue(&self) -> UE_R {
        UE_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - Repetition register update OK REPOK is set by hardware to inform application that the APB bus write operation to the LPTIM_RCR register has been successfully completed. REPOK flag can be cleared by writing 1 to the REPOKCF bit in the LPTIM_ICR register."]
    #[inline(always)]
    pub fn repok(&self) -> REPOK_R {
        REPOK_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 24 - Interrupt enable register update OK DIEROK is set by hardware to inform application that the APB bus write operation to the LPTIM_DIER register has been successfully completed. DIEROK flag can be cleared by writing 1 to the DIEROKCF bit in the LPTIM_ICR register."]
    #[inline(always)]
    pub fn dierok(&self) -> DIEROK_R {
        DIEROK_R::new(((self.bits >> 24) & 1) != 0)
    }
}
#[doc = "LPTIM interrupt and status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr_output::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ISR_OUTPUT_SPEC;
// Marker type describing the register: a read-only 32-bit word resetting to 0.
impl crate::RegisterSpec for ISR_OUTPUT_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`isr_output::R`](R) reader structure"]
impl crate::Readable for ISR_OUTPUT_SPEC {}
#[doc = "`reset()` method sets ISR_output to value 0"]
impl crate::Resettable for ISR_OUTPUT_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::collections::HashSet;
/// Determine whether a sentence is a pangram.
///
/// Only ASCII letters count; case and all other characters are ignored.
pub fn is_pangram(sentence: &str) -> bool {
    let mut found: HashSet<char> = HashSet::new();

    for ch in sentence.chars() {
        let lower = ch.to_ascii_lowercase();

        if lower.is_ascii_alphabetic() {
            found.insert(lower);
        }
    }

    // Only the 26 lowercase ASCII letters can ever be inserted, so a full
    // alphabet is equivalent to the set reaching size 26.
    found.len() == 26
}
|
use std::{
fmt::{self, Debug, Formatter},
path::Path,
};
/// A file with its contents stored in a `&'static [u8]`.
#[derive(Clone, PartialEq, Eq)]
pub struct File<'a> {
    // Path relative to the directory passed to `include_dir!()`.
    path: &'a str,
    // Raw file bytes, embedded at compile time.
    contents: &'a [u8],
    // Optional filesystem metadata, only compiled in with the `metadata` feature.
    #[cfg(feature = "metadata")]
    metadata: Option<crate::Metadata>,
}
impl<'a> File<'a> {
    /// Create a new [`File`].
    ///
    /// `const` so it can appear in static data; metadata starts out unset.
    pub const fn new(path: &'a str, contents: &'a [u8]) -> Self {
        File {
            path,
            contents,
            #[cfg(feature = "metadata")]
            metadata: None,
        }
    }

    /// The full path for this [`File`], relative to the directory passed to
    /// [`crate::include_dir!()`].
    pub fn path(&self) -> &'a Path {
        Path::new(self.path)
    }

    /// The file's raw contents.
    pub fn contents(&self) -> &[u8] {
        self.contents
    }

    /// The file's contents interpreted as a string.
    ///
    /// Returns `None` when the contents are not valid UTF-8.
    pub fn contents_utf8(&self) -> Option<&str> {
        std::str::from_utf8(self.contents()).ok()
    }
}
#[cfg(feature = "metadata")]
impl<'a> File<'a> {
    /// Set the [`Metadata`] associated with a [`File`].
    ///
    /// Consumes and returns `self`, builder-style.
    pub const fn with_metadata(self, metadata: crate::Metadata) -> Self {
        let File { path, contents, .. } = self;

        File {
            path,
            contents,
            metadata: Some(metadata),
        }
    }

    /// Get the [`File`]'s [`Metadata`], if available.
    pub fn metadata(&self) -> Option<&crate::Metadata> {
        self.metadata.as_ref()
    }
}
impl<'a> Debug for File<'a> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let File {
path,
contents,
#[cfg(feature = "metadata")]
metadata,
} = self;
let mut d = f.debug_struct("File");
d.field("path", path)
.field("contents", &format!("<{} bytes>", contents.len()));
#[cfg(feature = "metadata")]
d.field("metadata", metadata);
d.finish()
}
}
|
// Auto-generated (svd2rust-style) read-only view of the HRTIM `ODSR` register:
// one `BitReader` alias per timer-output disable-status flag.
#[doc = "Register `ODSR` reader"]
pub type R = crate::R<ODSR_SPEC>;
#[doc = "Field `TA1ODS` reader - Timer A Output 1 disable status"]
pub type TA1ODS_R = crate::BitReader;
#[doc = "Field `TA2ODS` reader - Timer A Output 2 disable status"]
pub type TA2ODS_R = crate::BitReader;
#[doc = "Field `TB1ODS` reader - Timer B Output 1 disable status"]
pub type TB1ODS_R = crate::BitReader;
#[doc = "Field `TB2ODS` reader - Timer B Output 2 disable status"]
pub type TB2ODS_R = crate::BitReader;
#[doc = "Field `TC1ODS` reader - Timer C Output 1 disable status"]
pub type TC1ODS_R = crate::BitReader;
#[doc = "Field `TC2ODS` reader - Timer C Output 2 disable status"]
pub type TC2ODS_R = crate::BitReader;
#[doc = "Field `TD1ODS` reader - Timer D Output 1 disable status"]
pub type TD1ODS_R = crate::BitReader;
#[doc = "Field `TD2ODS` reader - Timer D Output 2 disable status"]
pub type TD2ODS_R = crate::BitReader;
#[doc = "Field `TE1ODS` reader - Timer E Output 1 disable status"]
pub type TE1ODS_R = crate::BitReader;
#[doc = "Field `TE2ODS` reader - Timer E Output 2 disable status"]
pub type TE2ODS_R = crate::BitReader;
impl R {
    // One accessor per status bit: two outputs for each of timers A..=E,
    // packed contiguously into bits 0..=9.
    #[doc = "Bit 0 - Timer A Output 1 disable status"]
    #[inline(always)]
    pub fn ta1ods(&self) -> TA1ODS_R {
        TA1ODS_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Timer A Output 2 disable status"]
    #[inline(always)]
    pub fn ta2ods(&self) -> TA2ODS_R {
        TA2ODS_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Timer B Output 1 disable status"]
    #[inline(always)]
    pub fn tb1ods(&self) -> TB1ODS_R {
        TB1ODS_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Timer B Output 2 disable status"]
    #[inline(always)]
    pub fn tb2ods(&self) -> TB2ODS_R {
        TB2ODS_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Timer C Output 1 disable status"]
    #[inline(always)]
    pub fn tc1ods(&self) -> TC1ODS_R {
        TC1ODS_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Timer C Output 2 disable status"]
    #[inline(always)]
    pub fn tc2ods(&self) -> TC2ODS_R {
        TC2ODS_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Timer D Output 1 disable status"]
    #[inline(always)]
    pub fn td1ods(&self) -> TD1ODS_R {
        TD1ODS_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - Timer D Output 2 disable status"]
    #[inline(always)]
    pub fn td2ods(&self) -> TD2ODS_R {
        TD2ODS_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - Timer E Output 1 disable status"]
    #[inline(always)]
    pub fn te1ods(&self) -> TE1ODS_R {
        TE1ODS_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Timer E Output 2 disable status"]
    #[inline(always)]
    pub fn te2ods(&self) -> TE2ODS_R {
        TE2ODS_R::new(((self.bits >> 9) & 1) != 0)
    }
}
#[doc = "Output Disable Status Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`odsr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ODSR_SPEC;
// Marker type describing the register: a read-only 32-bit word resetting to 0.
impl crate::RegisterSpec for ODSR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`odsr::R`](R) reader structure"]
impl crate::Readable for ODSR_SPEC {}
#[doc = "`reset()` method sets ODSR to value 0"]
impl crate::Resettable for ODSR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
mod leaf;
mod operator;
pub use leaf::UserFilter;
pub use operator::Operator;
use std::{collections::HashSet, convert::From, fmt};
/// AST of a parsed user query.
#[derive(Clone, PartialEq, Eq)]
pub enum UserAST {
    /// An attribute filter (`attr:`) applied to a subtree.
    Attributed(String, Box<UserAST>),
    /// Two subtrees joined by a boolean operator.
    BinaryClause(Box<UserAST>, Operator, Box<UserAST>),
    /// A leaf term (phrase with optional levenshtein distance).
    Leaf(Box<UserFilter>),
}
// conversion used in tests
impl From<&'static str> for UserAST {
    /// Parse a test shorthand: an optional `attr:` prefix and an optional
    /// `~n` levenshtein suffix; input starting with `"` is taken verbatim.
    fn from(item: &str) -> Self {
        let mut filter = UserFilter {
            phrase: item.to_string(),
            levenshtein: None,
        };

        let starts_unquoted = item.chars().next().map(|c| c != '\"').unwrap_or(false);

        if starts_unquoted {
            // Split off an `attr:` prefix, if any.
            let field_parts: Vec<_> = item.splitn(2, ':').collect();
            if field_parts.len() > 1 {
                filter.phrase = field_parts[1].to_string();
            }

            // Split off a `~levenshtein` suffix, if any.
            let phrase = filter.phrase.clone();
            let leven_parts: Vec<_> = phrase.splitn(2, '~').collect();
            if leven_parts.len() > 1 {
                filter.phrase = leven_parts[0].to_string();
                filter.levenshtein = Some(leven_parts[1].parse().unwrap());
            }

            if field_parts.len() > 1 {
                return UserAST::Attributed(
                    field_parts[0].to_string(),
                    Box::new(UserAST::Leaf(Box::new(filter))),
                );
            }
        }

        UserAST::Leaf(Box::new(filter))
    }
}
impl From<&'static str> for Box<UserAST> {
fn from(item: &'static str) -> Self {
Box::new(item.into())
}
}
impl fmt::Debug for UserAST {
    /// Render the AST compactly, parenthesizing binary clauses.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        match self {
            UserAST::Attributed(attr, sub) => write!(formatter, "{}:{:?}", attr, sub),
            UserAST::BinaryClause(left, op, right) => {
                write!(formatter, "({:?} {} {:?})", left, op, right)
            }
            UserAST::Leaf(filter) => write!(formatter, "{:?}", filter),
        }
    }
}
impl From<(UserAST, Operator, UserAST)> for UserAST {
    /// Build a `BinaryClause` from an `(lhs, op, rhs)` triple.
    fn from((lhs, op, rhs): (UserAST, Operator, UserAST)) -> Self {
        UserAST::BinaryClause(Box::new(lhs), op, Box::new(rhs))
    }
}
impl UserAST {
    /// Filters the AST according to the bool returned in the should_filter callback.
    ///
    /// Can filter any parts of the AST, while keeping a valid ast.
    /// Filtering means a complete sub part of the AST will be removed.
    ///
    /// The should_filter callback provides two values:
    /// The current AST, and the current attribute filter `Option<&str>`, which is applied on the subtree
    pub fn filter_ast<F>(&self, should_filter: &mut F, current_attr: Option<&str>) -> Option<UserAST>
    where
        F: FnMut(&UserAST, Option<&str>) -> bool,
    {
        if should_filter(self, current_attr) {
            return None;
        }
        match self {
            UserAST::Attributed(attr, ast) => ast
                .filter_ast(should_filter, Some(attr))
                .map(|kept| UserAST::Attributed(attr.to_string(), kept.into())),
            UserAST::BinaryClause(ast1, op, ast2) => {
                // If one side of a clause is removed, the surviving side replaces the
                // whole clause; if both are removed, so is the clause.
                let left = ast1.filter_ast(should_filter, current_attr);
                let right = ast2.filter_ast(should_filter, current_attr);
                match (left, right) {
                    (Some(left), Some(right)) => {
                        Some(UserAST::BinaryClause(left.into(), *op, right.into()))
                    }
                    (Some(remaining), None) | (None, Some(remaining)) => Some(remaining),
                    (None, None) => None,
                }
            }
            UserAST::Leaf(_) => Some(self.clone()),
        }
    }
    /// Maps the AST according to the returned ast in the map_fn callback.
    ///
    /// Can be used to walk over the AST and replace parts of it. Children are
    /// mapped before their parent node is handed to `map_fn` (post-order).
    ///
    /// The map_fn callback provides two values:
    /// The current AST, and the current attribute filter `Option<&str>`, which is applied on the subtree
    pub fn map_ast<F>(self, map_fn: &mut F, current_attr: Option<&str>) -> UserAST
    where
        F: FnMut(UserAST, Option<&str>) -> UserAST,
    {
        // Rebuild the node from mapped children instead of the previous
        // clone-and-replace, which deep-copied every subtree on each visit.
        let mapped = match self {
            UserAST::Attributed(attr, ast) => {
                let child = (*ast).map_ast(map_fn, Some(&attr));
                UserAST::Attributed(attr, Box::new(child))
            }
            UserAST::BinaryClause(ast1, op, ast2) => {
                let left = (*ast1).map_ast(map_fn, current_attr);
                let right = (*ast2).map_ast(map_fn, current_attr);
                UserAST::BinaryClause(Box::new(left), op, Box::new(right))
            }
            leaf @ UserAST::Leaf(_) => leaf,
        };
        map_fn(mapped, current_attr)
    }
    /// walking the ast and grouping adjacent terms for phrase boosting
    pub fn get_phrase_pairs(&self) -> HashSet<[&str; 2]> {
        let mut collect = HashSet::new();
        self._get_phrase_pairs(&mut collect, &mut None, None);
        collect
    }
    /// Recursive helper: `last_term` carries the previously emitted term so each
    /// adjacent pair of leaves is collected. It is reset (a fresh `&mut None`) when
    /// descending into an attribute scope different from the current one, so pairs
    /// never bridge into a foreign attribute scope.
    fn _get_phrase_pairs<'a>(&'a self, collect: &mut HashSet<[&'a str; 2]>, last_term: &mut Option<&'a str>, curr_attr: Option<&'a str>) {
        match self {
            UserAST::Attributed(attr, ast) => {
                if curr_attr == Some(attr) || curr_attr.is_none() {
                    ast._get_phrase_pairs(collect, last_term, Some(attr));
                } else {
                    ast._get_phrase_pairs(collect, &mut None, Some(attr));
                }
            }
            UserAST::BinaryClause(ast1, _op, ast2) => {
                ast1._get_phrase_pairs(collect, last_term, curr_attr);
                ast2._get_phrase_pairs(collect, last_term, curr_attr);
            }
            UserAST::Leaf(filter) => {
                if let Some(last_term) = last_term {
                    collect.insert([last_term, &filter.phrase]);
                }
                *last_term = Some(&filter.phrase)
            }
        }
    }
    /// walking the ast in order, emitting all terms
    pub fn walk_terms<'a, F>(&'a self, cb: &mut F)
    where
        F: FnMut(&'a str),
    {
        match self {
            UserAST::Attributed(_attr, ast) => {
                ast.walk_terms(cb);
            }
            UserAST::BinaryClause(ast1, _op, ast2) => {
                ast1.walk_terms(cb);
                ast2.walk_terms(cb);
            }
            UserAST::Leaf(filter) => cb(&filter.phrase),
        }
    }
}
#[cfg(test)]
mod test_ast {
    use crate::{
        ast::{Operator::*, UserAST, UserFilter},
        parser::parse,
    };
    // The AST owns phrases built from runtime `String`s and hands out `&str`
    // borrows of them via `walk_terms`.
    #[test]
    fn test_ast_external_lifetime() {
        let external_term_1 = "a".to_string();
        let filter_1 = UserFilter {
            phrase: external_term_1,
            levenshtein: None,
        };
        let left_ast: UserAST = UserAST::Leaf(Box::new(filter_1));
        let external_term_2 = "b".to_string();
        let filter_2 = UserFilter {
            phrase: external_term_2,
            levenshtein: None,
        };
        let right_ast: UserAST = UserAST::Leaf(Box::new(filter_2));
        let ast = UserAST::BinaryClause(Box::new(left_ast), Or, Box::new(right_ast));
        let mut data = Vec::new();
        ast.walk_terms(&mut |el| data.push(el));
        assert_eq!(data, vec!["a", "b"]);
    }
    // Removing one leaf of a binary clause collapses the clause to the surviving
    // side; filtering everything yields `None`.
    #[test]
    fn test_filter_ast() {
        let ast: UserAST = ("super".into(), Or, ("cool".into(), Or, "fancy".into()).into()).into();
        let ast = ast.filter_ast(
            &mut |ast: &UserAST, _attr: Option<&str>| match ast {
                UserAST::Leaf(filter) => filter.phrase == "cool",
                _ => false,
            },
            None,
        );
        assert_eq!(ast, Some(("super".into(), Or, "fancy".into()).into()));
        let ast: UserAST = parse("myattr:(super cool)").unwrap();
        assert_eq!(ast.filter_ast(&mut |_ast, _attr| { true }, None), None);
        assert_eq!(
            ast.filter_ast(
                &mut |ast, _attr| {
                    match ast {
                        UserAST::Leaf(filter) => filter.phrase == "cool",
                        _ => false,
                    }
                },
                None
            ),
            // The attribute wrapper survives even when its subtree shrinks to one leaf.
            Some(UserAST::Attributed("myattr".to_string(), "super".into()))
        );
    }
    // `map_ast` supports both one-for-one leaf replacement and expanding a single
    // leaf into a larger subtree.
    #[test]
    fn test_map_ast() {
        let ast: UserAST = ("super".into(), Or, ("cool".into(), Or, "fancy".into()).into()).into();
        let ast = ast.map_ast(
            &mut |ast: UserAST, _attr: Option<&str>| match ast {
                UserAST::Leaf(ref map) if map.phrase == "cool" => UserAST::Leaf(Box::new(UserFilter {
                    phrase: "coolcool".to_string(),
                    levenshtein: None,
                })),
                _ => ast,
            },
            None,
        );
        let expected_mapped_ast: UserAST = ("super".into(), Or, ("coolcool".into(), Or, "fancy".into()).into()).into();
        assert_eq!(ast, expected_mapped_ast);
        let ast: UserAST = "kawaii".into();
        let ast = ast.map_ast(
            &mut |ast: UserAST, _attr: Option<&str>| match ast {
                UserAST::Leaf(ref map) if map.phrase == "kawaii" => {
                    let leftast = UserAST::Leaf(Box::new(UserFilter {
                        phrase: "kawaii".to_string(),
                        levenshtein: None,
                    }));
                    let rightast = UserAST::Leaf(Box::new(UserFilter {
                        phrase: "かわいい".to_string(),
                        levenshtein: None,
                    }));
                    UserAST::BinaryClause(Box::new(leftast), Or, Box::new(rightast))
                }
                _ => ast,
            },
            None,
        );
        let expected_mapped_ast: UserAST = ("kawaii".into(), Or, "かわいい".into()).into();
        assert_eq!(ast, expected_mapped_ast);
    }
    // #[test]
    // fn test_and_or() {
    // let ast: UserAST = ("cool".into(), Or, "fancy".into()).into();
    // assert_eq!(
    // ast.get_phrase_pairs(),
    // [["cool","fancy"]].iter().map(|el|*el).collect()
    // );
    // let ast: UserAST = ("super".into(), And, ("cool".into(), Or, "fancy".into()).into()).into();
    // assert_eq!(
    // ast.get_phrase_pairs(),
    // [["cool","fancy"],["super","cool"],["super","fancy"]].iter().map(|el|*el).collect()
    // );
    // }
    // Adjacent terms pair up in walk order; pairs do not cross into a different
    // attribute scope, but terms do bridge from an attribute scope back out.
    #[test]
    fn test_get_phrase_pairs_or() {
        // let ast: UserAST = parse("super cool fancy").unwrap();
        let ast: UserAST = ("super".into(), Or, ("cool".into(), Or, "fancy".into()).into()).into();
        assert_eq!(ast.get_phrase_pairs(), [["super", "cool"], ["cool", "fancy"]].iter().copied().collect());
        let ast: UserAST = ("super".into(), Or, ("cool".into(), Or, ("fancy".into(), Or, "great".into()).into()).into()).into();
        assert_eq!(ast.get_phrase_pairs(), [["super", "cool"], ["cool", "fancy"], ["fancy", "great"]].iter().copied().collect());
        let ast: UserAST = parse("super cool nice great").unwrap();
        assert_eq!(ast.get_phrase_pairs(), [["super", "cool"], ["cool", "nice"], ["nice", "great"]].iter().copied().collect());
        let ast: UserAST = parse("myattr:(super cool) AND fancy").unwrap();
        // let ast: UserAST = ("super".into(), Or, ("cool".into(), Or, "fancy".into()).into()).into();
        let mut terms = vec![];
        ast.walk_terms(&mut |term| terms.push(term));
        assert_eq!(terms, vec!["super", "cool", "fancy"]);
        let ast: UserAST = parse("myattr:(super cool)").unwrap();
        assert_eq!(ast.get_phrase_pairs(), [["super", "cool"]].iter().copied().collect());
        let ast: UserAST = parse("myattr:(super cool) different scope").unwrap();
        assert_eq!(
            ast.get_phrase_pairs(),
            [["super", "cool"], ["cool", "different"], ["different", "scope"]].iter().copied().collect()
        );
        // let ast: UserAST = parse("different scope OR myattr:(super cool)").unwrap();
        // assert_eq!(
        // ast.get_phrase_pairs(),
        // [["super","cool"],["cool","different"],["different","scope"]].iter().map(|el|*el).collect()
        // );
    }
}
|
/*
* Copyright 2017-2018 Ben Ashford
*
* Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
* http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
* <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
* option. This file may not be copied, modified, or distributed
* except according to those terms.
*/
use std::collections::VecDeque;
use std::net::SocketAddr;
use std::sync::{Arc, Mutex};
use futures::{future, Future, Sink, Stream};
use futures::future::Executor;
use futures::sync::{mpsc, oneshot};
use error;
use resp;
use super::connect::{connect, ClientConnection};
// Boxed future resolving to a `PairedConnection` (futures 0.1 style; note it is not `Send`).
type PairedConnectionBox = Box<Future<Item = PairedConnection, Error = error::Error>>;
/// The default starting point to use most default Redis functionality.
///
/// Returns a future that resolves to a `PairedConnection`.
///
/// Internally this spawns two background tasks on `executor`: a sender task that
/// drains an unbounded channel into the socket, and a receiver task that pairs
/// each incoming message with the oldest waiting oneshot in `resp_queue`.
pub fn paired_connect<E>(addr: &SocketAddr, executor: E) -> PairedConnectionBox
where
    E: Executor<Box<Future<Item = (), Error = ()> + Send>> + 'static,
{
    let paired_con = connect(addr)
        .map_err(|e| e.into())
        .and_then(move |connection| {
            let ClientConnection { sender, receiver } = connection;
            // Outgoing commands are funnelled through this channel so `send` never blocks.
            let (out_tx, out_rx) = mpsc::unbounded();
            // Shared flag: flipped to false when the sender task stops, so the receiver
            // task knows to shut down once its pending queue drains.
            let running = Arc::new(Mutex::new(true));
            let sender_running = running.clone();
            let sender = Box::new(
                sender
                    .sink_map_err(|e| error!("Sender error: {}", e))
                    .send_all(out_rx)
                    .then(move |r| {
                        let mut lock = sender_running.lock().expect("Lock is tainted");
                        *lock = false;
                        match r {
                            Ok(_) => {
                                info!("Sender stream closed...");
                                future::ok(())
                            }
                            Err(e) => {
                                error!("Error occurred: {:?}", e);
                                future::err(())
                            }
                        }
                    }),
            ) as Box<Future<Item = (), Error = ()> + Send>;
            // One oneshot sender per in-flight command, kept in send order.
            let resp_queue: Arc<Mutex<VecDeque<oneshot::Sender<resp::RespValue>>>> =
                Arc::new(Mutex::new(VecDeque::new()));
            let receiver_queue = resp_queue.clone();
            let receiver = Box::new(
                receiver
                    .for_each(move |msg| {
                        let mut queue = receiver_queue.lock().expect("Lock is tainted");
                        let dest = queue.pop_front().expect("Queue is empty");
                        let _ = dest.send(msg); // Ignore error as receiving end could have been legitimately closed
                        if queue.is_empty() {
                            // No outstanding commands: if the sender already stopped,
                            // end this stream too via the EndOfStream sentinel.
                            let running = running.lock().expect("Lock is tainted");
                            if *running {
                                Ok(())
                            } else {
                                Err(error::Error::EndOfStream)
                            }
                        } else {
                            Ok(())
                        }
                    })
                    .then(|result| match result {
                        // EndOfStream is the expected shutdown signal, not a failure.
                        Ok(()) => future::ok(()),
                        Err(error::Error::EndOfStream) => future::ok(()),
                        Err(e) => future::err(e),
                    })
                    .map(|_| debug!("Closing the receiver stream, receiver closed"))
                    .map_err(|e| error!("Error receiving message: {}", e)),
            ) as Box<Future<Item = (), Error = ()> + Send>;
            // Spawn both background tasks; failure to spawn aborts the connection.
            match executor
                .execute(sender)
                .and_then(|_| executor.execute(receiver))
            {
                Ok(()) => future::ok(PairedConnection {
                    out_tx: out_tx,
                    resp_queue: resp_queue,
                }),
                Err(e) => future::err(error::internal(format!(
                    "Cannot start background tasks: {:?}",
                    e
                ))),
            }
        })
        .map_err(|e| e.into());
    Box::new(paired_con)
}
/// A Redis connection that pairs outgoing commands with incoming replies in FIFO order.
pub struct PairedConnection {
    // Feeds the background sender task spawned by `paired_connect`.
    out_tx: mpsc::UnboundedSender<resp::RespValue>,
    // One waiting oneshot per in-flight command; front = oldest request.
    resp_queue: Arc<Mutex<VecDeque<oneshot::Sender<resp::RespValue>>>>,
}
/// Boxed future type returned by every command method.
pub type SendBox<T> = Box<Future<Item = T, Error = error::Error> + Send>;
/// Fire-and-forget, used to force the return type of a `send` command where the result is not required
/// to satisfy the generic return type.
///
/// Example: `faf!(con.send(resp_array!["SET", "key", "value"]))` — the reply is discarded.
#[macro_export]
macro_rules! faf {
    ($e:expr) => (
        {
            use $crate::client::paired::SendBox;
            use $crate::resp;
            // Pin the generic return type so inference succeeds, then drop the future.
            let _:SendBox<resp::RespValue> = $e;
        }
    )
}
impl PairedConnection {
    /// Sends a command to Redis.
    ///
    /// `msg` must be a single RESP array (built manually or with `resp_array!`);
    /// anything else resolves immediately to an internal error. The returned future
    /// yields the server's reply converted through `resp::FromResp`, and can fail
    /// for IO problems, conversion problems, or server-side Redis errors.
    ///
    /// The message is queued and written asynchronously before the future is
    /// realised, so messages go out in the same order `send` was called.
    pub fn send<T: resp::FromResp + Send + 'static>(&self, msg: resp::RespValue) -> SendBox<T> {
        // Only a full RESP array is a valid command.
        match msg {
            resp::RespValue::Array(_) => {}
            _ => {
                return Box::new(future::err(error::internal(
                    "Command must be a RespValue::Array",
                )))
            }
        }
        let (tx, rx) = oneshot::channel();
        {
            // The waiting slot is queued (under the lock) before the message is
            // handed to the sender task, so replies pair with requests in order.
            let mut queue = self.resp_queue.lock().expect("Tainted queue");
            queue.push_back(tx);
            self.out_tx.unbounded_send(msg).expect("Failed to send");
        }
        Box::new(rx.then(|v| match v {
            Ok(v) => future::result(T::from_resp(v)),
            Err(e) => future::err(e.into()),
        }))
    }
}
#[cfg(feature = "commands")]
///
/// Implementing Redis commands as specific Rust functions, intended to be easier to use that manually constructing
/// each as appropriate.
///
/// Warning: this is still subject to change. Only a subset of commands are implemented so far, and not done so
/// consistently. This is ongoing to test various options, a winner will be picked in due course.
///
/// Protected by a feature flag until the above issues are satisfied.
///
mod commands {
use std::mem;
use futures::future;
use error;
use resp::{RespValue, ToRespString};
use super::SendBox;
/// Several Redis commands take an open-ended collection of keys, or other such structures that are flattened
/// into the redis command. For example `MGET key1 key2 key3`.
///
/// The challenge for this library is anticipating how this might be used by applications. It's conceivable that
/// applications will use vectors, but also might have a fixed set of keys which could either be passed in an array
/// or as a reference to a slice.
///
pub trait CommandCollection {
    /// Appends every element of the collection to `cmd` as a `RespValue`.
    // Named the previously-anonymous parameter: anonymous trait parameters are
    // deprecated (and a hard error in the 2018 edition).
    fn add_to_cmd(self, cmd: &mut Vec<RespValue>);
}
impl<T: ToRespString + Into<RespValue>> CommandCollection for Vec<T> {
    fn add_to_cmd(self, cmd: &mut Vec<RespValue>) {
        cmd.extend(self.into_iter().map(|key| key.into()));
    }
}
impl<'a, T: ToRespString + Into<RespValue> + ToOwned<Owned = T>> CommandCollection for &'a [T] {
    // Slice elements are cloned (`to_owned`) because the command needs owned values.
    fn add_to_cmd(self, cmd: &mut Vec<RespValue>) {
        cmd.extend(self.iter().map(|key| key.to_owned().into()));
    }
}
// Implements `CommandCollection` for fixed-size arrays of length `$c`.
//
// Elements are moved out by boxing the array and converting it to a `Vec`, which
// needs no unsafe code. (The previous implementation used
// `mem::replace(&mut self[idx], mem::uninitialized())`, which is undefined
// behavior — `mem::uninitialized` is deprecated for exactly this reason.)
macro_rules! command_collection_ary {
    ($c:expr) => {
        impl<T: ToRespString + Into<RespValue>> CommandCollection for [T; $c] {
            fn add_to_cmd(self, cmd: &mut Vec<RespValue>) {
                // `Box<[T; $c]>` unsizes to `Box<[T]>`; `into_vec` then yields owned
                // elements in index order.
                let values: Box<[T]> = Box::new(self);
                cmd.extend(values.into_vec().into_iter().map(|value| value.into()));
            }
        }
    }
}
command_collection_ary!(1);
command_collection_ary!(2);
command_collection_ary!(3);
command_collection_ary!(4);
command_collection_ary!(5);
command_collection_ary!(6);
command_collection_ary!(7);
command_collection_ary!(8);
// TODO - check the expansion regarding trailing commas, etc.
// Generates a `PairedConnection` method for a fixed-arity Redis command.
// First arm: command taking a tuple of convertible arguments; second arm: no arguments.
macro_rules! simple_command {
    ($n:ident,$k:expr,[ $(($p:ident : $t:ident)),* ],$r:ty) => {
        pub fn $n< $($t,)* >(&self, ($($p,)*): ($($t,)*)) -> SendBox<$r>
        where $($t: ToRespString + Into<RespValue>,)*
        {
            self.send(resp_array![ $k $(,$p)* ])
        }
    };
    ($n:ident,$k:expr,$r:ty) => {
        pub fn $n(&self) -> SendBox<$r> {
            self.send(resp_array![$k])
        }
    };
}
impl super::PairedConnection {
    simple_command!(append, "APPEND", [(key: K), (value: V)], usize);
    simple_command!(auth, "AUTH", [(password: P)], ());
    simple_command!(bgrewriteaof, "BGREWRITEAOF", ());
    simple_command!(bgsave, "BGSAVE", ());
}
pub trait BitcountCommand {
fn to_cmd(self) -> RespValue;
}
impl<T: ToRespString + Into<RespValue>> BitcountCommand for (T) {
fn to_cmd(self) -> RespValue {
resp_array!["BITCOUNT", self]
}
}
impl<T: ToRespString + Into<RespValue>> BitcountCommand for (T, usize, usize) {
fn to_cmd(self) -> RespValue {
resp_array!["BITCOUNT", self.0, self.1.to_string(), self.2.to_string()]
}
}
impl super::PairedConnection {
pub fn bitcount<C>(&self, cmd: C) -> SendBox<usize>
where
C: BitcountCommand,
{
self.send(cmd.to_cmd())
}
}
/// Accumulates the sub-commands of a single `BITFIELD` invocation.
pub struct BitfieldCommands {
    cmds: Vec<BitfieldCommand>,
}
/// One `BITFIELD` sub-command (SET / GET / INCRBY / OVERFLOW).
#[derive(Clone)]
pub enum BitfieldCommand {
    Set(BitfieldOffset, BitfieldTypeAndValue),
    Get(BitfieldOffset, BitfieldType),
    Incrby(BitfieldOffset, BitfieldTypeAndValue),
    Overflow(BitfieldOverflow),
}
impl BitfieldCommand {
    /// Appends this sub-command's tokens to `cmds` in wire order.
    fn add_to_cmd(&self, cmds: &mut Vec<RespValue>) {
        match *self {
            BitfieldCommand::Set(ref offset, ref type_and_value) => {
                cmds.push("SET".into());
                cmds.push(type_and_value.type_cmd());
                cmds.push(offset.to_cmd());
                cmds.push(type_and_value.value_cmd());
            }
            BitfieldCommand::Get(ref offset, ref ty) => {
                cmds.push("GET".into());
                cmds.push(ty.to_cmd());
                cmds.push(offset.to_cmd());
            }
            BitfieldCommand::Incrby(ref offset, ref type_and_value) => {
                cmds.push("INCRBY".into());
                cmds.push(type_and_value.type_cmd());
                cmds.push(offset.to_cmd());
                cmds.push(type_and_value.value_cmd());
            }
            BitfieldCommand::Overflow(ref overflow) => {
                cmds.push("OVERFLOW".into());
                cmds.push(overflow.to_cmd());
            }
        }
    }
}
/// A BITFIELD integer type without a value, e.g. for GET.
#[derive(Copy, Clone)]
pub enum BitfieldType {
    Signed(usize),
    Unsigned(usize),
}
impl BitfieldType {
    // Wire form: `i<bits>` / `u<bits>`.
    fn to_cmd(&self) -> RespValue {
        let repr = match *self {
            BitfieldType::Signed(size) => format!("i{}", size),
            BitfieldType::Unsigned(size) => format!("u{}", size),
        };
        repr.into()
    }
}
/// Overflow behaviour for INCRBY sub-commands.
#[derive(Copy, Clone)]
pub enum BitfieldOverflow {
    Wrap,
    Sat,
    Fail,
}
impl BitfieldOverflow {
    fn to_cmd(&self) -> RespValue {
        let token = match *self {
            BitfieldOverflow::Wrap => "WRAP",
            BitfieldOverflow::Sat => "SAT",
            BitfieldOverflow::Fail => "FAIL",
        };
        token.into()
    }
}
/// A BITFIELD integer type together with its value, e.g. for SET/INCRBY.
#[derive(Clone)]
pub enum BitfieldTypeAndValue {
    Signed(usize, isize),
    Unsigned(usize, usize),
}
impl BitfieldTypeAndValue {
    // Wire form of the type token: `i<bits>` / `u<bits>`.
    fn type_cmd(&self) -> RespValue {
        let repr = match *self {
            BitfieldTypeAndValue::Signed(size, _) => format!("i{}", size),
            BitfieldTypeAndValue::Unsigned(size, _) => format!("u{}", size),
        };
        repr.into()
    }
    // Wire form of the value itself.
    fn value_cmd(&self) -> RespValue {
        let repr = match *self {
            BitfieldTypeAndValue::Signed(_, amt) => amt.to_string(),
            BitfieldTypeAndValue::Unsigned(_, amt) => amt.to_string(),
        };
        repr.into()
    }
}
/// A BITFIELD offset: absolute bit position, or `#`-prefixed type-width multiple.
#[derive(Clone)]
pub enum BitfieldOffset {
    Bits(usize),
    Positional(usize),
}
impl BitfieldOffset {
    fn to_cmd(&self) -> RespValue {
        let repr = match *self {
            BitfieldOffset::Bits(size) => size.to_string(),
            BitfieldOffset::Positional(size) => format!("#{}", size),
        };
        repr.into()
    }
}
impl BitfieldCommands {
    /// Starts an empty BITFIELD builder.
    pub fn new() -> Self {
        BitfieldCommands { cmds: Vec::new() }
    }
    /// Queues a SET sub-command.
    pub fn set(&mut self, offset: BitfieldOffset, value: BitfieldTypeAndValue) -> &mut Self {
        self.cmds.push(BitfieldCommand::Set(offset, value));
        self
    }
    /// Queues a GET sub-command.
    pub fn get(&mut self, offset: BitfieldOffset, ty: BitfieldType) -> &mut Self {
        self.cmds.push(BitfieldCommand::Get(offset, ty));
        self
    }
    /// Queues an INCRBY sub-command.
    pub fn incrby(&mut self, offset: BitfieldOffset, value: BitfieldTypeAndValue) -> &mut Self {
        self.cmds.push(BitfieldCommand::Incrby(offset, value));
        self
    }
    /// Queues an OVERFLOW directive (applies to subsequent INCRBYs).
    pub fn overflow(&mut self, overflow: BitfieldOverflow) -> &mut Self {
        self.cmds.push(BitfieldCommand::Overflow(overflow));
        self
    }
    /// Renders the queued sub-commands as one complete BITFIELD command for `key`.
    fn to_cmd(&self, key: RespValue) -> RespValue {
        let mut cmd = vec!["BITFIELD".into(), key];
        for subcmd in &self.cmds {
            subcmd.add_to_cmd(&mut cmd);
        }
        RespValue::Array(cmd)
    }
}
impl super::PairedConnection {
    /// BITFIELD — runs the queued sub-commands against `key`; resolves to one
    /// optional integer per result-producing sub-command.
    pub fn bitfield<K>(&self, (key, cmds): (K, &BitfieldCommands)) -> SendBox<Vec<Option<i64>>>
    where
        K: ToRespString + Into<RespValue>,
    {
        self.send(cmds.to_cmd(key.into()))
    }
}
/// Bitwise operations accepted by `BITOP`.
#[derive(Copy, Clone)]
pub enum BitOp {
    And,
    Or,
    Xor,
    Not,
}
impl From<BitOp> for RespValue {
    fn from(op: BitOp) -> RespValue {
        match op {
            BitOp::And => "AND",
            BitOp::Or => "OR",
            BitOp::Xor => "XOR",
            BitOp::Not => "NOT",
        }.into()
    }
}
impl super::PairedConnection {
    /// BITOP — combines the source keys with `op` and stores the result in
    /// `destkey`; resolves to the length of the stored string.
    ///
    /// Fix: the command array previously started with the operation name — the
    /// `BITOP` command token itself was never sent (the wire form is
    /// `BITOP <op> <destkey> key [key ...]`), so the server saw an unknown command.
    pub fn bitop<K, C>(&self, (op, destkey, keys): (BitOp, K, C)) -> SendBox<i64>
    where
        K: ToRespString + Into<RespValue>,
        C: CommandCollection,
    {
        let mut cmd = Vec::new();
        cmd.push("BITOP".into());
        cmd.push(op.into());
        cmd.push(destkey.into());
        keys.add_to_cmd(&mut cmd);
        // "BITOP" + op + destkey = 3 elements; at least one source key is required.
        if cmd.len() > 3 {
            self.send(RespValue::Array(cmd))
        } else {
            Box::new(future::err(error::internal(
                "BITOP command needs at least one key",
            )))
        }
    }
}
/// Argument shapes accepted by `bitpos`: `(key, bit, start)` or `(key, bit, start, end)`.
pub trait BitposCommand {
    fn to_cmd(self) -> RespValue;
}
impl<K, B> BitposCommand for (K, B, usize)
where
    K: ToRespString + Into<RespValue>,
    B: ToRespString + Into<RespValue>,
{
    fn to_cmd(self) -> RespValue {
        let (key, bit, start) = self;
        resp_array!["BITPOS", key, bit, start.to_string()]
    }
}
impl<K, B> BitposCommand for (K, B, usize, usize)
where
    K: ToRespString + Into<RespValue>,
    B: ToRespString + Into<RespValue>,
{
    fn to_cmd(self) -> RespValue {
        let (key, bit, start, end) = self;
        resp_array!["BITPOS", key, bit, start.to_string(), end.to_string()]
    }
}
impl super::PairedConnection {
    /// BITPOS — finds the position of the first bit equal to `bit` within the range.
    pub fn bitpos<C>(&self, cmd: C) -> SendBox<i64>
    where
        C: BitposCommand,
    {
        self.send(cmd.to_cmd())
    }
}
// MARKER - all accounted for above this line
impl super::PairedConnection {
// TODO - there may be a way of generalising this kind of thing
pub fn del<C>(&self, keys: (C)) -> SendBox<usize>
where
C: CommandCollection,
{
let mut cmd = Vec::new();
cmd.push("DEL".into());
keys.add_to_cmd(&mut cmd);
if cmd.len() > 1 {
self.send(RespValue::Array(cmd))
} else {
Box::new(future::err(error::internal(
"DEL command needs at least one key",
)))
}
}
}
impl super::PairedConnection {
    // TODO: incomplete implementation
    /// SET key value — stores `value` at `key`.
    pub fn set<K, V>(&self, (key, value): (K, V)) -> SendBox<()>
    where
        K: ToRespString + Into<RespValue>,
        V: ToRespString + Into<RespValue>,
    {
        let command = resp_array!["SET", key, value];
        self.send(command)
    }
}
#[cfg(test)]
mod test {
    //! Integration tests — they require a live Redis server listening on
    //! 127.0.0.1:6379 and are not pure unit tests.
    use futures::future;
    use futures::Future;
    use tokio_core::reactor::Core;
    use super::{BitfieldCommands, BitfieldOffset, BitfieldOverflow, BitfieldTypeAndValue};
    use super::super::error::Error;
    // Builds an event loop plus an unresolved connection future.
    fn setup() -> (Core, super::super::PairedConnectionBox) {
        let core = Core::new().unwrap();
        let handle = core.handle();
        let addr = "127.0.0.1:6379".parse().unwrap();
        (core, super::super::paired_connect(&addr, &handle))
    }
    // Same as `setup`, but deletes `keys` first so the test starts from a clean slate.
    fn setup_and_delete(keys: Vec<&str>) -> (Core, super::super::PairedConnectionBox) {
        let (mut core, connection) = setup();
        let delete = connection.and_then(|connection| connection.del(keys).map(|_| connection));
        let connection = core.run(delete).unwrap();
        (core, Box::new(future::ok(connection)))
    }
    #[test]
    fn append_test() {
        let (mut core, connection) = setup_and_delete(vec!["APPENDKEY"]);
        let connection =
            connection.and_then(|connection| connection.append(("APPENDKEY", "ABC")));
        let count = core.run(connection).unwrap();
        // APPEND resolves to the new length of the value.
        assert_eq!(count, 3);
    }
    #[test]
    fn bitcount_test() {
        let (mut core, connection) = setup();
        let connection = connection.and_then(|connection| {
            connection
                .set(("BITCOUNT_KEY", "foobar"))
                .and_then(move |_| {
                    let mut counts = Vec::new();
                    // Whole string, then single-byte ranges [0,0] and [1,1].
                    counts.push(connection.bitcount("BITCOUNT_KEY"));
                    counts.push(connection.bitcount(("BITCOUNT_KEY", 0, 0)));
                    counts.push(connection.bitcount(("BITCOUNT_KEY", 1, 1)));
                    future::join_all(counts)
                })
        });
        let counts = core.run(connection).unwrap();
        assert_eq!(counts.len(), 3);
        assert_eq!(counts[0], 26);
        assert_eq!(counts[1], 4);
        assert_eq!(counts[2], 6);
    }
    #[test]
    fn bitfield_test() {
        let (mut core, connection) = setup_and_delete(vec!["BITFIELD_KEY"]);
        let connection = connection.and_then(|connection| {
            let mut bitfield_commands = BitfieldCommands::new();
            bitfield_commands.incrby(
                BitfieldOffset::Bits(100),
                BitfieldTypeAndValue::Unsigned(2, 1),
            );
            // OVERFLOW applies to the INCRBY queued after it.
            bitfield_commands.overflow(BitfieldOverflow::Sat);
            bitfield_commands.incrby(
                BitfieldOffset::Bits(102),
                BitfieldTypeAndValue::Unsigned(2, 1),
            );
            connection.bitfield(("BITFIELD_KEY", &bitfield_commands))
        });
        let results = core.run(connection).unwrap();
        // One result per INCRBY; OVERFLOW produces no result of its own.
        assert_eq!(results.len(), 2);
        assert_eq!(results[0], Some(1));
        assert_eq!(results[1], Some(1));
    }
    #[test]
    fn bitfield_nil_response() {
        let (mut core, connection) = setup_and_delete(vec!["BITFIELD_NIL_KEY"]);
        let connection = connection.and_then(|connection| {
            let mut bitfield_commands = BitfieldCommands::new();
            // With OVERFLOW FAIL, an overflowing INCRBY yields a nil entry.
            bitfield_commands.overflow(BitfieldOverflow::Fail);
            bitfield_commands.incrby(
                BitfieldOffset::Bits(102),
                BitfieldTypeAndValue::Unsigned(2, 4),
            );
            connection.bitfield(("BITFIELD_NIL_KEY", &bitfield_commands))
        });
        let results = core.run(connection).unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0], None);
    }
    // The del_test_* variants below exercise each `CommandCollection` impl:
    // Vec<&str>, Vec<String>, slices, and fixed-size arrays.
    #[test]
    fn del_test_vec() {
        let (mut core, connection) = setup();
        let del_keys = vec!["DEL_KEY_1", "DEL_KEY_2"];
        let connection = connection.and_then(|connection| connection.del(del_keys));
        let _ = core.run(connection).unwrap();
    }
    #[test]
    fn del_test_vec_string() {
        let (mut core, connection) = setup();
        let del_keys = vec![String::from("DEL_KEY_1"), String::from("DEL_KEY_2")];
        let connection = connection.and_then(|connection| connection.del(del_keys));
        let _ = core.run(connection).unwrap();
    }
    #[test]
    fn del_test_slice() {
        let (mut core, connection) = setup();
        let del_keys = ["DEL_KEY_1", "DEL_KEY_2"];
        let connection = connection.and_then(|connection| connection.del(&del_keys[..]));
        let _ = core.run(connection).unwrap();
    }
    #[test]
    fn del_test_slice_string() {
        let (mut core, connection) = setup();
        let del_keys = [String::from("DEL_KEY_1"), String::from("DEL_KEY_2")];
        let connection = connection.and_then(|connection| connection.del(&del_keys[..]));
        let _ = core.run(connection).unwrap();
    }
    #[test]
    fn del_test_ary() {
        let (mut core, connection) = setup();
        let del_keys = ["DEL_KEY_1"];
        let connection = connection.and_then(|connection| connection.del(del_keys));
        let _ = core.run(connection).unwrap();
    }
    #[test]
    fn del_test_ary2() {
        let (mut core, connection) = setup();
        let del_keys = ["DEL_KEY_1", "DEL_KEY_2"];
        let connection = connection.and_then(|connection| connection.del(del_keys));
        let _ = core.run(connection).unwrap();
    }
    #[test]
    fn del_not_enough_keys() {
        let (mut core, connection) = setup();
        let del_keys: Vec<String> = vec![];
        let connection = connection.and_then(|connection| connection.del(del_keys));
        let result = core.run(connection);
        // An empty collection must fail client-side without hitting the server.
        if let &Err(Error::Internal(ref msg)) = &result {
            assert_eq!("DEL command needs at least one key", msg);
        } else {
            panic!("Should have errored: {:?}", result);
        }
    }
}
}
|
use hacspec_dev::prelude::*;
use hacspec_lib::prelude::*;
use hacspec_chacha20::*;
#[test]
fn test_quarter_round() {
    // One quarter round applied to columns 2, 7, 8, 13 of a full state.
    let words = |s: &State| s.iter().map(|x| U32::declassify(*x)).collect::<Vec<_>>();
    let mut state = State::from_public_slice(&[
        0x879531e0, 0xc5ecf37d, 0x516461b1, 0xc9a62f8a, 0x44c20ef3, 0x3390af7f, 0xd9fc690b,
        0x2a5f714c, 0x53372767, 0xb00a5631, 0x974c541a, 0x359e9963, 0x5c971061, 0x3d631689,
        0x2098d9d6, 0x91dbd320,
    ]);
    let expected_state = State::from_public_slice(&[
        0x879531e0, 0xc5ecf37d, 0xbdb886dc, 0xc9a62f8a, 0x44c20ef3, 0x3390af7f, 0xd9fc690b,
        0xcfacafd2, 0xe46bea80, 0xb00a5631, 0x974c541a, 0x359e9963, 0x5c971061, 0xccc07c79,
        0x2098d9d6, 0x91dbd320,
    ]);
    state = chacha20_quarter_round(2, 7, 8, 13, state);
    assert_eq!(words(&state), words(&expected_state));
}
#[test]
fn test_block() {
    // Declassifies a secret state into comparable public words.
    let words = |s: &State| s.iter().map(|x| U32::declassify(*x)).collect::<Vec<_>>();
    let key = ChaChaKey::from_public_slice(&[
        0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
        0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d,
        0x1e, 0x1f,
    ]);
    let iv = ChaChaIV::from_public_slice(&[
        0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x4a, 0x00, 0x00, 0x00, 0x00,
    ]);
    // Initial state: constants, key, counter = 1, nonce.
    let state = chacha20_init(key, iv, U32(1));
    let expected_init = State::from_public_slice(&[
        0x61707865, 0x3320646e, 0x79622d32, 0x6b206574, 0x03020100, 0x07060504, 0x0b0a0908,
        0x0f0e0d0c, 0x13121110, 0x17161514, 0x1b1a1918, 0x1f1e1d1c, 0x00000001, 0x09000000,
        0x4a000000, 0x00000000,
    ]);
    assert_eq!(words(&state), words(&expected_init));
    let state = chacha20_core(U32(0u32), state);
    let expected_core = State::from_public_slice(&[
        0xe4e7f110, 0x15593bd1, 0x1fdd0f50, 0xc47120a3, 0xc7f4d1c7, 0x0368c033, 0x9aaa2204,
        0x4e6cd4c3, 0x466482d2, 0x09aa9f07, 0x05d7c214, 0xa2028bd9, 0xd19c12b5, 0xb94e16de,
        0xe883d0cb, 0x4e3c50a2,
    ]);
    assert_eq!(words(&state), words(&expected_core));
    // Serialisation is little-endian per word.
    let expected_serialised = secret_bytes!([
        0x10, 0xf1, 0xe7, 0xe4, 0xd1, 0x3b, 0x59, 0x15, 0x50, 0x0f, 0xdd, 0x1f, 0xa3, 0x20, 0x71,
        0xc4, 0xc7, 0xd1, 0xf4, 0xc7, 0x33, 0xc0, 0x68, 0x03, 0x04, 0x22, 0xaa, 0x9a, 0xc3, 0xd4,
        0x6c, 0x4e, 0xd2, 0x82, 0x64, 0x46, 0x07, 0x9f, 0xaa, 0x09, 0x14, 0xc2, 0xd7, 0x05, 0xd9,
        0x8b, 0x02, 0xa2, 0xb5, 0x12, 0x9c, 0xd1, 0xde, 0x16, 0x4e, 0xb9, 0xcb, 0xd0, 0x83, 0xe8,
        0xa2, 0x50, 0x3c, 0x4e
    ]);
    let serialised = state.to_le_bytes();
    println!("{:?}", serialised.len());
    assert_eq!(
        serialised
            .iter()
            .map(|x| U8::declassify(*x))
            .collect::<Vec<_>>(),
        expected_serialised
            .iter()
            .map(|x| U8::declassify(*x))
            .collect::<Vec<_>>()
    );
}
/// Round-trip helper: encrypting then "encrypting" again with the same key/iv and
/// counter 1 must reproduce the original message (ChaCha20 is its own inverse).
fn enc_dec_test(m: ByteSeq, key: ChaChaKey, iv: ChaChaIV) {
    let bytes = |s: &ByteSeq| s.iter().map(|x| U8::declassify(*x)).collect::<Vec<_>>();
    let ciphertext = chacha20(key, iv, 1u32, &m);
    let decrypted = chacha20(key, iv, 1u32, &ciphertext);
    assert_eq!(bytes(&m), bytes(&decrypted));
}
/// Known-answer helper: `m` must encrypt to `exp_cipher` (counter 1), and the
/// ciphertext must decrypt back to `m`.
/// (Removed a pointless `let enc = ...; let c = enc;` rebinding.)
fn kat_test(m: ByteSeq, key: ChaChaKey, iv: ChaChaIV, exp_cipher: ByteSeq) {
    let c = chacha20(key, iv, 1u32, &m);
    assert_eq!(
        exp_cipher
            .iter()
            .map(|x| U8::declassify(*x))
            .collect::<Vec<_>>(),
        c.iter().map(|x| U8::declassify(*x)).collect::<Vec<_>>()
    );
    let m_dec = chacha20(key, iv, 1u32, &c);
    assert_eq!(
        m.iter().map(|x| U8::declassify(*x)).collect::<Vec<_>>(),
        m_dec.iter().map(|x| U8::declassify(*x)).collect::<Vec<_>>()
    );
}
#[test]
fn test_enc_dec() {
    // Random key, nonce and a 40-byte message; the round trip must be lossless.
    let key = ChaChaKey::from_public_slice(&random_byte_vec(ChaChaKey::length()));
    let iv = ChaChaIV::from_public_slice(&random_byte_vec(ChaChaIV::length()));
    let message = ByteSeq::from_public_slice(&random_byte_vec(40));
    enc_dec_test(message, key, iv);
}
#[test]
fn test_kat() {
    // Known-answer test; key/nonce/plaintext ("Ladies and Gentlemen...") and the
    // expected ciphertext appear to be the RFC 8439 ChaCha20 example vector —
    // TODO confirm against RFC 8439 §2.4.2.
    let key = ChaChaKey::from_public_slice(&[
        0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e,
        0x8f, 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d,
        0x9e, 0x9f,
    ]);
    let iv = ChaChaIV::from_public_slice(&[
        0x07, 0x00, 0x00, 0x00, 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
    ]);
    let m = ByteSeq::from_public_slice(&[
        0x4c, 0x61, 0x64, 0x69, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74,
        0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6c,
        0x61, 0x73, 0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39, 0x3a, 0x20, 0x49, 0x66, 0x20,
        0x49, 0x20, 0x63, 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66, 0x65, 0x72, 0x20, 0x79,
        0x6f, 0x75, 0x20, 0x6f, 0x6e, 0x6c, 0x79, 0x20, 0x6f, 0x6e, 0x65, 0x20, 0x74, 0x69, 0x70,
        0x20, 0x66, 0x6f, 0x72, 0x20, 0x74, 0x68, 0x65, 0x20, 0x66, 0x75, 0x74, 0x75, 0x72, 0x65,
        0x2c, 0x20, 0x73, 0x75, 0x6e, 0x73, 0x63, 0x72, 0x65, 0x65, 0x6e, 0x20, 0x77, 0x6f, 0x75,
        0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69, 0x74, 0x2e,
    ]);
    let exp_cipher = ByteSeq::from_public_slice(&[
        0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb, 0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e,
        0xc2, 0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe, 0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee,
        0x62, 0xd6, 0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12, 0x82, 0xfa, 0xfb, 0x69, 0xda,
        0x92, 0x72, 0x8b, 0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29, 0x05, 0xd6, 0xa5, 0xb6,
        0x7e, 0xcd, 0x3b, 0x36, 0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c, 0x98, 0x03, 0xae,
        0xe3, 0x28, 0x09, 0x1b, 0x58, 0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94, 0x55, 0x85,
        0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc, 0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d, 0xe5,
        0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b, 0x61, 0x16,
    ]);
    kat_test(m, key, iv, exp_cipher);
}
|
use drawille::Canvas;
use image::{
imageops::{resize, FilterType::Lanczos3},
open, DynamicImage,
};
use terminal_size::{terminal_size, Height, Width};
fn main() {
    // Load the source image and flatten it to 8-bit grayscale.
    let im = open("logo.jpg").unwrap().to_rgba8();
    let gray = DynamicImage::ImageRgba8(im).into_luma8();
    // Terminal width in character cells caps the canvas width.
    // NOTE(review): drawille packs 2x4 braille dots per character cell, so sizing
    // the dot canvas by character columns under-fills the terminal — confirm intended.
    let (Width(term_w), Height(_term_h)) = terminal_size().unwrap();
    let term_w = term_w as u32;
    let (mut w, mut h) = (gray.width(), gray.height());
    // Downscale (preserving aspect ratio) only when the image is wider than the
    // terminal; previously the image was cloned unconditionally.
    let img = if term_w < w {
        let ratio = term_w as f64 / w as f64;
        h = (h as f64 * ratio) as u32;
        w = term_w;
        resize(&gray, w, h, Lanczos3)
    } else {
        gray
    };
    // Threshold each pixel: bright pixels (>128) become set braille dots.
    // Iterate the raw buffer by reference instead of cloning it with `to_vec()`.
    let mut canvas = Canvas::new(w, h);
    for (i, &pix) in img.as_raw().iter().enumerate() {
        if pix > 128 {
            let i = i as u32;
            canvas.set(i % w, i / w);
        }
    }
    println!("{}", canvas.frame());
}
|
use crate::*;
use std::fs::File;
use std::io::prelude::*;
use std::path::PathBuf;
use std::sync::Arc;
use tracing;
/// Ping result processor that appends each result, formatted as a console
/// log line, to a text file.
pub struct PingResultProcessorTextLogger {
    // Settings shared by all result processors.
    common_config: Arc<PingResultProcessorCommonConfig>,
    // Path of the log file; kept only for diagnostics in error messages.
    log_path: PathBuf,
    // Open handle the formatted results are written to.
    log_file: File,
}
impl PingResultProcessorTextLogger {
    /// Creates a text logger writing to `log_path_buf`.
    ///
    /// The log file handle is obtained eagerly through
    /// `rnp_utils::create_log_file`, so failures surface at construction time.
    #[tracing::instrument(name = "Creating ping result text logger", level = "debug")]
    pub fn new(common_config: Arc<PingResultProcessorCommonConfig>, log_path_buf: &PathBuf) -> PingResultProcessorTextLogger {
        PingResultProcessorTextLogger {
            common_config,
            log_path: log_path_buf.clone(),
            log_file: rnp_utils::create_log_file(log_path_buf),
        }
    }
}
impl PingResultProcessor for PingResultProcessorTextLogger {
    fn name(&self) -> &'static str {
        "TextLogger"
    }

    fn config(&self) -> &PingResultProcessorCommonConfig {
        self.common_config.as_ref()
    }

    /// Appends the console-formatted ping result to the log file, one record
    /// per line.
    fn process_ping_result(&mut self, ping_result: &PingResult) {
        let log_content: String = ping_result.format_as_console_log();
        // `write_all` retries partial writes instead of silently truncating
        // the record (plain `write` may write fewer bytes than requested).
        // The panic message is built lazily so the happy path does not pay
        // for the formatting/allocation.
        self.log_file
            .write_all(log_content.as_bytes())
            .unwrap_or_else(|e| panic!("Failed to write logs to text file! Path = {}, Error = {}", self.log_path.display(), e));
        self.log_file
            .write_all(b"\n")
            .unwrap_or_else(|e| panic!("Failed to write logs to text file! Path = {}, Error = {}", self.log_path.display(), e));
    }
}
|
//
// Static Field Access
// An example that demonstrates setting and retrieving values from public
// static fields on objects.
//
extern crate rjni;
use std::path::PathBuf;
use std::env;
use rjni::{JavaVM, Version, Classpath, Options, Value, Type};
/// Demonstrates setting and reading a public static field on a Java class
/// through the rjni bindings.
fn main() {
    // Build the classpath: the `examples` directory next to the crate
    // manifest is where the JVM will look for .class files to load.
    let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap_or(".".to_string());
    let examples_dir = PathBuf::from(manifest_dir).join("examples");
    let classpath = Classpath::new().add(examples_dir);

    // JVM startup options: target version plus the classpath above.
    let options = Options::new().version(Version::V16).classpath(classpath);

    // Boot the Java virtual machine with those options.
    let jvm = JavaVM::new(options).unwrap();

    // Locate and load `Test.class` from the classpath.
    let class = jvm.class("Test").unwrap();

    // Write "Hi!" into the public static `message` field.
    class
        .set_static_field("message", Value::Str(String::from("Hi!")))
        .unwrap();

    // Read the field back, declaring its type as `String`, and show it.
    let current = class.static_field("message", Type::Str).unwrap();
    println!("{:?}", current);

    // Finally, have the Java side print the message itself.
    class.call_static("printMessage", &[], Type::Void).unwrap();
}
|
use server::*;
use specs::*;
use server::component::channel::{OnCommand, OnCommandReader};
use server::component::time::ThisFrame;
use server::protocol::server::{GameFlag, ServerPacket};
use server::protocol::{to_bytes, FlagUpdateType};
use component::*;
/// System handling the `drop` chat command: any flag carried by the issuing
/// player is dropped at the player's current position.
pub struct DropSystem {
    // Event reader for the OnCommand channel; registered in `setup()`.
    reader: Option<OnCommandReader>,
}
/// Resources and component storages `DropSystem` accesses each frame.
#[derive(SystemData)]
pub struct DropSystemData<'a> {
    // Chat-command event channel this system listens on.
    pub channel: Read<'a, OnCommand>,
    // Active client connections; used to map a command sender to a player.
    pub conns: Read<'a, Connections>,
    // Current frame time; recorded in `LastDrop`.
    pub thisframe: Read<'a, ThisFrame>,
    pub entities: Entities<'a>,
    // Flag positions are rewritten to the player position on drop.
    pub pos: WriteStorage<'a, Position>,
    pub team: ReadStorage<'a, Team>,
    pub is_flag: ReadStorage<'a, IsFlag>,
    // Cleared for every flag the player was carrying.
    pub carrier: WriteStorage<'a, FlagCarrier>,
    pub lastdrop: WriteStorage<'a, LastDrop>,
    // Outgoing flag events consumed by other systems.
    pub flagchannel: Write<'a, OnFlag>,
}
impl DropSystem {
pub fn new() -> Self {
Self { reader: None }
}
}
/// Each frame: consume queued chat commands; for every "drop" command,
/// detach all flags carried by the sender and broadcast the new position.
impl<'a> System<'a> for DropSystem {
    type SystemData = DropSystemData<'a>;
    fn setup(&mut self, res: &mut Resources) {
        Self::SystemData::setup(res);
        // Register a channel reader so `run` sees commands from now on.
        self.reader = Some(res.fetch_mut::<OnCommand>().register_reader());
    }
    fn run(&mut self, data: Self::SystemData) {
        let Self::SystemData {
            channel,
            conns,
            thisframe,
            entities,
            mut pos,
            team,
            is_flag,
            mut carrier,
            mut lastdrop,
            mut flagchannel,
        } = data;
        // Drain every command received since the last frame.
        for evt in channel.read(self.reader.as_mut().unwrap()) {
            // Only the "drop" command is handled by this system.
            if evt.1.com != "drop" {
                continue;
            }
            // Resolve the sending connection to its player entity; ignore
            // commands from connections that have no associated player.
            let player = match conns.0.get(&evt.0) {
                Some(conn) => match conn.player {
                    Some(p) => p,
                    None => continue,
                },
                None => continue,
            };
            let p_pos = *pos.get(player).unwrap();
            // Join over all flags and keep the ones carried by this player.
            (
                &mut pos,
                &team,
                &is_flag,
                &mut carrier,
                &mut lastdrop,
                &*entities,
            ).join()
            .filter(|(_, _, _, carrier, _, _)| {
                carrier.0.is_some() && carrier.0.unwrap() == player
            })
            .for_each(|(fpos, team, _, carrier, lastdrop, ent)| {
                // Packet telling clients where the flag now lies.
                let packet = GameFlag {
                    ty: FlagUpdateType::Position,
                    flag: *team,
                    id: None,
                    pos: p_pos,
                    blueteam: 0,
                    redteam: 0,
                };
                // Publish the drop so other systems can react to it.
                flagchannel.single_write(FlagEvent {
                    ty: FlagEventType::Drop,
                    player: carrier.0,
                    flag: ent,
                });
                // Place the flag at the player's position and clear carrier.
                *fpos = p_pos;
                *carrier = FlagCarrier(None);
                // Record who dropped it and when — presumably used for a
                // re-pickup cooldown; confirm against PickupFlagSystem.
                *lastdrop = LastDrop {
                    player: Some(player),
                    time: thisframe.0,
                };
                conns.send_to_all(OwnedMessage::Binary(
                    to_bytes(&ServerPacket::GameFlag(packet)).unwrap(),
                ));
            });
        }
    }
}
use super::PickupFlagSystem;
impl SystemInfo for DropSystem {
    type Dependencies = PickupFlagSystem;
    fn name() -> &'static str {
        // NOTE(review): `line!()` makes this "<module path>::<line number>"
        // rather than the type name — presumably only uniqueness matters
        // here; confirm this is intentional.
        concat!(module_path!(), "::", line!())
    }
    fn new() -> Self {
        // Resolves to the inherent `DropSystem::new` (inherent methods take
        // precedence over trait methods), so this does not recurse.
        Self::new()
    }
}
|
use std::process::{exit, Command};
const RAGEL_SOURCE: &'static str = "src/lex/scan.rl";

/// Build script: regenerates `src/lex/scan.rs` from the Ragel grammar.
fn main() {
    println!("cargo:rerun-if-changed={}", RAGEL_SOURCE);
    println!("cargo:rerun-if-env-changed=RAGEL");
    // The ragel binary previously lived at a hard-coded, machine-specific
    // absolute path. Keep that path as the default for compatibility, but
    // allow overriding it through the RAGEL environment variable so the
    // crate builds on other machines.
    let ragel = std::env::var("RAGEL")
        .unwrap_or_else(|_| "/Users/charlie/code/ragel-rust/ragel/ragel".to_string());
    let code = Command::new(&ragel)
        .args(&["--host-lang=Rust", "-o", "src/lex/scan.rs", RAGEL_SOURCE])
        .status()
        .unwrap()
        .code()
        .unwrap_or(1);
    // Propagate ragel's exit code so cargo fails the build on errors.
    exit(code);
}
|
//! A simple horizontal or vertical layout with padding and child spacing.
use std::collections::HashMap;
use crate::{scalar, Scalar, Dimensions, Context, LayoutChildrenArgs, MinimumSizeArgs};
/// Direction along which children are laid out.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Axis {
    Horizontal,
    Vertical,
}
/// Configuration for the linear layout.
#[derive(Debug, Clone)]
pub struct Settings {
    /// Layout direction (row vs. column).
    pub axis: Axis,
    /// Inner padding applied on all four sides.
    pub padding: Scalar,
    /// Gap inserted between consecutive visible children.
    pub child_spacing: Scalar,
}
/// Computes the smallest size that fits every visible child of
/// `args.slot_id`, including padding on both axes and the spacing between
/// consecutive children along the layout axis.
pub fn minimum_size<C: Context>(args: &MinimumSizeArgs<'_, C>, settings: &Settings) -> Dimensions {
    let Settings { axis, padding, child_spacing } = *settings;
    // Total gap space: (n - 1) gaps when there is at least one child.
    let child_count = crate::visible_children(args.slots, args.slot_id).count();
    let total_spacing = if child_count > 0 {
        child_spacing * ((child_count - 1) as Scalar)
    } else {
        scalar::ZERO
    };
    // Seed the result with padding and spacing along the layout axis.
    let mut result = Dimensions::zero();
    match axis {
        Axis::Horizontal => result.width += padding * scalar::TWO + total_spacing,
        Axis::Vertical => result.height += padding * scalar::TWO + total_spacing,
    }
    // Child sizes are summed along the axis and maxed across it.
    for child_id in crate::visible_children(args.slots, args.slot_id) {
        let child_min = args.minimum_size_cache[&child_id];
        match axis {
            Axis::Horizontal => {
                result.width += child_min.width;
                result.height = result.height.max(child_min.height);
            }
            Axis::Vertical => {
                result.width = result.width.max(child_min.width);
                result.height += child_min.height;
            }
        }
    }
    // Padding also applies on the cross axis.
    match axis {
        Axis::Horizontal => result.height += padding * scalar::TWO,
        Axis::Vertical => result.width += padding * scalar::TWO,
    }
    result
}
/// Positions and sizes the visible children of `args.slot_id` along
/// `settings.axis`.
///
/// Non-`expand` children always get their minimum size. The space left over
/// (after padding and inter-child spacing) is split equally among `expand`
/// children, except that any `expand` child whose minimum size exceeds its
/// equal share keeps its minimum size ("irregular") and the share is
/// recomputed for the rest. On the cross axis a child either fills the
/// bounds minus padding (if it expands there) or takes its minimum size.
pub fn layout_children<C: Context>(args: &mut LayoutChildrenArgs<'_, C>, settings: &Settings) {
    let Settings { axis, padding, child_spacing } = *settings;
    let bounds = args.slots.get(args.slot_id).bounds;
    let children = crate::visible_children(args.slots, args.slot_id).collect::<Vec<_>>();
    let num_children = children.len();
    // No visible children: nothing to lay out. This also guards the
    // `num_children - 1` below, which would underflow `usize` when empty.
    if num_children == 0 {
        return;
    }
    let size = match axis {
        Axis::Horizontal => bounds.size.width,
        Axis::Vertical => bounds.size.height,
    };
    // Space left to distribute after padding on both ends and the fixed
    // spacing between consecutive children.
    let available_size = size - padding * scalar::TWO - child_spacing * ((num_children - 1) as Scalar);
    let mut irregular_sizes = HashMap::new();
    let mut num_regular_children = num_children;
    let mut reserved_size = scalar::ZERO;
    let mut expand_children = Vec::new();
    // Children not set as `expand` will always be their minimum size.
    for child_id in children.iter().rev() {
        let child = args.slots.get(*child_id);
        let child_minimum_size = args.minimum_size_cache[child_id];
        let (axis_expand, minimum_size) = match axis {
            Axis::Horizontal => (child.info.expand_x, child_minimum_size.width),
            Axis::Vertical => (child.info.expand_y, child_minimum_size.height),
        };
        if !axis_expand {
            irregular_sizes.insert(*child_id, minimum_size);
            num_regular_children -= 1;
            reserved_size += minimum_size;
        } else {
            expand_children.push((*child_id, minimum_size));
        }
    }
    // Children set as `expand` will be at least their minimum size or at most the "regular" size.
    // (The leftover size that is distributed equally (for now) between children marked `expand`.)
    // We need to figure out which children will be irregularly-sized.
    //
    // It makes sense to check this on children ordered from largest to smallest minimum sizes.
    expand_children.sort_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap());
    for (child_id, minimum_size) in expand_children.into_iter().rev() {
        let regular_size = if num_regular_children > 0 {
            (available_size - reserved_size) / num_regular_children as Scalar
        } else {
            scalar::ZERO
        };
        if minimum_size > regular_size {
            irregular_sizes.insert(child_id, minimum_size);
            num_regular_children -= 1;
            reserved_size += minimum_size;
        }
    }
    // Final equal share for the remaining "regular" expand children.
    let regular_size = if num_regular_children > 0 {
        (available_size - reserved_size) / num_regular_children as Scalar
    } else {
        scalar::ZERO
    };
    // Walk the children in order, assigning bounds and advancing the offset.
    let mut offset = padding;
    for (i, child_id) in children.iter().enumerate() {
        let child = args.slots.get_mut(*child_id);
        let other_expand = match axis {
            Axis::Horizontal => child.info.expand_y,
            Axis::Vertical => child.info.expand_x,
        };
        let child_size = irregular_sizes
            .get(child_id)
            .cloned()
            .unwrap_or(regular_size);
        let child_minimum_size = args.minimum_size_cache[child_id];
        match axis {
            Axis::Horizontal => {
                child.bounds.x = offset;
                child.bounds.y = padding;
                child.bounds.size.width = child_size.max(child_minimum_size.width);
                child.bounds.size.height = if other_expand {
                    (bounds.size.height - (padding * scalar::TWO)).max(child_minimum_size.height)
                } else {
                    child_minimum_size.height
                };
                offset += child.bounds.size.width;
            }
            Axis::Vertical => {
                child.bounds.x = padding;
                child.bounds.y = offset;
                child.bounds.size.width = if other_expand {
                    (bounds.size.width - (padding * scalar::TWO)).max(child_minimum_size.width)
                } else {
                    child_minimum_size.width
                };
                child.bounds.size.height = child_size.max(child_minimum_size.height);
                offset += child.bounds.size.height;
            }
        }
        if i < children.len() - 1 {
            offset += child_spacing;
        }
    }
}
|
use pam_sys::{
acct_mgmt, authenticate, close_session, end, getenv, open_session, putenv, setcred, start,
};
use pam_sys::{PamFlag, PamHandle, PamReturnCode};
use users;
use std::{env, ptr};
use crate::{env::get_pam_env, ffi, Converse, PamError, PamResult, PasswordConv};
/// Main struct to authenticate a user
///
/// You need to create an instance of it to start an authentication process. If you
/// want a simple password-based authentication, you can use `Authenticator::with_password`,
/// and use the following flow:
///
/// ```no_run
/// use pam::Authenticator;
///
/// let mut authenticator = Authenticator::with_password("system-auth")
/// .expect("Failed to init PAM client.");
/// // Preset the login & password we will use for authentication
/// authenticator.handler_mut().set_credentials("login", "password");
/// // actually try to authenticate:
/// authenticator.authenticate().expect("Authentication failed!");
/// // Now that we are authenticated, it's possible to open a session:
/// authenticator.open_session().expect("Failed to open a session!");
/// ```
///
/// If you wish to customise the PAM conversation function, you should rather create your
/// authenticator with `Authenticator::with_handler`, providing a struct implementing the
/// `Converse` trait. You can then mutably access your conversation handler using the
/// `Authenticator::handler_mut` method.
///
/// By default, the `Authenticator` will close any opened session when dropped. If you don't
/// want this, you can change its `close_on_drop` field to `false`.
pub struct Authenticator<'a, C: Converse> {
    /// Flag indicating whether the Authenticator should close the session on drop
    pub close_on_drop: bool,
    // Borrowed PAM transaction handle; terminated with `end()` in Drop.
    handle: &'a mut PamHandle,
    // Conversation handler; boxed so the pointer handed to PAM stays stable.
    converse: Box<C>,
    // Set once `authenticate()` succeeds; gates `open_session()`.
    is_authenticated: bool,
    // True after a session was opened; checked in Drop.
    has_open_session: bool,
    // Return code of the most recent PAM call, surfaced by `reset()`.
    last_code: PamReturnCode,
}
impl<'a> Authenticator<'a, PasswordConv> {
    /// Create a new `Authenticator` with a given service name and a password-based conversation
    pub fn with_password(service: &str) -> PamResult<Authenticator<'a, PasswordConv>> {
        let conv = PasswordConv::new();
        Authenticator::with_handler(service, conv)
    }
}
impl<'a, C: Converse> Authenticator<'a, C> {
    /// Creates a new Authenticator with a given service name and conversation callback
    ///
    /// Starts a PAM transaction for `service`; the handle is ended in Drop.
    pub fn with_handler(service: &str, converse: C) -> PamResult<Authenticator<'a, C>> {
        let mut converse = Box::new(converse);
        let conv = ffi::make_conversation(&mut *converse);
        let mut handle: *mut PamHandle = ptr::null_mut();
        match start(service, None, &conv, &mut handle) {
            // SAFETY: on SUCCESS, pam_start leaves `handle` pointing at a
            // valid PamHandle that stays alive until `end` runs in Drop.
            PamReturnCode::SUCCESS => unsafe {
                Ok(Authenticator {
                    close_on_drop: true,
                    handle: &mut *handle,
                    converse,
                    is_authenticated: false,
                    has_open_session: false,
                    last_code: PamReturnCode::SUCCESS,
                })
            },
            code => Err(PamError(code)),
        }
    }
    /// Mutable access to the conversation handler of this Authenticator
    pub fn handler_mut(&mut self) -> &mut C {
        &mut *self.converse
    }
    /// Immutable access to the conversation handler of this Authenticator
    pub fn handler(&self) -> &C {
        &*self.converse
    }
    /// Perform the authentication with the provided credentials
    ///
    /// Runs `pam_authenticate` followed by `pam_acct_mgmt` (account
    /// validity/expiry checks). Only after both succeed is
    /// `is_authenticated` left set.
    pub fn authenticate(&mut self) -> PamResult<()> {
        self.last_code = authenticate(self.handle, PamFlag::NONE);
        if self.last_code != PamReturnCode::SUCCESS {
            // No need to reset here
            return Err(From::from(self.last_code));
        }
        self.is_authenticated = true;
        self.last_code = acct_mgmt(self.handle, PamFlag::NONE);
        if self.last_code != PamReturnCode::SUCCESS {
            // Probably not strictly necessary but better be sure
            return self.reset();
        }
        Ok(())
    }
    /// Open a session for a previously authenticated user and
    /// initialize the environment appropriately (in PAM and regular environment variables).
    pub fn open_session(&mut self) -> PamResult<()> {
        if !self.is_authenticated {
            //TODO: is this the right return code?
            return Err(PamReturnCode::PERM_DENIED.into());
        }
        // Establish credentials before opening the session; any failure
        // rolls everything back through `reset()`.
        self.last_code = setcred(self.handle, PamFlag::ESTABLISH_CRED);
        if self.last_code != PamReturnCode::SUCCESS {
            return self.reset();
        }
        self.last_code = open_session(self.handle, PamFlag::NONE);
        if self.last_code != PamReturnCode::SUCCESS {
            return self.reset();
        }
        // Follow openSSH and call pam_setcred before and after open_session
        self.last_code = setcred(self.handle, PamFlag::REINITIALIZE_CRED);
        if self.last_code != PamReturnCode::SUCCESS {
            return self.reset();
        }
        self.has_open_session = true;
        self.initialize_environment()
    }
    // Initialize the client environment with common variables.
    // Currently always called from Authenticator.open_session()
    fn initialize_environment(&mut self) -> PamResult<()> {
        use users::os::unix::UserExt;
        // Set PAM environment in the local process
        if let Some(mut env_list) = get_pam_env(self.handle) {
            let env = env_list.to_vec();
            for (key, value) in env {
                env::set_var(&key, &value);
            }
        }
        let user = users::get_user_by_name(self.converse.username()).unwrap_or_else(|| {
            panic!("Could not get user by name: {:?}", self.converse.username())
        });
        // Set some common environment variables
        self.set_env(
            "USER",
            user.name()
                .to_str()
                .expect("Unix usernames should be valid UTF-8"),
        )?;
        self.set_env(
            "LOGNAME",
            user.name()
                .to_str()
                .expect("Unix usernames should be valid UTF-8"),
        )?;
        self.set_env("HOME", user.home_dir().to_str().unwrap())?;
        self.set_env("PWD", user.home_dir().to_str().unwrap())?;
        self.set_env("SHELL", user.shell().to_str().unwrap())?;
        // Taken from https://github.com/gsingh93/display-manager/blob/master/pam.c
        // Should be a better way to get this. Revisit later.
        // NOTE(review): the literal "$PATH" is not expanded by set_var —
        // presumably a downstream shell expands it; confirm.
        self.set_env("PATH", "$PATH:/usr/local/sbin:/usr/local/bin:/usr/bin")?;
        Ok(())
    }
    // Utility function to set an environment variable in PAM and the process
    fn set_env(&mut self, key: &str, value: &str) -> PamResult<()> {
        // Set regular environment variable
        env::set_var(key, value);
        // Set pam environment variable
        // Only set it if PAM does not already define the key; existing PAM
        // values win over ours.
        if getenv(self.handle, key).is_none() {
            let name_value = format!("{}={}", key, value);
            match putenv(self.handle, &name_value) {
                PamReturnCode::SUCCESS => Ok(()),
                code => Err(From::from(code)),
            }
        } else {
            Ok(())
        }
    }
    // Utility function to reset the pam handle in case of intermediate errors
    // Drops credentials, clears the authenticated flag, and returns the
    // last PAM return code as the error.
    fn reset(&mut self) -> PamResult<()> {
        setcred(self.handle, PamFlag::DELETE_CRED);
        self.is_authenticated = false;
        Err(From::from(self.last_code))
    }
}
impl<'a, C: Converse> Drop for Authenticator<'a, C> {
    fn drop(&mut self) {
        // Close the PAM session if we opened one and the user didn't opt out.
        if self.has_open_session && self.close_on_drop {
            close_session(self.handle, PamFlag::NONE);
        }
        // Drop credentials and terminate the PAM transaction, passing the
        // status of the last operation to `end`.
        let code = setcred(self.handle, PamFlag::DELETE_CRED);
        end(self.handle, code);
    }
}
|
// Copyright (C) 2020 Sebastian Dröge <sebastian@centricular.com>
//
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use super::*;
use crate::Method;
/// `Public` header ([RFC 7826 section 18.39](https://tools.ietf.org/html/rfc7826#section-18.39)).
///
/// Newtype around the list of RTSP methods the server advertises as supported.
#[derive(Debug, Clone)]
pub struct Public(Vec<Method>);
// Deref/DerefMut let a `Public` be used directly as a `Vec<Method>`.
impl std::ops::Deref for Public {
    type Target = Vec<Method>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl std::ops::DerefMut for Public {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
// Explicit borrow conversions to the inner method list.
impl AsRef<Vec<Method>> for Public {
    fn as_ref(&self) -> &Vec<Method> {
        &self.0
    }
}
impl AsMut<Vec<Method>> for Public {
    fn as_mut(&mut self) -> &mut Vec<Method> {
        &mut self.0
    }
}
// Conversions from owned and borrowed method lists.
impl From<Vec<Method>> for Public {
    fn from(v: Vec<Method>) -> Self {
        Public(v)
    }
}
impl<'a> From<&'a [Method]> for Public {
    fn from(v: &'a [Method]) -> Self {
        Public(v.to_vec())
    }
}
impl Public {
    /// Creates a new `Public` header builder with an empty method list.
    pub fn builder() -> PublicBuilder {
        PublicBuilder(Vec::default())
    }
}
/// Builder for the `Public` header.
#[derive(Debug, Clone)]
pub struct PublicBuilder(Vec<Method>);

impl PublicBuilder {
    /// Appends `method` to the list advertised by the header.
    pub fn method(mut self, method: Method) -> Self {
        self.0.push(method);
        self
    }

    /// Consumes the builder and produces the finished `Public` header.
    pub fn build(self) -> Public {
        let PublicBuilder(methods) = self;
        Public(methods)
    }
}
impl super::TypedHeader for Public {
fn from_headers(headers: impl AsRef<Headers>) -> Result<Option<Self>, HeaderParseError> {
let headers = headers.as_ref();
let header = match headers.get(&PUBLIC) {
None => return Ok(None),
Some(header) => header,
};
let mut public = Vec::new();
for method in header.as_str().split(',') {
let method = method.trim();
public.push(method.into());
}
Ok(Some(Public(public)))
}
fn insert_into(&self, mut headers: impl AsMut<Headers>) {
let headers = headers.as_mut();
let mut public = String::new();
for method in &self.0 {
if !public.is_empty() {
public.push_str(", ");
}
public.push_str(method.into());
}
headers.insert(PUBLIC, public);
}
}
impl super::TypedAppendableHeader for Public {
    /// Serializes the method list as "A, B, C" and appends it.
    fn append_to(&self, mut headers: impl AsMut<Headers>) {
        let value = self
            .0
            .iter()
            .map(|m| -> &str { m.into() })
            .collect::<Vec<_>>()
            .join(", ");
        headers.as_mut().append(PUBLIC, value);
    }
}
|
use iced::{Align, button, Button, Element, Length, Row, scrollable, Scrollable, Text, text_input, TextInput};
use crate::central_ui;
use crate::puzzle_backend;
use std::rc::Rc;
use std::cell::RefCell;
/// Side panel listing the across and down clues, with inline clue editing.
pub struct CluesBrowser {
    // Shared handle to the puzzle model.
    pub backend: Rc<RefCell<puzzle_backend::Puzzle>>,
    // Across clues in display order.
    pub a_clues: Vec<ClueEntry>,
    pub a_scroller: scrollable::State,
    // Down clues in display order.
    pub d_clues: Vec<ClueEntry>,
    pub d_scroller: scrollable::State,
    // (label, variant) of the clue currently being edited, if any.
    pub being_modified: Option<(u32, puzzle_backend::EntryVariant)>,
}
impl CluesBrowser {
    /// Builds a browser over `backend`, populating both clue lists.
    pub fn new(backend: Rc<RefCell<puzzle_backend::Puzzle>>) -> Self {
        let mut c = CluesBrowser {
            backend,
            a_clues: Vec::new(),
            a_scroller: Default::default(),
            d_clues: Vec::new(),
            d_scroller: Default::default(),
            being_modified: None,
        };
        c.update_clues();
        c
    }
    /// Rebuilds both clue lists from the backend puzzle state.
    pub fn update_clues(&mut self) {
        self.a_clues.clear();
        self.d_clues.clear();
        // Holding the immutable RefCell borrow of `backend` while pushing to
        // the sibling `a_clues` field is fine: disjoint field borrows.
        let a_entries = &self.backend.borrow().across_entries;
        for a in a_entries.iter() {
            self.a_clues.push(ClueEntry::new(a.label,a.variant,a.clue.clone()));
        }
        let d_entries = &self.backend.borrow().down_entries;
        for d in d_entries.iter() {
            self.d_clues.push(ClueEntry::new(d.label,d.variant,d.clue.clone()));
        }
    }
    /// Marks the clue (label, variant) as being edited.
    /// No-op when the puzzle is fill-only (clues are read-only then).
    pub fn set_being_modified(&mut self, label: u32, variant: puzzle_backend::EntryVariant) {
        if !self.backend.borrow().fill_only {
            match variant {
                puzzle_backend::EntryVariant::Across => {
                    let entry = self.a_clues.iter_mut().find(|x| x.label == label).unwrap();
                    entry.being_modified = true;
                }
                puzzle_backend::EntryVariant::Down => {
                    let entry = self.d_clues.iter_mut().find(|x| x.label == label).unwrap();
                    entry.being_modified = true;
                }
            }
            self.being_modified = Some((label,variant));
        }
    }
    /// Finishes editing: commits the cached clue text to the backend and
    /// clears the editing marker.
    /// NOTE(review): unwraps `being_modified` — callers must only invoke
    /// this while an edit is in progress.
    pub fn unset_being_modified(&mut self) {
        if !self.backend.borrow().fill_only {
            let (l,v) = self.being_modified.unwrap();
            match v {
                puzzle_backend::EntryVariant::Across => {
                    let entry = self.a_clues.iter_mut().find(|x| x.label == l).unwrap();
                    entry.being_modified = false;
                    self.backend.borrow_mut().set_clue_text(l,v,entry.clue_cache.clone());
                }
                puzzle_backend::EntryVariant::Down => {
                    let entry = self.d_clues.iter_mut().find(|x| x.label == l).unwrap();
                    entry.being_modified = false;
                    self.backend.borrow_mut().set_clue_text(l,v,entry.clue_cache.clone());
                }
            }
            self.being_modified = None;
        }
    }
    /// Updates the cached text of the clue currently being edited.
    /// NOTE(review): also unwraps `being_modified`; see above.
    pub fn set_clue_text(&mut self, text: String) {
        let (l,v) = self.being_modified.unwrap();
        match v {
            puzzle_backend::EntryVariant::Across => {
                let entry = self.a_clues.iter_mut().find(|x| x.label == l).unwrap();
                entry.clue_cache = text;
            }
            puzzle_backend::EntryVariant::Down => {
                let entry = self.d_clues.iter_mut().find(|x| x.label == l).unwrap();
                entry.clue_cache = text;
            }
        }
    }
    /// Renders the two clue columns (across, down) side by side, each in its
    /// own scrollable.
    pub fn view (&mut self) -> Element<central_ui::Message> {
        Row::new()
            .width(Length::FillPortion(1))
            .spacing(5)
            .align_items(Align::Start)
            .push(
                self.a_clues.iter_mut().fold(
                    Scrollable::new(&mut self.a_scroller)
                    , |sc, x| sc.push(x.view())).width(Length::Fill)
            )
            .push(
                self.d_clues.iter_mut().fold(
                    Scrollable::new(&mut self.d_scroller)
                    , |sc, x| sc.push(x.view())).width(Length::Fill)
            )
            .into()
    }
}
/// One clue row: a numbered button plus either static text or an edit box.
pub struct ClueEntry {
    pub button: button::State,
    pub input: text_input::State,
    // Clue number as shown in the grid.
    pub label: u32,
    // Local copy of the clue text; committed to the backend on submit.
    pub clue_cache: String,
    // Whether this is an across or down clue.
    pub variant: puzzle_backend::EntryVariant,
    // True while the user edits this clue inline.
    pub being_modified: bool,
}
impl ClueEntry {
    /// Creates a clue row that is not being edited.
    pub fn new(label: u32, variant: puzzle_backend::EntryVariant, clue: String) -> Self {
        ClueEntry {
            button: Default::default(),
            input: Default::default(),
            label,
            clue_cache: clue,
            variant,
            being_modified: false,
        }
    }
    /// Renders the row: "<label><A|D>: " button, then either a text input
    /// (while editing) or the clue text.
    pub fn view(&mut self) -> Element<central_ui::Message> {
        let a_or_d = match self.variant {
            puzzle_backend::EntryVariant::Across => 'A',
            puzzle_backend::EntryVariant::Down => 'D',
        };
        let mut prefix = self.label.to_string();
        prefix.push(a_or_d);
        prefix.push_str(&": ".to_string());
        let t = Text::new(prefix);
        if self.being_modified {
            // Editing: pressing the button or submitting the input leaves
            // modification mode; typing emits ClueModified messages.
            Row::new()
                .spacing(10)
                .align_items(Align::Center)
                .push(
                    Button::new(&mut self.button, t)
                        .on_press(central_ui::Message::ClueLeftModification(self.label,self.variant))
                        .min_width(60)
                )
                .push(
                    TextInput::new(&mut self.input, "", &self.clue_cache, central_ui::Message::ClueModified)
                        .on_submit(central_ui::Message::ClueLeftModification(self.label,self.variant))
                )
                .into()
        } else {
            // Viewing: pressing the button enters modification mode.
            Row::new()
                .spacing(10)
                .align_items(Align::Center)
                .push(
                    Button::new(&mut self.button, t)
                        .on_press(central_ui::Message::ClueEnteredModification(self.label,self.variant))
                        .min_width(60)
                )
                .push(
                    Text::new(self.clue_cache.clone())
                )
                .into()
        }
    }
}
// svd2rust-style reader/writer aliases for the ADC_CTL register.
#[doc = "Reader of register ADC_CTL"]
pub type R = crate::R<u32, super::ADC_CTL>;
#[doc = "Writer for register ADC_CTL"]
pub type W = crate::W<u32, super::ADC_CTL>;
#[doc = "Register ADC_CTL `reset()`'s with value 0"]
impl crate::ResetValue for super::ADC_CTL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Hardware reset value of the whole register.
        0
    }
}
#[doc = "Reader of field `ADC_TIME`"]
pub type ADC_TIME_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ADC_TIME`"]
pub struct ADC_TIME_W<'a> {
    w: &'a mut W,
}
impl<'a> ADC_TIME_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // `unsafe`: raw field, no enumerated values constrain the input.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0..=7: clear them, then merge the new value.
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
#[doc = "Enable ADC measurement. When enabled the ADC sequencer will be started when the main sequencer goes to the SAMPLE_NORM state\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ADC_MODE_A {
    #[doc = "0: No ADC measurement"]
    OFF,
    #[doc = "1: Count time A to bring Cref1 + Cref2 up from Vssa to Vrefhi with IDACB"]
    VREF_CNT,
    #[doc = "2: Count time B to bring Cref1 + Cref2 back up to Vrefhi with IDACB (after bringing them down for time A/2 cycles with IDACB sinking)"]
    VREF_BY2_CNT,
    #[doc = "3: Determine HSCMP polarity and count time C to source/sink Cref1 + Cref2 from Vin to Vrefhi."]
    VIN_CNT,
}
// Maps each variant to the raw 2-bit field value.
impl From<ADC_MODE_A> for u8 {
    #[inline(always)]
    fn from(variant: ADC_MODE_A) -> Self {
        match variant {
            ADC_MODE_A::OFF => 0,
            ADC_MODE_A::VREF_CNT => 1,
            ADC_MODE_A::VREF_BY2_CNT => 2,
            ADC_MODE_A::VIN_CNT => 3,
        }
    }
}
#[doc = "Reader of field `ADC_MODE`"]
pub type ADC_MODE_R = crate::R<u8, ADC_MODE_A>;
impl ADC_MODE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ADC_MODE_A {
        match self.bits {
            0 => ADC_MODE_A::OFF,
            1 => ADC_MODE_A::VREF_CNT,
            2 => ADC_MODE_A::VREF_BY2_CNT,
            3 => ADC_MODE_A::VIN_CNT,
            // The 2-bit field can only hold 0..=3, all covered above.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `OFF`"]
    #[inline(always)]
    pub fn is_off(&self) -> bool {
        *self == ADC_MODE_A::OFF
    }
    #[doc = "Checks if the value of the field is `VREF_CNT`"]
    #[inline(always)]
    pub fn is_vref_cnt(&self) -> bool {
        *self == ADC_MODE_A::VREF_CNT
    }
    #[doc = "Checks if the value of the field is `VREF_BY2_CNT`"]
    #[inline(always)]
    pub fn is_vref_by2_cnt(&self) -> bool {
        *self == ADC_MODE_A::VREF_BY2_CNT
    }
    #[doc = "Checks if the value of the field is `VIN_CNT`"]
    #[inline(always)]
    pub fn is_vin_cnt(&self) -> bool {
        *self == ADC_MODE_A::VIN_CNT
    }
}
#[doc = "Write proxy for field `ADC_MODE`"]
pub struct ADC_MODE_W<'a> {
    w: &'a mut W,
}
impl<'a> ADC_MODE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: ADC_MODE_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "No ADC measurement"]
    #[inline(always)]
    pub fn off(self) -> &'a mut W {
        self.variant(ADC_MODE_A::OFF)
    }
    #[doc = "Count time A to bring Cref1 + Cref2 up from Vssa to Vrefhi with IDACB"]
    #[inline(always)]
    pub fn vref_cnt(self) -> &'a mut W {
        self.variant(ADC_MODE_A::VREF_CNT)
    }
    #[doc = "Count time B to bring Cref1 + Cref2 back up to Vrefhi with IDACB (after bringing them down for time A/2 cycles with IDACB sinking)"]
    #[inline(always)]
    pub fn vref_by2_cnt(self) -> &'a mut W {
        self.variant(ADC_MODE_A::VREF_BY2_CNT)
    }
    #[doc = "Determine HSCMP polarity and count time C to source/sink Cref1 + Cref2 from Vin to Vrefhi."]
    #[inline(always)]
    pub fn vin_cnt(self) -> &'a mut W {
        self.variant(ADC_MODE_A::VIN_CNT)
    }
    #[doc = r"Writes raw bits to the field"]
    // Safe (not `unsafe`): the enumerated values cover every 2-bit pattern.
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 16..=17: clear, then merge the shifted value.
        self.w.bits = (self.w.bits & !(0x03 << 16)) | (((value as u32) & 0x03) << 16);
        self.w
    }
}
// Field accessors on the whole-register reader.
impl R {
    #[doc = "Bits 0:7 - ADC timing -1 in csd_sense clock cycles (actual time is ADC_TIME+1 cycles), either used to discharge Cref1&2, or as the aperture to capture the input voltage on Cref1&2"]
    #[inline(always)]
    pub fn adc_time(&self) -> ADC_TIME_R {
        ADC_TIME_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 16:17 - Enable ADC measurement. When enabled the ADC sequencer will be started when the main sequencer goes to the SAMPLE_NORM state"]
    #[inline(always)]
    pub fn adc_mode(&self) -> ADC_MODE_R {
        ADC_MODE_R::new(((self.bits >> 16) & 0x03) as u8)
    }
}
// Field write proxies on the whole-register writer.
impl W {
    #[doc = "Bits 0:7 - ADC timing -1 in csd_sense clock cycles (actual time is ADC_TIME+1 cycles), either used to discharge Cref1&2, or as the aperture to capture the input voltage on Cref1&2"]
    #[inline(always)]
    pub fn adc_time(&mut self) -> ADC_TIME_W {
        ADC_TIME_W { w: self }
    }
    #[doc = "Bits 16:17 - Enable ADC measurement. When enabled the ADC sequencer will be started when the main sequencer goes to the SAMPLE_NORM state"]
    #[inline(always)]
    pub fn adc_mode(&mut self) -> ADC_MODE_W {
        ADC_MODE_W { w: self }
    }
}
|
use std::io;
use thiserror::Error;
use crate::store::ID;
pub type ReviseResult<T> = Result<T, ReviseError>;
#[derive(Error, Debug)]
pub enum ReviseError {
#[error("Rusqlite error: {0}")]
RusqliteError(#[from] rusqlite::Error),
#[error("Not found: {0}")]
NotFoundError(ID),
#[error("data store disconnected")]
IOError(#[from] io::Error),
}
|
// q0041_first_missing_positive
struct Solution;

impl Solution {
    /// Returns the smallest positive integer (>= 1) missing from `nums`.
    ///
    /// Cycle-sort approach: every value v in 1..=len is swapped into index
    /// v-1; afterwards the first index i with nums[i] != i+1 identifies the
    /// answer. O(n) time, O(1) extra space (each swap homes one value, so
    /// the while loop does at most 2n iterations).
    pub fn first_missing_positive(nums: Vec<i32>) -> i32 {
        let mut nums = nums;
        let nlen = nums.len();
        let mut i = 0;
        while i < nlen {
            let cur_v = nums[i];
            // Advance when the value is already home, outside the placeable
            // range 1..=nlen, or its home slot already holds a duplicate.
            // Order matters: the duplicate check indexes nums[cur_v - 1] and
            // is only reached once cur_v is known to be in range.
            if cur_v == (i + 1) as i32
                || cur_v <= 0
                || cur_v > nlen as i32
                || cur_v == nums[cur_v as usize - 1]
            {
                i += 1;
                continue;
            }
            // Send cur_v to its home slot; index i is re-examined next pass.
            nums.swap(i, cur_v as usize - 1);
        }
        // First slot whose value is not i+1 gives the missing integer.
        for (i, &v) in nums.iter().enumerate() {
            let n = (i + 1) as i32;
            if n != v {
                return n;
            }
        }
        // All of 1..=nlen present; also covers the empty input (0 + 1 = 1).
        (nlen + 1) as i32
    }
}
#[cfg(test)]
mod tests {
    use super::Solution;
    #[test]
    fn it_works() {
        assert_eq!(1, Solution::first_missing_positive(vec![7, 8, 9, 11, 12]));
        assert_eq!(2, Solution::first_missing_positive(vec![3, 4, -1, 1]));
        assert_eq!(3, Solution::first_missing_positive(vec![1, 2, 0]));
        assert_eq!(4, Solution::first_missing_positive(vec![1, 2, 5, 7, 2, 3]));
        assert_eq!(1, Solution::first_missing_positive(vec![]));
    }
}
|
use quote::quote_spanned;
use syn::parse_quote_spanned;
use super::{
FlowProperties, FlowPropertyVal, OperatorCategory, OperatorConstraints, OperatorWriteOutput,
WriteContextArgs, RANGE_0, RANGE_1,
};
use crate::graph::OperatorInstance;
/// > 0 input streams, 1 output stream
///
/// > Arguments: A [`Duration`](https://doc.rust-lang.org/stable/std/time/struct.Duration.html) for this interval.
///
/// Emits [Tokio time `Instant`s](https://docs.rs/tokio/1/tokio/time/struct.Instant.html) on a
/// repeated interval. The first tick completes immediately. Missed ticks will be scheduled as soon
/// as possible, and the `Instant` will be the missed time, not the late time.
///
/// Note that this requires the hydroflow instance be run within a [Tokio `Runtime`](https://docs.rs/tokio/1/tokio/runtime/struct.Runtime.html).
/// The easiest way to do this is with a [`#[hydroflow::main]`](https://hydro-project.github.io/hydroflow/doc/hydroflow/macro.hydroflow_main.html)
/// annotation on `async fn main() { ... }` as in the example below.
///
/// ```rustbook
/// use std::time::Duration;
///
/// use hydroflow::hydroflow_syntax;
///
/// #[hydroflow::main]
/// async fn main() {
/// let mut hf = hydroflow_syntax! {
/// source_interval(Duration::from_secs(1))
/// -> for_each(|time| println!("This runs every second: {:?}", time));
/// };
///
/// // Will print 4 times (fencepost counting).
/// tokio::time::timeout(Duration::from_secs_f32(3.5), hf.run_async())
/// .await
/// .expect_err("Expected time out");
///
/// // Example output:
/// // This runs every second: Instant { t: 27471.704813s }
/// // This runs every second: Instant { t: 27472.704813s }
/// // This runs every second: Instant { t: 27473.704813s }
/// // This runs every second: Instant { t: 27474.704813s }
/// }
/// ```
pub const SOURCE_INTERVAL: OperatorConstraints = OperatorConstraints {
    name: "source_interval",
    categories: &[OperatorCategory::Source],
    // Pure source operator: zero inputs, exactly one output.
    hard_range_inn: RANGE_0,
    soft_range_inn: RANGE_0,
    hard_range_out: RANGE_1,
    soft_range_out: RANGE_1,
    // Single argument: the tick `Duration`.
    num_args: 1,
    persistence_args: RANGE_0,
    type_args: RANGE_0,
    is_external_input: true,
    ports_inn: None,
    ports_out: None,
    properties: FlowProperties {
        deterministic: FlowPropertyVal::Yes,
        monotonic: FlowPropertyVal::Yes,
        inconsistency_tainted: false,
    },
    input_delaytype_fn: |_| None,
    write_fn: |wc @ &WriteContextArgs {
                   root,
                   op_span,
                   op_inst: OperatorInstance { arguments, .. },
                   ..
               },
               diagnostics| {
        // Bind a tokio IntervalStream in the prologue...
        let ident_intervalstream = wc.make_ident("intervalstream");
        let mut write_prologue = quote_spanned! {op_span=>
            let #ident_intervalstream =
                #root::tokio_stream::wrappers::IntervalStream::new(#root::tokio::time::interval(#arguments));
        };
        // ...then delegate codegen to `source_stream`, substituting the
        // stream identifier for the original Duration argument.
        let wc = WriteContextArgs {
            op_inst: &OperatorInstance {
                arguments: parse_quote_spanned!(op_span=> #ident_intervalstream),
                ..wc.op_inst.clone()
            },
            ..wc.clone()
        };
        let write_output = (super::source_stream::SOURCE_STREAM.write_fn)(&wc, diagnostics)?;
        // Our prologue must run before the delegate's.
        write_prologue.extend(write_output.write_prologue);
        Ok(OperatorWriteOutput {
            write_prologue,
            ..write_output
        })
    },
};
|
use super::super::common;
use anyhow::Result;
use std::process::{Command, Stdio};
/// Running the binary with the long `--help` flag must exit successfully.
#[test]
fn cli_print_help_long() -> Result<()> {
    let exe = common::get_rcterm_exec_path()?;
    let exit_status = Command::new(exe)
        .arg("--help")
        // Discard the help text; only the exit code matters here.
        .stdout(Stdio::null())
        .status()?;
    assert!(exit_status.success());
    Ok(())
}
/// Running the binary with the short `-h` flag must exit successfully.
#[test]
fn cli_print_help_short() -> Result<()> {
    let exe = common::get_rcterm_exec_path()?;
    let exit_status = Command::new(exe)
        .arg("-h")
        // Discard the help text; only the exit code matters here.
        .stdout(Stdio::null())
        .status()?;
    assert!(exit_status.success());
    Ok(())
}
|
//! Query language AST and parsing utilities
//!
mod ast;
mod format;
mod grammar;
pub(crate) mod minified;
pub mod refs;
pub use self::ast::*;
mod visit;
pub use self::visit::*;
mod name;
pub use self::grammar::{fragment_definition, operation_definition, parse_query};
pub use self::name::*;
|
pub mod camera;
pub mod model;
pub mod texture;
|
use sqlx::sqlite::{SqliteConnectOptions, SqlitePool};
use std::str::FromStr;
use crate::libs::config::CONFIG;
use sqlx::ConnectOptions;
/// Shared application state passed to request handlers.
#[derive(Clone)]
pub struct Context {
    /// SQLite connection pool; cloning the pool is cheap and shares connections.
    pub db: SqlitePool,
}
impl Context {
    /// Build the context: open the SQLite database named by `CONFIG.db_uri`
    /// (creating the file if missing) and apply pending migrations.
    ///
    /// Panics on an invalid URI, a failed connection, or a failed migration —
    /// this runs once at startup, where aborting is the sensible outcome.
    pub(crate) async fn new() -> Self {
        let db_uri = CONFIG.db_uri.as_str();
        debug!("Connect to database: {}", db_uri);
        let mut options = SqliteConnectOptions::from_str(db_uri)
            .unwrap().create_if_missing(true);
        // NOTE(review): presumably `log_statements` mutates `options` in place
        // (returns `&mut Self` in this sqlx version) — confirm; newer sqlx
        // versions return an owned value that would be dropped here.
        options.log_statements(log::LevelFilter::Debug);
        let db = SqlitePool::connect_with(options).await.unwrap();
        sqlx::migrate!("./migrations").run(&db).await.unwrap();
        Context { db }
    }
}
|
use std::thread;
use std::rc::Rc;
use std::sync::{Arc, Mutex, Condvar};
use std::cell::{Cell, RefCell };
use std::marker::PhantomData;
use std::time::Duration;
use std::collections::VecDeque;
fn main() {
    // Scratchpad driver: exactly one experiment is enabled at a time.
    // Uncomment a different line to run that example instead.
    // o_call();
    // t_call();
    // t_call2();
    // f_call();
    // f_call2();
    // f3(&5, &mut 2);
    // f4();
    // f5(&Cell::new(4), &Cell::new(5));
    // f6(&Cell::new(vec![2, 3]));
    // c();
    // c2();
    c4();
}
/// Move a vector into a worker thread, compute its average there, and
/// print the joined result.
fn o_call() {
    let numbers = Vec::from_iter(0..=1000);
    // `move` transfers ownership of `numbers` into the spawned thread.
    let worker = thread::spawn(move || {
        let count = numbers.len();
        let total: usize = numbers.into_iter().sum();
        total / count
    });
    let average = worker.join().unwrap();
    println!("average: {average}");
}
/// Scoped threads may borrow `numbers` because the scope guarantees they
/// finish before the borrow ends.
fn t_call() {
    let numbers = vec![1, 2, 3];
    thread::scope(|s| {
        s.spawn(|| {
            // Fixed typo in the printed message: "lenght" -> "length".
            println!("length: {}", numbers.len());
        });
        s.spawn(|| {
            for i in &numbers {
                println!("{i}");
            }
        });
    });
}
/// Leaking a `Box` yields a `&'static` reference, which `Copy`s freely
/// into any number of threads.
fn t_call2() {
    let x: &'static [i32; 3] = Box::leak(Box::new([1, 2, 3]));
    thread::scope(|scope| {
        scope.spawn(move || dbg!(x));
        scope.spawn(move || dbg!(x));
    });
}
/// Cloning an `Rc` only bumps a reference count: both handles point at
/// the very same allocation.
fn f_call() {
    let a = Rc::new([1, 2, 3]);
    let b = Rc::clone(&a);
    // Same allocation, therefore identical data pointers.
    assert_eq!(a.as_ptr(), b.as_ptr());
    println!("{a:?}, {b:?}");
}
/// `Arc` allows several threads to own the same allocation. The
/// clone-inside-a-block pattern gives each closure its own handle while
/// keeping the name `a` in scope afterwards.
fn f_call2() {
    let a = Arc::new([1, 2, 3]);
    let b = a.clone();
    // Spawn-and-join twice; each closure captures a fresh clone of `a`.
    for _ in 0..2 {
        thread::spawn({
            let a = Arc::clone(&a);
            move || {
                dbg!(a);
            }
        })
        .join()
        .unwrap();
    }
    let _ = b; // second handle kept alive until here, as in the original demo
    dbg!(a);
}
/// Shows that the value behind a shared `&i32` cannot change while a
/// distinct `&mut i32` is mutated: `before` always equals `after`.
fn f3(a: &i32, b: &mut i32) {
    let before = *a;
    println!("{a}, {b}, {before}");
    *b += 1;
    println!("{a}, {b}, {before}");
    let after = *a;
    println!("{a}, {b}, {before}, {after}");
    // Never taken: `*a` is immutable for the duration of this call.
    if after != before {
        //x();
    }
}
fn f4() {
    let a = [123, 456, 789];
    // SAFETY: index 2 is in bounds for the 3-element array above, so the
    // unchecked access cannot read outside the array.
    let b = unsafe { a.get_unchecked(2) };
    dbg!(b);
}
/// Same experiment as `f3` but with `Cell`s: interior mutability means
/// `a` and `b` could alias the same cell, so `before` and `after` may
/// legitimately differ.
fn f5(a: &Cell<i32>, b: &Cell<i32>) {
    let before = a.get();
    println!("{a:?}, {b:?}, {before:?}");
    let bumped = b.get() + 1;
    b.set(bumped);
    println!("{a:?}, {b:?}, {before:?}");
    let after = a.get();
    println!("{a:?}, {b:?}, {before:?}, {after:?} \n\n");
    if after != before {
        // X(); some function
    }
}
/// Mutate the `Vec` inside a `Cell` by temporarily taking ownership.
fn f6(a: &Cell<Vec<i32>>) {
    // `take` swaps in an empty Vec, handing us exclusive ownership.
    let mut contents = a.take();
    contents.push(1);
    // Put the updated Vec back; the Cell never hands out references.
    a.set(contents);
}
/// Push through a `RefCell` using a runtime-checked mutable borrow.
fn f7(v: &RefCell<Vec<i32>>) {
    let mut inner = v.borrow_mut();
    inner.push(1);
}
/// Example of opting out of `Sync`: `PhantomData<Cell<()>>` is zero-sized,
/// but `Cell` is `!Sync`, so `X` is `!Sync` as well.
struct X {
    handle: i32,
    // Marker field only — carries no data, just the auto-trait opt-out.
    _not_sync: PhantomData<Cell<()>>,
}
/// Raw-pointer wrapper manually promised to be thread-safe.
struct Y {
    p: *mut i32,
}
// SAFETY: NOTE(review): these impls assert that `p` may be sent and shared
// across threads. Nothing in this file enforces that invariant — every use
// of `Y` must uphold it; confirm before relying on these impls.
unsafe impl Send for Y {}
unsafe impl Sync for Y {}
/// `Rc` is `!Send`, so moving `a` into a spawned thread would not compile;
/// the commented spawn below documents the attempt. (RwLock is the
/// concurrent analogue of RefCell.)
fn c() {
    let a = Rc::new(123);
    // thread::spawn(move || {
    //     dbg!(a);
    // });
    let _ = a;
}
/// Ten threads each add 100 to a shared counter behind a `Mutex`.
fn c2() {
    // Mutex is mutual exclusion
    let n = Mutex::new(0);
    // Not parallel: this first (commented) version slept while still
    // holding the guard, fully serializing the threads.
    // thread::scope(|s| {
    //     for _ in 0..10 {
    //         s.spawn(|| {
    //             let mut guard = n.lock().unwrap();
    //
    //             for _ in 0..100 {
    //                 *guard += 1;
    //                 println!("{guard}");
    //             }
    //
    //             thread::sleep(Duration::from_secs(1));
    //         });
    //     }
    // });
    thread::scope(|s| {
        for _ in 0..10 {
            s.spawn(|| {
                let mut guard = n.lock().unwrap();
                for _ in 0..100 {
                    *guard += 1;
                    println!("{guard}");
                }
                // Release the lock *before* sleeping so the other threads
                // can make progress during the pause.
                drop(guard);
                thread::sleep(Duration::from_secs(1));
            });
        }
    });
    // All scoped threads have joined here, so into_inner is race-free.
    assert_eq!(n.into_inner().unwrap(), 1000);
}
/// Illustrates how long a `MutexGuard` lives in various expression forms.
fn c3() {
    let list = Mutex::new(Vec::from([1, 2, 3]));
    list.lock().unwrap().push(1); // lock, push, and release the mutex in a single statement
    // The guard is NOT dropped until the end of the `if let {}` block.
    if let Some(item) = list.lock().unwrap().pop() {
        // process_item(item);
    }
    // A plain `if` on a boolean is better here:
    if list.lock().unwrap().pop() == Some(1) {
        // do_something();
    }
    // ...because `if` evaluates a boolean condition, so the guard is
    // dropped before the body runs.
    // Alternative to `if let`: pop while locked, then match on the owned value.
    let item = list.lock().unwrap().pop();
    if let Some(item) = item {
        // process_item(item)
    }
}
/// Mutex-protected queue with a parked consumer thread.
///
/// The consumer pops items and parks when the queue is empty; the producer
/// pushes one item per iteration, unparks the consumer, then paces itself.
/// (Previously the `unpark` and the sleep sat *after* the infinite producer
/// loop and were unreachable, so a parked consumer could never be woken
/// while the producer filled the queue without bound.)
fn c4() {
    let queue = Mutex::new(VecDeque::new());
    thread::scope(|s| {
        // Consumer: pop if available, otherwise park until woken.
        let t = s.spawn(|| loop {
            let item = queue.lock().unwrap().pop_front();
            if let Some(item) = item {
                dbg!(item);
            } else {
                thread::park();
            }
        });
        // Producer: push, wake the consumer, then pace the loop.
        for i in 0.. {
            queue.lock().unwrap().push_back(i);
            t.thread().unpark();
            thread::sleep(Duration::from_secs(1));
        }
    })
}
/// Condvar-based producer/consumer: no busy-waiting and no parking races.
fn c5() {
    let queue = Mutex::new(VecDeque::new());
    let not_empty = Condvar::new();
    thread::scope(|s| {
        s.spawn(|| {
            loop {
                let mut q = queue.lock().unwrap();
                let item = loop {
                    if let Some(item) = q.pop_front() {
                        break item;
                    } else {
                        // `wait` atomically releases the lock and sleeps;
                        // it re-acquires the lock before returning.
                        q = not_empty.wait(q).unwrap();
                    }
                };
                // Release the lock before the (potentially slow) dbg!.
                drop(q);
                dbg!(item);
            }
        });
        for i in 0.. {
            queue.lock().unwrap().push_back(i);
            // Wake the consumer in case it is waiting on the condvar.
            not_empty.notify_one();
            thread::sleep(Duration::from_secs(1));
        }
    });
}
/// Splits a line into space-separated tokens, one per `next()` call.
pub struct Tokenizer {
    // Remaining unconsumed portion of the line; None once exhausted.
    current: Option<String>,
}
impl Tokenizer {
    /// Create a tokenizer over `line`. The line is copied so the tokenizer
    /// owns its data.
    fn new(line: &str) -> Self {
        Tokenizer {
            current: Some(line.to_string()),
        }
    }
}
impl Iterator for Tokenizer {
    type Item = String;
    /// Yield the next non-empty token.
    ///
    /// Empty fragments produced by leading or repeated spaces are now
    /// skipped instead of terminating iteration early. (Previously
    /// `"a  b"` yielded only `"a"`: the empty fragment between the two
    /// spaces was returned as `None`, ending the iterator with `"b"`
    /// still pending.)
    fn next(&mut self) -> Option<Self::Item> {
        while let Some(s) = self.current.take() {
            match s.split_once(' ') {
                Some((token, rest)) => {
                    // Keep the remainder for subsequent calls.
                    self.current = Some(rest.to_string());
                    if !token.is_empty() {
                        return Some(token.to_string());
                    }
                    // Empty token (consecutive spaces): keep scanning.
                }
                None => {
                    // Last fragment: the tokenizer is exhausted after this.
                    if !s.is_empty() {
                        return Some(s);
                    }
                }
            }
        }
        None
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Removed needless double borrows: `Tokenizer::new` takes `&str`, so
    // passing `&""` (a `&&str`) relied on deref coercion for no benefit.
    #[test]
    fn test_empty_string() {
        // An empty line yields no tokens and is immediately exhausted.
        let mut line = Tokenizer::new("");
        assert_eq!(None, line.next());
        assert_eq!(None, line.current);
    }
    #[test]
    fn two_word_string() {
        let mut line = Tokenizer::new("Hello World");
        assert_eq!("Hello".to_string(), line.next().unwrap());
        // The unconsumed remainder stays buffered in `current`.
        assert_eq!("World".to_string(), line.current.unwrap());
    }
    #[test]
    fn test_multiple_calls() {
        let mut line = Tokenizer::new("Hello Darkness My Old Friend");
        assert_eq!("Hello".to_string(), line.next().unwrap());
        assert_eq!("Darkness".to_string(), line.next().unwrap());
        assert_eq!("My".to_string(), line.next().unwrap());
        assert_eq!("Old".to_string(), line.next().unwrap());
        assert_eq!("Friend".to_string(), line.next().unwrap());
        assert_eq!(None, line.next());
    }
}
// Copyright 2019 The vault713 Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::message::*;
use super::multisig::{Builder as MultisigBuilder, Hashed};
use super::ser::*;
use super::types::*;
use super::{ErrorKind, Keychain};
use crate::grin_core::core::verifier_cache::LruVerifierCache;
use crate::grin_core::core::{
transaction as tx, CommitWrapper, Inputs, KernelFeatures, OutputIdentifier, TxKernel, Weighting,
};
use crate::grin_core::libtx::secp_ser;
use crate::grin_core::ser;
use crate::grin_keychain::{Identifier, SwitchCommitmentType};
use crate::grin_util::secp::key::{PublicKey, SecretKey};
use crate::grin_util::secp::pedersen::{Commitment, RangeProof};
use crate::grin_util::secp::{Message as SecpMessage, Secp256k1, Signature};
use crate::grin_util::RwLock;
use crate::swap::fsm::state::StateId;
use crate::{NodeClient, Slate};
use chrono::{DateTime, Utc};
use std::sync::Arc;
use uuid::Uuid;
/// Thin wrapper carrying a hex-encoded serialized transaction.
#[derive(Serialize, Deserialize)]
pub struct TxWrapper {
    /// hex representation of transaction
    pub tx_hex: String,
}
/// Swap event: one entry in a trade's journal log.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SwapJournalRecord {
    /// Unix timestamp of when the event happened
    pub time: i64,
    /// Description of what happened at that time.
    pub message: String,
}
/// Primary SWAP state. Both Seller and Buyer are using it.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Swap {
    /// Swap session uuid
    pub id: Uuid,
    /// Swap engine version. Both parties are expected to have the same version
    pub version: u8,
    /// Network for the swap session (mainnet/floonet)
    pub network: Network,
    /// Role of the party (Buyer or Seller)
    pub role: Role,
    /// Method how we are sending message to other party
    pub communication_method: String,
    /// destination address of the other party
    pub communication_address: String,
    /// Flag that specify the Locking fund order (Will wait for the fact that transaction is publishing, not for all confirmations).
    /// true: Seller lock MWC first, then Buyer BTC.
    /// false: Buyer lock BTC first, then Seller does lock.
    pub seller_lock_first: bool,
    /// Time when we started swap session
    pub started: DateTime<Utc>,
    /// Current state for this swap session
    pub state: StateId,
    /// MWC amount that Seller offer
    #[serde(with = "secp_ser::string_or_u64")]
    pub primary_amount: u64,
    /// BTC amount that Buyer pay
    #[serde(with = "secp_ser::string_or_u64")]
    pub secondary_amount: u64,
    /// units for BTC
    pub secondary_currency: Currency,
    /// Data associated with BTC deal
    pub secondary_data: SecondaryData,
    #[serde(
        serialize_with = "option_pubkey_to_hex",
        deserialize_with = "option_pubkey_from_hex"
    )]
    /// Buyer Redeem slate public key
    pub(super) redeem_public: Option<PublicKey>,
    /// Schnorr multisig this party participant id
    pub(super) participant_id: usize,
    /// Schnorr multisig builder and holder
    pub(super) multisig: MultisigBuilder,
    /// MWC Lock Slate
    #[serde(deserialize_with = "slate_deser")]
    pub lock_slate: Slate,
    /// MWC Refund Slate
    #[serde(deserialize_with = "slate_deser")]
    pub refund_slate: Slate,
    #[serde(deserialize_with = "slate_deser")]
    /// MWC redeem slate
    pub redeem_slate: Slate,
    /// Redeem slate was updated. Seller spot it on the blockchain and get the kernel updated.
    pub redeem_kernel_updated: bool,
    /// Signature that is done with multisig
    #[serde(
        serialize_with = "option_sig_to_hex",
        deserialize_with = "option_sig_from_hex"
    )]
    /// Multisig signature
    pub(super) adaptor_signature: Option<Signature>,
    /// Required confirmations for MWC Locking
    pub mwc_confirmations: u64,
    /// Required confirmations for BTC Locking
    pub secondary_confirmations: u64,
    /// Time interval for message exchange session.
    pub message_exchange_time_sec: u64,
    /// Time interval needed to redeem or execute a refund transaction.
    pub redeem_time_sec: u64,
    /// First message that was sent, keep for retry operations
    pub message1: Option<Message>,
    /// Second message that was sent, keep for retry operations
    pub message2: Option<Message>,
    /// timestamp when message 1 was sent
    pub posted_msg1: Option<i64>,
    /// timestamp when message 2 was sent
    pub posted_msg2: Option<i64>,
    /// timestamp when lock transaction was posted
    pub posted_lock: Option<i64>,
    /// timestamp when redeem transaction was posted
    pub posted_redeem: Option<i64>,
    /// timestamp when refund transaction was posted
    pub posted_refund: Option<i64>,
    /// At what height the secondary redeem transaction was posted. Secondary refund doesn't use
    /// that because timing is not critical.
    pub posted_secondary_height: Option<u64>,
    /// Last error message if --check was failed. Note, error will be very generic
    pub last_check_error: Option<String>,
    /// Last error message if --process was failed. Note, error will be very generic
    pub last_process_error: Option<(StateId, String)>,
    /// Event log for this swap trade.
    pub journal: Vec<SwapJournalRecord>,
    /// Secondary fee as it comes from the parameters or default value.
    /// Fee units might be changed from Currency to Currency
    pub secondary_fee: f32,
    /// ElectrumX URI1
    pub electrum_node_uri1: Option<String>,
    /// ElectrumX failover URI2
    pub electrum_node_uri2: Option<String>,
    /// Ethereum Swap Contract Address
    pub eth_swap_contract_address: Option<String>,
    /// ERC20 Swap Contract Address
    pub erc20_swap_contract_address: Option<String>,
    /// Ethereum Infura Project Id
    pub eth_infura_project_id: Option<String>,
    /// Ethereum transfer to users' wallet directly
    pub eth_redirect_to_private_wallet: Option<bool>,
    // --------------------------------
    // Additional non stored params for the trade. They are good for a single call, not for all sessions
    /// For the step 1 backup. The QT wallet will take care about the backup. Here we should wait until
    /// it is done by user
    #[serde(skip)]
    #[serde(default = "default_false")]
    pub wait_for_backup1: bool,
    /// Tag for the swap. Using for swap marketplace automation process.
    pub tag: Option<String>,
    /// Flag that other party locking is confirmed. Utility flag for swap marketplace
    #[serde(default = "default_false")]
    pub other_lock_first_done: bool,
}
/// Serde `default` helper: fields tagged `#[serde(default = "default_false")]`
/// deserialize to `false` when absent from the input.
fn default_false() -> bool {
    false
}
impl Swap {
    /// Return true for Seller
    pub fn is_seller(&self) -> bool {
        match self.role {
            Role::Seller(_, _) => true,
            Role::Buyer(_) => false,
        }
    }
    /// Get MWC lock slate, change outputs.
    /// Change = sum of this party's inputs minus the offered primary amount
    /// (saturating, so an underfunded context yields 0 rather than a panic).
    pub fn change_output<K: Keychain>(
        &self,
        keychain: &K,
        context: &Context,
    ) -> Result<(Identifier, u64, Commitment), ErrorKind> {
        assert!(self.is_seller());
        let scontext = context.unwrap_seller()?;
        let identifier = scontext.change_output.clone();
        let amount = scontext
            .inputs
            .iter()
            .fold(0, |acc, (_, _, value)| acc + *value)
            .saturating_sub(self.primary_amount);
        let commit = keychain.commit(amount, &identifier, SwitchCommitmentType::Regular)?;
        Ok((identifier, amount, commit))
    }
    /// Return Seller specific data: (secondary address, change amount).
    /// The all-zero ETH address is mapped to a human-readable label.
    pub fn unwrap_seller(&self) -> Result<(String, u64), ErrorKind> {
        match &self.role {
            Role::Seller(address, change) => {
                match address == "0x0000000000000000000000000000000000000000" {
                    true => Ok(("Internal Ethereum Wallet".to_string(), *change)),
                    _ => Ok((address.clone(), *change)),
                }
            }
            _ => Err(ErrorKind::UnexpectedRole(
                "Swap call unwrap_seller".to_string(),
            )),
        }
    }
    /// Return buyer specific data (optional secondary address).
    /// The all-zero ETH address is mapped to a human-readable label.
    pub fn unwrap_buyer(&self) -> Result<Option<String>, ErrorKind> {
        match &self.role {
            Role::Buyer(address) => match address {
                Some(address) => match address == "0x0000000000000000000000000000000000000000" {
                    true => Ok(Some("Internal Ethereum Wallet".to_string())),
                    _ => Ok(Some(address.clone())),
                },
                _ => Ok(address.clone()),
            },
            _ => Err(ErrorKind::UnexpectedRole(
                "Swap call unwrap_buyer".to_string(),
            )),
        }
    }
    /// Update secondary address. Depend on role, updates redeem or refund secondary currency address
    pub fn update_secondary_address(&mut self, secondary_address: String) {
        let mut sec_addr: String = secondary_address;
        // For non-BTC-family (i.e. Ethereum) currencies, replace the all-zero
        // address with the internal-wallet label.
        if !self.secondary_currency.is_btc_family() {
            if sec_addr.as_str() == "0x0000000000000000000000000000000000000000" {
                sec_addr = "Internal Ethereum Wallet Address!".to_string()
            }
        }
        match &mut self.role {
            Role::Buyer(address) => {
                address.replace(sec_addr);
            }
            Role::Seller(address, _) => {
                *address = sec_addr;
            }
        };
    }
    /// Get secondary address. Depend on role, returns redeem or refund secondary currency address
    pub fn get_secondary_address(&self) -> String {
        match &self.role {
            Role::Buyer(address) => address.clone().unwrap_or(String::new()),
            Role::Seller(address, _) => address.clone(),
        }
    }
    /// Build an outgoing message for this swap session.
    pub(super) fn message(
        &self,
        inner: Update,
        inner_secondary: SecondaryUpdate,
    ) -> Result<Message, ErrorKind> {
        Ok(Message::new(self.id.clone(), inner, inner_secondary))
    }
    /// Derive this party's multisig secret from the keychain.
    pub(super) fn multisig_secret<K: Keychain>(
        &self,
        keychain: &K,
        context: &Context,
    ) -> Result<SecretKey, ErrorKind> {
        let sec_key = keychain.derive_key(
            self.primary_amount,
            &context.multisig_key,
            SwitchCommitmentType::None,
        )?;
        Ok(sec_key)
    }
    /// Amount refundable to the seller: locked amount minus the refund tx fee.
    // NOTE(review): underflows (panics in debug builds) if fee > primary_amount;
    // presumed protocol invariant — confirm.
    pub(super) fn refund_amount(&self) -> u64 {
        self.primary_amount - self.refund_slate.fee
    }
    /// Aggregate the redeem slate participants' nonces and blind excesses,
    /// and build the kernel signature message for its fee.
    pub(super) fn redeem_tx_fields(
        &self,
        redeem_slate: &Slate,
    ) -> Result<(PublicKey, PublicKey, SecpMessage), ErrorKind> {
        let pub_nonces = redeem_slate
            .participant_data
            .iter()
            .map(|p| &p.public_nonce)
            .collect();
        let pub_nonce_sum = PublicKey::from_combination(pub_nonces)?;
        let pub_blinds = redeem_slate
            .participant_data
            .iter()
            .map(|p| &p.public_blind_excess)
            .collect();
        let pub_blind_sum = PublicKey::from_combination(pub_blinds)?;
        let features = KernelFeatures::Plain {
            fee: redeem_slate.fee,
        };
        let message = features
            .kernel_sig_msg()
            .map_err(|e| ErrorKind::Generic(format!("Unable to generate message, {}", e)))?;
        Ok((pub_nonce_sum, pub_blind_sum, message))
    }
    /// Look up the redeem slate's first kernel on the chain; returns the
    /// kernel and its height if the node has seen it.
    pub(super) fn find_redeem_kernel<C: NodeClient>(
        &self,
        node_client: &C,
    ) -> Result<Option<(TxKernel, u64)>, ErrorKind> {
        let excess = &self
            .redeem_slate
            .tx
            .kernels()
            .get(0)
            .ok_or(ErrorKind::UnexpectedAction(
                "Swap Fn find_redeem_kernel() redeem slate is not initialized, not found kernel"
                    .to_string(),
            ))?
            .excess;
        let res = node_client
            .get_kernel(excess, None, None)?
            .map(|(kernel, height, _)| (kernel, height));
        Ok(res)
    }
    /// Participant id of the counterparty (two-party multisig: 0 <-> 1).
    pub(super) fn other_participant_id(&self) -> usize {
        (self.participant_id + 1) % 2
    }
    /// Common nonce for the BulletProof is sum_i H(C_i) where C_i is the commitment of participant i
    pub(super) fn common_nonce(&self) -> Result<SecretKey, ErrorKind> {
        let hashed_nonces: Vec<SecretKey> = self
            .multisig
            .participants
            .iter()
            .filter_map(|p| p.partial_commitment.as_ref().map(|c| c.hash()))
            .filter_map(|h| h.ok().map(|h| h.to_secret_key()))
            .filter_map(|s| s.ok())
            .collect();
        // Exactly two participants must have contributed a valid commitment.
        if hashed_nonces.len() != 2 {
            return Err(super::multisig::ErrorKind::MultiSigIncomplete.into());
        }
        let sec_key = Secp256k1::blind_sum(hashed_nonces, Vec::new())?;
        Ok(sec_key)
    }
    /// Receive acknowledge for message 1
    // NOTE(review): u32::MAX presumably acts as an "acknowledged" sentinel
    // timestamp (far future) — confirm against the retry logic.
    pub fn ack_msg1(&mut self) {
        if self.posted_msg1.is_some() {
            self.posted_msg1 = Some(u32::MAX as i64);
        }
    }
    /// Receive acknowledge for message 2
    pub fn ack_msg2(&mut self) {
        if self.posted_msg2.is_some() {
            self.posted_msg2 = Some(u32::MAX as i64);
        }
    }
    /// Add a journal message for this swap trade
    pub fn add_journal_message(&mut self, msg: String) {
        self.journal.push(SwapJournalRecord {
            time: get_cur_time(),
            message: msg,
        });
        // We want to limit journal to 1000 items because of the performance.
        while self.journal.len() > 1000 {
            self.journal.remove(0);
        }
    }
    /// Append to the last message.
    pub fn append_to_last_message(&mut self, msg: &String) {
        if let Some(last) = self.journal.last_mut() {
            last.message.push_str(msg.as_str());
        }
    }
    // Time management functions
    /// Trade starting time
    pub fn get_time_start(&self) -> i64 {
        self.started.timestamp()
    }
    /// Offer message exchange session time limit
    pub fn get_time_message_offers(&self) -> i64 {
        self.get_time_start() + self.message_exchange_time_sec as i64
    }
    /// When locking need to be started
    pub fn get_time_start_lock(&self) -> i64 {
        // We can get 5% from the total lock time. We have to post fast
        self.get_time_message_offers()
            + std::cmp::max(
                self.get_timeinterval_mwc_lock(),
                self.get_timeinterval_secondary_lock(),
            ) / 20
    }
    /// When locking time will be expired
    pub fn get_time_locking(&self) -> i64 {
        // for confirmation adding 10% for possible network slow down.
        self.get_time_message_offers()
            + std::cmp::max(
                self.get_timeinterval_mwc_lock(),
                self.get_timeinterval_secondary_lock(),
            )
    }
    /// Second period of the message exchange
    pub fn get_time_message_redeem(&self) -> i64 {
        self.get_time_locking() + self.message_exchange_time_sec as i64
    }
    /// MWC redeem time
    pub fn get_time_mwc_redeem(&self) -> i64 {
        self.get_time_message_redeem() + self.redeem_time_sec as i64
    }
    /// MWC locking time
    pub fn get_time_mwc_lock(&self) -> i64 {
        // Add 10% for network instability
        self.get_time_mwc_redeem() + self.get_timeinterval_mwc_lock()
    }
    /// mwc refund time
    pub fn get_time_mwc_refund(&self) -> i64 {
        // Add 10% for network instability
        self.get_time_mwc_lock() + self.redeem_time_sec as i64
    }
    /// BTC/ETH lock time for script
    pub fn get_time_secondary_lock_script(&self) -> i64 {
        self.get_time_mwc_refund()
            + self.redeem_time_sec as i64
            + self.get_timeinterval_mwc_lock()
            + self.get_timeinterval_secondary_lock()
    }
    /// BTC/ETH lock time publish
    pub fn get_time_secondary_lock_publish(&self) -> i64 {
        // Here is what BTC node said:
        // Only accept nLockTime-using transactions that can be mined in the next
        // block; we don't want our mempool filled up with transactions that can't
        // be mined yet.
        //
        // As a result we have to wait guarantee 1 block. To be safe, we will wait time for 5 blocks before we try to publish
        // 5 is a large number, but according to the testnet, it is needed.
        self.get_time_secondary_lock_script() + self.secondary_currency.block_time_period_sec() * 5
    }
    /// btc redeem time limit
    pub fn get_time_btc_redeem_limit(&self) -> i64 {
        // Using script time as lowest possible value
        self.get_time_secondary_lock_script() - self.get_timeinterval_secondary_lock()
    }
    ////////////////////////////////////////////////////////////
    // Time period functions
    /// MWC locking time interval
    pub fn get_timeinterval_mwc_lock(&self) -> i64 {
        // adding extra 10% for chain instability (60 sec per block)
        self.mwc_confirmations as i64 * 60 * 11 / 10
    }
    /// BTC/ETH locking time interval
    pub fn get_timeinterval_secondary_lock(&self) -> i64 {
        // adding extra 10% for chain instability
        self.secondary_confirmations as i64 * self.secondary_currency.block_time_period_sec() * 11
            / 10
    }
    /// Latest error message. Check has higher priority because it is normally done first
    pub fn get_last_error(&self) -> Option<String> {
        if self.last_check_error.is_some() {
            self.last_check_error.clone()
        } else {
            if let Some((state, err)) = self.last_process_error.clone() {
                // Error is reportable only if it happened for the current state. If state was changed,
                // the error is not applicable any more.
                if state == self.state {
                    Some(err)
                } else {
                    None
                }
            } else {
                None
            }
        }
    }
}
impl ser::Writeable for Swap {
    /// Serialize the swap as length-prefixed JSON bytes.
    fn write<W: ser::Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
        writer.write_bytes(&serde_json::to_vec(self).map_err(|e| {
            ser::Error::CorruptedData(format!("OutputData to json conversion failed, {}", e))
        })?)
    }
}
impl ser::Readable for Swap {
    /// Deserialize a swap from the length-prefixed JSON written by `write`.
    fn read<R: ser::Reader>(reader: &mut R) -> Result<Swap, ser::Error> {
        let data = reader.read_bytes_len_prefix()?;
        serde_json::from_slice(&data[..]).map_err(|e| {
            ser::Error::CorruptedData(format!("Json to outputData conversion failed, {}", e))
        })
    }
}
/// Add an input to a tx at the appropriate position.
///
/// The input list is kept sorted: the new input is inserted at its
/// binary-search position and silently skipped if an equal input is
/// already present.
pub fn tx_add_input(slate: &mut Slate, commit: Commitment) {
    match &mut slate.tx.body.inputs {
        Inputs::FeaturesAndCommit(inputs) => {
            let input = tx::Input {
                features: tx::OutputFeatures::Plain,
                commit,
            };
            // `Err(pos)` is the sorted insertion point; `Ok(_)` means duplicate.
            if let Err(pos) = inputs.binary_search(&input) {
                inputs.insert(pos, input);
            }
        }
        Inputs::CommitOnly(commits) => {
            let cmt = CommitWrapper::from(commit);
            if let Err(pos) = commits.binary_search(&cmt) {
                commits.insert(pos, cmt);
            }
        }
    }
}
/// Add an output to a tx at the appropriate position.
///
/// The output list is kept sorted: the new output is inserted at its
/// binary-search position and silently skipped if an equal output is
/// already present.
pub fn tx_add_output(slate: &mut Slate, commit: Commitment, proof: RangeProof) {
    let output = tx::Output {
        identifier: OutputIdentifier {
            features: tx::OutputFeatures::Plain,
            commit,
        },
        proof,
    };
    let outputs = &mut slate.tx.body.outputs;
    // `Err(pos)` is the sorted insertion point; `Ok(_)` means duplicate.
    if let Err(pos) = outputs.binary_search(&output) {
        outputs.insert(pos, output);
    }
}
/// Interpret the final 32 bytes of the signature as a secret key
///
/// Errors if those bytes do not form a valid secp256k1 scalar.
pub fn signature_as_secret(signature: &Signature) -> Result<SecretKey, ErrorKind> {
    let ser = signature.to_raw_data();
    // Take the trailing 32 bytes of the 64-byte raw signature.
    let key = SecretKey::from_slice(&ser[32..])?;
    Ok(key)
}
/// Serialize a transaction and submit it to the network
///
/// Validates the transaction locally first so an invalid tx is rejected
/// with a clear error instead of being bounced by the node.
pub fn publish_transaction<C: NodeClient>(
    node_client: &C,
    tx: &tx::Transaction,
    fluff: bool,
) -> Result<(), ErrorKind> {
    tx.validate(
        Weighting::AsTransaction,
        Arc::new(RwLock::new(LruVerifierCache::new())),
    )
    .map_err(|e| ErrorKind::UnexpectedAction(format!("slate is not valid, {}", e)))?;
    // `fluff` skips Dandelion stem-phase relay and broadcasts immediately.
    node_client.post_tx(tx, fluff)?;
    Ok(())
}
#[cfg(test)]
lazy_static! {
    // Test-only override for "now"; None means use the real system clock.
    static ref CURRENT_TEST_TIME: RwLock<Option<i64>> = RwLock::new(None);
}
#[cfg(test)]
/// Set the current time as a timestamp for testing. Please be careful, in production it is never called.
pub fn set_testing_cur_time(cur_time: i64) {
    // i64 is Copy — the previous `.clone()` was redundant.
    CURRENT_TEST_TIME.write().replace(cur_time);
}
#[cfg(test)]
/// Remove test timer control for swaps. Will use current system time instead
pub fn reset_testing_cur_time() {
    // Clearing the Option makes get_cur_time() fall back to Utc::now().
    CURRENT_TEST_TIME.write().take();
}
#[cfg(test)]
/// Current time. In tests this is a controlled time that allows us to validate the edge cases;
/// it falls back to the real clock when no override is set.
pub fn get_cur_time() -> i64 {
    match *CURRENT_TEST_TIME.read() {
        Some(time) => time,
        None => Utc::now().timestamp(),
    }
}
#[cfg(not(test))]
/// Current time for release always returns the fair (system clock) value
pub fn get_cur_time() -> i64 {
    Utc::now().timestamp()
}
/// Print how much time left from the time limit
///
/// Returns an empty string when no limit is set, "expired" when the limit
/// has passed, and otherwise a human-readable countdown.
pub fn left_from_time_limit(time_limit: &Option<i64>) -> String {
    let limit = match time_limit {
        Some(limit) => *limit,
        None => return String::new(),
    };
    let left_sec = limit - get_cur_time();
    if left_sec <= 0 {
        "expired".to_string()
    } else if left_sec > 3600 {
        format!(
            "expired in {} hours {} minutes",
            left_sec / 3600,
            (left_sec % 3600) / 60
        )
    } else if left_sec > 60 {
        format!("expired in {} minutes", left_sec / 60)
    } else {
        format!("expired in {} seconds", left_sec)
    }
}
|
// NOTE(review): this looks like svd2rust-generated register API code —
// prefer regenerating from the SVD over hand-editing.
#[doc = "Register `RCC_PLL1CFGR1` reader"]
pub type R = crate::R<RCC_PLL1CFGR1_SPEC>;
#[doc = "Register `RCC_PLL1CFGR1` writer"]
pub type W = crate::W<RCC_PLL1CFGR1_SPEC>;
#[doc = "Field `DIVN` reader - DIVN"]
pub type DIVN_R = crate::FieldReader<u16>;
#[doc = "Field `DIVN` writer - DIVN"]
pub type DIVN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 9, O, u16>;
#[doc = "Field `DIVM1` reader - DIVM1"]
pub type DIVM1_R = crate::FieldReader;
#[doc = "Field `DIVM1` writer - DIVM1"]
pub type DIVM1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 6, O>;
impl R {
    // Field extractors: the masks/shifts match the bit ranges stated in the
    // #[doc] attributes (DIVN = bits 0:8, DIVM1 = bits 16:21).
    #[doc = "Bits 0:8 - DIVN"]
    #[inline(always)]
    pub fn divn(&self) -> DIVN_R {
        DIVN_R::new((self.bits & 0x01ff) as u16)
    }
    #[doc = "Bits 16:21 - DIVM1"]
    #[inline(always)]
    pub fn divm1(&self) -> DIVM1_R {
        DIVM1_R::new(((self.bits >> 16) & 0x3f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:8 - DIVN"]
    #[inline(always)]
    #[must_use]
    pub fn divn(&mut self) -> DIVN_W<RCC_PLL1CFGR1_SPEC, 0> {
        DIVN_W::new(self)
    }
    #[doc = "Bits 16:21 - DIVM1"]
    #[inline(always)]
    #[must_use]
    pub fn divm1(&mut self) -> DIVM1_W<RCC_PLL1CFGR1_SPEC, 16> {
        DIVM1_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe by svd2rust convention: a raw whole-register write can set
    // reserved bits that field writers would otherwise guard against.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "This register is used to configure the PLL1. If TZEN = , this register can only be modified in secure mode. Write access to this register is not allowed during the clock restore sequence. See Section: The clock restore sequence description for details.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rcc_pll1cfgr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rcc_pll1cfgr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RCC_PLL1CFGR1_SPEC;
impl crate::RegisterSpec for RCC_PLL1CFGR1_SPEC {
    // 32-bit wide register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`rcc_pll1cfgr1::R`](R) reader structure"]
impl crate::Readable for RCC_PLL1CFGR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rcc_pll1cfgr1::W`](W) writer structure"]
impl crate::Writable for RCC_PLL1CFGR1_SPEC {
    // No write-0/write-1-to-clear fields on this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RCC_PLL1CFGR1 to value 0x0001_0031"]
impl crate::Resettable for RCC_PLL1CFGR1_SPEC {
    const RESET_VALUE: Self::Ux = 0x0001_0031;
}
|
use std::fmt::Display;
use std::io::{stdout, Stdout, Write};
use crossterm::{
cursor, execute, queue,
style::{self, style, Attribute, Color, StyledContent},
terminal::{self, size},
};
use crate::display::{Board, Entity, GameDisplay};
use crate::game::{Block, EntityBlock, CHUNK_SIZE};
/// Crossterm-backed terminal renderer for the game board.
pub struct CTDisplay {
    cols: u16,      // terminal width (in cells) measured at startup
    rows: u16,      // terminal height (in cells) measured at startup
    stdout: Stdout, // handle used for all queued draw commands
}
impl GameDisplay for CTDisplay {
    /// Enter raw mode and the alternate screen, hide the cursor, and
    /// capture the terminal size used to center the board.
    fn start() -> Self {
        let mut stdout = stdout();
        terminal::enable_raw_mode()
            .expect("impl GameDisplay for CTDisplay: fn start: terminal::enable_raw_mode()");
        let (cols, rows) = size()
            .expect("impl GameDisplay for CTDisplay: fn start: let (mut cols, mut rows) = size()");
        execute!(stdout, terminal::EnterAlternateScreen, cursor::Hide)
            .expect("impl GameDisplay for CTDisplay: fn start: execute!(");
        CTDisplay { cols, rows, stdout }
    }
    /// Draw the board walls, chunk contents, and entities, then flush.
    ///
    /// NOTE(review): always returns `None`; presumably `Some(())` would
    /// signal something to the caller — confirm the trait's contract.
    fn display<'a>(
        &mut self,
        board: &Board,
        entities: impl Iterator<Item = &'a Entity>,
    ) -> Option<()> {
        // NOTE(review): these u16 subtractions underflow (panic in debug)
        // if the terminal is smaller than the board — confirm callers
        // guarantee a sufficiently large terminal.
        let start_x = self.cols / 2 - board.width() as u16 * CHUNK_SIZE;
        let start_y = (self.rows - board.height() as u16 * CHUNK_SIZE) / 2;
        // Horizontal wall spanning the board width plus both corners.
        let print_h_wall = style::PrintStyledContent(
            style(
                WALL_SHAPE
                    .chars()
                    .cycle()
                    .take((board.width() as usize * CHUNK_SIZE as usize + 2) * 2)
                    .collect::<String>(),
            )
            .with(WALL_COLOR_FG)
            .on(WALL_COLOR_BG)
            .attribute(WALL_ATTRIBUTE),
        );
        // Top and bottom borders.
        queue!(
            self.stdout,
            cursor::MoveTo(start_x - 3, start_y - 1),
            print_h_wall,
            cursor::MoveTo(start_x - 3, start_y + CHUNK_SIZE * board.height() as u16),
            print_h_wall,
        )
        .expect("");
        queue!(self.stdout, cursor::MoveTo(start_x - 1, start_y)).expect("err queue move row");
        // Body: iterate board chunks row by row, printing each chunk's
        // three blocks, and append the side walls at each row's end.
        board.chunks().chunks(board.width().into()).for_each(|row| {
            row.iter()
                .cycle()
                .take((board.width() as u16 * CHUNK_SIZE) as usize)
                .enumerate()
                .for_each(|(n, chunk)| {
                    queue!(
                        self.stdout,
                        style::PrintStyledContent(
                            chunk.blocks[0 + (n / board.width() as usize) * CHUNK_SIZE as usize]
                                .styled_content()
                        ),
                        style::PrintStyledContent(
                            chunk.blocks[1 + (n / board.width() as usize) * CHUNK_SIZE as usize]
                                .styled_content()
                        ),
                        style::PrintStyledContent(
                            chunk.blocks[2 + (n / board.width() as usize) * CHUNK_SIZE as usize]
                                .styled_content()
                        )
                    )
                    .expect("err print borad");
                    // Last chunk of the visual row: close with walls and wrap.
                    if n as u16 % (board.width() as u16) + 1 == board.width() as u16 {
                        queue!(
                            self.stdout,
                            style::PrintStyledContent(Block::Wall.styled_content()),
                            cursor::MoveToColumn(start_x - 2),
                            style::PrintStyledContent(Block::Wall.styled_content()),
                            cursor::MoveDown(1),
                        )
                        .expect("err new line");
                    }
                });
        });
        // Entities are drawn over the board; x is doubled because each
        // cell is rendered two characters wide.
        entities.for_each(|e| {
            queue!(
                self.stdout,
                cursor::MoveTo(start_x + e.pos.0 * 2 - 1, start_y + e.pos.1),
                style::PrintStyledContent(e.block.styled_content())
            )
            .expect("err print entities");
        });
        self.stdout.flush().expect("flush");
        None
    }
}
impl Drop for CTDisplay {
    /// Restore the terminal (raw mode off, cursor shown, main screen back)
    /// even when the game exits via panic unwinding.
    fn drop(&mut self) {
        terminal::disable_raw_mode()
            .expect("impl Drop for CTDisplay: fn drop: terminal::disable_raw_mode()");
        execute!(self.stdout, cursor::Show, terminal::LeaveAlternateScreen)
            .expect("impl Drop for CTDisplay: fn drop: execute!(self.stdout, cursor::Show, terminal::LeaveAlternateScreen)");
    }
}
/// Conversion of a game element into a styled crossterm fragment.
trait CTPrint<'a, D: Display + Clone + 'a> {
    fn styled_content(&self) -> StyledContent<D>;
}
static WALL_SHAPE: &'static str = "[]";
static WALL_COLOR_FG: Color = Color::DarkBlue;
static WALL_COLOR_BG: Color = Color::Black;
static WALL_ATTRIBUTE: Attribute = Attribute::Bold;
impl CTPrint<'_, &'static str> for Block {
    /// Map a board block to its two-character styled cell.
    fn styled_content(&self) -> StyledContent<&'static str> {
        // The styled values are constructed once and cached via lazy_static
        // instead of being rebuilt on every cell print.
        lazy_static! {
            static ref WALL: StyledContent<&'static str> = style(WALL_SHAPE)
                .with(WALL_COLOR_FG)
                .on(WALL_COLOR_BG)
                .attribute(WALL_ATTRIBUTE);
            static ref EMPTY: StyledContent<&'static str> = style("  ");
        }
        match self {
            Block::Wall => *WALL,
            Block::Empty => *EMPTY,
        }
    }
}
impl CTPrint<'_, &'static str> for EntityBlock {
    /// Map an entity to its two-character styled cell.
    fn styled_content(&self) -> StyledContent<&'static str> {
        lazy_static! {
            static ref PLAYER: [StyledContent<&'static str>; 4] = [
                style("<>").with(Color::DarkGreen).on(Color::Black),
                style("<>").with(Color::DarkRed).on(Color::Black),
                style("<>").with(Color::DarkBlue).on(Color::Black),
                style("<>").with(Color::DarkBlue).on(Color::Black),
            ];
            static ref TRAP: StyledContent<&'static str> =
                style("##").with(Color::Cyan).on(Color::Black);
            static ref POINT: StyledContent<&'static str> =
                style("()").with(Color::Yellow).on(Color::Black);
            static ref NONE: StyledContent<&'static str> = style("");
        }
        // NOTE(review): the guard excludes Player(0), so PLAYER[0] (the
        // DarkGreen style) is unreachable and Player(0) renders as NONE —
        // confirm whether player ids start at 0 or 1.
        // NOTE(review): POINT is defined but never returned; presumably a
        // point/pickup variant is meant to use it instead of falling into
        // the catch-all arm — verify against EntityBlock's definition.
        match self {
            EntityBlock::Player(n) if n > &0 && n < &4 => PLAYER[*n as usize],
            EntityBlock::Trap => *TRAP,
            _ => *NONE,
        }
    }
}
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under both the MIT license found in the
* LICENSE-MIT file in the root directory of this source tree and the Apache
* License, Version 2.0 found in the LICENSE-APACHE file in the root directory
* of this source tree.
*/
use proc_macro2::TokenStream;
use quote::{quote, quote_spanned};
use syn::{
spanned::Spanned, Data, DataEnum, DataStruct, Fields, GenericParam, Generics, Ident, Index,
TypeParamBound, Variant,
};
/// Add `bound` to every type parameter of `generics`.
///
/// Lifetime and const parameters are left untouched; only
/// `GenericParam::Type` entries receive the extra bound.
pub fn add_trait_bounds(mut generics: Generics, bound: &TypeParamBound) -> Generics {
    generics
        .params
        .iter_mut()
        .for_each(|param| match param {
            GenericParam::Type(type_param) => type_param.bounds.push(bound.clone()),
            _ => {}
        });
    generics
}
/// Generate the expression duplicating a struct.
///
/// `duplicate` is the path of the per-field duplication function; the
/// returned tokens build a fresh `Self` by applying it to every field.
fn duplicate_struct(data: &DataStruct, duplicate: &TokenStream) -> TokenStream {
    match &data.fields {
        // Self {x: clone(self.x), y: clone(self.y)}
        Fields::Named(fields) => {
            let field_inits = fields.named.iter().map(|field| {
                let name = &field.ident;
                quote_spanned! {field.span() =>
                    #name: #duplicate(&self.#name)
                }
            });
            quote! {
                Self { #(#field_inits, )* }
            }
        }
        // Self(clone(self.0), clone(self.1))
        Fields::Unnamed(fields) => {
            let field_inits = fields.unnamed.iter().enumerate().map(|(position, field)| {
                let index = Index::from(position);
                quote_spanned! {field.span()=>
                    #duplicate(&self.#index)
                }
            });
            quote! {
                Self ( #(#field_inits, )* )
            }
        }
        // Unit struct: nothing to duplicate.
        Fields::Unit => quote!(Self),
    }
}
/// Generate one `match` arm duplicating a single enum variant:
/// a destructuring pattern on the left, the rebuilt variant on the right.
fn duplicate_variant(data: &Variant, duplicate: &TokenStream) -> TokenStream {
    let ctor = &data.ident;
    match data.fields {
        Fields::Named(ref fields) => {
            // Self::Ctor{x,y} => {x: clone(x), y: clone(y)}
            // `pats` are the binding patterns, `es` the rebuilt field exprs;
            // both carry each field's span for good error locations.
            let (pats, es): (Vec<_>, Vec<_>) = fields
                .named
                .iter()
                .map(|f| {
                    let name = &f.ident;
                    (
                        quote_spanned! {f.span() =>
                            #name
                        },
                        quote_spanned! {f.span() =>
                            #name: #duplicate(#name)
                        },
                    )
                })
                .unzip();
            quote! {
                Self::#ctor{ #(#pats, )* } => Self::#ctor{ #(#es, )* }
            }
        }
        Fields::Unnamed(ref fields) => {
            // Self::Ctor(v0,v1) => Self::Ctor(clone(v0), clone(v1))
            let (pats, es): (Vec<_>, Vec<_>) = fields
                .unnamed
                .iter()
                .enumerate()
                .map(|(i, f)| {
                    // Positional fields get synthetic binder names v0, v1, ...
                    let var = Ident::new(&format!("v{}", i), f.span());
                    (
                        quote_spanned! {f.span() => #var},
                        quote_spanned! {f.span() => #duplicate(#var)},
                    )
                })
                .unzip();
            quote! {
                Self::#ctor( #(#pats,)* ) => Self::#ctor( #(#es,)* )
            }
        }
        Fields::Unit => {
            // Self::Ctor => Self::Ctor
            quote!(Self::#ctor => Self::#ctor)
        }
    }
}
/// Generate the duplication body for an enum: a `match self` with one
/// arm per variant.
fn duplicate_enum(data: &DataEnum, duplicate: &TokenStream) -> TokenStream {
    let arms = data
        .variants
        .iter()
        .map(|variant| duplicate_variant(variant, duplicate));
    quote! {
        match self {
            #(#arms, )*
        }
    }
}
pub fn duplicate_impl(data: &Data, duplicate: &TokenStream) -> TokenStream {
match data {
Data::Struct(data) => duplicate_struct(data, duplicate),
Data::Enum(data) => duplicate_enum(data, duplicate),
Data::Union(x) => {
syn::Error::new_spanned(x.union_token, "Can't derive duplication for unions")
.into_compile_error()
}
}
}
|
use itertools::Itertools;
mod common;
/// Iterator over the repeating FFT pattern `[0, 1, 0, -1]`.
///
/// Each base element is repeated `repeat` times and the expanded pattern
/// cycles forever. Because `next` advances *before* yielding, the iterator
/// emits elements starting one past its starting position — matching the
/// FFT rule of skipping the very first pattern value.
struct PatternIterator {
    /// How many times each base element is repeated.
    repeat: usize,
    /// Position within the current run of a repeated element.
    repeat_pos: usize,
    /// Index into `Self::BASE`.
    pos: usize,
}
impl Iterator for PatternIterator {
    type Item = i32;
    fn next(&mut self) -> Option<Self::Item> {
        // Advance first, then yield (see the type-level comment).
        self.repeat_pos += 1;
        if self.repeat_pos == self.repeat {
            self.repeat_pos = 0;
            // Consistency fix: use `Self::BASE` like the line below, rather
            // than spelling out `PatternIterator::BASE`.
            self.pos = (self.pos + 1) % Self::BASE.len();
        }
        Some(Self::BASE[self.pos])
    }
}
impl PatternIterator {
    const BASE: [i32; 4] = [0, 1, 0, -1];
    /// Pattern with each base element repeated `repeat` times, positioned
    /// at absolute offset `start` into the expanded (cyclic) pattern.
    fn new(repeat: usize, start: usize) -> PatternIterator {
        assert!(repeat > 0);
        PatternIterator {
            repeat,
            repeat_pos: start % repeat,
            pos: (start / repeat) % Self::BASE.len(),
        }
    }
}
/// Run `phases` phases of the "Flawed Frequency Transmission" in place.
///
/// Output digit `j` is the last decimal digit of |dot(n[j..], pattern_j)|,
/// where `pattern_j` repeats each element of `[0, 1, 0, -1]` `j + 1` times
/// and skips the leading element. Digits before `start` are neither read
/// for later digits (the loop only zips `n[j..]`) nor modified.
fn fft(n: &mut [i32], phases: usize, start: usize) {
    for _ in 0..phases {
        for j in start..n.len() {
            // Pattern for absolute output index `j`: repeat length `j + 1`,
            // positioned at offset `j` so it lines up with `n[j..]`.
            // Bug fix: the old code used `start + j + 1` / `start + j`,
            // double-counting `start` because `j` already begins at `start`;
            // that produced wrong patterns whenever `start > 0` (the
            // unoptimized part-2 fallback path).
            let pattern = PatternIterator::new(j + 1, j);
            let result = n[j..].iter().zip(pattern).map(|(x, y)| x * y).sum::<i32>();
            n[j] = result.abs() % 10;
        }
    }
}
/// Linear-time FFT phase valid for the back half of the signal.
///
/// For output indices at or past the midpoint every pattern coefficient
/// from that index on is 1, so each output digit is simply the suffix sum
/// modulo 10. The running `sum` starts as the sum of `n[start..]` and has
/// `n[j]` peeled off as `j` advances.
///
/// Takes a slice rather than `&mut Vec` (callers passing `&mut vec` are
/// unaffected thanks to deref coercion).
fn fft2(n: &mut [i32], phases: usize, start: usize) {
    for _ in 0..phases {
        let mut sum = n.iter().skip(start).sum::<i32>();
        #[allow(clippy::needless_range_loop)]
        for j in start..n.len() {
            // `abs` is defensive: for the usual non-negative digit inputs
            // the suffix sum never goes negative.
            let result = sum.abs() % 10;
            sum -= n[j];
            n[j] = result;
        }
    }
}
}
/// Convert a string of decimal digits into a vector of their values.
///
/// Fails on the first character that is not a decimal digit.
fn parse_input(s: &str) -> Result<Vec<i32>, &'static str> {
    let mut digits = Vec::with_capacity(s.len());
    for c in s.chars() {
        match c.to_digit(10) {
            Some(d) => digits.push(d as i32),
            None => return Err("could not parse digit"),
        }
    }
    Ok(digits)
}
/// Concatenate a sequence of digits into a string, e.g. `[1, 2, 3]` -> "123".
///
/// Uses the standard library's `collect::<String>()` instead of itertools'
/// `join("")` — an empty separator join is plain concatenation, so no
/// third-party trait is needed here.
fn result_to_str<'a>(n: impl Iterator<Item = &'a i32>) -> String {
    n.map(|x| x.to_string()).collect()
}
/// Interpret a sequence of digits as a base-10 number, e.g. `[1, 2, 3]` -> 123.
fn sequence_to_number<'a>(n: impl Iterator<Item = &'a i32>) -> i32 {
    let mut value = 0;
    for digit in n {
        value = 10 * value + digit;
    }
    value
}
/// Solve part 2: repeat the input 10 000 times, run `phases` FFT phases,
/// then read `digits` digits starting at the offset encoded in the first
/// seven input digits.
fn part2(n: &[i32], phases: usize, digits: usize) -> String {
    let offset = sequence_to_number(n.iter().take(7)) as usize;
    let mut input: Vec<_> = n.iter().cycle().take(10000 * n.len()).cloned().collect();
    // The suffix-sum shortcut in `fft2` is only valid when the offset lies
    // in the back half of the signal; otherwise fall back to the full
    // (much slower) transform.
    if input.len() / 2 < offset {
        // apply optimized version
        fft2(&mut input, phases, offset);
    } else {
        println!("Warning: can not apply optimized version of FFT");
        fft(&mut input, phases, offset);
    }
    result_to_str(input.iter().skip(offset).take(digits))
}
/// Read one digit string per input line, then print the part-1 and part-2
/// answers for each.
fn main() -> Result<(), &'static str> {
    let numbers: Vec<_> = common::get_lines()
        .into_iter()
        .map(|l| parse_input(&l))
        .collect::<Result<Vec<_>, _>>()?;
    for n in numbers {
        let mut input1 = n.clone();
        fft(&mut input1, 100, 0);
        // Bug fix: report the transformed digits (`input1`). The previous
        // code read from `n`, printing the untouched input as the "result".
        let output1 = result_to_str(input1.iter().take(8));
        println!("Part1: result after 100 phases of FFT: {}", output1);
        let output2 = part2(&n, 100, 8);
        println!("Part2: message is {}", output2);
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Run `phases` of plain FFT from index 0 and compare the first
    /// `digits` output digits against `expected`.
    fn test_part1(input: &str, phases: usize, digits: usize, expected: &str) {
        let mut n = parse_input(input).unwrap();
        fft(&mut n, phases, 0);
        let output = result_to_str(n.iter().take(digits));
        assert_eq!(output, expected);
    }
    /// Run the full part-2 pipeline (10 000x repetition + offset) and
    /// compare the extracted message against `expected`.
    fn test_part2(input: &str, phases: usize, digits: usize, expected: &str) {
        let n = parse_input(input).unwrap();
        let output = part2(&n, phases, digits);
        assert_eq!(output, expected);
    }
    // The examples below are from the Advent of Code day 16 puzzle text.
    #[test]
    fn test_example1() {
        test_part1("12345678", 4, 8, "01029498");
    }
    #[test]
    fn test_example2() {
        test_part1("80871224585914546619083218645595", 100, 8, "24176176");
    }
    #[test]
    fn test_example3() {
        test_part1("19617804207202209144916044189917", 100, 8, "73745418");
    }
    #[test]
    fn test_example4() {
        test_part1("69317163492948606335995924319873", 100, 8, "52432133");
    }
    #[test]
    fn test_example5() {
        test_part2("03036732577212944063491565474664", 100, 8, "84462026");
    }
    #[test]
    fn test_example6() {
        test_part2("02935109699940807407585447034323", 100, 8, "78725270");
    }
    #[test]
    fn test_example7() {
        test_part2("03081770884921959731165446850517", 100, 8, "53553731");
    }
}
|
use rand::rngs::StdRng;
use rand::{RngCore, SeedableRng};
use sealpir::PirReply;
use hybridpir::client::HybridPirClient;
use hybridpir::server::HybridPirServer;
// End-to-end HybridPir round trip: build a random 2^20-entry database of
// 8-byte records, plant a known record in the middle, query it through the
// RaidPIR + SealPIR stack and check the recovered bytes.
#[test]
fn test_pir() {
    let mut prng = StdRng::from_entropy();
    let size = 1 << 20;
    let raidpir_servers = 2;
    let raidpir_redundancy = 2;
    let raidpir_size = 1 << 10;
    let index = size >> 1;
    let mut db: Vec<Vec<u8>> = Vec::with_capacity(size);
    for _i in 0..size {
        let mut buffer = vec![0; 8];
        prng.fill_bytes(&mut buffer);
        db.push(buffer);
    }
    // Known plaintext at the index we will query for.
    db[index] = b"deadbeef".to_vec();
    let mut servers: Vec<HybridPirServer> = (0..raidpir_servers)
        .map(|i| HybridPirServer::new(
            &db,
            i, raidpir_servers, raidpir_redundancy, raidpir_size, false,
            2048, 12, 2
        )).collect();
    let client = HybridPirClient::new(db.len(), 8,
        raidpir_servers, raidpir_redundancy, raidpir_size,
        2048, 12, 2);
    // Collect each server's seed before building the queries.
    let seeds = servers.iter_mut().map(|s| s.seed()).collect();
    let (raidpir_queries, sealpir_query) = client.query(index, &seeds);
    let sealpir_key = client.sealpir_key();
    let responses: Vec<PirReply> = servers
        .iter_mut()
        .zip(seeds.iter().zip(raidpir_queries.iter()))
        .map(|(server, (seed, raidpir_query))| server.response(*seed, raidpir_query, sealpir_key, &sealpir_query))
        .collect();
    let response = client.combine(index, responses);
    assert!(response == b"deadbeef");
}
// Same round trip as `test_pir`, but over TCP: each server listens on its
// own localhost port in a background thread and the client connects to all
// of them.
#[test]
fn test_tcp() {
    let mut prng = StdRng::from_entropy();
    let size = 1 << 20;
    let raidpir_servers = 2;
    let raidpir_redundancy = 2;
    let raidpir_size = 1 << 8;
    let index = size >> 1;
    let mut db: Vec<Vec<u8>> = Vec::with_capacity(size);
    for _i in 0..size {
        let mut buffer = vec![0; 8];
        prng.fill_bytes(&mut buffer);
        db.push(buffer);
    }
    // Known plaintext at the index we will query for.
    db[index] = b"deadbeef".to_vec();
    for i in 0..raidpir_servers {
        let server = HybridPirServer::new(&db,
            i, raidpir_servers, raidpir_redundancy, raidpir_size, false,
            2048, 12, 2);
        // Detached server threads; they live for the rest of the test run.
        std::thread::spawn(move || {
            server.accept_connections(("localhost", (7000 + i) as u16)).unwrap();
        });
    }
    let client = HybridPirClient::new(db.len(), 8,
        raidpir_servers, raidpir_redundancy, raidpir_size,
        2048, 12, 2);
    let response = client
        .send_query(&[("localhost", 7000), ("localhost", 7001)], index)
        .unwrap();
    assert!(response == b"deadbeef");
}
|
use rusoto_core::Region;
use rusoto_logs::{
CloudWatchLogs, CloudWatchLogsClient, CreateLogGroupRequest, CreateLogStreamRequest,
DescribeLogGroupsRequest, DescribeLogStreamsRequest, GetLogEventsRequest, InputLogEvent,
LogGroup, PutLogEventsRequest,
};
use std::default::Default;
// A created log group is visible via DescribeLogGroups with a matching
// name prefix.
#[test]
fn describe_group() {
    let addr = start_server();
    let client = client(addr);
    let req = CreateLogGroupRequest {
        log_group_name: "test-group".into(),
        ..Default::default()
    };
    client.create_log_group(req).sync().unwrap();
    let mut desc_groups_req = DescribeLogGroupsRequest::default();
    desc_groups_req.log_group_name_prefix = "test-group".to_string().into();
    let res = client.describe_log_groups(desc_groups_req).sync().unwrap();
    let groups = res.log_groups.unwrap();
    assert_eq!(
        groups,
        vec![LogGroup {
            log_group_name: Some("test-group".into()),
            ..Default::default()
        }]
    );
}
// Describing streams of a group that was never created must fail.
#[test]
fn group_not_found() {
    let addr = start_server();
    let client = client(addr);
    let mut desc_streams_req = DescribeLogStreamsRequest::default();
    desc_streams_req.log_group_name = "non-existant-group".to_string();
    client
        .describe_log_streams(desc_streams_req)
        .sync()
        .unwrap_err();
}
// A freshly created group exists but contains no streams yet.
#[test]
fn group_found() {
    let addr = start_server();
    let client = client(addr);
    let req = CreateLogGroupRequest {
        log_group_name: "test-group".into(),
        ..Default::default()
    };
    client.create_log_group(req).sync().unwrap();
    let mut desc_streams_req = DescribeLogStreamsRequest::default();
    desc_streams_req.log_group_name = "test-group".to_string();
    let res = client
        .describe_log_streams(desc_streams_req)
        .sync()
        .unwrap();
    let streams = res.log_streams.unwrap();
    assert_eq!(streams, vec![]);
}
// A created stream shows up under its group with the right name.
#[test]
fn stream_found() {
    let addr = start_server();
    let client = client(addr);
    let group_name = "test-group".to_string();
    let req = CreateLogGroupRequest {
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    client.create_log_group(req).sync().unwrap();
    let req = CreateLogStreamRequest {
        log_group_name: group_name.clone(),
        log_stream_name: "test-log-stream".into(),
    };
    client.create_log_stream(req).sync().unwrap();
    let desc_streams_req = DescribeLogStreamsRequest {
        log_group_name: group_name,
        ..Default::default()
    };
    let res = client
        .describe_log_streams(desc_streams_req)
        .sync()
        .unwrap();
    let streams = res.log_streams.unwrap();
    let stream = streams.into_iter().next().unwrap();
    let stream_name = stream.log_stream_name.unwrap();
    assert_eq!(stream_name, "test-log-stream".to_string());
}
// Creating a log group succeeds.
#[test]
fn create_group() {
    let addr = start_server();
    let client = client(addr);
    let req = CreateLogGroupRequest {
        log_group_name: "test-group-1".into(),
        ..Default::default()
    };
    client.create_log_group(req).sync().unwrap();
}
// Creating a stream inside an existing group succeeds.
#[test]
fn create_stream() {
    let addr = start_server();
    let client = client(addr);
    let group_name = "test-group-1".to_string();
    let req = CreateLogGroupRequest {
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    client.create_log_group(req).sync().unwrap();
    let req = CreateLogStreamRequest {
        log_group_name: group_name,
        log_stream_name: "test-log-stream".into(),
    };
    client.create_log_stream(req).sync().unwrap();
}
// PutLogEvents with an empty event batch is accepted.
#[test]
fn put_logs_empty() {
    let addr = start_server();
    let client = client(addr);
    let group_name = "test-group".to_string();
    let req = CreateLogGroupRequest {
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    client.create_log_group(req).sync().unwrap();
    let req = CreateLogStreamRequest {
        log_group_name: group_name.clone(),
        log_stream_name: "test-log-stream".into(),
    };
    client.create_log_stream(req).sync().unwrap();
    let desc_streams_req = DescribeLogStreamsRequest {
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    let res = client
        .describe_log_streams(desc_streams_req)
        .sync()
        .unwrap();
    let streams = res.log_streams.unwrap();
    let stream = streams.into_iter().next().unwrap();
    // The sequence token from DescribeLogStreams must be echoed back on put.
    let token = stream.upload_sequence_token.clone();
    let stream_name = stream.log_stream_name.unwrap();
    assert_eq!(stream_name, "test-log-stream".to_string());
    let req = PutLogEventsRequest {
        log_events: Vec::new(),
        log_group_name: group_name,
        log_stream_name: "test-log-stream".to_string(),
        sequence_token: token,
    };
    client.put_log_events(req).sync().unwrap();
}
// PutLogEvents with a single event is accepted.
#[test]
fn put_logs_non_empty() {
    let addr = start_server();
    let client = client(addr);
    let group_name = "test-group".to_string();
    let req = CreateLogGroupRequest {
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    client.create_log_group(req).sync().unwrap();
    let req = CreateLogStreamRequest {
        log_group_name: group_name.clone(),
        log_stream_name: "test-log-stream".into(),
    };
    client.create_log_stream(req).sync().unwrap();
    let desc_streams_req = DescribeLogStreamsRequest {
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    let res = client
        .describe_log_streams(desc_streams_req)
        .sync()
        .unwrap();
    let streams = res.log_streams.unwrap();
    let stream = streams.into_iter().next().unwrap();
    // The sequence token from DescribeLogStreams must be echoed back on put.
    let token = stream.upload_sequence_token.clone();
    let stream_name = stream.log_stream_name.unwrap();
    assert_eq!(stream_name, "test-log-stream".to_string());
    let logs = vec![InputLogEvent {
        message: "hello world".into(),
        timestamp: chrono::Utc::now().timestamp_millis(),
    }];
    let req = PutLogEventsRequest {
        log_events: logs,
        log_group_name: group_name,
        log_stream_name: "test-log-stream".to_string(),
        sequence_token: token,
    };
    client.put_log_events(req).sync().unwrap();
}
// After putting an empty batch, GetLogEvents returns no events.
#[test]
fn get_logs_empty() {
    let addr = start_server();
    let client = client(addr);
    let group_name = "test-group".to_string();
    let req = CreateLogGroupRequest {
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    client.create_log_group(req).sync().unwrap();
    let req = CreateLogStreamRequest {
        log_group_name: group_name.clone(),
        log_stream_name: "test-log-stream".into(),
    };
    client.create_log_stream(req).sync().unwrap();
    let desc_streams_req = DescribeLogStreamsRequest {
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    let res = client
        .describe_log_streams(desc_streams_req)
        .sync()
        .unwrap();
    let streams = res.log_streams.unwrap();
    let stream = streams.into_iter().next().unwrap();
    let token = stream.upload_sequence_token.clone();
    let stream_name = stream.log_stream_name.unwrap();
    assert_eq!(stream_name, "test-log-stream".to_string());
    let req = PutLogEventsRequest {
        log_events: Vec::new(),
        log_group_name: group_name.clone(),
        log_stream_name: "test-log-stream".to_string(),
        sequence_token: token,
    };
    client.put_log_events(req).sync().unwrap();
    let req = GetLogEventsRequest {
        log_stream_name: "test-log-stream".to_string(),
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    let res = client.get_log_events(req).sync().unwrap();
    let events = res.events.unwrap();
    assert!(events.is_empty());
}
// A put event round-trips through GetLogEvents with its message intact.
#[test]
fn get_logs_non_empty() {
    let addr = start_server();
    let client = client(addr);
    let group_name = "test-group".to_string();
    let req = CreateLogGroupRequest {
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    client.create_log_group(req).sync().unwrap();
    let req = CreateLogStreamRequest {
        log_group_name: group_name.clone(),
        log_stream_name: "test-log-stream".into(),
    };
    client.create_log_stream(req).sync().unwrap();
    let desc_streams_req = DescribeLogStreamsRequest {
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    let res = client
        .describe_log_streams(desc_streams_req)
        .sync()
        .unwrap();
    let streams = res.log_streams.unwrap();
    let stream = streams.into_iter().next().unwrap();
    let token = stream.upload_sequence_token.clone();
    let stream_name = stream.log_stream_name.unwrap();
    assert_eq!(stream_name, "test-log-stream".to_string());
    let logs = vec![InputLogEvent {
        message: "hello world".into(),
        timestamp: chrono::Utc::now().timestamp_millis(),
    }];
    let req = PutLogEventsRequest {
        log_events: logs,
        log_group_name: group_name.clone(),
        log_stream_name: "test-log-stream".to_string(),
        sequence_token: token,
    };
    client.put_log_events(req).sync().unwrap();
    let req = GetLogEventsRequest {
        log_stream_name: "test-log-stream".to_string(),
        log_group_name: group_name.clone(),
        ..Default::default()
    };
    let res = client.get_log_events(req).sync().unwrap();
    let events = res.events.unwrap();
    let event = events.into_iter().next().unwrap();
    let message = event.message.unwrap();
    assert_eq!(message, "hello world".to_string());
}
/// Build a CloudWatchLogs client pointed at the local mock server via a
/// custom region with a localhost endpoint.
fn client(addr: SocketAddr) -> impl CloudWatchLogs {
    let endpoint = format!("http://localhost:{}", addr.port());
    let region = Region::Custom {
        name: "mockwatchlogs".into(),
        endpoint,
    };
    CloudWatchLogsClient::new(region)
}
/// Spawn the mock CloudWatch Logs server on a fresh port in a background
/// thread and return its address.
fn start_server() -> SocketAddr {
    use mockwatchlogs::serve;
    let addr = next_addr();
    std::thread::spawn(move || {
        use tokio::runtime::current_thread;
        let serve = serve(addr);
        current_thread::run(serve);
    });
    // Crude readiness wait — the server exposes no startup signal.
    std::thread::sleep(std::time::Duration::from_millis(100));
    addr
}
use std::net::SocketAddr;
use std::sync::atomic::{AtomicUsize, Ordering};
// Monotonically increasing port counter so concurrently running tests
// never hand out the same localhost port twice.
static NEXT_PORT: AtomicUsize = AtomicUsize::new(1234);
/// Hand out a fresh loopback socket address for a test server.
pub fn next_addr() -> SocketAddr {
    use std::net::{IpAddr, Ipv4Addr};
    let next = NEXT_PORT.fetch_add(1, Ordering::AcqRel);
    let localhost = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
    SocketAddr::new(localhost, next as u16)
}
|
#[doc = "Reader of register CHAN_RESULT_NEWVALUE"]
pub type R = crate::R<u32, super::CHAN_RESULT_NEWVALUE>;
#[doc = "Reader of field `CHAN_RESULT_NEWVALUE`"]
pub type CHAN_RESULT_NEWVALUE_R = crate::R<u16, u16>;
impl R {
#[doc = "Bits 0:15 - If set the corresponding RESULT data received a new value, i.e. was sampled during the last scan and data was valid. In case of a UAB this New Value bit reflects the value of UAB.valid output, for anything else the data is always valid. In case of averaging this New Value bit is an OR of all the valid bits received by each conversion."]
#[inline(always)]
pub fn chan_result_newvalue(&self) -> CHAN_RESULT_NEWVALUE_R {
CHAN_RESULT_NEWVALUE_R::new((self.bits & 0xffff) as u16)
}
}
|
/// Blocking delays with millisecond and microsecond resolution.
///
/// NOTE(review): whether an implementor busy-waits or sleeps, and with what
/// accuracy, is not constrained by this trait.
pub trait BlockingDelay {
    /// Block the caller for `amount` milliseconds.
    fn blocking_delay_ms(&self, amount: u32);
    /// Block the caller for `amount` microseconds.
    fn blocking_delay_us(&self, amount: u32);
}
//! Parse various text markup formats.
//!
//! Each module is optional and relies on a feature.
#[cfg(feature = "markdown")]
pub mod markdown;
#[cfg(feature = "markdown")]
pub use self::markdown::MarkdownText;
use owning_ref::OwningHandle;
use owning_ref::StringRef;
use std::borrow::Cow;
use std::ops::Deref;
use theme::Style;
use utils::lines::spans::Span;
/// Trait for parsing text into styled spans.
pub trait Markup {
    /// Possible error happening when parsing.
    type Error;
    /// Parses text and return the styled spans.
    fn parse<'a>(input: &'a str) -> Result<Vec<Span<'a>>, Self::Error>;
    /// Returns a string and its parsed spans.
    ///
    /// Generates a self-borrowing struct containing the source string, as well
    /// as the styled spans borrowing this string.
    fn make_handle<S>(input: S) -> Result<StyledHandle, Self::Error>
    where
        S: Into<String>,
    {
        let input = input.into();
        OwningHandle::try_new(StringRef::new(input), |input| {
            // `input` is a raw pointer to the string owned by the handle;
            // dereferencing it lets the parsed spans borrow the owner.
            // NOTE(review): soundness rests on OwningHandle's self-borrow
            // contract — the owner must stay pinned alongside the spans.
            Self::parse(unsafe { &*input })
        })
    }
}
/// Thin wrapper around a string, with a markup format.
///
/// This only wraps the text and indicates how it should be parsed;
/// it does not parse the text itself.
pub trait MarkupText {
    /// Markup format to use to parse the string.
    type M: Markup;
    /// Access the inner string.
    // NOTE(review): the name shadows `ToString::to_string` but consumes
    // `self`; renaming would break implementors, so it is kept as-is.
    fn to_string(self) -> String;
}
/// Unwrapped text gets the "Plain" markup for free.
impl<S: Into<String>> MarkupText for S {
type M = Plain;
fn to_string(self) -> String {
self.into()
}
}
/// Dummy `Markup` implementation that returns the text as-is.
pub struct Plain;
impl Markup for Plain {
    // Plain parsing cannot fail; the error type is uninhabited in practice.
    type Error = ();
    fn parse<'a>(input: &'a str) -> Result<Vec<Span<'a>>, Self::Error> {
        Ok(if input.is_empty() {
            // No spans for the empty string.
            Vec::new()
        } else {
            // One unstyled span borrowing the whole input.
            vec![
                Span {
                    text: Cow::Borrowed(input),
                    style: Style::none(),
                },
            ]
        })
    }
}
/// Holds both parsed spans, and the input string they borrow.
///
/// This is used to pass around a parsed string.
pub type StyledHandle = OwningHandle<StringRef, Vec<Span<'static>>>;
/// A String that parses a markup language.
pub struct StyledString {
    // Invariant: always `Some` between method calls; only taken
    // temporarily inside `with_content` while the text is re-parsed.
    content: Option<StyledHandle>,
}
impl StyledString {
    /// Creates a new styled string, parsing the given content.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # use cursive::utils::markup::StyledString;
    /// let styled_string = StyledString::new("*plain* text");
    /// ```
    pub fn new<T>(content: T) -> Result<Self, <T::M as Markup>::Error>
    where
        T: MarkupText,
    {
        let content = content.to_string();
        let content = Some(T::M::make_handle(content)?);
        Ok(StyledString { content })
    }
    /// Returns a plain StyledString without any style.
    ///
    /// > You got no style, Dutch. You know that.
    pub fn plain<S>(content: S) -> Self
    where
        S: Into<String>,
    {
        // `S: Into<String>` selects the Plain markup, whose parse always
        // succeeds, so this unwrap cannot fail.
        Self::new(content).unwrap()
    }
    /// Sets the content of this string.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # use cursive::utils::markup::StyledString;
    /// # let mut styled_string = StyledString::new("").unwrap();
    /// styled_string.set_content("*plain* text").unwrap();
    /// ```
    pub fn set_content<T>(
        &mut self, content: T
    ) -> Result<(), <<T as MarkupText>::M as Markup>::Error>
    where
        T: MarkupText,
    {
        let content = content.to_string();
        self.content = Some(T::M::make_handle(content)?);
        Ok(())
    }
    /// Sets the content of this string to plain text.
    pub fn set_plain<S>(&mut self, content: S)
    where
        S: Into<String>,
    {
        // Plain parsing is infallible (see `plain`).
        self.set_content(content).unwrap();
    }
    /// Append `content` to the end.
    ///
    /// Re-parse everything after.
    pub fn append_content<T>(
        &mut self, content: T
    ) -> Result<(), <T::M as Markup>::Error>
    where
        T: MarkupText,
    {
        self.with_content::<T::M, _, _>(|c| c.push_str(&content.to_string()))
    }
    /// Run a closure on the text content.
    ///
    /// And re-parse everything after.
    // NOTE(review): if `make_handle` fails, `self.content` is left `None`
    // and a later accessor will panic on unwrap — confirm callers only use
    // infallible markups here.
    pub fn with_content<M, F, O>(&mut self, f: F) -> Result<O, M::Error>
    where
        M: Markup,
        F: FnOnce(&mut String) -> O,
    {
        // Get hold of the StyledHandle
        let content = self.content.take().unwrap();
        // Get the inner String
        let mut content = content.into_inner().into_inner();
        // Do what we have to do
        let out = f(&mut content);
        // And re-parse everything
        self.content = Some(M::make_handle(content)?);
        Ok(out)
    }
    /// Gives access to the parsed styled spans.
    pub fn spans<'a>(&'a self) -> &'a [Span<'a>] {
        // Deref-coerces through the OwningHandle to the span vector.
        &self.content.as_ref().unwrap()
    }
}
/// Expose the raw (unparsed) text backing this styled string.
impl Deref for StyledString {
    type Target = str;
    fn deref(&self) -> &str {
        // `owner()` is the source String held inside the OwningHandle.
        &self.content.as_ref().unwrap().owner()
    }
}
|
//! Auxiliary Proof of Work Pallet
//!
//! This pallet allows blockchain users to submit proofs of work on the parent block (or maybe a
//! few recent parents).
//!
//! Why?
//! It's cool
//! To avoid long range attacks
//! To not waste orphaned work (like GHOST)
//! To help forks resolve in a more continuous manner.
//!
//! Imagine if Babe is the primary leader election mechanism.
//! When multiple authors are selected there is a fork. That fork is resolved
//! at the very earliest, at the next slot. When the next
//! slot arrives, the fork may be suddenly resolved. Or both forks may continue to grow if multiple
//! authors are selected again.
//!
//! To an observer (a user or author) of the network the state of
//! the network during forking conditions can be thought of as a quantum superposition of possible next states.
//! The auxiliary PoW alllows aux miners (CAUTION! aux "miners" are not block authors)
//! to signal their support for one of the forks before the next block is included, and incentivises
//! block authors to choose that fork to collect their transaction rewards.
//!
//! This also helps future authors coordinate better so that two honest miners don't author on
//! different forks by chance.
//!
//!
//! Miners could be incentivized. Think about rewarding "points"
//! Or aux mining could be required for casting votes or something. "If you wanna participate in _this_ election, you need to have stake on _this_ chain"
//! Include an unsigned version of note work?
//! Question: Is this still useful in conjunction with normal proof of work? I don't see why not?
//! Imagine whale primary miners colluding to author a chain that the plebeians didn't like (maybe they're getting censored, or fees are artificially driven up.)
//! Grass-roots non-colluding miners could start mining an alternate chain with way less primary hashrate and aux pow miners (the plebs) can signal their support for the second chain that they consider fair
//!
//! This is similar to, and possibly inspired by, a paper I read called nested blocks or something like that.
#![cfg_attr(not(feature = "std"), no_std)]
use frame_support::{decl_module, decl_storage, decl_event, decl_error, dispatch, ensure, traits::Get};
use frame_system::{ensure_none};
use codec::{Encode, Decode};
use sp_runtime::RuntimeDebug;
use sp_core::U256;
use sp_io::hashing::blake2_256;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
/// The seal that is submitted to prove that work was done. The beneficiary is included here to
/// prevent front-running.
#[derive(Encode, Decode, RuntimeDebug, Eq, PartialEq, Hash, Clone)]
pub struct Seal<Hash, AccountId> {
    /// Hash of the block being mined on; must equal the current parent hash.
    parent: Hash,
    /// Account credited with the submitted work.
    beneficiary: AccountId,
    /// Free value the miner varies while searching for a low hash.
    nonce: u64,
}
/// `Seal` specialized to this runtime's hash and account types.
type SealOf<T> = Seal<<T as frame_system::Config>::Hash, <T as frame_system::Config>::AccountId>;
/// Auxiliary PoW's configuration trait.
pub trait Config: frame_system::Config {
    /// Because this pallet emits events, it depends on the runtime's definition of an event.
    type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;
    /// Minimum number of leading zero bits the hash must have to be accepted.
    type MinLeftZeros: Get<u32>;
    //TODO Hook to notify when work is added (for rewards for example)
    //TODO Way to add additional work (in case leader election PoW is used and should be counted)
}
// Pallet storage: a single accumulating work counter.
decl_storage! {
    trait Store for Module<T: Config> as AuxPow {
        /// All of the work from genesis until now.
        AccumulatedWork: u64;//TODO is that gonna be big enough? We'll see.
    }
}
// Events emitted by this pallet.
decl_event!(
    pub enum Event<T> where AccountId = <T as frame_system::Config>::AccountId {
        /// Some auxiliary proof of work was included in the chain.
        /// Data are: New Work, Accumulated Work, Beneficiary
        WorkNoted(u64, u64, AccountId),
    }
);
// Dispatch errors returned by `note_work`.
decl_error! {
    pub enum Error for Module<T: Config> {
        /// The Parent block supplied is not this blocks parent.
        IncorrectParent,
        /// The number of zeros the submitter claimed is below the minimum.
        ClaimedWorkInsufficient,
        /// The actual hash does not have as many zeros as claimed.
        WorkHarderNextTime,
    }
}
decl_module! {
    pub struct Module<T: Config> for enum Call where origin: T::Origin {
        // Errors must be initialized if they are used by the pallet.
        type Error = Error<T>;
        // Events must be initialized if they are used by the pallet.
        fn deposit_event() = default;
        /// Submit an auxiliary proof of work.
        ///
        /// Unsigned call; the beneficiary is part of the sealed (hashed)
        /// payload, so a front-runner cannot re-target the reward.
        #[weight = 0]
        pub fn note_work(origin, seal: SealOf<T>, zeros_of_work: u32) -> dispatch::DispatchResult {
            ensure_none(origin)?;
            // Make sure they're above the minimum difficulty
            ensure!(
                zeros_of_work >= T::MinLeftZeros::get(),
                Error::<T>::ClaimedWorkInsufficient
            );
            // Make sure they are mining on the parent
            ensure!(
                seal.parent == frame_system::Module::<T>::parent_hash(),
                Error::<T>::IncorrectParent
            );
            // Compute the actual hash of the seal passed in
            let hash = seal.using_encoded(|bytes| {
                U256::from(&blake2_256(bytes))
            });
            // Ensure that the hash is as low as the submitter claimed.
            // blake2_256 produces a 256-bit hash, so the number of leading
            // zero bits is 256 - hash.bits(). Bug fix: the previous code
            // used `512 - hash.bits()`, which silently granted every
            // submission 256 extra "zeros" and made the check pass for any
            // claim up to 256 — i.e. no work was actually required.
            ensure!(
                256 - hash.bits() >= zeros_of_work as usize,
                Error::<T>::WorkHarderNextTime
            );
            // Update accumulated work, saturating instead of overflowing:
            // 2^zeros overflows u64 for zeros_of_work >= 64, and the running
            // total must never wrap either.
            let delta_work = 2u64.saturating_pow(zeros_of_work);
            let new_work = AccumulatedWork::get().saturating_add(delta_work);
            AccumulatedWork::put(new_work);
            //TODO reward (or maybe just note via a hook) the beneficiary
            // Emit an event.
            Self::deposit_event(RawEvent::WorkNoted(delta_work, new_work, seal.beneficiary));
            Ok(())
        }
    }
}
|
use crate::robust_arduino::*;
use crate::logger::*;
use serial::prelude::*;
use serial::SystemPort;
use std::time::Duration;
use std::thread;
// Serial link parameters for the Arduino: 115200 baud, 8 data bits,
// no parity, one stop bit, no flow control (8N1).
const SETTINGS: serial::PortSettings = serial::PortSettings {
    baud_rate: serial::Baud115200,
    char_size: serial::Bits8,
    parity: serial::ParityNone,
    stop_bits: serial::Stop1,
    flow_control: serial::FlowNone,
};
pub fn connect_to_arduino(serial_port: &str, debug: bool) -> SystemPort {
log(format!("Opening port: {:?}", serial_port));
let mut port = serial::open(&serial_port).unwrap();
port.configure(&SETTINGS).unwrap();
// timeout of 30s
port.set_timeout(Duration::from_secs(30)).unwrap();
loop
{
log(format!("Waiting for Arduino..."));
let order = Order::HELLO;
write_order(&mut port, order).unwrap();
let received_order = Order::from_i8(read_i8(&mut port).unwrap()).unwrap();
if received_order == Order::ALREADY_CONNECTED
{
break;
}
thread::sleep(Duration::from_secs(1));
}
log(format!("Connected to Arduino"));
log(format!("Order received: {:?}", read_order(&mut port).unwrap()));
log(format!("Attempting to sync Arduino Config..."));
write_order(&mut port, Order::SYNC_CONFIG).unwrap();
if debug {
write_i8(&mut port, 1).unwrap();
} else {
write_i8(&mut port, 0).unwrap();
}
log(format!("Order received: {:?}", read_order(&mut port).unwrap()));
if debug {
log(format!("Order received: {:?}", read_order(&mut port).unwrap()));
log(format!("Parameter received: {:?}", read_i8(&mut port).unwrap()));
}
return port;
} |
use ark_bls12_381::{Bls12_381, Fr};
use ark_ff::{to_bytes, PrimeField, UniformRand};
use ark_poly_commit::kzg10::Commitment;
use ark_std::test_rng;
use merlin::Transcript;
/// Fiat-Shamir transcript operations used by the proof system.
pub trait TranscriptProtocol {
    /// Append a `commitment` with the given `label`.
    fn append_commitment(&mut self, label: &'static [u8], comm: &Commitment<Bls12_381>);
    /// Append a `Scalar` with the given `label`.
    fn append_scalar(&mut self, label: &'static [u8], s: &Fr);
    /// Compute a `label`ed challenge variable.
    fn challenge_scalar(&mut self, label: &'static [u8]) -> Fr;
}
impl TranscriptProtocol for Transcript {
    /// Absorb a KZG commitment into the transcript under `label`.
    fn append_commitment(&mut self, label: &'static [u8], comm: &Commitment<Bls12_381>) {
        self.append_message(label, &to_bytes![comm].unwrap());
    }
    /// Absorb a field element into the transcript under `label`.
    fn append_scalar(&mut self, label: &'static [u8], s: &Fr) {
        self.append_message(label, &to_bytes![s].unwrap())
    }
    /// Squeeze a challenge scalar that is bound to the transcript state.
    ///
    /// Bug fix: the previous implementation filled a buffer from
    /// `challenge_bytes` and then *discarded it*, returning
    /// `Fr::rand(test_rng())` — a fixed-seed RNG. Every challenge was the
    /// same constant, independent of the transcript, which destroys the
    /// soundness of any Fiat-Shamir argument built on it. We now reduce 64
    /// transcript bytes modulo the field order so the challenge is both
    /// transcript-derived and statistically uniform in Fr.
    fn challenge_scalar(&mut self, label: &'static [u8]) -> Fr {
        let mut buf = [0u8; 64];
        self.challenge_bytes(label, &mut buf);
        Fr::from_le_bytes_mod_order(&buf)
    }
}
|
pub use authorized_users::{
get_random_key, get_secrets, token::Token, AuthorizedUser, AUTHORIZED_USERS, JWT_SECRET,
KEY_LENGTH, SECRET_KEY, TRIGGER_DB_UPDATE,
};
use futures::TryStreamExt;
use log::debug;
use maplit::hashset;
use reqwest::Client;
use rweb::{filters::cookie::cookie, Filter, Rejection, Schema};
use rweb_helper::UuidWrapper;
use serde::{Deserialize, Serialize};
use stack_string::{format_sstr, StackString};
use std::{
convert::{TryFrom, TryInto},
env::var,
str::FromStr,
};
use time::{Duration, OffsetDateTime};
use tokio::task::spawn;
use url::Url;
use uuid::Uuid;
use sync_app_lib::{config::Config, models::AuthorizedUsers, pgpool::PgPool};
use crate::{
app::AppState,
errors::ServiceError as Error,
requests::{
CalendarSyncRequest, GarminSyncRequest, MovieSyncRequest, SyncPodcastsRequest,
SyncSecurityRequest, SyncWeatherRequest,
},
};
/// Authenticated user identity, as carried in the `jwt` and `session-id` cookies.
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Hash, Clone, Schema)]
pub struct LoggedUser {
    #[schema(description = "Email Address")]
    pub email: StackString,
    #[schema(description = "Session UUID")]
    pub session: UuidWrapper,
    #[schema(description = "Secret Key")]
    pub secret_key: StackString,
}
impl LoggedUser {
    /// Succeed only when `session_id` matches the session UUID stored on this user.
    fn verify_session_id(&self, session_id: Uuid) -> Result<(), Error> {
        if self.session == session_id {
            Ok(())
        } else {
            Err(Error::Unauthorized)
        }
    }
    /// rweb filter extracting a `LoggedUser` from the `jwt` cookie, rejecting
    /// the request unless the `session-id` cookie matches the user's session.
    #[must_use]
    pub fn filter() -> impl Filter<Extract = (Self,), Error = Rejection> + Copy {
        cookie("session-id")
            .and(cookie("jwt"))
            .and_then(|id: Uuid, user: Self| async move {
                user.verify_session_id(id)
                    .map(|_| user)
                    .map_err(rweb::reject::custom)
            })
    }
    /// Fetch the `SyncSession` stored under `session_key` from the auth
    /// service at `https://{config.domain}`.
    ///
    /// Returns `Ok(None)` when no session exists, or when the stored session
    /// is older than 10 minutes (treated as stale).
    async fn get_session(
        &self,
        client: &Client,
        config: &Config,
        session_key: &str,
    ) -> Result<Option<SyncSession>, anyhow::Error> {
        let base_url: Url = format_sstr!("https://{}", config.domain).parse()?;
        let session: Option<SyncSession> = AuthorizedUser::get_session_data(
            &base_url,
            self.session.into(),
            &self.secret_key,
            client,
            session_key,
        )
        .await?;
        debug!("Got session {:?}", session);
        if let Some(session) = session {
            // Only sessions created within the last 10 minutes count as live.
            if session.created_at > (OffsetDateTime::now_utc() - Duration::minutes(10)) {
                return Ok(Some(session));
            }
        }
        Ok(None)
    }
    /// Store `session_value` under `session_key` for this user's session.
    async fn set_session(
        &self,
        client: &Client,
        config: &Config,
        session_key: &str,
        session_value: SyncSession,
    ) -> Result<(), anyhow::Error> {
        let base_url: Url = format_sstr!("https://{}", config.domain).parse()?;
        AuthorizedUser::set_session_data(
            &base_url,
            self.session.into(),
            &self.secret_key,
            client,
            session_key,
            &session_value,
        )
        .await?;
        Ok(())
    }
    /// Remove the session data stored under `session_key`.
    ///
    /// # Errors
    /// Return error if api call fails
    pub async fn rm_session(
        &self,
        client: &Client,
        config: &Config,
        session_key: &str,
    ) -> Result<(), anyhow::Error> {
        let base_url: Url = format_sstr!("https://{}", config.domain).parse()?;
        AuthorizedUser::rm_session_data(
            &base_url,
            self.session.into(),
            &self.secret_key,
            client,
            session_key,
        )
        .await?;
        Ok(())
    }
    /// Queue the sync job identified by `key`, or collect its result:
    /// * finished job (session has a `result`): clear the session and return
    ///   the output lines;
    /// * job still running (fresh session, no result yet): do nothing;
    /// * no live session: store a fresh `SyncSession` marker and spawn the
    ///   job onto `data.queue`.
    ///
    /// # Errors
    /// Return error if api call fails
    pub async fn push_session(
        self,
        key: SyncKey,
        data: AppState,
    ) -> Result<Option<Vec<StackString>>, Error> {
        if let Some(session) = self
            .get_session(&data.client, &data.config, key.to_str())
            .await
            .map_err(Into::<Error>::into)?
        {
            if let Some(result) = session.result {
                self.rm_session(&data.client, &data.config, key.to_str())
                    .await?;
                return Ok(Some(result));
            }
            debug!("session exists and is presumably running {:?}", session);
        } else {
            debug!("push job to queue {}", key.to_str());
            self.set_session(
                &data.client,
                &data.config,
                key.to_str(),
                SyncSession::default(),
            )
            .await?;
            let mesg = SyncMesg::new(self, key);
            // Keep the message alongside its task handle so the queue owner
            // can await or inspect the running job.
            data.queue.push((
                mesg.clone(),
                spawn({
                    let data = data.clone();
                    async move { mesg.process_mesg(data).await.map_err(Into::into) }
                }),
            ));
        }
        Ok(None)
    }
}
impl From<AuthorizedUser> for LoggedUser {
    /// Carry the email, session id and secret key over unchanged.
    fn from(user: AuthorizedUser) -> Self {
        let session = user.session.into();
        Self {
            email: user.email,
            session,
            secret_key: user.secret_key,
        }
    }
}
impl TryFrom<Token> for LoggedUser {
type Error = Error;
fn try_from(token: Token) -> Result<Self, Self::Error> {
if let Ok(user) = token.try_into() {
if AUTHORIZED_USERS.is_authorized(&user) {
return Ok(user.into());
}
debug!("NOT AUTHORIZED {:?}", user);
}
Err(Error::Unauthorized)
}
}
impl FromStr for LoggedUser {
    type Err = Error;
    /// Parse a `LoggedUser` from a raw JWT string.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut raw = StackString::new();
        raw.push_str(s);
        let token: Token = raw.into();
        token.try_into()
    }
}
/// Refresh the shared `AUTHORIZED_USERS` set, either from the database (when
/// a refresh has been triggered) or from the current in-memory set.  In the
/// `TESTENV=true` environment the set is replaced with a single stub user.
///
/// # Errors
/// Return error if db query fails
pub async fn fill_from_db(pool: &PgPool) -> Result<(), Error> {
    debug!("{:?}", *TRIGGER_DB_UPDATE);
    let users = if TRIGGER_DB_UPDATE.check() {
        AuthorizedUsers::get_authorized_users(pool)
            .await?
            .map_ok(|user| user.email)
            .try_collect()
            .await?
    } else {
        AUTHORIZED_USERS.get_users()
    };
    if let Ok("true") = var("TESTENV").as_ref().map(String::as_str) {
        // BUG FIX: the test-user set used to be overwritten immediately by
        // the unconditional `update_users(users)` below, making this branch
        // a no-op.  Return early so the stub user actually sticks.
        AUTHORIZED_USERS.update_users(hashset! {"user@test".into()});
        return Ok(());
    }
    AUTHORIZED_USERS.update_users(users);
    debug!("{:?}", *AUTHORIZED_USERS);
    Ok(())
}
/// State of a background sync job as stored in the session store.
#[derive(Serialize, Deserialize, Debug)]
pub struct SyncSession {
    // Timestamp when the session record was created (job queued or finished).
    pub created_at: OffsetDateTime,
    // Job output lines; `None` while the job is still running.
    pub result: Option<Vec<StackString>>,
}
impl Default for SyncSession {
    /// A freshly created, still-running session: timestamped now, no result.
    fn default() -> Self {
        let created_at = OffsetDateTime::now_utc();
        Self {
            created_at,
            result: None,
        }
    }
}
impl SyncSession {
    /// Build a finished session holding the job's output `lines`.
    #[must_use]
    pub fn from_lines(lines: Vec<StackString>) -> Self {
        Self {
            result: Some(lines),
            created_at: OffsetDateTime::now_utc(),
        }
    }
}
/// The kinds of background sync jobs this service can run.
#[derive(Clone, Copy)]
pub enum SyncKey {
    SyncGarmin,
    SyncMovie,
    SyncCalendar,
    SyncPodcast,
    SyncSecurity,
    SyncWeather,
}
impl SyncKey {
    /// Stable string identifier used as the session-store key for this job.
    #[must_use]
    pub fn to_str(self) -> &'static str {
        match self {
            SyncKey::SyncGarmin => "sync_garmin",
            SyncKey::SyncMovie => "sync_movie",
            SyncKey::SyncCalendar => "sync_calendar",
            SyncKey::SyncPodcast => "sync_podcast",
            SyncKey::SyncSecurity => "sync_security",
            SyncKey::SyncWeather => "sync_weather",
        }
    }
    /// Every job kind, in canonical order.
    #[must_use]
    pub fn all_keys() -> [Self; 6] {
        [
            SyncKey::SyncGarmin,
            SyncKey::SyncMovie,
            SyncKey::SyncCalendar,
            SyncKey::SyncPodcast,
            SyncKey::SyncSecurity,
            SyncKey::SyncWeather,
        ]
    }
}
/// A queued sync job: which user requested it and which kind of sync to run.
#[derive(Clone)]
pub struct SyncMesg {
    pub user: LoggedUser,
    pub key: SyncKey,
}
impl SyncMesg {
    /// Pair a user with the sync job they requested.
    #[must_use]
    pub fn new(user: LoggedUser, key: SyncKey) -> Self {
        Self { user, key }
    }
    /// Run the sync job, then store its output lines in the user's session
    /// (replacing the "running" marker) so `push_session` can pick them up.
    async fn process_mesg(self, app: AppState) -> Result<(), Error> {
        debug!(
            "start {} for {} {}",
            self.key.to_str(),
            self.user.email,
            self.user.session
        );
        // Dispatch to the request handler matching this job kind; all of
        // them serialize on the shared `app.locks`.
        let lines = match self.key {
            SyncKey::SyncGarmin => (GarminSyncRequest {}).handle(&app.locks).await,
            SyncKey::SyncMovie => (MovieSyncRequest {}).handle(&app.locks).await,
            SyncKey::SyncCalendar => (CalendarSyncRequest {}).handle(&app.locks).await,
            SyncKey::SyncPodcast => (SyncPodcastsRequest {}).handle(&app.locks).await,
            SyncKey::SyncSecurity => (SyncSecurityRequest {}).handle(&app.locks).await,
            SyncKey::SyncWeather => (SyncWeatherRequest {}).handle(&app.locks).await,
        }?;
        debug!(
            "finished {} for {} {}, {} lines",
            self.key.to_str(),
            self.user.email,
            self.user.session,
            lines.len()
        );
        let value = SyncSession::from_lines(lines);
        self.user
            .set_session(&app.client, &app.config, self.key.to_str(), value)
            .await
            .map_err(Into::into)
    }
}
|
use bitcoin::{BlockHash, Transaction, Txid};
use parking_lot::RwLock;
use std::collections::HashMap;
use std::sync::Arc;
use crate::{
merkle::Proof,
metrics::{self, Histogram, Metrics},
};
/// Thread-safe in-memory cache of transactions and merkle proofs.
pub(crate) struct Cache {
    txs: Arc<RwLock<HashMap<Txid, Transaction>>>,
    proofs: Arc<RwLock<HashMap<(BlockHash, Txid), Proof>>>,
    // stats
    txs_size: Histogram, // tracks serialized sizes of cached transactions
}
impl Cache {
    /// Build an empty cache, registering the tx-size histogram with `metrics`.
    pub fn new(metrics: &Metrics) -> Self {
        let txs_size = metrics.histogram_vec(
            "cache_txs_size",
            "Cached transactions' size (in bytes)",
            "type",
            metrics::default_size_buckets(),
        );
        Cache {
            txs: Default::default(),
            proofs: Default::default(),
            txs_size,
        }
    }
    /// Insert the transaction produced by `f` under `txid`; `f` runs only
    /// when the entry is not already cached.
    pub fn add_tx(&self, txid: Txid, f: impl FnOnce() -> Transaction) {
        let mut txs = self.txs.write();
        txs.entry(txid).or_insert_with(|| {
            let tx = f();
            self.txs_size.observe("serialized", tx.get_size());
            tx
        });
    }
    /// Apply `f` to the cached transaction for `txid`, if present.
    pub fn get_tx<F, T>(&self, txid: &Txid, f: F) -> Option<T>
    where
        F: FnOnce(&Transaction) -> T,
    {
        let guard = self.txs.read();
        guard.get(txid).map(f)
    }
    /// Insert the merkle proof produced by `f` for `(blockhash, txid)`;
    /// `f` runs only when the entry is not already cached.
    pub fn add_proof<F>(&self, blockhash: BlockHash, txid: Txid, f: F)
    where
        F: FnOnce() -> Proof,
    {
        let mut proofs = self.proofs.write();
        proofs.entry((blockhash, txid)).or_insert_with(f);
    }
    /// Apply `f` to the cached proof for `(blockhash, txid)`, if present.
    pub fn get_proof<F, T>(&self, blockhash: BlockHash, txid: Txid, f: F) -> Option<T>
    where
        F: FnOnce(&Proof) -> T,
    {
        let guard = self.proofs.read();
        guard.get(&(blockhash, txid)).map(f)
    }
}
|
use crate::types::*;
use regmach::dsp::types as rdt;
use regmach::dsp::types::Display;
use regmach::schem::types::Schematic;
use std::cell::RefCell;
use std::rc::Rc;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use web_sys::WebGl2RenderingContext as GL;
use rusttype::{point, FontCollection, PositionedGlyph, Scale};
use std::io::Write;
// Called when the wasm module is instantiated
/// Entry point: builds the display, a demo triangle and grid, then starts
/// the requestAnimationFrame render/event loop.
#[wasm_bindgen(start)]
pub fn main() -> Result<(), JsValue> {
    let mut dsp = BrowserDisplay::new();
    // let schematic = Schematic::from_display(box BrowserDisplay::new());
    // need to move these meshes and text entities into a space hash
    // they need to be hidden.
    // Demo triangle vertices (x, y, z triples).
    let verts: Vec<f32> = vec![-0.7, -0.7, 0.0, 0.7, -0.7, 0.0, 0.0, 0.7, 0.0];
    let mut triangle = Mesh::from_verts(&dsp,
        verts,
        include_str!("../shaders/basic-shader.vs"),
        include_str!("../shaders/basic-shader.fs"))?;
    let grid = Grid::new(&dsp)?;
    // Coordinate labels every 5 units along the diagonal.
    for i in (0..=100).step_by(5) {
        dsp.add_text(rdt::Command::AddText(i as f32 + 0.0,
            i as f32 + 1.0,
            format!("({:?}, {:?})", i, i).to_owned()));
    }
    // -----------------------------------------------------------------------------
    // MAIN EVENT LOOP
    // https://rustwasm.github.io/wasm-bindgen/examples/request-animation-frame.html
    // Self-referential Rc<RefCell<Closure>> pattern: the closure needs a
    // handle to itself so it can re-schedule via requestAnimationFrame.
    let f = Rc::new(RefCell::new(None));
    let g = f.clone();
    *g.borrow_mut() =
        Some(Closure::wrap(Box::new(move || {
            // there should be an event driven way adjust the canvas size.
            dsp.update_canvas_size_todo();
            dsp.props.frame_increment();
            dsp.clear();
            for ev in &dsp.get_events() {
                // there should be an event driven way adjust the canvas size.
                match ev {
                    rdt::Event::MouseDown(p) => {
                        log!("processing {:?}, vertex_buffer: {:?}",
                            ev,
                            triangle.vertex_buffer);
                        let scmpoint = dsp.screen_to_schematic(p.x as u32, p.y as u32);
                        log!("scmpoint: {:?}", scmpoint);
                    }
                    rdt::Event::MouseMove(p) => {
                        let pos = dsp.screen_to_schematic(p.x as u32, p.y as u32);
                        // triangle.move_to(pos.x, pos.y);
                    }
                    rdt::Event::KeyDown(code) => {
                        log!("processing {:?}", ev);
                        // Key codes: 68='D', 65='A', 87='W', 83='S',
                        // 33=PageUp, 34=PageDown, 67='C'.
                        // NOTE(review): 'D' pans left and 'A' pans right —
                        // looks swapped; confirm intended camera direction.
                        match *code {
                            68 => dsp.camera.pan_left(),
                            65 => dsp.camera.pan_right(),
                            87 => dsp.camera.pan_up(),
                            83 => dsp.camera.pan_down(),
                            33 => dsp.camera.zoom_in(),
                            34 => dsp.camera.zoom_out(),
                            67 => dsp.camera.center(),
                            _ => log!("unhandled key {:?}", ev),
                        }
                    }
                    _ => {
                        log!("unhandled event: {:?}", ev);
                    }
                }
            }
            grid.draw(&dsp);
            triangle.draw_with_mode(&dsp, GL::TRIANGLES);
            dsp.draw_entities();
            // schedule another requestAnimationFrame callback.
            request_animation_frame(f.borrow().as_ref().unwrap());
        }) as Box<dyn FnMut()>));
    request_animation_frame(g.borrow().as_ref().unwrap());
    Ok(())
}
/// Handle to the browser `Document`.
fn document() -> web_sys::Document {
    let win = window();
    win.document().expect("should have a document on window")
}
/// Handle to the global browser `Window`.
fn window() -> web_sys::Window {
    let maybe_window = web_sys::window();
    maybe_window.expect("no global `window` exists")
}
/// Schedule `f` to run on the browser's next animation frame.
fn request_animation_frame(f: &Closure<dyn FnMut()>) {
    let result = window().request_animation_frame(f.as_ref().unchecked_ref());
    result.expect("should register `requestAnimationFrame` OK");
}
/// Simple exported sanity check: returns the sum of `a` and `b`.
#[wasm_bindgen]
pub fn add(a: u32, b: u32) -> u32 {
    b + a
}
|
// #![deny(missing_docs)]
//! A key-value store system
#![allow(dead_code)]
// #![allow(unused)]
#[macro_use]
extern crate log;
mod backend;
mod client;
mod config;
mod error;
mod percolator;
mod raft;
mod rpc;
mod server;
/// Thread Pool
pub mod thread_pool;
pub use backend::{EngineKind, KvSled, KvStore, KvsEngine};
pub use client::{KvsClient, KvsClientBuilder};
pub use error::{KvError, KvRpcError, Result};
pub use raft::{FilePersister, KvRaftNode, Persister, RaftNode};
// #[allow(missing_docs)]
// pub(crate) use rpc::kvs_service::*;
// #[allow(missing_docs)]
// pub(crate) use rpc::raft_service::*;
pub use percolator::{DataValue, Key, LockValue, MultiStore, TimestampOracle, WriteValue};
pub use server::{KvsServer, KvsServerBuilder};
/// Convenience prelude: re-exports the crate's primary public API so that
/// consumers can `use <crate>::preclude::*;`.
///
/// NOTE(review): the name is likely a typo for "prelude", but renaming it
/// would break downstream imports.
pub mod preclude {
    pub use crate::backend::{EngineKind, KvSled, KvStore, KvsEngine};
    pub use crate::client::{KvsClient, KvsClientBuilder};
    pub use crate::error::{KvError, Result};
    pub use crate::percolator::{
        DataValue, Key, LockValue, MultiStore, TimestampOracle, WriteValue,
    };
    pub use crate::raft::{FilePersister, KvRaftNode, Persister, RaftNode};
    #[allow(missing_docs)]
    pub use crate::rpc::kvs_service::*;
    #[allow(missing_docs)]
    pub use crate::rpc::raft_service::*;
    pub use crate::server::{KvsServer, KvsServerBuilder};
}
|
pub use blake2b_rs::{Blake2b, Blake2bBuilder};
// CKB's default blake2b configuration parameters.
pub const BLAKE2B_KEY: &[u8] = &[]; // empty key: unkeyed hashing
pub const BLAKE2B_LEN: usize = 32; // digest length in bytes
pub const CKB_HASH_PERSONALIZATION: &[u8] = b"ckb-default-hash";
pub fn new_blake2b() -> Blake2b {
Blake2bBuilder::new(32)
.personal(CKB_HASH_PERSONALIZATION)
.build()
}
/// Hash `s` with CKB's default blake2b configuration into 32 bytes.
pub fn blake2b_256<T: AsRef<[u8]>>(s: T) -> [u8; 32] {
    let mut hasher = new_blake2b();
    hasher.update(s.as_ref());
    let mut digest = [0u8; 32];
    hasher.finalize(&mut digest);
    digest
}
#[test]
fn empty_blake2b() {
    // Known digest of the empty input under CKB's personalization.
    let expected = "44f4c69744d5f8c55d642062949dcae49bc4e7ef43d388c5a12f42b5633d163e";
    let digest = blake2b_256([]);
    assert_eq!(&faster_hex::hex_string(&digest).unwrap(), expected);
}
|
use crate::grammar::ast::FuncCall;
use crate::grammar::testing::TestingContext;
// Parsing must reject empty and whitespace-only input.
#[test]
fn test_empty() {
    TestingContext::with(&["", " ", "\n"]).test_all_fail(FuncCall::parse)
}
// A lone underscore is not a valid function name.
#[test]
fn test_underscore() {
    TestingContext::with(&["_()"]).test_all_fail(FuncCall::parse)
}
// Reserved keywords cannot be used as function names.
#[test]
fn test_reserved() {
    TestingContext::with(&["true()", "if()", "else()"]).test_all_fail(FuncCall::parse)
}
// FIXME: Takes too long -- ignored.
// Valid calls: simple, multi-argument and nested invocations.
#[test]
#[ignore]
fn test_calls() {
    TestingContext::with(&[
        "abc()",
        "a_bc(1, \"abc\")",
        "n0(a, b, c)",
        "xd(e)",
        "foo(bar(baz), baz(bar(), foo), bar)",
        "a(b(), c(), d())",
    ])
    .test_all_succeed(FuncCall::parse)
}
|
/// Iterator state for prime factorization: repeatedly divides `n` by
/// successive primes drawn from `primes`.
pub struct PrimeFactors {
    n: usize,               // remaining value still to be factored
    last_prime: usize,      // current trial divisor
    primes: primal::Primes, // endless stream of primes (2, 3, 5, ...)
}
impl Iterator for PrimeFactors {
    type Item = usize;
    /// Yield the next prime factor (with multiplicity), smallest first;
    /// terminates once the remaining value is reduced to 1 (or started at 0).
    fn next(&mut self) -> Option<usize> {
        if self.n <= 1 {
            return None;
        }
        loop {
            if self.n % self.last_prime == 0 {
                self.n /= self.last_prime;
                return Some(self.last_prime);
            }
            // Current divisor exhausted: advance to the next prime.
            self.last_prime = self.primes.next().expect("unable to get next prime!");
        }
    }
}
/// Iterator over the prime factorization of `n`, in non-decreasing order.
pub fn factors(n: usize) -> PrimeFactors {
    PrimeFactors {
        primes: primal::Primes::all(),
        last_prime: 2,
        n,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Collect the full factorization for easy comparison; also implicitly
    // checks that the iterator terminates.
    fn all_factors(n: usize) -> Vec<usize> {
        factors(n).collect()
    }
    #[test]
    fn test_fib() {
        assert_eq!(all_factors(13195), vec![5, 7, 13, 29]);
    }
    #[test]
    fn test_factor_6() {
        assert_eq!(all_factors(6), vec![2, 3]);
    }
    #[test]
    fn test_factor_12() {
        assert_eq!(all_factors(12), vec![2, 2, 3]);
    }
}
|
/// Outcome of a fee-currency swap attempt (see `CurrencySwap::swap`).
// NOTE(review): variant semantics are inferred from their names only —
// confirm against the implementors of `CurrencySwap`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PaymentSwapResult {
    Native,
    Swapped,
    Transferred,
}
/// Hook for a runtime to cover `fee` on behalf of `who`, possibly by
/// swapping from another currency.
pub trait CurrencySwap<AccountId, Balance> {
    /// Attempt the swap; reports how the fee was covered, or a dispatch
    /// error on failure.
    fn swap(who: &AccountId, fee: Balance) -> Result<PaymentSwapResult, frame_support::sp_runtime::DispatchError>;
}
|
use std::f64::consts::PI;
use std::f64;
use rand::{thread_rng, Rng};
use piston_window::types::Color;
use piston_window::{RenderArgs, UpdateArgs};
/// Movement state: either moving with scalar speed `scala`, or stationary.
pub enum Speed {
    Go { scala: f64 },
    None,
}
/// Window side / spawn location marker for an object.
pub enum Side {
    None,
    Center,
    Up,
    Down,
    Right,
    Left,
}
/// Travel direction expressed as an angle `theta` in multiples of PI
/// radians, with its sine and cosine precomputed.
pub struct Arrow {
    theta: f64, // angle, in units of PI
    sin: f64,   // sin(PI * theta)
    cos: f64,   // cos(PI * theta)
}
impl Arrow {
    fn new(theta: f64) -> Self {
        let radians = PI * theta;
        Arrow {
            theta,
            sin: radians.sin(),
            cos: radians.cos(),
        }
    }
    /// Current angle, in multiples of PI.
    pub fn show_theta(&self) -> f64 {
        self.theta
    }
}
/// A moving rectangular object bouncing inside the window.
pub struct Object {
    background: (u32, u32),        // window size (width, height) in pixels
    current_speed: Speed,
    pub current_state: (f64, f64), // current position (x, y)
    pub size: (f64, f64),          // width and height of the object
    spawn: Side,                   // side of the window the object spawned on
    pub arrow: Arrow,              // direction of travel
    pub color: Color,              // RGBA render color
}
impl Object {
    /// New stationary object of the given `width`/`height` at the origin,
    /// fully transparent, with no spawn side assigned yet.
    pub fn new(width: f64, height: f64) -> Self {
        Object {
            background: (0, 0),
            current_state: (0.0, 0.0),
            current_speed: Speed::None,
            size: (width, height),
            spawn: Side::None,
            arrow: Arrow::new(0.0),
            color: [0.0, 0.0, 0.0, 0.0],
        }
    }
    /// Record the window dimensions used later for wall detection.
    pub fn set_place(&mut self, r: &RenderArgs) {
        self.background = (r.width, r.height);
    }
    /// Start moving with scalar speed `scala` (units per second via `dt`).
    pub fn set_speed(&mut self, scala: f64) {
        self.current_speed = Speed::Go { scala: scala };
    }
    /// Place the object on side `pos` of the window at a random offset.
    ///
    /// Panics with "system error" when `pos` is `Side::None`.
    pub fn set_pos(&mut self, r: &RenderArgs, pos: Side) {
        // gen_range(low, high): two-argument rand 0.x-style API.
        let position_x = thread_rng().gen_range(0.0, r.width as f64 - self.size.0);
        let position_y = thread_rng().gen_range(0.0, r.height as f64 - self.size.1);
        self.current_state = match pos {
            Side::Up => (position_x, 0.0),
            Side::Right => (r.width as f64 - self.size.0, position_y),
            Side::Down => (position_x, r.height as f64 - self.size.1),
            Side::Left => (0.0, position_y),
            Side::Center => (r.width as f64 / 2.0, r.height as f64 / 2.0),
            _ => panic!("system error"),
        };
        self.spawn = pos;
    }
    pub fn set_color(&mut self, color: Color) {
        self.color = color;
    }
    // Internal position setter used to roll back an out-of-bounds move.
    fn inner_set_pos(&mut self, pos: (f64, f64)) {
        self.current_state = pos;
    }
    /// Set the travel direction; `theta` is in multiples of PI (see `Arrow`).
    pub fn arrow_set(&mut self, theta: f64) {
        self.arrow = Arrow::new(theta);
    }
    /// Pick a random direction pointing inward from the spawn side.
    ///
    /// Panics with "system error" when no spawn side was set.
    pub fn random_arrow_set(&mut self) {
        let theta = match self.spawn {
            Side::Up => thread_rng().gen_range(0.0, 1.0),
            Side::Right => thread_rng().gen_range(0.5, 1.5),
            Side::Down => thread_rng().gen_range(1.0, 2.0),
            Side::Left => thread_rng().gen_range(-0.5, 0.5),
            Side::Center => thread_rng().gen_range(0.0, 2.0),
            _ => panic!("system error"),
        };
        self.arrow = Arrow::new(theta);
    }
    /// Advance the position by speed * direction * dt; if that leaves the
    /// window, reflect the direction and restore the previous position.
    pub fn update(&mut self, args: &UpdateArgs) {
        let before_pos = self.current_state;
        match self.current_speed {
            Speed::Go { scala } => {
                self.current_state.0 += scala * self.arrow.cos * args.dt;
                self.current_state.1 += scala * self.arrow.sin * args.dt;
            }
            Speed::None => {}
        };
        if !self.is_wall() {
            self.collide();
            self.inner_set_pos(before_pos);
        }
    }
    /// Returns true while the object is fully INSIDE the window bounds.
    /// NOTE(review): despite the name, `true` means "not touching a wall".
    pub fn is_wall(&mut self) -> bool {
        let available_x = self.background.0 as f64 - self.size.0;
        let available_y = self.background.1 as f64 - self.size.1;
        between(0.0, available_x, self.current_state.0) &&
        between(0.0, available_y, self.current_state.1)
    }
    /// Reflect the travel direction after leaving the bounds: theta -> 1 - theta
    /// for a vertical wall (x out of range), theta -> 2 - theta for a horizontal
    /// wall (y out of range), then wrap the result back into [0, 2].
    ///
    /// NOTE(review): the corner case (both out of range) computes
    /// 2*theta - theta = theta, i.e. the direction is unchanged — this looks
    /// like a bug that could leave an object stuck in a corner; confirm.
    ///
    /// Panics with "system error" if called while still inside the bounds.
    pub fn collide(&mut self) {
        let available_x = self.background.0 as f64 - self.size.0;
        let available_y = self.background.1 as f64 - self.size.1;
        let before_theta = self.arrow.theta;
        let plane_vec = match (between(0.0, available_x, self.current_state.0),
            between(0.0, available_y, self.current_state.1)) {
            (false, true) => 1.0,
            (true, false) => 2.0,
            (false, false) => 2.0 * before_theta,
            (true, true) => {
                panic!("system error");
            }
        };
        // Normalize the reflected angle into the [0, 2] range.
        let mut result = plane_vec - before_theta;
        loop {
            if result < 0.0 {
                result += 2.0;
                continue;
            } else if result > 2.0 {
                result -= 2.0;
                continue;
            } else {
                break;
            }
        }
        self.arrow = Arrow::new(result);
    }
    /// Axis-aligned bounding-box overlap test against `other`; the sign of
    /// the position delta decides whose size bounds the comparison.
    pub fn is_hit(&self, other: &Object) -> bool {
        let (result_x, result_y) = (self.current_state.0 - other.current_state.0,
            self.current_state.1 - other.current_state.1);
        match (result_x < 0.0, result_y < 0.0) {
            (false, false) => result_x <= other.size.0 && result_y <= other.size.1,
            (true, false) => result_x.abs() <= self.size.0 && result_y <= other.size.1,
            (false, true) => result_x <= other.size.0 && result_y.abs() <= self.size.1,
            (true, true) => result_x.abs() <= self.size.0 && result_y.abs() <= self.size.1,
        }
    }
}
/// True when `target` lies in the closed interval [`x`, `y`].
fn between(x: f64, y: f64, target: f64) -> bool {
    (x..=y).contains(&target)
}
|
extern crate glutin_window;
extern crate graphics;
extern crate opengl_graphics;
use crate::{BODY_PIXEL_SIZE, GRID_SIDE, WINDOW_SIDE, LAST_UNTAGGED_DISPLAY_LENGTH};
use crate::grid::Position;
use glutin_window::GlutinWindow as Window;
use graphics::*;
use opengl_graphics::{GlGraphics, OpenGL};
use piston::event_loop::{EventSettings, Events};
use piston::input::RenderArgs;
use piston::window::WindowSettings;
// Render palette (RGBA).  WHITE is the clear color (actually light grey);
// RED marks tagged bodies (see `Graphics::render`); YELLOW/ORANGE are picked
// by comparing the third tuple field against LAST_UNTAGGED_DISPLAY_LENGTH.
const WHITE: [f32; 4] = [0.75, 0.75, 0.75, 1.0];
const RED: [f32; 4] = [1.0, 0.0, 0.0, 0.8];
const YELLOW: [f32; 4] = [1.0, 1.0, 0.0, 0.8];
const ORANGE: [f32; 4] = [1.0, 0.5, 0.0, 0.8];
// (position, is-tagged, counter compared against LAST_UNTAGGED_DISPLAY_LENGTH
// — presumably ticks since last tagged; confirm with the producer).
pub type RenderObject = (Position, bool, usize);
/// Wraps the OpenGL backend plus the grid-to-pixel scale factor.
pub struct Graphics {
    gl: GlGraphics,
    scale: f64, // pixels per grid unit
}
impl Graphics {
    /// Draw each object as a filled circle: RED when its tagged flag is set,
    /// otherwise YELLOW or ORANGE depending on whether its counter exceeds
    /// `LAST_UNTAGGED_DISPLAY_LENGTH`.
    pub fn render(&mut self, args: &RenderArgs, objects: &Vec<RenderObject>) {
        let circle = ellipse::circle(0.0, 0.0, BODY_PIXEL_SIZE as f64 / 2.0 as f64);
        let scale = self.scale;
        self.gl.draw(args.viewport(), |c, gl| {
            clear(WHITE, gl);
            for obj in objects {
                let color = if obj.1 {
                    RED
                } else if obj.2 > LAST_UNTAGGED_DISPLAY_LENGTH {
                    YELLOW
                } else {
                    ORANGE
                };
                let transform = c
                    .transform
                    .trans(obj.0.x as f64 * scale, obj.0.y as f64 * scale);
                ellipse(color, circle, transform, gl);
            }
        });
    }
}
/// Bundles the window, renderer and event loop for the simulation.
pub struct Display {
    pub window: Window,
    pub graphics: Graphics,
    pub events: Events,
}
impl Display {
    /// Build the window, GL context and event loop for the "Tag!" window.
    pub fn new() -> Display {
        let opengl = OpenGL::V3_2;
        let window: Window = WindowSettings::new("Tag!", [WINDOW_SIDE, WINDOW_SIDE])
            .graphics_api(opengl)
            .exit_on_esc(true)
            .build()
            .unwrap();
        let graphics = Graphics {
            gl: GlGraphics::new(opengl),
            // pixels per grid unit
            scale: WINDOW_SIDE / GRID_SIDE as f64,
        };
        // NOTE(review): both `bench_mode` and `lazy` are enabled; these pull
        // in opposite directions (run-as-fast-as-possible vs. wait-for-input)
        // — confirm the intended combination.
        let settings = EventSettings {
            max_fps: 120,
            ups: 120,
            ups_reset: 0,
            swap_buffers: true,
            bench_mode: true,
            lazy: true,
        };
        let events = Events::new(settings);
        Display {
            window,
            graphics,
            events,
        }
    }
}
|
include!(concat!(env!("OUT_DIR"), "/BUILDSCRIPT_GENERATED_use.rs"));
include!(concat!(env!("OUT_DIR"), "/BUILDSCRIPT_GENERATED_only_adapters.rs"));
include!(concat!(env!("OUT_DIR"), "/BUILDSCRIPT_GENERATED_fmt_adapters.rs")); |
use vector::Vector3;
/// A ray: origin point plus a direction vector (not necessarily normalized).
#[derive(Debug, Clone, Copy)]
pub struct Ray {
    pub origin: Vector3<f64>,
    pub direction: Vector3<f64>,
}
/// Intersection record for a ray hit.
#[derive(Debug, Clone, Copy)]
pub struct HitInfo<'a> {
    pub distance: f64,       // distance along the ray to the hit point
    pub point: Vector3<f64>, // hit position
    pub normal: Vector3<f64>, // surface normal at the hit point
    pub material: &'a str,   // material name
}
|
pub(crate) mod numeric_constant;
pub(crate) mod string_constant;
use apllodb_shared_components::{ApllodbResult, SqlValue};
use apllodb_sql_parser::apllodb_ast;
use crate::ast_translator::AstTranslator;
impl AstTranslator {
    /// Translate an AST constant literal into a runtime `SqlValue`.
    pub(crate) fn constant(ast_constant: apllodb_ast::Constant) -> ApllodbResult<SqlValue> {
        match ast_constant {
            apllodb_ast::Constant::NullVariant => Ok(SqlValue::Null),
            apllodb_ast::Constant::NumericConstantVariant(nc) => Self::numeric_constant(nc),
            apllodb_ast::Constant::StringConstantVariant(sc) => Ok(Self::string_constant(sc)),
        }
    }
}
|
// svd2rust-generated reader/writer type aliases for the RCC_PLL3CSGR
// register (PLL3 spread-spectrum configuration).  Prefer regenerating from
// the SVD over hand-editing.
#[doc = "Register `RCC_PLL3CSGR` reader"]
pub type R = crate::R<RCC_PLL3CSGR_SPEC>;
#[doc = "Register `RCC_PLL3CSGR` writer"]
pub type W = crate::W<RCC_PLL3CSGR_SPEC>;
#[doc = "Field `MOD_PER` reader - MOD_PER"]
pub type MOD_PER_R = crate::FieldReader<u16>;
#[doc = "Field `MOD_PER` writer - MOD_PER"]
pub type MOD_PER_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 13, O, u16>;
#[doc = "Field `TPDFN_DIS` reader - TPDFN_DIS"]
pub type TPDFN_DIS_R = crate::BitReader;
#[doc = "Field `TPDFN_DIS` writer - TPDFN_DIS"]
pub type TPDFN_DIS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RPDFN_DIS` reader - RPDFN_DIS"]
pub type RPDFN_DIS_R = crate::BitReader;
#[doc = "Field `RPDFN_DIS` writer - RPDFN_DIS"]
pub type RPDFN_DIS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SSCG_MODE` reader - SSCG_MODE"]
pub type SSCG_MODE_R = crate::BitReader;
#[doc = "Field `SSCG_MODE` writer - SSCG_MODE"]
pub type SSCG_MODE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `INC_STEP` reader - INC_STEP"]
pub type INC_STEP_R = crate::FieldReader<u16>;
#[doc = "Field `INC_STEP` writer - INC_STEP"]
pub type INC_STEP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 15, O, u16>;
// svd2rust-generated field readers; offsets/masks come from the SVD.
impl R {
    #[doc = "Bits 0:12 - MOD_PER"]
    #[inline(always)]
    pub fn mod_per(&self) -> MOD_PER_R {
        // 0x1fff masks the low 13 bits
        MOD_PER_R::new((self.bits & 0x1fff) as u16)
    }
    #[doc = "Bit 13 - TPDFN_DIS"]
    #[inline(always)]
    pub fn tpdfn_dis(&self) -> TPDFN_DIS_R {
        TPDFN_DIS_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - RPDFN_DIS"]
    #[inline(always)]
    pub fn rpdfn_dis(&self) -> RPDFN_DIS_R {
        RPDFN_DIS_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - SSCG_MODE"]
    #[inline(always)]
    pub fn sscg_mode(&self) -> SSCG_MODE_R {
        SSCG_MODE_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bits 16:30 - INC_STEP"]
    #[inline(always)]
    pub fn inc_step(&self) -> INC_STEP_R {
        // 0x7fff masks 15 bits starting at bit 16
        INC_STEP_R::new(((self.bits >> 16) & 0x7fff) as u16)
    }
}
// svd2rust-generated field-writer proxies; each method returns a typed
// writer positioned at the field's bit offset.
impl W {
    #[doc = "Bits 0:12 - MOD_PER"]
    #[inline(always)]
    #[must_use]
    pub fn mod_per(&mut self) -> MOD_PER_W<RCC_PLL3CSGR_SPEC, 0> {
        MOD_PER_W::new(self)
    }
    #[doc = "Bit 13 - TPDFN_DIS"]
    #[inline(always)]
    #[must_use]
    pub fn tpdfn_dis(&mut self) -> TPDFN_DIS_W<RCC_PLL3CSGR_SPEC, 13> {
        TPDFN_DIS_W::new(self)
    }
    #[doc = "Bit 14 - RPDFN_DIS"]
    #[inline(always)]
    #[must_use]
    pub fn rpdfn_dis(&mut self) -> RPDFN_DIS_W<RCC_PLL3CSGR_SPEC, 14> {
        RPDFN_DIS_W::new(self)
    }
    #[doc = "Bit 15 - SSCG_MODE"]
    #[inline(always)]
    #[must_use]
    pub fn sscg_mode(&mut self) -> SSCG_MODE_W<RCC_PLL3CSGR_SPEC, 15> {
        SSCG_MODE_W::new(self)
    }
    #[doc = "Bits 16:30 - INC_STEP"]
    #[inline(always)]
    #[must_use]
    pub fn inc_step(&mut self) -> INC_STEP_W<RCC_PLL3CSGR_SPEC, 16> {
        INC_STEP_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // svd2rust marks raw writes unsafe: the caller must supply a bit
        // pattern that is valid for this register.
        self.bits = bits;
        self
    }
}
// svd2rust-generated register spec and marker-trait impls.
#[doc = "This register is used to configure the PLL3.It is not recommended to change the content of this register when the PLL3 is enabled (PLLON = ). Refer to Section: Using the PLLs in spread spectrum mode for details. If TZEN = MCKPROT = , this register can only be modified in secure mode.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rcc_pll3csgr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rcc_pll3csgr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RCC_PLL3CSGR_SPEC;
impl crate::RegisterSpec for RCC_PLL3CSGR_SPEC {
    // 32-bit register
    type Ux = u32;
}
#[doc = "`read()` method returns [`rcc_pll3csgr::R`](R) reader structure"]
impl crate::Readable for RCC_PLL3CSGR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rcc_pll3csgr::W`](W) writer structure"]
impl crate::Writable for RCC_PLL3CSGR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RCC_PLL3CSGR to value 0"]
impl crate::Resettable for RCC_PLL3CSGR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use anyhow::Result;
use regex::Regex;
use std::collections::HashMap;
/// Advent of Code 2020 day 4: count valid passports in `day4_input.txt`.
///
/// Part 1 counts passports with all seven required fields present (`cid` is
/// optional); part 2 additionally validates each field's value.  Malformed
/// numeric values now make a passport invalid instead of panicking (the
/// original used `.unwrap()` on `parse`).
///
/// # Errors
/// Returns an error only if one of the regexes fails to compile.
pub fn day4() -> Result<()> {
    let passports = include_str!("day4_input.txt");
    // Passports are separated by blank lines.
    let split_passports = Regex::new("\n\n")?;
    let re = Regex::new("(byr|iyr|eyr|hgt|hcl|ecl|pid):([#0-9a-zA-Z]+)")?;
    const REQUIRED: [&str; 7] = ["byr", "iyr", "eyr", "hgt", "hcl", "ecl", "pid"];
    // Part 1: all required fields present.
    let part1_validation =
        |fields: HashMap<&str, &str>| REQUIRED.iter().all(|&key| fields.contains_key(key));
    // Part 2: required fields present AND each value within its legal range.
    let part2_validation = |fields: HashMap<&str, &str>| {
        if !REQUIRED.iter().all(|&key| fields.contains_key(key)) {
            return false;
        }
        // Year fields: numeric and within [lo, hi].
        let year_ok = |key: &str, lo: u32, hi: u32| {
            fields[key]
                .parse::<u32>()
                .map_or(false, |y| (lo..=hi).contains(&y))
        };
        // Height: 150-193 cm or 59-76 in.
        let hgt = fields["hgt"];
        let hgt_ok = if let Some(cm) = hgt.strip_suffix("cm") {
            cm.parse::<u32>().map_or(false, |v| (150..=193).contains(&v))
        } else if let Some(inch) = hgt.strip_suffix("in") {
            inch.parse::<u32>().map_or(false, |v| (59..=76).contains(&v))
        } else {
            false
        };
        // Hair color: '#' followed by exactly six lowercase hex digits
        // (the puzzle spec allows lowercase only).
        let hcl_ok = fields["hcl"].strip_prefix('#').map_or(false, |hex| {
            hex.chars().count() == 6
                && hex.chars().all(|c| matches!(c, '0'..='9' | 'a'..='f'))
        });
        let ecl_ok = ["amb", "blu", "brn", "gry", "grn", "hzl", "oth"].contains(&fields["ecl"]);
        // Passport id: exactly nine decimal digits.
        let pid = fields["pid"];
        let pid_ok = pid.len() == 9 && pid.chars().all(|c| c.is_ascii_digit());
        year_ok("byr", 1920, 2002)
            && year_ok("iyr", 2010, 2020)
            && year_ok("eyr", 2020, 2030)
            && hgt_ok
            && hcl_ok
            && ecl_ok
            && pid_ok
    };
    // Split the input into passports, collect key:value pairs, and count
    // those passing `validation`.
    let process_passports = |validation: fn(HashMap<&str, &str>) -> bool| {
        let num_valid_passports = split_passports
            .split(passports)
            .filter(|passport| {
                let fields: HashMap<&str, &str> = passport
                    .split_whitespace()
                    .filter_map(|kv| {
                        re.captures(kv).map(|caps| {
                            (
                                caps.get(1).map(|m| m.as_str()).unwrap(),
                                caps.get(2).map(|m| m.as_str()).unwrap(),
                            )
                        })
                    })
                    .collect();
                validation(fields)
            })
            .count();
        println!("num valid passports: {}", num_valid_passports);
    };
    process_passports(part1_validation);
    process_passports(part2_validation);
    Ok(())
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.