text stringlengths 8 4.13M |
|---|
use crate::buf_reader::BufIo;
use crate::AsyncRead;
use futures_util::ready;
use http::header::{HeaderName, HeaderValue};
use std::fmt::Debug;
use std::io;
use std::io::Write;
use std::pin::Pin;
use std::task::{Context, Poll};
// Request headers today vary in size from ~200 bytes to over 2KB.
// As applications use more cookies and user agents expand features,
// typical header sizes of 700-800 bytes is common.
// http://dev.chromium.org/spdy/spdy-whitepaper
/// Size of buffer reading response body into.
///
/// Also used by `poll_for_crlfcrlf` below as the hard upper limit on how
/// many bytes a request/response head may occupy before parsing fails.
pub(crate) const READ_BUF_INIT_SIZE: usize = 16_384;
/// Write an http/1.1 request to a buffer.
///
/// Serializes the request line, a synthesized `host:` header (when the
/// caller did not provide one), and all request headers into `buf`.
/// Returns the number of bytes written.
#[allow(clippy::write_with_newline)]
pub fn write_http1x_req(req: &http::Request<()>, buf: &mut [u8]) -> Result<usize, io::Error> {
    let mut cursor = io::Cursor::new(buf);

    // Request target: path + query, defaulting to "/".
    let target = req
        .uri()
        .path_and_query()
        .map(|pq| pq.as_str())
        .unwrap_or("/");

    let version = match req.version() {
        http::Version::HTTP_10 => "1.0",
        http::Version::HTTP_11 => "1.1",
        _ => panic!("Unsupported http version: {:?}", req.version()),
    };

    write!(cursor, "{} {} HTTP/{}\r\n", req.method(), target, version)?;

    // Synthesize a `host:` header from the URI if the caller set none.
    // (http crate header names are always lowercase.)
    let has_host = req.headers().iter().any(|(name, _)| name.as_str() == "host");
    if !has_host {
        let default_port: u16 = match req.uri().scheme_str() {
            Some("https") => 443,
            Some("http") => 80,
            _ => 0,
        };
        // 0 means "omit the port" — either none was given or it is the
        // scheme default.
        let port = req
            .uri()
            .port_u16()
            .filter(|p| *p != default_port)
            .unwrap_or(0);
        if let Some(host) = req.uri().host() {
            write!(cursor, "host: {}", host)?;
            if port != 0 {
                write!(cursor, ":{}", port)?;
            }
            write!(cursor, "\r\n")?;
        }
    }

    // The rest of the headers, in insertion order.
    for (name, value) in req.headers() {
        write!(cursor, "{}: ", name)?;
        cursor.write_all(value.as_bytes())?;
        write!(cursor, "\r\n")?;
    }
    // Blank line terminates the head.
    write!(cursor, "\r\n")?;

    let len = cursor.position() as usize;
    let buf = cursor.into_inner();
    debug!(
        "write_http11_req: {:?}",
        String::from_utf8_lossy(&buf[0..len])
    );
    Ok(len)
}
/// Write an http/1.x response to a buffer.
///
/// Serializes the status line and all headers into `buf`; returns the
/// number of bytes written.
#[allow(clippy::write_with_newline)]
pub fn write_http1x_res(res: &http::Response<()>, buf: &mut [u8]) -> Result<usize, io::Error> {
    let mut cursor = io::Cursor::new(buf);

    let version = match res.version() {
        http::Version::HTTP_10 => "1.0",
        http::Version::HTTP_11 => "1.1",
        _ => panic!("Unsupported http version: {:?}", res.version()),
    };

    let status = res.status();
    write!(
        cursor,
        "HTTP/{} {} {}\r\n",
        version,
        status.as_u16(),
        // Non-standard status codes have no canonical reason phrase.
        status.canonical_reason().unwrap_or("Unknown")
    )?;

    for (name, value) in res.headers() {
        write!(cursor, "{}: ", name)?;
        cursor.write_all(value.as_bytes())?;
        write!(cursor, "\r\n")?;
    }
    // Blank line terminates the head.
    write!(cursor, "\r\n")?;

    let len = cursor.position() as usize;
    let buf = cursor.into_inner();
    debug!(
        "write_http11_res: {:?}",
        String::from_utf8_lossy(&buf[0..len])
    );
    Ok(len)
}
/// Map httparse's parsed minor-version digit to an `http::Version`,
/// defaulting to HTTP/1.1 for anything unknown or absent.
fn version_of(v: Option<u8>) -> http::Version {
    if let Some(minor) = v {
        if minor == 0 {
            return http::Version::HTTP_10;
        }
        if minor != 1 {
            trace!("Unknown http version ({}), assume HTTP/1.1", minor);
        }
        http::Version::HTTP_11
    } else {
        trace!("Found no http version, assume HTTP/1.1");
        http::Version::HTTP_11
    }
}
/// Attempt to parse an http/1.1 response.
///
/// Returns `Ok(None)` when the buffer holds an incomplete head and more
/// data may still arrive; once `end_of_stream` is set, a best-effort
/// parse of whatever is buffered is attempted instead.
pub fn try_parse_res(
    buf: &[u8],
    end_of_stream: bool,
) -> Result<Option<http::Response<()>>, io::Error> {
    trace!("try_parse_res: {:?}", String::from_utf8_lossy(buf));

    let mut headers = [httparse::EMPTY_HEADER; 128];
    let mut parser = httparse::Response::new(&mut headers);

    let status = parser
        .parse(buf)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;

    // Partial head + stream still open: wait for more bytes.
    if status.is_partial() && !end_of_stream {
        return Ok(None);
    }

    let mut bld = http::Response::builder().version(version_of(parser.version));
    if let Some(code) = parser.code {
        bld = bld.status(code);
    }

    for head in parser.headers.iter() {
        if head.name.is_empty() && end_of_stream {
            // ignore empty stuff if this parsing is due to end_of_stream.
            // (it's probably broken).
            trace!("Ignore empty header");
            continue;
        }
        match (
            HeaderName::from_bytes(head.name.as_bytes()),
            HeaderValue::from_bytes(head.value),
        ) {
            (Ok(name), Ok(value)) => bld = bld.header(name, value),
            // Malformed names/values are dropped rather than failing the
            // whole response.
            (Err(e), _) => debug!("Dropping bad header name: {}", e),
            (Ok(name), Err(e)) => debug!("Dropping bad header value ({}): {}", name, e),
        }
    }

    let built = bld
        .body(())
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
    debug!("try_parse_http11 success: {:?}", built);
    Ok(Some(built))
}
/// Attempt to parse an http/1.1 request.
pub fn try_parse_req(buf: &[u8], _: bool) -> Result<Option<http::Request<()>>, io::Error> {
trace!("try_parse_req: {:?}", String::from_utf8_lossy(buf));
let mut headers = [httparse::EMPTY_HEADER; 128];
let mut parser = httparse::Request::new(&mut headers);
let status = parser
.parse(&buf)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
if status.is_partial() {
return Ok(None);
}
let mut uri = http::Uri::builder();
if let Some(path) = parser.path {
uri = uri.path_and_query(path);
}
let mut bld = http::Request::builder().version(if parser.version == Some(1) {
http::Version::HTTP_11
} else {
http::Version::HTTP_10
});
let uri = uri
.build()
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
bld = bld.uri(uri);
if let Some(method) = parser.method {
bld = bld.method(method);
}
for head in parser.headers.iter() {
let name = http::header::HeaderName::from_bytes(head.name.as_bytes());
let value = http::header::HeaderValue::from_bytes(head.value);
match (name, value) {
(Ok(name), Ok(value)) => bld = bld.header(name, value),
(Err(e), _) => {
debug!("Dropping bad header name: {}", e);
}
(Ok(name), Err(e)) => {
debug!("Dropping bad header value ({}): {}", name, e);
}
}
}
let built = bld
.body(())
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
// let used_len = status.unwrap();
debug!("try_parse_http11 success: {:?}", built);
Ok(Some(built))
}
/// Helper to poll for request or response.
///
/// It looks out for \r\n\r\n, which indicates the end of the headers and body begins.
///
/// Scans the buffered bytes incrementally; when the terminator (or end of
/// stream with some data) is found, `f` is invoked exactly once with the
/// head bytes and an end-of-stream flag, and the consumed bytes are
/// discarded from the buffer.
pub fn poll_for_crlfcrlf<S, F, T>(cx: &mut Context, io: &mut BufIo<S>, f: F) -> Poll<io::Result<T>>
where
    S: AsyncRead + Unpin,
    F: FnOnce(&[u8], bool) -> T,
    T: Debug,
{
    trace!("try find header");
    const END_OF_HEADER: &[u8] = &[b'\r', b'\n', b'\r', b'\n'];
    let mut end_index = 0; // index into END_OF_HEADER (how much of it matched so far)
    let mut buf_index = 0; // index into buf (next byte to inspect)
    // first loop we use whatever is in poll_fill_buf, second loop we
    // really must read more (since we didn't get to end);
    let mut force_append = false;
    loop {
        let buf = ready!(Pin::new(&mut *io).poll_fill_buf(cx, force_append))?;
        force_append = true;
        // println!("{:?}", std::str::from_utf8(&buf));
        // buffer did not grow. that means end_of_file in underlying reader.
        // we still however might have a possible header there with a
        // redirect or some such.
        let end_of_stream = buf.len() == buf_index;
        loop {
            // Done when the full terminator matched, or the stream ended
            // with at least one buffered byte (best-effort parse).
            if end_index == END_OF_HEADER.len() || end_of_stream && buf_index > 0 {
                // we might have found the end of the request/response header
                // convert to whatever caller wants.
                let ret = f(&buf[0..buf_index], end_of_stream);
                trace!("Parsed header: {:?}", ret);
                // discard the amount used from buffer.
                Pin::new(&mut *io).consume(buf_index);
                return Ok(ret).into();
            }
            // Stream ended before any byte arrived: no head at all.
            if end_of_stream && buf_index == 0 {
                return Err(io::Error::new(
                    io::ErrorKind::UnexpectedEof,
                    "EOF and no header length".to_string(),
                ))
                .into();
            }
            // Hard cap on head size (see READ_BUF_INIT_SIZE).
            if buf_index == READ_BUF_INIT_SIZE {
                return Err(io::Error::new(
                    io::ErrorKind::InvalidData,
                    format!("No header found in {} bytes", READ_BUF_INIT_SIZE),
                ))
                .into();
            }
            if buf_index == buf.len() {
                // must fill more.
                break;
            }
            if buf[buf_index] == END_OF_HEADER[end_index] {
                end_index += 1;
            } else if end_index == 0 && buf[buf_index] == b'\n' {
                // This might be a special case of \n\n instead of the
                // correct \r\n\r\n. We still expect the last \n.
                end_index = 3;
            } else if end_index > 0 {
                // Mismatch mid-pattern: restart the match.
                // NOTE(review): the current byte is not re-examined as a
                // potential new '\r' start after a reset — confirm inputs
                // like "\r\n\r\r\n\r\n" are handled as intended.
                end_index = 0;
            }
            buf_index += 1;
        }
    }
}
|
use super::{data_structure::*, *};
use syntax::ast;
// krate is mutable, because we will fill in its NodeId
/// First expansion pass over a whole crate.
///
/// NOTE(review): work in progress — only step 1 (binding collection via
/// `expand_item`) is present; the function currently ends in `panic!()`
/// and the destructured `attrs`/`span` are unused.
pub fn expand_crate<'a>(krate: &mut LangRust, sess: &'a ParseSess) -> LangHIR {
    let mut ribcage = ModRibcage {};
    let LangRust {
        module,
        attrs,
        span,
    } = krate;
    // Step 1: Collect all bindings
    for item in module.items.iter() {
        expand_item(item, sess, &mut ribcage);
    }
    // Step 2: Expand all macro uses
    // Step 3: Do all the remaining work: generate HIR
    panic!();
}
/// Register one item's binding in the module ribcage and store the
/// resulting id back into the item.
///
/// NOTE(review): several arms (`ForeignMod`, `Ty`, `Enum`) end in a
/// statement or empty block rather than a NodeId-typed value, unlike the
/// other arms — confirm the intended value of each arm.
pub fn expand_item<'a>(item: &mut ast::Item, sess: &'a ParseSess, ribcage: &mut ModRibcage) {
    use syntax::ast::ItemKind::*;
    item.id = match item.node {
        ExternCrate(o_name) => ribcage.add_extern_crate(item.ident, item.vis, o_name),
        Use(p_use_tree) => ribcage.add_use_tree(p_use_tree),
        Static(_p_ty, mutability, _p_expr) => ribcage.add_static(item.ident, mutability),
        Const(_p_ty, _p_expr) => ribcage.add_const(item.ident),
        Fn(_p_fn_declP, _fn_header, _generics, _p_block) => ribcage.add_function(item.ident),
        Mod(_mud) => ribcage.add_mod(item.ident),
        ForeignMod(foreign_mod) => {
            ribcage.add_foreign_mod(&foreign_mod);
        }
        GlobalAsm(p_global_asm) => ast::DUMMY_NODE_ID,
        Ty(p_ty, generics) => {
            ribcage.add_type(item.ident);
        }
        Existential(generics_bounds, generics) => {
            // FIXME: currently we ignore existensial
            ast::DUMMY_NODE_ID
        }
        Enum(_enum_def, _generics) => {}
        Struct(_variant_data, _generics) => ribcage.add_struct(item.ident),
        Union(_variant_data, _generics) => ribcage.add_union(item.ident),
        Trait(_is_auto, _unsafety, _generics, _generics_bounds, _vec_trait_item) => {
            ribcage.add_trait(item.ident)
        }
        TraitAlias(_generics, _generic_bounds) => ribcage.add_trait_alias(item.ident),
        Impl(
            _unsafety,
            _impl_polarity,
            _defaultness,
            _generics,
            _o_trait_ref,
            _p_ty,
            _vec_impl_item,
        ) => ribcage.add_impl(item.ident),
        Mac(_mac) => {
            // defer macro expansion to next phase to permit macro-use before macro-def
            // eg:
            // foo!{}
            // macro!(foo ...)
            ast::DUMMY_NODE_ID
        }
        MacroDef(_macro_def) => ribcage.add_macro_def(item.ident),
    }
}
|
use derive_more::{Deref, DerefMut};
use serde::ser::{Serialize, SerializeSeq, Serializer};
use serde_sexpr::to_string;
/// Marker element; its `Serialize` impl below emits the bare string "foo".
#[derive(Clone, Debug)]
pub struct Foo;
/// Newtype list of `Foo`s; derefs to the inner `Vec` for `push`/`iter`.
#[derive(Default, Clone, Debug, Deref, DerefMut)]
pub struct Bar(Vec<Foo>);
impl Serialize for Foo {
    // Every `Foo` serializes as the constant string "foo".
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str("foo")
    }
}
impl Serialize for Bar {
    // Serialize as a sequence tagged with a leading "net" element,
    // followed by every contained `Foo`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // One extra slot for the leading tag.
        let mut seq = serializer.serialize_seq(Some(self.len() + 1))?;
        seq.serialize_element("net")?;
        for item in self.iter() {
            seq.serialize_element(item)?;
        }
        seq.end()
    }
}
fn main() {
    // Build a Bar with two Foos and print both representations.
    let mut bar = Bar::default();
    for _ in 0..2 {
        bar.push(Foo);
    }
    println!("debug: {:?}", bar);
    println!("sexpr: {}", to_string(&bar).unwrap());
}
|
pub mod method_call;
pub mod notification;
|
use std::ops::Bound;
use chrono::{DateTime, Utc};
type BoundedDatetimeTuple = (Bound<DateTime<Utc>>, Bound<DateTime<Utc>>);
/// (De)serialize a `(Bound<DateTime<Utc>>, Bound<DateTime<Utc>>)` pair as a
/// two-element tuple of optional unix-second timestamps, normalized to a
/// half-open `[lt, rt)` range (`null` = unbounded).
pub(crate) mod ts_seconds_bound_tuple {
    use std::fmt;
    use std::ops::Bound;
    use super::BoundedDatetimeTuple;
    use chrono::{DateTime, NaiveDateTime, Utc};
    use serde::{de, ser};
    pub(crate) fn serialize<S>(
        value: &(Bound<DateTime<Utc>>, Bound<DateTime<Utc>>),
        serializer: S,
    ) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        use ser::SerializeTuple;
        let (lt, rt) = value;
        let mut tup = serializer.serialize_tuple(2)?;
        match lt {
            // Inclusive lower bound is already in wire form.
            Bound::Included(lt) => {
                let val = lt.timestamp();
                tup.serialize_element(&val)?;
            }
            Bound::Excluded(lt) => {
                // Adjusting the range to '[lt, rt)'
                // (exclusive lower bound at second granularity -> next second)
                let val = lt.timestamp() + 1;
                tup.serialize_element(&val)?;
            }
            Bound::Unbounded => {
                // Unbounded is encoded as null.
                let val: Option<i64> = None;
                tup.serialize_element(&val)?;
            }
        }
        match rt {
            Bound::Included(rt) => {
                // Adjusting the range to '[lt, rt)'
                // NOTE(review): converting an inclusive upper bound to an
                // exclusive one at second granularity would normally ADD a
                // second; the `- 1` here narrows the range instead — confirm
                // this is intentional.
                let val = rt.timestamp() - 1;
                tup.serialize_element(&val)?;
            }
            // Exclusive upper bound is already in wire form.
            Bound::Excluded(rt) => {
                let val = rt.timestamp();
                tup.serialize_element(&val)?;
            }
            Bound::Unbounded => {
                let val: Option<i64> = None;
                tup.serialize_element(&val)?;
            }
        }
        tup.end()
    }
    pub fn deserialize<'de, D>(d: D) -> Result<BoundedDatetimeTuple, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        d.deserialize_tuple(2, TupleSecondsTimestampVisitor)
    }
    struct TupleSecondsTimestampVisitor;
    impl<'de> de::Visitor<'de> for TupleSecondsTimestampVisitor {
        type Value = BoundedDatetimeTuple;
        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("a [lt, rt) range of unix time (seconds) or null (unbounded)")
        }
        /// Deserialize a tuple of two Bounded DateTime<Utc>
        ///
        /// Canonical in-memory form: `(Included(lt), Excluded(rt))`, with
        /// `null` elements mapping to `Unbounded`.
        fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
        where
            A: de::SeqAccess<'de>,
        {
            let lt = match seq.next_element()? {
                Some(Some(val)) => {
                    let dt = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(val, 0), Utc);
                    Bound::Included(dt)
                }
                Some(None) => Bound::Unbounded,
                None => return Err(de::Error::invalid_length(1, &self)),
            };
            let rt = match seq.next_element()? {
                Some(Some(val)) => {
                    let dt = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(val, 0), Utc);
                    Bound::Excluded(dt)
                }
                Some(None) => Bound::Unbounded,
                None => return Err(de::Error::invalid_length(2, &self)),
            };
            Ok((lt, rt))
        }
    }
}
///////////////////////////////////////////////////////////////////////////////
/// Optional variant of `ts_seconds_bound_tuple`: `None` (de)serializes as a
/// serde "none", anything else delegates to the non-optional codec.
pub(crate) mod ts_seconds_option_bound_tuple {
    use serde::{de, ser};
    use std::fmt;
    use super::BoundedDatetimeTuple;
    pub(crate) fn serialize<S>(
        option: &Option<BoundedDatetimeTuple>,
        serializer: S,
    ) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        if let Some(range) = option {
            super::ts_seconds_bound_tuple::serialize(range, serializer)
        } else {
            serializer.serialize_none()
        }
    }
    pub fn deserialize<'de, D>(d: D) -> Result<Option<BoundedDatetimeTuple>, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        d.deserialize_option(TupleSecondsTimestampVisitor)
    }
    pub struct TupleSecondsTimestampVisitor;
    impl<'de> de::Visitor<'de> for TupleSecondsTimestampVisitor {
        type Value = Option<BoundedDatetimeTuple>;
        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter
                .write_str("none or a [lt, rt) range of unix time (seconds) or null (unbounded)")
        }
        fn visit_none<E>(self) -> Result<Self::Value, E>
        where
            E: de::Error,
        {
            Ok(None)
        }
        fn visit_some<D>(self, d: D) -> Result<Self::Value, D::Error>
        where
            D: de::Deserializer<'de>,
        {
            super::ts_seconds_bound_tuple::deserialize(d).map(Some)
        }
    }
}
//////////////////////////////////////////////////////////////////////////////
/// Deserialize a `chrono::Duration` from an unsigned integer number of
/// seconds.
pub(crate) mod duration_seconds {
    use std::fmt;
    use chrono::Duration;
    use serde::de;
    pub fn deserialize<'de, D>(d: D) -> Result<Duration, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        d.deserialize_u64(SecondsDurationVisitor)
    }
    pub struct SecondsDurationVisitor;
    impl<'de> de::Visitor<'de> for SecondsDurationVisitor {
        type Value = Duration;
        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("duration (seconds)")
        }
        fn visit_u64<E>(self, seconds: u64) -> Result<Self::Value, E>
        where
            E: de::Error,
        {
            // Guard the u64 -> i64 narrowing: a bare `as i64` would wrap
            // values above i64::MAX into a bogus negative duration.
            if seconds > i64::MAX as u64 {
                return Err(E::invalid_value(
                    de::Unexpected::Unsigned(seconds),
                    &"a number of seconds that fits in an i64",
                ));
            }
            Ok(Duration::seconds(seconds as i64))
        }
    }
}
/// (De)serialize a list of `(Bound<i64>, Bound<i64>)` millisecond ranges as
/// a sequence of `(Option<i64>, Option<i64>)` pairs (`null` = unbounded).
pub(crate) mod milliseconds_bound_tuples {
    use std::fmt;
    use std::ops::Bound;
    use serde::{de, ser};
    pub(crate) fn serialize<S>(
        value: &[(Bound<i64>, Bound<i64>)],
        serializer: S,
    ) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        use ser::SerializeSeq;
        let mut seq = serializer.serialize_seq(Some(value.len()))?;
        for (lt, rt) in value {
            // Both Included and Excluded serialize to the raw value: the
            // inclusive/exclusive distinction is NOT preserved on the wire.
            let lt = match lt {
                Bound::Included(lt) | Bound::Excluded(lt) => Some(lt),
                Bound::Unbounded => None,
            };
            let rt = match rt {
                Bound::Included(rt) | Bound::Excluded(rt) => Some(rt),
                Bound::Unbounded => None,
            };
            seq.serialize_element(&(lt, rt))?;
        }
        seq.end()
    }
    type BoundedI64Tuple = (Bound<i64>, Bound<i64>);
    pub(crate) fn deserialize<'de, D>(deserializer: D) -> Result<Vec<BoundedI64Tuple>, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        pub struct MillisecondsBoundTupleVisitor;
        impl<'de> de::Visitor<'de> for MillisecondsBoundTupleVisitor {
            type Value = Vec<(Bound<i64>, Bound<i64>)>;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a list of [lt, rt) range of integer milliseconds")
            }
            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: de::SeqAccess<'de>,
            {
                let mut elements: Self::Value = vec![];
                // Every accepted pair becomes [Included(lt), Excluded(rt)).
                // NOTE(review): the while-let stops silently at the first
                // element containing a null bound — such an element (and the
                // remainder of the list) is dropped rather than rejected;
                // confirm this is intended.
                while let Some((Some(lt), Some(rt))) = seq.next_element()? {
                    if lt <= rt {
                        elements.push((Bound::Included(lt), Bound::Excluded(rt)))
                    } else {
                        return Err(de::Error::invalid_value(
                            de::Unexpected::Str(&format!("[{}, {}]", lt, rt)),
                            &"lt <= rt",
                        ));
                    }
                }
                Ok(elements)
            }
        }
        deserializer.deserialize_seq(MillisecondsBoundTupleVisitor)
    }
}
|
///// chapter 4 "structuring data and matching patterns"
///// program section:
//
fn main() {
    // Slice bytes 7..12 out of the string slice ("earth") and print it.
    let place = "middle-earth";
    let suffix = &place[7..12];
    println!("{}", suffix);
}
///// output should be:
/*
earth
*/// end of output
|
pub mod mds;
pub mod sboxes;
use rand::Rng;
use franklin_crypto::bellman::pairing::Engine;
use franklin_crypto::bellman::pairing::ff::Field;
use std::marker::PhantomData;
use mds::generate_vectors_for_matrix;
use sboxes::{QuinticSBox, QuinticInverseSBox};
/// Parameters of one cipher instance over the field `E::Fr`.
///
/// `SIZE` is the state width, `RNUMBER` the number of rounds.
pub struct CipherParams<
    E: Engine,
    const SIZE: usize,
    const RNUMBER: usize> {
    // Vectors used to build the matrix (see `mds::generate_vectors_for_matrix`).
    pub vect_for_matrix: [Vec<E::Fr>; 2],
    // Forward (x^5) S-box.
    pub sbox1: QuinticSBox<E, SIZE>,
    // Inverse quintic S-box.
    pub sbox2: QuinticInverseSBox<E, SIZE>,
    // One SIZE-wide constant vector per round.
    pub round_constants: [[E::Fr; SIZE]; RNUMBER]
}
/// Build a full parameter set from `rng`: the matrix vectors, both
/// S-boxes, and the per-round constants.
pub fn generate_cipher_params<
    E: Engine,
    R: Rng,
    const SIZE: usize,
    const RNUMBER: usize>(rng: &mut R)-> CipherParams<E, SIZE, RNUMBER> {
    // RNG is consumed first by the matrix vectors, then by the constants;
    // S-box construction draws nothing.
    let vect_for_matrix = generate_vectors_for_matrix::<E, R, SIZE>(rng);
    let round_constants = generate_round_constants::<E, R, SIZE, RNUMBER>(rng);
    CipherParams {
        vect_for_matrix,
        sbox1: QuinticSBox::<E, SIZE> { _marker: PhantomData },
        sbox2: QuinticInverseSBox::<E, SIZE> { _marker: PhantomData },
        round_constants,
    }
}
/// Fill an RNUMBER x SIZE table with uniformly drawn non-zero field
/// elements (zero draws are rejection-sampled away).
fn generate_round_constants<
    E: Engine,
    R: Rng,
    const SIZE: usize,
    const RNUMBER: usize
>(
    rng: &mut R
)-> [[E::Fr; SIZE]; RNUMBER] {
    let zero = E::Fr::zero();
    let mut constants = [[E::Fr::zero(); SIZE]; RNUMBER];
    for row in constants.iter_mut() {
        for slot in row.iter_mut() {
            *slot = loop {
                let candidate = rng.gen();
                if candidate != zero {
                    break candidate;
                }
            };
        }
    }
    constants
}
|
use crate::kernel::opcode;
use crate::kernel::Table_;
use crate::statement_iter::{Statement, StatementIter, StatementOwned};
use mmb_parser::{ProofStream, UnifyStream, Visitor};
/// Growing arena of unify commands; streams are appended back-to-back.
pub struct UnifyCommands {
    data: Vec<opcode::Command<opcode::Unify>>,
    // Start index of the stream currently being recorded.
    start_offset: usize,
}
impl UnifyStream for UnifyCommands {
    fn push(&mut self, value: opcode::Command<opcode::Unify>) {
        self.data.push(value);
    }
    // Returns (start, end) indices of the just-recorded stream; the end
    // equals the current length because streams are appended in order.
    fn done(&self) -> (usize, usize) {
        (self.start_offset, self.data.len())
    }
}
/// Growing arena of proof commands; streams are appended back-to-back.
pub struct ProofCommands {
    data: Vec<opcode::Command<opcode::Proof>>,
    // Start index of the stream currently being recorded.
    start_offset: usize,
}
impl ProofStream for ProofCommands {
    fn push(&mut self, value: opcode::Command<opcode::Proof>) {
        self.data.push(value);
    }
    // Returns (start, end) indices of the just-recorded stream.
    fn done(&self) -> (usize, usize) {
        (self.start_offset, self.data.len())
    }
}
use crate::kernel::{Sort_, Term_, Theorem_, Var_};
/// Accumulates everything the mmb parser emits; converted into a kernel
/// `Table_` plus a statement iterator via `into_table`/`into_table_owned`.
pub struct MmbVisitor<'a> {
    binders: Vec<Var_>,
    // Raw byte slices of statements/unify streams, kept for the file's lifetime.
    slices: Vec<&'a [u8]>,
    statements: Vec<Statement>,
    uni_streams: UnifyCommands,
    proof_stream: ProofCommands,
    sorts: Vec<Sort_>,
    terms: Vec<Term_>,
    theorems: Vec<Theorem_>,
    // Index lists into `statements`, grouped per statement kind.
    sort_indices: Vec<usize>,
    axiom_indices: Vec<usize>,
    term_indices: Vec<usize>,
    theorem_indices: Vec<usize>,
}
impl<'a> MmbVisitor<'a> {
    /// Create a visitor with large preallocated buffers (1 Mi entries) for
    /// the high-volume vectors; index lists start empty.
    pub fn new() -> MmbVisitor<'a> {
        MmbVisitor {
            binders: Vec::with_capacity(1024 * 1024),
            slices: Vec::with_capacity(1024 * 1024),
            statements: Vec::with_capacity(1024 * 1024),
            uni_streams: UnifyCommands {
                data: Vec::with_capacity(1024 * 1024),
                start_offset: 0,
            },
            proof_stream: ProofCommands {
                data: Vec::with_capacity(1024 * 1024),
                start_offset: 0,
            },
            sorts: Vec::new(),
            terms: Vec::new(),
            theorems: Vec::new(),
            sort_indices: Vec::new(),
            axiom_indices: Vec::new(),
            term_indices: Vec::new(),
            theorem_indices: Vec::new(),
        }
    }
    /// Consume the visitor into a kernel table plus a borrowed statement
    /// iterator (index lists are dropped).
    pub fn into_table(self) -> (crate::kernel::Table_, StatementIter) {
        (
            Table_ {
                sorts: self.sorts,
                theorems: self.theorems,
                terms: self.terms,
                unify: self.uni_streams.data,
                binders: self.binders,
            },
            StatementIter::new(self.statements, self.proof_stream.data),
        )
    }
    /// Like `into_table`, but the statement container also owns the
    /// per-kind index lists.
    pub fn into_table_owned(self) -> (crate::kernel::Table_, StatementOwned) {
        (
            Table_ {
                sorts: self.sorts,
                theorems: self.theorems,
                terms: self.terms,
                unify: self.uni_streams.data,
                binders: self.binders,
            },
            StatementOwned::new(
                self.statements,
                self.proof_stream.data,
                self.sort_indices,
                self.axiom_indices,
                self.term_indices,
                self.theorem_indices,
            ),
        )
    }
}
impl<'a> Visitor<'a> for MmbVisitor<'a> {
    type Binder = Var_;
    type Sort = Sort_;
    type Statement = opcode::Statement;
    type Proof = ProofCommands;
    type Unify = UnifyCommands;
    fn parse_sort(&mut self, sort: Self::Sort) {
        self.sorts.push(sort);
    }
    /// Record one statement: remember its raw bytes, file its index under
    /// the matching per-kind index list, then store the statement itself.
    fn parse_statement(
        &mut self,
        statement: Self::Statement,
        _offset: usize,
        slice: &'a [u8],
        proof: Option<(usize, usize)>,
    ) {
        self.slices.push(slice);
        // Index this statement will occupy in `self.statements`.
        let idx = self.statements.len();
        match statement {
            opcode::Statement::Sort => self.sort_indices.push(idx),
            // Axioms are indexed both as axioms and as theorems.
            opcode::Statement::Axiom => {
                self.axiom_indices.push(idx);
                self.theorem_indices.push(idx)
            }
            opcode::Statement::TermDef => self.term_indices.push(idx),
            opcode::Statement::Thm => self.theorem_indices.push(idx),
            // NOTE(review): LocalDef files under terms while LocalTerm
            // files under theorems — confirm the Local* mapping is not
            // swapped relative to TermDef/Thm.
            opcode::Statement::LocalDef => self.term_indices.push(idx),
            opcode::Statement::LocalTerm => self.theorem_indices.push(idx),
            _ => {}
        }
        self.statements.push(Statement {
            code: statement,
            proof,
        });
    }
    // Mark where the next unify stream begins and hand out the arena.
    fn start_unify_stream(&mut self) -> &mut UnifyCommands {
        self.uni_streams.start_offset = self.uni_streams.data.len();
        &mut self.uni_streams
    }
    // Mark where the next proof stream begins and hand out the arena.
    fn start_proof_stream(&mut self) -> &mut ProofCommands {
        self.proof_stream.start_offset = self.proof_stream.data.len();
        &mut self.proof_stream
    }
    /// Grow the binder arena by `nr` default-initialized slots and return
    /// the fresh slice plus its starting index. (After `resize`,
    /// `get_mut(len..)` always succeeds, so this effectively never
    /// returns `None`.)
    fn try_reserve_binder_slice(&mut self, nr: usize) -> Option<(&mut [Var_], usize)> {
        let len = self.binders.len();
        let new_len = len + nr;
        self.binders.resize(new_len, From::from(0));
        if let Some(slice) = self.binders.get_mut(len..) {
            Some((slice, len))
        } else {
            None
        }
    }
    /// Record a term definition; binder/unify arguments are (start, end)
    /// index pairs into the shared arenas, stored as ranges.
    fn parse_term(
        &mut self,
        sort: u8,
        binders: (usize, usize),
        ret_type: Self::Binder,
        unify: &'a [u8],
        unify_indices: (usize, usize),
    ) {
        let term = Term_ {
            sort,
            binders: binders.0..binders.1,
            ret_type,
            unify_commands: unify_indices.0..unify_indices.1,
        };
        self.terms.push(term);
        self.slices.push(unify);
    }
    /// Record a theorem; same range convention as `parse_term`.
    fn parse_theorem(
        &mut self,
        binders: (usize, usize),
        unify: &'a [u8],
        unify_indices: (usize, usize),
    ) {
        let theorem = Theorem_ {
            binders: binders.0..binders.1,
            unify_commands: unify_indices.0..unify_indices.1,
        };
        self.theorems.push(theorem);
        self.slices.push(unify);
    }
}
|
use regex::Regex;
use std::collections::HashMap;
// Path to the puzzle input.
static FILENAME: &str = "input/data";
// The masks cover 36 address/value bits.
static BITMASK_LEN: usize = 36;
// One docking-program instruction.
enum Operation {
    // Bitmask as an (and, or) pair: 'X' bits are 1 in `and`, 0 in `or`
    // (see `parse`).
    Mask((usize, usize)),
    // Write: (address, value) — see how `parse` constructs it.
    Mem(usize, usize),
}
fn main() {
    // Read the puzzle input once and solve both parts from the same parse.
    let raw = std::fs::read_to_string(FILENAME).expect("could not read file");
    let operations = parse(&raw);
    println!("part one: {}", part_one(&operations));
    println!("part two: {}", part_two(&operations));
}
/// Part one: the mask rewrites each written VALUE (and-mask clears 'X'
/// positions, or-mask sets fixed 1 bits); answer is the sum of memory.
fn part_one(ops: &[Operation]) -> usize {
    let mut mask = (0, 0);
    let mut memory = HashMap::new();
    for op in ops {
        match op {
            Operation::Mask(m) => mask = *m,
            Operation::Mem(addr, val) => {
                let (and_mask, or_mask) = mask;
                memory.insert(*addr, val & and_mask | or_mask);
            }
        }
    }
    memory.values().sum()
}
/// Part two: the mask rewrites each ADDRESS into a set of floating
/// addresses (see `find_addresses`); answer is the sum of memory.
fn part_two(ops: &[Operation]) -> usize {
    let mut mask = (0, 0);
    let mut memory = HashMap::new();
    for op in ops {
        match op {
            Operation::Mask(m) => mask = *m,
            Operation::Mem(addr, val) => {
                for decoded in find_addresses(mask, *addr) {
                    memory.insert(decoded, *val);
                }
            }
        }
    }
    memory.values().sum()
}
/// Expand one address through the version-2 mask: fixed 1 bits come from
/// the or-mask, 'X' (floating) bits — the positions where and- and
/// or-mask differ — take both values, yielding 2^k addresses.
fn find_addresses(mask: (usize, usize), addr: usize) -> Vec<usize> {
    let (and_mask, or_mask) = mask;
    let mut base = addr | or_mask;
    // Bits set here were 'X' in the original mask string.
    let mut floating = and_mask ^ or_mask;
    let mut addrs: Vec<usize> = Vec::new();
    for bit in 0..BITMASK_LEN {
        let current = base % 2;
        let weight = 2_usize.pow(bit as u32);
        addrs = if floating % 2 == 1 {
            if addrs.is_empty() {
                // First bit examined is floating: seed with both values.
                vec![0, 1]
            } else {
                // Fork every partial address on this floating bit.
                addrs
                    .iter()
                    .flat_map(|a| vec![a + weight, *a])
                    .collect()
            }
        } else if addrs.is_empty() {
            vec![current]
        } else {
            addrs.iter().map(|a| a + current * weight).collect()
        };
        base >>= 1;
        floating >>= 1;
    }
    addrs
}
/// Parse the input into operations. A `mask = …` line becomes
/// `Mask((and, or))` where 'X' maps to 1 in the and-mask and 0 in the
/// or-mask; a `mem[A] = V` line becomes `Mem(A, V)`.
fn parse(data: &str) -> Vec<Operation> {
    let re = Regex::new(r"mem\[(?P<addr>[0-9]*)\]").unwrap();
    let mut mask = "";
    data.lines()
        .map(|line| {
            let mut instr = line.split(" = ");
            let (lhs, rhs) = (instr.next().unwrap(), instr.next().unwrap());
            match lhs {
                "mask" => {
                    mask = rhs;
                    // Two binary interpretations of the same mask string.
                    let and = usize::from_str_radix(&mask.replace("X", "1"), 2).unwrap();
                    let or = usize::from_str_radix(&mask.replace("X", "0"), 2).unwrap();
                    Operation::Mask((and, or))
                }
                _ => {
                    let addr = re.captures(lhs).unwrap()["addr"].parse::<usize>().unwrap();
                    let val = rhs.parse::<usize>().unwrap();
                    Operation::Mem(addr, val)
                }
            }
        })
        .collect()
}
// Only compile the test module in test builds; without the cfg it was
// built (with unused-code warnings) into every release binary.
#[cfg(test)]
mod tests {
    // Integration-style tests: they read the real puzzle input from disk,
    // so they only pass when `input/data` is present.
    #[test]
    fn test_part_one() {
        let data = std::fs::read_to_string(super::FILENAME).expect("could not read file");
        let ops = super::parse(&data);
        assert_eq!(8570568288597, super::part_one(&ops));
    }
    #[test]
    fn test_part_two() {
        let data = std::fs::read_to_string(super::FILENAME).expect("could not read file");
        let ops = super::parse(&data);
        assert_eq!(3289441921203, super::part_two(&ops));
    }
}
|
use crate::repr::Literal;
use inkwell::values::BasicValueEnum;
use super::common::CompileResult;
use super::context::CodegenContext;
use super::literal::*;
/// Emit IR that interns `name` at runtime and wraps the resulting symbol
/// in a runtime object value.
fn compile_quoted_symbol(ctx: &mut CodegenContext, name: &String) -> BasicValueEnum {
    // Pointer to the symbol's name as a C string literal.
    let sym_name_ptr = ctx.str_literal_as_i8_ptr(name.as_str());
    let intern_fn = ctx.lookup_known_fn("unlisp_rt_intern_sym");
    let interned_sym_ptr = ctx
        .builder
        .build_call(intern_fn, &[sym_name_ptr.into()], "intern")
        .try_as_basic_value()
        .left()
        .unwrap()
        .into_pointer_value();
    // Box the interned symbol pointer into a generic runtime object.
    let intern_fn = ctx.lookup_known_fn("unlisp_rt_object_from_symbol");
    ctx.builder
        .build_call(intern_fn, &[interned_sym_ptr.into()], "object")
        .try_as_basic_value()
        .left()
        .unwrap()
}
/// Emit IR that rebuilds a quoted list at runtime: start from the empty
/// list and cons the compiled elements on in reverse order, then wrap the
/// list in a runtime object value.
fn compile_quoted_list(ctx: &mut CodegenContext, list: &Vec<Literal>) -> CompileResult {
    let cons_fn = ctx.lookup_known_fn("unlisp_rt_list_cons");
    let empty_list_fn = ctx.lookup_known_fn("unlisp_rt_empty_list");
    let object_form_list_fn = ctx.lookup_known_fn("unlisp_rt_object_from_list");
    let mut result = ctx
        .builder
        .build_call(empty_list_fn, &[], "empty")
        .try_as_basic_value()
        .left()
        .unwrap();
    // Reverse iteration so consing yields the original element order.
    for el in list.iter().rev() {
        let compiled = compile_quoted_literal(ctx, el)?;
        result = ctx
            .builder
            .build_call(cons_fn, &[compiled, result], "result")
            .try_as_basic_value()
            .left()
            .unwrap();
    }
    let result_obj = ctx
        .builder
        .build_call(object_form_list_fn, &[result], "result_obj")
        .try_as_basic_value()
        .left()
        .unwrap();
    Ok(result_obj)
}
/// Compile a quoted literal into its runtime object representation.
///
/// Symbols and lists have dedicated quoted forms; self-evaluating literals
/// (integers, strings, `t`) compile exactly as in evaluated position.
pub fn compile_quoted_literal(ctx: &mut CodegenContext, literal: &Literal) -> CompileResult {
    match literal {
        Literal::SymbolLiteral(s) => Ok(compile_quoted_symbol(ctx, s)),
        // `compile_quoted_list` already returns `CompileResult`; forward it
        // directly instead of the redundant `Ok(...?)` round-trip.
        Literal::ListLiteral(list) => compile_quoted_list(ctx, list),
        Literal::IntegerLiteral(_) | Literal::StringLiteral(_) | Literal::T => {
            compile_literal(ctx, literal)
        }
    }
}
|
//! Cortex-M7 TCM and Cache access control.
use volatile_register::RW;
/// Register block
/// Register block
///
/// Memory layout of the Cortex-M7 TCM/cache control registers; `repr(C)`
/// keeps the field order and offsets fixed.
#[repr(C)]
pub struct RegisterBlock {
    /// Instruction Tightly-Coupled Memory Control Register
    pub itcmcr: RW<u32>,
    /// Data Tightly-Coupled Memory Control Register
    pub dtcmcr: RW<u32>,
    /// AHBP Control Register
    pub ahbpcr: RW<u32>,
    /// L1 Cache Control Register
    pub cacr: RW<u32>,
    /// AHB Slave Control Register
    pub ahbscr: RW<u32>,
    // Padding for the reserved word between AHBSCR and ABFSR.
    reserved0: u32,
    /// Auxiliary Bus Fault Status Register
    pub abfsr: RW<u32>,
}
/// ITCMCR and DTCMCR TCM enable bit.
pub const TCM_EN: u32 = 1;
/// ITCMCR and DTCMCR TCM read-modify-write bit.
pub const TCM_RMW: u32 = 2;
/// ITCMCR and DTCMCR TCM retry phase enable bit.
pub const TCM_RETEN: u32 = 4;
/// ITCMCR and DTCMCR TCM size mask.
pub const TCM_SZ_MASK: u32 = 0x78;
/// ITCMCR and DTCMCR TCM size shift.
pub const TCM_SZ_SHIFT: usize = 3;
/// AHBPCR AHBP enable bit.
pub const AHBPCR_EN: u32 = 1;
/// AHBPCR AHBP size mask.
pub const AHBPCR_SZ_MASK: u32 = 0x0e;
/// AHBPCR AHBP size shift.
pub const AHBPCR_SZ_SHIFT: usize = 1;
/// CACR Shared cacheable-is-WT for data cache.
pub const CACR_SIWT: u32 = 1;
/// CACR ECC in the instruction and data cache (disable).
pub const CACR_ECCDIS: u32 = 2;
/// CACR Force Write-Through in the data cache.
pub const CACR_FORCEWT: u32 = 4;
/// AHBSCR AHBS prioritization control mask.
pub const AHBSCR_CTL_MASK: u32 = 0x03;
/// AHBSCR AHBS prioritization control shift.
pub const AHBSCR_CTL_SHIFT: usize = 0;
/// AHBSCR Threshold execution priority for AHBS traffic demotion, mask.
pub const AHBSCR_TPRI_MASK: u32 = 0x7fc;
/// AHBSCR Threshold execution priority for AHBS traffic demotion, shift.
pub const AHBSCR_TPRI_SHIFT: usize = 2;
/// AHBSCR Fairness counter initialization value, mask.
pub const AHBSCR_INITCOUNT_MASK: u32 = 0xf800;
/// AHBSCR Fairness counter initialization value, shift.
pub const AHBSCR_INITCOUNT_SHIFT: usize = 11;
/// ABFSR Async fault on ITCM interface.
pub const ABFSR_ITCM: u32 = 1;
/// ABFSR Async fault on DTCM interface.
pub const ABFSR_DTCM: u32 = 2;
/// ABFSR Async fault on AHBP interface.
pub const ABFSR_AHBP: u32 = 4;
/// ABFSR Async fault on AXIM interface.
pub const ABFSR_AXIM: u32 = 8;
/// ABFSR Async fault on EPPB interface.
pub const ABFSR_EPPB: u32 = 16;
/// ABFSR Indicates the type of fault on the AXIM interface, mask.
pub const ABFSR_AXIMTYPE_MASK: u32 = 0x300;
/// ABFSR Indicates the type of fault on the AXIM interface, shift.
pub const ABFSR_AXIMTYPE_SHIFT: usize = 8;
|
use std::fs::File;
use std::mem::size_of;
use std::io::Seek;
use std::io::SeekFrom;
use std::convert::TryInto;
use byteorder::{LittleEndian, ReadBytesExt};
use libc::{_SC_PAGESIZE, sysconf};
/// Failure modes of `virt_to_phys` (see its `Result` type).
#[derive(Debug)]
pub enum Error {
    // presumably: failure accessing /proc/self/pagemap — TODO confirm
    PageMap,
    // presumably: failure reading a pagemap entry — TODO confirm
    Read,
    // catch-all / unclassified failure
    Unk,
}
pub fn virt_to_phys<T>(virt: *const T) -> Result<*const T, Error> {
let ptr_val = virt as usize;
let page_size = unsafe {
sysconf(_SC_PAGESIZE) as usize
};
let mut file = File::open("/proc/self/pagemap").unwrap();
let seek = (ptr_val / page_size * size_of::<*const T>()).try_into().unwrap();
file.seek(SeekFrom::Start(seek)).unwrap();
let entry = file.read_u64::<LittleEndian>().unwrap();
Ok(((entry & 0x7fffffffffffffu64) * page_size as u64 + (ptr_val % page_size) as u64) as *const T)
}
#[cfg(test)]
mod tests {
    use crate::virt_to_phys;
    // Smoke test: translate a stack variable's address and print both.
    // NOTE(review): requires Linux and (since 4.0) privileges to see real
    // PFNs; may fail or print a bogus address otherwise.
    #[test]
    fn it_works() {
        let my_num: i32 = 10;
        let my_num_ptr: *const i32 = &my_num;
        println!("ptr virt: {:?} ptr phys: {:?}", my_num_ptr, virt_to_phys(my_num_ptr).unwrap());
    }
}
|
extern crate termion;
use termion::async_stdin;
use std::io::{Read, Write};
use std::thread;
use std::time::Duration;
use crate::util::*;
/// One recorded keystroke: the character and where it was typed.
pub struct Input { //will become command
    // typed character (type from util — presumably a char wrapper)
    ch: Char,
    // buffer position of the keystroke (type from util)
    position: Pos,
}
pub type Action = Vec<Input>;
pub type Done = Vec<Action>; //will become "Done" stack
pub type Undone = Vec<Action>; //will become "Undone" stack
pub type Comm = Vec<Action>; //will become command mode input stack
/*
Basically how this works:
When in input mode, any characters you press are saved in an action: an
input chain. After a brief period of inactivity or if `esc` is pressed,
the action chain is considered closed/complete. Then the action is pushed
onto the "done" stack.
When undoing an action, its effects are reversed, and the action is popped
from the "done" stack and pushed onto the "undone" stack.
When redoing an action, its effects are applied, and the action is popped
from the "undone" stack and pushed back onto the "done" stack.
*/
/// Entry point of the input loop.
///
/// NOTE(review): stub — currently only sets up the async stdin byte
/// iterator and an (unused) input buffer; the channel/thread described
/// in the comment above is not implemented yet, and `scr` is unused.
pub fn input_handler(scr: &mut dyn Write, ) {
    let mut stdin = async_stdin().bytes();
    let mut last_input = String::new();
    // create input channel and run in separate thread
}
|
use azure_core::prelude::*;
use azure_storage::blob::prelude::*;
use azure_storage::core::prelude::*;
use std::error::Error;
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
    // First we retrieve the account name and master key from environment variables.
    let account =
        std::env::var("STORAGE_ACCOUNT").expect("Set env variable STORAGE_ACCOUNT first!");
    let master_key =
        std::env::var("STORAGE_MASTER_KEY").expect("Set env variable STORAGE_MASTER_KEY first!");
    // Container name comes from the first CLI argument.
    let container_name = std::env::args()
        .nth(1)
        .expect("please specify a non-existing container name as command line parameter");
    let http_client = new_http_client();
    // Run the demo once against the real account...
    let storage_account =
        StorageAccountClient::new_access_key(http_client.clone(), &account, &master_key)
            .as_storage_client();
    create_container_and_list(storage_account, &container_name).await?;
    // ...and once against the local storage emulator.
    let storage_account = StorageAccountClient::new_emulator_default().as_storage_client();
    create_container_and_list(storage_account, &container_name).await?;
    Ok(())
}
/// Creates `container_name`, lists it while empty, uploads three small text
/// blobs, lists again, then deletes the container.
async fn create_container_and_list(
    storage_account: std::sync::Arc<StorageClient>,
    container_name: &str,
) -> Result<(), Box<dyn Error + Send + Sync>> {
    let container = storage_account.as_container_client(container_name);
    container.create().execute().await?;

    // Freshly created container: expect zero blobs in the listing.
    let listing = container.list_blobs().execute().await?;
    println!("List blob returned {} blobs.", listing.blobs.blobs.len());

    // Upload blob0.txt .. blob2.txt with identical plain-text payloads.
    for i in 0..3 {
        let blob = container.as_blob_client(format!("blob{}.txt", i));
        blob.put_block_blob("somedata")
            .content_type("text/plain")
            .execute()
            .await?;
        println!("\tAdded blob {}", i);
    }

    // The listing should now show the blobs that were just added.
    let listing = container.list_blobs().execute().await?;
    println!("List blob returned {} blobs.", listing.blobs.blobs.len());

    // Clean up so the demo can be re-run with the same container name.
    container.delete().execute().await?;
    println!("Container {} deleted", container_name);
    Ok(())
}
|
use sudo_test::{Command, Env, User};
use crate::{Result, PASSWORD, USERNAME};
// Scenario: `sudo -K` run by one user must NOT invalidate another user's
// cached credentials. Two shells synchronize through /tmp barrier files.
#[test]
fn is_limited_to_a_single_user() -> Result<()> {
let second_user = "ghost";
let env = Env("ALL ALL=(ALL:ALL) ALL")
.user(User(USERNAME).password(PASSWORD))
.user(User(second_user).password(PASSWORD))
.build()?;
// First user: authenticate (caching credentials), signal barrier1, wait for
// barrier2, then rerun sudo relying solely on the cached timestamp.
let child = Command::new("sh")
.arg("-c")
.arg(format!(
"echo {PASSWORD} | sudo -S true; touch /tmp/barrier1; until [ -f /tmp/barrier2 ]; do sleep 1; done; sudo -S true"
))
.as_user(USERNAME)
.spawn(&env)?;
// Second user: wait for barrier1, invalidate *their own* credentials with
// `sudo -K`, then release the first user via barrier2.
Command::new("sh")
.arg("-c")
.arg("until [ -f /tmp/barrier1 ]; do sleep 1; done; sudo -K && touch /tmp/barrier2")
.as_user(second_user)
.output(&env)?
.assert_success()?;
// The first user's second sudo must still succeed: -K is per-user.
child.wait()?.assert_success()
}
// Scenario: `sudo -K` invalidates the invoking user's cached credentials in
// *all* of that user's sessions, not just the one where it was run.
#[test]
fn has_a_user_global_effect() -> Result<()> {
let env = Env(format!("{USERNAME} ALL=(ALL:ALL) ALL"))
.user(User(USERNAME).password(PASSWORD))
.build()?;
// Session A: authenticate, signal barrier1, wait for barrier2, then try to
// sudo again feeding an *empty* password (would succeed only via the cache).
let child = Command::new("sh")
.arg("-c")
.arg(format!(
"echo {PASSWORD} | sudo -S true; touch /tmp/barrier1; until [ -f /tmp/barrier2 ]; do sleep 1; done; echo | sudo -S true"
))
.as_user(USERNAME)
.spawn(&env)?;
// Session B (same user): wait for barrier1, run `sudo -K`, release barrier2.
Command::new("sh")
.arg("-c")
.arg("until [ -f /tmp/barrier1 ]; do sleep 1; done; sudo -K && touch /tmp/barrier2")
.as_user(USERNAME)
.output(&env)?
.assert_success()?;
// The empty-password attempt in session A must now fail with exit code 1.
let output = child.wait()?;
assert!(!output.status().success());
assert_eq!(Some(1), output.status().code());
// The diagnostic text differs between original sudo and sudo-rs.
let diagnostic = if sudo_test::is_original_sudo() {
"1 incorrect password attempt"
} else {
"incorrect authentication attempt"
};
assert_contains!(output.stderr(), diagnostic);
Ok(())
}
// Scenario: `sudo -K` also takes effect within the same shell session —
// a password-less `sudo` immediately after -K must be rejected.
#[test]
fn also_works_locally() -> Result<()> {
let env = Env(format!("{USERNAME} ALL=(ALL:ALL) ALL"))
.user(User(USERNAME).password(PASSWORD))
.build()?;
// input valid credentials
// invalidate them
// try to sudo without a password
let output = Command::new("sh")
.arg("-c")
.arg(format!(
"echo {PASSWORD} | sudo -S true; sudo -K; sudo true"
))
.as_user(USERNAME)
.output(&env)?;
// `sudo true` after -K must fail with exit code 1 and prompt-related stderr.
assert!(!output.status().success());
assert_eq!(Some(1), output.status().code());
// The diagnostic text differs between original sudo and sudo-rs.
let diagnostic = if sudo_test::is_original_sudo() {
"a password is required"
} else {
"Authentication failed"
};
assert_contains!(output.stderr(), diagnostic);
Ok(())
}
|
use std::io::stdin;
/// Tiny branching text adventure driven by stdin choices.
///
/// Reads one line per decision point. Comparisons trim the input so the
/// game works with both "\n" and "\r\n" line endings.
fn main() {
    println!("You enter a dark room with two doors. Do you go through door #1 or door #2?");
    let mut door = String::new();
    stdin().read_line(&mut door).unwrap();
    if door.trim() == "1" {
        println!("There is a giant bear here eating a cheese cake.");
        println!("What do you do?");
        println!("1. Take the cake.");
        println!("2. Scream at the bear.");
        let mut bear = String::new();
        stdin().read_line(&mut bear).unwrap();
        if bear.trim() == "1" {
            println!("The bear eats your face off. Good job!");
        } else if bear.trim() == "2" {
            println!("The bear eats your legs off. Good job!");
        } else {
            // Trim here too so the echoed input doesn't carry its newline.
            println!("Well, doing {} is probably better.", bear.trim());
            println!("Bear runs away.");
        }
    } else if door.trim() == "2" {
        println!("You stare into the endless abyss at Cthulhu's retina.");
        println!("1. Blueberries");
        println!("2. Yellow jacket clothespins");
        println!("3. Understanding revolvers yelling melodies");
        let mut insanity = String::new();
        stdin().read_line(&mut insanity).unwrap();
        // BUG FIX: the original compared against "2" with no trailing
        // newline, so choosing option 2 could never match the raw
        // `read_line` output; trimming fixes both options uniformly.
        if insanity.trim() == "1" || insanity.trim() == "2" {
            println!("Your body survives powered by a mind of jello.");
            println!("Good job!");
        } else {
            // Typo fix in the user-facing message: "pol" -> "pool".
            println!("The insanity rots your eyes into a pool of muck.");
            println!("Good job!");
        }
    } else {
        println!("You stumble around and fall on a knife and die. Good job!");
    }
}
|
// Copyright 2014 Christopher Schröder, Johannes Köster.
// Licensed under the MIT license (http://opensource.org/licenses/MIT)
// This file may not be copied, modified, or distributed
// except according to those terms.
// NOTE(review): these `#![feature(...)]` gates are pre-1.0-era unstable
// features (step_by, convert, vec_push_all, copy_lifetime, slice_bytes were
// later removed or stabilized), so this crate root only builds on an old
// nightly toolchain — confirm the pinned compiler version before editing.
#![feature(libc)]
#![feature(step_by)]
#![feature(convert)]
#![feature(vec_push_all)]
#![feature(copy_lifetime)]
#![feature(slice_bytes)]
extern crate libc;
// Raw bindings to htslib.
pub mod htslib;
// Higher-level BAM and BCF modules built on those bindings.
pub mod bam;
pub mod bcf;
|
extern crate timer;
extern crate chrono;
use timer::Timer;
use chrono::Duration;
use std::thread;
// Callback invoked by the repeating timer (see `main`): prints a greeting.
fn x() {
println!("hello");
}
/// Demonstrates `timer::Timer::schedule_repeating`: runs `x` every 2
/// seconds for ~10 seconds, then cancels the schedule by dropping the guard.
fn main() {
let timer = Timer::new();
// The returned guard keeps the repeating schedule alive while held.
let guard = timer.schedule_repeating(Duration::seconds(2), x);
// give some time so we can see hello printed
// you can execute any code here
thread::sleep(::std::time::Duration::new(10, 0));
// stop repeating
drop(guard);
}
use actix::prelude::*;
use chrono::NaiveTime;
use log::warn;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::time::Duration;
use std::time::Instant;
/// Actor state for one route segment: tracks trips currently traversing the
/// fragment and the durations of completed traversals.
pub struct RouteFragment {
// Fragment identifier (reported as `stop_id` in status responses).
id: String,
// Endpoint names: index 0 = start, index 1 = stop ("?" until updated).
stop_names: [String; 2],
// Durations of completed traversals, in completion order.
past_trip_duration: Vec<Duration>,
// Time of the most recently completed traversal, if any.
last_update_time: Option<NaiveTime>,
// trip_id -> entry time, for trips whose entry event arrived first.
current_trip_starts: HashMap<String, NaiveTime>,
// trip_id -> leave time, for trips whose leave event arrived first.
current_trip_stops: HashMap<String, NaiveTime>,
}
// RouteFragment runs as an ordinary (non-sync) actix actor.
impl Actor for RouteFragment {
type Context = Context<RouteFragment>;
}
impl RouteFragment {
    /// Creates a fragment with the given id, placeholder endpoint names
    /// ("?"), and no trip history.
    pub fn new(id: String) -> RouteFragment {
        RouteFragment {
            // Field-init shorthand (was the redundant `id: id`;
            // clippy::redundant_field_names).
            id,
            stop_names: ["?".to_string(), "?".to_string()],
            last_update_time: None,
            past_trip_duration: Vec::new(),
            current_trip_starts: HashMap::new(),
            current_trip_stops: HashMap::new(),
        }
    }
}
/// Sent when a trip is observed entering this fragment.
#[derive(Message, Debug)]
#[rtype(result = "()")]
pub struct FragmentEntryEvent {
pub trip_id: String,
pub instant: Instant,
pub time: NaiveTime,
}
/// Sent when a trip is observed leaving this fragment.
#[derive(Message, Debug)]
#[rtype(result = "()")]
pub struct FragmentLeaveEvent {
pub trip_id: String,
pub instant: Instant,
pub time: NaiveTime,
}
/// Renames one of the fragment's endpoints (start or stop).
#[derive(Message, Debug)]
#[rtype(result = "()")]
pub enum UpdateMeta {
UpdateStartName(String),
UpdateStopName(String),
}
/// Snapshot of a fragment's state, returned for FragmentStatusRequest.
#[derive(Serialize, MessageResponse, Debug)]
pub struct RouteFragmentStats {
pub stop_id: String,
pub stop_name: String,
// Duration of the most recent completed trip, in seconds.
pub time: Option<u64>,
// Seconds since the last completed trip, if any.
pub update_secs: Option<u64>,
// Ids of trips currently inside the fragment (entered, not yet left).
pub active_trips: Vec<String>,
}
/// Asks the fragment actor for a RouteFragmentStats snapshot.
#[derive(Message, Debug)]
#[rtype(result = "RouteFragmentStats")]
pub struct FragmentStatusRequest;
impl Handler<FragmentStatusRequest> for RouteFragment {
    type Result = RouteFragmentStats;

    /// Builds a stats snapshot: seconds since the last completed trip, the
    /// most recent trip's duration, and the ids of trips currently inside
    /// the fragment.
    fn handle(&mut self, _msg: FragmentStatusRequest, _ctx: &mut Context<Self>) -> Self::Result {
        // Seconds since the last completed traversal, if any.
        // NOTE(review): `(now - x).to_std().unwrap()` panics when the
        // current time-of-day is earlier than `x` (e.g. across midnight,
        // since these are NaiveTime values) — confirm this cannot happen.
        let last_update = self.last_update_time.map(|x| {
            (chrono::Local::now().time() - x)
                .to_std()
                .unwrap()
                .as_secs()
        });
        // Idiom fix: trailing expression instead of `return ...;`
        // (clippy::needless_return).
        RouteFragmentStats {
            stop_id: self.id.clone(),
            stop_name: self.stop_names[0].clone(),
            time: self.past_trip_duration.last().map(|x| x.as_secs()),
            update_secs: last_update,
            active_trips: self.current_trip_starts.keys().cloned().collect(),
        }
    }
}
impl RouteFragment {
/// Records a completed traversal from `start_time` to `stop_time` and
/// updates `last_update_time`.
///
/// `to_std()` fails for negative chrono Durations (clock skew, or the
/// NaiveTime subtraction wrapping around midnight); such trips are dropped
/// with a warning instead of being stored.
fn insert_finished_trip(&mut self, start_time: &NaiveTime, stop_time: &NaiveTime) {
match (*stop_time - *start_time).to_std() {
Ok(duration) => {
self.past_trip_duration.push(duration);
self.last_update_time = Some(*stop_time);
}
Err(_) => warn!(
"Zero or negative trip duration for route fragment {}",
self.stop_names[0]
),
}
}
}
impl Handler<FragmentEntryEvent> for RouteFragment {
type Result = ();
/// Handles a trip entering the fragment.
///
/// If a leave event for this trip was already recorded (events arrived in
/// reverse order), the pair is finalized as a finished trip; otherwise the
/// entry time is stored until the matching leave event arrives.
fn handle(&mut self, msg: FragmentEntryEvent, _ctx: &mut Context<Self>) {
if let Some(stop) = self.current_trip_stops.get(&msg.trip_id).cloned() {
// Reverse order: the leave was observed first, so this entry time
// is the start of the traversal.
self.insert_finished_trip(&msg.time, &stop);
self.current_trip_stops.remove(&msg.trip_id);
println!(
"Unregistered trip (rev-order) {} for fragment {}-{}, took: {:?}",
msg.trip_id,
self.stop_names[0],
self.stop_names[1],
self.past_trip_duration.last().map(|x| x.as_secs())
)
} else {
println!(
"Registered new trip {} for fragment {}",
&msg.trip_id, &self.stop_names[0]
);
self.current_trip_starts.insert(msg.trip_id, msg.time);
}
}
}
impl Handler<FragmentLeaveEvent> for RouteFragment {
type Result = ();
/// Handles a trip leaving the fragment.
///
/// If the matching entry was recorded, the traversal is finalized;
/// otherwise the leave time is stored so a later (out-of-order) entry
/// event can pair with it.
fn handle(&mut self, msg: FragmentLeaveEvent, _ctx: &mut Context<Self>) {
if let Some(start) = self.current_trip_starts.get(&msg.trip_id).cloned() {
self.insert_finished_trip(&start, &msg.time);
self.current_trip_starts.remove(&msg.trip_id);
println!(
"Unregistered trip {} for fragment {}-{}, took: {:?}",
msg.trip_id,
self.stop_names[0],
self.stop_names[1],
self.past_trip_duration.last().map(|x| x.as_secs())
)
} else {
// No entry seen yet: remember the leave time for reverse-order pairing.
self.current_trip_stops.insert(msg.trip_id, msg.time);
}
}
}
impl Handler<UpdateMeta> for RouteFragment {
type Result = ();
// Renames one endpoint: index 0 is the start name, index 1 the stop name.
fn handle(&mut self, msg: UpdateMeta, _ctx: &mut Context<Self>) {
match msg {
UpdateMeta::UpdateStartName(start) => self.stop_names[0] = start,
UpdateMeta::UpdateStopName(stop) => self.stop_names[1] = stop,
}
}
}
impl RouteFragment {
/// Convenience constructor for an `UpdateMeta::UpdateStartName` message.
pub fn update_start(s: &str) -> UpdateMeta {
UpdateMeta::UpdateStartName(s.to_string())
}
/// Convenience constructor for an `UpdateMeta::UpdateStopName` message.
pub fn update_stop(s: &str) -> UpdateMeta {
UpdateMeta::UpdateStopName(s.to_string())
}
}
|
fn main() {
    // BUG FIX: the original did not compile. Rust arrays are homogeneous,
    // so `["This", 4]` (str + integer) is a type error; a heterogeneous
    // fixed-size group is a tuple.
    let _m = ("This", 4);
    // `[4, 5.]` also failed: the integer literal 4 does not unify with the
    // float 5.0. Use a uniform float array instead.
    let _n = [4.0, 5.0];
}
extern crate parser_c;
use std::env;
use parser_c::parser::exec_parser_simple;
use parser_c::parser::lexer::lex;
use parser_c::data::input_stream::InputStream;
use parser_c::data::position::Position;
use parser_c::parser::tokens::CTokEof;
/// Tokenizes the C file named by the first CLI argument, optionally dumping
/// each token when the second argument is "-d"; lexer errors go to stderr.
fn main() {
let mut args = env::args();
// `nth(1)` consumes argv[0] and yields argv[1] (the input path).
let input_file = args.nth(1).unwrap();
// After the call above the iterator is positioned at argv[2], so `nth(0)`
// yields the optional "-d" flag. `.map(|x| &**x)` borrows the String as &str.
let dump = args.nth(0).as_ref().map(|x| &**x) == Some("-d");
let input_stream = InputStream::from_file(&input_file).unwrap();
let init_pos = Position::from_file(&input_file);
// Drive the lexer token by token until EOF, printing tokens in dump mode.
let res = exec_parser_simple(|p| loop {
let tok = lex(p)?;
if dump {
println!("{:?}", tok);
}
if let CTokEof = tok {
return Ok(());
}
}, input_stream, init_pos);
if let Err(e) = res {
eprintln!("{}", e);
}
}
|
mod boxed;
mod peek;
mod prefixed;
mod sensor;
pub use self::{
boxed::BoxedIo,
peek::{Peek, Peekable},
prefixed::PrefixedIo,
sensor::{Sensor, SensorIo},
};
pub use std::io::{Error, ErrorKind, Read, Result, Write};
use std::net::SocketAddr;
pub use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
/// Shorthand for task polls that yield `io::Result` values.
pub type Poll<T> = std::task::Poll<Result<T>>;
/// Infallible access to the remote peer's socket address.
pub trait PeerAddr {
fn peer_addr(&self) -> SocketAddr;
}
impl PeerAddr for tokio::net::TcpStream {
fn peer_addr(&self) -> SocketAddr {
// Panics (via expect) if the socket has no peer, e.g. after disconnect.
tokio::net::TcpStream::peer_addr(self).expect("TcpStream must have a peer address")
}
}
// TLS streams delegate to the wrapped inner transport.
impl<T: PeerAddr> PeerAddr for tokio_rustls::client::TlsStream<T> {
fn peer_addr(&self) -> SocketAddr {
self.get_ref().0.peer_addr()
}
}
impl<T: PeerAddr> PeerAddr for tokio_rustls::server::TlsStream<T> {
fn peer_addr(&self) -> SocketAddr {
self.get_ref().0.peer_addr()
}
}
// Test doubles report a dummy 0.0.0.0:0 address.
#[cfg(feature = "tokio-test")]
impl PeerAddr for tokio_test::io::Mock {
fn peer_addr(&self) -> SocketAddr {
([0, 0, 0, 0], 0).into()
}
}
#[cfg(feature = "tokio-test")]
impl PeerAddr for tokio::io::DuplexStream {
fn peer_addr(&self) -> SocketAddr {
([0, 0, 0, 0], 0).into()
}
}
mod internal {
use super::{AsyncRead, AsyncWrite, PeerAddr, Poll};
use bytes::{Buf, BufMut};
use std::pin::Pin;
use std::task::Context;
/// This trait is private, since its purpose is for creating a dynamic trait
/// object, but doing so without care can lead to not getting vectored
/// writes.
///
/// Instead, use the concrete `BoxedIo` type.
pub trait Io: AsyncRead + AsyncWrite + PeerAddr + Send {
/// This method is to allow using `Async::poll_read_buf` even through a
/// trait object.
fn poll_read_buf_erased(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut dyn BufMut,
) -> Poll<usize>;
/// This method is to allow using `Async::poll_write_buf` even through a
/// trait object.
fn poll_write_buf_erased(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut dyn Buf,
) -> Poll<usize>;
}
// Each impl forwards the erased call to the concrete poll_{read,write}_buf
// method, re-borrowing `buf` as `&mut &mut dyn ...` to satisfy the generic
// Buf/BufMut bound.
impl Io for tokio::net::TcpStream {
fn poll_write_buf_erased(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
mut buf: &mut dyn Buf,
) -> Poll<usize> {
self.poll_write_buf(cx, &mut buf)
}
fn poll_read_buf_erased(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
mut buf: &mut dyn BufMut,
) -> Poll<usize> {
self.poll_read_buf(cx, &mut buf)
}
}
impl<S: Io + Unpin> Io for tokio_rustls::server::TlsStream<S> {
fn poll_write_buf_erased(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
mut buf: &mut dyn Buf,
) -> Poll<usize> {
self.poll_write_buf(cx, &mut buf)
}
fn poll_read_buf_erased(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
mut buf: &mut dyn BufMut,
) -> Poll<usize> {
self.poll_read_buf(cx, &mut buf)
}
}
impl<S: Io + Unpin> Io for tokio_rustls::client::TlsStream<S> {
fn poll_write_buf_erased(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
mut buf: &mut dyn Buf,
) -> Poll<usize> {
self.poll_write_buf(cx, &mut buf)
}
fn poll_read_buf_erased(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
mut buf: &mut dyn BufMut,
) -> Poll<usize> {
self.poll_read_buf(cx, &mut buf)
}
}
}
|
#[allow(unused_imports)]
use nom::*;
use ast::Ast;
use parser::body::body;
use std::boxed::Box;
use parser::expressions::sexpr;
// Parses `if <sexpr> { <body> } [else { <body> }]` into an Ast::Conditional.
// The else branch is optional; `complete!` stops nom from reporting
// Incomplete when the input ends right after the if-body.
named!(pub if_expression<Ast>,
do_parse!(
ws!(tag!("if")) >>
if_conditional: ws!(sexpr) >>
if_body: ws!(body) >>
else_body: opt!(
complete!(
// nest another do_parse to get the else keyword and its associated block
do_parse!(
ws!(tag!("else")) >>
e: map!( // Map the body of the else statement into a Box so it can easily live in the Some()
ws!(body),
Box::new
) >>
(e)
)
)
) >>
(
Ast::Conditional {
condition: Box::new(if_conditional),
true_expr: Box::new(if_body),
false_expr: else_body
})
)
);
#[cfg(test)]
mod test {
use super::*;
use datatype::Datatype;
use s_expression::SExpression;
// An `if` with a literal condition and no else parses to a Conditional
// with false_expr = None.
#[test]
fn parse_if_statement_test() {
let input_string = "if true { true }";
let (_, value) = match if_expression(input_string.as_bytes()) {
IResult::Done(rest, v) => (rest, v),
IResult::Error(e) => panic!("Error in parsing: {}", e),
IResult::Incomplete(i) => panic!("Incomplete parse: {:?}", i),
};
assert_eq!(
Ast::Conditional {
condition: Box::new(Ast::Literal(Datatype::Bool(true))),
true_expr: Box::new(Ast::ExpressionList(
vec![Ast::Literal(Datatype::Bool(true))],
)),
false_expr: None,
},
value
)
}
// The condition may be a full s-expression, e.g. an equality test.
#[test]
fn parse_if_statement_with_expression_test() {
let input_string = "if 1 == 1 { true }";
let (_, value) = match if_expression(input_string.as_bytes()) {
IResult::Done(rest, v) => (rest, v),
IResult::Error(e) => panic!("Error in parsing: {}", e),
IResult::Incomplete(i) => panic!("Incomplete parse: {:?}", i),
};
assert_eq!(
Ast::Conditional {
condition: Box::new(Ast::SExpr(SExpression::Equals(
Box::new(Ast::Literal(Datatype::Number(1))),
Box::new(Ast::Literal(Datatype::Number(1))),
))),
true_expr: Box::new(Ast::ExpressionList(
vec![Ast::Literal(Datatype::Bool(true))],
)),
false_expr: None,
},
value
)
}
// With an else block, false_expr is Some(boxed expression list).
#[test]
fn parse_if_else_statement_test() {
let input_string = "if true { true } else { true }";
let (_, value) = match if_expression(input_string.as_bytes()) {
IResult::Done(rest, v) => (rest, v),
IResult::Error(e) => panic!("Error in parsing: {}", e),
IResult::Incomplete(i) => panic!("Incomplete parse: {:?}", i),
};
assert_eq!(
Ast::Conditional {
condition: Box::new(Ast::Literal(Datatype::Bool(true))),
true_expr: Box::new(Ast::ExpressionList(
vec![Ast::Literal(Datatype::Bool(true))],
)),
false_expr: Some(Box::new(Ast::ExpressionList(
vec![Ast::Literal(Datatype::Bool(true))],
))),
},
value
)
}
}
|
// NOTE(review): this layout appears to be svd2rust-generated (offset-tagged
// doc attributes, `_reserved*` padding fields for register gaps) — edit the
// SVD/codegen, not this file, if offsets need to change.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
#[doc = "0x00 - TIM3 control register 1"]
pub tim3_cr1: TIM3_CR1,
_reserved1: [u8; 0x02],
#[doc = "0x04 - TIM3 control register 2"]
pub tim3_cr2: TIM3_CR2,
#[doc = "0x08 - TIM3 slave mode control register"]
pub tim3_smcr: TIM3_SMCR,
#[doc = "0x0c - TIM3 DMA/Interrupt enable register"]
pub tim3_dier: TIM3_DIER,
#[doc = "0x10 - TIM3 status register"]
pub tim3_sr: TIM3_SR,
#[doc = "0x14 - TIM3 event generation register"]
pub tim3_egr: TIM3_EGR,
_reserved6: [u8; 0x02],
// CCMR1/CCMR2 are unions of input/output layouts; accessed through the
// `tim3_ccmr*_{input,output}` methods on the impl below.
_reserved_6_tim3_ccmr1: [u8; 0x04],
_reserved_7_tim3_ccmr2: [u8; 0x04],
#[doc = "0x20 - TIM3 capture/compare enable register"]
pub tim3_ccer: TIM3_CCER,
_reserved9: [u8; 0x02],
#[doc = "0x24 - TIM3 counter"]
pub tim3_cnt: TIM3_CNT,
#[doc = "0x28 - TIM3 prescaler"]
pub tim3_psc: TIM3_PSC,
_reserved11: [u8; 0x02],
#[doc = "0x2c - TIM3 auto-reload register"]
pub tim3_arr: TIM3_ARR,
_reserved12: [u8; 0x04],
#[doc = "0x34 - TIM3 capture/compare register 1"]
pub tim3_ccr1: TIM3_CCR1,
#[doc = "0x38 - TIM3 capture/compare register 2"]
pub tim3_ccr2: TIM3_CCR2,
#[doc = "0x3c - TIM3 capture/compare register 3"]
pub tim3_ccr3: TIM3_CCR3,
#[doc = "0x40 - TIM3 capture/compare register 4"]
pub tim3_ccr4: TIM3_CCR4,
_reserved16: [u8; 0x14],
#[doc = "0x58 - TIM3 timer encoder control register"]
pub tim3_ecr: TIM3_ECR,
#[doc = "0x5c - TIM3 timer input selection register"]
pub tim3_tisel: TIM3_TISEL,
#[doc = "0x60 - TIM3 alternate function register 1"]
pub tim3_af1: TIM3_AF1,
#[doc = "0x64 - TIM3 alternate function register 2"]
pub tim3_af2: TIM3_AF2,
_reserved20: [u8; 0x0374],
#[doc = "0x3dc - TIM3 DMA control register"]
pub tim3_dcr: TIM3_DCR,
#[doc = "0x3e0 - TIM3 DMA address for full transfer"]
pub tim3_dmar: TIM3_DMAR,
}
impl RegisterBlock {
#[doc = "0x18 - TIM3 capture/compare mode register 1 \\[alternate\\]"]
#[inline(always)]
pub const fn tim3_ccmr1_output(&self) -> &TIM3_CCMR1_OUTPUT {
// SAFETY: offset 24 (0x18) lies inside this #[repr(C)] block (the
// `_reserved_6_tim3_ccmr1` padding), and the output view shares that
// address with the input view of the same hardware register.
unsafe { &*(self as *const Self).cast::<u8>().add(24usize).cast() }
}
#[doc = "0x18 - TIM3 capture/compare mode register 1 \\[alternate\\]"]
#[inline(always)]
pub const fn tim3_ccmr1_input(&self) -> &TIM3_CCMR1_INPUT {
// SAFETY: same address as tim3_ccmr1_output; the register is a union
// of input/output layouts at offset 0x18.
unsafe { &*(self as *const Self).cast::<u8>().add(24usize).cast() }
}
#[doc = "0x1c - TIM3 capture/compare mode register 2 \\[alternate\\]"]
#[inline(always)]
pub const fn tim3_ccmr2_output(&self) -> &TIM3_CCMR2_OUTPUT {
// SAFETY: offset 28 (0x1c) lies inside this block (the
// `_reserved_7_tim3_ccmr2` padding) and holds the CCMR2 register.
unsafe { &*(self as *const Self).cast::<u8>().add(28usize).cast() }
}
#[doc = "0x1c - TIM3 capture/compare mode register 2 \\[alternate\\]"]
#[inline(always)]
pub const fn tim3_ccmr2_input(&self) -> &TIM3_CCMR2_INPUT {
// SAFETY: same address as tim3_ccmr2_output; alternate view of the
// register at offset 0x1c.
unsafe { &*(self as *const Self).cast::<u8>().add(28usize).cast() }
}
}
#[doc = "TIM3_CR1 (rw) register accessor: TIM3 control register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_cr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_cr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_cr1`]
module"]
pub type TIM3_CR1 = crate::Reg<tim3_cr1::TIM3_CR1_SPEC>;
#[doc = "TIM3 control register 1"]
pub mod tim3_cr1;
#[doc = "TIM3_CR2 (rw) register accessor: TIM3 control register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_cr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_cr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_cr2`]
module"]
pub type TIM3_CR2 = crate::Reg<tim3_cr2::TIM3_CR2_SPEC>;
#[doc = "TIM3 control register 2"]
pub mod tim3_cr2;
#[doc = "TIM3_SMCR (rw) register accessor: TIM3 slave mode control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_smcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_smcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_smcr`]
module"]
pub type TIM3_SMCR = crate::Reg<tim3_smcr::TIM3_SMCR_SPEC>;
#[doc = "TIM3 slave mode control register"]
pub mod tim3_smcr;
#[doc = "TIM3_DIER (rw) register accessor: TIM3 DMA/Interrupt enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_dier::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_dier::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_dier`]
module"]
pub type TIM3_DIER = crate::Reg<tim3_dier::TIM3_DIER_SPEC>;
#[doc = "TIM3 DMA/Interrupt enable register"]
pub mod tim3_dier;
#[doc = "TIM3_SR (rw) register accessor: TIM3 status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_sr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_sr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_sr`]
module"]
pub type TIM3_SR = crate::Reg<tim3_sr::TIM3_SR_SPEC>;
#[doc = "TIM3 status register"]
pub mod tim3_sr;
#[doc = "TIM3_EGR (w) register accessor: TIM3 event generation register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_egr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_egr`]
module"]
pub type TIM3_EGR = crate::Reg<tim3_egr::TIM3_EGR_SPEC>;
#[doc = "TIM3 event generation register"]
pub mod tim3_egr;
#[doc = "TIM3_CCMR1_Input (rw) register accessor: TIM3 capture/compare mode register 1 \\[alternate\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_ccmr1_input::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_ccmr1_input::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_ccmr1_input`]
module"]
pub type TIM3_CCMR1_INPUT = crate::Reg<tim3_ccmr1_input::TIM3_CCMR1_INPUT_SPEC>;
#[doc = "TIM3 capture/compare mode register 1 \\[alternate\\]"]
pub mod tim3_ccmr1_input;
#[doc = "TIM3_CCMR1_Output (rw) register accessor: TIM3 capture/compare mode register 1 \\[alternate\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_ccmr1_output::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_ccmr1_output::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_ccmr1_output`]
module"]
pub type TIM3_CCMR1_OUTPUT = crate::Reg<tim3_ccmr1_output::TIM3_CCMR1_OUTPUT_SPEC>;
#[doc = "TIM3 capture/compare mode register 1 \\[alternate\\]"]
pub mod tim3_ccmr1_output;
#[doc = "TIM3_CCMR2_Input (rw) register accessor: TIM3 capture/compare mode register 2 \\[alternate\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_ccmr2_input::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_ccmr2_input::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_ccmr2_input`]
module"]
pub type TIM3_CCMR2_INPUT = crate::Reg<tim3_ccmr2_input::TIM3_CCMR2_INPUT_SPEC>;
#[doc = "TIM3 capture/compare mode register 2 \\[alternate\\]"]
pub mod tim3_ccmr2_input;
#[doc = "TIM3_CCMR2_Output (rw) register accessor: TIM3 capture/compare mode register 2 \\[alternate\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_ccmr2_output::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_ccmr2_output::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_ccmr2_output`]
module"]
pub type TIM3_CCMR2_OUTPUT = crate::Reg<tim3_ccmr2_output::TIM3_CCMR2_OUTPUT_SPEC>;
#[doc = "TIM3 capture/compare mode register 2 \\[alternate\\]"]
pub mod tim3_ccmr2_output;
#[doc = "TIM3_CCER (rw) register accessor: TIM3 capture/compare enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_ccer::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_ccer::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_ccer`]
module"]
pub type TIM3_CCER = crate::Reg<tim3_ccer::TIM3_CCER_SPEC>;
#[doc = "TIM3 capture/compare enable register"]
pub mod tim3_ccer;
#[doc = "TIM3_CNT (rw) register accessor: TIM3 counter\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_cnt::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_cnt::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_cnt`]
module"]
pub type TIM3_CNT = crate::Reg<tim3_cnt::TIM3_CNT_SPEC>;
#[doc = "TIM3 counter"]
pub mod tim3_cnt;
#[doc = "TIM3_PSC (rw) register accessor: TIM3 prescaler\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_psc::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_psc::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_psc`]
module"]
pub type TIM3_PSC = crate::Reg<tim3_psc::TIM3_PSC_SPEC>;
#[doc = "TIM3 prescaler"]
pub mod tim3_psc;
#[doc = "TIM3_ARR (rw) register accessor: TIM3 auto-reload register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_arr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_arr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_arr`]
module"]
pub type TIM3_ARR = crate::Reg<tim3_arr::TIM3_ARR_SPEC>;
#[doc = "TIM3 auto-reload register"]
pub mod tim3_arr;
#[doc = "TIM3_CCR1 (rw) register accessor: TIM3 capture/compare register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_ccr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_ccr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_ccr1`]
module"]
pub type TIM3_CCR1 = crate::Reg<tim3_ccr1::TIM3_CCR1_SPEC>;
#[doc = "TIM3 capture/compare register 1"]
pub mod tim3_ccr1;
#[doc = "TIM3_CCR2 (rw) register accessor: TIM3 capture/compare register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_ccr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_ccr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_ccr2`]
module"]
pub type TIM3_CCR2 = crate::Reg<tim3_ccr2::TIM3_CCR2_SPEC>;
#[doc = "TIM3 capture/compare register 2"]
pub mod tim3_ccr2;
#[doc = "TIM3_CCR3 (rw) register accessor: TIM3 capture/compare register 3\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_ccr3::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_ccr3::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_ccr3`]
module"]
pub type TIM3_CCR3 = crate::Reg<tim3_ccr3::TIM3_CCR3_SPEC>;
#[doc = "TIM3 capture/compare register 3"]
pub mod tim3_ccr3;
#[doc = "TIM3_CCR4 (rw) register accessor: TIM3 capture/compare register 4\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_ccr4::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_ccr4::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_ccr4`]
module"]
pub type TIM3_CCR4 = crate::Reg<tim3_ccr4::TIM3_CCR4_SPEC>;
#[doc = "TIM3 capture/compare register 4"]
pub mod tim3_ccr4;
#[doc = "TIM3_ECR (rw) register accessor: TIM3 timer encoder control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_ecr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_ecr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_ecr`]
module"]
pub type TIM3_ECR = crate::Reg<tim3_ecr::TIM3_ECR_SPEC>;
#[doc = "TIM3 timer encoder control register"]
pub mod tim3_ecr;
#[doc = "TIM3_TISEL (rw) register accessor: TIM3 timer input selection register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_tisel::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_tisel::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_tisel`]
module"]
pub type TIM3_TISEL = crate::Reg<tim3_tisel::TIM3_TISEL_SPEC>;
#[doc = "TIM3 timer input selection register"]
pub mod tim3_tisel;
#[doc = "TIM3_AF1 (rw) register accessor: TIM3 alternate function register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_af1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_af1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_af1`]
module"]
pub type TIM3_AF1 = crate::Reg<tim3_af1::TIM3_AF1_SPEC>;
#[doc = "TIM3 alternate function register 1"]
pub mod tim3_af1;
#[doc = "TIM3_AF2 (rw) register accessor: TIM3 alternate function register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_af2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_af2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_af2`]
module"]
pub type TIM3_AF2 = crate::Reg<tim3_af2::TIM3_AF2_SPEC>;
#[doc = "TIM3 alternate function register 2"]
pub mod tim3_af2;
#[doc = "TIM3_DCR (rw) register accessor: TIM3 DMA control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_dcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_dcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_dcr`]
module"]
pub type TIM3_DCR = crate::Reg<tim3_dcr::TIM3_DCR_SPEC>;
#[doc = "TIM3 DMA control register"]
pub mod tim3_dcr;
#[doc = "TIM3_DMAR (rw) register accessor: TIM3 DMA address for full transfer\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tim3_dmar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tim3_dmar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tim3_dmar`]
module"]
pub type TIM3_DMAR = crate::Reg<tim3_dmar::TIM3_DMAR_SPEC>;
#[doc = "TIM3 DMA address for full transfer"]
pub mod tim3_dmar;
|
use crate::filter::PostFilters;
use bincode::{deserialize, Result};
use serde::{Deserialize, Serialize};
/// Serializable container for the persisted filter set.
#[derive(Serialize, Deserialize)]
pub struct Storage {
    // The post filters being persisted.
    pub filters: PostFilters,
}
impl From<PostFilters> for Storage {
fn from(filters: PostFilters) -> Self {
Storage { filters }
}
}
impl Storage {
    /// Decodes a `Storage` from its bincode byte representation.
    ///
    /// Returns a bincode error when `bytes` does not deserialize into
    /// a `PostFilters` value.
    pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
        Ok(Self {
            filters: deserialize(bytes)?,
        })
    }
}
|
use std::fmt;
/// A named feature with an optional version.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct Feature {
    name: String,
    version: Option<String>,
}

impl Feature {
    /// Creates a feature from its name and an optional version string.
    pub fn new(name: String, version: Option<String>) -> Self {
        Self { name, version }
    }
}

impl fmt::Display for Feature {
    /// Renders as `name:version` when a version is present, otherwise `name`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Some(version) = &self.version {
            write!(f, "{}:{}", self.name, version)
        } else {
            f.write_str(&self.name)
        }
    }
}
|
// This file is part of Basilisk-node.
// Copyright (C) 2020-2021 Intergalactic, Limited (GIB).
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(clippy::or_fun_call)]
#![allow(clippy::too_many_arguments)]
use basilisk_runtime::{
AccountId, AssetRegistryConfig, AuraId, Balance, BalancesConfig, CollatorSelectionConfig, CouncilConfig,
DusterConfig, ElectionsConfig, GenesisConfig, MultiTransactionPaymentConfig, OrmlNftConfig, ParachainInfoConfig,
SessionConfig, Signature, SudoConfig, SystemConfig, TechnicalCommitteeConfig, TokensConfig, VestingConfig, BSX,
NATIVE_EXISTENTIAL_DEPOSIT, WASM_BINARY,
};
use cumulus_primitives_core::ParaId;
use hex_literal::hex;
use primitives::BlockNumber;
use sc_chain_spec::{ChainSpecExtension, ChainSpecGroup};
use sc_service::ChainType;
use sc_telemetry::TelemetryEndpoints;
use serde::{Deserialize, Serialize};
use serde_json::map::Map;
use sp_core::{crypto::UncheckedInto, sr25519, Pair, Public};
use sp_runtime::traits::{IdentifyAccount, Verify};
const TOKEN_DECIMALS: u8 = 12;
const TOKEN_SYMBOL: &str = "BSX";
const PROTOCOL_ID: &str = "bsx";
// The URL for the telemetry server.
const TELEMETRY_URLS: [&str; 2] = [
"wss://telemetry.polkadot.io/submit/",
"wss://telemetry.hydradx.io:9000/submit/",
];
//Kusama parachain id
const PARA_ID: u32 = 2090;
/// The extensions for the [`ChainSpec`].
/// The extensions for the [`ChainSpec`].
// Unknown fields in a chain-spec JSON are rejected rather than ignored.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ChainSpecGroup, ChainSpecExtension)]
#[serde(deny_unknown_fields)]
pub struct Extensions {
    /// The relay chain of the Parachain.
    pub relay_chain: String,
    /// The id of the Parachain.
    pub para_id: u32,
}
impl Extensions {
    /// Try to get the extension from the given `ChainSpec`.
    ///
    /// Returns `None` when the spec carries no `Extensions` of this type.
    #[allow(clippy::borrowed_box)]
    pub fn try_get(chain_spec: &Box<dyn sc_service::ChainSpec>) -> Option<&Self> {
        sc_chain_spec::get_extension(chain_spec.extensions())
    }
}
/// Specialized `ChainSpec`. This is a specialization of the general Substrate ChainSpec type.
pub type ChainSpec = sc_service::GenericChainSpec<GenesisConfig, Extensions>;

/// Generate a crypto pair from seed.
///
/// Builds the key from the derivation string `//<seed>` — intended for the
/// well-known development seeds (e.g. "Alice"), as used by the dev/local
/// configs below.
pub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {
    TPublic::Pair::from_string(&format!("//{}", seed), None)
        .expect("static values are valid; qed")
        .public()
}
type AccountPublic = <Signature as Verify>::Signer;

/// Generate an account ID from seed.
pub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId
where
    AccountPublic: From<<TPublic::Pair as Pair>::Public>,
{
    // Derive the public key for `//<seed>`, then map it to its account id.
    AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()
}
/// Loads the test vesting schedule bundled with the chain-spec resources.
///
/// Panics when the embedded JSON is malformed or when the same account
/// appears more than once in the list.
pub fn get_vesting_config_for_test() -> Vec<(AccountId, BlockNumber, BlockNumber, u32, Balance)> {
    let raw = &include_bytes!("../res/basilisk-vesting-lbp-test.json")[..];
    let schedule: Vec<(AccountId, BlockNumber, BlockNumber, u32, Balance)> =
        serde_json::from_slice(raw).unwrap();

    // Every account may appear at most once in the genesis vesting list.
    let distinct_accounts = schedule
        .iter()
        .map(|entry| entry.0.clone())
        .collect::<std::collections::BTreeSet<_>>();
    assert!(
        distinct_accounts.len() == schedule.len(),
        "duplicate vesting accounts in genesis."
    );

    schedule
}
/// Production Basilisk chain spec, loaded from the bundled `basilisk.json`.
pub fn basilisk_parachain_config() -> Result<ChainSpec, String> {
    ChainSpec::from_json_bytes(&include_bytes!("../res/basilisk.json")[..])
}
/// Live chain spec for the Basilisk parachain on Kusama (staging).
pub fn kusama_staging_parachain_config() -> Result<ChainSpec, String> {
    let wasm_binary = WASM_BINARY.ok_or("Development wasm binary not available".to_string())?;
    // Token metadata shown by wallets and explorers.
    let mut properties = Map::new();
    properties.insert("tokenDecimals".into(), TOKEN_DECIMALS.into());
    properties.insert("tokenSymbol".into(), TOKEN_SYMBOL.into());
    Ok(ChainSpec::from_genesis(
        // Name
        "Basilisk",
        // ID
        "basilisk",
        ChainType::Live,
        move || {
            parachain_genesis(
                wasm_binary,
                // Sudo account
                hex!["bca8eeb9c7cf74fc28ebe4091d29ae1c12ed622f7e3656aae080b54d5ff9a23c"].into(), //TODO intergalactic
                //initial authorities & invulnerables
                // (account id, aura session key) pairs — same key reused for both here.
                vec![
                    (
                        hex!["f25e5d7b43266a5b4cca762c9be917f18852d7a5db85e734776206eeb539dd4f"].into(),
                        hex!["f25e5d7b43266a5b4cca762c9be917f18852d7a5db85e734776206eeb539dd4f"].unchecked_into(),
                    ),
                    (
                        hex!["e84a7090cb18fe39eafebdae9a3ac1111c955247a202a3ab2a3cfe8573c03c60"].into(),
                        hex!["e84a7090cb18fe39eafebdae9a3ac1111c955247a202a3ab2a3cfe8573c03c60"].unchecked_into(),
                    ),
                    (
                        hex!["c49e3fbebac92027e0d19c2fc1ddc288eb549971831e336550832a476727f601"].into(),
                        hex!["c49e3fbebac92027e0d19c2fc1ddc288eb549971831e336550832a476727f601"].unchecked_into(),
                    ),
                    (
                        hex!["c856aabea6e433be2dfe233c6118d156133e4e663a1223da06421058ddb56712"].into(),
                        hex!["c856aabea6e433be2dfe233c6118d156133e4e663a1223da06421058ddb56712"].unchecked_into(),
                    ),
                    (
                        hex!["e02a753fc885bde7ea5839df8619ab80b67be6c869bc19b41f20f865a2f90578"].into(),
                        hex!["e02a753fc885bde7ea5839df8619ab80b67be6c869bc19b41f20f865a2f90578"].unchecked_into(),
                    ),
                ],
                // Pre-funded accounts
                vec![],
                true,
                PARA_ID.into(),
                //technical committee
                hex!["6d6f646c70792f74727372790000000000000000000000000000000000000000"].into(), // TREASURY - Fallback for multi tx payment
            )
        },
        // Bootnodes
        vec![
            "/dns/p2p-01.basilisk.hydradx.io/tcp/30333/p2p/12D3KooWJRdTtgFnwrrcigrMRxdJ9zfmhtpH5qgAV9budWat4UtR"
                .parse()
                .unwrap(),
            "/dns/p2p-02.basilisk.hydradx.io/tcp/30333/p2p/12D3KooWQNvuYebz6Zt34LnesFfdVh5i7FWP8GUe9QxuBmKE4b9R"
                .parse()
                .unwrap(),
            "/dns/p2p-03.basilisk.hydradx.io/tcp/30333/p2p/12D3KooWD2Y9VkfC9cmQEpKZLN26xWq7XPJXHDUH8LNVmhoNBrdJ"
                .parse()
                .unwrap(),
        ],
        // Telemetry
        Some(
            TelemetryEndpoints::new(vec![
                (TELEMETRY_URLS[0].to_string(), 0),
                (TELEMETRY_URLS[1].to_string(), 0),
            ])
            .expect("Telemetry url is valid"),
        ),
        // Protocol ID
        Some(PROTOCOL_ID),
        // Properties
        Some(properties),
        // Extensions
        Extensions {
            relay_chain: "kusama".into(),
            para_id: PARA_ID,
        },
    ))
}
/// Live chain spec for the public "Basilisk Egg" testnet (Westend relay).
pub fn testnet_parachain_config(para_id: ParaId) -> Result<ChainSpec, String> {
    let wasm_binary = WASM_BINARY.ok_or("Development wasm binary not available".to_string())?;
    // Token metadata shown by wallets and explorers.
    let mut properties = Map::new();
    properties.insert("tokenDecimals".into(), TOKEN_DECIMALS.into());
    properties.insert("tokenSymbol".into(), TOKEN_SYMBOL.into());
    Ok(ChainSpec::from_genesis(
        // Name
        "Basilisk Egg",
        // ID
        "basilisk_egg",
        ChainType::Live,
        move || {
            testnet_parachain_genesis(
                wasm_binary,
                // Sudo account
                hex!["30035c21ba9eda780130f2029a80c3e962f56588bc04c36be95a225cb536fb55"].into(),
                //initial authorities & invulnerables
                vec![
                    (
                        hex!["da0fa4ab419def66fb4ac5224e594e82c34ee795268fc7787c8a096c4ff14f11"].into(),
                        hex!["da0fa4ab419def66fb4ac5224e594e82c34ee795268fc7787c8a096c4ff14f11"].unchecked_into(),
                    ),
                    (
                        hex!["ecd7a5439c6ab0cd6550bc2f1cef5299d425bb95bb6d7afb32aa3d95ee4f7f1f"].into(),
                        hex!["ecd7a5439c6ab0cd6550bc2f1cef5299d425bb95bb6d7afb32aa3d95ee4f7f1f"].unchecked_into(),
                    ),
                    (
                        hex!["f0ad6f1aae7a445c1e80cac883096ec8177eda276fec53ad9ccbe570f3090a26"].into(),
                        hex!["f0ad6f1aae7a445c1e80cac883096ec8177eda276fec53ad9ccbe570f3090a26"].unchecked_into(),
                    ),
                ],
                // Pre-funded accounts
                vec![hex!["30035c21ba9eda780130f2029a80c3e962f56588bc04c36be95a225cb536fb55"].into()],
                true,
                para_id,
                //council
                vec![hex!["30035c21ba9eda780130f2029a80c3e962f56588bc04c36be95a225cb536fb55"].into()],
                //technical committee
                vec![hex!["30035c21ba9eda780130f2029a80c3e962f56588bc04c36be95a225cb536fb55"].into()],
                hex!["30035c21ba9eda780130f2029a80c3e962f56588bc04c36be95a225cb536fb55"].into(), // SAME AS ROOT
                vec![],
                // Pre-registered assets: (name, existential deposit).
                vec![
                    (b"hKSM".to_vec(), 1_000u128),
                    (b"hDOT".to_vec(), 1_000u128),
                    (b"hETH".to_vec(), 1_000u128),
                    (b"hUSDT".to_vec(), 1_000u128),
                ],
            )
        },
        // Bootnodes
        vec![
            "/dns/p2p-01.basilisk-testnet.hydradx.io/tcp/30333/p2p/12D3KooW9qapYrocm6W1meShf8eQfeJzbry9PN2CN6SfBGbymxPL"
                .parse()
                .unwrap(),
            "/dns/p2p-02.basilisk-testnet.hydradx.io/tcp/30333/p2p/12D3KooWPS16BYW173YxmxEJpQBoDz1t3Ht4yaPwwg5qCTED7N66"
                .parse()
                .unwrap(),
            "/dns/p2p-03.basilisk-testnet.hydradx.io/tcp/30333/p2p/12D3KooWRMgQRtYrWsLvuwg3V3aQEvMgsbb88T29cKCTH6RAxTaj"
                .parse()
                .unwrap(),
        ],
        // Telemetry
        Some(
            TelemetryEndpoints::new(vec![
                (TELEMETRY_URLS[0].to_string(), 0),
                (TELEMETRY_URLS[1].to_string(), 0),
            ])
            .expect("Telemetry url is valid"),
        ),
        // Protocol ID
        Some(PROTOCOL_ID),
        // Properties
        Some(properties),
        // Extensions
        Extensions {
            relay_chain: "westend".into(),
            para_id: para_id.into(),
        },
    ))
}
/// Single-machine development chain spec using the well-known dev seeds.
pub fn parachain_development_config(para_id: ParaId) -> Result<ChainSpec, String> {
    let wasm_binary = WASM_BINARY.ok_or("Development wasm binary not available".to_string())?;
    // Token metadata shown by wallets and explorers.
    let mut properties = Map::new();
    properties.insert("tokenDecimals".into(), TOKEN_DECIMALS.into());
    properties.insert("tokenSymbol".into(), TOKEN_SYMBOL.into());
    Ok(ChainSpec::from_genesis(
        // Name
        "Basilisk Development",
        // ID
        "dev",
        ChainType::Development,
        move || {
            testnet_parachain_genesis(
                wasm_binary,
                // Sudo account
                get_account_id_from_seed::<sr25519::Public>("Alice"),
                //initial authorities & invulnerables
                vec![
                    (
                        get_account_id_from_seed::<sr25519::Public>("Alice"),
                        get_from_seed::<AuraId>("Alice"),
                    ),
                    (
                        get_account_id_from_seed::<sr25519::Public>("Bob"),
                        get_from_seed::<AuraId>("Bob"),
                    ),
                ],
                // Pre-funded accounts
                vec![
                    get_account_id_from_seed::<sr25519::Public>("Alice"),
                    get_account_id_from_seed::<sr25519::Public>("Bob"),
                    get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Duster"),
                ],
                true,
                para_id,
                //council
                vec![get_account_id_from_seed::<sr25519::Public>("Alice")],
                //technical_committe
                vec![
                    get_account_id_from_seed::<sr25519::Public>("Alice"),
                    get_account_id_from_seed::<sr25519::Public>("Bob"),
                    get_account_id_from_seed::<sr25519::Public>("Eve"),
                ],
                get_account_id_from_seed::<sr25519::Public>("Alice"), // SAME AS ROOT
                get_vesting_config_for_test(),
                // Pre-registered assets: (name, existential deposit).
                vec![
                    (b"hKSM".to_vec(), 1_000u128),
                    (b"hDOT".to_vec(), 1_000u128),
                    (b"hETH".to_vec(), 1_000u128),
                    (b"hUSDT".to_vec(), 1_000u128),
                ],
            )
        },
        // Bootnodes
        vec![],
        // Telemetry
        None,
        // Protocol ID
        Some(PROTOCOL_ID),
        // Properties
        Some(properties),
        // Extensions
        Extensions {
            relay_chain: "rococo-dev".into(),
            para_id: para_id.into(),
        },
    ))
}
// This is used when benchmarking pallets
// Originally dev config was used - but benchmarking needs empty asset registry
/// Development chain spec for pallet benchmarking (no pre-registered assets).
pub fn benchmarks_development_config(para_id: ParaId) -> Result<ChainSpec, String> {
    let wasm_binary = WASM_BINARY.ok_or("Development wasm binary not available".to_string())?;
    // Token metadata shown by wallets and explorers.
    let mut properties = Map::new();
    properties.insert("tokenDecimals".into(), TOKEN_DECIMALS.into());
    properties.insert("tokenSymbol".into(), TOKEN_SYMBOL.into());
    Ok(ChainSpec::from_genesis(
        // Name
        "Basilisk Benchmarks",
        // ID
        "benchmarks",
        ChainType::Development,
        move || {
            testnet_parachain_genesis(
                wasm_binary,
                // Sudo account
                get_account_id_from_seed::<sr25519::Public>("Alice"),
                //initial authorities & invulnerables
                vec![
                    (
                        get_account_id_from_seed::<sr25519::Public>("Alice"),
                        get_from_seed::<AuraId>("Alice"),
                    ),
                    (
                        get_account_id_from_seed::<sr25519::Public>("Bob"),
                        get_from_seed::<AuraId>("Bob"),
                    ),
                ],
                // Pre-funded accounts
                vec![
                    get_account_id_from_seed::<sr25519::Public>("Alice"),
                    get_account_id_from_seed::<sr25519::Public>("Bob"),
                    get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Duster"),
                ],
                true,
                para_id,
                //council
                vec![get_account_id_from_seed::<sr25519::Public>("Alice")],
                //technical_committe
                vec![
                    get_account_id_from_seed::<sr25519::Public>("Alice"),
                    get_account_id_from_seed::<sr25519::Public>("Bob"),
                    get_account_id_from_seed::<sr25519::Public>("Eve"),
                ],
                get_account_id_from_seed::<sr25519::Public>("Alice"), // SAME AS ROOT
                get_vesting_config_for_test(),
                // Benchmarking requires an empty asset registry.
                vec![],
            )
        },
        // Bootnodes
        vec![],
        // Telemetry
        None,
        // Protocol ID
        Some(PROTOCOL_ID),
        // Properties
        Some(properties),
        // Extensions
        Extensions {
            relay_chain: "rococo-dev".into(),
            para_id: para_id.into(),
        },
    ))
}
/// Multi-node local testnet chain spec using the well-known dev seeds.
pub fn local_parachain_config(para_id: ParaId) -> Result<ChainSpec, String> {
    let wasm_binary = WASM_BINARY.ok_or("Development wasm binary not available".to_string())?;
    // Token metadata shown by wallets and explorers.
    let mut properties = Map::new();
    properties.insert("tokenDecimals".into(), TOKEN_DECIMALS.into());
    properties.insert("tokenSymbol".into(), TOKEN_SYMBOL.into());
    Ok(ChainSpec::from_genesis(
        // Name
        "Basilisk Local Testnet",
        // ID
        "local_testnet",
        ChainType::Local,
        move || {
            testnet_parachain_genesis(
                wasm_binary,
                // Sudo account
                get_account_id_from_seed::<sr25519::Public>("Alice"),
                //initial authorities & invulnerables
                vec![
                    (
                        get_account_id_from_seed::<sr25519::Public>("Alice"),
                        get_from_seed::<AuraId>("Alice"),
                    ),
                    (
                        get_account_id_from_seed::<sr25519::Public>("Bob"),
                        get_from_seed::<AuraId>("Bob"),
                    ),
                ],
                // Pre-funded accounts
                vec![
                    get_account_id_from_seed::<sr25519::Public>("Alice"),
                    get_account_id_from_seed::<sr25519::Public>("Bob"),
                    get_account_id_from_seed::<sr25519::Public>("Charlie"),
                    get_account_id_from_seed::<sr25519::Public>("Dave"),
                    get_account_id_from_seed::<sr25519::Public>("Eve"),
                    get_account_id_from_seed::<sr25519::Public>("Ferdie"),
                    get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Charlie//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Dave//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Eve//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Ferdie//stash"),
                ],
                true,
                para_id,
                //council
                vec![get_account_id_from_seed::<sr25519::Public>("Alice")],
                //technical_committe
                vec![
                    get_account_id_from_seed::<sr25519::Public>("Alice"),
                    get_account_id_from_seed::<sr25519::Public>("Bob"),
                    get_account_id_from_seed::<sr25519::Public>("Eve"),
                ],
                get_account_id_from_seed::<sr25519::Public>("Alice"), // SAME AS ROOT
                get_vesting_config_for_test(),
                // Pre-registered assets: (name, existential deposit).
                vec![
                    (b"hKSM".to_vec(), 1_000u128),
                    (b"hDOT".to_vec(), 1_000u128),
                    (b"hETH".to_vec(), 1_000u128),
                    (b"hUSDT".to_vec(), 1_000u128),
                ],
            )
        },
        // Bootnodes
        vec![],
        // Telemetry
        None,
        // Protocol ID
        Some(PROTOCOL_ID),
        // Properties
        Some(properties),
        // Extensions
        Extensions {
            relay_chain: "rococo-local".into(),
            para_id: para_id.into(),
        },
    ))
}
/// Configure initial storage state for FRAME modules.
///
/// Production (Kusama) genesis: balances, council, elections, and the
/// technical committee are fixed to the Intergalactic/treasury accounts
/// below rather than taken from the parameters.
fn parachain_genesis(
    wasm_binary: &[u8],
    root_key: AccountId,
    initial_authorities: Vec<(AccountId, AuraId)>,
    _endowed_accounts: Vec<AccountId>,
    _enable_println: bool,
    parachain_id: ParaId,
    tx_fee_payment_account: AccountId,
) -> GenesisConfig {
    GenesisConfig {
        system: SystemConfig {
            // Add Wasm runtime to storage.
            code: wasm_binary.to_vec(),
            changes_trie_config: Default::default(),
        },
        balances: BalancesConfig {
            // Configure endowed accounts with initial balance of a lot.
            balances: vec![
                (
                    // Intergalactic HDX Tokens 15%
                    hex!["bca8eeb9c7cf74fc28ebe4091d29ae1c12ed622f7e3656aae080b54d5ff9a23c"].into(),
                    15_000_000_000u128 * BSX,
                ),
                (
                    // Treasury 9%
                    hex!["6d6f646c70792f74727372790000000000000000000000000000000000000000"].into(),
                    9_000_000_000 * BSX,
                ),
            ],
        },
        sudo: SudoConfig {
            // Assign network admin rights.
            key: root_key,
        },
        collator_selection: CollatorSelectionConfig {
            invulnerables: initial_authorities.iter().cloned().map(|(acc, _)| acc).collect(),
            candidacy_bond: 10_000,
            ..Default::default()
        },
        session: SessionConfig {
            keys: initial_authorities
                .iter()
                .cloned()
                .map(|(acc, aura)| {
                    (
                        acc.clone(),                                    // account id
                        acc,                                            // validator id
                        basilisk_runtime::opaque::SessionKeys { aura }, // session keys
                    )
                })
                .collect(),
        },
        // no need to pass anything, it will panic if we do. Session will take care
        // of this.
        aura: Default::default(),
        asset_registry: AssetRegistryConfig {
            asset_names: vec![],
            native_asset_name: TOKEN_SYMBOL.as_bytes().to_vec(),
            native_existential_deposit: NATIVE_EXISTENTIAL_DEPOSIT,
        },
        multi_transaction_payment: MultiTransactionPaymentConfig {
            currencies: vec![],
            fallback_account: tx_fee_payment_account,
        },
        tokens: TokensConfig { balances: vec![] },
        treasury: Default::default(),
        elections: ElectionsConfig {
            // Intergalactic elections
            members: vec![(
                hex!["bca8eeb9c7cf74fc28ebe4091d29ae1c12ed622f7e3656aae080b54d5ff9a23c"].into(),
                14_999_900_000u128 * BSX,
            )],
        },
        council: CouncilConfig {
            // Intergalactic council member
            members: vec![hex!["bca8eeb9c7cf74fc28ebe4091d29ae1c12ed622f7e3656aae080b54d5ff9a23c"].into()],
            phantom: Default::default(),
        },
        technical_committee: TechnicalCommitteeConfig {
            members: vec![
                hex!["d6cf8789dce651cb54a4036406f4aa0c771914d345c004ad0567b814c71fb637"].into(),
                hex!["bc96ec00952efa8f0e3e08b36bf5096bcb877acac536e478aecb72868db5db02"].into(),
                hex!["2875dd47bc1bcb70e23de79e7538c312be12c716033bbae425130e46f5f2b35e"].into(),
                hex!["644643bf953233d08c4c9bae0acd49f3baa7658d9b342b7e6879bb149ee6e44c"].into(),
                hex!["ccdb435892c9883656d0398b2b67023ba1e11bda0c7f213f70fdac54c6abab3f"].into(),
                hex!["f461c5ae6e80bf4af5b84452789c17b0b0a095a2d77c2a407978147de2d5b572"].into(),
            ],
            phantom: Default::default(),
        },
        orml_nft: OrmlNftConfig {
            tokens: Default::default(),
        },
        vesting: VestingConfig { vesting: vec![] },
        parachain_info: ParachainInfoConfig { parachain_id },
        aura_ext: Default::default(),
        // Dust handling: the treasury account is both excluded from dusting
        // and the recipient of dust.
        duster: DusterConfig {
            account_blacklist: vec![hex!["6d6f646c70792f74727372790000000000000000000000000000000000000000"].into()],
            reward_account: hex!["6d6f646c70792f74727372790000000000000000000000000000000000000000"].into(),
            dust_account: hex!["6d6f646c70792f74727372790000000000000000000000000000000000000000"].into(),
        },
    }
}
/// Configure initial storage state for FRAME modules (testnet/dev variant).
///
/// Unlike `parachain_genesis`, endowed accounts, council, technical
/// committee, vesting list, and registered assets all come from the
/// parameters; dust handling uses the "Duster" dev account.
fn testnet_parachain_genesis(
    wasm_binary: &[u8],
    root_key: AccountId,
    initial_authorities: Vec<(AccountId, AuraId)>,
    endowed_accounts: Vec<AccountId>,
    _enable_println: bool,
    parachain_id: ParaId,
    council_members: Vec<AccountId>,
    tech_committee_members: Vec<AccountId>,
    tx_fee_payment_account: AccountId,
    vesting_list: Vec<(AccountId, BlockNumber, BlockNumber, u32, Balance)>,
    registered_assets: Vec<(Vec<u8>, Balance)>, // (Asset name, Existential deposit)
) -> GenesisConfig {
    GenesisConfig {
        system: SystemConfig {
            // Add Wasm runtime to storage.
            code: wasm_binary.to_vec(),
            changes_trie_config: Default::default(),
        },
        balances: BalancesConfig {
            // Configure endowed accounts with initial balance of a lot.
            balances: endowed_accounts
                .iter()
                .cloned()
                .map(|k| (k, 1_000_000_000u128 * BSX))
                .collect(),
        },
        sudo: SudoConfig {
            // Assign network admin rights.
            key: root_key,
        },
        collator_selection: CollatorSelectionConfig {
            invulnerables: initial_authorities.iter().cloned().map(|(acc, _)| acc).collect(),
            candidacy_bond: 10_000,
            ..Default::default()
        },
        session: SessionConfig {
            keys: initial_authorities
                .iter()
                .cloned()
                .map(|(acc, aura)| {
                    (
                        acc.clone(),                                    // account id
                        acc,                                            // validator id
                        basilisk_runtime::opaque::SessionKeys { aura }, // session keys
                    )
                })
                .collect(),
        },
        // no need to pass anything, it will panic if we do. Session will take care
        // of this.
        aura: Default::default(),
        asset_registry: AssetRegistryConfig {
            asset_names: registered_assets,
            native_asset_name: TOKEN_SYMBOL.as_bytes().to_vec(),
            native_existential_deposit: NATIVE_EXISTENTIAL_DEPOSIT,
        },
        multi_transaction_payment: MultiTransactionPaymentConfig {
            currencies: vec![],
            fallback_account: tx_fee_payment_account,
        },
        tokens: TokensConfig { balances: vec![] },
        treasury: Default::default(),
        elections: ElectionsConfig {
            // Intergalactic elections
            members: vec![(
                get_account_id_from_seed::<sr25519::Public>("Alice"),
                100_000_000u128 * BSX,
            )],
        },
        council: CouncilConfig {
            members: council_members,
            phantom: Default::default(),
        },
        technical_committee: TechnicalCommitteeConfig {
            members: tech_committee_members,
            phantom: Default::default(),
        },
        vesting: VestingConfig { vesting: vesting_list },
        orml_nft: OrmlNftConfig {
            tokens: Default::default(),
        },
        parachain_info: ParachainInfoConfig { parachain_id },
        aura_ext: Default::default(),
        duster: DusterConfig {
            account_blacklist: vec![get_account_id_from_seed::<sr25519::Public>("Duster")],
            reward_account: get_account_id_from_seed::<sr25519::Public>("Duster"),
            dust_account: get_account_id_from_seed::<sr25519::Public>("Duster"),
        },
    }
}
|
use super::{condition_ready, retry, ConstructContext, ANNOTATION_APP_NAME, LABEL_KAFKA_CLUSTER};
use crate::controller::ControllerConfig;
use async_trait::async_trait;
use drogue_client::{registry::v1::KafkaAppStatus, Translator};
use drogue_cloud_operator_common::controller::reconciler::{
progress::{self, OperationOutcome, ProgressOperation},
ReconcileError,
};
use drogue_cloud_service_api::kafka::{make_kafka_resource_name, ResourceType};
use kube::{
api::{ApiResource, DynamicObject},
Api, Resource,
};
use operator_framework::process::create_or_update_by;
use serde_json::json;
/// Reconciliation step that creates (or updates) the Kafka topic resource
/// for an application.
pub struct CreateTopic<'o> {
    // API handle for the topic custom resources.
    pub api: &'o Api<DynamicObject>,
    // Resource description (group/version/kind) of the topic CRD.
    pub resource: &'o ApiResource,
    // Controller-wide settings (topic namespace, cluster name, ...).
    pub config: &'o ControllerConfig,
}
impl CreateTopic<'_> {
    /// Creates or updates the Kafka topic resource for `target`.
    ///
    /// Returns the resulting topic object together with the topic resource
    /// name derived from `target`.
    async fn ensure_kafka_topic(
        kafka_topics: &Api<DynamicObject>,
        kafka_topic_resource: &ApiResource,
        config: &ControllerConfig,
        target: ResourceType,
    ) -> Result<(DynamicObject, String), ReconcileError> {
        let topic_name = make_kafka_resource_name(target.clone());
        let topic = create_or_update_by(
            &kafka_topics,
            Some(config.topic_namespace.clone()),
            &topic_name,
            // Constructor: a fresh object in the configured namespace,
            // carrying the metadata handed in by `create_or_update_by`.
            |meta| {
                let mut topic = DynamicObject::new(&topic_name, &kafka_topic_resource)
                    .within(&config.topic_namespace);
                *topic.meta_mut() = meta;
                topic
            },
            // Equality: skip the update when metadata and payload are unchanged.
            |this, that| this.metadata == that.metadata && this.data == that.data,
            // Mutator: stamp cluster/app ownership and the desired spec.
            |mut topic| {
                // set target cluster
                topic
                    .metadata
                    .labels
                    .insert(LABEL_KAFKA_CLUSTER.into(), config.cluster_name.clone());
                topic
                    .metadata
                    .annotations
                    .insert(ANNOTATION_APP_NAME.into(), target.app_name().into());
                // set config
                // NOTE(review): partitions/replicas are hard-coded — confirm they
                // shouldn't come from `config`.
                topic.data["spec"] = json!({
                    "config": {},
                    "partitions": 3,
                    "replicas": 1,
                    "topicName": topic_name,
                });
                Ok::<_, ReconcileError>(topic)
            },
        )
        .await?
        .resource();
        // done
        Ok((topic, topic_name))
    }
}
#[async_trait]
impl<'o> ProgressOperation<ConstructContext> for CreateTopic<'o> {
    fn type_name(&self) -> String {
        "CreateTopics".into()
    }

    /// Ensures the events topic for the application exists, then records the
    /// resulting topic object and name on the reconciliation context.
    async fn run(
        &self,
        mut ctx: ConstructContext,
    ) -> drogue_cloud_operator_common::controller::reconciler::progress::Result<ConstructContext>
    {
        let (topic, topic_name) = Self::ensure_kafka_topic(
            &self.api,
            &self.resource,
            &self.config,
            ResourceType::Events(ctx.app.metadata.name.clone()),
        )
        .await?;
        ctx.events_topic = Some(topic);
        ctx.events_topic_name = Some(topic_name);
        // done
        Ok(OperationOutcome::Continue(ctx))
    }
}
/// Reconciliation step that waits for the events topic to report `Ready`.
pub struct TopicReady<'o> {
    // NOTE(review): not read by the visible `run` implementation — confirm
    // whether it is still needed.
    pub config: &'o ControllerConfig,
}
#[async_trait]
impl<'o> ProgressOperation<ConstructContext> for TopicReady<'o> {
    fn type_name(&self) -> String {
        "TopicsReady".into()
    }

    /// Checks whether the events topic has a `Ready` condition, clears the
    /// downstream status section, then continues the reconciliation or
    /// schedules a retry when the topic is not ready yet.
    async fn run(&self, mut ctx: ConstructContext) -> progress::Result<ConstructContext> {
        // A missing topic or missing condition counts as "not ready".
        let events_ready = ctx
            .events_topic
            .as_ref()
            .and_then(|topic| condition_ready("Ready", topic))
            .unwrap_or_default();

        ctx.app.update_section(|mut status: KafkaAppStatus| {
            // using the internal model only for now
            status.downstream = None;
            status
        })?;

        // `if`/`else` instead of the original `match events_ready { true/false }`
        // (clippy::match_bool).
        if events_ready {
            Ok(OperationOutcome::Continue(ctx))
        } else {
            retry(ctx)
        }
    }
}
|
mod compiler;
mod config;
mod platform;
mod symbols;
mod utils;
// Flatten the submodules into this module's namespace.
pub use compiler::*;
pub use config::*;
pub use platform::*;
pub use symbols::*;
pub use utils::*;
// Both `compiler` and `symbols` define a `Js` item; re-export each under a
// distinct alias so callers can name them unambiguously.
pub use compiler::Js as CompilerJs;
pub use symbols::Js as SymbolsJs;
|
use std::io;
use std::net::TcpListener;
use std::sync::Arc;
use std::thread;
use connection::{Connection, ConnectionType};
use public_key::KeyPair;
/// Settings for a [`Server`]: bind address and the server key pair.
pub struct ServerConfig {
    // Hostname or IP address the listener binds to.
    pub host: String,
    // TCP port the listener binds to.
    pub port: u16,
    // Server key pair, handed to each connection via the shared config.
    pub key: Box<KeyPair>,
}
/// TCP server that handles each accepted connection on its own thread.
pub struct Server {
    // Shared so every connection thread can hold a reference to the config.
    config: Arc<ServerConfig>,
}
impl Server {
pub fn with_config(config: ServerConfig) -> Server {
Server { config: Arc::new(config) }
}
pub fn run(&self) -> io::Result<()> {
let listener =
TcpListener::bind((&*self.config.host, self.config.port))?;
loop {
let (mut stream, addr) = listener.accept()?;
let config = self.config.clone();
debug!("Incoming connection from {}", addr);
thread::spawn(move || {
let mut connection =
Connection::new(ConnectionType::Server(config));
let result = connection.run(&mut stream);
if let Some(error) = result.err() {
println!("sshd: {}", error)
}
});
}
Ok(())
}
}
|
use std::io::*;
/// Lexical tokens produced by the (stub) tokenizer.
#[allow(dead_code)]
enum Token {
    Eof,
    Number(f64),
    Identifier(String),
}

/// Stub: consume one token from the current input line.
/// (Renamed from non-snake-case `getToken`; still unimplemented.)
fn get_token() {}

fn main() {
    // The original did not compile: `let buf: []` is invalid syntax and the
    // loop read into an uninitialized buffer. Read stdin line by line into a
    // reusable String buffer instead; `read_line` returns Ok(0) at EOF.
    let stdin = stdin();
    let mut reader = BufReader::new(stdin.lock());
    let mut line = String::new();
    while matches!(reader.read_line(&mut line), Ok(n) if n > 0) {
        get_token();
        line.clear();
    }
}
// Copyright (c) 2015-2016, Johan Sköld.
// License: http://opensource.org/licenses/ISC
use std::env;
use std::io::Write;
use std::path::PathBuf;
use std::process::{Command, Stdio};
/// Entry point: dispatch the native bgfx build based on the Cargo target triple.
fn main() {
    let target = env::var("TARGET").unwrap();
    let profile = env::var("PROFILE").unwrap();

    // The triple looks like `<arch>-<platform…>-<compiler>`; slice it on the
    // first and last `-` so the platform part keeps any interior dashes
    // (e.g. `pc-windows`).
    let head = target.find('-').unwrap();
    let tail = target.rfind('-').unwrap();
    let arch = &target[..head];
    let platform = &target[(head + 1)..tail];
    let compiler = &target[(tail + 1)..];

    let bitness = if arch == "x86_64" { 64 } else { 32 };

    match compiler {
        "msvc" => build_msvc(bitness),
        "gnu" | "darwin" => build_gmake(bitness, &profile, platform),
        _ => panic!("Unsupported compiler"),
    }
}
/// Builds the bgfx binaries for `msvc` targets.
fn build_msvc(bitness: u32) {
let vs_version = env::var("VisualStudioVersion").expect("Visual Studio version not detected");
let platform = if bitness == 32 { "X86" } else { "X64" };
let vs_release = match vs_version.as_ref() {
"12.0" => "2013",
"14.0" => "2015",
"15.0" => "2017",
_ => panic!(format!("Unknown Visual Studio version: {:?}", vs_version)),
};
Command::new("bx/tools/bin/windows/genie.exe")
.current_dir("bgfx")
.arg("--with-dynamic-runtime")
.arg(format!("vs{}", vs_release))
.output()
.expect("Failed to generate project files");
let status = Command::new("MSBuild.exe")
.current_dir("bgfx")
.arg("/p:Configuration=Release")
.arg(format!("/p:Platform={}", platform))
.arg(format!(".build/projects/vs{}/bgfx.sln", vs_release))
.status()
.expect("Failed to build bgfx");
if status.code().unwrap() != 0 {
panic!("Failed to build bgfx");
}
let mut path = PathBuf::from(env::current_dir().unwrap());
path.push("bgfx");
path.push(".build");
path.push(format!("win{}_vs{}", bitness, vs_release));
path.push("bin");
println!("cargo:rustc-link-lib=static=bxRelease");
println!("cargo:rustc-link-lib=static=bimgRelease");
println!("cargo:rustc-link-lib=static=bgfxRelease");
println!("cargo:rustc-link-lib=gdi32");
println!("cargo:rustc-link-lib=user32");
println!("cargo:rustc-link-search=native={}", path.as_os_str().to_str().unwrap());
}
/// Builds the bgfx binaries for makefile based targets.
fn build_gmake(bitness: u32, profile: &str, platform: &str) {
let project_name = match platform {
"pc-windows" => "gmake-mingw-gcc",
"unknown-linux" => "gmake-linux",
"apple" => "gmake-osx",
_ => panic!("Unsupported OS"),
};
let output_name = match platform {
"pc-windows" => format!("win{}_mingw-gcc", bitness),
"unknown-linux" => format!("linux{}_gcc", bitness),
"apple" => format!("osx{}_clang", bitness),
_ => unreachable!(),
};
// Generate makefiles
let status = Command::new("make")
.arg("-C")
.arg("bgfx")
.arg(format!(".build/projects/{}", project_name))
.status()
.expect("Failed to generate makefiles");
if status.code().unwrap() != 0 {
panic!("Failed to generate makefiles.");
}
// C flags
let cflags = if platform == "pc-windows" && bitness == 32 {
"-fPIC -DBGFX_CONFIG_MULTITHREADED=1 -mincoming-stack-boundary=2"
} else {
"-fPIC -DBGFX_CONFIG_MULTITHREADED=1"
};
// Build bgfx
let status = Command::new("make")
.env("CFLAGS", cflags)
.arg("-R")
.arg("-C")
.arg(format!("bgfx/.build/projects/{}", project_name))
.arg(format!("config={}{}", profile, bitness))
.arg("verbose=1")
.arg("bgfx")
.status()
.expect("Failed to build bgfx");
if status.code().unwrap() != 0 {
panic!("Failed to build bgfx.");
}
// Build bimg
let status = Command::new("make")
.env("CFLAGS", cflags)
.arg("-R")
.arg("-C")
.arg(format!("bgfx/.build/projects/{}", project_name))
.arg(format!("config={}{}", profile, bitness))
.arg("verbose=1")
.arg("bimg")
.status()
.expect("Failed to build bimg");
if status.code().unwrap() != 0 {
panic!("Failed to build bimg.");
}
// Output linker config
let mut path = PathBuf::from(env::current_dir().unwrap());
path.push("bgfx");
path.push(".build");
path.push(output_name);
path.push("bin");
let config = if profile == "debug" { "Debug" } else { "Release" };
println!("cargo:rustc-link-lib=bgfx{}", config);
println!("cargo:rustc-link-lib=bimg{}", config);
println!("cargo:rustc-link-lib=bx{}", config);
println!("cargo:rustc-link-lib=stdc++");
println!("cargo:rustc-link-search=native={}", path.as_os_str().to_str().unwrap());
match platform {
"pc-windows" => {
println!("cargo:rustc-link-lib=gdi32");
println!("cargo:rustc-link-lib=opengl32");
println!("cargo:rustc-link-lib=psapi");
}
"unknown-linux" => {
println!("cargo:rustc-link-lib=GL");
println!("cargo:rustc-link-lib=X11");
}
"apple" => {
println!("cargo:rustc-link-lib=framework=Cocoa");
println!("cargo:rustc-link-lib=framework=QuartzCore");
println!("cargo:rustc-link-lib=framework=OpenGL");
if should_link_metal() {
println!("cargo:rustc-link-lib=framework=Metal");
}
}
_ => unreachable!(),
}
}
/// Determines whether we should link with Metal on OSX. The Metal framework
/// is only included starting with OSX 10.11. We do this through the C
/// compiler so we can test the same macro bgfx tests for support with.
fn should_link_metal() -> bool {
    // Preprocessor snippet: expands `v=<min OS version>` when the macro is
    // defined, otherwise `v=1`. (The `\` continues the string literal.)
    let test = "#ifdef __ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__\nv=__ENVIRONMENT_MAC_OS_X_VER\
SION_MIN_REQUIRED__\n#else\nv=1\n#endif";
    let mut cc = Command::new("cc")
        .arg("-xc")
        .arg("-E")
        .arg("-")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()
        .unwrap();
    {
        // Scope ensures stdin is dropped (closed) so `cc` sees EOF and exits.
        let mut stdin = cc.stdin.take().unwrap();
        stdin.write_fmt(format_args!("{}", test)).unwrap();
    }
    let output = cc.wait_with_output().unwrap();
    let output_str = String::from_utf8(output.stdout).unwrap();
    // Find the expanded `v=<number>` line and compare against 10.11 (101100).
    let ver_line = output_str.lines().find(|l| l.starts_with("v=")).unwrap();
    let ver_str = &ver_line[2..];
    let ver = ver_str.parse::<u32>().unwrap();
    ver >= 101100
}
$NetBSD: patch-vendor_crossbeam-utils_no__atomic.rs,v 1.3 2023/04/08 18:18:11 he Exp $
Add mipsel-unknown-netbsd target as not having 64-bit atomics.
Unify with crossbeam-utils-0.8.12 by removing mipsel-sony-psx.
--- vendor/crossbeam-utils/no_atomic.rs.orig 2023-01-25 01:49:15.000000000 +0000
+++ vendor/crossbeam-utils/no_atomic.rs
@@ -37,10 +37,10 @@ const NO_ATOMIC_64: &[&str] = &[
"mips-unknown-linux-musl",
"mips-unknown-linux-uclibc",
"mipsel-sony-psp",
- "mipsel-sony-psx",
"mipsel-unknown-linux-gnu",
"mipsel-unknown-linux-musl",
"mipsel-unknown-linux-uclibc",
+ "mipsel-unknown-netbsd",
"mipsel-unknown-none",
"mipsisa32r6-unknown-linux-gnu",
"mipsisa32r6el-unknown-linux-gnu",
@@ -74,7 +74,6 @@ const NO_ATOMIC_64: &[&str] = &[
#[allow(dead_code)] // Only crossbeam-utils uses this.
const NO_ATOMIC: &[&str] = &[
"avr-unknown-gnu-atmega328",
- "mipsel-sony-psx",
"msp430-none-elf",
"riscv32i-unknown-none-elf",
"riscv32im-unknown-none-elf",
|
/// NO. 1: Two Sum
pub struct Solution;
// ----- submission codes start here -----
use std::collections::HashMap;
impl Solution {
    /// Returns indices of the two numbers in `nums` that add up to `target`,
    /// or an empty vector when no such pair exists.
    ///
    /// Single pass with a value -> index map: for each element, look up its
    /// complement among earlier elements; otherwise record the element.
    /// O(n) time, O(n) space. The current index is returned first, matching
    /// the original implementation.
    pub fn two_sum(nums: Vec<i32>, target: i32) -> Vec<i32> {
        let mut seen: HashMap<i32, i32> = HashMap::with_capacity(nums.len());
        for (i, &num) in nums.iter().enumerate() {
            if let Some(&j) = seen.get(&(target - num)) {
                return vec![i as i32, j];
            }
            // Unconditional insert is fine: the `if` branch returned.
            seen.insert(num, i as i32);
        }
        vec![]
    }
}
// ----- submission codes end here -----
#[cfg(test)]
mod tests {
    use super::*;
    /// Checks two_sum on the three LeetCode examples.
    ///
    /// Bug fix: `Vec::sort` sorts in place and returns `()`, so the previous
    /// `a.sort() == b.sort()` pattern compared `()` with `()` and could never
    /// fail. We now sort named bindings and compare their contents.
    #[test]
    fn test() {
        let mut got = Solution::two_sum(vec![2, 7, 11, 15], 9);
        got.sort();
        assert_eq!(got, vec![0, 1]);
        let mut got = Solution::two_sum(vec![3, 2, 4], 6);
        got.sort();
        assert_eq!(got, vec![1, 2]);
        let mut got = Solution::two_sum(vec![3, 3], 6);
        got.sort();
        assert_eq!(got, vec![0, 1]);
    }
}
|
use std::sync::Arc;
use anyhow::{Context, Result};
use async_trait::async_trait;
use chrono::Utc;
use serde_derive::Serialize;
use serde_json::Value as JsonValue;
use sqlx::Acquire;
use svc_agent::mqtt::{
IntoPublishableMessage, OutgoingEvent, OutgoingEventProperties, ShortTermTimingProperties,
};
use uuid::Uuid;
use crate::app::AppContext;
use crate::clients::event::RoomAdjustResult;
use crate::clients::tq::Priority;
use crate::db::class::{ClassType, Object as Class};
use crate::{
clients::tq::{Task as TqTask, TranscodeStreamToHlsSuccess},
db::recording::RecordingInsertQuery,
};
use super::{MjrDumpsUploadResult, TranscodeSuccess, UploadedStream};
/// Post-processing pipeline state for a single webinar class: dump upload ->
/// room adjustment -> transcoding -> "webinar.ready" notification.
pub(super) struct WebinarPostprocessingStrategy {
    // Shared application context (db pool, tq/event clients, mqtt publisher).
    ctx: Arc<dyn AppContext>,
    // The webinar this strategy instance post-processes.
    webinar: Class,
}
impl WebinarPostprocessingStrategy {
pub(super) fn new(ctx: Arc<dyn AppContext>, webinar: Class) -> Self {
Self { ctx, webinar }
}
}
#[async_trait]
impl super::PostprocessingStrategy for WebinarPostprocessingStrategy {
    /// Registers the uploaded MJR dump as the webinar's recording and
    /// schedules a tq task converting the dump files into a single stream.
    async fn handle_mjr_dumps_upload(&self, rtcs: Vec<MjrDumpsUploadResult>) -> Result<()> {
        let mut ready_dumps = super::shared_helpers::extract_ready_dumps(rtcs)?;
        // A webinar has a single stream, so exactly one dump is expected.
        if ready_dumps.len() != 1 {
            return Err(anyhow!("Expected exactly 1 dump"));
        }
        let dump = ready_dumps.pop().unwrap();
        {
            // Scoped so the pooled connection is released before the
            // (potentially slow) tq client call below.
            let mut conn = self.ctx.get_conn().await?;
            RecordingInsertQuery::new(self.webinar.id(), dump.id, dump.created_by)
                .execute(&mut conn)
                .await?;
        }
        self.ctx
            .tq_client()
            .create_task(
                &self.webinar,
                TqTask::ConvertMjrDumpsToStream {
                    mjr_dumps_uris: dump.mjr_dumps_uris,
                    stream_uri: dump.uri,
                    stream_id: dump.id,
                },
                Priority::Normal,
            )
            .await
            .context("Failed to set mjr dumps convert task")?;
        Ok(())
    }
    /// On successful room adjustment, atomically stores the new room ids and
    /// adjusted segments, then queues transcoding of the recording.
    async fn handle_adjust(&self, room_adjust_result: RoomAdjustResult) -> Result<()> {
        match room_adjust_result {
            RoomAdjustResult::Success {
                original_room_id,
                modified_room_id,
                modified_segments,
                ..
            } => {
                let recording = {
                    let mut conn = self.ctx.get_conn().await?;
                    // Both updates must land together, hence the transaction.
                    let mut txn = conn
                        .begin()
                        .await
                        .context("Failed to begin sqlx db transaction")?;
                    let q = crate::db::class::UpdateAdjustedRoomsQuery::new(
                        self.webinar.id(),
                        original_room_id,
                        modified_room_id,
                    );
                    q.execute(&mut txn).await?;
                    let q = crate::db::recording::AdjustWebinarUpdateQuery::new(
                        self.webinar.id(),
                        modified_segments.clone(),
                    );
                    let recording = q.execute(&mut txn).await?;
                    txn.commit().await?;
                    recording
                };
                send_transcoding_task(
                    &self.ctx,
                    &self.webinar,
                    recording,
                    modified_room_id,
                    Priority::Normal,
                )
                .await
                .context("TqClient create task failed")
            }
            RoomAdjustResult::Error { error } => {
                bail!("Adjust failed, err = {:?}", error);
            }
        }
    }
    /// Marks the recording transcoded and broadcasts a "webinar.ready" MQTT
    /// event to the webinar's audience.
    async fn handle_transcoding_completion(
        &self,
        completion_result: TranscodeSuccess,
    ) -> Result<()> {
        match completion_result {
            TranscodeSuccess::TranscodeStreamToHls(TranscodeStreamToHlsSuccess {
                stream_duration,
                stream_id,
                stream_uri,
                event_room_id,
            }) => {
                // Duration arrives as a stringified float of seconds; round
                // to whole seconds for the payload.
                let stream_duration = stream_duration.parse::<f64>()?.round() as u64;
                {
                    let mut conn = self.ctx.get_conn().await?;
                    crate::db::recording::TranscodingUpdateQuery::new(self.webinar.id())
                        .execute(&mut conn)
                        .await?;
                }
                let timing = ShortTermTimingProperties::new(Utc::now());
                let props = OutgoingEventProperties::new("webinar.ready", timing);
                let path = format!("audiences/{}/events", self.webinar.audience());
                let payload = WebinarReady {
                    tags: self.webinar.tags().map(ToOwned::to_owned),
                    stream_duration,
                    stream_uri,
                    stream_id,
                    status: "success",
                    scope: self.webinar.scope().to_owned(),
                    id: self.webinar.id(),
                    event_room_id,
                };
                let event = OutgoingEvent::broadcast(payload, props, &path);
                let boxed_event = Box::new(event) as Box<dyn IntoPublishableMessage + Send>;
                self.ctx
                    .publisher()
                    .publish(boxed_event)
                    .context("Failed to publish webinar.ready event")
            }
            // Minigroup results must never be routed to the webinar strategy.
            TranscodeSuccess::TranscodeMinigroupToHls(result) => {
                bail!(
                    "Got transcoding success for an unexpected tq template; expected transcode-stream-to-hls for a webinar, id = {}, result = {:?}",
                    self.webinar.id(),
                    result,
                );
            }
        }
    }
    /// Stores the uploaded stream's metadata and asks the event service to
    /// adjust the room to the recorded segments.
    async fn handle_stream_upload(&self, stream: UploadedStream) -> Result<()> {
        let rtc = {
            let mut conn = self.ctx.get_conn().await?;
            // `parsed_data` is itself a Result; a failed parse aborts here.
            let parsed_data = stream.parsed_data?;
            crate::db::recording::StreamUploadUpdateQuery::new(
                self.webinar.id(),
                stream.id,
                parsed_data.segments,
                parsed_data.uri,
                parsed_data.started_at,
            )
            .execute(&mut conn)
            .await?
        };
        self.ctx
            .event_client()
            .adjust_room(
                self.webinar.event_room_id(),
                rtc.started_at()
                    .ok_or_else(|| anyhow!("Missing started at after upload"))?,
                rtc.segments()
                    .ok_or_else(|| anyhow!("Missing segments after upload"))?
                    .clone(),
                self.ctx.get_preroll_offset(self.webinar.audience()),
            )
            .await
            .context("Failed to adjust room")
    }
}
pub async fn restart_transcoding(
ctx: Arc<dyn AppContext>,
webinar: Class,
priority: Priority,
) -> Result<()> {
if webinar.kind() != ClassType::Webinar {
bail!("Invalid class type");
}
let modified_event_room_id = match webinar.modified_event_room_id() {
Some(id) => id,
None => bail!("Not adjusted yet"),
};
let mut conn = ctx.get_conn().await?;
let recordings = crate::db::recording::RecordingListQuery::new(webinar.id())
.execute(&mut conn)
.await?;
for recording in recordings {
send_transcoding_task(&ctx, &webinar, recording, modified_event_room_id, priority).await?;
}
Ok(())
}
async fn send_transcoding_task(
ctx: &Arc<dyn AppContext>,
webinar: &Class,
recording: crate::db::recording::Object,
modified_event_room_id: Uuid,
priority: Priority,
) -> Result<()> {
ctx.event_client()
.dump_room(modified_event_room_id)
.await
.context("Dump room event failed")?;
ctx.tq_client()
.create_task(
webinar,
TqTask::TranscodeStreamToHls {
stream_id: recording.rtc_id(),
stream_uri: recording
.stream_uri()
.ok_or_else(|| {
anyhow!("Missing stream_uri in adjust for {}", recording.rtc_id())
})?
.clone(),
event_room_id: Some(modified_event_room_id),
segments: recording.modified_segments().cloned(),
},
priority,
)
.await
.context("TqClient create task failed")
}
/// Payload of the broadcast "webinar.ready" MQTT event.
#[derive(Serialize)]
struct WebinarReady {
    // Omitted from the JSON entirely when the webinar has no tags.
    #[serde(skip_serializing_if = "Option::is_none")]
    tags: Option<JsonValue>,
    // Always "success" — failures never produce this event.
    status: &'static str,
    // Rounded duration in seconds (see handle_transcoding_completion).
    stream_duration: u64,
    stream_id: Uuid,
    stream_uri: String,
    scope: String,
    // The webinar class id.
    id: Uuid,
    event_room_id: Uuid,
}
|
// svd2rust-generated read accessors for the TZC_FAIL_CONTROL0 register.
#[doc = "Register `TZC_FAIL_CONTROL0` reader"]
pub type R = crate::R<TZC_FAIL_CONTROL0_SPEC>;
#[doc = "Field `PRIVILEGE` reader - PRIVILEGE"]
pub type PRIVILEGE_R = crate::BitReader;
#[doc = "Field `NON_SECURE` reader - NON_SECURE"]
pub type NON_SECURE_R = crate::BitReader;
#[doc = "Field `DIRECTION` reader - DIRECTION"]
pub type DIRECTION_R = crate::BitReader;
impl R {
    // Each single-bit flag is extracted by shifting it down to bit 0 and
    // masking; the bit positions come from the SVD description.
    #[doc = "Bit 20 - PRIVILEGE"]
    #[inline(always)]
    pub fn privilege(&self) -> PRIVILEGE_R {
        PRIVILEGE_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - NON_SECURE"]
    #[inline(always)]
    pub fn non_secure(&self) -> NON_SECURE_R {
        NON_SECURE_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 24 - DIRECTION"]
    #[inline(always)]
    pub fn direction(&self) -> DIRECTION_R {
        DIRECTION_R::new(((self.bits >> 24) & 1) != 0)
    }
}
#[doc = "Status information about the first access that failed a region permission check in the associated filter (0 to 1).\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tzc_fail_control0::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct TZC_FAIL_CONTROL0_SPEC;
impl crate::RegisterSpec for TZC_FAIL_CONTROL0_SPEC {
    type Ux = u32;
}
// Read-only register: only `Readable` is implemented, no `Writable`.
#[doc = "`read()` method returns [`tzc_fail_control0::R`](R) reader structure"]
impl crate::Readable for TZC_FAIL_CONTROL0_SPEC {}
#[doc = "`reset()` method sets TZC_FAIL_CONTROL0 to value 0"]
impl crate::Resettable for TZC_FAIL_CONTROL0_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::io;
use std::io::Write;
/// Demonstrates writing to stdout and stderr, including non-ASCII text.
fn main() {
    println!("This goes to stdout");
    println!("ユニコードを作りましょう");
    // eprintln! is the idiomatic equivalent of the previous
    // write!(&mut io::stderr(), "...\n").unwrap(): it appends the newline
    // and panics if writing to stderr fails.
    eprintln!("This goes to stderr");
    eprintln!("ユニコードを作りましょう");
}
|
use std::{
ffi::OsStr,
process::{ExitStatus, Stdio},
};
#[cfg(feature = "async-std-runtime")]
use async_std::process::{Child, Command};
#[cfg(feature = "tokio-runtime")]
use tokio::process::{Child, Command};
use crate::error::Result;
/// Thin wrapper over a spawned child process, portable across the
/// tokio and async-std runtime features.
#[derive(Debug)]
pub(crate) struct Process {
    // The spawned child; the concrete type comes from the selected runtime.
    child: Child,
}
impl Process {
    /// Spawns `path` with `args`, discarding the child's stdout and stderr.
    pub(crate) fn spawn<P, I, A>(path: P, args: I) -> Result<Self>
    where
        P: AsRef<OsStr>,
        I: IntoIterator<Item = A>,
        A: AsRef<OsStr>,
    {
        let mut command = Command::new(path);
        let child = command
            .args(args)
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .spawn()?;
        Ok(Self { child })
    }
    /// Issue a kill signal to the child process and immediately return; to wait for the process to
    /// actually exit, use `wait`.
    pub(crate) fn kill(&mut self) -> Result<()> {
        // tokio's non-blocking kill is `start_kill`; async-std exposes a
        // synchronous `kill` with the same fire-and-forget semantics.
        #[cfg(feature = "tokio-runtime")]
        return Ok(self.child.start_kill()?);
        #[cfg(feature = "async-std-runtime")]
        return Ok(self.child.kill()?);
    }
    /// Waits for the child to exit and returns its exit status.
    pub(crate) async fn wait(&mut self) -> Result<ExitStatus> {
        #[cfg(feature = "tokio-runtime")]
        return Ok(self.child.wait().await?);
        #[cfg(feature = "async-std-runtime")]
        return Ok(self.child.status().await?);
    }
}
impl Drop for Process {
    fn drop(&mut self) {
        // Attempt to reap the process.
        // Best effort only: the non-blocking try_wait/try_status is used and
        // its result deliberately ignored — Drop must not block or panic.
        #[cfg(feature = "tokio-runtime")]
        let _ = self.child.try_wait();
        #[cfg(feature = "async-std-runtime")]
        let _ = self.child.try_status();
    }
}
|
use super::c_void;
extern "C" {
    // FFI bindings to the linear heap allocator (malloc-style API).
    // NOTE(review): sizes and alignment are declared as i32 here; the
    // underlying C API presumably uses size_t — confirm against the headers.
    pub fn linearAlloc(size: i32) -> *mut c_void;
    pub fn linearMemAlign(size: i32, alignment: i32) -> *mut c_void;
    pub fn linearRealloc(mem: *mut c_void, size: i32) -> *mut c_void;
    pub fn linearFree(mem: *mut c_void) -> ();
    // Returns the number of free bytes remaining in the linear heap.
    pub fn linearSpaceFree() -> u32;
}
|
//! The Tendermock JsonRPC Websocket API.
use futures::{SinkExt, StreamExt};
use serde::Serialize;
use tendermint_rpc::endpoint::subscribe::{Request, Response};
use warp::ws::{Message, WebSocket, Ws as WarpWs};
use warp::Filter;
use super::utils::{JrpcEnvelope, JrpcError, JrpcResponse, JrpcResult, JRPC_VERSION};
use crate::logger::Log;
/// A struct that can be used to build the Websocket `warp` filter, see the `new_mimic` method.
pub struct Ws {}
impl Ws {
    /// Creates a `warp` filter that mimics the Tendermint Websocket API.
    pub fn new_mimic() -> warp::filters::BoxedFilter<(impl warp::Reply,)> {
        // Every upgraded websocket connection is served by `handler`.
        warp::ws().map(|ws: WarpWs| ws.on_upgrade(handler)).boxed()
    }
}
/// Handle a websocket connection.
///
/// Reads messages in a loop, answers each through `handle_request`, and
/// stops on the first receive/decode/send failure, then closes the socket.
async fn handler(ws: WebSocket) {
    log!(Log::Websocket, "Connection");
    // Split into independent send/receive halves for the request/reply loop.
    let (mut sending_ws, mut listening_ws) = ws.split();
    while let Some(result) = listening_ws.next().await {
        let msg = match result {
            Ok(msg) => msg,
            Err(e) => {
                log!(Log::Websocket, "Receiving error: '{}'", e);
                break;
            }
        };
        // Frames that are not text fail `to_str` and abort the session.
        let msg = if let Ok(msg) = msg.to_str() {
            msg
        } else {
            log!(Log::Websocket, "Could not interpret message as str");
            break;
        };
        if let Err(e) = sending_ws.send(Message::text(handle_request(msg))).await {
            log!(Log::Websocket, "Sending error: '{}'", e);
            break;
        };
    }
    // Rejoin the halves so the underlying socket can be closed cleanly.
    if let Ok(ws) = sending_ws.reunite(listening_ws) {
        if let Err(e) = ws.close().await {
            log!(Log::Websocket, "Closing error: '{}'", e);
        };
    };
}
/// Parses the request, dispatches the query to the method handler and
/// returns the serialized JsonRPC response.
///
/// Unparseable requests get an InvalidRequest error with a placeholder id,
/// since no id could be recovered from the message.
fn handle_request(msg: &str) -> String {
    let req = match parse_message(msg) {
        Ok(envelope) => envelope,
        Err(_) => {
            return serde_json::to_string(&JrpcResponse::<()> {
                id: "0".to_string(),
                jsonrpc: JRPC_VERSION.to_string(),
                error: Some(JrpcError::InvalidRequest.into()),
                result: None,
            })
            .unwrap();
        }
    };
    if req.jsonrpc != JRPC_VERSION {
        serialize_response::<()>(Err(JrpcError::WrongVersion), req)
    } else if req.method.as_str() == "subscribe" {
        let outcome = subscribe(&req);
        serialize_response(outcome, req)
    } else {
        serialize_response::<()>(Err(JrpcError::WrongMethod), req)
    }
}
/// Parses the websocket message into a JsonRPC request envelope.
///
/// Any deserialization failure is collapsed into InvalidRequest.
fn parse_message(msg: &str) -> JrpcResult<JrpcEnvelope> {
    match serde_json::from_str(msg) {
        Ok(envelope) => Ok(envelope),
        Err(_) => Err(JrpcError::InvalidRequest),
    }
}
/// Serializes a JrpcResult into an actual JsonRPC response String.
///
/// Exactly one of `error` / `result` is populated, mirroring the JSON-RPC
/// success/failure envelope; id and version are echoed from the request.
fn serialize_response<T: Serialize>(result: JrpcResult<T>, envelope: JrpcEnvelope) -> String {
    let mut error = None;
    let mut payload = None;
    match result {
        Ok(value) => payload = Some(value),
        Err(e) => error = Some(e.into()),
    }
    serde_json::to_string(&JrpcResponse {
        id: envelope.id,
        jsonrpc: envelope.jsonrpc,
        error,
        result: payload,
    })
    .unwrap()
}
/// Handles the /subscribe endpoint.
///
/// Only validates the request parameters; subscription events themselves are
/// not mimicked, so success returns an empty response.
fn subscribe(req: &JrpcEnvelope) -> JrpcResult<Response> {
    match serde_json::from_value::<Request>(req.params.clone()) {
        Ok(_request) => Ok(Response {}),
        Err(_) => Err(JrpcError::WrongParameters),
    }
}
|
use cpp::{cpp, cpp_class};
use std::{os::raw::{c_char, c_int, c_uint}, ffi::CStr};
cpp! {{
#include <lldb/API/SBDebugger.h>
using namespace lldb;
}}
// pub fn terminate() {
// cpp!(unsafe [] {
// SBDebugger::Terminate();
// })
// }
// pub fn create(source_init_files: bool) -> SBDebugger {
// cpp!(unsafe [source_init_files as "bool"] -> SBDebugger as "SBDebugger" {
// return SBDebugger::Create(source_init_files);
// })
// }
// }
cpp_class!(pub unsafe struct SBDebugger as "SBDebugger");
/// Borrows a NUL-terminated C string as `&str`.
///
/// A null pointer yields `""`; invalid UTF-8 is truncated at the first bad
/// byte. The caller chooses the returned lifetime, so the pointee must stay
/// alive (and unmodified) for as long as the `&str` is used.
pub(crate) unsafe fn get_str<'a>(ptr: *const c_char) -> &'a str {
    if ptr.is_null() {
        return "";
    }
    let cstr = CStr::from_ptr(ptr);
    cstr.to_str().unwrap_or_else(|err| {
        // Keep only the leading valid UTF-8 prefix.
        std::str::from_utf8(&cstr.to_bytes()[..err.valid_up_to()]).unwrap()
    })
}
impl SBDebugger {
    /// Creates a debugger instance via `SBDebugger::Create`; `source_init_files`
    /// controls whether the user's lldb init files are sourced.
    pub fn create(source_init_files: bool) -> SBDebugger {
        cpp!(unsafe [source_init_files as "bool"] -> SBDebugger as "SBDebugger" {
            return SBDebugger::Create(source_init_files);
        })
    }
    /// Returns this debugger's instance name.
    ///
    /// NOTE(review): the `&str` borrows C++-owned memory; its actual lifetime
    /// is tied to the debugger, not to `&self` — confirm against the lldb API.
    pub fn instance_name(&self) -> &str {
        let ptr = cpp!(unsafe [self as "SBDebugger*"] -> *const c_char as "const char*" {
            return self->GetInstanceName();
        });
        unsafe { get_str(ptr) }
    }
}
|
use std::marker::PhantomData;
use std::ops::BitXor;
use crate::metric::Metric;
use crate::Dist;
/// Types whose set (one) bits can be counted.
///
/// Mirrors the inherent `count_ones` of the unsigned integer primitives so
/// generic code (e.g. `HammingMetric`) can require it as a trait bound.
pub trait CountOnes {
    /// Returns the number of one bits in `self`.
    fn count_ones(self) -> u32;
}

/// Forwards `CountOnes::count_ones` to the primitive's inherent method.
/// Replaces five identical hand-written impls. (The former `#[inline]` on the
/// bodiless trait declaration had no effect and was dropped.)
macro_rules! impl_count_ones {
    ($($t:ty),* $(,)?) => {
        $(
            impl CountOnes for $t {
                #[inline]
                fn count_ones(self) -> u32 {
                    // Inherent methods take precedence over trait methods,
                    // so this is not a recursive call.
                    self.count_ones()
                }
            }
        )*
    };
}

impl_count_ones!(u8, u16, u32, u64, u128);
/// Hamming-distance metric over integer keys: distance is the number of bit
/// positions in which the two keys differ (popcount of their XOR).
#[derive(Default, Clone, Copy, Derivative)]
#[derivative(Debug)]
pub struct HammingMetric<I>(#[derivative(Debug = "ignore")] PhantomData<I>)
where
    I: BitXor<I>,
    <I as BitXor<I>>::Output: CountOnes;
impl<I> Metric<I> for HammingMetric<I>
where
    I: Copy + BitXor<I>,
    <I as BitXor<I>>::Output: CountOnes,
{
    /// Hamming distance: popcount of `k1 XOR k2`.
    #[inline]
    fn distance(&self, k1: &I, k2: &I) -> Dist {
        (*k1 ^ *k2).count_ones() as usize
    }
    /// Same as `distance`, callable without an instance (the metric is stateless).
    #[inline]
    fn distance_static(k1: &I, k2: &I) -> Dist {
        (*k1 ^ *k2).count_ones() as usize
    }
}
// TODO: figure out how to declare a HammingMetric over Clone and over BitXor<&I> that doesn't conflict with the above
// implementation for Copy. (The code difference is k1.clone() instead of *k1 for Clone and no
// deref for BitXor<&I>). Better yet, handle a constraint that means <&I as BitXor<&I>>::Output: CountOnes.
#[cfg(test)]
mod tests {
    use super::*;
    // Spot-checks small u64 keys against hand-computed popcounts of XOR.
    #[test]
    fn hamming_distance() {
        let metric: HammingMetric<u64> = Default::default();
        assert_eq!(0usize, metric.distance(&0u64, &0u64));
        assert_eq!(0usize, metric.distance(&1u64, &1u64));
        assert_eq!(1usize, metric.distance(&1u64, &0u64));
        assert_eq!(1usize, metric.distance(&0u64, &1u64));
        // 1 ^ 2 = 0b11 -> two differing bits.
        assert_eq!(2usize, metric.distance(&1u64, &2u64));
        assert_eq!(1usize, metric.distance(&0u64, &2u64));
    }
}
|
use std::io::{self, Read, Write};
use vendored_sha3::digest::{ExtendableOutput, Input};
use vendored_sha3::{Sha3XofReader, Shake256};
// Sponge state machine: input is absorbed until the first read, after which
// the hash is finalized and only output squeezing is allowed.
#[derive(Clone)]
enum State {
    // Still accepting input via `Write`.
    Absorbing(Shake256),
    // Finalized; producing XOF output via `Read`.
    Reading(Sha3XofReader),
}
/// SHAKE256 is the 256-bit SHAKE variable-output-length hash functions defined by FIPS-202
#[derive(Clone)]
pub struct SHAKE256 {
    // Current sponge state; flips Absorbing -> Reading on the first `read`.
    state: State,
}
impl Read for SHAKE256 {
    /// Squeezes up to `buf.len()` digest bytes; the first call finalizes the
    /// absorbing phase.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if let State::Absorbing(v) = &self.state {
            // clone is inefficient
            // (required because xof_result consumes the sponge by value).
            self.state = State::Reading(v.clone().xof_result());
        }
        match self.state {
            State::Reading(ref mut v) => v.read(buf),
            // Unreachable: the branch above guarantees we are Reading here.
            _ => panic!("unexpected state"),
        }
    }
}
impl Write for SHAKE256 {
    /// No-op: data is absorbed directly into the in-memory sponge.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
    /// Absorbs `buf` into the sponge; always reports the full length written.
    ///
    /// Panics if called after the first `read` — a sponge cannot absorb more
    /// input once squeezing has started.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        match self.state {
            State::Absorbing(ref mut v) => v.input(buf),
            State::Reading(_) => panic!("absorbing after reading"),
        }
        Ok(buf.len())
    }
}
impl super::ShakeHash for SHAKE256 {
    /// Discards all absorbed input and returns to a fresh absorbing state.
    fn reset(&mut self) {
        self.state = State::Absorbing(Shake256::default());
    }
}
/// new_shake256 creates a new SHAKE256 variable-output-length ShakeHash. Its generic security
/// strength is 256 bits against all attacks if at least 64 bytes of its output are used.
pub fn new_shake256() -> SHAKE256 {
    SHAKE256 {
        // Fresh, empty absorbing sponge.
        state: State::Absorbing(Shake256::default()),
    }
}
/// shake_sum256 writes an arbitrary-length digest of data into hash.
///
/// Absorbs all of `b` into a fresh SHAKE256 sponge and squeezes
/// `hash.len()` bytes of output into `hash`, returning the number of
/// bytes read.
pub fn shake_sum256(hash: &mut [u8], b: &[u8]) -> io::Result<usize> {
    let mut h = new_shake256();
    // Absorbing into an in-memory sponge cannot fail.
    // (Fixes the misspelled "unfallible" in the previous panic message.)
    let _ = h.write(b).expect("infallible");
    h.read(hash)
}
|
use draw::DrawContext;
use std::marker::PhantomData;
use std::ptr;
use ui_sys::{self, uiDrawBrush};
pub use ui_sys::uiDrawBrushGradientStop as BrushGradientStop;
/// Used to determine how a given stroke or fill is drawn.
#[derive(Clone, Debug)]
pub enum Brush {
    Solid(SolidBrush),
    LinearGradient(LinearGradientBrush),
    RadialGradient(RadialGradientBrush),
    // Not implemented by libui yet; kept for completeness.
    Image,
}
/// A reference to a DrawBrush
#[derive(Clone, Debug)]
pub struct BrushRef<'a> {
    // The raw libui brush value built from a `Brush`.
    ui_draw_brush: uiDrawBrush,
    // Ties this wrapper's lifetime to the borrowed gradient-stop storage.
    phantom: PhantomData<&'a uiDrawBrush>,
}
impl Brush {
    /// Lowers this brush to the raw `uiDrawBrush` struct libui expects.
    ///
    /// All unused fields of the C struct are zeroed for each variant; only
    /// the fields relevant to the variant's brush type are filled in.
    /// For gradient brushes the returned `BrushRef` borrows `self`'s stop
    /// vector, hence its lifetime parameter.
    pub fn as_ui_draw_brush_ref(&self, _ctx: &DrawContext) -> BrushRef {
        match *self {
            // Solid color: only the RGBA fields matter.
            Brush::Solid(ref solid_brush) => BrushRef {
                ui_draw_brush: uiDrawBrush {
                    Type: ui_sys::uiDrawBrushTypeSolid as u32,
                    R: solid_brush.r,
                    G: solid_brush.g,
                    B: solid_brush.b,
                    A: solid_brush.a,
                    X0: 0.0,
                    Y0: 0.0,
                    X1: 0.0,
                    Y1: 0.0,
                    OuterRadius: 0.0,
                    Stops: ptr::null_mut(),
                    NumStops: 0,
                },
                phantom: PhantomData,
            },
            // Linear gradient: (X0,Y0)-(X1,Y1) is the gradient axis; the
            // stops pointer borrows the brush's Vec (const cast for the C API).
            Brush::LinearGradient(ref linear_gradient_brush) => BrushRef {
                ui_draw_brush: uiDrawBrush {
                    Type: ui_sys::uiDrawBrushTypeLinearGradient as u32,
                    R: 0.0,
                    G: 0.0,
                    B: 0.0,
                    A: 0.0,
                    X0: linear_gradient_brush.start_x,
                    Y0: linear_gradient_brush.start_y,
                    X1: linear_gradient_brush.end_x,
                    Y1: linear_gradient_brush.end_y,
                    OuterRadius: 0.0,
                    Stops: linear_gradient_brush.stops.as_ptr() as *mut BrushGradientStop,
                    NumStops: linear_gradient_brush.stops.len() as u64,
                },
                phantom: PhantomData,
            },
            // Radial gradient: (X0,Y0) is the start point, (X1,Y1) the outer
            // circle's center, plus its radius.
            Brush::RadialGradient(ref radial_gradient_brush) => BrushRef {
                ui_draw_brush: uiDrawBrush {
                    Type: ui_sys::uiDrawBrushTypeRadialGradient as u32,
                    R: 0.0,
                    G: 0.0,
                    B: 0.0,
                    A: 0.0,
                    X0: radial_gradient_brush.start_x,
                    Y0: radial_gradient_brush.start_y,
                    X1: radial_gradient_brush.outer_circle_center_x,
                    Y1: radial_gradient_brush.outer_circle_center_y,
                    OuterRadius: radial_gradient_brush.outer_radius,
                    Stops: radial_gradient_brush.stops.as_ptr() as *mut BrushGradientStop,
                    NumStops: radial_gradient_brush.stops.len() as u64,
                },
                phantom: PhantomData,
            },
            Brush::Image => {
                // These don't work yet in `libui`, but just for completeness' sake…
                BrushRef {
                    ui_draw_brush: uiDrawBrush {
                        Type: ui_sys::uiDrawBrushTypeImage as u32,
                        R: 0.0,
                        G: 0.0,
                        B: 0.0,
                        A: 0.0,
                        X0: 0.0,
                        Y0: 0.0,
                        X1: 0.0,
                        Y1: 0.0,
                        OuterRadius: 0.0,
                        Stops: ptr::null_mut(),
                        NumStops: 0,
                    },
                    phantom: PhantomData,
                }
            }
        }
    }
}
impl<'a> BrushRef<'a> {
    /// Return the underlying uiDrawBrush for this BrushRef as a mutable pointer.
    ///
    /// # Safety
    /// The pointer is produced by casting away `const` from a shared borrow;
    /// the C side must treat it as read-only — writing through it would be
    /// undefined behavior. It is only valid while `self` is alive.
    pub unsafe fn ptr(&'a self) -> *mut uiDrawBrush {
        &self.ui_draw_brush as *const uiDrawBrush as *mut uiDrawBrush
    }
}
/// A brush that paints all pixels with the same color, respecting alpha.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct SolidBrush {
    /// Red component of the color
    pub r: f64,
    /// Green component of the color
    pub g: f64,
    /// Blue component of the color
    pub b: f64,
    /// Alpha (α) component of the color (that is, opacity).
    pub a: f64,
}
/// A brush that paints a linear gradient.
#[derive(Clone, Debug)]
pub struct LinearGradientBrush {
    /// X coordinate of the gradient's start point.
    pub start_x: f64,
    /// Y coordinate of the gradient's start point.
    pub start_y: f64,
    /// X coordinate of the gradient's end point.
    pub end_x: f64,
    /// Y coordinate of the gradient's end point.
    pub end_y: f64,
    /// Color stops along the gradient axis.
    pub stops: Vec<BrushGradientStop>,
}
/// A brush that paints a radial gradient.
#[derive(Clone, Debug)]
pub struct RadialGradientBrush {
    /// X coordinate of the gradient's start point.
    pub start_x: f64,
    /// Y coordinate of the gradient's start point.
    pub start_y: f64,
    /// X coordinate of the outer circle's center.
    pub outer_circle_center_x: f64,
    /// Y coordinate of the outer circle's center.
    pub outer_circle_center_y: f64,
    /// Radius of the outer circle.
    pub outer_radius: f64,
    /// Color stops of the gradient.
    pub stops: Vec<BrushGradientStop>,
}
|
use std::net::SocketAddr;
use msg_types::{AnnounceRequest, AnnounceSecret, CallResponse, Disconnect};
use mio::Token;
use crate::{client::tui::Tui, common::{debug_message::DebugMessageType, encryption::SymmetricEncryption, lib::read_exact, message_type::{InterthreadMessage, MsgType, msg_types::{self, Call}, Peer}}};
use super::{ConnectionManager, UdpConnection, UdpConnectionState};
impl ConnectionManager {
    /// Reads one length-prefixed message of the given type from the rendezvous
    /// TCP socket and dispatches it to the matching handler.
    ///
    /// Wire format: 8-byte bincode-encoded u64 length, then the payload.
    /// NOTE(review): the results of `read_exact` are ignored here — a short
    /// or failed read would make the following deserialize panic; confirm
    /// whether read errors should be handled explicitly.
    pub fn read_tcp_message(&mut self, msg_type: u8, _: Token) {
        let sock = &mut self.rendezvous_socket;
        let addr = sock.peer_addr().unwrap();
        // Map the raw tag byte onto the MsgType enum (None for unknown tags).
        let msg_type = num::FromPrimitive::from_u8(msg_type);
        let mut msg_size = [0u8; 8];
        read_exact(sock, &mut msg_size);
        let msg_size: u64 = bincode::deserialize(&msg_size).unwrap();
        let mut msg = vec![0;msg_size as usize];
        read_exact(sock, &mut msg[..]);
        match msg_type {
            Some(MsgType::AnnounceRequest) => {
                let announcement: AnnounceRequest = bincode::deserialize(&msg).unwrap();
                self.on_announce_request(addr, announcement);
            }
            Some(MsgType::Announce) => {
                let peers: Vec<Peer> = bincode::deserialize(&msg).unwrap();
                self.on_tcp_announce(addr, peers);
            }
            Some(MsgType::Call) => {
                let call: msg_types::Call = bincode::deserialize(&mut msg[..]).unwrap();
                self.on_call(addr, call);
            }
            Some(MsgType::CallResponse) => {
                let call_response: msg_types::CallResponse = bincode::deserialize(&mut msg[..]).unwrap();
                self.on_call_response(addr, call_response);
            }
            Some(MsgType::Disconnect) => {
                let disconnect_peer: msg_types::Disconnect = bincode::deserialize(&mut msg[..]).unwrap();
                self.on_disconnect(addr, disconnect_peer);
            }
            // Unknown tags are a protocol violation.
            _ => unreachable!()
        }
    }
    /// Answers the server's announce request: stores its public key, sends our
    /// symmetric secret (encrypted with that key), then announces our own
    /// public key.
    fn on_announce_request(&mut self, addr: SocketAddr, announcement: AnnounceRequest) {
        let conn = self.udp_connections.iter()
            .find(|x| x.address == addr).unwrap();
        self.rendezvous_public_key = Some(announcement.public_key);
        let announce_secret = msg_types::AnnounceSecret {
            secret: conn.symmetric_key.as_ref().unwrap().secret.clone()
        };
        self.send_tcp_message_public_key(MsgType::AnnounceSecret, &announce_secret).unwrap();
        let announce_public = msg_types::AnnouncePublic {
            public_key: self.encryption.get_public_key().clone()
        };
        self.send_tcp_message(MsgType::Announce, &announce_public).unwrap();
    }
    /// Merges the server's peer list into ours (deduplicated by public key)
    /// and notifies the UI thread.
    fn on_tcp_announce(&mut self, _: SocketAddr, peers: Vec<Peer>) {
        for new_p in peers {
            if !self.peers.iter().any(|p| p.public_key == new_p.public_key) {
                self.peers.push(new_p);
            }
        }
        self.ui_s.send(InterthreadMessage::AnnounceResponse(self.peers.clone())).unwrap();
    }
    /// Handle incoming call
    fn on_call(&mut self, _: SocketAddr, call: Call) {
        let caller = call.caller.unwrap();
        let udp_address = call.udp_address.unwrap();
        // Pending: no key exchange yet — that happens once the call is accepted.
        let mut conn = UdpConnection::new(UdpConnectionState::Pending, udp_address, self.udp_socket.clone(), None, self.encryption.clone());
        conn.associated_peer = Some(caller.clone());
        self.udp_connections.push(conn);
        // Notify the UI of the incoming call
        self.ui_s.send(InterthreadMessage::Call(caller)).unwrap();
    }
    /// Handle the response to a sent call
    ///
    /// Denied: drop the in-progress call and tell the UI. Accepted: record
    /// the callee's UDP address, start a MidCall connection with a fresh
    /// symmetric key, and kick off the hole-punching by sending the secret.
    fn on_call_response(&mut self, _: SocketAddr, call_response: CallResponse) {
        let call = call_response.call;
        if !call_response.response {
            let i = self.calls_in_progress.iter()
                .position(|(c, _)| c.callee == call.callee)
                .unwrap();
            self.calls_in_progress.remove(i);
            self.ui_s.send(InterthreadMessage::CallDenied(call.callee)).unwrap();
        }
        else {
            let udp_address = call.udp_address.unwrap();
            let p = self.peers.iter_mut().find(|p| p.public_key == call.callee).unwrap();
            p.udp_addr = Some(udp_address);
            let i = self.calls_in_progress.iter()
                .position(|(c, _)| c.callee == call.callee)
                .unwrap();
            self.calls_in_progress.remove(i);
            let sym_key = SymmetricEncryption::new();
            let mut conn = UdpConnection::new(UdpConnectionState::MidCall, udp_address, self.udp_socket.clone(), Some(sym_key), self.encryption.clone());
            conn.associated_peer = Some(call.callee.clone());
            Tui::debug_message(
                &format!("A sent call has been accepted by peer ({};{}), starting the punch through protocol", call.callee, conn.address),
                DebugMessageType::Log, &self.ui_s);
            conn.send_udp_message_with_public_key(MsgType::AnnounceSecret, &AnnounceSecret{secret: conn.symmetric_key.as_ref().unwrap().secret.clone()}, true, None).unwrap();
            self.ui_s.send(InterthreadMessage::CallAccepted(p.public_key.clone())).unwrap();
            self.udp_connections.push(conn);
        }
    }
    /// Removes the disconnected peer's UDP connection (if any) and the peer
    /// itself, then notifies the UI.
    fn on_disconnect(&mut self, _: SocketAddr, disconnect_peer: Disconnect) {
        let p = self.peers.iter_mut().find(|p| p.public_key == disconnect_peer.public_key).unwrap();
        Tui::debug_message(&format!("Peer ({}) disconnected", p.public_key),DebugMessageType::Log, &self.ui_s);
        match p.udp_addr {
            Some(addr) => {
                self.udp_connections.iter_mut()
                    .position(|conn| conn.address == addr)
                    .map(|i| self.udp_connections.remove(i)).unwrap();
            }
            None => {}
        }
        self.peers.iter_mut()
            .position(|p| p.public_key == disconnect_peer.public_key)
            .map(|i| self.peers.remove(i));
        self.ui_s.send(InterthreadMessage::PeerDisconnected(disconnect_peer.public_key)).unwrap();
    }
}
/// Tracks whether the symbol with a given name still needs compilation.
///
/// Equality and hashing are both based on `name` alone. The previous code
/// derived `Hash` (hashing `name` AND `compiled`) while hand-writing a
/// name-only `PartialEq`, violating the `a == b => hash(a) == hash(b)`
/// contract required by `HashMap`/`HashSet`; `Hash` is now manual.
#[derive(Eq)]
pub struct SymbolStatus {
    name: String,
    compiled: bool
}
impl std::hash::Hash for SymbolStatus {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Hash only the field that `PartialEq` compares.
        std::hash::Hash::hash(&self.name, state);
    }
}
impl SymbolStatus {
    /// Creates a status for `name`, initially not compiled.
    pub fn new(name: String) -> SymbolStatus {
        SymbolStatus { name, compiled: false }
    }
    /// The symbol's name.
    pub fn get_name(&self) -> &String {
        &self.name
    }
    /// True until `set_compiled` is called.
    pub fn should_compile(&self) -> bool {
        !self.compiled
    }
    /// Marks the symbol as compiled.
    pub fn set_compiled(&mut self) {
        self.compiled = true;
    }
}
impl PartialEq for SymbolStatus {
    fn eq(&self, other: &SymbolStatus) -> bool {
        self.name == other.name
    }
}
|
// svd2rust-generated read accessors for the GICD_CIDR2 register.
#[doc = "Register `GICD_CIDR2` reader"]
pub type R = crate::R<GICD_CIDR2_SPEC>;
#[doc = "Field `CIDR2` reader - CIDR2"]
pub type CIDR2_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - CIDR2"]
    #[inline(always)]
    pub fn cidr2(&self) -> CIDR2_R {
        // The field spans the whole 32-bit register, so no masking is needed.
        CIDR2_R::new(self.bits)
    }
}
#[doc = "GICD component ID2 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gicd_cidr2::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GICD_CIDR2_SPEC;
impl crate::RegisterSpec for GICD_CIDR2_SPEC {
    type Ux = u32;
}
// Read-only register: only `Readable` is implemented, no `Writable`.
#[doc = "`read()` method returns [`gicd_cidr2::R`](R) reader structure"]
impl crate::Readable for GICD_CIDR2_SPEC {}
#[doc = "`reset()` method sets GICD_CIDR2 to value 0x05"]
impl crate::Resettable for GICD_CIDR2_SPEC {
    const RESET_VALUE: Self::Ux = 0x05;
}
|
#![allow(dead_code)]
#![allow(unreachable_patterns)]
/// Entry point: runs only the simplest match example.
fn main() {
    simple();
}
/// Matches a character literal; the fallback branch can never run.
fn simple() {
    let letter = 'c';
    if letter == 'c' {
        println!("Hello");
    } else {
        unreachable!();
    }
}
/// Shows that an identifier pattern in a match arm creates a NEW binding,
/// shadowing any outer variable of the same name rather than comparing to it.
fn destruct2() {
    let x = 1;
    let c = 'c';
    match c {
        x => {
            assert_eq!(x, c);
            // ^ This arm binds a new `x` that captures `c`
            //   (it does not compare against the outer `x`).
        }
        _ => {
            // Unreachable: the identifier pattern above matches everything.
            assert!(false, "Never Here.");
        }
    }
    // The outer `x` is unchanged.
    assert_eq!(x, 1);
}
/// Demonstrates the different kinds of match arms: single values,
/// alternatives, inclusive ranges, a catch-all, and `match` as an expression.
fn mul_conditions() {
    let number = 13;
    // Try assigning different values to `number`.
    println!("Tell me about {}", number);
    match number {
        // Match a single value.
        1 => println!("One!"),
        // Match any of several values.
        2 | 3 | 5 | 7 | 11 => println!("This is a prime"),
        // Match an inclusive range. `13..=19` replaces the legacy `13...19`
        // syntax, which is deprecated and a hard error from edition 2021 on.
        13..=19 => println!("A teen"),
        // Handle every other case.
        _ => println!("Ain't special"),
    }
    let boolean = true;
    // `match` is an expression too.
    let binary = match boolean {
        // The arms must cover every possible value.
        false => 0,
        true => 1,
        // Try commenting one of these arms out.
    };
    println!("{} -> {}", boolean, binary);
}
|
use ggez::graphics::Point2;
use std;
use consts::*;
use std::f32::consts::PI;
use utils::PointArithmetic;
//////////////////////////////////////////////////
pub struct LiveArrow {
    /// Ground-plane position (assumed screen/world pixels — TODO confirm).
    pub position: Point2,
    /// Horizontal flight direction in radians (derived from `momentum`).
    pub angle: f32,
    /// Horizontal velocity components.
    pub momentum: Point2,
    /// Current height above the ground plane.
    pub height: f32,
    /// Vertical (climb) velocity; positive presumably means upward — TODO confirm.
    pub climb_momentum: f32,
}
impl LiveArrow {
    /// Creates an in-flight arrow; its facing angle is taken from the initial
    /// momentum and it starts at a fixed height of 10.0.
    pub fn new(position: Point2, momentum: Point2, climb_momentum: f32) -> Self {
        Self {
            position,
            momentum,
            climb_momentum,
            angle: momentum.rotation(),
            height: 10.0,
        }
    }
    /// Climb speed as a fraction of total 3D speed.
    fn normalized_climb(&self) -> f32 {
        self.climb_momentum / self.total_momentum()
    }
    /// Magnitude of the full 3D velocity (horizontal x, horizontal y, climb).
    fn total_momentum(&self) -> f32 {
        (sqr!(self.momentum[0]) + sqr!(self.momentum[1]) + sqr!(self.climb_momentum)).sqrt()
    }
    /// Draw length of the arrow's ground shadow.
    ///
    /// NOTE(review): empirical formula — a cosine easing of the vertical draw
    /// ratio blended with the horizontal direction; constants look hand-tuned.
    pub fn shadow_draw_length(&self) -> f32 {
        let x = self._vert_draw_ratio().abs();
        let y = ((x * PI).cos() + 1.) / 2.;
        let val = 0.7 * y + 0.2;
        val * 0.3 * self.momentum[0].abs() / self.momentum.offset() + 0.7 * val
    }
    // returns 1.0 when arrow is shot straight up, -1.0 when shot straight down
    fn _vert_draw_ratio(&self) -> f32 {
        let x = self.angle.sin();
        // Climb fraction of total speed (hypot of climb vs horizontal speed).
        let hyp = self.climb_momentum.hypot(self.momentum.offset());
        let climb_ratio = self.climb_momentum / hyp;
        // Weight by how vertical the screen-space facing is.
        let x_influence = 0.5 + 0.5 * x.abs();
        x_influence * climb_ratio
    }
    /// Apparent (foreshortened) sprite length while in flight.
    pub fn image_draw_length(&self) -> f32 {
        //TODO REDO entirely
        // let rat = self._vert_draw_ratio();
        let max_len_at_norm_climb = { -self.angle.sin() * 0.6 };
        let nclimb = self.normalized_climb();
        println!("nclimb {:?}", nclimb);
        1. - (max_len_at_norm_climb - nclimb).abs() * (self.angle.sin().abs())
    }
    /// Sprite rotation: blends the flight angle toward straight up or
    /// straight down according to the vertical draw ratio, taking the shorter
    /// way around the circle.
    pub fn image_angle(&self) -> f32 {
        let pi = std::f32::consts::PI;
        let mut ratio = self._vert_draw_ratio();
        let val = if ratio > 0. {
            // skew UPWARD
            //ensure shortest distance around the clock is in phase
            let n = if self.angle > pi / 2. {
                //positive
                self.angle - pi * 2.
            } else {
                //negative
                self.angle
            };
            // print!("angle:{}\tup:{}\t+ normal:{}\tn:{}", self.angle, ratio, 1.-ratio, n);
            (-pi / 2./*UP*/) * ratio + n * (1. - ratio)
        } else {
            ratio *= -1.;
            // skew DOWNWARD
            //ensure shortest distance around the clock is in phase
            let n = if self.angle < -pi / 2. {
                //positive
                self.angle + pi * 2.
                // self.angle
            } else {
                //negative
                self.angle
            };
            // assert!(n >= 0.);
            // print!("angle:{}\tdown:{}\t+ normal:{}\tn:{}", self.angle, ratio, 1.-ratio, n);
            (pi / 2./*DOWN*/) * ratio + n * (1. - ratio)
        };
        val
    }
}
|
pub mod dataset;
pub mod layer;
pub mod matrix;
pub mod nn;
|
use http;
use http::HttpError;
// Builds a response via the builder API and checks the rendered status line.
#[test]
fn test_http_response(){
    let dummy_response: http::HttpResponse = http::HttpResponse::builder()
        .status_code(http::HttpResponseStatusCode::OK)
        .add_header("Accept", "text/html, application/xhtml+xml")
        .add_header("Accept-Encoding", "gzip, deflate, sdch")
        .add_header("Accept-Language", "en-US, en")
        .add_body("<html></html>")
        .finalize();
    assert_eq!(dummy_response.get_status_code(), "200 OK");
}
// Parses a raw HTTP/1.1 GET request string and checks the detected method.
#[test]
fn test_http_request(){
    let dummy_request_string: &str = "GET / HTTP/1.1\r\nHost: www.jcu.edu.au\r\nCache-Control : max-age=0\r\nUser-Agent: Mozilla/5.0 Accept: text/html, application/xhtml+xml, application.xml;q=0.9, */*;q=0.8\r\nAccept-Encoding: gzip, deflate, sdch\r\nAccept-Language: en-US, en; q=0.8\r\nAccept-Charset: ISO-8859-1, utf-8\r\n\r\n" ;
    let dummy_request: http::HttpRequest = match http::HttpRequest::parse(dummy_request_string){
        Ok(method) => method,
        Err(err) => {
            // Log the parser error before failing the test.
            println!("{:?}", err);
            panic!()
        },
    };
    assert_eq!(dummy_request.get_method(), http::HttpRequestMethod::GET);
}
use std::error::Error;
use std::io::{self, prelude::*};
/// What is the best beauty score possible, when taking `num_plates` from `stacks`?
///
/// `stacks` must be non-empty and rectangular: `n` stacks of `k >= 1` plates
/// each, listed top to bottom. (They need not form a *square* matrix as the
/// old doc claimed — only `num_plates <= n * k` is required.)
fn best_beauty_score(stacks: &[Vec<u32>], num_plates: usize) -> u32 {
    let n = stacks.len();
    assert_ne!(n, 0);
    let k = stacks[0].len();
    assert_ne!(k, 0);
    // Otherwise, the problem is impossible.
    assert!(num_plates <= n * k);
    // best[i][p]: best score when `p` plates must come from `stacks[i..]`.
    // f64 lets us represent impossible sub-problems as NEG_INFINITY.
    let mut best = vec![vec![0.0_f64; num_plates + 1]; n + 1];
    // Base cases: with no stacks left, only p == 0 is feasible.
    for p in 1..=num_plates {
        best[n][p] = f64::NEG_INFINITY;
    }
    for i in (0..n).rev() {
        // Prefix sums of stack i: prefix[t] = value of its top t plates.
        // Removes the O(k) re-summation per candidate that made each cell
        // O(k^2); u64 also rules out u32 overflow of the partial sums.
        let mut prefix = vec![0u64; k + 1];
        for t in 0..k {
            prefix[t + 1] = prefix[t] + u64::from(stacks[i][t]);
        }
        for p in 0..=num_plates {
            // Take between 0 and k plates here, leaving j for later stacks.
            let low = p.saturating_sub(k);
            best[i][p] = (low..=p)
                .map(|j| prefix[p - j] as f64 + best[i + 1][j])
                .fold(f64::NEG_INFINITY, f64::max);
        }
    }
    let ans = best[0][num_plates];
    // This follows from the assumption that `num_plates <= n * k`.
    assert!(ans.is_finite());
    ans as u32
}
/// Entry point: feeds locked stdin to the test-case runner.
fn main() -> Result<(), Box<dyn Error>> {
    run_tests(io::stdin().lock())
}
/// Panics if the input isn't correctly formatted.
///
/// Expected format: a line with the case count `T`, then per case a
/// `N K P` header line followed by `N` rows of `K` plate values.
#[allow(non_snake_case)]
fn run_tests(input: impl BufRead) -> Result<(), Box<dyn Error>> {
    let mut lines = input.lines();
    let num_cases: u32 = lines.next().unwrap()?.parse()?;
    for case in 1..=num_cases {
        let header = lines.next().unwrap()?;
        let dims: Vec<usize> = header
            .split_whitespace()
            .map(|w| w.parse())
            .collect::<Result<_, _>>()?;
        // Exactly N, K and P on the header line.
        assert_eq!(dims.len(), 3);
        let (n, k, p) = (dims[0], dims[1], dims[2]);
        let mut stacks = Vec::with_capacity(n);
        for _ in 0..n {
            let row = lines.next().unwrap()?;
            let stack: Vec<u32> = row
                .split_whitespace()
                .map(|w| w.parse::<u32>())
                .collect::<Result<_, _>>()?;
            assert_eq!(stack.len(), k);
            stacks.push(stack);
        }
        println!("Case #{}: {}", case, best_beauty_score(&stacks, p));
    }
    // No trailing garbage allowed.
    assert!(lines.next().is_none());
    Ok(())
}
|
// svd2rust-style generated reader/writer accessors for the Cortex-M AIRCR
// (Application Interrupt and Reset Control Register) in the System Control Block.
#[doc = "Reader of register AIRCR"]
pub type R = crate::R<u32, super::AIRCR>;
#[doc = "Writer for register AIRCR"]
pub type W = crate::W<u32, super::AIRCR>;
#[doc = "Register AIRCR `reset()`'s with value 0"]
impl crate::ResetValue for super::AIRCR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // The register reads back as all zeroes after reset.
        0
    }
}
#[doc = "Reader of field `VECTKEY`"]
pub type VECTKEY_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `VECTKEY`"]
pub struct VECTKEY_W<'a> {
    w: &'a mut W,
}
impl<'a> VECTKEY_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // VECTKEY occupies bits 16..=31: clear those bits, then OR in the value.
        self.w.bits = (self.w.bits & !(0xffff << 16)) | (((value as u32) & 0xffff) << 16);
        self.w
    }
}
#[doc = "Reader of field `ENDIANESS`"]
pub type ENDIANESS_R = crate::R<bool, bool>;
#[doc = "Reader of field `SYSRESETREQ`"]
pub type SYSRESETREQ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SYSRESETREQ`"]
pub struct SYSRESETREQ_W<'a> {
    w: &'a mut W,
}
impl<'a> SYSRESETREQ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SYSRESETREQ is bit 2.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `VECTCLRACTIVE`"]
pub type VECTCLRACTIVE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `VECTCLRACTIVE`"]
pub struct VECTCLRACTIVE_W<'a> {
    w: &'a mut W,
}
impl<'a> VECTCLRACTIVE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // VECTCLRACTIVE is bit 1.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
impl R {
    #[doc = "Bits 16:31 - Register key:\\n Reads as Unknown\\n On writes, write 0x05FA to VECTKEY, otherwise the write is ignored."]
    #[inline(always)]
    pub fn vectkey(&self) -> VECTKEY_R {
        VECTKEY_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
    #[doc = "Bit 15 - Data endianness implemented:\\n 0 = Little-endian."]
    #[inline(always)]
    pub fn endianess(&self) -> ENDIANESS_R {
        ENDIANESS_R::new(((self.bits >> 15) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Writing 1 to this bit causes the SYSRESETREQ signal to the outer system to be asserted to request a reset. The intention is to force a large system reset of all major components except for debug. The C_HALT bit in the DHCSR is cleared as a result of the system reset requested. The debugger does not lose contact with the device."]
    #[inline(always)]
    pub fn sysresetreq(&self) -> SYSRESETREQ_R {
        SYSRESETREQ_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 1 - Clears all active state information for fixed and configurable exceptions. This bit: is self-clearing, can only be set by the DAP when the core is halted. When set: clears all active exception status of the processor, forces a return to Thread mode, forces an IPSR of 0. A debugger must re-initialize the stack."]
    #[inline(always)]
    pub fn vectclractive(&self) -> VECTCLRACTIVE_R {
        VECTCLRACTIVE_R::new(((self.bits >> 1) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bits 16:31 - Register key:\\n Reads as Unknown\\n On writes, write 0x05FA to VECTKEY, otherwise the write is ignored."]
    #[inline(always)]
    pub fn vectkey(&mut self) -> VECTKEY_W {
        VECTKEY_W { w: self }
    }
    #[doc = "Bit 2 - Writing 1 to this bit causes the SYSRESETREQ signal to the outer system to be asserted to request a reset. The intention is to force a large system reset of all major components except for debug. The C_HALT bit in the DHCSR is cleared as a result of the system reset requested. The debugger does not lose contact with the device."]
    #[inline(always)]
    pub fn sysresetreq(&mut self) -> SYSRESETREQ_W {
        SYSRESETREQ_W { w: self }
    }
    #[doc = "Bit 1 - Clears all active state information for fixed and configurable exceptions. This bit: is self-clearing, can only be set by the DAP when the core is halted. When set: clears all active exception status of the processor, forces a return to Thread mode, forces an IPSR of 0. A debugger must re-initialize the stack."]
    #[inline(always)]
    pub fn vectclractive(&mut self) -> VECTCLRACTIVE_W {
        VECTCLRACTIVE_W { w: self }
    }
}
|
#[cfg(test)]
mod tests {
    use crate::aes_ecb::aes_128_ecb_decrypt;
    use crate::aes_ecb::aes_128_ecb_encrypt;
    use crate::util::find_blocksize;
    use crate::util::generate_random_bytes;
    use crate::util::parse_key_value;
    use crate::util::profile_for;
    use crypto::symmetriccipher::SymmetricCipherError;
    use std::collections::HashMap;
    use std::iter;
    use std::str;
    // Thirteenth cryptopals challenge - https://cryptopals.com/sets/2/challenges/13
    // ECB cut-and-paste: because ECB encrypts identical 16-byte blocks
    // identically and independently, we can splice a ciphertext block that
    // decrypts to "admin<padding>" over the block holding the role value.
    #[test]
    fn challenge13() {
        // Black Box
        let key = generate_random_bytes(16);
        // Oracle: builds "email=...&uid=...&role=user" and ECB-encrypts it.
        let encrypt_user_profile = |email: &[u8]| {
            let profile = profile_for(str::from_utf8(email).unwrap(), 10, "user");
            aes_128_ecb_encrypt(&key, profile.as_bytes(), true)
        };
        // Test
        // Decrypts, strips trailing 0x04 padding bytes, and parses the
        // key=value profile string into a map.
        fn decrypt_user_profile(
            key: &[u8],
            ciphertext: &[u8],
        ) -> Result<HashMap<String, String>, SymmetricCipherError> {
            let mut decrypted: Vec<u8> = aes_128_ecb_decrypt(key, ciphertext, true)?;
            let mut last = decrypted.pop().unwrap();
            while last == 4u8 {
                last = decrypted.pop().unwrap();
            }
            // The final pop removed a real (non-padding) byte; put it back.
            decrypted.push(last);
            Ok(parse_key_value(str::from_utf8(&decrypted).unwrap()))
        }
        let blocksize = find_blocksize(&encrypt_user_profile).unwrap();
        assert_eq!(16, blocksize);
        // Create an "email address" long enough to fill up the first block so that we can add 'admin'
        // and padding in the next
        let mut test_email = iter::repeat('A' as u8)
            .take(blocksize - 6)
            .collect::<Vec<u8>>();
        assert_eq!(10, test_email.len()); // + "email=" makes 16 bytes
        test_email.extend("admin".as_bytes());
        test_email.extend(iter::repeat(4u8).take(11).collect::<Vec<u8>>()); // Pad rest of second block
        assert_eq!(26, test_email.len());
        // Try encrypting that email address
        let encrypted = encrypt_user_profile(&test_email).unwrap();
        // Now take the second block. We'll substitute that block in later
        let admin_block = &encrypted[16..32];
        // Next we want an email long enough to end the block with "role="
        // so that's 32 - 19, so 13 bytes
        test_email = iter::repeat('A' as u8).take(13).collect::<Vec<u8>>();
        let encrypted_again = encrypt_user_profile(&test_email).unwrap();
        // We replace the third block with our admin block
        let mut new_encrypted = encrypted_again[..32].to_vec();
        new_encrypted.extend(admin_block);
        let profile = decrypt_user_profile(&key, &new_encrypted).unwrap();
        assert_eq!("admin", profile["role"]);
    }
}
|
pub mod server;
pub mod util;
pub mod cache;
pub mod authority;
pub mod index;
|
use messages::payloads::*;
/// All payload variants a `Message` can carry; payload bodies come from
/// `messages::payloads`.
#[derive(Debug, Clone)]
pub enum Payload {
    Nothing,
    /// Unrecognized message; carries a string (presumably the raw message
    /// name — confirm against the deserializer).
    Unknown(String),
    Hello(HelloInfo),
    State(GameStateInfo),
    Delete(DeleteInfo),
    PlayerDelete(DeleteInfo),
    Ping,
    Pong(PongInfo),
    PlayerInfo(PlayerInfo),
    PickupInfo(PickupInfo),
    PlayerUpdate(PlayerUpdateInfo),
    PlayerJoined(PlayerJoinedInfo),
    MadeConnection,
}
/// Which side of the connection may send a given message type.
#[derive(Debug, Clone)]
pub enum AllowedUsage {
    FromServerOnly,
    FromClientOnly,
}
/// Static, per-message-type metadata.
#[derive(Debug, Clone)]
pub struct MetaData {
    // Message-type identifier string.
    id : &'static str,
    allowed_usage: AllowedUsage,
}
/// A payload paired with the static metadata for its message type.
#[derive(Debug, Clone)]
pub struct PayloadStruct {
    data: Payload,
    meta_data: &'static MetaData,
}
impl Payload {
    /// Returns the canonical message-name string for this variant;
    /// `Message::new` stores it in the `msg` field.
    pub fn get_name(&self) -> &'static str {
        match *self {
            Payload::Nothing => "nothing",
            // NOTE(review): "uknown" looks like a typo for "unknown", but the
            // string is wire-visible — confirm what the remote side expects
            // before correcting it.
            Payload::Unknown(_) => "uknown",
            Payload::Hello(_) => "hello",
            Payload::PlayerInfo(_) => "playerInfo",
            Payload::State(_) => "state",
            Payload::Delete(_) => "delete",
            Payload::Ping => "ping",
            Payload::Pong(_) => "pong",
            Payload::PickupInfo(_) => "pickupInfo",
            Payload::PlayerUpdate(_) => "playerUpdate",
            Payload::MadeConnection => "madeConnection",
            Payload::PlayerDelete(_) => "playerDelete",
            Payload::PlayerJoined(_) => "playerJoined",
        }
    }
}
/// Wire envelope: message name, timestamp, id, and the payload itself.
#[derive(Debug, Clone)]
pub struct Message {
    // Message name; `Message::new` keeps it in sync with `data.get_name()`.
    pub msg: String,
    pub time: u64,
    pub id: u64,
    pub data: Payload,
}
impl Message {
    /// Builds a `Message`, deriving `msg` from the payload's canonical name.
    pub fn new(data: Payload, id: u64, time: u64) -> Self {
        Self {
            id,
            time,
            // Evaluated before `data` is moved into the struct below.
            msg: data.get_name().to_string(),
            data,
        }
    }
}
|
/// Reads one line from stdin and parses it as `T`; panics on parse failure.
fn read<T: std::str::FromStr>() -> T {
    let mut line = String::new();
    std::io::stdin().read_line(&mut line).ok();
    line.trim().parse().ok().unwrap()
}
/// Reads one line and parses its whitespace-separated tokens into a `Vec<T>`.
fn read_vec<T: std::str::FromStr>() -> Vec<T> {
    let line: String = read();
    line.split_whitespace()
        .map(|token| token.parse().ok().unwrap())
        .collect()
}
/// Reads `n` lines, parsing each one with `read_vec`.
fn read_vec2<T: std::str::FromStr>(n: u32) -> Vec<Vec<T>> {
    (0..n).map(|_| read_vec()).collect()
}
/// Absolute value of `x`.
///
/// Delegates to the standard `i32::abs` instead of the hand-rolled
/// conditional; like the original, this panics on `i32::MIN` in debug builds.
fn abs(x: i32) -> i32 {
    x.abs()
}
/// For each of the `n` query points, print the 1-based index of the nearest
/// of `m` reference points by Manhattan distance (smallest index wins ties).
fn main() {
    let header: Vec<i32> = read_vec();
    let n = header[0];
    let m = header[1];
    let points: Vec<Vec<i32>> = read_vec2(n as u32);
    let refs: Vec<Vec<i32>> = read_vec2(m as u32);
    for point in &points {
        // Start with reference 0 and keep the strictly closer one, so the
        // earliest index is retained on ties.
        let mut best_idx: i32 = 0;
        let mut best_dist = abs(point[0] - refs[0][0]) + abs(point[1] - refs[0][1]);
        for j in 1..m {
            let dist =
                abs(point[0] - refs[j as usize][0]) + abs(point[1] - refs[j as usize][1]);
            if dist < best_dist {
                best_dist = dist;
                best_idx = j;
            }
        }
        println!("{}", best_idx + 1);
    }
}
|
/// Project Euler problem 1: print the sum of all natural numbers below 1000
/// that are multiples of 3 or 5.
fn main() {
    println!("Project euler problem 1");
    println!("Multiples of 3 and 5");
    println!("If we list all the natural numbers below 10 that are multiples of 3 or 5, \
              we get 3, 5, 6 and 9. The sum of these multiples is 23. \
              Find the sum of all the multiples of 3 or 5 below 1000.");
    let max_range = 1000;
    // A single filtered sum replaces the original two interleaved series
    // (3*k, and 5*k skipping multiples of 15); the `||` filter naturally
    // counts multiples of both 3 and 5 exactly once.
    let sum: i32 = (1..max_range).filter(|i| i % 3 == 0 || i % 5 == 0).sum();
    println!("Sum is : {}", sum);
}
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// AlertGraphWidgetDefinitionType : Type of the alert graph widget.
/// Type of the alert graph widget.
// Single-variant enum generated from the OpenAPI spec; serde serializes it
// to the literal string "alert_graph".
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum AlertGraphWidgetDefinitionType {
    #[serde(rename = "alert_graph")]
    ALERT_GRAPH,
}
// Implementing `Display` (rather than `ToString` directly) is the idiomatic
// way to provide string conversion; the standard blanket
// `impl<T: Display> ToString for T` keeps every existing `.to_string()`
// caller working unchanged.
impl std::fmt::Display for AlertGraphWidgetDefinitionType {
    /// Writes the wire value of the variant (e.g. `"alert_graph"`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::ALERT_GRAPH => write!(f, "alert_graph"),
        }
    }
}
|
// Enums test
// NOTE(review): this looks like formatter (rustfmt-style) test input — it
// contains deliberate oddities (`#[atrr]`, `EmtpyWithComment`, undeclared
// types `A`/`X`/`Baz`) and is not meant to compile. Left byte-identical so
// any formatting expectations stay valid.
#[atrr]
pub enum Test {
    A,
    B(u32, A /* comment */),
    /// Doc comment
    C,
}
pub enum Foo<'a, Y: Baz>
where X: Whatever
{
    A,
}
enum EmtpyWithComment {
    // Some comment
}
// C-style enum
enum Bar {
    A = 1,
    #[someAttr(test)]
    B = 2, // comment
    C,
}
enum LongVariants {
    First(LOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOONG, // small comment
    VARIANT),
    // This is the second variant
    Second,
}
|
pub mod application;
pub mod container;
pub mod domain;
pub mod infrastructure;
mod mocks;
|
use super::{Block, BlockId, Field};
use crate::resource::ResourceId;
use crate::Promise;
use std::collections::HashSet;
use std::{cell::RefCell, rc::Rc};
use wasm_bindgen::{prelude::*, JsCast};
/// A canvas-backed 2D texture.
///
/// `buffer_size` is the pixel size of the backing `<canvas>`, `size` is the
/// logical size in scene units, and `pixel_ratio` is `buffer_size / size`
/// per axis, applied as a scale on the 2D context.
#[derive(Clone)]
pub struct Texture {
    element: web_sys::HtmlCanvasElement,
    context: web_sys::CanvasRenderingContext2d,
    size: [f64; 2],
    buffer_size: [f64; 2],
    pixel_ratio: [f64; 2],
}
impl Texture {
    /// Fetches the 2D rendering context of `canvas`; panics if unavailable.
    fn get_context2d_from_canvas(
        canvas: &web_sys::HtmlCanvasElement,
    ) -> web_sys::CanvasRenderingContext2d {
        canvas
            .get_context("2d")
            .unwrap()
            .unwrap()
            .dyn_into::<web_sys::CanvasRenderingContext2d>()
            .unwrap()
    }
    /// Creates a canvas of `buffer_size` pixels representing `size` logical units.
    pub fn new(buffer_size: &[u32; 2], size: [f64; 2]) -> Self {
        let element = web_sys::window()
            .unwrap()
            .document()
            .unwrap()
            .create_element("canvas")
            .unwrap()
            .dyn_into::<web_sys::HtmlCanvasElement>()
            .unwrap();
        element.set_width(buffer_size[0]);
        element.set_height(buffer_size[1]);
        let context = Self::get_context2d_from_canvas(&element);
        // Placeholder size/ratio; `set_size` below establishes the real values.
        let mut me = Self {
            element,
            context,
            pixel_ratio: [1.0, 1.0],
            size: [1.0, 1.0],
            buffer_size: [buffer_size[0] as f64, buffer_size[1] as f64],
        };
        me.set_size(size);
        me
    }
    /// Maps a centered scene coordinate to a top-left-origin texture coordinate.
    pub fn texture_position(&self, p: &[f64; 2]) -> [f64; 2] {
        [(p[0] + self.size[0] / 2.0), -(p[1] - self.size[1] / 2.0)]
    }
    pub fn element(&self) -> &web_sys::HtmlCanvasElement {
        &self.element
    }
    pub fn context(&self) -> &web_sys::CanvasRenderingContext2d {
        &self.context
    }
    /// Updates the logical size, rescaling the context so drawing keeps
    /// addressing logical units.
    pub fn set_size(&mut self, size: [f64; 2]) {
        let new_pixel_ratio = [self.buffer_size[0] / size[0], self.buffer_size[1] / size[1]];
        // Context scale is cumulative, so apply only the delta from the
        // previously applied ratio.
        let _ = self.context.scale(
            new_pixel_ratio[0] / self.pixel_ratio[0],
            new_pixel_ratio[1] / self.pixel_ratio[1],
        );
        self.pixel_ratio = new_pixel_ratio;
        self.size = size;
    }
    /// Clears the entire backing buffer.
    pub fn clear(&self) {
        // `[f64; 2]` is `Copy`; the previous `.clone()` was redundant
        // (clippy: clone_on_copy).
        let [px, py] = self.pixel_ratio;
        self.context
            .clear_rect(0.0, 0.0, self.buffer_size[0] * px, self.buffer_size[1] * py);
    }
}
impl Block for Texture {
    /// Serializes the texture as a PNG blob plus its logical size,
    /// delivered asynchronously through a `Promise`.
    fn pack(&self) -> Promise<JsValue> {
        let w = self.size[0];
        let h = self.size[1];
        let element = self.element.clone();
        let element = element;
        Promise::new(move |resolve| {
            // `to_blob` is callback-based and `resolve` is consume-once, so
            // smuggle it through RefCell<Option<..>> and take it exactly once.
            let resolve = RefCell::new(Some(resolve));
            let a = Closure::once(Box::new(move |blob| {
                let obj = object! {
                    buffer: blob,
                    size: array![w, h]
                };
                let obj: js_sys::Object = obj.into();
                let obj: JsValue = obj.into();
                if let Some(resolve) = resolve.borrow_mut().take() {
                    resolve(Some(obj));
                }
            }) as Box<dyn FnOnce(web_sys::Blob)>);
            element.to_blob(a.as_ref().unchecked_ref()).unwrap();
            // Leak the closure so it stays alive until the browser invokes it.
            a.forget();
        })
    }
    /// Reconstructs a `Texture` from `pack`'s output. Accepts the buffer as
    /// either an `ArrayBuffer` or a `Blob`; resolves with `None` otherwise.
    fn unpack(_: &mut Field, val: JsValue) -> Promise<Box<Self>> {
        use crate::JsObject;
        let val = val.dyn_into::<JsObject>().unwrap();
        let buffer = val.get("buffer").unwrap();
        let buffer = if let Some(buffer) = buffer.dyn_ref::<js_sys::ArrayBuffer>() {
            web_sys::Blob::new_with_buffer_source_sequence_and_options(
                array![buffer].as_ref(),
                web_sys::BlobPropertyBag::new().type_("image/png"),
            )
            .ok()
        } else if let Ok(buffer) = buffer.dyn_into::<web_sys::Blob>() {
            Some(buffer)
        } else {
            None
        };
        let size = js_sys::Array::from(&val.get("size").unwrap()).to_vec();
        let size = [size[0].as_f64().unwrap(), size[1].as_f64().unwrap()];
        if let Some(blob) = buffer {
            Promise::new(move |resolve| {
                // Load the blob into an <img>, then draw it onto a fresh
                // canvas once the onload event fires.
                let image = Rc::new(crate::util::html_image_element());
                let a = {
                    let image = Rc::clone(&image);
                    Closure::once(Box::new(move || {
                        let w = image.width();
                        let h = image.height();
                        let me = Self::new(&[w, h], size);
                        let _ = me
                            .context()
                            .draw_image_with_html_image_element_and_dw_and_dh(
                                &image,
                                0.0,
                                0.0,
                                w as f64 / me.pixel_ratio[0],
                                h as f64 / me.pixel_ratio[1],
                            );
                        resolve(Some(Box::new(me)));
                    }))
                };
                image.set_onload(Some(&a.as_ref().unchecked_ref()));
                if let Ok(object_url) = web_sys::Url::create_object_url_with_blob(&blob) {
                    image.set_src(&object_url);
                }
                // Keep the onload closure alive past this scope.
                a.forget();
            })
        } else {
            Promise::new(|resolve| resolve(None))
        }
    }
    // A texture depends on no other blocks or resources.
    fn dependents(&self, _: &Field) -> HashSet<BlockId> {
        set! {}
    }
    fn resources(&self, _: &Field) -> HashSet<ResourceId> {
        set! {}
    }
}
|
/*
Okay... one way or another I have a list of per-rect properties, like position. All rects in that list are drawn with the same texture and geometry, however that geometry is specified. Then I have another list of (texture, rect list) pairs. And that gets me batched drawing in a form that's easy to make automatic.
THEN I have one or more frames in flight, and each frame in flight has its own copies of the things necessary to actually draw that frame: uniforms/push constants, descriptor set, and the GPU buffers containing the list of (texture, rect list) pairs
To actually draw, you grab the appropriate free frame-in-flight, copy your (texture, rect list) pairs into its buffers, and for each pair issue one draw call to draw that chunk of the buffer.
*/
use std::io;
use std::mem;
use std::sync::Arc;
use rendy::command::{QueueId, RenderPassEncoder};
use rendy::factory::{Factory, ImageState};
use rendy::graph::{render::*, GraphContext, NodeBuffer, NodeImage};
use rendy::hal;
use rendy::hal::device::Device;
use rendy::memory::Dynamic;
use rendy::mesh::AsVertex;
use rendy::resource::{
Buffer, BufferInfo, DescriptorSet, DescriptorSetLayout, Escape, Filter, Handle, SamplerDesc,
WrapMode,
};
use rendy::texture::Texture;
use rendy::init::winit::event::{Event, WindowEvent};
use rendy::init::winit::event_loop::{ControlFlow, EventLoop};
use rendy::init::winit::window::{Window, WindowBuilder};
use euclid;
use log::*;
use oorandom;
// Convenience geometry aliases over `euclid`, all in the default unit space.
pub type Point2 = euclid::Point2D<f32, euclid::UnknownUnit>;
pub type Transform3 = euclid::Transform3D<f32, euclid::UnknownUnit, euclid::UnknownUnit>;
pub type Rect = euclid::Rect<f32, euclid::UnknownUnit>;
/// Data we need for each quad instance.
/// DrawParam gets turned into this, eventually.
/// We have to be *quite particular* about layout since this gets
/// fed straight to the shader.
///
/// TODO: Currently the shader doesn't use src or color though.
#[repr(C, align(16))]
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
pub struct QuadData {
    // 4x4 matrix stored column-major (filled via `to_column_major_array`).
    transform: [f32; 16],
    // Source rect as [x, y, w, h] — unused by the shader per the TODO above.
    rect: [f32; 4],
    // RGBA color — also currently unused by the shader.
    color: [f32; 4],
}
use rendy::mesh::{Attribute, VertexFormat};
/// Okay, this tripped me up. Instance data is technically
/// part of the per-vertex data. So we describe it as
/// part of the vertex format. This is where that
/// definition happens.
///
/// This trait impl is basically extended from the impl for
/// `rendy::mesh::Transform`
impl AsVertex for QuadData {
    fn vertex() -> VertexFormat {
        // Offsets mirror the #[repr(C, align(16))] layout of `QuadData`:
        // four vec4 columns of `transform` (bytes 0..64), `rect` (64..80),
        // `color` (80..96); stride 96 == size_of::<QuadData>().
        //
        // NOTE(review): the second argument to `Attribute::new` is 0 for
        // every attribute except "Transform2" (1). That inconsistency looks
        // like a copy-paste slip — confirm the intended meaning of this
        // index against the rendy `Attribute::new` docs before changing it.
        VertexFormat {
            attributes: vec![
                Attribute::new(
                    "Transform1",
                    0,
                    hal::pso::Element {
                        format: hal::format::Format::Rgba32Sfloat,
                        offset: 0,
                    },
                ),
                Attribute::new(
                    "Transform2",
                    1,
                    hal::pso::Element {
                        format: hal::format::Format::Rgba32Sfloat,
                        offset: 16,
                    },
                ),
                Attribute::new(
                    "Transform3",
                    0,
                    hal::pso::Element {
                        format: hal::format::Format::Rgba32Sfloat,
                        offset: 32,
                    },
                ),
                Attribute::new(
                    "Transform4",
                    0,
                    hal::pso::Element {
                        format: hal::format::Format::Rgba32Sfloat,
                        offset: 48,
                    },
                ),
                // rect
                Attribute::new(
                    "rect",
                    0,
                    hal::pso::Element {
                        format: hal::format::Format::Rgba32Sfloat,
                        offset: 64,
                    },
                ),
                // color
                Attribute::new(
                    "color",
                    0,
                    hal::pso::Element {
                        format: hal::format::Format::Rgba32Sfloat,
                        offset: 80,
                    },
                ),
            ],
            stride: 96,
        }
    }
}
/// Uniform data. Each frame contains one of these.
// NOTE(review): the data is copied into push constants field-by-field via
// `to_column_major_array` in `FrameInFlight::prepare`, not memcpy'd, so the
// exact repr(C) byte layout of euclid's Transform3D does not matter there.
#[derive(Clone, Copy, Debug, Default)]
#[repr(C, align(16))]
pub struct UniformData {
    pub proj: Transform3,
    pub view: Transform3,
}
/// An instance buffer that bounds checks how many instances you can
/// put into it. Is not generic on the instance data type, just holds
/// `QuadData`.
/// TODO: Make it resizeable someday.
///
/// The buffer in a `FrameInFlight`.
///
/// We pack data from multiple `QuadDrawCall`'s together into one `Buffer`
/// but each draw call can have a varying amount of instance data,
/// so we end up with something like:
///
/// ```
/// | Instances1 ... | Instances2 ... | ... | empty space |
/// ```
///
/// Right now we have a fixed size buffer and just limit the number
/// of objects in it. TODO: Eventually someday we will grow the buffer
/// as needed. Maybe shrink it too? Not sure about that.
#[derive(Debug)]
pub struct InstanceBuffer<B>
where
    B: hal::Backend,
{
    /// Capacity, in *number of instances*.
    pub capacity: u64,
    /// Number of instances currently in the buffer
    pub length: u64,
    /// Actual buffer object.
    pub buffer: Escape<Buffer<B>>,
}
impl<B> InstanceBuffer<B>
where
    B: hal::Backend,
{
    /// Create a new empty instance buffer with the given
    /// capacity in *number of instances*
    pub fn new(capacity: u64, factory: &Factory<B>) -> Self {
        let bytes_per_instance = Self::instance_size();
        let buffer_size = capacity * bytes_per_instance;
        let buffer = factory
            .create_buffer(
                BufferInfo {
                    size: buffer_size,
                    // TODO: We probably don't need usage::Uniform here anymore. Confirm!
                    usage: hal::buffer::Usage::UNIFORM | hal::buffer::Usage::VERTEX,
                },
                Dynamic,
            )
            .unwrap();
        Self {
            capacity,
            length: 0,
            buffer,
        }
    }
    /// Resizes the underlying buffer. Does NOT copy the contents
    /// of the old buffer (yet), new buffer is empty.
    ///
    /// `new_capacity` is in *number of instances*, matching `new()`.
    pub fn resize(&mut self, factory: &Factory<B>, new_capacity: u64) {
        let buffer = factory
            .create_buffer(
                BufferInfo {
                    // BUG FIX: `BufferInfo::size` is in bytes; the previous
                    // code passed the instance count directly, under-allocating
                    // by a factor of `instance_size()`.
                    size: new_capacity * Self::instance_size(),
                    // TODO: We probably don't need usage::Uniform here anymore. Confirm!
                    usage: hal::buffer::Usage::UNIFORM | hal::buffer::Usage::VERTEX,
                },
                Dynamic,
            )
            .unwrap();
        let old_buffer = mem::replace(&mut self.buffer, buffer);
        unsafe {
            // SAFETY: the old buffer is no longer referenced once replaced;
            // hand it back to the factory for (eventual) destruction.
            factory.destroy_relevant_buffer(Escape::unescape(old_buffer));
        }
        self.length = 0;
        self.capacity = new_capacity;
    }
    /// Returns the size in bytes of a single instance for this type.
    /// For now, this doesn't change, but it's convenient to have.
    ///
    /// This can't be a const fn yet, 'cause trait bounds.
    /// See https://github.com/rust-lang/rust/issues/57563
    pub fn instance_size() -> u64 {
        mem::size_of::<QuadData>() as u64
    }
    /// Returns the buffer size in bytes, rounded up
    /// to the given alignment.
    /// TODO: Are the alignment requirements for this necessary?
    pub fn buffer_size(&self, align: u64) -> u64 {
        // Round up to `align`; written so a zero-capacity buffer yields 0
        // instead of underflowing (the old `(bytes - 1) / align + 1` form
        // wrapped when bytes == 0).
        (Self::instance_size() * self.capacity + align - 1) / align * align
    }
    /// Returns an offset in bytes, pointing to free space right after
    /// the given number of instances, or None if `idx >= self.capacity`
    pub fn instance_offset(&self, idx: u64) -> Option<u64> {
        if idx >= self.capacity {
            None
        } else {
            Some(idx * Self::instance_size())
        }
    }
    /// Empties the buffer by setting the length to 0.
    /// Capacity remains unchanged.
    pub fn clear(&mut self) {
        self.length = 0;
    }
    /// Copies the instance data in the given slice into the buffer,
    /// starting from the current end of it.
    /// Returns the byte offset at which the slice starts if ok, or Err
    /// if the buffer is not large enough.
    ///
    /// TODO: Better error types. Do bounds checks with assert_dbg!()?
    pub fn add_slice(&mut self, factory: &Factory<B>, instances: &[QuadData]) -> Result<u64, ()> {
        // `>` rather than the old `>=`: filling the buffer *exactly* to
        // capacity is valid and should not be rejected.
        if self.length + (instances.len() as u64) > self.capacity {
            return Err(());
        }
        let offset = self.instance_offset(self.length).ok_or(())?;
        // Vulkan doesn't seem to like zero-size copies very much.
        if !instances.is_empty() {
            unsafe {
                // SAFETY: `offset + slice bytes` fits within the buffer per
                // the capacity check above.
                factory
                    .upload_visible_buffer(&mut self.buffer, offset, instances)
                    .unwrap();
            }
            self.length += instances.len() as u64;
        }
        // BUG FIX: return the start offset as documented; the old code
        // returned the post-insert length, contradicting its own docs
        // (the only caller ignored the value).
        Ok(offset)
    }
    pub fn inner(&self) -> &Buffer<B> {
        &self.buffer
    }
    pub fn inner_mut(&mut self) -> &mut Buffer<B> {
        &mut self.buffer
    }
    /// Explicitly destroys the underlying GPU buffer.
    pub fn dispose(self, factory: &Factory<B>) {
        info!("Destroying instance buffer");
        unsafe {
            // SAFETY: `self` is consumed, so no further references to the
            // buffer exist on the CPU side.
            factory.destroy_relevant_buffer(Escape::unescape(self.buffer));
        }
    }
}
/// The type used for push constants sent to the shaders.
/// This is its own type 'cause there's a couple places we need
/// to use it and it's nice to keep in sync.
// 32 u32 words = two column-major 4x4 f32 matrices (proj then view),
// bit-cast via `f32::to_bits` in `FrameInFlight::prepare`.
type PushConstantsBuffer = [u32; 32];
/// What data we need for each frame in flight.
/// Rendy doesn't do any synchronization for us
/// beyond guarenteeing that when we get a new frame
/// index everything touched by that frame index is
/// now free to reuse.
///
/// We could make different frames share buffers
/// and descriptor sets and such and only change bits
/// that aren't in use from other frames, but that's
/// more complex than I want to get into right now.
/// So basically each `FrameInFlight` contains a whole
/// copy of what writeable data we need to render a frame,
/// and that data gets re-filled each frame.
///
/// When we do want to do that though, I think the simple
/// way would be... maybe create a structure through which
/// a FrameInFlight can be altered and which records if
/// things have actually changed. Actually, the QuadDrawCall
/// might the place to handle that? Hm, having a separate
/// Buffer per draw call might be the way to go too? If
/// the buffer does not change from one draw call to the
/// next, we don't need to re-record its data, just issue
/// the draw call directly with the right PrepareReuse...
/// idk, I'm rambling.
#[derive(Debug)]
struct FrameInFlight<B>
where
    B: hal::Backend,
{
    /// The buffer where we store instance data.
    buffer: InstanceBuffer<B>,
    /// The frame's local copy of uniform data.
    // Refilled from `UniformData` every frame in `prepare()`.
    push_constants: PushConstantsBuffer,
}
impl<B> FrameInFlight<B>
where
    B: hal::Backend,
{
    /// All our descriptor sets use the same layout.
    /// This one! We have an instance buffer, an
    /// image, and a sampler.
    const LAYOUT: &'static [hal::pso::DescriptorSetLayoutBinding] = &[
        // TODO: Can we get rid of this uniform buffer since we use push constants?
        // Doesn't look like it, 'cause we use the buffer for our instance data too.
        hal::pso::DescriptorSetLayoutBinding {
            binding: 0,
            ty: hal::pso::DescriptorType::UniformBuffer,
            count: 1,
            stage_flags: hal::pso::ShaderStageFlags::GRAPHICS,
            immutable_samplers: false,
        },
        // Binding 1: the texture image (written by QuadDrawCall::create_descriptor_set).
        hal::pso::DescriptorSetLayoutBinding {
            binding: 1,
            ty: hal::pso::DescriptorType::SampledImage,
            count: 1,
            stage_flags: hal::pso::ShaderStageFlags::FRAGMENT,
            immutable_samplers: false,
        },
        // Binding 2: the sampler (also written by QuadDrawCall).
        hal::pso::DescriptorSetLayoutBinding {
            binding: 2,
            ty: hal::pso::DescriptorType::Sampler,
            count: 1,
            stage_flags: hal::pso::ShaderStageFlags::FRAGMENT,
            immutable_samplers: false,
        },
    ];
    /// Wraps `LAYOUT` in rendy's `SetLayout` type.
    fn get_descriptor_set_layout() -> SetLayout {
        SetLayout {
            bindings: Self::LAYOUT.to_vec(),
        }
    }
    /// Allocates the frame's instance buffer (MAX_OBJECTS instances) and
    /// zeroed push constants.
    fn new(factory: &mut Factory<B>) -> Self {
        use std::convert::TryInto;
        // TODO: Figure out max length.
        let buffer_count = MAX_OBJECTS; // * draw_calls.len();
        let buffer = InstanceBuffer::new(buffer_count.try_into().unwrap(), factory);
        let ret = Self {
            buffer,
            push_constants: [0; 32],
        };
        ret
    }
    /// This happens before a frame; it should take a LIST of draw calls and take
    /// care of uploading EACH of them into the buffer so they don't clash!
    fn prepare(
        &mut self,
        factory: &Factory<B>,
        uniforms: &UniformData,
        draw_calls: &[QuadDrawCall<B>],
    ) {
        //assert!(draw_calls.len() > 0);
        // Store the uniforms to be shoved into push constants this frame
        // TODO: Be less crude about indexing and such.
        // Words 0..16: projection matrix, bit-cast f32 -> u32.
        for (i, vl) in uniforms
            .proj
            .to_column_major_array()
            .into_iter()
            .enumerate()
        {
            self.push_constants[i] = vl.to_bits();
        }
        // Words 16..32: view matrix.
        for (i, vl) in uniforms
            .view
            .to_column_major_array()
            .into_iter()
            .enumerate()
        {
            self.push_constants[16 + i] = vl.to_bits();
        }
        //println!("Preparing frame-in-flight, {} draw calls, first has {} instances.", draw_calls.len(),
        //draw_calls[0].objects.len());
        // Re-pack every draw call's instance data back-to-back into the buffer.
        self.buffer.clear();
        for draw_call in draw_calls {
            let _offset = self
                .buffer
                .add_slice(factory, &draw_call.objects[..])
                .unwrap();
        }
    }
    /// Draws a list of QuadDrawCall's.
    fn draw(
        &mut self,
        draw_calls: &[QuadDrawCall<B>],
        layout: &B::PipelineLayout,
        encoder: &mut RenderPassEncoder<'_, B>,
    ) {
        //println!("Drawing {} draw calls", draw_calls.len());
        // Running total of instances already consumed; doubles as the
        // per-draw-call offset into the packed instance buffer.
        let mut instance_count: u64 = 0;
        for draw_call in draw_calls {
            // Skip empty draw calls, since buffers of length 0 are invalid
            if draw_call.objects.is_empty() {
                continue;
            }
            //println!("Drawing {:#?}, {:#?}, {}", draw_call, descriptor_set, draw_offset);
            // This is a bit weird, but basically tells the thing where to find the
            // instance data. The stride and such of the instance structure is
            // defined in the `AsVertex` definition.
            //
            // TODO: Per Ralith, "don't rebind the descriptor set for every batch, it sticks
            // around". So we might be able to do it in prepare() instead? Not sure. The
            // Rendy examples do it here.
            //
            // Also descriptor set binding numbers and vertex buffer binding numbers
            // are separate.
            unsafe {
                encoder.bind_graphics_descriptor_sets(
                    layout,
                    0,
                    std::iter::once(draw_call.descriptor_set.raw()),
                    std::iter::empty(),
                );
            }
            // The 0 here is to say which vertex buffer slot we're binding to, more or less...
            // if we were drawing a Mesh as well we would, say, bind that to 0 and
            // bind the instance buffer to 1.
            //
            // But, we are doing mesh-less drawing 'cause it's just quads, and
            // just creating the mesh-equivalent in the shader,
            unsafe {
                encoder.bind_vertex_buffers(
                    0,
                    std::iter::once((
                        self.buffer.inner().raw(),
                        self.buffer.instance_offset(instance_count).unwrap(),
                    )),
                );
            }
            unsafe {
                encoder.push_constants(
                    layout,
                    hal::pso::ShaderStageFlags::ALL,
                    0,
                    &self.push_constants,
                );
            }
            // This count is the *number of instances*. What instance
            // to start at in the buffer is defined by the offset in
            // `bind_vertex_buffers()` above, and the stride/size of an instance
            // is defined in `AsVertex`.
            let instances = 0..(draw_call.objects.len() as u32);
            unsafe {
                encoder.draw(0..6, instances);
            }
            instance_count += draw_call.objects.len() as u64;
        }
    }
    /// Tears down the frame's instance buffer.
    pub fn dispose(self, factory: &Factory<B>) {
        info!("FrameInFlight disposed");
        self.buffer.dispose(factory)
    }
}
/// The data we need for a single draw call, which
/// gets bound to descriptor sets and
///
/// For now we re-bind EVERYTHING even if only certain
/// resources have changed (for example, texture changes
/// and mesh stays the same). Should be easy to check
/// that in the future and make the various things
/// in here `Option`s.
#[derive(Debug)]
pub struct QuadDrawCall<B>
where
    B: hal::Backend,
{
    // Per-quad instance data, uploaded each frame by FrameInFlight::prepare().
    objects: Vec<QuadData>,
    texture: Arc<Texture<B>>,
    descriptor_set: Escape<DescriptorSet<B>>,
    /// We just need the actual config for the sampler 'cause
    /// Rendy's `Factory` can manage a sampler cache itself.
    sampler_info: SamplerDesc,
}
impl<B> QuadDrawCall<B>
where
    B: hal::Backend,
{
    /// Creates an empty draw call for `texture`, allocating and filling
    /// its descriptor set.
    pub fn new(
        texture: Arc<Texture<B>>,
        factory: &Factory<B>,
        layout: &Handle<DescriptorSetLayout<B>>,
    ) -> Self {
        let sampler_info = SamplerDesc::new(Filter::Nearest, WrapMode::Clamp);
        let descriptor_set =
            QuadDrawCall::create_descriptor_set(&*texture, &sampler_info, factory, layout);
        Self {
            objects: vec![],
            texture,
            sampler_info,
            descriptor_set,
        }
    }
    /// Queues one quad for drawing; silently dropped once MAX_OBJECTS is reached.
    pub fn add_quad(&mut self, instance: QuadData) {
        if self.objects.len() < MAX_OBJECTS {
            self.objects.push(instance);
        }
    }
    /// Adds a quad scaled 10x10 at a random position within the given bounds.
    pub fn add_random_object(
        &mut self,
        rng: &mut oorandom::Rand32,
        max_width: f32,
        max_height: f32,
    ) {
        let x = rng.rand_float() * max_width;
        let y = rng.rand_float() * max_height;
        let offset = euclid::Vector3D::new(x, y, 0.0);
        let transform = Transform3::create_scale(10.0, 10.0, 1.0).post_translate(offset);
        let src = Rect::from(euclid::Size2D::new(100.0, 100.0));
        let color = [1.0, 0.0, 1.0, 1.0];
        let instance = QuadData {
            transform: transform.to_column_major_array(),
            rect: [src.origin.x, src.origin.y, src.size.width, src.size.height],
            color,
        };
        self.add_quad(instance);
    }
    /// Writes the texture (binding 1) and sampler (binding 2) into a fresh
    /// descriptor set matching `FrameInFlight::LAYOUT`.
    fn create_descriptor_set(
        texture: &Texture<B>,
        sampler_info: &SamplerDesc,
        factory: &Factory<B>,
        layout: &Handle<DescriptorSetLayout<B>>,
    ) -> Escape<DescriptorSet<B>> {
        // Does this sampler need to stay alive? We pass a
        // reference to it elsewhere in an `unsafe` block...
        // It's cached in the Factory anyway, so I don't think so.
        let sampler = factory
            .get_sampler(sampler_info.clone())
            .expect("Could not get sampler");
        unsafe {
            let set = factory.create_descriptor_set(layout.clone()).unwrap();
            // NOTE(review): the first write goes through factory.device(),
            // the second through the factory itself — presumably equivalent,
            // but worth unifying; confirm against the rendy Factory API.
            factory
                .device()
                .write_descriptor_sets(Some(hal::pso::DescriptorSetWrite {
                    set: set.raw(),
                    binding: 1,
                    array_offset: 0,
                    descriptors: vec![hal::pso::Descriptor::Image(
                        texture.view().raw(),
                        hal::image::Layout::ShaderReadOnlyOptimal,
                    )],
                }));
            factory.write_descriptor_sets(Some(hal::pso::DescriptorSetWrite {
                set: set.raw(),
                binding: 2,
                array_offset: 0,
                descriptors: vec![hal::pso::Descriptor::Sampler(sampler.raw())],
            }));
            set
        }
    }
}
#[derive(Debug)]
pub struct Aux<B: hal::Backend> {
    /// Number of FrameInFlight's.
    /// This is a little bass-ackwards but there's currently no better place to put it;
    /// we get this from the PresentNode but have to build that after building
    /// the QuadRenderGroup, and the QuadRenderGroupDesc is where this is mainly used.
    pub frames: usize,
    /// One entry per texture being batched; consumed by the render group each frame.
    pub draws: Vec<QuadDrawCall<B>>,
    /// Projection/view matrices uploaded as uniforms.
    pub camera: UniformData,
    /// Shader sources; built into a concrete shader set inside `build()`.
    pub shader: rendy::shader::ShaderSetBuilder,
    /// Descriptor set layout shared by all draw calls.
    /// `Option` so `dispose()` can drop it explicitly before the graph goes away.
    layout: Option<Handle<DescriptorSetLayout<B>>>,
}
impl<B> Aux<B>
where
    B: hal::Backend,
{
    /// Explicitly release the GPU-facing resources this struct owns
    /// (draw calls first, then the descriptor set layout).
    ///
    /// For SOME reason, doing this in a Drop impl doesn't work right, it either
    /// doesn't get called at all or gets called at the wrong time. Even when we
    /// call it by hand. So I guess I'm just doing this instead.
    fn dispose(&mut self) {
        self.draws.clear();
        info!("Dropped draw calls");
        // `take()` drops the layout handle in place, leaving `None` behind.
        let _ = self.layout.take();
        info!("Dropped layout");
    }
}
const MAX_OBJECTS: usize = 10_000;
/// Render group describing a graph node that renders quads.
#[derive(Debug)]
pub struct QuadRenderGroup<B>
where
    B: hal::Backend,
{
    // Owned raw backend objects; destroyed by hand in `dispose()`.
    pipeline_layout: B::PipelineLayout,
    graphics_pipeline: B::GraphicsPipeline,
    // One set of per-frame GPU resources for each swapchain image.
    frames_in_flight: Vec<FrameInFlight<B>>,
}
/// Descriptor for simple render group.
#[derive(Debug)]
pub struct QuadRenderGroupDesc {
    // Blend state for each color attachment the pass writes to.
    colors: Vec<hal::pso::ColorBlendDesc>,
}
impl QuadRenderGroupDesc {
    /// Create a descriptor with a single color target using standard
    /// alpha blending and writing all color channels.
    pub fn new() -> Self {
        Self {
            colors: vec![hal::pso::ColorBlendDesc {
                mask: hal::pso::ColorMask::ALL,
                blend: Some(hal::pso::BlendState::ALPHA),
            }],
        }
    }
}

/// `Default` mirrors `new()` so the type works with generic code and
/// struct-update syntax (and satisfies clippy's `new_without_default`).
impl Default for QuadRenderGroupDesc {
    fn default() -> Self {
        Self::new()
    }
}
impl<B> RenderGroupDesc<B, Aux<B>> for QuadRenderGroupDesc
where
    B: hal::Backend,
{
    /// This pass reads/writes no graph-managed buffers.
    fn buffers(&self) -> Vec<rendy::graph::BufferAccess> {
        vec![]
    }
    /// This pass reads/writes no graph-managed images (beyond its attachments).
    fn images(&self) -> Vec<rendy::graph::ImageAccess> {
        vec![]
    }
    /// Number of color attachments this pass writes.
    fn colors(&self) -> usize {
        self.colors.len()
    }
    /// This pass uses a depth attachment.
    fn depth(&self) -> bool {
        true
    }
    /// Build the concrete render group: pipeline layout (descriptor set
    /// layout from `aux` plus one push-constant range), the graphics
    /// pipeline itself, and one `FrameInFlight` per swapchain image.
    fn build<'a>(
        self,
        _ctx: &GraphContext<B>,
        factory: &mut Factory<B>,
        _queue: QueueId,
        aux: &Aux<B>,
        framebuffer_width: u32,
        framebuffer_height: u32,
        subpass: hal::pass::Subpass<'_, B>,
        _buffers: Vec<NodeBuffer>,
        _images: Vec<NodeImage>,
    ) -> Result<Box<dyn RenderGroup<B, Aux<B>>>, hal::pso::CreationError> {
        // Standard less-or-equal depth test with depth writes enabled.
        let depth_stencil = hal::pso::DepthStencilDesc {
            depth: Some(hal::pso::DepthTest {
                fun: hal::pso::Comparison::LessEqual,
                write: true,
            }),
            depth_bounds: false,
            stencil: None,
        };
        let input_assembler_desc = hal::pso::InputAssemblerDesc {
            primitive: hal::pso::Primitive::TriangleList,
            with_adjacency: false,
            restart_index: None,
        };
        // One push-constant range visible to all shader stages.
        let layout_push_constants = vec![(
            hal::pso::ShaderStageFlags::ALL,
            // This size is in BYTES now AUGH
            0..(mem::size_of::<PushConstantsBuffer>() as u32),
        )];
        // All quad data is fed per-instance, not per-vertex.
        let vertices =
            vec![QuadData::vertex().gfx_vertex_input_desc(hal::pso::VertexInputRate::Instance(1))];
        // TODO: Verify this doesn't cause a double-free when combined with aux.layout
        // getting destroyed.
        let desc_set_layout_list = vec![aux.layout.as_ref().unwrap().raw()];
        let pipeline_layout = unsafe {
            factory
                .device()
                .create_pipeline_layout(desc_set_layout_list, layout_push_constants)
        }?;
        let mut vertex_buffers = Vec::new();
        let mut attributes = Vec::new();
        for &(ref elements, stride, rate) in &vertices {
            push_vertex_desc(elements, stride, rate, &mut vertex_buffers, &mut attributes);
        }
        // Fixed viewport/scissor covering the whole framebuffer.
        let rect = hal::pso::Rect {
            x: 0,
            y: 0,
            w: framebuffer_width as i16,
            h: framebuffer_height as i16,
        };
        let mut shader_set = aux.shader.build(factory, Default::default()).unwrap();
        // TODO: Make disposing of the shader set nicer. Either store it, or have a wrapper
        // that disposes it on drop, or something. Would that cause a double-borrow?
        //
        // Actually, think about this more in general, 'cause there's other structures that
        // need similar handling: set_layouts, pipeline layouts, etc.
        let shaders = match shader_set.raw() {
            // NOTE(review): `e` is dropped unused here — worth logging it
            // before mapping to the generic error.
            Err(e) => {
                shader_set.dispose(factory);
                // TODO: Better error type
                return Err(hal::pso::CreationError::Other);
            }
            Ok(s) => s,
        };
        let graphics_pipeline = unsafe {
            factory.device().create_graphics_pipelines(
                Some(hal::pso::GraphicsPipelineDesc {
                    shaders,
                    rasterizer: hal::pso::Rasterizer::FILL,
                    vertex_buffers,
                    attributes,
                    input_assembler: input_assembler_desc,
                    blender: hal::pso::BlendDesc {
                        logic_op: None,
                        targets: self.colors.clone(),
                    },
                    depth_stencil: depth_stencil,
                    multisampling: None,
                    baked_states: hal::pso::BakedStates {
                        viewport: Some(hal::pso::Viewport {
                            rect,
                            depth: 0.0..1.0,
                        }),
                        scissor: Some(rect),
                        blend_color: None,
                        depth_bounds: None,
                    },
                    layout: &pipeline_layout,
                    subpass,
                    flags: hal::pso::PipelineCreationFlags::empty(),
                    parent: hal::pso::BasePipeline::None,
                }),
                None,
            )
        }
        // We submitted one pipeline desc, so take the single result.
        .remove(0)
        // On pipeline-creation failure the shader set is disposed here and
        // `?` returns, so the dispose below does not run twice.
        .map_err(|e| {
            shader_set.dispose(factory);
            e
        })?;
        // Create frames-in-flight
        let mut frames_in_flight = vec![];
        frames_in_flight.extend((0..aux.frames).map(|_| FrameInFlight::new(factory)));
        // Shader modules are no longer needed once the pipeline exists.
        shader_set.dispose(factory);
        Ok(Box::new(QuadRenderGroup::<B> {
            pipeline_layout,
            graphics_pipeline,
            frames_in_flight,
        }))
    }
}
impl<B> RenderGroup<B, Aux<B>> for QuadRenderGroup<B>
where
    B: hal::Backend,
{
    /// Upload this frame's camera uniforms and instance data, then ask
    /// for the draw commands to be re-recorded.
    fn prepare(
        &mut self,
        factory: &Factory<B>,
        _queue: QueueId,
        index: usize,
        _subpass: hal::pass::Subpass<'_, B>,
        aux: &Aux<B>,
    ) -> PrepareResult {
        self.frames_in_flight[index].prepare(factory, &aux.camera, &aux.draws);
        // TODO: Investigate this more...
        // Ooooooh in the example it always used the same draw command buffer 'cause it
        // always did indirect drawing, and just modified the draw command in the data buffer.
        // we're doing direct drawing now so we have to always re-record our drawing
        // command buffers when they change -- and the number of instances always changes
        // in this program, so!
        //PrepareResult::DrawReuse
        PrepareResult::DrawRecord
    }
    /// Record the draw commands for frame `index` into `encoder`.
    fn draw_inline(
        &mut self,
        mut encoder: RenderPassEncoder<'_, B>,
        index: usize,
        _subpass: hal::pass::Subpass<'_, B>,
        aux: &Aux<B>,
    ) {
        encoder.bind_graphics_pipeline(&self.graphics_pipeline);
        self.frames_in_flight[index].draw(&aux.draws, &self.pipeline_layout, &mut encoder);
    }
    /// Destroy the raw backend objects this group owns, in dependency
    /// order: per-frame resources, then pipeline, then its layout.
    fn dispose(self: Box<Self>, factory: &mut Factory<B>, _aux: &Aux<B>) {
        info!("Disposing of QuadRenderGroup");
        unsafe {
            for frame in self.frames_in_flight.into_iter() {
                frame.dispose(factory);
            }
            info!("Disposed frames in flight");
            factory
                .device()
                .destroy_graphics_pipeline(self.graphics_pipeline);
            info!("Destroyed pipeline");
            factory
                .device()
                .destroy_pipeline_layout(self.pipeline_layout);
            info!("Destroyed pipeline layout");
        }
    }
}
/// Add a description of our vertex buffer elements to the given
/// vertex buffer and attribute descriptors. Maybe this can be merged
/// back into QuadRenderGroup::build() ?
fn push_vertex_desc(
    elements: &[hal::pso::Element<hal::format::Format>],
    stride: hal::pso::ElemStride,
    rate: hal::pso::VertexInputRate,
    vertex_buffers: &mut Vec<hal::pso::VertexBufferDesc>,
    attributes: &mut Vec<hal::pso::AttributeDesc>,
) {
    // The new buffer occupies the next free binding slot.
    let binding = vertex_buffers.len() as hal::pso::BufferIndex;
    vertex_buffers.push(hal::pso::VertexBufferDesc {
        binding,
        stride,
        rate,
    });
    // Attribute locations continue from wherever the previous buffer left off.
    let mut next_location = attributes.last().map_or(0, |attr| attr.location + 1);
    attributes.extend(elements.iter().map(|&element| {
        let attr = hal::pso::AttributeDesc {
            location: next_location,
            binding,
            element,
        };
        next_location += 1;
        attr
    }));
}
/// This is how we can load an image and create a new texture.
///
/// Decodes `image_bytes` (an encoded image, e.g. PNG) and uploads it,
/// leaving the texture in `ShaderReadOnlyOptimal` layout ready for
/// fragment-shader sampling.
///
/// Panics if the bytes cannot be decoded or the upload fails.
pub fn make_texture<B>(device: &mut GraphicsDevice<B>, image_bytes: &[u8]) -> Arc<Texture<B>>
where
    B: hal::Backend,
{
    let cursor = std::io::Cursor::new(image_bytes);
    let texture_builder = rendy::texture::image::load_from_image(cursor, Default::default())
        .expect("Could not load texture?");
    let texture = texture_builder
        .build(
            // Final state the image should be transitioned into after upload.
            ImageState {
                queue: device.queue_id,
                stage: hal::pso::PipelineStage::FRAGMENT_SHADER,
                access: hal::image::Access::SHADER_READ,
                layout: hal::image::Layout::ShaderReadOnlyOptimal,
            },
            &mut device.factory,
        )
        .unwrap();
    Arc::new(texture)
}
/// Creates a shader builder from the given raw SPIR-V byte buffers.
/// Alignment and byte-order is handled for you.
///
/// Both shaders use `main` as their entry point.
///
/// Panics if either buffer is not valid SPIR-V.
pub fn load_shaders(vertex_src: &[u8], fragment_src: &[u8]) -> rendy::shader::ShaderSetBuilder {
    use rendy::shader::SpirvShader;
    // `read_spirv` converts the raw bytes into properly aligned u32 words.
    let vert_cursor = io::Cursor::new(vertex_src);
    let vert_words = hal::pso::read_spirv(vert_cursor)
        .expect("Invalid SPIR-V buffer passed to load_shaders one way or another!");
    let vertex = SpirvShader::new(vert_words, hal::pso::ShaderStageFlags::VERTEX, "main");
    let frag_cursor = io::Cursor::new(fragment_src);
    let frag_words = hal::pso::read_spirv(frag_cursor)
        .expect("Invalid SPIR-V buffer passed to load_shaders one way or another!");
    let fragment = SpirvShader::new(frag_words, hal::pso::ShaderStageFlags::FRAGMENT, "main");
    let shader_builder: rendy::shader::ShaderSetBuilder =
        rendy::shader::ShaderSetBuilder::default()
            .with_vertex(&vertex)
            .unwrap()
            .with_fragment(&fragment)
            .unwrap();
    shader_builder
}
/// Load shaders from the given file names.
///
/// # Panics
/// Panics (with the offending path in the message) if either file cannot
/// be read, or if its contents are not valid SPIR-V.
pub fn load_shader_files(
    vertex_file: &str,
    fragment_file: &str,
) -> rendy::shader::ShaderSetBuilder {
    // Include the path in the panic message; a bare `unwrap()` on fs::read
    // gave no clue which of the two files was missing.
    let vertex_src = std::fs::read(vertex_file)
        .unwrap_or_else(|e| panic!("Could not read vertex shader {}: {}", vertex_file, e));
    let fragment_src = std::fs::read(fragment_file)
        .unwrap_or_else(|e| panic!("Could not read fragment shader {}: {}", fragment_file, e));
    load_shaders(vertex_src.as_ref(), fragment_src.as_ref())
}
/*
Exploring API
General idea: render pass -> pipeline -> draw call -> instance
From Viral:
loop {
update_frame_data(); // This one writes into uniform buffers bound to frame level descriptor set
for pipeline in &pipelines {
pipeline.update_pipeline_specific_data(); // This one writes into uniform buffers bound to pipeline level descriptor set
for material in &pipeline.materials {
material.bind_material_descriptors_set();
for mesh in &material.meshes {
for object in &mesh.objects {
object.fill_instancing_data();
}
mesh.draw();
}
}
}
}
*/
/// An initialized graphics device context.
/// Basically just a few things wrapped up together
/// for convenience.
pub struct GraphicsDevice<B>
where
    B: hal::Backend,
{
    // gfx-hal types
    pub factory: Factory<B>,
    /// The single queue everything is submitted on (family 0, queue 0).
    pub queue_id: QueueId,
    pub families: rendy::command::Families<B>,
}
impl<B> GraphicsDevice<B>
where
    B: hal::Backend,
{
    /// Bundle an already-initialized `Factory` and its queue `Families`,
    /// pinning queue 0 of the first family as the queue used for everything.
    pub fn new(factory: Factory<B>, families: rendy::command::Families<B>) -> Self {
        // (Removed an unused `rendy::factory::Config` local that was
        // constructed here and never read.)
        //
        // TODO: HACK suggested by Frizi, just use queue 0 for everything
        // instead of getting it from `graph.node_queue(pass)`.
        // Since we control in our `Config` what families we have
        // and what they have, as long as we only ever use one family
        // (which is probably fine) then we're prooooobably okay with
        // this.
        // TODO: Check and see if this has improved now
        let queue_id = QueueId {
            family: families.family_by_index(0).id(),
            index: 0,
        };
        Self {
            factory,
            families,
            queue_id,
        }
    }
}
/// Build the render graph for `window`: one quad-drawing subpass with a
/// color and a depth attachment, followed by a present node for `surface`.
/// Returns the graph together with the `Aux` data it renders from.
fn build_graph<B>(
    device: &mut GraphicsDevice<B>,
    surface: rendy::wsi::Surface<B>,
    window: &Window,
) -> (rendy::graph::Graph<B, Aux<B>>, Aux<B>)
where
    B: hal::Backend,
{
    use rendy::graph::{present::PresentNode, render::*, GraphBuilder};
    let size = window.inner_size().to_physical(window.hidpi_factor());
    let mut graph_builder = GraphBuilder::<B, Aux<B>>::new();
    let window_kind = hal::image::Kind::D2(size.width as u32, size.height as u32, 1, 1);
    // FIX: use the surface the caller handed us. The old code shadowed the
    // `surface` parameter with a second `create_surface` call, leaking the
    // surface that was passed in.
    let format = device.factory.get_surface_format(&surface);
    let color = graph_builder.create_image(
        window_kind,
        1,
        // Reuse `format` rather than querying the surface a second time.
        format,
        Some(hal::command::ClearValue {
            color: hal::command::ClearColor {
                float32: [0.1, 0.2, 0.3, 1.0],
            },
        }),
    );
    let depth = graph_builder.create_image(
        window_kind,
        1,
        hal::format::Format::D16Unorm,
        Some(hal::command::ClearValue {
            depth_stencil: hal::command::ClearDepthStencil {
                depth: 1.0,
                stencil: 0,
            },
        }),
    );
    let render_group_desc = QuadRenderGroupDesc::new();
    let pass = graph_builder.add_node(
        render_group_desc
            .builder()
            .into_subpass()
            .with_color(color)
            .with_depth_stencil(depth)
            .into_pass(),
    );
    println!("Surface format is {:?}", format);
    let present_builder =
        PresentNode::builder(&device.factory, surface, color).with_dependency(pass);
    // The present node decides how many swapchain images (and thus
    // frames-in-flight) we get; `Aux` needs that number too.
    let frames = present_builder.image_count();
    graph_builder.add_node(present_builder);
    let aux = GraphicsWindowThing::make_aux(device, frames, size.width as f32, size.height as f32);
    let graph = graph_builder
        .with_frames_in_flight(frames)
        .build(&mut device.factory, &mut device.families, &aux)
        .unwrap();
    (graph, aux)
}
/// Owns the winit window and event loop. The graph/device/aux fields are
/// currently commented out because `run()` builds them locally instead.
pub struct GraphicsWindowThing
//<B>
//where
//    B: hal::Backend,
{
    // winit window stuff
    pub window: Window,
    pub event_loop: EventLoop<()>,
    // Graph, gfx device and render targets
    /*
    pub graph: rendy::graph::Graph<B, Aux<B>>,
    pub device: GraphicsDevice<B>,
    // Our stuff
    pub aux: Aux<B>,
    */
}
impl GraphicsWindowThing
//<B>
//where
//    B: hal::Backend,
{
    /// Assemble the `Aux` state the render graph draws from: descriptor
    /// set layout, the heart texture plus one draw call for it, compiled
    /// shaders (paths baked in at compile time), and an ortho camera
    /// covering `width` x `height`.
    pub fn make_aux<B>(
        device: &mut GraphicsDevice<B>,
        frames: u32,
        width: f32,
        height: f32,
    ) -> Aux<B>
    where
        B: hal::Backend,
    {
        // Texture bytes are baked into the binary at compile time.
        let heart_bytes =
            include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/data/heart.png"));
        let layout_set: SetLayout = FrameInFlight::<B>::get_descriptor_set_layout();
        let desc_set_layout = device
            .factory
            .create_descriptor_set_layout(layout_set.bindings)
            .expect("Bogus layout?")
            .into(); // Turn Escape into Handle
        let texture1 = make_texture(device, heart_bytes);
        let draws = vec![QuadDrawCall::new(
            texture1,
            &device.factory,
            &desc_set_layout,
        )];
        let vertex_file = concat!(env!("CARGO_MANIFEST_DIR"), "/src/data/quad.vert.spv");
        let fragment_file = concat!(env!("CARGO_MANIFEST_DIR"), "/src/data/quad.frag.spv");
        /* TODO: Hmm
        let align = device
            .factory
            .physical()
            .limits()
            .min_uniform_buffer_offset_alignment;
        */
        let aux = Aux {
            frames: frames as _,
            draws,
            camera: UniformData {
                // Y-down ortho projection matching window pixel coordinates.
                proj: Transform3::ortho(0.0, width, height, 0.0, -100.0, 100.0),
                view: Transform3::create_translation(0.0, 0.0, 10.0),
            },
            shader: load_shader_files(vertex_file, fragment_file),
            layout: Some(desc_set_layout),
        };
        aux
    }
    /*
    pub fn run() {
        use rendy::graph::{present::PresentNode, render::*, GraphBuilder};
        let size = window
            .get_inner_size()
            .unwrap()
            .to_physical(window.get_hidpi_factor());
        let mut device = GraphicsDevice::<B>::new();
        let mut graph_builder = GraphBuilder::<B, Aux<B>>::new();
        let window_kind = hal::image::Kind::D2(size.width as u32, size.height as u32, 1, 1);
        let surface: rendy::wsi::Surface<B> = device.factory.create_surface(&window).unwrap();
        let format = device.factory.get_surface_format(&surface);
        let color = graph_builder.create_image(
            window_kind,
            1,
            device.factory.get_surface_format(&surface),
            Some(hal::command::ClearValue {
                color: hal::command::ClearColor {
                    float32: [0.1, 0.2, 0.3, 1.0],
                },
            }),
        );
        let depth = graph_builder.create_image(
            window_kind,
            1,
            hal::format::Format::D16Unorm,
            Some(hal::command::ClearValue {
                depth_stencil: hal::command::ClearDepthStencil {
                    depth: 1.0,
                    stencil: 0,
                },
            }),
        );
        let render_group_desc = QuadRenderGroupDesc::new();
        let pass = graph_builder.add_node(
            render_group_desc
                .builder()
                .into_subpass()
                .with_color(color)
                .with_depth_stencil(depth)
                .into_pass(),
        );
        println!("Surface format is {:?}", format);
        let present_builder =
            PresentNode::builder(&device.factory, surface, color).with_dependency(pass);
        let frames = present_builder.image_count();
        graph_builder.add_node(present_builder);
        let aux = Self::make_aux(&mut device, frames, size.width as f32, size.height as f32);
        let graph = graph_builder
            .with_frames_in_flight(frames)
            .build(&mut device.factory, &mut device.families, &aux)
            .unwrap();
        Self {
            window,
            event_loop,
            /*
            graph,
            device,
            aux,
            */
        }
    }
    */
    /// Create the window, build the render graph, and drive the winit
    /// event loop: render every frame, add one random quad per frame,
    /// and tear everything down when the window is closed.
    pub fn run() {
        use std::time;
        let mut frame = 0;
        let mut rng = oorandom::Rand32::new(12345);
        // NOTE(review): `should_close` is never read or written after this —
        // leftover from the commented-out polling loop below.
        let mut should_close = false;
        use rendy::factory::Config;
        let config: Config = Default::default();
        let event_loop = EventLoop::new();
        let window = WindowBuilder::new()
            .with_title("Rendy example")
            .with_inner_size((800, 600).into());
        let rendy = rendy::init::AnyWindowedRendy::init_auto(&config, window, &event_loop).unwrap();
        rendy::with_any_windowed_rendy!((rendy)
            (factory, families, surface, window) => {
                let mut device = GraphicsDevice::new(factory, families);
                let (graph, mut aux) = build_graph(&mut device,surface, &window);
                // Wrapped in Option so the exit path can `take()` it and
                // dispose of it exactly once.
                let mut graph = Some(graph);
                let started = time::Instant::now();
                event_loop.run(move |event, _, control_flow| {
                    *control_flow = ControlFlow::Poll;
                    match event {
                        Event::WindowEvent { event, .. } => match event {
                            WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
                            _ => {}
                        },
                        Event::EventsCleared => {
                            device.factory.maintain(&mut device.families);
                            if let Some(ref mut graph) = graph {
                                graph.run(&mut device.factory, &mut device.families, &aux);
                                frame += 1;
                            }
                            // Grow the scene by one quad per draw call each frame.
                            for draw_call in &mut aux.draws {
                                draw_call.add_random_object(&mut rng, 1024.0, 768.0);
                            }
                        }
                        _ => {}
                    }
                    if *control_flow == ControlFlow::Exit {
                        if let Some(graph) = graph.take() {
                            aux.dispose();
                            graph.dispose(&mut device.factory, &aux);
                        }
                    }
                });
                // NOTE(review): winit's `EventLoop::run` does not return
                // (it is typed `-> !` in this winit generation — confirm),
                // so this FPS report is presumably unreachable.
                let finished = time::Instant::now();
                let dt = finished - started;
                let millis = dt.as_millis() as f64;
                let fps = frame as f64 / (millis / 1000.0);
                println!(
                    "{} frames over {} seconds; {} fps",
                    frame,
                    millis / 1000.0,
                    fps
                );
        });
        // TODO: Someday actually check against MAX_OBJECTS
        /*
        while !should_close {
            for _i in &mut frames {
                self.device.factory.maintain(&mut self.device.families);
                self.event_loop.poll_events(|event| match event {
                    Event::WindowEvent {
                        event: WindowEvent::CloseRequested,
                        ..
                    } => should_close = true,
                    _ => (),
                });
                self.graph.run(
                    &mut self.device.factory,
                    &mut self.device.families,
                    &self.aux,
                );
                // Add another object
                for draw_call in &mut self.aux.draws {
                    draw_call.add_random_object(&mut rng, 1024.0, 768.0);
                }
                if should_close {
                    break;
                }
            }
        }
        */
    }
    /// Currently a no-op; the real per-frame work lives in `run()`.
    pub fn draw(&mut self) {
        /*
        self.device.factory.maintain(&mut self.device.families);
        self.graph.run(
            &mut self.device.factory,
            &mut self.device.families,
            &self.aux,
        );
        */
    }
    /// Currently a no-op; `run()` disposes of the graph and aux itself.
    pub fn dispose(mut self) {
        /*
        // Things maybe not disposed: Texture? DescriptorSet?
        info!("Disposing aux");
        self.aux.dispose();
        info!("Disposing graph");
        self.graph.dispose(&mut self.device.factory, &self.aux);
        */
    }
}
/*
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct DrawParam {
pub dest: Point2,
/*
/// A portion of the drawable to clip, as a fraction of the whole image.
/// Defaults to the whole image `(0,0 to 1,1)` if omitted.
pub src: Rect,
/// The position to draw the graphic expressed as a `Point2`.
pub dest: mint::Point2<f32>,
/// The orientation of the graphic in radians.
pub rotation: f32,
/// The x/y scale factors expressed as a `Vector2`.
pub scale: mint::Vector2<f32>,
/// An offset from the center for transform operations like scale/rotation,
/// with `0,0` meaning the origin and `1,1` meaning the opposite corner from the origin.
/// By default these operations are done from the top-left corner, so to rotate something
/// from the center specify `Point2::new(0.5, 0.5)` here.
pub offset: mint::Point2<f32>,
/// A color to draw the target with.
/// Default: white.
pub color: Color,
*/
}
/// Draws a quad with the given texture.
pub fn draw<B>(
ctx: &mut GraphicsWindowThing, //<B>,
_target: (),
drawable: Arc<Texture<B>>,
param: DrawParam,
) -> Result<(), ()>
where
B: hal::Backend,
{
/// Texture's aren't Eq and so we have a slightly ugly pointer comparison to
/// see if two Arc<Texture> 's point to the same thing.
fn texture_compare<B>(t1: &Arc<Texture<B>>, t2: &Arc<Texture<B>>) -> bool
where
B: hal::Backend,
{
let t1p: *const Texture<B> = t1.as_ref() as *const _;
let t2p: *const Texture<B> = t2.as_ref() as *const _;
t1p == t2p
}
// Here is where we check whether we need to create a new draw call, or just batch
// another item onto the existing one
let latest_draw_call = match ctx.aux.draws.last_mut() {
Some(c) => c,
None => {
let c = QuadDrawCall::new(
drawable.clone(),
&ctx.device.factory,
ctx.aux.layout.as_ref().unwrap(),
);
ctx.aux.draws.push(c);
ctx.aux.draws.last_mut().expect("Should never happen")
}
};
if texture_compare(&latest_draw_call.texture, &drawable) {
let transform = Transform3::create_translation(param.dest.x, param.dest.y, -100.0);
let src = Rect::from(euclid::Size2D::new(100.0, 100.0));
let color = [1.0, 0.0, 1.0, 1.0];
let instance = QuadData {
transform: transform.to_column_major_array(),
rect: [src.origin.x, src.origin.y, src.size.width, src.size.height],
color,
};
latest_draw_call.add_quad(instance);
}
Ok(())
}
*/
|
use crate::CONFIG_PATH;
use crate::{importer::config::Config, Importer};
use log::{debug, error, info};
use std::error::Error;
use std::os::unix::prelude::PermissionsExt;
use std::path::Path;
use crate::{BUFFER_SIZE, SOCKET_PATH};
use std::os::unix::net::UnixListener;
use std::os::unix::net::UnixStream;
use std::{fs, io::prelude::*};
use std::{io, thread};
mod handlers;
/// Pause the current thread for 100 ms between socket polling attempts.
fn sleep() {
    let pause = ::std::time::Duration::from_millis(100);
    thread::sleep(pause)
}
// TODO: Should probably make my own error type
/// Wrap `error` in an `io::Error` whose message is
/// `"<description>: <error>"`.
fn make_error(error: Box<dyn Error>, description: &str) -> Box<io::Error> {
    let message = format!("{}: {}", description, error);
    Box::new(io::Error::new(io::ErrorKind::Other, message))
}
/// Daemon side of the CLI protocol: owns the unix-domain-socket
/// listener that CLI clients connect to.
pub struct Server {
    listener: UnixListener,
}
impl Server {
    /// Bind the unix socket at `SOCKET_PATH` (removing a stale socket file
    /// from a previous run if the first bind fails), make it world-writable
    /// so unprivileged CLI clients can connect, and set it non-blocking so
    /// `accept` can be polled.
    pub fn new() -> io::Result<Server> {
        let listener = match UnixListener::bind(SOCKET_PATH) {
            Ok(listener) => listener,
            Err(_) => {
                // Most likely a stale socket file left by a crashed run.
                debug!("Could not create listener removing and trying again");
                fs::remove_file(SOCKET_PATH)?;
                UnixListener::bind(SOCKET_PATH)?
            }
        };
        fs::set_permissions(SOCKET_PATH, PermissionsExt::from_mode(0o766))?;
        listener
            .set_nonblocking(true)
            .expect("Could not set listener to non_blocking");
        debug!("Created server listener");
        // Field init shorthand (was `listener: listener`).
        Ok(Server { listener })
    }
    /// Little wrapper around importer listen so you can still send messages without a valid importer
    ///
    /// With a valid config this builds the importer and delegates to it;
    /// without one it polls the socket, answers requests with the
    /// importer-less handler, and retries `listen()` after each request
    /// in case the client just fixed the config.
    pub fn listen(&self) -> Result<(), Box<dyn Error>> {
        match Config::from_settings() {
            Ok(config) => {
                let mut importer = match Importer::from_config(config) {
                    Ok(importer) => importer,
                    Err(e) => {
                        return Err(make_error(e, "Could not create importer"));
                    }
                };
                if let Err(e) = importer.setup() {
                    return Err(make_error(e, "Setup failed"));
                }
                if let Err(e) = importer.listen() {
                    return Err(make_error(e, "Could not sync"));
                }
            }
            Err(e) => {
                error!("Could not create config: {}", e);
                loop {
                    if let Ok((stream, _)) = self.listener.accept() {
                        check_messages(stream, |request| get_response_importless(request));
                        // try again
                        return self.listen();
                    }
                    sleep();
                }
            }
        }
        Ok(())
    }
    /// Will listen for messages for 300+ seconds and will then return
    /// (3000 polls with a 100 ms sleep between each).
    pub fn check_messages_for_300(&self, importer: &mut Importer) -> io::Result<()> {
        let mut iter = 0;
        loop {
            if iter > 3000 {
                return Ok(());
            }
            if let Ok((stream, _)) = self.listener.accept() {
                check_messages(stream, |request| get_response(request, importer));
            }
            iter += 1;
            sleep();
        }
    }
}
/// Read one fixed-size, NUL-padded request frame from `stream`, pass the
/// trimmed text to `op`, and write back a fixed-size response frame
/// prefixed with `"O "` (success) or `"E "` (error).
fn check_messages<F>(mut stream: UnixStream, op: F)
where
    F: FnOnce(&str) -> Result<String, String>,
{
    // The protocol uses fixed-size frames: read exactly BUFFER_SIZE bytes.
    let mut buffer = vec![0; BUFFER_SIZE];
    stream
        .read_exact(&mut buffer)
        .expect("Could not read message from cli");
    let request = String::from_utf8(buffer).unwrap();
    // Strip the NUL padding added by the sender (see `raw`).
    let request = request.trim_end_matches("\u{0}");
    info!("Receive from cli: {}", request);
    let response = match op(request) {
        Ok(pos_res) => {
            info!("Ok Response: {}", pos_res);
            format!("O {}", pos_res)
        }
        Err(neg_res) => {
            info!("Error Response: {}", neg_res);
            format!("E {}", neg_res)
        }
    };
    stream
        .write_all(&raw(&response))
        .expect("Could not send response to cli");
    stream.flush().unwrap();
}
/// Encode `response` as a fixed-size protocol frame: the UTF-8 bytes
/// padded with trailing NULs up to `BUFFER_SIZE`.
fn raw(response: &str) -> Vec<u8> {
    let mut frame = response.as_bytes().to_vec();
    frame.resize(BUFFER_SIZE, 0);
    frame
}
/// Handle a CLI request when no valid `Importer` exists yet: only the
/// config-bootstrapping commands (`init`, `config`, `set`) are available.
/// Any other command reports why the daemon is not initialized.
fn get_response_importless(request_raw: &str) -> Result<String, String> {
    let mut request = request_raw.split(" ");
    match request.next() {
        Some(command) => {
            match command {
                // `init <home_path> [repository]`: record the home path and
                // repository, then auto-detect an ssh private key.
                "init" => {
                    if let Some(home_path) = request.next() {
                        if let Err(e) = Config::write("home_path", home_path) {
                            return Err(format!("Could not write home path: {}", e));
                        };
                        if let Some(repository) = request.next() {
                            if let Err(e) = Config::write("repository", repository) {
                                return Err(format!("Could not write repository: {}", e));
                            }
                        }
                        // Prefer an ECDSA key, fall back to RSA.
                        let mut private_key_path = Path::new(home_path).join(".ssh/id_ecdsa");
                        if !private_key_path.exists() {
                            private_key_path = Path::new(home_path).join(".ssh/id_rsa");
                            if !private_key_path.exists() {
                                return Err("Could not find valid ssh key".into());
                            }
                        }
                        if let Err(e) =
                            Config::write("private_key_path", private_key_path.to_str().unwrap())
                        {
                            return Err(format!("Could not write private key path: {}", e));
                        }
                    }
                }
                "config" => match Config::show_raw() {
                    Ok(config) => return Ok(config),
                    Err(e) => return Err(format!("Could not fetch config: {}", e)),
                },
                // `set repo|home|private_key <value>`: write one config key.
                "set" => {
                    if let Some(arg) = request.next() {
                        if arg.eq("repo") {
                            if let Some(repo) = request.next() {
                                if let Err(e) = Config::write("repository", repo) {
                                    // FIX: this used to say "home path".
                                    return Err(format!("Could not write repository: {}", e));
                                }
                            }
                        } else if arg.eq("home") {
                            if let Some(path) = request.next() {
                                if let Err(e) = Config::write("home_path", path) {
                                    return Err(format!("Could not write home path: {}", e));
                                }
                            }
                        } else if arg.eq("private_key") {
                            if let Some(path) = request.next() {
                                if let Err(e) = Config::write("private_key_path", path) {
                                    // FIX: this used to say "home path".
                                    return Err(format!("Could not write private key path: {}", e));
                                }
                            }
                        }
                    }
                }
                _ => {
                    let config_err = match Config::from_settings() {
                        Ok(_) => return Ok("Valid config. Setting up..".to_string()),
                        Err(e) => e,
                    };
                    return Err(
                        format!("Dimport is uninitialized\nInvalid Config: {}\n\nSee the daemon logs and set the correct values using the commands. \nYou can also manually edit the config at `{}` although this is not recommended.", config_err, CONFIG_PATH)
                    );
                }
            };
            // Report whether the config is now complete enough to start.
            let config_message = match Config::from_settings() {
                Ok(_) => "Valid config. Setting up..".to_string(),
                Err(e) => format!("Invalid config: {}", e),
            };
            return Ok(format!(
                "Successfully written to config file.\n\n{}",
                config_message
            ));
        }
        None => return Err("Invalid Command".into()),
    }
}
/// Dispatch a CLI request to the matching `handlers::*` function using the
/// live `importer`. Requests are space-separated: a command word followed
/// by its arguments. Commands whose required argument is missing fall
/// through to the final "Could not find command" error.
fn get_response(request: &str, importer: &mut Importer) -> Result<String, String> {
    let mut request = request.split(" ");
    if let Some(command) = request.next() {
        match command {
            "status" => {
                return handlers::status(importer);
            }
            "config" => {
                return handlers::config(importer);
            }
            "sync" => {
                return handlers::sync(importer);
            }
            "set" => {
                if let Some(arg) = request.next() {
                    if arg.eq("repo") {
                        if let Some(repo) = request.next() {
                            return handlers::set_repository(repo, importer);
                        }
                    } else if arg.eq("home") {
                        if let Some(home) = request.next() {
                            return handlers::set_home(home, importer);
                        }
                    } else if arg.eq("private_key") {
                        if let Some(path) = request.next() {
                            return handlers::set_private_key(path, importer);
                        }
                    }
                }
            }
            "ignore" => {
                if let Some(arg) = request.next() {
                    if arg.eq("all") {
                        return handlers::ignore_all(importer);
                    } else {
                        // Anything other than "all" is treated as a regex.
                        return handlers::ignore_regex(arg, importer);
                    }
                }
            }
            "restore" => {
                if let Some(arg) = request.next() {
                    return handlers::restore(arg, importer);
                }
            }
            "add" => {
                if let Some(arg) = request.next() {
                    return handlers::add(arg, importer);
                }
            }
            "pick" => {
                if let Some(arg) = request.next() {
                    return handlers::pick(arg, importer);
                }
            }
            "unpick" => {
                if let Some(arg) = request.next() {
                    return handlers::unpick(arg, importer);
                }
            }
            "save" => {
                if let Some(arg) = request.next() {
                    // Everything after `save` becomes the description.
                    let description =
                        format!("{} {}", arg, request.collect::<Vec<&str>>().join(" "));
                    return handlers::save(Some(&description), importer);
                } else {
                    return handlers::save(None, importer);
                }
            }
            _ => return Err("Invalid command".into()),
        }
    } else {
        return Err("Empty command".into());
    }
    // Reached when a known command was given without its required argument.
    return Err("Could not find command".into());
}
|
/// Reaction contains one reaction (emoji response) on an issue or comment.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Reaction {
    pub content: Option<String>,
    pub created_at: Option<String>,
    pub user: Option<crate::user::User>,
}
impl Reaction {
    /// Create a builder for this object.
    #[inline]
    pub fn builder() -> ReactionBuilder {
        ReactionBuilder {
            body: Default::default(),
        }
    }
    /// Start a `GET` request builder for the reactions on a comment.
    /// The phantom `Missing*` parameters force `owner`, `repo` and `id`
    /// to be set before the request can be sent.
    #[inline]
    pub fn issue_get_comment_reactions() -> ReactionGetBuilder<crate::generics::MissingOwner, crate::generics::MissingRepo, crate::generics::MissingId> {
        ReactionGetBuilder {
            inner: Default::default(),
            _param_owner: core::marker::PhantomData,
            _param_repo: core::marker::PhantomData,
            _param_id: core::marker::PhantomData,
        }
    }
    /// Start a `GET` request builder for the reactions on an issue.
    /// Requires `owner`, `repo` and `index` before it becomes sendable.
    #[inline]
    pub fn issue_get_issue_reactions() -> ReactionGetBuilder1<crate::generics::MissingOwner, crate::generics::MissingRepo, crate::generics::MissingIndex> {
        ReactionGetBuilder1 {
            inner: Default::default(),
            _param_owner: core::marker::PhantomData,
            _param_repo: core::marker::PhantomData,
            _param_index: core::marker::PhantomData,
        }
    }
}
/// Finish the builder by surrendering its accumulated body.
/// Implemented as `From` rather than a hand-written `Into`: the standard
/// blanket impl still gives callers `builder.into()` for free, and `From`
/// additionally enables `Reaction::from(builder)`.
impl From<ReactionBuilder> for Reaction {
    fn from(builder: ReactionBuilder) -> Self {
        builder.body
    }
}
/// Builder for [`Reaction`](./struct.Reaction.html) object.
#[derive(Debug, Clone)]
pub struct ReactionBuilder {
    // The object under construction; surrendered on conversion to `Reaction`.
    body: self::Reaction,
}
impl ReactionBuilder {
    /// Set the reaction content (the emoji/shortcode string).
    #[inline]
    pub fn content(mut self, value: impl Into<String>) -> Self {
        self.body.content = Some(value.into());
        self
    }
    /// Set the creation timestamp.
    #[inline]
    pub fn created_at(mut self, value: impl Into<String>) -> Self {
        self.body.created_at = Some(value.into());
        self
    }
    /// Set the user who reacted.
    #[inline]
    pub fn user(mut self, value: crate::user::User) -> Self {
        // `value` already has the field's exact type; the generated
        // `value.into()` was a no-op conversion (clippy `useless_conversion`).
        self.body.user = Some(value);
        self
    }
}
/// Builder created by [`Reaction::issue_get_comment_reactions`](./struct.Reaction.html#method.issue_get_comment_reactions) method for a `GET` operation associated with `Reaction`.
///
/// The three phantom type parameters track at compile time whether
/// `owner`, `repo` and `id` have been supplied; only the fully-specified
/// builder implements `Sendable`. `repr(transparent)` guarantees every
/// instantiation has the layout of the inner container, which is what
/// makes the setters' `transmute` calls sound.
#[repr(transparent)]
#[derive(Debug, Clone)]
pub struct ReactionGetBuilder<Owner, Repo, Id> {
    inner: ReactionGetBuilderContainer,
    _param_owner: core::marker::PhantomData<Owner>,
    _param_repo: core::marker::PhantomData<Repo>,
    _param_id: core::marker::PhantomData<Id>,
}
/// Runtime storage for `ReactionGetBuilder`'s path parameters; each is
/// `None` until the corresponding typed setter runs.
#[derive(Debug, Default, Clone)]
struct ReactionGetBuilderContainer {
    param_owner: Option<String>,
    param_repo: Option<String>,
    param_id: Option<i64>,
}
impl<Owner, Repo, Id> ReactionGetBuilder<Owner, Repo, Id> {
    /// owner of the repo
    #[inline]
    pub fn owner(mut self, value: impl Into<String>) -> ReactionGetBuilder<crate::generics::OwnerExists, Repo, Id> {
        self.inner.param_owner = Some(value.into());
        // SAFETY: only the zero-sized PhantomData type parameters change;
        // `#[repr(transparent)]` makes both types layout-identical to
        // `ReactionGetBuilderContainer`.
        unsafe { std::mem::transmute(self) }
    }
    /// name of the repo
    #[inline]
    pub fn repo(mut self, value: impl Into<String>) -> ReactionGetBuilder<Owner, crate::generics::RepoExists, Id> {
        self.inner.param_repo = Some(value.into());
        // SAFETY: same as `owner` — phantom-only type change, identical layout.
        unsafe { std::mem::transmute(self) }
    }
    /// id of the comment to edit
    #[inline]
    pub fn id(mut self, value: impl Into<i64>) -> ReactionGetBuilder<Owner, Repo, crate::generics::IdExists> {
        self.inner.param_id = Some(value.into());
        // SAFETY: same as `owner` — phantom-only type change, identical layout.
        unsafe { std::mem::transmute(self) }
    }
}
/// Only the fully-parameterized builder (owner, repo and id all set) is
/// sendable, so the `expect`s in `rel_path` cannot fire through this API.
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for ReactionGetBuilder<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::IdExists> {
    type Output = Vec<Reaction>;
    const METHOD: http::Method = http::Method::GET;
    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        format!("/repos/{owner}/{repo}/issues/comments/{id}/reactions", owner=self.inner.param_owner.as_ref().expect("missing parameter owner?"), repo=self.inner.param_repo.as_ref().expect("missing parameter repo?"), id=self.inner.param_id.as_ref().expect("missing parameter id?")).into()
    }
}
/// Typed accessors for response headers of the comment-reactions call.
impl crate::client::ResponseWrapper<Vec<Reaction>, ReactionGetBuilder<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::IdExists>> {
    /// Value of the `message` response header, if present.
    /// (`parse::<String>()` is infallible here; lossy UTF-8 is accepted.)
    #[inline]
    pub fn message(&self) -> Option<String> {
        self.headers.get("message").and_then(|v| String::from_utf8_lossy(v.as_ref()).parse().ok())
    }
    /// Value of the `url` response header, if present.
    #[inline]
    pub fn url(&self) -> Option<String> {
        self.headers.get("url").and_then(|v| String::from_utf8_lossy(v.as_ref()).parse().ok())
    }
}
/// Builder created by [`Reaction::issue_get_issue_reactions`](./struct.Reaction.html#method.issue_get_issue_reactions) method for a `GET` operation associated with `Reaction`.
///
/// Phantom parameters track whether `owner`, `repo` and `index` are set;
/// `repr(transparent)` makes the setters' `transmute`-based type-state
/// transitions sound (layout is always that of the inner container).
#[repr(transparent)]
#[derive(Debug, Clone)]
pub struct ReactionGetBuilder1<Owner, Repo, Index> {
    inner: ReactionGetBuilder1Container,
    _param_owner: core::marker::PhantomData<Owner>,
    _param_repo: core::marker::PhantomData<Repo>,
    _param_index: core::marker::PhantomData<Index>,
}
/// Runtime storage for `ReactionGetBuilder1`'s parameters. `page` and
/// `limit` are optional query parameters; the rest are path parameters.
#[derive(Debug, Default, Clone)]
struct ReactionGetBuilder1Container {
    param_owner: Option<String>,
    param_repo: Option<String>,
    param_index: Option<i64>,
    param_page: Option<i64>,
    param_limit: Option<i64>,
}
impl<Owner, Repo, Index> ReactionGetBuilder1<Owner, Repo, Index> {
/// owner of the repo
///
/// Required path parameter; advances the `Owner` typestate marker.
#[inline]
pub fn owner(mut self, value: impl Into<String>) -> ReactionGetBuilder1<crate::generics::OwnerExists, Repo, Index> {
self.inner.param_owner = Some(value.into());
// Only the PhantomData markers change; the struct is #[repr(transparent)]
// over its container, so this transmute is a layout no-op.
unsafe { std::mem::transmute(self) }
}
/// name of the repo
///
/// Required path parameter; advances the `Repo` typestate marker.
#[inline]
pub fn repo(mut self, value: impl Into<String>) -> ReactionGetBuilder1<Owner, crate::generics::RepoExists, Index> {
self.inner.param_repo = Some(value.into());
unsafe { std::mem::transmute(self) }
}
/// index of the issue
///
/// Required path parameter; advances the `Index` typestate marker.
#[inline]
pub fn index(mut self, value: impl Into<i64>) -> ReactionGetBuilder1<Owner, Repo, crate::generics::IndexExists> {
self.inner.param_index = Some(value.into());
unsafe { std::mem::transmute(self) }
}
/// page number of results to return (1-based)
///
/// Optional query parameter — no typestate change, returns `Self`.
#[inline]
pub fn page(mut self, value: impl Into<i64>) -> Self {
self.inner.param_page = Some(value.into());
self
}
/// page size of results
///
/// Optional query parameter — no typestate change, returns `Self`.
#[inline]
pub fn limit(mut self, value: impl Into<i64>) -> Self {
self.inner.param_limit = Some(value.into());
self
}
}
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for ReactionGetBuilder1<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::IndexExists> {
    type Output = Vec<Reaction>;
    const METHOD: http::Method = http::Method::GET;
    /// Builds the request path from the three required path parameters.
    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        let owner = self.inner.param_owner.as_ref().expect("missing parameter owner?");
        let repo = self.inner.param_repo.as_ref().expect("missing parameter repo?");
        let index = self.inner.param_index.as_ref().expect("missing parameter index?");
        format!("/repos/{owner}/{repo}/issues/{index}/reactions", owner=owner, repo=repo, index=index).into()
    }
    /// Attaches the optional `page`/`limit` query parameters.
    fn modify(&self, req: Client::Request) -> Result<Client::Request, crate::client::ApiError<Client::Response>> {
        use crate::client::Request;
        let page = self.inner.param_page.as_ref().map(std::string::ToString::to_string);
        let limit = self.inner.param_limit.as_ref().map(std::string::ToString::to_string);
        Ok(req.query(&[("page", page), ("limit", limit)]))
    }
}
impl crate::client::ResponseWrapper<Vec<Reaction>, ReactionGetBuilder1<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::IndexExists>> {
    /// Value of the `message` response header, if present and parseable.
    #[inline]
    pub fn message(&self) -> Option<String> {
        match self.headers.get("message") {
            Some(raw) => String::from_utf8_lossy(raw.as_ref()).parse().ok(),
            None => None,
        }
    }
    /// Value of the `url` response header, if present and parseable.
    #[inline]
    pub fn url(&self) -> Option<String> {
        match self.headers.get("url") {
            Some(raw) => String::from_utf8_lossy(raw.as_ref()).parse().ok(),
            None => None,
        }
    }
}
|
use std::path::PathBuf;
use structopt::StructOpt;
use super::VarsFormat;
#[derive(Debug, StructOpt)]
#[structopt(
name = "kay",
about = "replace ${...} expressions in text",
rename_all = "kebab-case"
)]
pub struct Opt {
#[structopt(short = "i", long = "--input-file", parse(from_os_str))]
pub input_file: Option<PathBuf>,
#[structopt(short = "v", long = "--vars-file", parse(from_os_str))]
pub vars_file: Option<PathBuf>,
#[structopt(long = "--vars-format")]
pub vars_format: Option<VarsFormat>,
#[structopt(short = "o", long = "--output-file", parse(from_os_str))]
pub output_file: Option<PathBuf>,
} |
extern crate sdl2;
extern crate rand;
extern crate libc;
extern crate time;
use sdl2::sys;
use sdl2::event::{Event};
use sdl2::keyboard::Keycode;
use time::PreciseTime;
use std::time::Duration;
use std::cmp;
use rand::Rng;
// // Misc
/*
let a = Point {x: movement.x2 - movement.x1, y: movement.y2 - movement.y1};
'wall_check: for wall in walls.iter() {
let b = Point {x: wall.line.x1 - wall.line.x2, y: wall.line.y1 - wall.line.y2};
let c = Point {x: movement.x1 - wall.line.x1, y: movement.y1 - wall.line.y1};
let al_nu = b.y*c.x-b.x*c.y;
let al_de = a.y*b.x-a.x*b.y;
let be_nu = c.y*a.x-c.x*a.y;
if al_de == 0 {
continue 'wall_check;
}
let al = al_nu as f32/al_de as f32;
if al < 0.0 || al > 1.0 {
continue 'wall_check;
}
let be = be_nu as f32/al_de as f32;
if be < 0.0 || be > 1.0 {
continue 'wall_check;
}
let p_col = Point {x: movement.x1 + (al*a.x as f32) as i32, y: movement.y1 + (al*a.y as f32) as i32};
}
*/
// // Constants
// Rendering
// Window size in physical pixels.
const WIDTH: u32 = 1280;
const HEIGHT: u32 = 720;
// Off-screen pixel-buffer size; SDL_RenderCopy with NULL rects stretches it
// to fill the window on present.
const TEX_W: i32 = 320;
const TEX_H: i32 = 180;
// Game
// How long a spawned field function contributes to the field, in milliseconds
// (t/t0 are measured with num_milliseconds()).
const FUNCTION_LIFETIME: i64 = 10000;
const COLOR_GRAD: [u8; 3*256] = [
12, 7, 135,
16, 7, 136,
19, 6, 137,
22, 6, 138,
24, 6, 140,
27, 6, 141,
29, 6, 142,
31, 5, 143,
33, 5, 144,
35, 5, 145,
38, 5, 146,
40, 5, 146,
42, 5, 147,
43, 5, 148,
45, 4, 149,
47, 4, 150,
49, 4, 151,
51, 4, 151,
53, 4, 152,
54, 4, 153,
56, 4, 154,
58, 4, 154,
60, 3, 155,
61, 3, 156,
63, 3, 156,
65, 3, 157,
66, 3, 158,
68, 3, 158,
70, 3, 159,
71, 2, 160,
73, 2, 160,
75, 2, 161,
76, 2, 161,
78, 2, 162,
80, 2, 162,
81, 1, 163,
83, 1, 163,
84, 1, 164,
86, 1, 164,
88, 1, 165,
89, 1, 165,
91, 0, 165,
92, 0, 166,
94, 0, 166,
95, 0, 166,
97, 0, 167,
99, 0, 167,
100, 0, 167,
102, 0, 167,
103, 0, 168,
105, 0, 168,
106, 0, 168,
108, 0, 168,
110, 0, 168,
111, 0, 168,
113, 0, 168,
114, 0, 169,
116, 0, 169,
117, 0, 169,
119, 1, 168,
120, 1, 168,
122, 1, 168,
123, 2, 168,
125, 2, 168,
126, 3, 168,
128, 3, 168,
129, 4, 167,
131, 4, 167,
132, 5, 167,
134, 6, 167,
135, 7, 166,
136, 7, 166,
138, 8, 166,
139, 9, 165,
141, 11, 165,
142, 12, 164,
144, 13, 164,
145, 14, 163,
146, 15, 163,
148, 16, 162,
149, 17, 161,
150, 18, 161,
152, 19, 160,
153, 20, 160,
155, 21, 159,
156, 23, 158,
157, 24, 157,
158, 25, 157,
160, 26, 156,
161, 27, 155,
162, 28, 154,
164, 29, 154,
165, 30, 153,
166, 32, 152,
167, 33, 151,
169, 34, 150,
170, 35, 149,
171, 36, 149,
172, 37, 148,
173, 38, 147,
175, 40, 146,
176, 41, 145,
177, 42, 144,
178, 43, 143,
179, 44, 142,
180, 45, 141,
181, 46, 140,
183, 47, 139,
184, 49, 138,
185, 50, 137,
186, 51, 137,
187, 52, 136,
188, 53, 135,
189, 54, 134,
190, 55, 133,
191, 57, 132,
192, 58, 131,
193, 59, 130,
194, 60, 129,
195, 61, 128,
196, 62, 127,
197, 63, 126,
198, 64, 125,
199, 66, 124,
200, 67, 123,
201, 68, 122,
202, 69, 122,
203, 70, 121,
204, 71, 120,
205, 72, 119,
206, 73, 118,
207, 75, 117,
208, 76, 116,
208, 77, 115,
209, 78, 114,
210, 79, 113,
211, 80, 112,
212, 81, 112,
213, 83, 111,
214, 84, 110,
215, 85, 109,
215, 86, 108,
216, 87, 107,
217, 88, 106,
218, 89, 105,
219, 91, 105,
220, 92, 104,
220, 93, 103,
221, 94, 102,
222, 95, 101,
223, 96, 100,
224, 98, 99,
224, 99, 98,
225, 100, 98,
226, 101, 97,
227, 102, 96,
227, 104, 95,
228, 105, 94,
229, 106, 93,
230, 107, 92,
230, 108, 92,
231, 110, 91,
232, 111, 90,
232, 112, 89,
233, 113, 88,
234, 114, 87,
235, 116, 86,
235, 117, 86,
236, 118, 85,
237, 119, 84,
237, 121, 83,
238, 122, 82,
238, 123, 81,
239, 124, 80,
240, 126, 80,
240, 127, 79,
241, 128, 78,
241, 129, 77,
242, 131, 76,
242, 132, 75,
243, 133, 74,
244, 135, 73,
244, 136, 73,
245, 137, 72,
245, 139, 71,
246, 140, 70,
246, 141, 69,
247, 143, 68,
247, 144, 67,
247, 145, 67,
248, 147, 66,
248, 148, 65,
249, 149, 64,
249, 151, 63,
249, 152, 62,
250, 154, 61,
250, 155, 60,
251, 156, 60,
251, 158, 59,
251, 159, 58,
251, 161, 57,
252, 162, 56,
252, 164, 55,
252, 165, 54,
252, 166, 54,
253, 168, 53,
253, 169, 52,
253, 171, 51,
253, 172, 50,
253, 174, 49,
254, 175, 49,
254, 177, 48,
254, 178, 47,
254, 180, 46,
254, 181, 46,
254, 183, 45,
254, 185, 44,
254, 186, 43,
254, 188, 43,
254, 189, 42,
254, 191, 41,
254, 192, 41,
254, 194, 40,
254, 195, 40,
254, 197, 39,
254, 199, 39,
253, 200, 38,
253, 202, 38,
253, 203, 37,
253, 205, 37,
253, 207, 37,
252, 208, 36,
252, 210, 36,
252, 212, 36,
251, 213, 36,
251, 215, 36,
251, 217, 36,
250, 218, 36,
250, 220, 36,
249, 222, 36,
249, 223, 36,
248, 225, 37,
248, 227, 37,
247, 229, 37,
247, 230, 37,
246, 232, 38,
246, 234, 38,
245, 235, 38,
244, 237, 39,
244, 239, 39,
243, 241, 39,
242, 242, 38,
242, 244, 38,
241, 246, 37,
241, 247, 36,
240, 249, 33
];
// Wave/drift propagation speed in pixels per millisecond (used as C * t1
// with t1 in ms inside FieldFunction::func).
const C: f32 = 0.1;
// // Utility Functions
// Fast approximation of e^x: computes (1 + x/256)^256 with 8 repeated squarings.
fn fexp8(x: f32) -> f32 {
    (0..8).fold(1.0 + x / 256.0, |acc, _| acc * acc)
}
// Fast approximation of e^x: computes (1 + x/65536)^65536 with 16 repeated squarings.
//
// BUG FIX: the divisor was 256 (copied from fexp8), but 16 squarings raise the
// base to the 2^16 = 65536 power, so the old code approximated e^(256*x)
// instead of e^x. With the correct divisor this is a higher-precision variant
// of fexp8.
fn fexp16(x: f32) -> f32 {
    (0..16).fold(1.0 + x / 65536.0, |acc, _| acc * acc)
}
// Fast square root via the identity sqrt(x) = e^(ln(x)/2), using the fexp8
// exponential approximation.
fn fsqrt8(x: f32) -> f32 {
    fexp8(x.ln() * 0.5)
}
// Maps a field value in (0, 50) onto the 256-entry COLOR_GRAD palette and
// returns the pixel as [a, b, g, r] (alpha fixed at 0).
//
// BUG FIX: values >= 50 previously kept index 0 and so rendered with the
// *darkest* palette entry; they now saturate at the brightest entry (255).
// Non-positive (and NaN) values still map to index 0.
fn color_from_value(val: f32) -> [u8; 4] {
    let i = if val >= 50.0 {
        255
    } else if val > 0.0 {
        (256.0 * (val / 50.0)) as usize
    } else {
        0
    };
    // Palette entries are stored as r, g, b triples; emit a, b, g, r.
    [0, COLOR_GRAD[i * 3 + 2], COLOR_GRAD[i * 3 + 1], COLOR_GRAD[i * 3]]
}
// // Traits
// Something that can rasterize itself into the TEX_W x TEX_H pixel buffer
// (bytes ordered a, b, g, r per the `col` field comments below).
pub trait Draw {
// Draws into `tex` relative to the camera; implementors clip to the buffer.
fn draw(&self, tex: &mut [u8; (TEX_H*TEX_W*4) as usize], cam: &Camera);
// True when any part of the object may be visible through `cam`'s viewport.
fn on_screen(&self, cam: &Camera) -> bool;
}
// Something that can be repositioned in world coordinates.
pub trait Move {
// Translate by a relative offset.
fn move_amount(&mut self, dx: i32, dy: i32);
// Move to an absolute world position.
fn move_to(&mut self, x: i32, y: i32);
}
// Anything that contributes collision line segments.
pub trait Walled {
fn get_lines(&self) -> Vec<Line>;
}
// A scalar field sampled per pixel: value at world (x, y) at time t (ms).
pub trait Function {
fn func(&self, x: i32, y: i32, t: i64) -> f32;
}
// // "Objects"
// An axis-aligned rectangle in world coordinates (top-left corner + size).
pub struct Square {
x: i32,
y: i32,
w: i32,
h: i32
}
// A rectangle filled with a single solid color.
pub struct ColorSquare {
sqr: Square,
col: [u8; 4] // a, b, g, r
}
// A rectangle with per-pixel image data.
// NOTE(review): Sprite rendering is not implemented — Draw for Texture
// falls through a catch-all arm for this variant.
pub struct Sprite {
sqr: Square,
data: Vec<u8>
}
// Any drawable representation that can be attached to an agent.
pub enum Texture {
Square(ColorSquare),
Sprite(Sprite),
Line(ColorLine)
}
#[derive(Copy, Clone)]
pub struct Point {
x: i32,
y: i32
}
// A segment from (x1, y1) to (x2, y2) in world coordinates.
#[derive(Copy, Clone)]
pub struct Line {
x1: i32,
y1: i32,
x2: i32,
y2: i32
}
pub struct ColorLine {
line: Line,
col: [u8; 4] // a, b, g, r
}
pub struct ColorPoint {
point: Point,
col: [u8; 4] // a, b, g, r
}
// A single collidable segment.
pub struct Wall {
line: Line
}
// A chain of segments.
// NOTE(review): Walled for EnvObject ignores these lines via a catch-all arm.
pub struct Polygon {
lines: Vec<Line>
}
// Collision proxy for an agent: a circle of radius r centred at offset
// (dx, dy) from the agent's position.
pub struct Mesh {
dx: i32,
dy: i32,
r: i32
}
pub enum EnvObject {
Wall(Wall),
Polygon(Polygon)
}
// NOTE(review): unused in the visible code — main() keeps a bare Vec<EnvObject>.
pub struct Environment {
walls: Vec<EnvObject>
}
// A movable entity: position, velocity (vx, vy) in pixels per millisecond
// (multiplied by dt in ms in Player::move_player), drawable texture and
// collision mesh.
struct Agent {
x: i32,
y: i32,
vx: f32,
vy: f32,
speed: f32,
tex: Texture,
mesh: Mesh
}
// Player movement states. NOTE(review): not yet consumed by the visible code.
pub enum State {
Dashing,
Sliding,
Jumping,
Stopping,
Static
}
struct Player {
agent: Agent,
states: Vec<State>
}
// Top-left corner of the visible TEX_W x TEX_H viewport in world coordinates.
pub struct Camera {
x: i32,
y: i32,
}
// Common origin data for a field function: spawn position and spawn time (ms).
pub struct FunctionStart {
x0: i32,
y0: i32,
t0: i64
}
// A drifting point source: origin plus drift direction (dx, dy).
pub struct PointStart {
f_start: FunctionStart,
dx: f32,
dy: f32
}
pub enum FieldFunction {
PointFunc(PointStart),
CircleWave(FunctionStart)
}
// Impl
impl Agent {
    // Moves the agent's mesh centre toward (x, y), stepping along the path in
    // increments of just under the mesh radius and pushing back out of any
    // wall line that comes closer than the radius. Returns true when a
    // collision occurred.
    fn move_agent_to(&mut self, x: i32, y: i32, walls: &Vec<EnvObject>) -> bool {
        let mut coll = false;
        // Path of the mesh centre (agent position offset by the mesh offset).
        let movement = Line {x1: self.x + self.mesh.dx, y1: self.y + self.mesh.dy, x2: x + self.mesh.dx, y2: y + self.mesh.dy};
        let mut l = movement.lenght();
        let p_end = Point {x: movement.x2, y: movement.y2};
        let mut lines: Vec<Line> = Vec::new();
        let mn = movement.direction_vector();
        // Pre-filter: keep only wall lines whose facing opposes the movement.
        for wall in walls.iter() {
            for line in wall.get_lines().iter() {
                if line.in_reach(movement) {
                    lines.push(*line);
                }
            }
        }
        let mut part_m = Point {x: movement.x1, y: movement.y1};
        'steps: while l > 0.0 {
            // Advance by slightly less than the mesh radius so no wall can be
            // stepped over; snap to the endpoint on the final step.
            if l > self.mesh.r as f32 {
                part_m.x += (mn[0] * (self.mesh.r - 2) as f32) as i32;
                part_m.y += (mn[1] * (self.mesh.r - 2) as f32) as i32;
            } else {
                part_m = p_end;
            }
            'wall_check: for line in lines.iter() {
                let d = line.distance_to_point(&part_m);
                if d < self.mesh.r as f32 {
                    // Penetrating: push the centre back out along the wall
                    // normal, rounded away from the wall by 1.5px of slack.
                    let n = line.normal_vector();
                    let dx = (self.mesh.r as f32 - d)*n[0];
                    let dy = (self.mesh.r as f32 - d)*n[1];
                    if dx > 0.0 { part_m.x += (dx + 1.5) as i32; }
                    else { part_m.x += (dx - 1.5) as i32; }
                    // BUG FIX: the y push-out previously tested `dx > 0.0`
                    // (copy-paste from the x branch above), rounding y in the
                    // wrong direction whenever dx and dy differed in sign.
                    if dy > 0.0 { part_m.y += (dy + 1.5) as i32; }
                    else { part_m.y += (dy - 1.5) as i32; }
                    coll = true;
                    break 'steps;
                } else {
                    continue 'wall_check;
                }
            }
            // NOTE(review): remaining length is reduced by r/2 although the
            // step above advances by r-2 — intentional overlap? Confirm.
            l -= self.mesh.r as f32 * 0.5;
        }
        // Convert the mesh-centre position back to the agent origin and keep
        // the texture in sync.
        self.x = part_m.x - self.mesh.dx;
        self.y = part_m.y - self.mesh.dy;
        self.tex.move_to(self.x, self.y);
        coll
    }
    // Relative variant of move_agent_to.
    fn move_agent_amount(&mut self, dx: i32, dy: i32, walls: &Vec<EnvObject>) -> bool {
        self.move_agent_to(self.x + dx, self.y + dy, walls)
    }
}
impl Player {
    // Advances the player by velocity * dt (dt in milliseconds), resolving
    // collisions against `walls` via the agent's collision stepping.
    fn move_player(&mut self, dt: i64, walls: &Vec<EnvObject>) {
        let step_x = (self.agent.vx * dt as f32) as i32;
        let step_y = (self.agent.vy * dt as f32) as i32;
        self.agent.move_agent_amount(step_x, step_y, walls);
    }
}
impl Point {
    // True when the point lies strictly inside the camera's TEX_W x TEX_H
    // viewport (points exactly on the border count as off-screen).
    fn on_screen(&self, cam: &Camera) -> bool {
        let inside_x = self.x > cam.x && self.x < cam.x + TEX_W;
        let inside_y = self.y > cam.y && self.y < cam.y + TEX_H;
        inside_x && inside_y
    }
}
impl Line {
    // Returns a copy of this segment with its endpoints swapped.
    fn swap_points(&self) -> Line {
        Line { x1: self.x2, y1: self.y2, x2: self.x1, y2: self.y1 }
    }
    // Component-wise absolute extent [|dx|, |dy|].
    // (Method names keep the file's existing "lenght" spelling — callers
    // throughout the file use it, so renaming would break them.)
    fn lenght_vec(&self) -> [i32; 2] {
        [(self.x2 - self.x1).abs(), (self.y2 - self.y1).abs()]
    }
    // Euclidean length of the segment. Computed in f32 to avoid i32 overflow
    // of the squared terms for large coordinates.
    fn lenght(&self) -> f32 {
        let dx = (self.x2 - self.x1) as f32;
        let dy = (self.y2 - self.y1) as f32;
        (dx * dx + dy * dy).sqrt()
    }
    // Shortest distance from `p` to this segment (not the infinite line).
    fn distance_to_point(&self, p: &Point) -> f32 {
        let ax = self.x2 - self.x1;
        let ay = self.y2 - self.y1;
        let norm = ax * ax + ay * ay;
        // BUG FIX: a degenerate (zero-length) segment previously divided by
        // zero, making `u` and the returned distance NaN; fall back to the
        // distance to the single endpoint instead.
        let u = if norm == 0 {
            0.0
        } else {
            // Projection parameter of p onto the segment, clamped to [0, 1].
            let raw = ((p.x - self.x1) * ax + (p.y - self.y1) * ay) as f32 / norm as f32;
            raw.max(0.0).min(1.0)
        };
        let cx = self.x1 as f32 + u * ax as f32;
        let cy = self.y1 as f32 + u * ay as f32;
        let dx = cx - p.x as f32;
        let dy = cy - p.y as f32;
        (dx * dx + dy * dy).sqrt()
    }
    // Unit normal (direction rotated -90 degrees).
    fn normal_vector(&self) -> [f32; 2] {
        let v = self.direction_vector();
        [v[1], -v[0]]
    }
    // Unit direction vector. NOTE(review): still NaN for a zero-length
    // segment (division by zero length) — unchanged from the original.
    fn direction_vector(&self) -> [f32; 2] {
        let n = self.lenght();
        [(self.x2 - self.x1) as f32 / n, (self.y2 - self.y1) as f32 / n]
    }
    // Back-face cull: false when movement `l` travels with this line's
    // facing (dot of l's direction with our normal positive), true otherwise.
    // (Dead commented-out reach-check experiment removed.)
    fn in_reach(&self, l: Line) -> bool {
        let ld = l.direction_vector();
        let n = self.normal_vector();
        if ld[0] * n[0] + ld[1] * n[1] > 0.0 {
            return false;
        }
        true
    }
}
impl Wall {
    // A wall is collidable from both sides, so expose its segment in both
    // orientations (each orientation has the opposite facing normal).
    fn get_lines(&self) -> Vec<Line> {
        vec![self.line, self.line.swap_points()]
    }
}
impl Draw for ColorPoint {
    // Writes the point's four color bytes at its camera-relative position.
    fn draw(&self, tex: &mut [u8; (TEX_H*TEX_W*4) as usize], cam: &Camera) {
        if !self.on_screen(cam) {
            return;
        }
        let sx = self.point.x - cam.x;
        let sy = self.point.y - cam.y;
        let base = ((sy * TEX_W + sx) * 4) as usize;
        for (c, &byte) in self.col.iter().enumerate() {
            tex[base + c] = byte;
        }
    }
    fn on_screen(&self, cam: &Camera) -> bool {
        self.point.on_screen(cam)
    }
}
impl Draw for ColorSquare {
// Fills the square's camera-relative rectangle with its color, clipping
// row- and column-wise against the TEX_W x TEX_H buffer.
fn draw(&self, tex: &mut [u8; (TEX_H*TEX_W*4) as usize], cam: &Camera) {
if !self.on_screen(cam) {
return;
}
// Screen-space (buffer) position of the square's top-left corner.
let sx = self.sqr.x - cam.x;
let sy = self.sqr.y - cam.y;
'y: for j in 0..self.sqr.h {
if j + sy < 0 {
// Row above the buffer: try the next row.
continue 'y;
} else if j + sy + 1 > TEX_H {
// Rows only move downward, so everything further is off-screen too.
break 'y;
}
'x: for i in 0..self.sqr.w {
if i + sx < 0 {
// Column left of the buffer: try the next column.
continue 'x;
} else if i + sx + 1 > TEX_W {
// Past the right edge: the rest of this row is off-screen.
continue 'y;
}
// Write the 4 color bytes of this pixel.
for c in 0..4 {
tex[(((sy+j)*TEX_W + sx + i)*4 + c) as usize] = self.col[c as usize];
}
}
}
}
// True when the square's rectangle overlaps the viewport at all.
fn on_screen(&self, cam: &Camera) -> bool {
let sx = self.sqr.x - cam.x;
let sy = self.sqr.y - cam.y;
if sx + self.sqr.w < 0 || sx > TEX_W {
return false;
} else if sy + self.sqr.h < 0 || sy > TEX_H {
return false;
}
return true;
}
}
impl Draw for ColorLine {
// Rasterizes the line with a simple DDA: steps along the dominant axis one
// pixel at a time and interpolates the other axis, clipping per pixel.
fn draw(&self, tex: &mut [u8; (TEX_H*TEX_W*4) as usize], cam: &Camera) {
if !self.on_screen(cam) {
return;
}
let mut line = self.line;
let lx = line.x2-line.x1;
let ly = line.y2-line.y1;
if lx.abs() > ly.abs() {
// Mostly horizontal: iterate x, normalising so x increases.
if line.x2 < line.x1 {
line = line.swap_points();
}
let dy = ly as f32 / lx as f32;
for x in 0..lx {
let sy = (line.y1 as f32 + dy * x as f32) as i32 - cam.y;
let sx = line.x1 + x - cam.x;
if sx < 0 || sx + 1 > TEX_W || sy < 0 || sy + 1 > TEX_H {
continue;
}
for c in 0..4 {
tex[(((sy as i32)*TEX_W + sx)*4 + c) as usize] = self.col[c as usize];
}
}
} else {
// Mostly vertical: iterate y, normalising so y increases.
if line.y2 < line.y1 {
line = line.swap_points();
}
let dx = lx as f32 / ly as f32;
for y in 0..ly {
let sx = (line.x1 as f32 + dx * y as f32) as i32 - cam.x;
let sy = line.y1 + y - cam.y;
if sx < 0 || sx + 1 > TEX_W || sy < 0 || sy + 1 > TEX_H {
continue;
}
for c in 0..4 {
tex[(((sy as i32)*TEX_W + sx)*4 + c) as usize] = self.col[c as usize];
}
}
}
}
// Visibility test: short lines are checked by their endpoints; lines larger
// than the viewport are sampled at 11 evenly spaced points, since both
// endpoints can be off-screen while the middle crosses the viewport.
fn on_screen(&self, cam: &Camera) -> bool {
let l = self.line.lenght_vec();
if l[0] < TEX_W && l[1] < TEX_H {
let p1 = Point {x: self.line.x1, y: self.line.y1};
let p2 = Point {x: self.line.x2, y: self.line.y2};
if p1.on_screen(cam) || p2.on_screen(cam) {
return true;
}
} else {
let mut p: Vec<Point> = Vec::new();
for q in 0..11 {
p.push(Point {x: ((10-q)*self.line.x1 + q*self.line.x2)/10, y: ((10-q)*self.line.y1 + q*self.line.y2)/10});
}
for point in p.iter() {
if point.on_screen(cam) {
return true;
}
}
}
return false;
}
}
impl Move for ColorSquare {
    // Relative move, expressed through the absolute form.
    fn move_amount(&mut self, dx: i32, dy: i32) {
        self.move_to(self.sqr.x + dx, self.sqr.y + dy);
    }
    // Absolute move of the square's top-left corner.
    fn move_to(&mut self, x: i32, y: i32) {
        self.sqr.x = x;
        self.sqr.y = y;
    }
}
impl Move for Sprite {
    // Relative move, expressed through the absolute form.
    fn move_amount(&mut self, dx: i32, dy: i32) {
        self.move_to(self.sqr.x + dx, self.sqr.y + dy);
    }
    // Absolute move of the sprite's top-left corner.
    fn move_to(&mut self, x: i32, y: i32) {
        self.sqr.x = x;
        self.sqr.y = y;
    }
}
impl Move for ColorLine {
    // Relative move, expressed through the absolute form.
    fn move_amount(&mut self, dx: i32, dy: i32) {
        self.move_to(self.line.x1 + dx, self.line.y1 + dy);
    }
    // Moves the first endpoint to (x, y) and translates the second endpoint
    // by the same delta, preserving the line's direction and length.
    fn move_to(&mut self, x: i32, y: i32) {
        let dx = x - self.line.x1;
        let dy = y - self.line.y1;
        self.line.x1 = x;
        self.line.y1 = y;
        self.line.x2 += dx;
        self.line.y2 += dy;
    }
}
impl Draw for Texture {
    // Dispatches drawing to the concrete variant.
    //
    // FIX: the old catch-all `_` arms silently swallowed the Sprite variant;
    // matching it explicitly keeps the no-op behavior but makes any future
    // Texture variant a compile error instead of silently doing nothing.
    fn draw(&self, tex: &mut [u8; (TEX_H*TEX_W*4) as usize], cam: &Camera) {
        match self {
            Texture::Square(color_square) => color_square.draw(tex, cam),
            Texture::Line(color_line) => color_line.draw(tex, cam),
            // TODO: sprite blitting is not implemented; drawing is a no-op.
            Texture::Sprite(_) => (),
        }
    }
    fn on_screen(&self, cam: &Camera) -> bool {
        match self {
            Texture::Square(color_square) => color_square.on_screen(cam),
            Texture::Line(color_line) => color_line.on_screen(cam),
            // Sprites are conservatively reported visible (as before).
            Texture::Sprite(_) => true,
        }
    }
}
impl Move for Texture {
    // Forwards movement to the concrete variant.
    //
    // FIX: removed the `_ => ()` arms — all three variants were already
    // matched, so those arms were unreachable dead code (the compiler warns
    // about them), and an exhaustive match catches future variants.
    fn move_amount(&mut self, dx: i32, dy: i32) {
        match self {
            Texture::Square(color_square) => color_square.move_amount(dx, dy),
            Texture::Sprite(sprite) => sprite.move_amount(dx, dy),
            Texture::Line(line) => line.move_amount(dx, dy),
        }
    }
    fn move_to(&mut self, x: i32, y: i32) {
        match self {
            Texture::Square(color_square) => color_square.move_to(x, y),
            Texture::Sprite(sprite) => sprite.move_to(x, y),
            Texture::Line(line) => line.move_to(x, y),
        }
    }
}
impl Walled for EnvObject {
    // Collision lines contributed by this environment object.
    fn get_lines(&self) -> Vec<Line> {
        match self {
            EnvObject::Wall(wall) => wall.get_lines(),
            // BUG FIX: polygons previously fell through a catch-all `_` arm
            // and contributed no collision lines at all despite storing them;
            // return the stored edges.
            // NOTE(review): unlike Wall::get_lines this does not also emit
            // each edge with endpoints swapped — confirm whether polygon
            // edges need to be collidable from both sides.
            EnvObject::Polygon(polygon) => polygon.lines.clone(),
        }
    }
}
impl Function for FieldFunction {
    // Evaluates this function's field contribution at world (x, y) at time t
    // (milliseconds since start). Expired functions contribute 0.
    //
    // FIX: removed the unreachable `_ => 0.0` arm — both variants are matched
    // above, so the arm was dead code and hid future variants from the compiler.
    fn func(&self, x: i32, y: i32, t: i64) -> f32 {
        match self {
            FieldFunction::PointFunc(start) => {
                let t1 = t-start.f_start.t0;
                if t1 > FUNCTION_LIFETIME {
                    return 0.0;
                }
                // Gaussian bump whose centre drifts at speed C along (dx, dy).
                let x1 = x as f32 - start.f_start.x0 as f32 - C*t1 as f32*start.dx;
                let y1 = y as f32 - start.f_start.y0 as f32 - C*t1 as f32*start.dy;
                let r = (x1*x1 + y1*y1).sqrt();
                let s = 20.0;
                // Cheap cutoff: beyond radius s the contribution is negligible.
                if r.abs() > s {
                    return 0.0;
                }
                s * fexp8(-r*r*(1.0/s))
            },
            FieldFunction::CircleWave(start) => {
                let x1 = x-start.x0;
                let y1 = y-start.y0;
                let t1 = t-start.t0;
                if t1 > FUNCTION_LIFETIME {
                    return 0.0;
                }
                // Ring expanding outward at speed C with a Gaussian cross-section.
                let r = ((x1*x1 + y1*y1) as f32).sqrt() - C*t1 as f32;
                let s = 20.0;
                if r.abs() > s {
                    return 0.0;
                }
                s * fexp8(- r*r*(1.0/s))
            },
        }
    }
}
// Entry point: sets up SDL via raw FFI, then runs the input/update/render loop.
fn main() {
    // Rendering
    let ctx = sdl2::init().unwrap();
    let _video_ctx = ctx.video().unwrap();
    // NOTE(review): `title.as_ptr()` passes a non-NUL-terminated &str to SDL,
    // which expects a C string — SDL may read past the end; consider CString.
    let title = "Amplitude";
    unsafe {
        // Raw SDL calls; the magic numbers are SDL constants:
        // 536805376 = 0x1FFF0000, i.e. SDL_WINDOWPOS_UNDEFINED; 4 = SDL_WINDOW_SHOWN.
        let window = sys::SDL_CreateWindow(title.as_ptr() as *const libc::c_char, 536805376, 536805376, WIDTH as i32, HEIGHT as i32, 4);
        // -1 = first renderer supporting the flags; 2 = SDL_RENDERER_ACCELERATED.
        let renderer = sys::SDL_CreateRenderer(window, -1, 2);
        // 373694468 = 0x16462004, i.e. SDL_PIXELFORMAT_RGBA8888 (presumably —
        // matches the [a, b, g, r] byte order used in the `col` fields);
        // 1 = SDL_TEXTUREACCESS_STREAMING.
        let texture = sys::SDL_CreateTexture(renderer, 373694468, 1, TEX_W, TEX_H);
        // CPU-side pixel buffer, 4 bytes per pixel, uploaded to `texture` each frame.
        let mut tex_data = [0u8; (TEX_H*TEX_W*4) as usize];
        let NULL: *const sys::SDL_Rect = std::ptr::null();
        // Input
        let mut event_pump = ctx.event_pump().unwrap();
        // Time
        let start = PreciseTime::now();
        let mut timer = PreciseTime::now();
        let mut dt: i64 = 0; // frame delta in milliseconds
        let mut t = 0;       // elapsed time since start in milliseconds
        let mut frame = 0;   // frame counter for the FPS report at exit
        // Game
        let test_sqr = ColorSquare {
            sqr: Square {
                x: 100,
                y: 100,
                w: 50,
                h: 20
            },
            col: [255, 0, 0, 255]
        };
        let player_sqr = ColorSquare {
            sqr: Square {
                x: 0,
                y: 0,
                w: 10,
                h: 10
            },
            col: [255, 250, 100, 55]
        };
        // Visual marker drawn on top of the collision wall below (same segment).
        let c_line1 = ColorLine {
            line: Line {
                x1: 200,
                y1: 200,
                x2: 500,
                y2: 700
            },
            col: [255, 50, 250, 100]
        };
        let test_wall1 = Wall {
            line: Line {
                x1: 200,
                y1: 200,
                x2: 500,
                y2: 700
            }
        };
        let mut camera = Camera {x: 0, y: 0};
        let mut player_agent = Agent{
            x: 0,
            y: 0,
            vx: 0.0,
            vy: 0.0,
            speed: 0.3, // pixels per millisecond
            tex: Texture::Square(player_sqr),
            mesh: Mesh {dx: 5, dy: 5, r: 5}
        };
        let mut player = Player {
            agent: player_agent,
            states: Vec::new()
        };
        let mut walls: Vec<EnvObject> = Vec::new();
        walls.push(EnvObject::Wall(test_wall1));
        // Two demo field functions spawned at the origin at t = 0.
        let mut f_list: Vec<FieldFunction> = Vec::new();
        let tmp1 = FieldFunction::CircleWave(FunctionStart {x0: 0, y0: 0, t0: 0});
        let tmp2 = FieldFunction::PointFunc(PointStart{f_start: FunctionStart {x0: 0, y0: 0, t0: 0}, dx: 0.4 , dy: 0.4 });
        f_list.push(tmp1);
        f_list.push(tmp2);
        'running: loop {
            // Input: WASD sets the velocity; key-up only cancels velocity in
            // the direction that key set, so opposite-key presses don't stick.
            for event in event_pump.poll_iter() {
                match event {
                    Event::Quit {..} |
                    Event::KeyDown { keycode: Some(Keycode::Escape), .. } => {
                        break 'running
                    },
                    // Control input
                    Event::KeyDown { keycode: Some(Keycode::A), .. } => {
                        player.agent.vx = -player.agent.speed;
                    },
                    Event::KeyUp { keycode: Some(Keycode::A), .. } => {
                        if player.agent.vx < 0.0 {
                            player.agent.vx = 0.0;
                        }
                    },
                    Event::KeyDown { keycode: Some(Keycode::D), .. } => {
                        player.agent.vx = player.agent.speed;
                    },
                    Event::KeyUp { keycode: Some(Keycode::D), .. } => {
                        if player.agent.vx > 0.0 {
                            player.agent.vx = 0.0;
                        }
                    },
                    Event::KeyDown { keycode: Some(Keycode::W), .. } => {
                        player.agent.vy = -player.agent.speed;
                    },
                    Event::KeyUp { keycode: Some(Keycode::W), .. } => {
                        if player.agent.vy < 0.0 {
                            player.agent.vy = 0.0;
                        }
                    },
                    Event::KeyDown { keycode: Some(Keycode::S), .. } => {
                        player.agent.vy = player.agent.speed;
                    },
                    Event::KeyUp { keycode: Some(Keycode::S), .. } => {
                        if player.agent.vy > 0.0 {
                            player.agent.vy = 0.0;
                        }
                    },
                    _ => {}
                }
            }
            // Time
            dt = timer.to(PreciseTime::now()).num_milliseconds();
            timer = PreciseTime::now();
            t = start.to(timer).num_milliseconds();
            frame += 1;
            // Updating
            //println!("{}, {}", (player.vx*dt as f32) as i32, (player.vy*dt as f32) as i32);
            // Keep the camera centred on the player's mesh centre.
            camera.x = player.agent.x - TEX_W/2 + player.agent.mesh.dx;
            camera.y = player.agent.y - TEX_H/2 + player.agent.mesh.dy;
            // Evaluate the scalar field for every visible pixel and colorize it.
            // NOTE(review): max_val is computed but never read afterwards.
            let mut max_val = 0.0;
            for i in 0..TEX_W {
                for j in 0..TEX_H {
                    let mut val = 0.0;
                    let x = camera.x + i;
                    let y = camera.y + j;
                    for f in f_list.iter() {
                        val += f.func(x, y, t);
                    }
                    if val > max_val {
                        max_val = val;
                    }
                    let a = color_from_value(val);
                    for k in 0..4 {
                        tex_data[((TEX_W*j + i)*4 + k) as usize] = a[k as usize];
                        // tex_data[((TEX_W*j + i)*4 + k) as usize] = 0;
                    }
                }
            }
            player.move_player(dt, &walls);
            test_sqr.draw(&mut tex_data, &camera);
            c_line1.draw(&mut tex_data, &camera);
            player.agent.tex.draw(&mut tex_data, &camera);
            // Rendering: upload the CPU buffer and stretch it to the window.
            sys::SDL_SetRenderDrawColor(renderer, 100, 100, 100, 255);
            sys::SDL_RenderClear(renderer);
            sys::SDL_UpdateTexture(texture, NULL, tex_data.as_ptr() as *const std::ffi::c_void, TEX_W*4);
            // NOTE(review): the returned error code `a` is ignored.
            let a = sys::SDL_RenderCopy(renderer, texture, NULL, NULL);
            sys::SDL_RenderPresent(renderer);
            // Crude ~60 FPS cap (does not subtract frame time).
            ::std::thread::sleep(Duration::new(0, 1_000_000_000u32 / 60));
        }
        // Report average FPS over the whole run, then tear down SDL.
        let end = PreciseTime::now();
        let seconds = start.to(end).num_seconds();
        println!("seconds: {}", seconds);
        println!("Frames per second: {}", frame/seconds);
        sys::SDL_DestroyRenderer(renderer);
        sys::SDL_DestroyWindow(window);
        sys::SDL_Quit();
    }
}
// Procedural plasma color for pixel (x, y) at time t: a radial sine modulated
// by a rotating angular sine, phase-shifted per channel. Byte 0 (alpha) stays 255.
fn p_col(x: i32, y: i32, t: i32) -> [u8; 4] {
    let mut out = [255u8; 4];
    let radius = ((x*x + y*y) as f32).sqrt();
    let angle = (y as f32).atan2(x as f32);
    for i in 0..3 {
        let swirl = (10.0*(angle+((t*i) as f32)/2000.0)).sin();
        out[(1 + i) as usize] = (((radius*0.4).sin()*swirl + 1.0)*128.0) as u8;
    }
    out
}
|
#[cfg(feature = "python")]
pub mod py;
mod test;
/// The freely-jointed chain (FJC) model thermodynamics in the modified canonical ensemble approximated using an asymptotic approach valid for weak potentials.
pub mod weak_potential;
/// The freely-jointed chain (FJC) model thermodynamics in the modified canonical ensemble approximated using an asymptotic approach valid for strong potentials.
pub mod strong_potential;
/// The structure of the thermodynamics of the FJC model in the modified canonical ensemble approximated using an asymptotic approach.
pub struct FJC
{
    /// The mass of each hinge in the chain in units of kg/mol.
    pub hinge_mass: f64,
    /// The length of each link in the chain in units of nm.
    pub link_length: f64,
    /// The number of links in the chain.
    pub number_of_links: u8,
    /// The thermodynamic functions of the model in the modified canonical ensemble approximated using an asymptotic approach valid for weak potentials.
    // NOTE(review): previously said "isotensional ensemble", which contradicts
    // the struct doc and the `weak_potential` module doc above — presumed
    // copy-paste error; confirm against the crate's other ensembles.
    pub weak_potential: weak_potential::FJC,
    /// The thermodynamic functions of the model in the modified canonical ensemble approximated using an asymptotic approach valid for strong potentials.
    // NOTE(review): same correction as above.
    pub strong_potential: strong_potential::FJC
}
/// The implemented functionality of the thermodynamics of the FJC model in the modified canonical ensemble approximated using an asymptotic approach.
impl FJC
{
    /// Initializes and returns an instance of the thermodynamics of the FJC model in the modified canonical ensemble approximated using an asymptotic approach, constructing both asymptotic sub-models from the same chain parameters.
    pub fn init(number_of_links: u8, link_length: f64, hinge_mass: f64) -> Self
    {
        Self
        {
            number_of_links,
            link_length,
            hinge_mass,
            weak_potential: weak_potential::FJC::init(number_of_links, link_length, hinge_mass),
            strong_potential: strong_potential::FJC::init(number_of_links, link_length, hinge_mass)
        }
    }
}
|
use super::{SnapshotsState, StateBuilder};
use crate::error::{Error, UnderlyingError};
use crate::storage::stream::{ReadEggExt, WriteEggExt};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use std::collections::VecDeque;
use std::convert::TryFrom;
use std::fs;
use std::io;
use std::path;
// Public interfaces
impl SnapshotsState {
const VERSION: u16 = 1;
pub const STATE_FILE_NAME: &'static str = "state";
pub const SNAPSHOTS_PATH: &'static str = "snapshots";
//SnapshotsState::STATE_PATH).join(SnapshotsState::STATE_FILE_NAME))
/// Loads and returns the current state of snapshots in the repository.
///
/// Expects `<repo>/snapshots/state` to already exist; returns an
/// invalid-repository error when it does not.
// TODO: The problem with this method is that we have no idea if a snapshot state file was expected
pub fn load(path_to_repository: &path::Path) -> Result<SnapshotsState, Error> {
    // TODO: Create a snapshots directory to place the files in
    // TODO: This probably needs to be atomic to recover from a bad create repository
    // Resolve <repo>/snapshots/state.
    let path_to_state = path_to_repository
        .join(Self::SNAPSHOTS_PATH)
        .join(Self::STATE_FILE_NAME);
    // Since we are initialising we expect there to already be a snapshots state file.
    // (Idiom fix: was `path_to_state.exists() == false`.)
    if !path_to_state.exists() {
        return Err(Error::invalid_repository()
            .add_generic_message("No snapshots state file was found"));
    }
    SnapshotsState::parse_state_file(path_to_state)
}
// Creates a new state file with presumed defaults, ie empty state
pub fn new(path_to_repository: &path::Path) -> Result<SnapshotsState, Error> {
// TODO: Create a snapshots directory to place the files in
// TODO: This probably needs to be atomic to recover from a bad create repository
let path_to_state = path_to_repository
.join(Self::SNAPSHOTS_PATH)
.join(Self::STATE_FILE_NAME);
// Create the snapshot storage state file
let snapshot_file = fs::OpenOptions::new()
.create_new(true)
.write(true)
.open(path_to_state.as_path())
.map_err(|err| {
Error::file_error(Some(UnderlyingError::from(err)))
.add_user_message(
"Failed to create a configuration file while creating a new repository",
)
.add_debug_message(format!(
"Failed to create a snapshot state file, the path was {}",
path_to_state.display()
))
})?;
let file_writer = io::BufWriter::new(snapshot_file);
let initial_state = SnapshotsState {
working_snapshot: None,
latest_snapshot: None,
recent_snapshots: VecDeque::new(),
root_snapshots: Vec::new(),
end_snapshots: Vec::new(),
path_to_state_file: path_to_state,
};
// Write initial state
SnapshotsState::write_state_file(&initial_state, file_writer)
.map_err(|err| err.add_generic_message("While creating a new snapshots state file"))?;
Ok(initial_state)
}
/// Loads the current state of snapshots
///
/// Reads the binary state file written by `write_state_file`. Field order is
/// fixed and must mirror the writer exactly: version (u16 LE), optional
/// working-snapshot hash, optional latest-snapshot hash, recent snapshots
/// (u16 count + hashes), root snapshots (u16 count + hashes), end snapshots
/// (u16 count + hashes).
pub(in crate::snapshots::state) fn parse_state_file(
    path_to_state: path::PathBuf,
) -> Result<SnapshotsState, Error> {
    let snapshot_file = fs::OpenOptions::new()
        .read(true)
        .open(path_to_state.as_path())
        .map_err(|err| Error::file_error(Some(UnderlyingError::from(err)))
            .add_debug_message(format!("While trying to open the snapshot state file an error occured, the path used was {}", path_to_state.display()))
            .add_user_message("Failed to open a snapshot configuration file"))?;
    let mut file_reader = io::BufReader::new(snapshot_file);
    // This version is the most recent so parse it
    // Read the version of the snapshot state file
    let version = file_reader.read_u16::<LittleEndian>().map_err(|err| {
        Error::file_error(Some(UnderlyingError::from(err)))
            .add_generic_message("Failed to read the version of the snapshot state file")
    })?;
    if version != SnapshotsState::VERSION {
        // TODO: Upgrade path for snapshot state file
        unimplemented!(
            "Reached the upgrade path for snapshot state, version recorded was: {:?}",
            version
        );
    }
    // Start from an empty state and fill it in via the builder as fields are read.
    let mut snapshot_state = SnapshotsState {
        end_snapshots: Vec::new(),
        root_snapshots: Vec::new(),
        recent_snapshots: VecDeque::new(),
        latest_snapshot: None,
        working_snapshot: None,
        path_to_state_file: path_to_state,
    };
    let mut builder = StateBuilder::new(&mut snapshot_state);
    // Read working snapshot
    let working_snapshot = file_reader
        .read_optional_hash()
        .map_err(|err| err.add_generic_message("Failed to read working snapshot"))?;
    builder.set_working_snapshot(working_snapshot);
    // Read latest snapshot
    let latest_snapshot = file_reader
        .read_optional_hash()
        .map_err(|err| err.add_generic_message("Failed to read latest snapshot"))?;
    builder.set_latest_snapshot(latest_snapshot);
    // Read number of recent snapshot files
    let recent_snapshots_total = file_reader.read_u16::<LittleEndian>().map_err(|err| {
        Error::file_error(Some(UnderlyingError::from(err)))
            .add_generic_message("Failed to read total number of snapshots")
    })?;
    // Fill the Dequeue with the most recent snapshots
    // let mut recent_snapshots = VecDeque::with_capacity(usize::from(recent_snapshots_total));
    for _ in 0..recent_snapshots_total {
        let recent_snapshot = file_reader.read_hash().map_err(|err| {
            err.add_generic_message("While reading a recent snapshot loading snapshots state")
        })?;
        builder.add_recent_snapshot(recent_snapshot);
    }
    // Read number of root snapshots
    let total_root_ids = file_reader.read_u16::<LittleEndian>().map_err(|err| {
        Error::file_error(Some(UnderlyingError::from(err)))
            .add_generic_message("Failed to read total root snapshots")
    })?;
    // Read Root ID's
    // let mut root_snapshots = Vec::with_capacity(usize::from(total_root_ids));
    for _ in 0..total_root_ids {
        let root_id = file_reader
            .read_hash()
            .map_err(|err| err.add_generic_message("Failed to read root snapshot hash"))?;
        builder.add_root_node(root_id);
    }
    // Read number of end snapshots
    let total_end_ids = file_reader.read_u16::<LittleEndian>().map_err(|err| {
        Error::file_error(Some(UnderlyingError::from(err)))
            .add_generic_message("Failed to read number of snapshots")
    })?;
    // Read end ID's
    // let mut end_snapshots = Vec::with_capacity(usize::from(total_end_ids));
    for _ in 0..total_end_ids {
        let end_id = file_reader
            .read_hash()
            .map_err(|err| err.add_generic_message("Failed to read end snapshot hash"))?;
        builder.add_end_node(end_id);
    }
    // We store a copy of the path to the state file since we require it when making changes to state
    Ok(snapshot_state)
}
// Placeholder: will validate a state file after recovering from an
// interrupted operation.
#[allow(dead_code)]
fn check_state() {
    //TODO: Used to validate a state file after recovering from an interrupted operation
}
// Writes the current state of the snapshot storage system
//
// Binary layout (must mirror parse_state_file): version (u16 LE), optional
// working hash, optional latest hash, then three length-prefixed hash lists
// (recent, root, end), each prefixed with a u16 LE count.
//
// BUG FIX: three loop headers contained the mojibake `¤t_state` — the
// `&curren` prefix of `&current_state` had been swallowed as the HTML entity
// `&curren;` (U+00A4), which does not compile; restored `&current_state`.
// Also dropped the redundant extra `&` on `write_hash(&root_id)` /
// `write_hash(&end_id)` (the iterator items are already references).
pub(in crate::snapshots::state) fn write_state_file(
    current_state: &SnapshotsState,
    mut file_writer: io::BufWriter<fs::File>,
) -> Result<(), Error> {
    // TODO: Use a generic writer instead of a BufWriter
    // Write the version of the snapshot state file
    file_writer
        .write_u16::<LittleEndian>(SnapshotsState::VERSION)
        .map_err(|err| {
            Error::file_error(Some(UnderlyingError::from(err)))
                .add_generic_message("While writing the version of the snapshot state file")
        })?;
    // Write the ID of the working snapshot - This hash may not be present even after initialization
    file_writer
        .write_optional_hash(current_state.working_snapshot.as_ref())
        .map_err(|err| err.add_generic_message("While writing the working snapshot hash"))?;
    // Write the snapshot ID of the latest snapshot
    file_writer
        .write_optional_hash(current_state.latest_snapshot.as_ref())
        .map_err(|err| {
            err.add_generic_message("While updating the latest snapshot in the state file")
        })?;
    // Write number of recently accessed data files
    // TODO: This should be a u8 and only store the most recent 10?
    file_writer
        .write_u16::<LittleEndian>(u16::try_from(current_state.recent_snapshots.len()).unwrap())
        .map_err(|err| {
            Error::file_error(Some(UnderlyingError::from(err)))
                .add_generic_message("Failed to write the number of recent snapshots")
        })?;
    // Write the recently used hashes
    for recent_hash in &current_state.recent_snapshots {
        file_writer.write_hash(recent_hash).map_err(|err| {
            err.add_generic_message("Failed to write recent hash in state file")
        })?;
    }
    // Write number of root snapshots
    file_writer
        .write_u16::<LittleEndian>(u16::try_from(current_state.root_snapshots.len()).unwrap())
        .map_err(|err| {
            Error::file_error(Some(UnderlyingError::from(err)))
                .add_generic_message("Failed to write number of root snapshots")
        })?;
    for root_id in &current_state.root_snapshots {
        file_writer.write_hash(root_id).map_err(|err| {
            err.add_generic_message("Failed to write root snapshot in state file")
        })?;
    }
    // Write number of end snapshots
    file_writer
        .write_u16::<LittleEndian>(u16::try_from(current_state.end_snapshots.len()).unwrap())
        .map_err(|err| {
            Error::file_error(Some(UnderlyingError::from(err)))
                .add_generic_message("Failed to write number of end snapshots")
        })?;
    // Write the end snapshots
    for end_id in &current_state.end_snapshots {
        file_writer.write_hash(end_id).map_err(|err| {
            err.add_generic_message("Failed to write end snapshot in state file")
        })?;
    }
    Ok(())
}
}
#[cfg(test)]
mod tests {
    use crate::hash::Hash;
    use crate::snapshots::SnapshotsState;
    use std::collections::VecDeque;
    use std::io;
    use testspace::TestSpace;

    // Round-trip test: a state serialized with write_state_file must be read
    // back unchanged by parse_state_file.
    #[test]
    fn test_read_write_state() {
        // Build a minimal state: one root snapshot and a working snapshot.
        let root = Hash::generate_random_hash();
        let mut root_hashes = Vec::new();
        root_hashes.push(root);
        let working = Hash::generate_random_hash();
        let ts = TestSpace::new();
        let tsf = ts.create_tsf();
        let test_state = SnapshotsState {
            latest_snapshot: None,
            working_snapshot: Some(working),
            recent_snapshots: VecDeque::new(),
            root_snapshots: root_hashes,
            end_snapshots: Vec::new(),
            path_to_state_file: tsf.get_path().to_path_buf(),
        };
        // Scope the writer so the file is flushed and closed before reading.
        {
            let file = tsf.open_file();
            let file_writer = io::BufWriter::new(file);
            SnapshotsState::write_state_file(&test_state, file_writer)
                .expect("Failed to write state");
        }
        let path_to_state = tsf.get_path();
        let result_state = SnapshotsState::parse_state_file(path_to_state.to_path_buf())
            .expect("Failed to read state");
        assert_eq!(result_state, test_state);
    }
}
|
use crate::{
cli::{Command, PecuniaCli, Resource},
client::ApiClient,
};
use anyhow::Result;
use log::{self, debug, info};
use structopt::StructOpt;
mod cli;
mod client;
mod configuration;
mod model;
#[tokio::main]
/// CLI entry point: parses the command line, loads configuration and
/// dispatches the requested `get` subcommand against the IEX API, printing
/// the JSON response.
///
/// Fix: the intraday/historical log lines said "information information".
async fn main() -> Result<()> {
    pretty_env_logger::init_custom_env("PECUNIA_LOG");
    let opt = PecuniaCli::from_args();
    debug!("Given command / subcommand + arguments are: {:?}", opt);
    // NOTE(review): panics if the configuration cannot be loaded; consider
    // propagating via `?` once Configuration::new's error type is confirmed
    // to convert into anyhow::Error.
    let cfg = configuration::Configuration::new().unwrap();
    match opt.cmd {
        Command::Get(c) => match c {
            Resource::Company { symbol } => {
                info!("Got subcommand 'get company'. Fetching company information ...");
                let client = ApiClient::new(cfg.iex_api_token);
                println!(
                    "{}",
                    serde_json::to_string_pretty(&client.get_company(&symbol).await?)?
                );
            }
            Resource::IntradayPrices { symbol } => {
                info!("Got subcommand 'get intraday-prices'. Fetching intraday price information ...");
                let client = ApiClient::new(cfg.iex_api_token);
                println!(
                    "{}",
                    serde_json::to_string_pretty(&client.get_intraday_prices(&symbol).await?)?
                );
            }
            Resource::HistoricalPrices { symbol, date } => {
                info!("Got subcommand 'get historical-prices'. Fetching historical price information ...");
                let client = ApiClient::new(cfg.iex_api_token);
                println!(
                    "{}",
                    serde_json::to_string_pretty(
                        &client.get_historical_prices(&symbol, date).await?
                    )?
                );
            }
        },
    }
    Ok(())
}
|
mod key_state;
mod mouse_state;
mod renderer;
mod table_tool;
use key_state::KeyState;
use mouse_state::MouseState;
use renderer::Renderer;
use table_tool::TableTool;
/// One modeless window: its content plus on-screen placement state.
struct ModelessContent {
    // Selectable list of contents shown inside the window.
    content: State<SelectList<room_modeless::Content>>,
    // Window position — presumably page (pixel) coordinates, per the names.
    page_x: i32,
    page_y: i32,
    // Whether the window is currently minimized.
    minimized: bool,
}
/// DOM element ids used by this component.
struct ElementId {
    // Id of the header element showing the room id.
    header_room_id: String,
}
/// Which modal dialog is currently open, if any.
enum Modal {
    None,
    NewChannel,
    ImportedFiles,
}
/// Full-screen overlay state (e.g. shown while a file is dragged over).
enum Overlay {
    None,
    DragFile,
}
/// An open context menu: its position plus the kind of target it refers to.
struct Contextmenu {
    // Menu position — presumably page (pixel) coordinates, per the names.
    page_x: i32,
    page_y: i32,
    kind: ContextmenuKind,
}
/// What the context menu was opened on, carrying the target's block id.
enum ContextmenuKind {
    Character(BlockId),
    Boxblock(BlockId),
}
/// Top-level room component state: networking handles, shared data arenas,
/// rendering state and all transient UI state (tools, windows, modals,
/// canvas, input devices).
pub struct Room {
    cmds: Cmds<Msg, On>,
    // Peer-to-peer connection handles and identifiers.
    peer: Rc<Peer>,
    peer_id: Rc<String>,
    room: Rc<MeshRoom>,
    room_id: Rc<String>,
    client_id: Rc<String>,
    element_id: ElementId,
    // Table tool selection and modeless window management.
    table_tools: State<SelectList<TableTool>>,
    modeless_list: ModelessList<ModelessContent>,
    modeless_container_element: Option<State<web_sys::Element>>,
    // Shared and local block/player/resource arenas.
    block_arena: block::Arena,
    local_block_arena: block::Arena,
    player_arena: player::Arena,
    resource_arena: resource::Arena,
    // Rendering state (None until a canvas is available).
    renderer: Option<Renderer>,
    camera_matrix: CameraMatrix,
    // Ids of well-known blocks.
    chat_id: BlockId,
    world_id: BlockId,
    // Transient UI state.
    modal: Modal,
    overlay: Overlay,
    contextmenu: Option<Contextmenu>,
    mouse_state: MouseState,
    key_state: KeyState,
    // Canvas element plus its cached position and size.
    canvas: Option<Rc<web_sys::HtmlCanvasElement>>,
    canvas_pos: [f32; 2],
    canvas_size: [f32; 2],
    // In-progress freehand line points and the object currently grabbed.
    drawing_line: Vec<[f64; 2]>,
    grabbed_object_id: ObjectId,
}
|
struct Solution;

impl Solution {
    /// Counts the unique paths from the top-left to the bottom-right cell of
    /// a grid, moving only right or down; cells equal to 1 are obstacles.
    ///
    /// Returns 0 for an empty grid (the original panicked on `vec![]`) or
    /// when the start/end cell is blocked.
    pub fn unique_paths_with_obstacles(obstacle_grid: Vec<Vec<i32>>) -> i32 {
        if obstacle_grid.is_empty() || obstacle_grid[0].is_empty() {
            return 0;
        }
        let n = obstacle_grid[0].len();
        // Rolling 1-D DP: dp[j] holds the path count for column j of the row
        // being processed — O(n) space instead of the original O(m*n) table.
        let mut dp = vec![0i32; n];
        // Seed the start cell; everything else starts at 0.
        dp[0] = if obstacle_grid[0][0] == 1 { 0 } else { 1 };
        for row in &obstacle_grid {
            for j in 0..n {
                if row[j] == 1 {
                    // Obstacle: no path passes through this cell.
                    dp[j] = 0;
                } else if j > 0 {
                    // Paths from above (old dp[j]) plus from the left (dp[j-1]).
                    dp[j] += dp[j - 1];
                }
            }
        }
        dp[n - 1]
    }
}
// Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
use libp2p::gossipsub::Topic;
use serde::Deserialize;
/// libp2p networking configuration; missing fields fall back to `Default`.
#[derive(Debug, Deserialize)]
#[serde(default)]
pub struct Libp2pConfig {
    // Multiaddress this node listens on.
    pub listening_multiaddr: String,
    // Peers dialed at startup.
    pub bootstrap_peers: Vec<String>,
    #[serde(skip_deserializing)] // Always use default
    pub pubsub_topics: Vec<Topic>,
}
impl Libp2pConfig {
    /// Sets the pubsub topics to the network name provided
    pub fn set_network_name(&mut self, s: &str) {
        let block_topic = Topic::new(format!("/fil/blocks/{}", s));
        let msg_topic = Topic::new(format!("/fil/msgs/{}", s));
        self.pubsub_topics = vec![block_topic, msg_topic];
    }
}
impl Default for Libp2pConfig {
    /// Listens on any IPv4 address/port, joins the interop topics, and
    /// starts with no bootstrap peers.
    fn default() -> Self {
        Self {
            listening_multiaddr: String::from("/ip4/0.0.0.0/tcp/0"),
            bootstrap_peers: Vec::new(),
            pubsub_topics: vec![
                Topic::new("/fil/blocks/interop".to_owned()),
                Topic::new("/fil/msgs/interop".to_owned()),
            ],
        }
    }
}
|
use std::fmt;
use std::io;
/// A person with a first name, a last name and an age in years.
struct Person {
    firstname: String,
    lastname: String,
    age: u32,
}

/// Human-readable rendering; produces the same text as `Person::print`.
impl fmt::Display for Person {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "Your name is: {} {}. Your age is: {}",
            self.firstname, self.lastname, self.age
        )
    }
}

impl Person {
    /// Prints the same description that `Display` produces.
    fn print(&self) {
        println!(
            "Your name is: {} {}. Your age is: {}",
            self.firstname, self.lastname, self.age
        );
    }

    /// Increments the age by one year and returns the new value.
    fn happy_birthday(&mut self) -> u32 {
        self.age += 1;
        self.age
    }

    /// Interactively replaces both names, reading them from stdin.
    pub fn set_last_name(&mut self) {
        println!("Please type your name");
        let mut first = String::new();
        io::stdin()
            .read_line(&mut first)
            .expect("Failed to read line");
        self.firstname = first.trim().to_string();

        println!("Please type your surname");
        let mut last = String::new();
        io::stdin()
            .read_line(&mut last)
            .expect("Failed to read line");
        self.lastname = last.trim().to_string();
    }
}
/// Demo driver: exercises `Person`'s methods, then interactively confirms
/// the stored name via stdin and updates it if the user rejects it.
fn main() {
    let mut person = Person {
        firstname: String::from("Frankie"),
        lastname: String::from("Miller"),
        age: 25,
    };

    println!("Validating the print() function:");
    person.print();

    println!("Validating the to_string() function:");
    println!("{}", person.to_string());

    println!("Validating the happy_birthday() function:");
    person.happy_birthday();
    println!(
        "Happy birthday {}! You are {} years old now!",
        person.firstname, person.age
    );

    // Ask the user to confirm the stored name.
    println!("Now, we are going to check the name and surname of the person");
    println!(
        "You are {} {}. Is that correct?",
        person.firstname, person.lastname
    );
    println!("Please, type YES if it is correct or NO if not:");
    let mut answer = String::new();
    io::stdin()
        .read_line(&mut answer)
        .expect("Failed to read line");

    // Normalize the answer and branch on it.
    match answer.trim().to_lowercase().as_str() {
        "no" => {
            person.set_last_name();
            println!(
                "You are {0} {1}. Thank you Mr. {1}.",
                person.firstname, person.lastname
            );
        }
        "yes" => {
            println!("Thank you, Mr. {}", person.lastname);
        }
        _ => {
            println!("I have specifically asked you to write YES or NO. I don't have time for that. Goodbye.")
        }
    }
}
|
// Auto-generated register accessors (svd2rust) for the FDCAN TT Interrupt
// Register (FDCAN_TTIR): one read-only flag per time-triggered CAN event.
// Keep the structure identical to the generator output so the file can be
// regenerated from the SVD; only comments are hand-added here.
#[doc = "Register `FDCAN_TTIR` reader"]
pub type R = crate::R<FDCAN_TTIR_SPEC>;
#[doc = "Register `FDCAN_TTIR` writer"]
pub type W = crate::W<FDCAN_TTIR_SPEC>;
#[doc = "Field `SBC` reader - SBC"]
pub type SBC_R = crate::BitReader;
#[doc = "Field `SBC` writer - SBC"]
pub type SBC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMC` reader - SMC"]
pub type SMC_R = crate::BitReader;
#[doc = "Field `SMC` writer - SMC"]
pub type SMC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CSM` reader - CSM"]
pub type CSM_R = crate::BitReader;
#[doc = "Field `CSM` writer - CSM"]
pub type CSM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SOG` reader - SOG"]
pub type SOG_R = crate::BitReader;
#[doc = "Field `SOG` writer - SOG"]
pub type SOG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RTMI` reader - RTMI"]
pub type RTMI_R = crate::BitReader;
#[doc = "Field `RTMI` writer - RTMI"]
pub type RTMI_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TTMI` reader - TTMI"]
pub type TTMI_R = crate::BitReader;
#[doc = "Field `TTMI` writer - TTMI"]
pub type TTMI_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SWE` reader - SWE"]
pub type SWE_R = crate::BitReader;
#[doc = "Field `SWE` writer - SWE"]
pub type SWE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `GTW` reader - GTW"]
pub type GTW_R = crate::BitReader;
#[doc = "Field `GTW` writer - GTW"]
pub type GTW_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `GTD` reader - GTD"]
pub type GTD_R = crate::BitReader;
#[doc = "Field `GTD` writer - GTD"]
pub type GTD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `GTE` reader - GTE"]
pub type GTE_R = crate::BitReader;
#[doc = "Field `GTE` writer - GTE"]
pub type GTE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXU` reader - TXU"]
pub type TXU_R = crate::BitReader;
#[doc = "Field `TXU` writer - TXU"]
pub type TXU_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXO` reader - TXO"]
pub type TXO_R = crate::BitReader;
#[doc = "Field `TXO` writer - TXO"]
pub type TXO_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SE1` reader - SE1"]
pub type SE1_R = crate::BitReader;
#[doc = "Field `SE1` writer - SE1"]
pub type SE1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SE2` reader - SE2"]
pub type SE2_R = crate::BitReader;
#[doc = "Field `SE2` writer - SE2"]
pub type SE2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ELC` reader - ELC"]
pub type ELC_R = crate::BitReader;
#[doc = "Field `ELC` writer - ELC"]
pub type ELC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IWTG` reader - IWTG"]
pub type IWTG_R = crate::BitReader;
#[doc = "Field `IWTG` writer - IWTG"]
pub type IWTG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `WT` reader - WT"]
pub type WT_R = crate::BitReader;
#[doc = "Field `WT` writer - WT"]
pub type WT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AW` reader - AW"]
pub type AW_R = crate::BitReader;
#[doc = "Field `AW` writer - AW"]
pub type AW_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CER` reader - CER"]
pub type CER_R = crate::BitReader;
#[doc = "Field `CER` writer - CER"]
pub type CER_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts one flag bit from the cached register value.
impl R {
    #[doc = "Bit 0 - SBC"]
    #[inline(always)]
    pub fn sbc(&self) -> SBC_R {
        SBC_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - SMC"]
    #[inline(always)]
    pub fn smc(&self) -> SMC_R {
        SMC_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - CSM"]
    #[inline(always)]
    pub fn csm(&self) -> CSM_R {
        CSM_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - SOG"]
    #[inline(always)]
    pub fn sog(&self) -> SOG_R {
        SOG_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - RTMI"]
    #[inline(always)]
    pub fn rtmi(&self) -> RTMI_R {
        RTMI_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - TTMI"]
    #[inline(always)]
    pub fn ttmi(&self) -> TTMI_R {
        TTMI_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - SWE"]
    #[inline(always)]
    pub fn swe(&self) -> SWE_R {
        SWE_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - GTW"]
    #[inline(always)]
    pub fn gtw(&self) -> GTW_R {
        GTW_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - GTD"]
    #[inline(always)]
    pub fn gtd(&self) -> GTD_R {
        GTD_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - GTE"]
    #[inline(always)]
    pub fn gte(&self) -> GTE_R {
        GTE_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - TXU"]
    #[inline(always)]
    pub fn txu(&self) -> TXU_R {
        TXU_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - TXO"]
    #[inline(always)]
    pub fn txo(&self) -> TXO_R {
        TXO_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - SE1"]
    #[inline(always)]
    pub fn se1(&self) -> SE1_R {
        SE1_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - SE2"]
    #[inline(always)]
    pub fn se2(&self) -> SE2_R {
        SE2_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - ELC"]
    #[inline(always)]
    pub fn elc(&self) -> ELC_R {
        ELC_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - IWTG"]
    #[inline(always)]
    pub fn iwtg(&self) -> IWTG_R {
        IWTG_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - WT"]
    #[inline(always)]
    pub fn wt(&self) -> WT_R {
        WT_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - AW"]
    #[inline(always)]
    pub fn aw(&self) -> AW_R {
        AW_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - CER"]
    #[inline(always)]
    pub fn cer(&self) -> CER_R {
        CER_R::new(((self.bits >> 18) & 1) != 0)
    }
}
// Write accessors: each returns a proxy targeting one bit position (the
// const generic parameter is the bit offset).
impl W {
    #[doc = "Bit 0 - SBC"]
    #[inline(always)]
    #[must_use]
    pub fn sbc(&mut self) -> SBC_W<FDCAN_TTIR_SPEC, 0> {
        SBC_W::new(self)
    }
    #[doc = "Bit 1 - SMC"]
    #[inline(always)]
    #[must_use]
    pub fn smc(&mut self) -> SMC_W<FDCAN_TTIR_SPEC, 1> {
        SMC_W::new(self)
    }
    #[doc = "Bit 2 - CSM"]
    #[inline(always)]
    #[must_use]
    pub fn csm(&mut self) -> CSM_W<FDCAN_TTIR_SPEC, 2> {
        CSM_W::new(self)
    }
    #[doc = "Bit 3 - SOG"]
    #[inline(always)]
    #[must_use]
    pub fn sog(&mut self) -> SOG_W<FDCAN_TTIR_SPEC, 3> {
        SOG_W::new(self)
    }
    #[doc = "Bit 4 - RTMI"]
    #[inline(always)]
    #[must_use]
    pub fn rtmi(&mut self) -> RTMI_W<FDCAN_TTIR_SPEC, 4> {
        RTMI_W::new(self)
    }
    #[doc = "Bit 5 - TTMI"]
    #[inline(always)]
    #[must_use]
    pub fn ttmi(&mut self) -> TTMI_W<FDCAN_TTIR_SPEC, 5> {
        TTMI_W::new(self)
    }
    #[doc = "Bit 6 - SWE"]
    #[inline(always)]
    #[must_use]
    pub fn swe(&mut self) -> SWE_W<FDCAN_TTIR_SPEC, 6> {
        SWE_W::new(self)
    }
    #[doc = "Bit 7 - GTW"]
    #[inline(always)]
    #[must_use]
    pub fn gtw(&mut self) -> GTW_W<FDCAN_TTIR_SPEC, 7> {
        GTW_W::new(self)
    }
    #[doc = "Bit 8 - GTD"]
    #[inline(always)]
    #[must_use]
    pub fn gtd(&mut self) -> GTD_W<FDCAN_TTIR_SPEC, 8> {
        GTD_W::new(self)
    }
    #[doc = "Bit 9 - GTE"]
    #[inline(always)]
    #[must_use]
    pub fn gte(&mut self) -> GTE_W<FDCAN_TTIR_SPEC, 9> {
        GTE_W::new(self)
    }
    #[doc = "Bit 10 - TXU"]
    #[inline(always)]
    #[must_use]
    pub fn txu(&mut self) -> TXU_W<FDCAN_TTIR_SPEC, 10> {
        TXU_W::new(self)
    }
    #[doc = "Bit 11 - TXO"]
    #[inline(always)]
    #[must_use]
    pub fn txo(&mut self) -> TXO_W<FDCAN_TTIR_SPEC, 11> {
        TXO_W::new(self)
    }
    #[doc = "Bit 12 - SE1"]
    #[inline(always)]
    #[must_use]
    pub fn se1(&mut self) -> SE1_W<FDCAN_TTIR_SPEC, 12> {
        SE1_W::new(self)
    }
    #[doc = "Bit 13 - SE2"]
    #[inline(always)]
    #[must_use]
    pub fn se2(&mut self) -> SE2_W<FDCAN_TTIR_SPEC, 13> {
        SE2_W::new(self)
    }
    #[doc = "Bit 14 - ELC"]
    #[inline(always)]
    #[must_use]
    pub fn elc(&mut self) -> ELC_W<FDCAN_TTIR_SPEC, 14> {
        ELC_W::new(self)
    }
    #[doc = "Bit 15 - IWTG"]
    #[inline(always)]
    #[must_use]
    pub fn iwtg(&mut self) -> IWTG_W<FDCAN_TTIR_SPEC, 15> {
        IWTG_W::new(self)
    }
    #[doc = "Bit 16 - WT"]
    #[inline(always)]
    #[must_use]
    pub fn wt(&mut self) -> WT_W<FDCAN_TTIR_SPEC, 16> {
        WT_W::new(self)
    }
    #[doc = "Bit 17 - AW"]
    #[inline(always)]
    #[must_use]
    pub fn aw(&mut self) -> AW_W<FDCAN_TTIR_SPEC, 17> {
        AW_W::new(self)
    }
    #[doc = "Bit 18 - CER"]
    #[inline(always)]
    #[must_use]
    pub fn cer(&mut self) -> CER_W<FDCAN_TTIR_SPEC, 18> {
        CER_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "The flags are set when one of the listed conditions is detected (edge-sensitive). The flags remain set until the Host clears them. A flag is cleared by writing a 1 to the corresponding bit position. Writing a 0 has no effect. A hard reset will clear the register.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fdcan_ttir::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fdcan_ttir::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FDCAN_TTIR_SPEC;
impl crate::RegisterSpec for FDCAN_TTIR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`fdcan_ttir::R`](R) reader structure"]
impl crate::Readable for FDCAN_TTIR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`fdcan_ttir::W`](W) writer structure"]
impl crate::Writable for FDCAN_TTIR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets FDCAN_TTIR to value 0"]
impl crate::Resettable for FDCAN_TTIR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/*
Copyright (c) 2023 Uber Technologies, Inc.
<p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
<p>http://www.apache.org/licenses/LICENSE-2.0
<p>Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing permissions and
limitations under the License.
*/
use crate::models::matches::Range;
use regex::Regex;
use std::collections::HashMap;
use tree_sitter::{Node, TreeCursor};
use crate::models::capture_group_patterns::ConcreteSyntax;
use crate::models::matches::Match;
// Precompile the regex outside the function
lazy_static! {
    // Matches a template variable `:[name]` at the start of a template and
    // captures the name as `var_name`.
    static ref RE_VAR: Regex = Regex::new(r"^:\[(?P<var_name>\w+)\]").unwrap();
}
// Struct to avoid dealing with lifetimes
/// An AST node captured by a template variable: its source range and an
/// owned copy of its text.
#[derive(Clone, PartialEq, Eq)]
pub struct CapturedNode {
  range: Range,
  text: String,
}
/// Collects every match of the `ConcreteSyntax` template `meta` against
/// `node` (and, when `recursive` is set, against all of its descendants).
///
/// `replace_node` selects which captured tag becomes the reported matched
/// span; `None` or `"*"` means the whole matched node. Returns the matches
/// together with a flag that is `true` when at least one match was found.
///
/// # Panics
/// Panics if `replace_node` names a tag that was not captured.
pub(crate) fn get_all_matches_for_concrete_syntax(
  node: &Node, code_str: &[u8], meta: &ConcreteSyntax, recursive: bool,
  replace_node: Option<String>,
) -> (Vec<Match>, bool) {
  let mut matches: Vec<Match> = Vec::new();
  // The tuple pattern `(.., true)` only binds when the match succeeded.
  if let (mut match_map, true) = get_matches_for_node(&mut node.walk(), code_str, meta) {
    let replace_node_key = replace_node.clone().unwrap_or("*".to_string());
    let replace_node_match = if replace_node_key != "*" {
      match_map
        .get(&replace_node_key)
        .cloned()
        .unwrap_or_else(|| {
          panic!("The tag {replace_node_key} provided in the replace node is not present")
        })
    } else {
      // "*" / no tag: the entire matched node is the replacement target.
      CapturedNode {
        range: Range::from(node.range()),
        text: node.utf8_text(code_str).unwrap().to_string(),
      }
    };
    match_map.insert(replace_node_key, replace_node_match.clone());
    matches.push(Match {
      matched_string: replace_node_match.text,
      range: replace_node_match.range,
      matches: match_map.into_iter().map(|(k, v)| (k, v.text)).collect(),
      associated_comma: None,
      associated_comments: Vec::new(),
    });
  }
  // Optionally repeat the search for every child subtree.
  if recursive {
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
      if let (mut inner_matches, true) =
        get_all_matches_for_concrete_syntax(&child, code_str, meta, recursive, replace_node.clone())
      {
        matches.append(&mut inner_matches);
      }
    }
  }
  let is_empty = matches.is_empty();
  (matches, !is_empty)
}
/// Advances the cursor to the next node in a post-sibling traversal: the
/// node's next sibling if one exists, otherwise the nearest ancestor's next
/// sibling. If no such node exists the cursor comes to rest at the root.
///
/// # Arguments
///
/// * `cursor` - A mutable reference to a `TreeCursor` used to navigate the tree.
fn find_next_sibling(cursor: &mut TreeCursor) {
  loop {
    if cursor.goto_next_sibling() {
      return;
    }
    if !cursor.goto_parent() {
      return;
    }
  }
}
/// This function performs the actual matching of the ConcreteSyntax pattern against a syntax tree
/// node. The matching is done in the following way:
///
/// - If the ConcreteSyntax is empty and all the nodes have been visited, then we found a match!
///
/// - If the ConcreteSyntax starts with `:[variable]`, the function tries to match the variable
/// against all possible AST nodes starting at the current's cursor position (i.e., the node itself,
/// its first child, the child of the first child, and so on.)
/// If it succeeds, it advances the ConcreteSyntax by the length of the matched
/// AST node and calls itself recursively to try to match the rest of the ConcreteSyntax.
///
/// - If the ConcreteSyntax doesn't start with `:[variable]`, the function checks if the node is a leaf
/// (i.e., has no children). If it is, and its text starts with the meta syntax, we match the text,
/// and advance to the next immediate node (i.e., its sibling or its parent's sibling). If it does not
/// match we cannot match the meta syntax template.
///
/// - If the ConcreteSyntax doesn't start with `:[variable]` and the node is not a leaf, the function
/// moves the cursor to the first child of the node and calls itself recursively to try to match
/// the ConcreteSyntax.
pub(crate) fn get_matches_for_node(
  cursor: &mut TreeCursor, source_code: &[u8], meta: &ConcreteSyntax,
) -> (HashMap<String, CapturedNode>, bool) {
  let match_template = meta.0.as_str();
  if match_template.is_empty() {
    // Empty template: success only if the tree is fully consumed, i.e.
    // there is neither a next sibling nor a parent left to visit.
    return (
      HashMap::new(),
      !cursor.goto_next_sibling() && !cursor.goto_parent(),
    );
  }
  let node = cursor.node();
  // In case the template starts with :[var_name], we try match
  if let Some(caps) = RE_VAR.captures(match_template) {
    let var_name = &caps["var_name"];
    let meta_adv_len = caps[0].len();
    // Template remaining after consuming `:[var_name]` (leading spaces trimmed).
    let meta_advanced = ConcreteSyntax(
      match_template[meta_adv_len..]
        .to_string()
        .trim_start()
        .to_string(),
    );
    // If we need to match a variable `:[var]`, we can match it against the next node or any of its
    // first children. We need to try all possibilities.
    loop {
      // Tentatively bind the variable to the node at the current cursor
      // position, then try to match the rest of the template after it.
      let mut tmp_cursor = cursor.clone();
      let current_node = cursor.node();
      let current_node_code = current_node.utf8_text(source_code).unwrap();
      find_next_sibling(&mut tmp_cursor);
      if let (mut recursive_matches, true) =
        get_matches_for_node(&mut tmp_cursor, source_code, &meta_advanced)
      {
        // If we already matched this variable, we need to make sure that the match is the same. Otherwise, we were unsuccessful.
        // No other way of unrolling exists.
        if recursive_matches.contains_key(var_name)
          && recursive_matches[var_name].text.trim() != current_node_code.trim()
        {
          return (HashMap::new(), false);
        }
        recursive_matches.insert(
          var_name.to_string(),
          CapturedNode {
            range: Range::from(current_node.range()),
            text: current_node_code.to_string(),
          },
        );
        return (recursive_matches, true);
      }
      // Binding failed: shrink the candidate by descending to the first
      // child and retry; give up when there is nothing to descend into.
      if !cursor.goto_first_child() {
        break;
      }
    }
  } else if node.child_count() == 0 {
    // Leaf node: its (trimmed) text must be a prefix of the template.
    let code = node.utf8_text(source_code).unwrap().trim();
    if match_template.starts_with(code) && !code.is_empty() {
      let advance_by = code.len();
      // Can only advance if there is still enough chars to consume
      if advance_by > match_template.len() {
        return (HashMap::new(), false);
      }
      let meta_substring = ConcreteSyntax(
        match_template[advance_by..]
          .to_string()
          .trim_start()
          .to_owned(),
      );
      // Consume the leaf and continue with the remaining template.
      find_next_sibling(cursor);
      return get_matches_for_node(cursor, source_code, &meta_substring);
    }
  } else {
    // Non-leaf node: descend and match the template against its children.
    cursor.goto_first_child();
    return get_matches_for_node(cursor, source_code, meta);
  }
  (HashMap::new(), false)
}
#[cfg(test)]
#[path = "unit_tests/concrete_syntax_test.rs"]
mod concrete_syntax_test;
|
use std::str::Chars;
use std::iter::Peekable;
use errors::{CalcrResult, CalcrError};
use token::Token;
use token::TokVal::*;
use token::OpKind::*;
use token::DelimKind::*;
/// Tokenizes `eq` from the start, returning the full token stream or the
/// first lexing error encountered.
pub fn lex_equation(eq: &String) -> CalcrResult<Vec<Token>> {
    Lexer {
        pos: 0,
        iter: eq.chars().peekable(),
    }
    .lex_expression()
}
/// Hand-written lexer state: a peekable char iterator over the input plus
/// the number of characters consumed so far (used for token spans).
pub struct Lexer<'a> {
    // Count of chars consumed (char index, not byte offset).
    pos: usize,
    iter: Peekable<Chars<'a>>,
}
impl<'a> Lexer<'a> {
    /// Lexes the entire input into tokens, skipping whitespace between them.
    /// Stops at end of input; returns the first error from a sub-lexer.
    pub fn lex_expression(&mut self) -> CalcrResult<Vec<Token>> {
        let mut out = Vec::new();
        loop {
            self.consume_whitespace();
            // Dispatch on the first character: digit -> number,
            // letter -> name, anything else -> single-char token.
            let tok = match self.peek_char() {
                Some(ch) if ch.is_numeric() => try!(self.lex_number()),
                Some(ch) if ch.is_alphabetic() => try!(self.lex_name()),
                Some(_) => try!(self.lex_single_char()),
                None => break,
            };
            out.push(tok);
        }
        Ok(out)
    }
    /// Lexes a number literal (digits and '.') and parses it as `f64`.
    fn lex_number(&mut self) -> CalcrResult<Token> {
        let num_str = self.consume_while(|ch| ch.is_numeric() || ch == '.');
        if let Ok(num) = num_str.parse::<f64>() {
            Ok(Token {
                val: Num(num),
                // NOTE(review): span start uses the byte length of `num_str`
                // while `pos` counts chars — consistent only because this
                // branch consumes ASCII; confirm no non-ASCII digits reach it.
                span: (self.pos - num_str.len(), self.pos),
            })
        } else {
            Err(CalcrError {
                desc: format!("Invalid number: {}", num_str),
                span: Some((self.pos - num_str.len(), self.pos)),
            })
        }
    }
    /// Lexes an alphanumeric name; `consume_char` lowercases as it goes,
    /// so the resulting name is lowercase.
    fn lex_name(&mut self) -> CalcrResult<Token> {
        let name_str = self.consume_while(|ch| ch.is_alphabetic() || ch.is_numeric());
        // Char count (not byte length) so spans stay correct for non-ASCII.
        let len = name_str.chars().count();
        Ok(Token {
            val: Name(name_str),
            span: (self.pos - len, self.pos),
        })
    }
    /// Lexes a one-character token: operator, delimiter, or '√' (sugar for
    /// the "sqrt" name). Any other character is an error.
    fn lex_single_char(&mut self) -> CalcrResult<Token> {
        let val = match self.consume_char() {
            '+' => Op(Plus),
            '-' => Op(Minus),
            '*' => Op(Mult),
            '/' => Op(Div),
            '^' => Op(Pow),
            '!' => Op(Fact),
            '=' => Op(Assign),
            '√' => Name("sqrt".to_string()),
            '(' => OpenDelim(Paren),
            '[' => OpenDelim(Bracket),
            '{' => OpenDelim(Brace),
            ')' => CloseDelim(Paren),
            ']' => CloseDelim(Bracket),
            '}' => CloseDelim(Brace),
            '|' => AbsDelim,
            ch => return Err(CalcrError {
                desc: format!("Invalid char: {}", ch),
                span: Some((self.pos - 1, self.pos)),
            }),
        };
        Ok(Token {
            val: val,
            span: (self.pos - 1, self.pos),
        })
    }
    /// Peeks at the next `char` and returns `Some` if one was found, or `None` if none are left
    fn peek_char(&mut self) -> Option<char> {
        self.iter.peek().map(|ch| *ch)
    }
    /// Consumes a `char` - thereby advanding `pos` - and returns it
    /// (lowercased: note the `to_lowercase()` below).
    ///
    /// # Panics
    /// This function panics if there are no more chars to consume
    fn consume_char(&mut self) -> char {
        let ch = self.iter.next();
        self.pos += 1;
        ch.unwrap().to_lowercase().next().unwrap()
    }
    /// Consumes `char`s long as `pred` returns true and we are not eof
    ///
    /// The `char`s are returned as a `String`. Note that unlike `consume_char` this function will
    /// not panic.
    fn consume_while<F>(&mut self, pred: F) -> String where F: Fn(char) -> bool {
        let mut out = String::new();
        loop {
            match self.peek_char() {
                Some(ch) if pred(ch) => out.push(self.consume_char()),
                _ => break,
            }
        }
        out
    }
    /// Consumes any current whitespace
    fn consume_whitespace(&mut self) {
        self.consume_while(|ch| ch.is_whitespace());
    }
}
#[cfg(test)]
mod tests {
    // Lexer unit tests. Spans are measured in characters (not bytes), which
    // the utf8/double_width cases below depend on.
    use super::lex_equation;
    use token::Token;
    use token::TokVal::*;
    use token::OpKind::*;
    use token::DelimKind::*;
    #[test]
    fn empty() {
        let eq = "".to_string();
        let toks = lex_equation(&eq);
        assert_eq!(toks, Ok(vec!()));
    }
    #[test]
    fn single_char() {
        let eq = "2".to_string();
        let toks = lex_equation(&eq);
        assert_eq!(toks, Ok(vec!(Token { val: Num(2.0), span: (0, 1) })));
    }
    // Multi-byte chars must still produce char-indexed spans.
    #[test]
    fn utf8() {
        let eq = "π𐍈".to_string();
        let toks = lex_equation(&eq);
        assert_eq!(toks, Ok(vec!(Token { val: Name(eq), span: (0, 2) })));
    }
    #[test]
    fn double_width() {
        let eq = "指事字假借".to_string();
        let toks = lex_equation(&eq);
        assert_eq!(toks, Ok(vec!(Token { val: Name(eq), span: (0, 5) })));
    }
    #[test]
    fn ops() {
        let eq = "+-*/!^".to_string();
        let toks = lex_equation(&eq);
        assert_eq!(toks, Ok(vec!(Token { val: Op(Plus), span: (0,1) },
            Token { val: Op(Minus), span: (1,2) },
            Token { val: Op(Mult), span: (2,3) },
            Token { val: Op(Div), span: (3,4) },
            Token { val: Op(Fact), span: (4,5) },
            Token { val: Op(Pow), span: (5,6) })));
    }
    #[test]
    fn delims() {
        let eq = "|()[]{}".to_string();
        let toks = lex_equation(&eq);
        assert_eq!(toks, Ok(vec!(Token { val: AbsDelim, span: (0,1) },
            Token { val: OpenDelim(Paren), span: (1,2) },
            Token { val: CloseDelim(Paren), span: (2,3) },
            Token { val: OpenDelim(Bracket), span: (3,4) },
            Token { val: CloseDelim(Bracket), span: (4,5) },
            Token { val: OpenDelim(Brace), span: (5,6) },
            Token { val: CloseDelim(Brace), span: (6,7) })));
    }
    // '√' lexes as the name "sqrt" but spans a single char.
    #[test]
    fn sqrt_single_char() {
        let eq = "√".to_string();
        let toks = lex_equation(&eq);
        assert_eq!(toks, Ok(vec!(Token { val: Name("sqrt".to_string()), span: (0,1) })));
    }
    #[test]
    fn invalid_char() {
        let eq = "?".to_string();
        let err = lex_equation(&eq);
        assert!(err.is_err());
    }
}
extern crate mio;
extern crate rand;
extern crate rustc_serialize;
extern crate uuid;
extern crate ws;
mod models;
mod server;
mod engine;
mod game;
mod messages;
/// Entry point: delegates to `server::start()` and blocks until it returns.
fn main() {
    server::start();
}
|
/// An enum to represent all characters in the Specials block.
/// (Code points U+FFF9 ..= U+FFFD; most of these characters are invisible,
/// so the per-variant doc comments may appear empty in an editor.)
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Specials {
    /// \u{fff9}: ''
    InterlinearAnnotationAnchor,
    /// \u{fffa}: ''
    InterlinearAnnotationSeparator,
    /// \u{fffb}: ''
    InterlinearAnnotationTerminator,
    /// \u{fffc}: ''
    ObjectReplacementCharacter,
    /// \u{fffd}: '�'
    ReplacementCharacter,
}
/// Converts a variant to its underlying `char`.
///
/// Implemented as `From` (the idiomatic direction); the blanket impl still
/// provides `Specials: Into<char>`, so existing `.into()` call sites work
/// unchanged. The previously invisible literal characters are written as
/// explicit `\u{...}` escapes for readability.
impl From<Specials> for char {
    fn from(s: Specials) -> char {
        match s {
            Specials::InterlinearAnnotationAnchor => '\u{fff9}',
            Specials::InterlinearAnnotationSeparator => '\u{fffa}',
            Specials::InterlinearAnnotationTerminator => '\u{fffb}',
            Specials::ObjectReplacementCharacter => '\u{fffc}',
            Specials::ReplacementCharacter => '\u{fffd}',
        }
    }
}
/// Maps a character in the Specials block (U+FFF9..=U+FFFD) back to its
/// variant; any other character yields `Err(())`.
///
/// The previously invisible literal characters are written as explicit
/// `\u{...}` escapes for readability; the matched values are unchanged.
impl std::convert::TryFrom<char> for Specials {
    type Error = ();
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            '\u{fff9}' => Ok(Specials::InterlinearAnnotationAnchor),
            '\u{fffa}' => Ok(Specials::InterlinearAnnotationSeparator),
            '\u{fffb}' => Ok(Specials::InterlinearAnnotationTerminator),
            '\u{fffc}' => Ok(Specials::ObjectReplacementCharacter),
            '\u{fffd}' => Ok(Specials::ReplacementCharacter),
            _ => Err(()),
        }
    }
}
impl Into<u32> for Specials {
fn into(self) -> u32 {
let c: char = self.into();
let hex = c
.escape_unicode()
.to_string()
.replace("\\u{", "")
.replace("}", "");
u32::from_str_radix(&hex, 16).unwrap()
}
}
impl std::convert::TryFrom<u32> for Specials {
    type Error = ();
    /// Attempts the conversion by first interpreting `u` as a `char`, then
    /// delegating to the `TryFrom<char>` impl.
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        match char::try_from(u) {
            Ok(c) => Self::try_from(c),
            Err(_) => Err(()),
        }
    }
}
impl Iterator for Specials {
    type Item = Self;
    /// Steps to the next character in the block, or `None` past the last one.
    ///
    /// Fix: the original computed the successor but never assigned it back to
    /// `*self`, so the iterator yielded the same value forever and never
    /// terminated.
    fn next(&mut self) -> Option<Self> {
        let index: u32 = (*self).into();
        use std::convert::TryFrom;
        let succ = Self::try_from(index + 1).ok()?;
        *self = succ;
        Some(succ)
    }
}
impl Specials {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Self::InterlinearAnnotationAnchor
    }

    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        let debug_name = std::format!("Specials{:#?}", self);
        string_morph::to_sentence_case(&debug_name)
    }
}
|
// Auto-generated (svd2rust) read-only accessors for the FDCAN TT Capture
// Time register (FDCAN_TTCPT). Keep structurally identical to the generator
// output so the file can be regenerated from the SVD; only comments are
// hand-added here.
#[doc = "Register `FDCAN_TTCPT` reader"]
pub type R = crate::R<FDCAN_TTCPT_SPEC>;
#[doc = "Field `CT` reader - Cycle Count Value"]
pub type CT_R = crate::FieldReader;
#[doc = "Field `SWV` reader - Stop Watch Value"]
pub type SWV_R = crate::FieldReader<u16>;
impl R {
    #[doc = "Bits 0:5 - Cycle Count Value"]
    #[inline(always)]
    pub fn ct(&self) -> CT_R {
        CT_R::new((self.bits & 0x3f) as u8)
    }
    #[doc = "Bits 16:31 - Stop Watch Value"]
    #[inline(always)]
    pub fn swv(&self) -> SWV_R {
        SWV_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
}
#[doc = "FDCAN TT Capture Time Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fdcan_ttcpt::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FDCAN_TTCPT_SPEC;
impl crate::RegisterSpec for FDCAN_TTCPT_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`fdcan_ttcpt::R`](R) reader structure"]
impl crate::Readable for FDCAN_TTCPT_SPEC {}
#[doc = "`reset()` method sets FDCAN_TTCPT to value 0"]
impl crate::Resettable for FDCAN_TTCPT_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::cmp::Reverse;
use std::collections::BinaryHeap;
use std::collections::HashMap;
use crate::position::Pos;
use crate::position::{neighbors, Dir};
/// Reconstructs the full path from the two meeting cells of the
/// bidirectional search: each `prev` chain is walked back to its root
/// (roots reference themselves), then the halves are stitched together.
fn make_path(first_middle: Pos, second_middle: Pos, back: &HashMap<Pos, PosData>) -> Vec<Pos> {
    // Walk the predecessor chain until the self-referencing root.
    let trace = |from: Pos| -> Vec<Pos> {
        let mut chain = vec![from];
        loop {
            let last = *chain.last().unwrap();
            let prev = back[&last].prev;
            if prev == last {
                break chain;
            }
            chain.push(prev);
        }
    };
    let mut half_a = trace(first_middle);
    let mut half_b = trace(second_middle);
    // Orient the result so the half rooted at the larger endpoint comes last.
    if half_b.last().unwrap() > half_a.last().unwrap() {
        half_a.reverse();
        half_a.append(&mut half_b);
        half_a
    } else {
        half_b.reverse();
        half_b.append(&mut half_a);
        half_b
    }
}
/// Per-cell bookkeeping for the bidirectional search.
#[derive(Clone, Copy)]
struct PosData {
    pub dist: usize, // g-cost: steps from this cell's own search root
    pub prev: Pos,   // predecessor on the best known path (roots point at themselves)
    pub target: Pos, // which endpoint this cell's frontier is searching toward
}
/// Bidirectional A* between `start` and the fixed goal position.
///
/// Two frontiers grow at once — one rooted at `start`, one at `end`, each
/// tagged with the `target` it aims at. When a popped node is already
/// settled by the *other* frontier, the searches have met and the two
/// predecessor chains are stitched into the final path by `make_path`.
///
/// NOTE(review): the name is not snake_case; kept for existing callers.
pub fn A_star(start: Pos) -> Vec<Pos> {
    let end: Pos = Pos(0xfedcba9876543210); // 18364758544493064720
    if start == end {
        return vec![start];
    }
    // Settled cells: distance from own root, predecessor, owning frontier.
    let mut positions_data: HashMap<Pos, PosData> = HashMap::new(); // dist, target
    positions_data.insert(
        start,
        PosData {
            dist: 0,
            prev: start, // roots reference themselves (termination sentinel)
            target: end,
        },
    );
    positions_data.insert(
        end,
        PosData {
            dist: 0,
            prev: end,
            target: start,
        },
    );
    // Min-heap on f = g + manhattan heuristic (Reverse turns the max-heap
    // into a min-heap). Entries are (f, node, predecessor).
    let mut queue: BinaryHeap<(Reverse<usize>, Pos, Pos)> = BinaryHeap::new();
    // Seed both frontiers with the neighbors of their roots.
    for start_neib in neighbors(start) {
        let dist = 1 + start_neib.manhattan(end);
        queue.push((Reverse(dist), start_neib, start));
    }
    for end_neib in neighbors(end) {
        let dist = 1 + end_neib.manhattan(start);
        queue.push((Reverse(dist), end_neib, end));
    }
    while let Some((_, pos, prev)) = queue.pop() {
        let prev_data = positions_data[&prev];
        if positions_data.contains_key(&pos) {
            let curr_data = positions_data[&pos];
            // Node already settled by the opposite frontier: meeting point.
            if curr_data.target != prev_data.target {
                return make_path(pos, prev, &positions_data);
            }
        } else {
            let dist = prev_data.dist + 1;
            positions_data.insert(
                pos,
                PosData {
                    dist,
                    prev,
                    target: prev_data.target,
                },
            );
            for new_pos in neighbors(pos) {
                // f-cost of stepping to the neighbor, toward this
                // frontier's own goal.
                let new_path_len = dist + 1 + new_pos.manhattan(prev_data.target);
                if !positions_data.contains_key(&new_pos) {
                    queue.push((Reverse(new_path_len), new_pos, pos));
                } else {
                    // Re-push nodes owned by the other frontier so a
                    // meeting point can still be popped later.
                    if positions_data[&new_pos].target != prev_data.target {
                        queue.push((Reverse(new_path_len), new_pos, pos));
                    }
                }
            }
        }
    }
    // The two frontiers must eventually meet in a connected space.
    unreachable!()
}
/// Returns the move direction leading from `first` to `second`, or
/// `Dir::End` when `second` is not adjacent to `first`.
fn dir(first: Pos, second: Pos) -> Dir {
    let mut candidates = neighbors(first);
    loop {
        match candidates.next() {
            // The neighbors iterator remembers which direction produced
            // the position it just yielded.
            Some(p) if p == second => return candidates.get_dir(),
            Some(_) => {}
            None => return Dir::End,
        }
    }
}
/// Converts a path of positions into the sequence of moves between each
/// pair of consecutive positions. Paths of length 0 or 1 yield no moves.
pub fn solution(path: Vec<Pos>) -> Vec<Dir> {
    path.windows(2).map(|pair| dir(pair[0], pair[1])).collect()
}
#[cfg(test)]
mod tests {
    use crate::position::maze;
    use crate::A_star::{solution, A_star};

    /// Smoke test: the search completes on the fixture maze and the
    /// resulting move list can be printed.
    #[test]
    fn A_star_test() {
        let moves = solution(A_star(maze()));
        println!("{:?}", moves);
    }
}
|
use spectral::prelude::*;
use soapier::{self, wsdl};
#[wsdl("./fixtures/iptocountry.wsdl")]
#[test]
fn test_creates_find_country_as_string() {
} |
/// Iterator producing the FizzBuzz sequence for `length` values starting
/// at `starting_value`.
struct FizzBuzzer {
    next: u32, // next value to yield
    max: u32,  // last value to yield (inclusive); next > max means exhausted
}

impl FizzBuzzer {
    /// Creates an iterator over `length` consecutive values starting at
    /// `starting_value`. A `length` of 0 yields an empty iterator.
    fn new(starting_value: u32, length: u32) -> Self {
        if length == 0 {
            // BUG FIX: the old code used `max = 0` for an empty range,
            // which wrongly yielded one element when starting_value == 0.
            // next > max guarantees immediate exhaustion for any start.
            return FizzBuzzer { next: 1, max: 0 };
        }
        // Saturate instead of overflowing (the old addition could panic
        // in debug builds for large starting_value + length).
        let max = starting_value.saturating_add(length - 1);
        FizzBuzzer { next: starting_value, max }
    }
}

impl Iterator for FizzBuzzer {
    type Item = String;

    /// Yields the FizzBuzz word (or decimal string) for the next value.
    fn next(&mut self) -> Option<Self::Item> {
        if self.next > self.max {
            return None;
        }
        // Divisibility by 3 and/or 5 selects the classic words.
        let s = match (self.next % 3 == 0, self.next % 5 == 0) {
            (false, false) => self.next.to_string(),
            (true, false) => String::from("Fizz"),
            (false, true) => String::from("Buzz"),
            (true, true) => String::from("FizzBuzz"),
        };
        self.next += 1;
        // `s` is already a String; the old `String::from(s)` was redundant.
        Some(s)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke test: print the classic 1..=100 FizzBuzz sequence.
    #[test]
    fn it_works() {
        FizzBuzzer::new(1, 100).for_each(|line| println!("{}", line));
    }
}
|
//! Inline Python code directly in your Rust code.
//!
//! # Example
//!
//! ```
//! #![feature(proc_macro_hygiene)]
//! use inline_python::python;
//!
//! fn main() {
//! let who = "world";
//! let n = 5;
//! python! {
//! for i in range('n):
//! print(i, "Hello", 'who)
//! print("Goodbye")
//! }
//! }
//! ```
//!
//! # How to use
//!
//! Use the `python!{..}` macro to write Python code directly in your Rust code.
//! You'll need to add `#![feature(proc_macro_hygiene)]`, and use a nightly
//! version of the compiler that supports this feature.
//!
//! ## Using Rust variables
//!
//! To reference Rust variables, use `'var`, as shown in the example above.
//! `var` needs to implement [`pyo3::ToPyObject`].
//!
//! ## Re-using a Python context
//! It is possible to create a [`Context`] object ahead of time and use it for running the Python code.
//! The context can be re-used for multiple invocations to share global variables across macro calls.
//!
//! ```
//! # #![feature(proc_macro_hygiene)]
//! # use inline_python::python;
//! let c = inline_python::Context::new();
//! python! {
//! #![context = &c]
//! foo = 5
//! }
//! python! {
//! #![context = &c]
//! assert foo == 5
//! }
//! ```
//!
//! ## Getting information back
//!
//! A [`Context`] object could also be used to pass information back to Rust,
//! as you can retrieve the global Python variables from the context through
//! [`Context::get_global`].
//!
//! ## Syntax issues
//!
//! Since the Rust tokenizer will tokenize the Python code, some valid Python
//! code is rejected. The two main things to remember are:
//!
//! - Use double quoted strings (`""`) instead of single quoted strings (`''`).
//!
//! (Single quoted strings only work if they contain a single character, since
//! in Rust, `'a'` is a character literal.)
//!
//! - Use `//`-comments instead of `#`-comments.
//!
//! (If you use `#` comments, the Rust tokenizer will try to tokenize your
//! comment, and complain if your comment doesn't tokenize properly.)
//!
//! Other minor things that don't work are:
//!
//! - Certain escape codes in string literals.
//! (Specifically: `\a`, `\b`, `\f`, `\v`, `\N{..}`, `\123` (octal escape
//! codes), `\u`, and `\U`.)
//!
//! These, however, are accepted just fine: `\\`, `\n`, `\t`, `\r`, `\xAB`
//! (hex escape codes), and `\0`
//!
//! - Raw string literals with escaped double quotes. (E.g. `r"...\"..."`.)
//!
//! - Triple-quoted byte- and raw-strings with content that would not be valid
//! as a regular string. And the same for raw-byte and raw-format strings.
//! (E.g. `b"""\xFF"""`, `r"""\z"""`, `fr"\z"`, `br"\xFF"`.)
//!
//! - The `//` and `//=` operators are unusable, as they start a comment.
//!
//! Workaround: you can write `##` instead, which is automatically converted
//! to `//`.
//!
//! Everything else should work fine.
use std::os::raw::c_char;
pub use inline_python_macros::python;
pub use pyo3;
use pyo3::{
ffi,
types::{PyAny, PyDict},
AsPyPointer, FromPyObject, IntoPy, PyErr, PyObject, PyResult, Python,
};
#[doc(hidden)]
pub use std::ffi::CStr;
/// An execution context for Python code.
///
/// If you pass a manually created context to the `python!{}` macro, you can share it across invocations.
/// This will keep all global variables and imports intact between macro invocations.
///
/// ```
/// # #![feature(proc_macro_hygiene)]
/// # use inline_python::python;
/// let c = inline_python::Context::new();
/// python! {
/// #![context = &c]
/// foo = 5
/// }
/// python! {
/// #![context = &c]
/// assert foo == 5
/// }
/// ```
///
/// You may also use it to inspect global variables after the execution of the Python code.
/// Note that you need to acquire the GIL in order to access those globals:
///
/// ```
/// # #![feature(proc_macro_hygiene)]
/// use inline_python::python;
/// let context = inline_python::Context::new();
/// python! {
/// #![context = &context]
/// foo = 5
/// }
///
/// let foo: Option<i32> = context.get_global("foo").unwrap();
/// assert_eq!(foo, Some(5));
/// ```
pub struct Context {
globals: PyObject,
}
impl Context {
    /// Create a new context for running python code.
    ///
    /// This function temporarily acquires the GIL.
    /// If you already have the GIL, use [`Context::new_with_gil`] instead.
    ///
    /// This function panics if it fails to create the context.
    /// See [`Context::new_checked`] for a version that returns a result.
    pub fn new() -> Self {
        let gil = Python::acquire_gil();
        let py = gil.python();
        Self::new_with_gil(py).unwrap_or_else(|error| {
            // Surface the Python traceback before aborting.
            error.print(py);
            panic!("failed to create python context");
        })
    }

    /// Create a new context for running python code.
    ///
    /// This function temporarily acquires the GIL.
    /// If you already have the GIL, use [`Context::new_with_gil`] instead.
    pub fn new_checked() -> PyResult<Self> {
        let gil = Python::acquire_gil();
        Self::new_with_gil(gil.python())
    }

    /// Create a new context for running Python code.
    ///
    /// You must acquire the GIL to call this function.
    pub fn new_with_gil(py: Python) -> PyResult<Self> {
        // SAFETY: the module name is a valid NUL-terminated C string; a
        // null return is converted into the pending Python exception.
        let main_module = unsafe { ffi::PyImport_AddModule("__main__\0".as_ptr() as *const _) };
        if main_module.is_null() {
            return Err(PyErr::fetch(py));
        }
        // Seed this context's globals with a copy of __main__'s dict.
        let globals = PyDict::new(py);
        // SAFETY: both pointers refer to live dictionary objects.
        let merge_failed = unsafe {
            ffi::PyDict_Merge(globals.as_ptr(), ffi::PyModule_GetDict(main_module), 0) != 0
        };
        if merge_failed {
            return Err(PyErr::fetch(py));
        }
        Ok(Self {
            globals: globals.into_py(py),
        })
    }

    /// Get the globals as dictionary.
    pub fn globals<'p>(&self, py: Python<'p>) -> &'p PyDict {
        // SAFETY: `self.globals` always holds the dict created in
        // `new_with_gil`, and the `py` token proves the GIL is held.
        unsafe { py.from_borrowed_ptr(self.globals.as_ptr()) }
    }

    /// Retrieve a global variable from the context.
    ///
    /// This function temporarily acquires the GIL.
    /// If you already have the GIL, use [`Context::get_global_with_gil`] instead.
    pub fn get_global<T: for<'p> FromPyObject<'p>>(&self, name: &str) -> PyResult<Option<T>> {
        let gil = Python::acquire_gil();
        self.get_global_with_gil(gil.python(), name)
    }

    /// Retrieve a global variable from the context.
    pub fn get_global_with_gil<'p, T: FromPyObject<'p>>(&self, py: Python<'p>, name: &str) -> PyResult<Option<T>> {
        // Missing name -> Ok(None); present name -> extraction result.
        self.globals(py)
            .get_item(name)
            .map(FromPyObject::extract)
            .transpose()
    }
}
/// Executes pre-compiled (marshalled) Python byte-code in `context`.
///
/// `rust_vars` is exposed to the Python side as a global dict named
/// `RUST` (an empty dict when `None`). Returns the evaluation result, or
/// the raised Python exception as a `PyErr`.
#[doc(hidden)]
pub fn run_python_code<'p>(py: Python<'p>, context: &Context, compiled_code: &[u8], rust_vars: Option<&PyDict>) -> PyResult<&'p PyAny> {
    unsafe {
        // Add the rust variable in a global dictionary named RUST.
        // If no rust vars are given, make the RUST global an empty dictionary.
        let rust_vars = rust_vars.unwrap_or_else(|| PyDict::new(py)).as_ptr();
        if ffi::PyDict_SetItemString(context.globals.as_ptr(), "RUST\0".as_ptr() as *const _, rust_vars) != 0 {
            return Err(PyErr::fetch(py));
        }
        // Unmarshal the code object, then evaluate it. Locals is null, so
        // the globals dict doubles as the local scope (module-level
        // execution semantics).
        let compiled_code = python_unmarshal_object_from_bytes(py, compiled_code)?;
        let result = ffi::PyEval_EvalCode(compiled_code.as_ptr(), context.globals.as_ptr(), std::ptr::null_mut());
        py.from_owned_ptr_or_err(result)
    }
}
// `PyMarshal_ReadObjectFromString` is part of CPython's C API but is not
// exposed by pyo3's `ffi` bindings, so it is declared manually here.
extern "C" {
    fn PyMarshal_ReadObjectFromString(data: *const c_char, len: isize) -> *mut ffi::PyObject;
}
/// Use built-in python marshal support to read an object from bytes.
fn python_unmarshal_object_from_bytes(py: Python, data: &[u8]) -> pyo3::PyResult<PyObject> {
    // SAFETY: the pointer/length pair describes the live `data` slice; a
    // null result is converted into the pending Python exception, and a
    // non-null result is a new reference we take ownership of.
    unsafe {
        let raw = PyMarshal_ReadObjectFromString(data.as_ptr() as *const c_char, data.len() as isize);
        if raw.is_null() {
            Err(PyErr::fetch(py))
        } else {
            Ok(PyObject::from_owned_ptr(py, raw))
        }
    }
}
|
use std::{cmp, path::PathBuf};
use std::{str::FromStr, sync::Arc};
use druid::{
piet::PietTextLayout, widget::SvgData, Affine, Command, Env, Event, EventCtx,
PaintCtx, Point, Rect, RenderContext, Size, Target, TextLayout, Vec2, Widget,
WidgetId, WindowId,
};
use include_dir::{include_dir, Dir};
use lapce_proxy::dispatch::FileNodeItem;
use parking_lot::Mutex;
use crate::{
command::LapceCommand, command::LapceUICommand, command::LAPCE_UI_COMMAND,
editor::EditorSplitState, movement::LinePosition, movement::Movement,
palette::file_svg, palette::svg_tree_size, panel::PanelPosition,
panel::PanelProperty, state::LapceFocus, state::LapceUIState,
state::LAPCE_APP_STATE, theme::LapceTheme,
};
pub const ICONS_DIR: Dir = include_dir!("../icons");
/// State backing the file-explorer panel of one editor tab.
#[derive(Clone)]
pub struct FileExplorerState {
    // pub widget_id: WidgetId,
    window_id: WindowId,
    tab_id: WidgetId,
    // cwd: PathBuf,
    /// Root entries of the file tree.
    pub items: Vec<FileNodeItem>,
    // Currently selected row, counted over the flattened visible tree.
    index: usize,
    // Total number of visible rows; kept in sync by `update_count`.
    count: usize,
    position: PanelPosition,
}
impl PanelProperty for FileExplorerState {
    fn position(&self) -> &PanelPosition {
        &self.position
    }

    fn active(&self) -> usize {
        0
    }

    /// Panel size hint: 300px wide, half the available height.
    fn size(&self) -> (f64, f64) {
        (300.0, 0.5)
    }

    /// Paints the visible file rows for each invalidated rect.
    fn paint(&self, ctx: &mut PaintCtx, data: &LapceUIState, env: &Env) {
        let rects = ctx.region().rects().to_vec();
        let size = ctx.size();
        // NOTE(review): `state` is fetched but never used below — confirm
        // whether it can be removed or was intended for later use.
        let state = LAPCE_APP_STATE.get_tab_state(&self.window_id, &self.tab_id);
        let line_height = env.get(LapceTheme::EDITOR_LINE_HEIGHT);
        let width = size.width;
        let index = self.index;
        for rect in rects {
            if let Some(background) = LAPCE_APP_STATE.theme.get("background") {
                ctx.fill(rect, background);
            }
            // Range of row indices intersecting this rect.
            let min = (rect.y0 / line_height).floor() as usize;
            let max = (rect.y1 / line_height) as usize + 1;
            let mut i = 0;
            let level = 0;
            for item in self.items.iter() {
                i = self.paint_item(
                    ctx,
                    min,
                    max,
                    line_height,
                    width,
                    level,
                    i,
                    index,
                    item,
                    env,
                );
                i += 1;
                if i > max {
                    break;
                }
            }
        }
    }
}
impl FileExplorerState {
pub fn new(window_id: WindowId, tab_id: WidgetId) -> FileExplorerState {
let items = Vec::new();
FileExplorerState {
window_id,
tab_id,
items,
index: 0,
count: 0,
position: PanelPosition::LeftTop,
}
}
pub fn get_item(&mut self, index: usize) -> Option<&mut FileNodeItem> {
let mut i = 0;
for item in self.items.iter_mut() {
let result = get_item_children(i, index, item);
if result.0 == index {
return result.1;
}
i = result.0 + 1;
}
None
}
pub fn update_count(&mut self) {
let mut count = 0;
for item in self.items.iter() {
count += get_item_count(item);
}
self.count = count;
}
    /// Dispatches a keyboard command against the explorer and reports
    /// which UI area should own focus afterwards.
    ///
    /// `count` is an optional numeric prefix (repeat count or line number).
    pub fn run_command(
        &mut self,
        ctx: &mut EventCtx,
        data: &mut LapceUIState,
        count: Option<usize>,
        command: LapceCommand,
    ) -> LapceFocus {
        // Most commands move the selection, so repaint unconditionally.
        self.request_paint(ctx);
        match command {
            LapceCommand::Up => {
                self.index = Movement::Up.update_index(
                    self.index,
                    self.count,
                    count.unwrap_or(1),
                    false, // no wrap-around
                );
                LapceFocus::FileExplorer
            }
            LapceCommand::Down => {
                self.index = Movement::Down.update_index(
                    self.index,
                    self.count,
                    count.unwrap_or(1),
                    false,
                );
                LapceFocus::FileExplorer
            }
            LapceCommand::ListNext => {
                self.index = Movement::Down.update_index(
                    self.index,
                    self.count,
                    count.unwrap_or(1),
                    true, // list variants wrap around
                );
                LapceFocus::FileExplorer
            }
            LapceCommand::ListPrevious => {
                self.index = Movement::Up.update_index(
                    self.index,
                    self.count,
                    count.unwrap_or(1),
                    true,
                );
                LapceFocus::FileExplorer
            }
            LapceCommand::GotoLineDefaultFirst => {
                // Jump to row `count`, defaulting to the first row.
                self.index = match count {
                    Some(n) => Movement::Line(LinePosition::Line(n)),
                    None => Movement::Line(LinePosition::First),
                }
                .update_index(self.index, self.count, 1, false);
                LapceFocus::FileExplorer
            }
            LapceCommand::GotoLineDefaultLast => {
                self.index = match count {
                    Some(n) => Movement::Line(LinePosition::Line(n)),
                    None => Movement::Line(LinePosition::Last),
                }
                .update_index(self.index, self.count, 1, false);
                LapceFocus::FileExplorer
            }
            LapceCommand::ListSelect => {
                let index = self.index;
                let state =
                    LAPCE_APP_STATE.get_tab_state(&self.window_id, &self.tab_id);
                // NOTE(review): unwrap assumes the selection index is always
                // a valid row; holds while `count` is kept in sync.
                let item = self.get_item(index).unwrap();
                let path_buf = item.path_buf.clone();
                let is_dir = item.is_dir;
                if !is_dir {
                    // A file: open it in the editor and hand focus over.
                    state.editor_split.lock().open_file(
                        ctx,
                        data,
                        path_buf.to_str().unwrap(),
                    );
                    LapceFocus::Editor
                } else {
                    if item.read {
                        // Contents already loaded: just toggle open/closed.
                        item.open = !item.open;
                        self.update_count();
                        self.request_paint(ctx);
                    } else {
                        // Not read yet: ask the proxy to list the directory
                        // and fill in the children asynchronously.
                        let mut item = item.clone();
                        state.clone().proxy.lock().as_ref().unwrap().read_dir(
                            &path_buf,
                            Box::new(move |result| {
                                let mut file_explorer = state.file_explorer.lock();
                                let current_item = file_explorer.get_item(index);
                                // Bail out if the row now refers to a
                                // different node (tree changed meanwhile).
                                if current_item != Some(&mut item) {
                                    return;
                                }
                                let current_item = current_item.unwrap();
                                current_item.open = true;
                                current_item.read = true;
                                if let Ok(res) = result {
                                    let resp: Result<
                                        Vec<FileNodeItem>,
                                        serde_json::Error,
                                    > = serde_json::from_value(res);
                                    if let Ok(items) = resp {
                                        current_item.children = items;
                                    }
                                }
                                file_explorer.update_count();
                                LAPCE_APP_STATE.submit_ui_command(
                                    LapceUICommand::RequestPaint,
                                    file_explorer.widget_id(),
                                );
                            }),
                        );
                    }
                    LapceFocus::FileExplorer
                }
            }
            _ => LapceFocus::FileExplorer,
        }
    }
fn request_paint(&self, ctx: &mut EventCtx) {
ctx.submit_command(Command::new(
LAPCE_UI_COMMAND,
LapceUICommand::RequestPaint,
Target::Widget(self.widget_id()),
));
}
    /// Paints one tree node (and, recursively, its visible children).
    ///
    /// `i` is the flattened row of `item`; only rows within `min..=max`
    /// are drawn. Returns the row index of the last node visited so the
    /// caller can continue numbering from there.
    fn paint_item(
        &self,
        ctx: &mut PaintCtx,
        min: usize,
        max: usize,
        line_height: f64,
        width: f64,
        level: usize,
        i: usize,
        index: usize,
        item: &FileNodeItem,
        env: &druid::Env,
    ) -> usize {
        if i > max {
            // Past the visible range: nothing left to draw.
            return i;
        }
        if i >= min && i <= max {
            if i == index {
                // Highlight the selected row across the full panel width.
                ctx.fill(
                    Rect::ZERO
                        .with_origin(Point::new(0.0, i as f64 * line_height))
                        .with_size(Size::new(width, line_height)),
                    &env.get(LapceTheme::EDITOR_CURRENT_LINE_BACKGROUND),
                );
            }
            let y = i as f64 * line_height;
            let svg_y = y + 4.0;
            let mut text_layout = TextLayout::<String>::from_text(
                item.path_buf.file_name().unwrap().to_str().unwrap(),
            );
            // Horizontal indent proportional to nesting depth.
            let padding = 15.0 * level as f64;
            if item.is_dir {
                // Chevron reflecting the expanded/collapsed state.
                let icon_name = if item.open {
                    "chevron-down.svg"
                } else {
                    "chevron-right.svg"
                };
                let svg = SvgData::from_str(
                    ICONS_DIR
                        .get_file(icon_name)
                        .unwrap()
                        .contents_utf8()
                        .unwrap(),
                )
                .unwrap();
                svg.to_piet(Affine::translate(Vec2::new(1.0 + padding, svg_y)), ctx);
                // Folder icon, open or closed variant, drawn at half scale.
                let icon_name = if item.open {
                    "default_folder_opened.svg"
                } else {
                    "default_folder.svg"
                };
                let svg = SvgData::from_str(
                    ICONS_DIR
                        .get_file(icon_name)
                        .unwrap()
                        .contents_utf8()
                        .unwrap(),
                )
                .unwrap();
                let scale = 0.5;
                let affine = Affine::new([
                    scale,
                    0.0,
                    0.0,
                    scale,
                    1.0 + 16.0 + padding,
                    svg_y + 1.0,
                ]);
                svg.to_piet(affine, ctx);
            } else {
                // File icon chosen by extension (with a few aliases).
                if let Some(exten) = item.path_buf.extension() {
                    if let Some(exten) = exten.to_str() {
                        let exten = match exten {
                            "rs" => "rust",
                            "md" => "markdown",
                            "cc" => "cpp",
                            _ => exten,
                        };
                        if let Some((svg, svg_tree)) = file_svg(exten) {
                            // Scale the icon to a 13px height.
                            let svg_size = svg_tree_size(&svg_tree);
                            let scale = 13.0 / svg_size.height;
                            let affine = Affine::new([
                                scale,
                                0.0,
                                0.0,
                                scale,
                                1.0 + 18.0 + padding,
                                svg_y + 2.0,
                            ]);
                            svg.to_piet(affine, ctx);
                        }
                    }
                }
            }
            text_layout.set_text_color(LapceTheme::EDITOR_FOREGROUND);
            text_layout.rebuild_if_needed(ctx.text(), env);
            text_layout.draw(ctx, Point::new(38.0 + padding, y + 3.0));
        }
        let mut i = i;
        if item.open {
            // Children occupy the rows immediately after their parent.
            for item in &item.children {
                i = self.paint_item(
                    ctx,
                    min,
                    max,
                    line_height,
                    width,
                    level + 1,
                    i + 1,
                    index,
                    item,
                    env,
                );
                if i > max {
                    return i;
                }
            }
        }
        i
    }
pub fn widget_id(&self) -> WidgetId {
let state = LAPCE_APP_STATE.get_tab_state(&self.window_id, &self.tab_id);
let panel = state.panel.lock();
panel.widget_id(&self.position)
}
}
/// Number of visible rows this node contributes: itself plus, when the
/// directory is open, all of its recursively-counted children.
fn get_item_count(item: &FileNodeItem) -> usize {
    if !item.open {
        return 1;
    }
    1 + item.children.iter().map(get_item_count).sum::<usize>()
}
/// Depth-first walk mapping a flat row index to a tree node.
///
/// `i` is the row index of `item` itself. Returns `(index, Some(node))`
/// when the target row is found; otherwise `(last_row_visited, None)` so
/// the caller can continue counting from there.
fn get_item_children<'a>(
    i: usize,
    index: usize,
    item: &'a mut FileNodeItem,
) -> (usize, Option<&'a mut FileNodeItem>) {
    if i == index {
        return (i, Some(item));
    }
    let mut i = i;
    if item.open {
        // Children occupy the rows immediately after their parent.
        for child in item.children.iter_mut() {
            let (new_index, node) = get_item_children(i + 1, index, child);
            if new_index == index {
                return (new_index, node);
            }
            i = new_index;
        }
    }
    (i, None)
}
// pub struct FileExplorer {
// window_id: WindowId,
// tab_id: WidgetId,
// }
// impl FileExplorer {
// pub fn new(window_id: WindowId, tab_id: WidgetId) -> FileExplorer {
// FileExplorer { window_id, tab_id }
// }
//
// fn paint_item(
// &self,
// ctx: &mut druid::PaintCtx,
// min: usize,
// max: usize,
// line_height: f64,
// width: f64,
// level: usize,
// i: usize,
// index: usize,
// item: &FileNodeItem,
// env: &druid::Env,
// ) -> usize {
// if i > max {
// return i;
// }
// if i >= min && i <= max {
// if i == index {
// ctx.fill(
// Rect::ZERO
// .with_origin(Point::new(0.0, i as f64 * line_height))
// .with_size(Size::new(width, line_height)),
// &env.get(LapceTheme::EDITOR_CURRENT_LINE_BACKGROUND),
// );
// }
// let y = i as f64 * line_height;
// let svg_y = y + 4.0;
// let mut text_layout = TextLayout::<String>::from_text(
// item.path_buf.file_name().unwrap().to_str().unwrap(),
// );
// let padding = 15.0 * level as f64;
// if item.is_dir {
// let icon_name = if item.open {
// "chevron-down.svg"
// } else {
// "chevron-right.svg"
// };
// let svg = SvgData::from_str(
// ICONS_DIR
// .get_file(icon_name)
// .unwrap()
// .contents_utf8()
// .unwrap(),
// )
// .unwrap();
// svg.to_piet(Affine::translate(Vec2::new(1.0 + padding, svg_y)), ctx);
//
// let icon_name = if item.open {
// "default_folder_opened.svg"
// } else {
// "default_folder.svg"
// };
// let svg = SvgData::from_str(
// ICONS_DIR
// .get_file(icon_name)
// .unwrap()
// .contents_utf8()
// .unwrap(),
// )
// .unwrap();
// let scale = 0.5;
// let affine = Affine::new([
// scale,
// 0.0,
// 0.0,
// scale,
// 1.0 + 16.0 + padding,
// svg_y + 1.0,
// ]);
// svg.to_piet(affine, ctx);
// } else {
// if let Some(exten) = item.path_buf.extension() {
// if let Some(exten) = exten.to_str() {
// let exten = match exten {
// "rs" => "rust",
// "md" => "markdown",
// "cc" => "cpp",
// _ => exten,
// };
// if let Some((svg, svg_tree)) = file_svg(exten) {
// let svg_size = svg_tree_size(&svg_tree);
// let scale = 13.0 / svg_size.height;
// let affine = Affine::new([
// scale,
// 0.0,
// 0.0,
// scale,
// 1.0 + 18.0 + padding,
// svg_y + 2.0,
// ]);
// svg.to_piet(affine, ctx);
// }
// }
// }
// }
// text_layout.set_text_color(LapceTheme::EDITOR_FOREGROUND);
// text_layout.rebuild_if_needed(ctx.text(), env);
// text_layout.draw(ctx, Point::new(38.0 + padding, y + 3.0));
// }
// let mut i = i;
// if item.open {
// for item in &item.children {
// i = self.paint_item(
// ctx,
// min,
// max,
// line_height,
// width,
// level + 1,
// i + 1,
// index,
// item,
// env,
// );
// if i > max {
// return i;
// }
// }
// }
// i
// }
// }
//
// impl Widget<LapceUIState> for FileExplorer {
// fn event(
// &mut self,
// ctx: &mut EventCtx,
// event: &Event,
// data: &mut LapceUIState,
// env: &druid::Env,
// ) {
// match event {
// Event::Command(cmd) => match cmd {
// _ if cmd.is(LAPCE_UI_COMMAND) => {
// let command = cmd.get_unchecked(LAPCE_UI_COMMAND);
// match command {
// LapceUICommand::RequestLayout => {
// ctx.request_layout();
// }
// LapceUICommand::RequestPaint => {
// ctx.request_paint();
// }
// _ => (),
// }
// }
// _ => (),
// },
// _ => (),
// }
// }
//
// fn lifecycle(
// &mut self,
// ctx: &mut druid::LifeCycleCtx,
// event: &druid::LifeCycle,
// data: &LapceUIState,
// env: &druid::Env,
// ) {
// }
//
// fn update(
// &mut self,
// ctx: &mut druid::UpdateCtx,
// old_data: &LapceUIState,
// data: &LapceUIState,
// env: &druid::Env,
// ) {
// // let file_explorer = &data.file_explorer;
// // let old_file_explorer = &old_data.file_explorer;
// // if file_explorer.index != old_file_explorer.index {
// // ctx.request_paint();
// // }
// }
//
// fn layout(
// &mut self,
// ctx: &mut druid::LayoutCtx,
// bc: &druid::BoxConstraints,
// data: &LapceUIState,
// env: &druid::Env,
// ) -> druid::Size {
// bc.max()
// }
//
// fn paint(
// &mut self,
// ctx: &mut druid::PaintCtx,
// data: &LapceUIState,
// env: &druid::Env,
// ) {
// let rects = ctx.region().rects().to_vec();
// let size = ctx.size();
// let state = LAPCE_APP_STATE.get_tab_state(&self.window_id, &self.tab_id);
// let line_height = env.get(LapceTheme::EDITOR_LINE_HEIGHT);
// let file_explorer = state.file_explorer.lock();
// let width = size.width;
// let index = file_explorer.index;
//
// for rect in rects {
// if let Some(background) = LAPCE_APP_STATE.theme.get("background") {
// ctx.fill(rect, background);
// }
// let min = (rect.y0 / line_height).floor() as usize;
// let max = (rect.y1 / line_height) as usize + 1;
// let mut i = 0;
// let level = 0;
// for item in file_explorer.items.iter() {
// i = self.paint_item(
// ctx,
// min,
// max,
// line_height,
// width,
// level,
// i,
// index,
// item,
// env,
// );
// i += 1;
// if i > max {
// break;
// }
// }
// }
// }
// }
|
use crate::crypto::hash::Hashable;
use crate::miner::memory_pool::MemoryPool;
use crate::network::server::Handle;
use crate::transaction::Transaction;
use std::sync::Mutex;
/// Handler for new transaction
///
/// Inserts the transaction into the mempool when it is not already present
/// and does not double-spend inputs already claimed by the pool.
// We may want to add the result of memory pool check
pub fn new_transaction(transaction: Transaction, mempool: &Mutex<MemoryPool>, _server: &Handle) {
    // Lock poisoning (another thread panicked mid-update) is treated as fatal.
    let mut mempool = mempool.lock().unwrap();
    // memory pool check
    if !mempool.contains(&transaction.hash()) && !mempool.is_double_spend(&transaction.input) {
        // if check passes, insert the new transaction into the mempool
        //server.broadcast(Message::NewTransactionHashes(vec![transaction.hash()]));
        mempool.insert(transaction);
    }
    // The explicit `drop(mempool)` that used to sit here was redundant:
    // the guard is released at end of scope immediately anyway.
}
|
extern crate libc;
use libc::{c_void, size_t, pipe, fork, close, write, read};
use std::ffi::CString;
/// Demo: round-trip a message through a forked child over two pipes.
/// The parent sends `message` on `pp`; the child increments every byte
/// and sends the buffer back on `qq`; the parent prints the raw bytes.
fn main() {
    let mut pp = [0; 2];
    let mut qq = [0; 2];
    let message = "Hello";
    unsafe {
        pipe(pp.as_mut_ptr());
        pipe(qq.as_mut_ptr());
        match fork() {
            0 => {
                // Child process: close the unused pipe ends.
                close(pp[1]);
                close(qq[0]);
                let mut data: [u8; 256] = [0; 256];
                let buf = data.as_mut_ptr() as *mut c_void;
                read(pp[0], buf, 256);
                for i in data.iter_mut() {
                    *i = *i + 1;
                }
                write(qq[1], buf, (data.len()) as size_t);
            }
            _ => {
                // Parent process: close the unused pipe ends.
                close(pp[0]);
                close(qq[1]);
                // BUG FIX: the original wrote
                //   let src_data = CString::new(message).unwrap().as_ptr();
                // taking a pointer into a temporary CString that is freed
                // at the end of that statement, so the later write() read
                // from dangling memory. Keep the CString alive in a binding
                // for as long as the pointer is used.
                let src = CString::new(message).unwrap();
                let src_buf = src.as_ptr() as *const c_void;
                // +1 to include the NUL terminator.
                write(pp[1], src_buf, (message.len() + 1) as size_t);
                let mut dst_data: [u8; 256] = [0; 256];
                let dst_buf = dst_data.as_mut_ptr() as *mut c_void;
                read(qq[0], dst_buf, 256);
                for x in dst_data.iter() {
                    print!("{} ", x);
                }
            }
        }
    }
}
|
//! Common Google API types
pub mod drive;
pub mod oauth;
use serde::Deserialize;
/// Struct describing a generic response from a Google API
///
/// NOTE(review): presumably exactly one of `data` / `error` is populated
/// per response; both are optional so either shape deserializes.
#[derive(Deserialize, Debug)]
pub struct GoogleResponse<T> {
    #[serde(flatten)]
    /// The data returned by Google, if there was no error
    pub data: Option<T>,
    /// The error returned by Google, if there was an error
    pub error: Option<GoogleError>
}
/// Struct describing an error response from a Google API
#[derive(Deserialize, Debug)]
pub struct GoogleError {
    /// The error code
    // NOTE(review): i16 fits HTTP-style status codes; confirm the API
    // never returns larger numeric codes.
    pub code: i16,
    /// The error message
    pub message: String,
    /// Specific details around the error(s)
    pub errors: Vec<ErrorData>
}
/// Struct describing a specific Error returned from a Google API
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ErrorData {
    /// The domain in which the error occurred
    pub domain: String,
    /// The reason why the error occurred
    pub reason: String,
    /// The error message
    pub message: String,
    /// The location type at which the error occurred
    pub location_type: Option<String>,
    /// The location at which the error occurred
    pub location: Option<String>
} |
// Import hacspec and all needed definitions.
use hacspec_lib::*;

/// AES state/block size in bytes (128-bit).
const BLOCKSIZE: usize = 16;
/// CTR-mode nonce size in bytes (96-bit).
const IVSIZE: usize = 12;
// Fixed-size secret byte-array types (hacspec `bytes!` macro).
bytes!(Block, BLOCKSIZE);
bytes!(Word, 4);
bytes!(RoundKey, BLOCKSIZE);
bytes!(Nonce, IVSIZE);
bytes!(SBox, 256);
bytes!(RCon, 15);
// for aes128
bytes!(Bytes144, 144);
bytes!(Bytes176, 176);
// for aes256
bytes!(Bytes208, 208);
bytes!(Bytes240, 240);
// Two Types of keys
bytes!(Key128, BLOCKSIZE);
bytes!(Key256, 2 * BLOCKSIZE);
// AES forward S-box (FIPS 197), stored as secret bytes.
const SBOX: SBox = SBox(secret_bytes!([
    0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,
    0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,
    0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,
    0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,
    0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,
    0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,
    0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,
    0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,
    0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,
    0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,
    0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,
    0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,
    0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,
    0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,
    0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,
    0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16
]));
// Key-expansion round constants. Index 0 (0x8d) is a placeholder:
// `key_expansion_word` is only called with i >= Nk, so RCON[i/nk] starts
// at index 1.
const RCON: RCon = RCon(secret_bytes!([
    0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d
]));
/// Selects between the two supported AES key sizes.
#[derive(Clone, Copy)]
pub enum AesVariant {
    Aes128,
    Aes256,
}

/// Key length Nk in 32-bit words: 4 for AES-128, 8 for AES-256.
pub(crate) fn key_length(alg: AesVariant) -> usize {
    match alg {
        AesVariant::Aes128 => 4,
        AesVariant::Aes256 => 8,
    }
}

/// Number of rounds Nr: 10 for AES-128, 14 for AES-256.
pub(crate) fn rounds(alg: AesVariant) -> usize {
    match alg {
        AesVariant::Aes128 => 10,
        AesVariant::Aes256 => 14,
    }
}

/// Size of the expanded key schedule in bytes: 16 * (Nr + 1).
fn key_schedule_length(alg: AesVariant) -> usize {
    match alg {
        AesVariant::Aes128 => 176,
        AesVariant::Aes256 => 240,
    }
}

/// Number of key-expansion iterations: 4 * (Nr + 1) - Nk.
fn iterations(alg: AesVariant) -> usize {
    match alg {
        AesVariant::Aes128 => 40,
        AesVariant::Aes256 => 52,
    }
}
/// SubBytes: applies the S-box to every byte of the state.
fn sub_bytes(state: Block) -> Block {
    let mut st = state;
    for i in 0..BLOCKSIZE {
        // Only the lookup index is declassified; the table entry stays secret.
        st[i] = SBOX[U8::declassify(state[i])];
    }
    st
}

/// Rotates row `i` of the column-major 4x4 state left by `shift` cells.
fn shift_row(i: usize, shift: usize, state: Block) -> Block {
    let mut out = state;
    out[i] = state[i + (4 * (shift % 4))];
    out[i + 4] = state[i + (4 * ((shift + 1) % 4))];
    out[i + 8] = state[i + (4 * ((shift + 2) % 4))];
    out[i + 12] = state[i + (4 * ((shift + 3) % 4))];
    out
}

/// ShiftRows: row r is rotated left by r cells (row 0 is untouched).
fn shift_rows(state: Block) -> Block {
    let state = shift_row(1, 1, state);
    let state = shift_row(2, 2, state);
    shift_row(3, 3, state)
}

/// Multiplication by x (0x02) in GF(2^8): shift left, then xor the
/// reduction constant 0x1b when the high bit was set.
fn xtime(x: U8) -> U8 {
    let x1 = x << 1;
    let x7 = x >> 7;
    let x71 = x7 & U8(0x01);
    // x71 is 0 or 1, so the multiply selects 0x00 or 0x1b branchlessly.
    let x711b = x71 * U8(0x1b);
    x1 ^ x711b
}
/// MixColumns on column `c`, written in the standard xtime form:
/// out_i = s_i ^ (s_0^s_1^s_2^s_3) ^ xtime(s_i ^ s_{i+1}).
fn mix_column(c: usize, state: Block) -> Block {
    let i0 = 4 * c;
    let s0 = state[i0];
    let s1 = state[i0 + 1];
    let s2 = state[i0 + 2];
    let s3 = state[i0 + 3];
    let mut st = state;
    // Common xor of all four column bytes.
    let tmp = s0 ^ s1 ^ s2 ^ s3;
    st[i0] = s0 ^ tmp ^ (xtime(s0 ^ s1));
    st[i0 + 1] = s1 ^ tmp ^ (xtime(s1 ^ s2));
    st[i0 + 2] = s2 ^ tmp ^ (xtime(s2 ^ s3));
    st[i0 + 3] = s3 ^ tmp ^ (xtime(s3 ^ s0));
    st
}

/// MixColumns over all four columns of the state.
fn mix_columns(state: Block) -> Block {
    let state = mix_column(0, state);
    let state = mix_column(1, state);
    let state = mix_column(2, state);
    mix_column(3, state)
}

/// AddRoundKey: xors the round key into the state.
fn add_round_key(state: Block, key: RoundKey) -> Block {
    let mut out = state;
    for i in 0..BLOCKSIZE {
        out[i] ^= key[i];
    }
    out
}

/// One middle round: SubBytes, ShiftRows, MixColumns, AddRoundKey.
fn aes_enc(state: Block, round_key: RoundKey) -> Block {
    let state = sub_bytes(state);
    let state = shift_rows(state);
    let state = mix_columns(state);
    add_round_key(state, round_key)
}

/// Final round: like `aes_enc` but without MixColumns.
fn aes_enc_last(state: Block, round_key: RoundKey) -> Block {
    let state = sub_bytes(state);
    let state = shift_rows(state);
    add_round_key(state, round_key)
}
/// Applies one `aes_enc` round per 16-byte round-key chunk in `key`.
fn rounds_aes(state: Block, key: ByteSeq) -> Block {
    let mut out = state;
    for i in 0..key.num_chunks(BLOCKSIZE) {
        let (_, key_block) = key.get_chunk(BLOCKSIZE, i);
        out = aes_enc(out, RoundKey::from_seq(&key_block));
    }
    out
}

/// Encrypts one block given the already-expanded key schedule `key`:
/// initial AddRoundKey with round key 0, the middle rounds, then the
/// final (MixColumns-free) round with round key `nr`.
fn block_cipher_aes(input: Block, key: ByteSeq, nr: usize) -> Block {
    let k0 = RoundKey::from_slice_range(&key, 0..16);
    let k = ByteSeq::from_slice_range(&key, 16..nr * 16);
    let kn = RoundKey::from_slice(&key, nr * 16, 16);
    let state = add_round_key(input, k0);
    let state = rounds_aes(state, k);
    aes_enc_last(state, kn)
}
/// RotWord (FIPS-197): rotates the 4-byte word left by one byte.
fn rotate_word(w: Word) -> Word {
    Word([w[1], w[2], w[3], w[0]])
}
/// Applies the AES S-box to each byte of the word.
/// NOTE(review): this is SubWord in FIPS-197; the name `slice_word` is
/// misleading but kept for compatibility.
fn slice_word(w: Word) -> Word {
    Word([
        SBOX[usize::from(w[0])],
        SBOX[usize::from(w[1])],
        SBOX[usize::from(w[2])],
        SBOX[usize::from(w[3])],
    ])
}
/// Key-expansion core step: RotWord, then SubWord, then xor the round
/// constant into the first byte.
fn aes_keygen_assist(w: Word, rcon: U8) -> Word {
    let mut k = rotate_word(w);
    k = slice_word(k);
    k[0] ^= rcon;
    k
}
/// Computes expanded-key word `i` from word `i - nk` (`w0`) and word `i - 1`
/// (`w1`), per the FIPS-197 key expansion recurrence.
fn key_expansion_word(w0: Word, w1: Word, i: usize, nk: usize, nr: usize) -> Word {
    // The schedule holds 4 * (nr + 1) words in total.
    assert!(i < (4 * (nr + 1)));
    let mut k = w1;
    if i % nk == 0 {
        k = aes_keygen_assist(k, RCON[i / nk]);
    } else if nk > 6 && i % nk == 4 {
        // Extra SubWord step taken only for AES-256 (nk == 8).
        k = slice_word(k);
    }
    for i in 0..4 {
        k[i] ^= w0[i];
    }
    k
}
/// Expands `key` into the full round-key schedule for `alg`.
/// `nk` (key length in words) and `nr` (round count) must match `alg`.
fn key_expansion_aes(key: &ByteSeq, nk: usize, nr: usize, alg: AesVariant) -> ByteSeq {
    let mut key_ex = ByteSeq::new(key_schedule_length(alg));
    // The first nk words of the schedule are the key itself.
    key_ex = key_ex.update_start(key);
    let word_size = key_length(alg);
    for j in 0..iterations(alg) {
        let i = j + word_size;
        let word = key_expansion_word(
            Word::from_slice(&key_ex, 4 * (i - word_size), 4),
            Word::from_slice(&key_ex, 4 * i - 4, 4),
            i,
            nk,
            nr,
        );
        key_ex = key_ex.update(4 * i, &word);
    }
    key_ex
}
/// Expands `k` into a round-key schedule and encrypts one 16-byte block.
pub(crate) fn aes_encrypt_block(
    k: &ByteSeq,
    input: Block,
    nk: usize,
    nr: usize,
    alg: AesVariant,
) -> Block {
    let key_ex = key_expansion_aes(k, nk, nr, alg);
    block_cipher_aes(input, key_ex, nr)
}
/// AES-128 single-block encryption (tests below call it with nk = 4, nr = 10).
pub fn aes128_encrypt_block(k: Key128, input: Block, nk: usize, nr: usize) -> Block {
    aes_encrypt_block(&ByteSeq::from_seq(&k), input, nk, nr, AesVariant::Aes128)
}
/// AES-256 single-block encryption (tests below call it with nk = 8, nr = 14).
pub fn aes256_encrypt_block(k: Key256, input: Block, nk: usize, nr: usize) -> Block {
    aes_encrypt_block(&ByteSeq::from_seq(&k), input, nk, nr, AesVariant::Aes256)
}
/// Builds one CTR-mode keystream block: encrypts the 12-byte nonce
/// (bytes 0..12) concatenated with the big-endian counter (bytes 12..16).
pub(crate) fn aes_ctr_keyblock(
    k: &ByteSeq,
    n: Nonce,
    c: U32,
    nk: usize,
    nr: usize,
    alg: AesVariant,
) -> Block {
    let mut input = Block::new();
    input = input.update(0, &n);
    input = input.update(12, &U32_to_be_bytes(c));
    aes_encrypt_block(k, input, nk, nr, alg)
}
/// Xors a keystream block into a message block (the CTR-mode combine step).
pub(crate) fn xor_block(block: Block, keyblock: Block) -> Block {
    let mut combined = block;
    for position in 0..BLOCKSIZE {
        combined[position] = combined[position] ^ keyblock[position];
    }
    combined
}
/// AES-CTR: xors each 16-byte chunk of `msg` with the keystream block
/// E(key, nonce || counter), incrementing the counter after each full block.
/// Encryption and decryption are the same operation.
fn aes_counter_mode(
    key: &ByteSeq,
    nonce: Nonce,
    counter: U32,
    msg: &ByteSeq,
    nk: usize,
    nr: usize,
    alg: AesVariant,
) -> ByteSeq {
    let mut ctr = counter;
    let mut blocks_out = ByteSeq::new(msg.len());
    for i in 0..msg.num_chunks(BLOCKSIZE) {
        let (block_len, msg_block) = msg.get_chunk(BLOCKSIZE, i);
        let key_block = aes_ctr_keyblock(key, nonce, ctr, nk, nr, alg);
        if msg_block.len() == BLOCKSIZE {
            blocks_out = blocks_out.set_chunk(
                BLOCKSIZE,
                i,
                &xor_block(Block::from_seq(&msg_block), key_block),
            );
            ctr += U32(1);
        } else {
            // Last block that needs padding: zero-pad to a full block, xor,
            // then keep only the real `block_len` bytes. No counter increment
            // is needed — a short chunk is always the final one.
            let last_block = Block::new().update_start(&msg_block);
            blocks_out = blocks_out.set_chunk(
                BLOCKSIZE,
                i,
                &xor_block(last_block, key_block).slice_range(0..block_len),
            );
        }
    }
    blocks_out
}
/// CTR-mode encryption for `alg`, deriving nk and nr from the variant.
pub(crate) fn aes_encrypt(
    key: &ByteSeq,
    nonce: Nonce,
    counter: U32,
    msg: &ByteSeq,
    alg: AesVariant,
) -> ByteSeq {
    aes_counter_mode(key, nonce, counter, msg, key_length(alg), rounds(alg), alg)
}
/// CTR-mode decryption; identical to encryption because CTR is an xor stream.
pub(crate) fn aes_decrypt(
    key: &ByteSeq,
    nonce: Nonce,
    counter: U32,
    ctxt: &ByteSeq,
    alg: AesVariant,
) -> ByteSeq {
    aes_counter_mode(key, nonce, counter, ctxt, key_length(alg), rounds(alg), alg)
}
/// Public AES-128-CTR encryption entry point.
pub fn aes128_encrypt(key: Key128, nonce: Nonce, counter: U32, msg: &ByteSeq) -> ByteSeq {
    aes_encrypt(
        &ByteSeq::from_seq(&key),
        nonce,
        counter,
        msg,
        AesVariant::Aes128,
    )
}
/// Public AES-128-CTR decryption entry point.
pub fn aes128_decrypt(key: Key128, nonce: Nonce, counter: U32, ctxt: &ByteSeq) -> ByteSeq {
    aes_decrypt(
        &ByteSeq::from_seq(&key),
        nonce,
        counter,
        ctxt,
        AesVariant::Aes128,
    )
}
/// Public AES-256-CTR encryption entry point.
pub fn aes256_encrypt(key: Key256, nonce: Nonce, counter: U32, msg: &ByteSeq) -> ByteSeq {
    aes_encrypt(
        &ByteSeq::from_seq(&key),
        nonce,
        counter,
        msg,
        AesVariant::Aes256,
    )
}
/// Public AES-256-CTR decryption entry point.
pub fn aes256_decrypt(key: Key256, nonce: Nonce, counter: U32, ctxt: &ByteSeq) -> ByteSeq {
    aes_decrypt(
        &ByteSeq::from_seq(&key),
        nonce,
        counter,
        ctxt,
        AesVariant::Aes256,
    )
}
/// Known-answer test for AES-128 single-block encryption
/// (vector matches NIST SP 800-38A, ECB-AES128 block 1).
#[test]
fn test_kat_block1() {
    let msg = Block(secret_bytes!([
        0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17,
        0x2a
    ]));
    let key = Key128(secret_bytes!([
        0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f,
        0x3c
    ]));
    let ctxt = secret_bytes!([
        0x3a, 0xd7, 0x7b, 0xb4, 0x0d, 0x7a, 0x36, 0x60, 0xa8, 0x9e, 0xca, 0xf3, 0x24, 0x66, 0xef,
        0x97
    ]);
    let c = aes128_encrypt_block(key, msg, 4, 10);
    assert_bytes_eq!(ctxt, c);
}
/// Known-answer test for AES-128; the plaintext bytes are the ASCII string
/// "Single block msg".
#[test]
fn test_kat_block2() {
    let msg = Block::from_public_slice(&[
        0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x20, 0x6d, 0x73,
        0x67,
    ]);
    let key = Key128::from_public_slice(&[
        0xae, 0x68, 0x52, 0xf8, 0x12, 0x10, 0x67, 0xcc, 0x4b, 0xf7, 0xa5, 0x76, 0x55, 0x77, 0xf3,
        0x9e,
    ]);
    let ctxt = ByteSeq::from_public_slice(&[
        0x61, 0x5f, 0x09, 0xfb, 0x35, 0x3f, 0x61, 0x3b, 0xa2, 0x8f, 0xf3, 0xa3, 0x0c, 0x64, 0x75,
        0x2d,
    ]);
    let c = aes128_encrypt_block(key, msg, 4, 10);
    assert_bytes_eq!(ctxt, c);
}
/// Known-answer test for AES-256 single-block encryption
/// (vector matches FIPS-197 Appendix C.3).
#[test]
fn test_kat_block1_aes256() {
    let msg = Block::from_public_slice(&[
        0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee,
        0xff,
    ]);
    let key = Key256::from_public_slice(&[
        0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
        0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d,
        0x1e, 0x1f,
    ]);
    let ctxt = ByteSeq::from_public_slice(&[
        0x8e, 0xa2, 0xb7, 0xca, 0x51, 0x67, 0x45, 0xbf, 0xea, 0xfc, 0x49, 0x90, 0x4b, 0x49, 0x60,
        0x89,
    ]);
    let c = aes256_encrypt_block(key, msg, 8, 14);
    assert_bytes_eq!(ctxt, c);
}
|
pub mod constant;
pub mod cluster;
|
/// A fixed-rate loan amortized over monthly periods.
#[derive(Debug)]
pub struct Loan {
    /// Remaining principal balance.
    pub amount: f64,
    /// Total number of monthly payment periods.
    pub periods: u32,
    /// Interest rate per period (annual rate / 12, as a decimal fraction).
    pub period_rate: f64,
    /// Fixed payment due each period, rounded to cents.
    pub payment: f64,
}
/// One period's payment, broken into its interest and principal portions.
#[derive(Debug)]
pub struct Payment {
    pub amount: f64,
    pub interest: f64,
    pub principal: f64,
    pub balance: f64,
}
impl Loan {
    /// Creates a loan of `amount` at annual rate `rate` over `periods` months.
    ///
    /// `rate` may be given either as a decimal fraction (e.g. `0.05`) or as a
    /// percentage (e.g. `5.0`); values >= 0.2 are treated as percentages.
    pub fn new(amount: f64, rate: f64, periods: u32) -> Loan {
        // Heuristic: annual rates of 20%+ written as decimals are assumed to
        // actually be percentages.
        let fixed_rate = if rate >= 0.2 { rate / 100.0 } else { rate };
        let period_rate = fixed_rate / 12.0;
        Loan {
            amount,
            periods,
            period_rate,
            // BUG FIX: previously the raw `rate` was passed to `discount`,
            // so percentage-form rates produced a wrong payment while
            // `period_rate` used the normalized `fixed_rate`. Both must use
            // the normalized rate.
            payment: (amount / Loan::discount(fixed_rate, periods) * 100.0).round() / 100.0,
        }
    }
    /// Overrides the computed per-period payment (e.g. extra-payment scenarios).
    pub fn set_payment(&mut self, payment: f64) {
        self.payment = payment;
    }
    /// Annuity discount factor: present value of `periods` unit payments at
    /// the monthly rate derived from `apr` (a decimal fraction).
    fn discount(apr: f64, periods: u32) -> f64 {
        let period_rate = apr / 12.0;
        let growth = (period_rate + 1.0).powi(periods as i32);
        (growth - 1.0) / (period_rate * growth)
    }
}
impl Iterator for Loan {
    type Item = Payment;
    /// Yields the next period's payment and reduces the balance; ends once
    /// the loan is paid off.
    fn next(&mut self) -> Option<Self::Item> {
        if self.amount <= 0.0 {
            return None;
        }
        // Interest accrued this period, rounded to cents.
        let interest = (self.amount * self.period_rate * 100.0).round() / 100.0;
        // The final payment covers the remaining balance plus its interest.
        let amount = if self.amount > self.payment {
            self.payment
        } else {
            self.amount + interest
        };
        let p = Payment {
            amount,
            interest,
            principal: amount - interest,
            balance: self.amount - amount + interest,
        };
        self.amount -= p.principal;
        Some(p)
    }
}
/// Prints a summary of the very first payment on `loan`, or nothing when the
/// loan has no payments left.
pub fn print_first(mut loan: Loan) {
    for payment in loan.by_ref().take(1) {
        println!("Initial Loan Payment:");
        println!("  Interest: ${:.2}", payment.interest);
        println!("  Principal: ${:.2}", payment.principal);
        println!("Total Payment: ${:.2}", payment.amount);
    }
}
/// Prints the full amortization table, one row per payment.
pub fn print_table(loan: Loan) {
    println!(
        "{:<7} {:>12} {:>12} {:>12} {:>12}",
        "Payment", "Total", "Interest", "Principal", "Balance"
    );
    let mut number = 1;
    for payment in loan {
        println!(
            "{:<7} {:>12.2} {:>12.2} {:>12.2} {:>12.2}",
            number,
            payment.amount,
            payment.interest,
            payment.principal,
            payment.balance
        );
        number += 1;
    }
}
/// Prints the total interest and total amount paid over the life of the loan.
pub fn print_totals(loan: Loan) {
    let mut total_interest: f64 = 0.0;
    let mut total_principal: f64 = 0.0;
    for p in loan {
        total_interest += p.interest;
        total_principal += p.principal;
    }
    // Fixed user-facing spelling: "Payed" -> "Paid".
    println!("Total Interest Paid: ${:.2}", total_interest);
    println!(
        "Total Amount Paid: ${:.2}",
        total_interest + total_principal
    );
}
|
// Auto-generated (svd2rust-style) accessors for the interrupt MASK register:
// one enable bit per interrupt source. Kept byte-identical to the generator
// output; only explanatory comments are added.
#[doc = "Register `MASK` reader"]
pub type R = crate::R<MASK_SPEC>;
#[doc = "Register `MASK` writer"]
pub type W = crate::W<MASK_SPEC>;
#[doc = "Field `CCRCFAILIE` reader - Command CRC fail interrupt enable"]
pub type CCRCFAILIE_R = crate::BitReader;
#[doc = "Field `CCRCFAILIE` writer - Command CRC fail interrupt enable"]
pub type CCRCFAILIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DCRCFAILIE` reader - Data CRC fail interrupt enable"]
pub type DCRCFAILIE_R = crate::BitReader;
#[doc = "Field `DCRCFAILIE` writer - Data CRC fail interrupt enable"]
pub type DCRCFAILIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTIMEOUTIE` reader - Command timeout interrupt enable"]
pub type CTIMEOUTIE_R = crate::BitReader;
#[doc = "Field `CTIMEOUTIE` writer - Command timeout interrupt enable"]
pub type CTIMEOUTIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DTIMEOUTIE` reader - Data timeout interrupt enable"]
pub type DTIMEOUTIE_R = crate::BitReader;
#[doc = "Field `DTIMEOUTIE` writer - Data timeout interrupt enable"]
pub type DTIMEOUTIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXUNDERRIE` reader - Tx FIFO underrun error interrupt enable"]
pub type TXUNDERRIE_R = crate::BitReader;
#[doc = "Field `TXUNDERRIE` writer - Tx FIFO underrun error interrupt enable"]
pub type TXUNDERRIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RXOVERRIE` reader - Rx FIFO overrun error interrupt enable"]
pub type RXOVERRIE_R = crate::BitReader;
#[doc = "Field `RXOVERRIE` writer - Rx FIFO overrun error interrupt enable"]
pub type RXOVERRIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CMDRENDIE` reader - Command response received interrupt enable"]
pub type CMDRENDIE_R = crate::BitReader;
#[doc = "Field `CMDRENDIE` writer - Command response received interrupt enable"]
pub type CMDRENDIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CMDSENTIE` reader - Command sent interrupt enable"]
pub type CMDSENTIE_R = crate::BitReader;
#[doc = "Field `CMDSENTIE` writer - Command sent interrupt enable"]
pub type CMDSENTIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DATAENDIE` reader - Data end interrupt enable"]
pub type DATAENDIE_R = crate::BitReader;
#[doc = "Field `DATAENDIE` writer - Data end interrupt enable"]
pub type DATAENDIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `STBITERRIE` reader - Start bit error interrupt enable"]
pub type STBITERRIE_R = crate::BitReader;
#[doc = "Field `STBITERRIE` writer - Start bit error interrupt enable"]
pub type STBITERRIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBCKENDIE` reader - Data block end interrupt enable"]
pub type DBCKENDIE_R = crate::BitReader;
#[doc = "Field `DBCKENDIE` writer - Data block end interrupt enable"]
pub type DBCKENDIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CMDACTIE` reader - Command acting interrupt enable"]
pub type CMDACTIE_R = crate::BitReader;
#[doc = "Field `CMDACTIE` writer - Command acting interrupt enable"]
pub type CMDACTIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXACTIE` reader - Data transmit acting interrupt enable"]
pub type TXACTIE_R = crate::BitReader;
#[doc = "Field `TXACTIE` writer - Data transmit acting interrupt enable"]
pub type TXACTIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RXACTIE` reader - Data receive acting interrupt enable"]
pub type RXACTIE_R = crate::BitReader;
#[doc = "Field `RXACTIE` writer - Data receive acting interrupt enable"]
pub type RXACTIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXFIFOHEIE` reader - Tx FIFO half empty interrupt enable"]
pub type TXFIFOHEIE_R = crate::BitReader;
#[doc = "Field `TXFIFOHEIE` writer - Tx FIFO half empty interrupt enable"]
pub type TXFIFOHEIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RXFIFOHFIE` reader - Rx FIFO half full interrupt enable"]
pub type RXFIFOHFIE_R = crate::BitReader;
#[doc = "Field `RXFIFOHFIE` writer - Rx FIFO half full interrupt enable"]
pub type RXFIFOHFIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXFIFOFIE` reader - Tx FIFO full interrupt enable"]
pub type TXFIFOFIE_R = crate::BitReader;
#[doc = "Field `TXFIFOFIE` writer - Tx FIFO full interrupt enable"]
pub type TXFIFOFIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RXFIFOFIE` reader - Rx FIFO full interrupt enable"]
pub type RXFIFOFIE_R = crate::BitReader;
#[doc = "Field `RXFIFOFIE` writer - Rx FIFO full interrupt enable"]
pub type RXFIFOFIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXFIFOEIE` reader - Tx FIFO empty interrupt enable"]
pub type TXFIFOEIE_R = crate::BitReader;
#[doc = "Field `TXFIFOEIE` writer - Tx FIFO empty interrupt enable"]
pub type TXFIFOEIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RXFIFOEIE` reader - Rx FIFO empty interrupt enable"]
pub type RXFIFOEIE_R = crate::BitReader;
#[doc = "Field `RXFIFOEIE` writer - Rx FIFO empty interrupt enable"]
pub type RXFIFOEIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXDAVLIE` reader - Data available in Tx FIFO interrupt enable"]
pub type TXDAVLIE_R = crate::BitReader;
#[doc = "Field `TXDAVLIE` writer - Data available in Tx FIFO interrupt enable"]
pub type TXDAVLIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RXDAVLIE` reader - Data available in Rx FIFO interrupt enable"]
pub type RXDAVLIE_R = crate::BitReader;
#[doc = "Field `RXDAVLIE` writer - Data available in Rx FIFO interrupt enable"]
pub type RXDAVLIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SDIOITIE` reader - SDIO mode interrupt received interrupt enable"]
pub type SDIOITIE_R = crate::BitReader;
#[doc = "Field `SDIOITIE` writer - SDIO mode interrupt received interrupt enable"]
pub type SDIOITIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CEATAENDIE` reader - CE-ATA command completion signal received interrupt enable"]
pub type CEATAENDIE_R = crate::BitReader;
#[doc = "Field `CEATAENDIE` writer - CE-ATA command completion signal received interrupt enable"]
pub type CEATAENDIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts one interrupt-enable bit (0..=23) from the
// 32-bit register value.
impl R {
    #[doc = "Bit 0 - Command CRC fail interrupt enable"]
    #[inline(always)]
    pub fn ccrcfailie(&self) -> CCRCFAILIE_R {
        CCRCFAILIE_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Data CRC fail interrupt enable"]
    #[inline(always)]
    pub fn dcrcfailie(&self) -> DCRCFAILIE_R {
        DCRCFAILIE_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Command timeout interrupt enable"]
    #[inline(always)]
    pub fn ctimeoutie(&self) -> CTIMEOUTIE_R {
        CTIMEOUTIE_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Data timeout interrupt enable"]
    #[inline(always)]
    pub fn dtimeoutie(&self) -> DTIMEOUTIE_R {
        DTIMEOUTIE_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Tx FIFO underrun error interrupt enable"]
    #[inline(always)]
    pub fn txunderrie(&self) -> TXUNDERRIE_R {
        TXUNDERRIE_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Rx FIFO overrun error interrupt enable"]
    #[inline(always)]
    pub fn rxoverrie(&self) -> RXOVERRIE_R {
        RXOVERRIE_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Command response received interrupt enable"]
    #[inline(always)]
    pub fn cmdrendie(&self) -> CMDRENDIE_R {
        CMDRENDIE_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - Command sent interrupt enable"]
    #[inline(always)]
    pub fn cmdsentie(&self) -> CMDSENTIE_R {
        CMDSENTIE_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - Data end interrupt enable"]
    #[inline(always)]
    pub fn dataendie(&self) -> DATAENDIE_R {
        DATAENDIE_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Start bit error interrupt enable"]
    #[inline(always)]
    pub fn stbiterrie(&self) -> STBITERRIE_R {
        STBITERRIE_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - Data block end interrupt enable"]
    #[inline(always)]
    pub fn dbckendie(&self) -> DBCKENDIE_R {
        DBCKENDIE_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - Command acting interrupt enable"]
    #[inline(always)]
    pub fn cmdactie(&self) -> CMDACTIE_R {
        CMDACTIE_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - Data transmit acting interrupt enable"]
    #[inline(always)]
    pub fn txactie(&self) -> TXACTIE_R {
        TXACTIE_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - Data receive acting interrupt enable"]
    #[inline(always)]
    pub fn rxactie(&self) -> RXACTIE_R {
        RXACTIE_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - Tx FIFO half empty interrupt enable"]
    #[inline(always)]
    pub fn txfifoheie(&self) -> TXFIFOHEIE_R {
        TXFIFOHEIE_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - Rx FIFO half full interrupt enable"]
    #[inline(always)]
    pub fn rxfifohfie(&self) -> RXFIFOHFIE_R {
        RXFIFOHFIE_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - Tx FIFO full interrupt enable"]
    #[inline(always)]
    pub fn txfifofie(&self) -> TXFIFOFIE_R {
        TXFIFOFIE_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - Rx FIFO full interrupt enable"]
    #[inline(always)]
    pub fn rxfifofie(&self) -> RXFIFOFIE_R {
        RXFIFOFIE_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - Tx FIFO empty interrupt enable"]
    #[inline(always)]
    pub fn txfifoeie(&self) -> TXFIFOEIE_R {
        TXFIFOEIE_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - Rx FIFO empty interrupt enable"]
    #[inline(always)]
    pub fn rxfifoeie(&self) -> RXFIFOEIE_R {
        RXFIFOEIE_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - Data available in Tx FIFO interrupt enable"]
    #[inline(always)]
    pub fn txdavlie(&self) -> TXDAVLIE_R {
        TXDAVLIE_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - Data available in Rx FIFO interrupt enable"]
    #[inline(always)]
    pub fn rxdavlie(&self) -> RXDAVLIE_R {
        RXDAVLIE_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - SDIO mode interrupt received interrupt enable"]
    #[inline(always)]
    pub fn sdioitie(&self) -> SDIOITIE_R {
        SDIOITIE_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - CE-ATA command completion signal received interrupt enable"]
    #[inline(always)]
    pub fn ceataendie(&self) -> CEATAENDIE_R {
        CEATAENDIE_R::new(((self.bits >> 23) & 1) != 0)
    }
}
// Write accessors: each returns a proxy that sets/clears the corresponding
// bit (the const generic parameter is the bit offset).
impl W {
    #[doc = "Bit 0 - Command CRC fail interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn ccrcfailie(&mut self) -> CCRCFAILIE_W<MASK_SPEC, 0> {
        CCRCFAILIE_W::new(self)
    }
    #[doc = "Bit 1 - Data CRC fail interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn dcrcfailie(&mut self) -> DCRCFAILIE_W<MASK_SPEC, 1> {
        DCRCFAILIE_W::new(self)
    }
    #[doc = "Bit 2 - Command timeout interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn ctimeoutie(&mut self) -> CTIMEOUTIE_W<MASK_SPEC, 2> {
        CTIMEOUTIE_W::new(self)
    }
    #[doc = "Bit 3 - Data timeout interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn dtimeoutie(&mut self) -> DTIMEOUTIE_W<MASK_SPEC, 3> {
        DTIMEOUTIE_W::new(self)
    }
    #[doc = "Bit 4 - Tx FIFO underrun error interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn txunderrie(&mut self) -> TXUNDERRIE_W<MASK_SPEC, 4> {
        TXUNDERRIE_W::new(self)
    }
    #[doc = "Bit 5 - Rx FIFO overrun error interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn rxoverrie(&mut self) -> RXOVERRIE_W<MASK_SPEC, 5> {
        RXOVERRIE_W::new(self)
    }
    #[doc = "Bit 6 - Command response received interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn cmdrendie(&mut self) -> CMDRENDIE_W<MASK_SPEC, 6> {
        CMDRENDIE_W::new(self)
    }
    #[doc = "Bit 7 - Command sent interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn cmdsentie(&mut self) -> CMDSENTIE_W<MASK_SPEC, 7> {
        CMDSENTIE_W::new(self)
    }
    #[doc = "Bit 8 - Data end interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn dataendie(&mut self) -> DATAENDIE_W<MASK_SPEC, 8> {
        DATAENDIE_W::new(self)
    }
    #[doc = "Bit 9 - Start bit error interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn stbiterrie(&mut self) -> STBITERRIE_W<MASK_SPEC, 9> {
        STBITERRIE_W::new(self)
    }
    #[doc = "Bit 10 - Data block end interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn dbckendie(&mut self) -> DBCKENDIE_W<MASK_SPEC, 10> {
        DBCKENDIE_W::new(self)
    }
    #[doc = "Bit 11 - Command acting interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn cmdactie(&mut self) -> CMDACTIE_W<MASK_SPEC, 11> {
        CMDACTIE_W::new(self)
    }
    #[doc = "Bit 12 - Data transmit acting interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn txactie(&mut self) -> TXACTIE_W<MASK_SPEC, 12> {
        TXACTIE_W::new(self)
    }
    #[doc = "Bit 13 - Data receive acting interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn rxactie(&mut self) -> RXACTIE_W<MASK_SPEC, 13> {
        RXACTIE_W::new(self)
    }
    #[doc = "Bit 14 - Tx FIFO half empty interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn txfifoheie(&mut self) -> TXFIFOHEIE_W<MASK_SPEC, 14> {
        TXFIFOHEIE_W::new(self)
    }
    #[doc = "Bit 15 - Rx FIFO half full interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn rxfifohfie(&mut self) -> RXFIFOHFIE_W<MASK_SPEC, 15> {
        RXFIFOHFIE_W::new(self)
    }
    #[doc = "Bit 16 - Tx FIFO full interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn txfifofie(&mut self) -> TXFIFOFIE_W<MASK_SPEC, 16> {
        TXFIFOFIE_W::new(self)
    }
    #[doc = "Bit 17 - Rx FIFO full interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn rxfifofie(&mut self) -> RXFIFOFIE_W<MASK_SPEC, 17> {
        RXFIFOFIE_W::new(self)
    }
    #[doc = "Bit 18 - Tx FIFO empty interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn txfifoeie(&mut self) -> TXFIFOEIE_W<MASK_SPEC, 18> {
        TXFIFOEIE_W::new(self)
    }
    #[doc = "Bit 19 - Rx FIFO empty interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn rxfifoeie(&mut self) -> RXFIFOEIE_W<MASK_SPEC, 19> {
        RXFIFOEIE_W::new(self)
    }
    #[doc = "Bit 20 - Data available in Tx FIFO interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn txdavlie(&mut self) -> TXDAVLIE_W<MASK_SPEC, 20> {
        TXDAVLIE_W::new(self)
    }
    #[doc = "Bit 21 - Data available in Rx FIFO interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn rxdavlie(&mut self) -> RXDAVLIE_W<MASK_SPEC, 21> {
        RXDAVLIE_W::new(self)
    }
    #[doc = "Bit 22 - SDIO mode interrupt received interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn sdioitie(&mut self) -> SDIOITIE_W<MASK_SPEC, 22> {
        SDIOITIE_W::new(self)
    }
    #[doc = "Bit 23 - CE-ATA command completion signal received interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn ceataendie(&mut self) -> CEATAENDIE_W<MASK_SPEC, 23> {
        CEATAENDIE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Register specification: 32-bit, readable, writable, resets to 0.
#[doc = "mask register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mask::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mask::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MASK_SPEC;
impl crate::RegisterSpec for MASK_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`mask::R`](R) reader structure"]
impl crate::Readable for MASK_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mask::W`](W) writer structure"]
impl crate::Writable for MASK_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MASK to value 0"]
impl crate::Resettable for MASK_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Auto-generated (svd2rust-style) accessors for the IDMACTRLR (IDMA control)
// register: enable, buffer-mode and active-buffer bits. Kept byte-identical
// to the generator output; only explanatory comments are added.
#[doc = "Register `IDMACTRLR` reader"]
pub type R = crate::R<IDMACTRLR_SPEC>;
#[doc = "Register `IDMACTRLR` writer"]
pub type W = crate::W<IDMACTRLR_SPEC>;
#[doc = "Field `IDMAEN` reader - IDMA enable This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0)."]
pub type IDMAEN_R = crate::BitReader;
#[doc = "Field `IDMAEN` writer - IDMA enable This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0)."]
pub type IDMAEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IDMABMODE` reader - Buffer mode selection. This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0)."]
pub type IDMABMODE_R = crate::BitReader;
#[doc = "Field `IDMABMODE` writer - Buffer mode selection. This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0)."]
pub type IDMABMODE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IDMABACT` reader - Double buffer mode active buffer indication This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0). When IDMA is enabled this bit is toggled by hardware."]
pub type IDMABACT_R = crate::BitReader;
#[doc = "Field `IDMABACT` writer - Double buffer mode active buffer indication This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0). When IDMA is enabled this bit is toggled by hardware."]
pub type IDMABACT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: one per register bit (0..=2).
impl R {
    #[doc = "Bit 0 - IDMA enable This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0)."]
    #[inline(always)]
    pub fn idmaen(&self) -> IDMAEN_R {
        IDMAEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Buffer mode selection. This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0)."]
    #[inline(always)]
    pub fn idmabmode(&self) -> IDMABMODE_R {
        IDMABMODE_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Double buffer mode active buffer indication This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0). When IDMA is enabled this bit is toggled by hardware."]
    #[inline(always)]
    pub fn idmabact(&self) -> IDMABACT_R {
        IDMABACT_R::new(((self.bits >> 2) & 1) != 0)
    }
}
// Write accessors: each returns a bit-writer proxy for its bit offset.
impl W {
    #[doc = "Bit 0 - IDMA enable This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0)."]
    #[inline(always)]
    #[must_use]
    pub fn idmaen(&mut self) -> IDMAEN_W<IDMACTRLR_SPEC, 0> {
        IDMAEN_W::new(self)
    }
    #[doc = "Bit 1 - Buffer mode selection. This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0)."]
    #[inline(always)]
    #[must_use]
    pub fn idmabmode(&mut self) -> IDMABMODE_W<IDMACTRLR_SPEC, 1> {
        IDMABMODE_W::new(self)
    }
    #[doc = "Bit 2 - Double buffer mode active buffer indication This bit can only be written by firmware when DPSM is inactive (DPSMACT = 0). When IDMA is enabled this bit is toggled by hardware."]
    #[inline(always)]
    #[must_use]
    pub fn idmabact(&mut self) -> IDMABACT_W<IDMACTRLR_SPEC, 2> {
        IDMABACT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// FIX: the previous #[doc] text described the FIFO data register (copied from
// another register's description); replaced with text describing IDMACTRLR.
#[doc = "DMA control register (IDMACTRLR): controls the internal DMA (enable, buffer mode, and active-buffer indication).\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`idmactrlr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`idmactrlr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct IDMACTRLR_SPEC;
impl crate::RegisterSpec for IDMACTRLR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`idmactrlr::R`](R) reader structure"]
impl crate::Readable for IDMACTRLR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`idmactrlr::W`](W) writer structure"]
impl crate::Writable for IDMACTRLR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets IDMACTRLR to value 0"]
impl crate::Resettable for IDMACTRLR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::collections::HashMap;
use regex::Regex;
/// A field rule from the puzzle input: a value is valid for this field when
/// it falls in either of two inclusive ranges.
struct TicketValidation {
    // Field name, e.g. "class".
    name: String,
    // First inclusive range [lower_one, upper_one].
    lower_one: usize,
    upper_one: usize,
    // Second inclusive range [lower_two, upper_two].
    lower_two: usize,
    upper_two: usize
}
impl From<&str> for TicketValidation {
    /// Parses a rule line of the form `name: a-b or c-d`.
    ///
    /// Panics (via `unwrap`) if the line does not match that format.
    /// NOTE(review): the regex is recompiled on every call; consider caching
    /// it if parsing ever becomes hot.
    fn from(line: &str) -> Self {
        let re = Regex::new(r"(\w+): (\d+)-(\d+) or (\d+)-(\d+)").unwrap();
        let matches = re.captures(line).unwrap();
        TicketValidation {
            name: String::from(&matches[1]),
            lower_one: matches[2].parse().unwrap(),
            upper_one: matches[3].parse().unwrap(),
            lower_two: matches[4].parse().unwrap(),
            upper_two: matches[5].parse().unwrap()
        }
    }
}
impl TicketValidation {
    /// Returns true when `other` lies in either of the rule's two inclusive ranges.
    fn validate(&self, other: usize) -> bool {
        (self.lower_one..=self.upper_one).contains(&other)
            || (self.lower_two..=self.upper_two).contains(&other)
    }
}
fn parse_input(path: &str) -> (Vec<TicketValidation>, Vec<usize>, Vec<Vec<usize>>) {
let file = std::fs::read_to_string(path).unwrap();
let mut lines = file.lines();
let mut ticket_validations = Vec::new();
loop {
let line = lines.next().unwrap();
if line.is_empty() { break; }
ticket_validations.push(TicketValidation::from(line));
}
lines.next();
let my_ticket = lines.next().unwrap().split(",").map(|n| n.parse().unwrap()).collect();
lines.next();
lines.next();
let mut nearby_tickets = Vec::new();
while let Some(line) = lines.next() {
nearby_tickets.push(
line.split(",").map(|n| n.parse().unwrap()).collect()
);
}
(ticket_validations, my_ticket, nearby_tickets)
}
/// Collects every value on the nearby tickets that no rule accepts (part one).
fn find_invalid_fields(nearby: &[Vec<usize>], validations: &[TicketValidation]) -> Vec<usize> {
    let mut invalid = Vec::new();
    for ticket in nearby {
        for &value in ticket {
            // A value is invalid when not a single rule validates it.
            if !validations.iter().any(|rule| rule.validate(value)) {
                invalid.push(value);
            }
        }
    }
    invalid
}
/// For each ticket column, lists the indices of the rules that every fully
/// valid nearby ticket satisfies in that column.
///
/// Tickets containing any value that no rule accepts are discarded first.
/// Panics if every nearby ticket is invalid (indexing an empty Vec).
fn column_possibilities(nearby: &[Vec<usize>], validations: &[TicketValidation]) -> Vec<Vec<usize>> {
    let valid_tickets: Vec<&Vec<usize>> = nearby
        .iter()
        .filter(|ticket| {
            ticket.iter().all(|&n| {
                validations.iter().any(|val| val.validate(n))
            })
        })
        .collect();
    let col_count = valid_tickets[0].len();
    // Return a list of length col_count where each is a [rule_index]
    (0..col_count)
        .map(|index| {
            // A rule is a candidate for this column if it validates the
            // column's value on every valid ticket.
            let rule_indeces = validations
                .iter()
                .enumerate()
                .filter(|(_, validation)| {
                    valid_tickets.iter().map(|ticket| ticket[index]).all(|n| validation.validate(n))
                })
                .map(|(val_idx, _)| val_idx)
                .collect();
            rule_indeces
        })
        .collect()
}
/// Returns the iterator's single element, or `None` when it yields zero or
/// more than one item.
fn only<F: Iterator<Item=usize>>(iter: &mut F) -> Option<usize> {
    let sole = iter.next()?;
    if iter.next().is_none() {
        Some(sole)
    } else {
        None
    }
}
/// Resolves the rule-to-column assignment by constraint elimination:
/// repeatedly locks in any column whose unclaimed candidate list has exactly
/// one rule, until a pass makes no further progress.
///
/// Returns a map of rule_index -> column_index; an ambiguous input yields a
/// partial mapping rather than looping forever.
fn solve_possibilities(columns: &[Vec<usize>]) -> HashMap<usize, usize> {
    // `possibilities` is a list of lists where each inner list is called a
    // possibility_list and each entry in that is called a possibility
    // solved contains a map of rule_index: column_index
    let mut solved: HashMap<usize, usize> = HashMap::new();
    loop {
        // Each iteration of this we are looking for possibility_lists that can
        // only be one value.
        let solved_possibility_lists: Vec<(usize, usize)> = columns
            .iter()
            .enumerate()
            // Skip columns already assigned to some rule.
            .filter(|(column_index, _)| solved.values().find(|&v| v == column_index).is_none())
            .filter_map(|(column_index, rule_list)| {
                // Candidate rules for this column not yet claimed elsewhere.
                let mut unclaimed_rule_indeces = rule_list
                    .iter()
                    .filter(|possibility| !solved.contains_key(possibility))
                    .map(|&n| n);
                only(&mut unclaimed_rule_indeces).map(|rule_index| (rule_index, column_index))
            })
            .collect();
        if solved_possibility_lists.len() == 0 { break; }
        solved_possibility_lists.iter().for_each(|(rule_index, column_index)|
            { solved.insert(*rule_index, *column_index); });
    }
    solved
}
/// Entry point: part one sums all nearby-ticket values no rule accepts;
/// part two multiplies this ticket's values for the six "departure" rules
/// (rule indices 0-5 in the input file).
fn main() {
    // Part one
    let (validations, my_ticket, nearby) = parse_input("input.txt");
    let invalid_sum = find_invalid_fields(&nearby, &validations).iter().sum::<usize>();
    println!("Part one: {}", invalid_sum);
    // positions 0-5 are the departure fields
    let columns = column_possibilities(&nearby, &validations);
    let mappings = solve_possibilities(&columns);
    println!("{:?}", mappings);
    let product = mappings.iter()
        .filter(|(&rule_index, _)| rule_index < 6)
        .map(|(_, &col_index)| my_ticket.get(col_index).unwrap())
        .product::<usize>();
    println!("Part two: {}", product);
}
#[test]
fn test_parsers() {
let val = TicketValidation::from("class: 1-3 or 5-7");
assert_eq!(val.name, String::from("class"));
assert_eq!(val.lower_one, 1);
assert_eq!(val.upper_one, 3);
assert_eq!(val.lower_two, 5);
assert_eq!(val.upper_two, 7);
assert_eq!(val.validate(2), true);
assert_eq!(val.validate(8), false);
}
#[test]
fn test_part_one() {
    // The example ticket set has invalid fields summing to 71.
    let (validations, _, nearby) = parse_input("example1.txt");
    let total: usize = find_invalid_fields(&nearby, &validations).iter().sum();
    assert_eq!(total, 71);
}
#[test]
fn test_part_two() {
// Solve the rule-to-column assignment on the example input and check the
// expected mapping: rule 0 -> column 1, rule 1 -> column 0, rule 2 -> column 2.
let (validations, _, nearby) = parse_input("example1.txt");
let columns = column_possibilities(&nearby, &validations);
let solved = solve_possibilities(&columns);
println!("{:?}", solved);
assert_eq!(solved[&0], 1);
assert_eq!(solved[&1], 0);
assert_eq!(solved[&2], 2);
} |
// TODO: f64 should just be any value that supports Sum and to string
/// Sum of all values in `nums`.
///
/// Takes a slice (resolving the old "Vec should be any iterator" TODO for
/// the common case): callers passing `&Vec<f64>` still work via deref
/// coercion. Returns 0.0 for an empty slice.
pub fn sum(nums: &[f64]) -> f64 {
    nums.iter().sum()
}
/// Arithmetic mean of `nums`.
///
/// Takes a slice so `&Vec<f64>` callers keep working via deref coercion
/// (the original `sum(&nums)` also double-borrowed an existing reference).
/// Returns NaN for an empty slice (0.0 / 0.0), as before.
pub fn mean(nums: &[f64]) -> f64 {
    let total: f64 = nums.iter().sum();
    total / nums.len() as f64
}
|
use std::io;
/// Split the decimal string `n` into two strings A and B, digit by digit:
/// a '4' becomes '3' in A and '1' in B; any other digit is kept in A and
/// paired with '0' in B. Trailing whitespace/newline in `n` is trimmed.
fn test_set_1(n: String) -> (String, String) {
    n.trim()
        .chars()
        .map(|digit| if digit == '4' { ('3', '1') } else { (digit, '0') })
        .unzip()
}
/// Read a case count T from stdin, then one number per case, and print the
/// "Case #i: A B" split produced by `test_set_1`.
fn main() {
    let mut t = String::new();
    io::stdin()
        .read_line(&mut t)
        .expect("Failed to read line");
    let t: usize = t.trim().parse().unwrap();
    // 1..=t avoids the `case + 1` offset of the original 0..t loop.
    for case in 1..=t {
        let mut n = String::new();
        io::stdin()
            .read_line(&mut n)
            .expect("Failed to read line");
        let (a, b) = test_set_1(n);
        // println! instead of print!("...\n") (clippy: print_with_newline).
        println!("Case #{}: {} {}", case, a, b);
    }
}
|
use crate::Signature;
use crate::Tagged;
use crate::{CallInfo, ReturnValue, ShellError, Value};
use serde::{Deserialize, Serialize};
use std::io;
/// Interface a Nu plugin implements; `serve_plugin` dispatches decoded
/// `NuCommand` messages to these methods. Filter-style plugins override the
/// `*_filter` methods; sink-style plugins override `sink`. All filter hooks
/// default to returning an empty result set.
pub trait Plugin {
/// Report the plugin's signature so the shell can register it. Required.
fn config(&mut self) -> Result<Signature, ShellError>;
/// Called once before any `filter` calls, with the invocation info.
fn begin_filter(&mut self, _call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
Ok(vec![])
}
/// Called once per input value while filtering.
fn filter(&mut self, _input: Tagged<Value>) -> Result<Vec<ReturnValue>, ShellError> {
Ok(vec![])
}
/// Called once after the last `filter` call.
fn end_filter(&mut self) -> Result<Vec<ReturnValue>, ShellError> {
Ok(vec![])
}
/// Consume the whole input at once (sink-style plugins). Default: no-op.
fn sink(&mut self, _call_info: CallInfo, _input: Vec<Tagged<Value>>) {}
/// Notification that the session is ending. Default: no-op.
fn quit(&mut self) {}
}
/// Entry point for a Nu plugin binary.
///
/// Two modes:
/// * File mode: when a CLI argument is given, it names a file containing one
///   JSON-encoded `NuCommand`, which is processed once.
/// * Stdin mode: otherwise, JSON commands are read line by line from stdin
///   until a terminating command (or a read/parse error) is seen.
pub fn serve_plugin(plugin: &mut dyn Plugin) {
    let mut args = std::env::args();
    if args.len() > 1 {
        // nth(1) == skip(1).next(): the first real argument after argv[0].
        let input = match args.nth(1) {
            Some(arg) => std::fs::read_to_string(arg),
            None => {
                send_response(ShellError::string("No input given.".to_string()));
                return;
            }
        };
        // An unreadable file is silently ignored, as in the original code.
        if let Ok(input) = input {
            handle_message(plugin, &input);
        }
    } else {
        loop {
            let mut input = String::new();
            match io::stdin().read_line(&mut input) {
                Ok(_) => {
                    if handle_message(plugin, &input) {
                        break;
                    }
                }
                e => {
                    send_response(ShellError::string(format!(
                        "Could not handle plugin message: {:?}",
                        e,
                    )));
                    break;
                }
            }
        }
    }
}

/// Decode one JSON `NuCommand` from `input` and dispatch it to `plugin`.
/// Returns `true` when the caller should stop processing further messages;
/// the streaming commands (`begin_filter`, `filter`) return `false` so the
/// stdin loop keeps running.
fn handle_message(plugin: &mut dyn Plugin, input: &str) -> bool {
    match serde_json::from_str::<NuCommand>(input) {
        Ok(NuCommand::config) => {
            send_response(plugin.config());
            true
        }
        Ok(NuCommand::begin_filter { params }) => {
            send_response(plugin.begin_filter(params));
            false
        }
        Ok(NuCommand::filter { params }) => {
            send_response(plugin.filter(params));
            false
        }
        Ok(NuCommand::end_filter) => {
            send_response(plugin.end_filter());
            true
        }
        Ok(NuCommand::sink { params }) => {
            plugin.sink(params.0, params.1);
            true
        }
        Ok(NuCommand::quit) => {
            plugin.quit();
            true
        }
        // Remaining case is a parse failure; report it and stop.
        e => {
            send_response(ShellError::string(format!(
                "Could not handle plugin message: {} {:?}",
                input, e
            )));
            true
        }
    }
}
/// JSON-RPC envelope used for plugin <-> shell messages.
#[derive(Debug, Serialize, Deserialize)]
pub struct JsonRpc<T> {
// Protocol version field; `new` always sets it to "2.0".
jsonrpc: String,
pub method: String,
pub params: T,
}
impl<T> JsonRpc<T> {
pub fn new<U: Into<String>>(method: U, params: T) -> Self {
JsonRpc {
jsonrpc: "2.0".into(),
method: method.into(),
params,
}
}
}
/// Serialize `result` into a JSON-RPC "response" envelope and write it to
/// stdout; if serialization itself fails, the error text is printed instead.
fn send_response<T: Serialize>(result: T) {
    let envelope = JsonRpc::new("response", result);
    match serde_json::to_string(&envelope) {
        Ok(json) => println!("{}", json),
        Err(err) => println!("{}", err),
    }
}
/// Wire-protocol messages the shell sends to a plugin.
/// `#[serde(tag = "method")]` makes the JSON carry a "method" field naming the
/// variant; variant names are lowercase on the wire, hence the
/// `allow(non_camel_case_types)`.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "method")]
#[allow(non_camel_case_types)]
pub enum NuCommand {
// Request the plugin's signature.
config,
// Start a filter session with the invocation info.
begin_filter {
params: CallInfo,
},
// Filter a single input value.
filter {
params: Tagged<Value>,
},
// End the filter session.
end_filter,
// Hand the whole input to a sink-style plugin at once.
sink {
params: (CallInfo, Vec<Tagged<Value>>),
},
// Terminate the plugin.
quit,
}
|
/// Demonstrates tuple destructuring and positional tuple access.
fn main() {
    let tup = (500, 6.4, 1);
    // Destructure; x and z are unused, hence their underscore prefixes.
    // y is used below, so it must not carry an underscore.
    let (_x, y, _z) = tup;
    println!("The value of y is: {}", y);
    // Same value via positional access: tuple indices are 0-based, so y is
    // tup.1. (The original printed tup.2 — that is z — while the message
    // claimed to show y.)
    println!("The value of y is: {}", tup.1);
}
|
#[macro_use]
extern crate nom;
mod parser;
mod entry;
mod ledger;
fn main() {
// Placeholder entry point while the parser/entry/ledger modules are built out.
println!("Hello, world!");
}
|
#![feature(str_char)]
/// First character of `s`.
///
/// The original copied `s` into a `String` and collected every char into a
/// `Vec<char>` just to index element 0; `chars().next()` reads only the
/// first char with no allocation.
///
/// # Panics
/// Panics if `s` is empty (the original `v[0]` also panicked).
fn return_initial(s: &str) -> char {
    s.chars()
        .next()
        .expect("return_initial called on an empty string")
}
fn main() {
    // Print the first character of a sample word.
    let initial = return_initial("apple");
    println!("{}", initial);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.