text stringlengths 8 4.13M |
|---|
/// Returns `true` if `year` is a leap year under the Gregorian calendar:
/// divisible by 4, except century years, which must be divisible by 400.
///
/// Behavior is identical to the original; this form drops the redundant
/// `div_by_100 &&` term (divisibility by 400 implies divisibility by 100)
/// and the non-idiomatic bitwise `&` on `bool`s.
pub fn is_leap_year(year: u64) -> bool {
    year % 4 == 0 && (year % 100 != 0 || year % 400 == 0)
}
|
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Scheme for config.toml
use serde::Deserialize;
use std::path::PathBuf;
/// Top-level scheme for `config.toml`.
#[derive(Deserialize, Debug)]
pub struct Config {
    /// Transaction pool section.
    pub txpool: TxPoolConfig,
    /// RPC API section.
    pub api: ApiConfig,
    /// Database section.
    pub db: DBConfig,
    /// Consensus section (exactly which engine is active is implied by
    /// which of the optional sub-sections is present).
    pub consensus: ConsensusConfig,
    /// Peer-to-peer networking section.
    pub network: NetworkConfig,
}
/// Transaction pool settings.
#[derive(Deserialize, Debug)]
pub struct TxPoolConfig {
    /// Maximum number of transactions held in the pool.
    pub pool_capacity: usize,
}
/// RPC API settings.
#[derive(Deserialize, Debug)]
pub struct ApiConfig {
    /// Address the RPC server listens on.
    pub rpc_addr: String,
    /// Number of RPC worker threads.
    pub rpc_workers: usize,
    /// Maximum number of concurrent RPC connections.
    pub rpc_maxconn: usize,
}
/// Database settings.
#[derive(Deserialize, Debug)]
pub struct DBConfig {
    // NOTE(review): units (bytes? MiB?) are not specified here — confirm
    // against the code that consumes this value.
    pub memory_budget: u64,
    /// Optional database path; `None` presumably falls back to a default.
    pub path: Option<PathBuf>,
    /// Optional list of storage partitions.
    pub partitions: Option<Vec<Partition>>,
}
/// A single database storage partition.
#[derive(Deserialize, Debug)]
pub struct Partition {
    pub path: PathBuf,
    pub target_size: u64,
}
/// Consensus settings; each engine has its own optional sub-config.
#[derive(Deserialize, Debug)]
pub struct ConsensusConfig {
    pub poa: Option<PoaConfig>,
    pub raft: Option<RaftConfig>,
    pub hotstuff: Option<HotStuffConfig>,
}
/// Proof-of-authority engine settings.
#[derive(Deserialize, Debug)]
pub struct PoaConfig {
    pub secret_key_file: Option<PathBuf>,
}
/// Raft engine settings. All timeout values are optional overrides.
#[derive(Deserialize, Debug)]
pub struct RaftConfig {
    pub secret_key_file: Option<PathBuf>,
    pub init_extra_election_timeout: Option<u64>,
    pub extra_election_timeout_per_kb: Option<u64>,
    pub request_proposal_min_interval: Option<u64>,
}
/// HotStuff engine settings.
#[derive(Deserialize, Debug)]
pub struct HotStuffConfig {
    pub secret_key_file: Option<PathBuf>,
    pub init_extra_timeout: Option<u64>,
}
/// Peer-to-peer networking settings.
#[derive(Deserialize, Debug)]
pub struct NetworkConfig {
    pub max_in_peers: u32,
    pub max_out_peers: u32,
    pub listen_addresses: Vec<String>,
    pub external_addresses: Vec<String>,
    pub bootnodes: Vec<String>,
    pub reserved_nodes: Vec<String>,
    /// When `true`, presumably only `reserved_nodes` are connected to —
    /// confirm against the networking code.
    pub reserved_only: bool,
    pub secret_key_file: PathBuf,
}
|
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use hydroflow::scheduled::graph::Hydroflow;
use hydroflow::scheduled::graph_ext::GraphExt;
use hydroflow::scheduled::handoff::{Iter, VecHandoff};
use hydroflow::scheduled::query::Query as Q;
use timely::dataflow::operators::{Concatenate, Filter, Inspect, ToStream};
const NUM_OPS: usize = 20;
const NUM_INTS: usize = 100_000;
const BRANCH_FACTOR: usize = 2;
/// Fork/join benchmark on the low-level Hydroflow scheduled API:
/// a source feeds a fork, then `NUM_OPS` join-fork stages (split by parity),
/// then a final union sink that black-boxes every value.
fn benchmark_hydroflow(c: &mut Criterion) {
    c.bench_function("fork_join/hydroflow", |b| {
        b.iter(|| {
            // The graph is rebuilt on every iteration, so graph construction
            // cost is included in the measurement.
            let mut df = Hydroflow::new();
            let (start_send, start_recv) = df.make_edge::<_, VecHandoff<usize>>("start");
            // One-shot source: emits 0..NUM_INTS exactly once per graph.
            let mut sent = false;
            df.add_subgraph_source("source", start_send, move |_ctx, send| {
                if !sent {
                    sent = true;
                    send.give(Iter(0..NUM_INTS));
                }
            });
            let (send1, mut recv1) = df.make_edge::<_, VecHandoff<_>>("1");
            let (send2, mut recv2) = df.make_edge::<_, VecHandoff<_>>("2");
            // Initial fork: route evens to edge "1", odds to edge "2".
            df.add_subgraph_in_2out(
                "fork",
                start_recv,
                send1,
                send2,
                |_ctx, recv, send1, send2| {
                    for v in recv.take_inner().into_iter() {
                        if v % 2 == 0 {
                            send1.give(Some(v));
                        } else {
                            send2.give(Some(v));
                        }
                    }
                },
            );
            // Chain of NUM_OPS stages, each merging both inputs and
            // re-splitting them by parity.
            for _ in 0..NUM_OPS {
                let (send1, next_recv1) = df.make_edge("1");
                let (send2, next_recv2) = df.make_edge("2");
                df.add_subgraph_2in_2out(
                    "join-fork",
                    recv1,
                    recv2,
                    send1,
                    send2,
                    |_ctx, recv1, recv2, send1, send2| {
                        for v in recv1.take_inner().into_iter().chain(recv2.take_inner()) {
                            if v % 2 == 0 {
                                send1.give(Some(v));
                            } else {
                                send2.give(Some(v));
                            }
                        }
                    },
                );
                recv1 = next_recv1;
                recv2 = next_recv2;
            }
            // Final union: consume both branches, black-boxing each value so
            // the optimizer cannot discard the pipeline.
            df.add_subgraph_2sink("join (union)", recv1, recv2, |_ctx, recv1, recv2| {
                for x in recv1.take_inner() {
                    black_box(x);
                }
                for x in recv2.take_inner() {
                    black_box(x);
                }
            });
            df.run_available()
        })
    });
}
/// Same fork/join pipeline, built with Hydroflow's higher-level `Query`
/// builder: tee into two branches, filter each by parity, concat back.
fn benchmark_hydroflow_builder(c: &mut Criterion) {
    c.bench_function("fork_join/hydroflow_builder", |b| {
        b.iter(|| {
            // TODO(justin): this creates more operators than necessary.
            let mut q = Q::new();
            let mut source = q.source(|_ctx, send| {
                send.give(Iter(0..NUM_INTS));
            });
            for _ in 0..NUM_OPS {
                // tee(2) yields exactly two branches; unwraps are safe.
                let mut outs = source.tee(2).into_iter();
                let (mut out1, mut out2) = (outs.next().unwrap(), outs.next().unwrap());
                out1 = out1.filter(|x| x % 2 == 0);
                out2 = out2.filter(|x| x % 2 == 1);
                source = out1.concat(out2);
            }
            source.sink(|v| {
                black_box(v);
            });
            q.run_available();
        })
    });
}
fn benchmark_raw(c: &mut Criterion) {
c.bench_function("fork_join/raw", |b| {
b.iter(|| {
let mut parts = [(); BRANCH_FACTOR].map(|_| Vec::new());
let mut data: Vec<_> = (0..NUM_INTS).collect();
for _ in 0..NUM_OPS {
for i in data.drain(..) {
parts[i % BRANCH_FACTOR].push(i);
}
for part in parts.iter_mut() {
data.append(part);
}
}
})
});
}
/// Same workload on timely dataflow: per stage, `BRANCH_FACTOR` filtered
/// branches (one per residue class) concatenated back into one stream.
fn benchmark_timely(c: &mut Criterion) {
    c.bench_function("fork_join/timely", |b| {
        b.iter(|| {
            timely::example(|scope| {
                let mut op = (0..NUM_INTS).to_stream(scope);
                for _ in 0..NUM_OPS {
                    let mut ops = Vec::new();
                    for i in 0..BRANCH_FACTOR {
                        // `move` captures `i` so each branch keeps its class.
                        ops.push(op.filter(move |x| x % BRANCH_FACTOR == i))
                    }
                    op = scope.concatenate(ops);
                }
                // Observe every value so the dataflow cannot be elided.
                op.inspect(|i| {
                    black_box(i);
                });
            });
        })
    });
}
// fn benchmark_spinach(c: &mut Criterion) {
// c.bench_function("spinach", |b| {
// b.to_async(
// tokio::runtime::Builder::new_current_thread()
// .build()
// .unwrap(),
// )
// .iter(|| {
// async {
// use spinachflow::comp::Comp;
// type MyLatRepr =
// spinachflow::lattice::set_union::SetUnionRepr<spinachflow::tag::VEC, usize>;
// let op = <spinachflow::op::OnceOp<MyLatRepr>>::new((0..NUM_INTS).collect());
// struct Even();
// impl spinachflow::func::unary::Morphism for Even {
// type InLatRepr = MyLatRepr;
// type OutLatRepr = MyLatRepr;
// fn call<Y: spinachflow::hide::Qualifier>(
// &self,
// item: spinachflow::hide::Hide<Y, Self::InLatRepr>,
// ) -> spinachflow::hide::Hide<Y, Self::OutLatRepr> {
// item.filter(|i| 0 == i % 2)
// }
// }
// struct Odds();
// impl spinachflow::func::unary::Morphism for Odds {
// type InLatRepr = MyLatRepr;
// type OutLatRepr = MyLatRepr;
// fn call<Y: spinachflow::hide::Qualifier>(
// &self,
// item: spinachflow::hide::Hide<Y, Self::InLatRepr>,
// ) -> spinachflow::hide::Hide<Y, Self::OutLatRepr> {
// item.filter(|i| 1 == i % 2)
// }
// }
// ///// MAGIC NUMBER!!!!!!!! is NUM_OPS
// seq_macro::seq!(N in 0..20 {
// let [ op_even, op_odds ] = spinachflow::op::fixed_split::<_, 2>(op);
// let op_even = spinachflow::op::MorphismOp::new(op_even, Even());
// let op_odds = spinachflow::op::MorphismOp::new(op_odds, Odds());
// let op = spinachflow::op::MergeOp::new(op_even, op_odds);
// let op = spinachflow::op::DynOpDelta::new(Box::new(op));
// });
// let comp = spinachflow::comp::NullComp::new(op);
// spinachflow::comp::CompExt::run(&comp).await.unwrap_err();
// }
// });
// });
// }
// fn benchmark_spinach_switch(c: &mut Criterion) {
// c.bench_function("spinach w/ switch", |b| {
// b.to_async(
// tokio::runtime::Builder::new_current_thread()
// .build()
// .unwrap(),
// )
// .iter(|| {
// async {
// use spinachflow::comp::Comp;
// type MyLatRepr =
// spinachflow::lattice::set_union::SetUnionRepr<spinachflow::tag::VEC, usize>;
// let op = <spinachflow::op::OnceOp<MyLatRepr>>::new((0..NUM_INTS).collect());
// struct SwitchEvenOdd();
// impl spinachflow::func::unary::Morphism for SwitchEvenOdd {
// type InLatRepr = MyLatRepr;
// type OutLatRepr = spinachflow::lattice::pair::PairRepr<MyLatRepr, MyLatRepr>;
// fn call<Y: spinachflow::hide::Qualifier>(
// &self,
// item: spinachflow::hide::Hide<Y, Self::InLatRepr>,
// ) -> spinachflow::hide::Hide<Y, Self::OutLatRepr> {
// let (a, b) = item.switch(|i| 0 == i % 2);
// spinachflow::hide::Hide::zip(a, b)
// }
// }
// ///// MAGIC NUMBER!!!!!!!! is NUM_OPS
// seq_macro::seq!(N in 0..20 {
// let op = spinachflow::op::MorphismOp::new(op, SwitchEvenOdd());
// let ( op_even, op_odds ) = spinachflow::op::SwitchOp::new(op);
// let op = spinachflow::op::MergeOp::new(op_even, op_odds);
// let op = spinachflow::op::DynOpDelta::new(Box::new(op));
// });
// let comp = spinachflow::comp::NullComp::new(op);
// spinachflow::comp::CompExt::run(&comp).await.unwrap_err();
// }
// });
// });
// }
// fn benchmark_spinachflow_symm(c: &mut Criterion) {
// c.bench_function("spinachflow (symmetric)", |b| {
// b.to_async(
// tokio::runtime::Builder::new_current_thread()
// .build()
// .unwrap(),
// )
// .iter(|| {
// async {
// use spinachflow::futures::StreamExt;
// use spinachflow::futures::future::ready;
// let stream = spinachflow::futures::stream::iter(0..NUM_INTS);
// ///// MAGIC NUMBER!!!!!!!! is NUM_OPS
// seq_macro::seq!(N in 0..20 {
// let splitter = spinachflow::stream::Splitter::new(stream);
// let mut i = 0;
// let splits = [(); BRANCH_FACTOR].map(|_| {
// let j = i;
// i += 1;
// splitter.add_split().filter(move |x| ready(j == x % BRANCH_FACTOR))
// });
// let stream = spinachflow::stream::SelectArr::new(splits);
// let stream: std::pin::Pin<Box<dyn spinachflow::futures::Stream<Item = usize>>> = Box::pin(stream);
// });
// let mut stream = stream;
// loop {
// let item = stream.next().await;
// if item.is_none() {
// break;
// }
// }
// }
// });
// });
// }
// criterion_group!(
// name = fork_join_dataflow;
// config = Criterion::default().with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
// targets = benchmark_babyflow
// );
// criterion_group!(fork_join_dataflow, benchmark_timely,);
// Benchmark registration. The spinach/spinachflow variants above are kept
// commented out (their implementations are also commented out in this file).
criterion_group!(
    fork_join_dataflow,
    benchmark_hydroflow,
    benchmark_hydroflow_builder,
    benchmark_timely,
    benchmark_raw,
    // benchmark_spinach,
    // benchmark_spinach_switch,
    // benchmark_spinachflow_symm,
);
criterion_main!(fork_join_dataflow);
|
/// Binary search over a sorted slice. Returns `Some(target)` when `target`
/// is present, `None` otherwise. (Note: returns the target itself, not an
/// index — matching the original contract.)
///
/// BUGFIXES vs. original:
/// - `let mut max = v.len() - 1` panicked (usize underflow) on an empty input;
/// - `max = mid - 1` underflowed when `mid == 0` and `target < v[0]`.
/// Both are avoided by using a half-open interval `[lo, hi)`.
/// The parameter is also relaxed to `&[i32]`; `&Vec<i32>` callers coerce.
fn binary_search(v: &[i32], target: i32) -> Option<i32> {
    let mut lo: usize = 0;
    let mut hi: usize = v.len(); // exclusive upper bound
    while lo < hi {
        let mid = lo + (hi - lo) / 2; // overflow-safe midpoint
        if v[mid] == target {
            return Some(target);
        }
        if v[mid] < target {
            lo = mid + 1;
        } else {
            hi = mid; // exclusive, so no `- 1` (and no underflow)
        }
    }
    None
}
#[test]
fn test_search() {
    // Table-driven version of the same three assertions.
    let v: Vec<i32> = vec![1, 2, 3, 4];
    let cases = [(2, Some(2)), (5, None), (4, Some(4))];
    for (target, expected) in cases {
        assert_eq!(binary_search(&v, target), expected);
    }
}
|
impl Solution {
pub fn h_index(mut citations: Vec<i32>) -> i32 {
let (mut res,n) = (0,citations.len());
citations.sort_unstable();
for i in (1..=n).rev(){
if citations[n -i] >= i as i32{
res = i as i32;
break;
}
}
res
}
} |
pub(crate) mod history; |
/*
chapter 4
primitive types
char
*/
/// Prints an ASCII char and a non-ASCII (multi-byte UTF-8) char,
/// one per line — `char` holds any Unicode scalar value.
fn main() {
    for c in ['n', '❤'] {
        println!("{}", c);
    }
}
// output should be:
/*
n
❤
*/
|
// base16.rs
use std::vec;
// Result of a decode pass. NOTE(review): this file is pre-1.0 Rust
// (`uint`, `~str`, `bytes!`) and will not build on a modern toolchain.
enum DecodeSize {
    Done(uint), // on success: number of bytes written to the output
    Fail(uint, ~str), // on failure: position and human-readable reason
}
static BASE16_TABLE: &'static [u8] = bytes!("0123456789ABCDEF");
static BASE16_DECODE_MAP: [u8, ..256] = [
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 255, 255, 255, 255, 255, 255,
255, 10, 11, 12, 13, 14, 15, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 10, 11, 12, 13, 14, 15, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
];
// TODO: doc
/// Hex-encodes `src` with the uppercase base16 alphabet; the output is
/// exactly two bytes per input byte.
pub fn encode(src: &[u8]) -> ~[u8] {
    let dst_length = src.len() * 2;
    let mut dst = vec::with_capacity(dst_length);
    // set_len exposes uninitialized bytes, but base16_encode overwrites
    // every position below before the buffer is returned.
    unsafe { vec::raw::set_len(&mut dst, dst_length); }
    base16_encode(BASE16_TABLE, dst, src);
    dst
}
// TODO: doc
/// Hex-decodes `src`, aborting the task (`fail!`) on malformed input.
/// Use `decode_result` for a non-failing variant.
pub fn decode(src: &[u8]) -> ~[u8] {
    match decode_result(src) {
        Ok(dst) => dst,
        Err(reason) => fail!(reason)
    }
}
// TODO: doc
/// Hex-decodes `src` into a fresh buffer of `src.len() / 2` bytes.
/// Returns `Err` with a reason string for odd-length or non-hex input.
pub fn decode_result(src: &[u8]) -> Result<~[u8], ~str> {
    let dst_length = src.len() / 2;
    let mut dst = vec::with_capacity(dst_length);
    // As in encode(): base16_decode fills every byte before success, and on
    // failure the buffer is dropped, so the uninitialized window is safe-ish
    // by this code's own conventions.
    unsafe { vec::raw::set_len(&mut dst, dst_length); }
    match base16_decode(BASE16_DECODE_MAP, dst, src) {
        Done(_) => Ok(dst),
        Fail(_, reason) => Err(reason)
    }
}
// Encodes each source byte as two table bytes: high nibble, then low nibble.
// `dst` must be at least `2 * src.len()` long.
fn base16_encode(table: &[u8], dst: &mut [u8], src: &[u8]) {
    for i in range(0u, src.len()) {
        // The original wrote `i+1*i` / `i+1*i + 1`, which by precedence is
        // `i + (1*i)` == 2*i — correct but needlessly obfuscated.
        dst[2*i] = table[src[i]>>4];
        dst[2*i + 1] = table[src[i] & 0x0f];
    }
}
// Decodes hex pairs from `src` into `dst` using the 256-entry lookup map
// (0xff marks an invalid hex byte). Returns Done(bytes_written) or Fail.
fn base16_decode(decode_map: &[u8], dst: &mut [u8], src: &[u8]) -> DecodeSize {
    if src.len() % 2 == 1 {
        return Fail(0, ~"odd length base16 data");
    }
    for i in range(0, src.len()/2) {
        // High nibble, then low nibble; either being unmapped aborts.
        // NOTE(review): Fail always reports position 0 rather than i — the
        // position field appears unused by callers here.
        let a = decode_map[src[i*2]];
        if (a == 0xff) { return Fail(0, fail_decode_on(src[i*2])); }
        let b = decode_map[src[i*2+1]];
        if (b == 0xff) { return Fail(0, fail_decode_on(src[i*2+1])); }
        dst[i] = a<<4 | b;
    }
    Done(src.len()/2)
}
// Builds the error message for a byte that is not valid base16.
fn fail_decode_on(b: u8) -> ~str {
    format!("illegal base16 byte {}", b)
}
|
use libc::{c_int, c_ushort, c_void, setsockopt, socklen_t, SOL_SOCKET};
use std::io::Error;
use std::mem::{forget, size_of_val};
use std::os::unix::io::RawFd;
use std::ptr::null;
/// A single classic-BPF instruction. `#[repr(C)]` keeps the layout
/// compatible with the kernel's `sock_filter` struct (opcode, jump-if-true
/// offset, jump-if-false offset, generic operand).
#[repr(C)]
#[derive(Debug)]
pub struct Op {
    code: u16,
    jt: u8,
    jf: u8,
    k: u32,
}

impl Op {
    /// Creates one BPF instruction from its four raw fields.
    pub fn new(code: u16, jt: u8, jf: u8, k: u32) -> Op {
        // Field-init shorthand replaces the redundant `code: code` form.
        Op { code, jt, jf, k }
    }
}
/// A BPF program: `#[repr(C)]` mirrors the kernel's `sock_fprog`
/// (instruction count + pointer to the instruction array).
#[repr(C)]
#[derive(Debug)]
pub struct Prog {
    len: c_ushort,
    filter: *mut Op,
}
impl Prog {
    /// Builds a `Prog` from a list of ops. The boxed slice is deliberately
    /// leaked with `forget` so `filter` stays valid for the kernel;
    /// ownership is reclaimed in `Drop`.
    pub fn new(ops: Vec<Op>) -> Prog {
        let mut ops = ops.into_boxed_slice();
        let len = ops.len();
        let ptr = ops.as_mut_ptr();
        // Leak the allocation; Drop for Prog re-assembles and frees it.
        forget(ops);
        Prog {
            len: len as _,
            filter: ptr,
        }
    }
}
impl Drop for Prog {
    fn drop(&mut self) {
        // SAFETY: `filter`/`len` were produced by `Prog::new` from a boxed
        // slice of exactly `len` elements that was leaked with `forget`, so
        // rebuilding the Vec here frees that allocation exactly once.
        unsafe {
            let len = self.len as usize;
            let ptr = self.filter;
            Vec::from_raw_parts(ptr, len, len);
        }
    }
}
const SO_ATTACH_FILTER: c_int = 26;
const SO_DETACH_FILTER: c_int = 27;
const SO_LOCK_FILTER: c_int = 44;
/// Builds a `bpf::Prog` from `(code jt jf k)` instruction tuples, e.g.
/// `bpfprog!(1, 0x06 0 0 0)`. `$count` is only a capacity hint for the
/// `Vec` — a mismatch with the actual number of tuples is not checked and
/// only affects the initial allocation.
#[macro_export]
macro_rules! bpfprog {
    ($count:expr, $($code:tt $jt:tt $jf:tt $k:tt),*) => {
        {
            let mut ops = Vec::with_capacity($count);
            $(ops.push(bpf::Op::new($code, $jt, $jf, $k));)*
            bpf::Prog::new(ops)
        }
    }
}
pub fn attach_filter(fd: RawFd, prog: Prog) -> Result<(), Error> {
match unsafe {
setsockopt(
fd as c_int,
SOL_SOCKET,
SO_ATTACH_FILTER,
&prog as *const _ as *const c_void,
size_of_val(&prog) as socklen_t,
)
} {
0 => Ok(()),
_ => Err(Error::last_os_error()),
}
}
pub fn detach_filter(fd: RawFd) -> Result<(), Error> {
match unsafe { setsockopt(fd as c_int, SOL_SOCKET, SO_DETACH_FILTER, null(), 0) } {
0 => Ok(()),
_ => Err(Error::last_os_error()),
}
}
pub fn lock_filter(fd: RawFd) -> Result<(), Error> {
let one: c_int = 1;
match unsafe {
setsockopt(
fd as c_int,
SOL_SOCKET,
SO_LOCK_FILTER,
&one as *const _ as *const c_void,
size_of_val(&one) as socklen_t,
)
} {
0 => Ok(()),
_ => Err(Error::last_os_error()),
}
}
|
#[doc = "Reader of register XTAL_CLK_DIV_CONFIG"]
pub type R = crate::R<u32, super::XTAL_CLK_DIV_CONFIG>;
#[doc = "Writer for register XTAL_CLK_DIV_CONFIG"]
pub type W = crate::W<u32, super::XTAL_CLK_DIV_CONFIG>;
#[doc = "Register XTAL_CLK_DIV_CONFIG `reset()`'s with value 0"]
// svd2rust-generated: register resets to all-zero.
impl crate::ResetValue for super::XTAL_CLK_DIV_CONFIG {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `SYSCLK_DIV`"]
pub type SYSCLK_DIV_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `SYSCLK_DIV`"]
pub struct SYSCLK_DIV_W<'a> {
w: &'a mut W,
}
impl<'a> SYSCLK_DIV_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of bits 0..=1; extra bits of `value` are masked off.
        self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03);
        self.w
    }
}
#[doc = "Reader of field `LLCLK_DIV`"]
pub type LLCLK_DIV_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `LLCLK_DIV`"]
pub struct LLCLK_DIV_W<'a> {
w: &'a mut W,
}
impl<'a> LLCLK_DIV_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of bits 2..=3; extra bits of `value` are masked off.
        self.w.bits = (self.w.bits & !(0x03 << 2)) | (((value as u32) & 0x03) << 2);
        self.w
    }
}
// Field readers: each extracts its 2-bit field from the raw register value.
impl R {
    #[doc = "Bits 0:1 - System clock pre-divider value. The 24 MHz crystal clock is divided to generate the system clock. 0: NO_DIV: SYSCLK= XTALCLK/1 1: DIV_BY_2: SYSCLK= XTALCLK/2 2: DIV_BY_4: SYSCLK= XTALCLK/4 3: DIV_BY_8: SYSCLK= XTALCLK/8"]
    #[inline(always)]
    pub fn sysclk_div(&self) -> SYSCLK_DIV_R {
        SYSCLK_DIV_R::new((self.bits & 0x03) as u8)
    }
    #[doc = "Bits 2:3 - Link Layer clock pre-divider value. The 24 MHz crystal clock is divided to generate the Link Layer clock. 0: NO_DIV: LLCLK= XTALCLK/1 1: DIV_BY_2: LLCLK= XTALCLK/2 2: DIV_BY_4: LLCLK= XTALCLK/4 3: DIV_BY_8: LLCLK= XTALCLK/8"]
    #[inline(always)]
    pub fn llclk_div(&self) -> LLCLK_DIV_R {
        LLCLK_DIV_R::new(((self.bits >> 2) & 0x03) as u8)
    }
}
// Field writers: each returns a write proxy for its 2-bit field.
impl W {
    #[doc = "Bits 0:1 - System clock pre-divider value. The 24 MHz crystal clock is divided to generate the system clock. 0: NO_DIV: SYSCLK= XTALCLK/1 1: DIV_BY_2: SYSCLK= XTALCLK/2 2: DIV_BY_4: SYSCLK= XTALCLK/4 3: DIV_BY_8: SYSCLK= XTALCLK/8"]
    #[inline(always)]
    pub fn sysclk_div(&mut self) -> SYSCLK_DIV_W {
        SYSCLK_DIV_W { w: self }
    }
    #[doc = "Bits 2:3 - Link Layer clock pre-divider value. The 24 MHz crystal clock is divided to generate the Link Layer clock. 0: NO_DIV: LLCLK= XTALCLK/1 1: DIV_BY_2: LLCLK= XTALCLK/2 2: DIV_BY_4: LLCLK= XTALCLK/4 3: DIV_BY_8: LLCLK= XTALCLK/8"]
    #[inline(always)]
    pub fn llclk_div(&mut self) -> LLCLK_DIV_W {
        LLCLK_DIV_W { w: self }
    }
}
|
use *;
impl<T> Reactor<T> {
/// Evaluate bool.
/// Evaluate bool.
///
/// Recursively evaluates the boolean expression node `id`. Unlike
/// `eval1`..`eval4`, boolean results are not memoized in `env.cache`.
/// Note: both operands of And/Or are evaluated into locals first, so there
/// is no short-circuit skipping of sub-expression evaluation (and of any
/// caching it performs).
pub fn eval_bool(&self, id: ptr::Bool, env: &mut Environment<T>) -> bool
    where T: Float, f64: Cast<T>
{
    use fns::Bool::*;
    match self[id] {
        Data(a) => a,
        Not(a) => !self.eval_bool(a, env),
        And(a, b) => {
            let a = self.eval_bool(a, env);
            let b = self.eval_bool(b, env);
            a && b
        }
        Or(a, b) => {
            let a = self.eval_bool(a, env);
            let b = self.eval_bool(b, env);
            a || b
        }
        Xor(a, b) => {
            let a = self.eval_bool(a, env);
            let b = self.eval_bool(b, env);
            a ^ b
        }
        EqBool(a, b) => {
            let a = self.eval_bool(a, env);
            let b = self.eval_bool(b, env);
            a == b
        }
        // Eq1..Eq4: exact equality on scalars/vectors of dimension 1..4.
        Eq1(a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            a == b
        }
        Eq2(a, b) => {
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            a == b
        }
        Eq3(a, b) => {
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            a == b
        }
        Eq4(a, b) => {
            let a = self.eval4(a, env);
            let b = self.eval4(b, env);
            a == b
        }
        // Scalar comparisons; Less/GreaterOrEqual are the only orderings.
        Less(a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            a < b
        }
        GreaterOrEqual(a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            a >= b
        }
    }
}
/// Evaluate scalar.
/// Evaluate scalar.
///
/// Evaluates the 1D (scalar) expression node `id`, memoized through
/// `env.cache.points1`: a previously computed value is returned directly,
/// and a fresh value is stored only if a cache slot for this id already
/// exists (slots are presumably pre-registered elsewhere — TODO confirm).
pub fn eval1(&self, id: ptr::Point1<T>, env: &mut Environment<T>) -> T
    where T: Float,
          f64: Cast<T>
{
    use fns::Point1::*;
    // Fast path: cached result.
    if let Some(&Some(val)) = env.cache.points1.get(&usize::from(id)) {
        return val;
    }
    let val = match self[id] {
        Data(a) => a,
        // Time1/Time2: sample a spline/surface at a computed parameter.
        Time1(f, t) => self.eval_spline1(f, self.eval1(t, env), env),
        Time2(f, t) => self.eval_surface1(f, self.eval2(t, env), env),
        Sum(a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            a + b
        }
        Diff(a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            a - b
        }
        Prod(a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            a * b
        }
        Div(a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            a / b
        }
        Dot2(a, b) => {
            use vecmath::vec2_dot as dot;
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            dot(a, b)
        }
        Dot3(a, b) => {
            use vecmath::vec3_dot as dot;
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            dot(a, b)
        }
        Dot4(a, b) => {
            use vecmath::vec4_dot as dot;
            let a = self.eval4(a, env);
            let b = self.eval4(b, env);
            dot(a, b)
        }
        Cross(a, b) => {
            // BUGFIX: the 2D cross product is the scalar "perp dot"
            // a.x*b.y - a.y*b.x. The original evaluated vec2_dot here,
            // which made Cross behave identically to Dot2 (compare eval3's
            // Cross arm, which correctly uses vec3_cross).
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            a[0] * b[1] - a[1] * b[0]
        }
        Abs(a) => {
            let a = self.eval1(a, env);
            let _0 = T::zero();
            if a < _0 {-a} else {a}
        }
        Len2(a) => vecmath::vec2_len(self.eval2(a, env)),
        Len3(a) => vecmath::vec3_len(self.eval3(a, env)),
        Len4(a) => vecmath::vec4_len(self.eval4(a, env)),
        Neg(a) => -self.eval1(a, env),
        Sign(a) => self.eval1(a, env).signum(),
        Sin(a) => self.eval1(a, env).sin(),
        Cos(a) => self.eval1(a, env).cos(),
        Tan(a) => self.eval1(a, env).tan(),
        Asin(a) => self.eval1(a, env).asin(),
        Acos(a) => self.eval1(a, env).acos(),
        Atan(a) => self.eval1(a, env).atan(),
        Atan2(a, b) => self.eval1(a, env).atan2(self.eval1(b, env)),
        Sinh(a) => self.eval1(a, env).sinh(),
        Cosh(a) => self.eval1(a, env).cosh(),
        Tanh(a) => self.eval1(a, env).tanh(),
        Asinh(a) => self.eval1(a, env).asinh(),
        Acosh(a) => self.eval1(a, env).acosh(),
        Atanh(a) => self.eval1(a, env).atanh(),
        Sqrt(a) => self.eval1(a, env).sqrt(),
        Max(a, b) => self.eval1(a, env).max(self.eval1(b, env)),
        Min(a, b) => self.eval1(a, env).min(self.eval1(b, env)),
        DegToRad(a) => self.eval1(a, env).deg_to_rad(),
        RadToDeg(a) => self.eval1(a, env).rad_to_deg(),
        Time => env.time.cast(),
        DeltaTime => env.dt.cast(),
    };
    // Store only into a pre-existing cache slot.
    if let Some(entry) = env.cache.points1.get_mut(&usize::from(id)) {
        *entry = Some(val);
    }
    val
}
/// Evaluate 2D point.
/// Evaluate 2D point.
///
/// Same memoization scheme as `eval1`, using `env.cache.points2`:
/// cached values are returned directly; fresh values are stored only into
/// a pre-existing cache slot.
pub fn eval2(&self, id: ptr::Point2<T>, env: &mut Environment<T>) -> [T; 2]
    where T: Float,
          f64: Cast<T>
{
    use fns::Point2::*;
    if let Some(&Some(val)) = env.cache.points2.get(&usize::from(id)) {
        return val;
    }
    let val = match self[id] {
        Data(a) => a,
        Time1(f, t) => self.eval_spline2(f, self.eval1(t, env), env),
        Time2(f, t) => self.eval_surface2(f, self.eval2(t, env), env),
        Sum(a, b) => {
            use vecmath::vec2_add as add;
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            add(a, b)
        }
        // Prod is component-wise, not a dot product.
        Prod(a, b) => {
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            [a[0] * b[0], a[1] * b[1]]
        }
        Diff(a, b) => {
            use vecmath::vec2_sub as sub;
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            sub(a, b)
        }
        // Max/Min are component-wise.
        Max(a, b) => {
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            [a[0].max(b[0]), a[1].max(b[1])]
        }
        Min(a, b) => {
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            [a[0].min(b[0]), a[1].min(b[1])]
        }
    };
    if let Some(entry) = env.cache.points2.get_mut(&usize::from(id)) {
        *entry = Some(val);
    }
    val
}
/// Evaluate 3D point.
/// Evaluate 3D point.
///
/// Same memoization scheme as `eval1`, using `env.cache.points3`.
pub fn eval3(&self, id: ptr::Point3<T>, env: &mut Environment<T>) -> [T; 3]
    where T: Float,
          f64: Cast<T>
{
    use fns::Point3::*;
    if let Some(&Some(val)) = env.cache.points3.get(&usize::from(id)) {
        return val;
    }
    let val = match self[id] {
        Data(a) => a,
        Time1(f, t) => self.eval_spline3(f, self.eval1(t, env), env),
        Time2(f, t) => self.eval_surface3(f, self.eval2(t, env), env),
        Sum(a, b) => {
            use vecmath::vec3_add as add;
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            add(a, b)
        }
        // Component-wise product (not dot, not cross).
        Prod(a, b) => {
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            [a[0] * b[0], a[1] * b[1], a[2] * b[2]]
        }
        Diff(a, b) => {
            use vecmath::vec3_sub as sub;
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            sub(a, b)
        }
        // True 3D vector cross product.
        Cross(a, b) => {
            use vecmath::vec3_cross as cross;
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            cross(a, b)
        }
        Max(a, b) => {
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            [a[0].max(b[0]), a[1].max(b[1]), a[2].max(b[2])]
        }
        Min(a, b) => {
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            [a[0].min(b[0]), a[1].min(b[1]), a[2].min(b[2])]
        }
    };
    if let Some(entry) = env.cache.points3.get_mut(&usize::from(id)) {
        *entry = Some(val);
    }
    val
}
/// Evaluate 4D point.
/// Evaluate 4D point.
///
/// Same memoization scheme as `eval1`, using `env.cache.points4`.
pub fn eval4(&self, id: ptr::Point4<T>, env: &mut Environment<T>) -> [T; 4]
    where T: Float,
          f64: Cast<T>
{
    use fns::Point4::*;
    if let Some(&Some(val)) = env.cache.points4.get(&usize::from(id)) {
        return val;
    }
    let val = match self[id] {
        Data(a) => a,
        Time1(f, t) => self.eval_spline4(f, self.eval1(t, env), env),
        Time2(f, t) => self.eval_surface4(f, self.eval2(t, env), env),
        Sum(a, b) => {
            use vecmath::vec4_add as add;
            let a = self.eval4(a, env);
            let b = self.eval4(b, env);
            add(a, b)
        }
        // Component-wise product.
        Prod(a, b) => {
            let a = self.eval4(a, env);
            let b = self.eval4(b, env);
            [a[0] * b[0], a[1] * b[1], a[2] * b[2], a[3] * b[3]]
        }
        Diff(a, b) => {
            use vecmath::vec4_sub as sub;
            let a = self.eval4(a, env);
            let b = self.eval4(b, env);
            sub(a, b)
        }
        Max(a, b) => {
            let a = self.eval4(a, env);
            let b = self.eval4(b, env);
            [a[0].max(b[0]), a[1].max(b[1]), a[2].max(b[2]), a[3].max(b[3])]
        }
        Min(a, b) => {
            let a = self.eval4(a, env);
            let b = self.eval4(b, env);
            [a[0].min(b[0]), a[1].min(b[1]), a[2].min(b[2]), a[3].min(b[3])]
        }
    };
    if let Some(entry) = env.cache.points4.get_mut(&usize::from(id)) {
        *entry = Some(val);
    }
    val
}
/// Evaluate 1D spline with an argument.
/// Evaluate 1D spline with an argument.
///
/// `arg` is the curve parameter (presumably in [0, 1] — the lerp formulas
/// assume so but nothing clamps it here).
pub fn eval_spline1(&self, id: SplineRef1<T>, arg: T, env: &mut Environment<T>) -> T
    where T: Float,
          f64: Cast<T>
{
    use fns::Spline::*;
    match self[id] {
        // Linear interpolation between endpoints.
        Line(a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            let _1 = T::one();
            a * (_1 - arg) + b * arg
        }
        // De Casteljau evaluation of a quadratic Bezier.
        QuadraticBezier(a, b, c) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            let c = self.eval1(c, env);
            let _1 = T::one();
            let ab = a * (_1 - arg) + b * arg;
            let bc = b * (_1 - arg) + c * arg;
            ab * (_1 - arg) + bc * arg
        }
        // NOTE: this is a lerp of the two edge lerps (ab, cd) — a
        // "cubic-like" blend, not full De Casteljau (no bc midpoint).
        CubicBezier(a, b, c, d) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            let c = self.eval1(c, env);
            let d = self.eval1(d, env);
            let _1 = T::one();
            let ab = a * (_1 - arg) + b * arg;
            let cd = c * (_1 - arg) + d * arg;
            ab * (_1 - arg) + cd * arg
        }
        // Re-parameterize: sample spline f over the sub-range [a, b].
        Segment(f, a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            let _1 = T::one();
            let t = a * (_1 - arg) + b * arg;
            self.eval_spline1(f, t, env)
        }
        // Sample surface f along the 2D line from point a to point b.
        OnSurface(f, a, b) => {
            use vecmath::vec2_add as add;
            use vecmath::vec2_scale as scale;
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            let _1 = T::one();
            let t = add(scale(a, _1 - arg), scale(b, arg));
            self.eval_surface1(f, t, env)
        }
        // Walk the boundary of the unit parameter square of surface f:
        // bottom, right, top (reversed), left (reversed), one edge per
        // quarter of arg.
        Contour(f) => {
            let _025: T = 0.25.cast();
            let _4: T = 4.0.cast();
            let _0: T = 0.0.cast();
            let _05: T = 0.5.cast();
            let _1: T = 1.0.cast();
            let _075 = 0.75.cast();
            if arg < _025 {self.eval_surface1(f, [_4 * arg, _0], env)}
            else if arg < _05 {self.eval_surface1(f, [_1, _4 * (arg - _025)], env)}
            else if arg < _075 {self.eval_surface1(f, [_1 - _4 * (arg - _05), _1], env)}
            else {self.eval_surface1(f, [_0, _1 - _4 * (arg - _075)], env)}
        }
    }
}
/// Evaluate 2D spline with an argument.
/// Evaluate 2D spline with an argument.
///
/// Vector analogue of `eval_spline1`; the control points are 2D and the
/// lerps use `vec2_add`/`vec2_scale`.
pub fn eval_spline2(&self, id: SplineRef2<T>, arg: T, env: &mut Environment<T>) -> [T; 2]
    where T: Float,
          f64: Cast<T>
{
    use fns::Spline::*;
    use vecmath::vec2_add as add;
    use vecmath::vec2_scale as scale;
    match self[id] {
        Line(a, b) => {
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            let _1 = T::one();
            add(scale(a, _1 - arg), scale(b, arg))
        }
        QuadraticBezier(a, b, c) => {
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            let c = self.eval2(c, env);
            let _1 = T::one();
            let ab = add(scale(a, _1 - arg), scale(b, arg));
            let bc = add(scale(b, _1 - arg), scale(c, arg));
            add(scale(ab, _1 - arg), scale(bc, arg))
        }
        // Same reduced "cubic" blend as eval_spline1 (lerp of ab and cd).
        CubicBezier(a, b, c, d) => {
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            let c = self.eval2(c, env);
            let d = self.eval2(d, env);
            let _1 = T::one();
            let ab = add(scale(a, _1 - arg), scale(b, arg));
            let cd = add(scale(c, _1 - arg), scale(d, arg));
            add(scale(ab, _1 - arg), scale(cd, arg))
        }
        // Re-parameterize over the scalar sub-range [a, b].
        Segment(f, a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            let _1 = T::one();
            let t = a * (_1 - arg) + b * arg;
            self.eval_spline2(f, t, env)
        }
        OnSurface(f, a, b) => {
            use vecmath::vec2_add as add;
            use vecmath::vec2_scale as scale;
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            let _1 = T::one();
            let t = add(scale(a, _1 - arg), scale(b, arg));
            self.eval_surface2(f, t, env)
        }
        // Boundary walk of the unit parameter square (see eval_spline1).
        Contour(f) => {
            let _025: T = 0.25.cast();
            let _4: T = 4.0.cast();
            let _0: T = 0.0.cast();
            let _05: T = 0.5.cast();
            let _1: T = 1.0.cast();
            let _075 = 0.75.cast();
            if arg < _025 {self.eval_surface2(f, [_4 * arg, _0], env)}
            else if arg < _05 {self.eval_surface2(f, [_1, _4 * (arg - _025)], env)}
            else if arg < _075 {self.eval_surface2(f, [_1 - _4 * (arg - _05), _1], env)}
            else {self.eval_surface2(f, [_0, _1 - _4 * (arg - _075)], env)}
        }
    }
}
/// Evaluate 3D spline with an argument.
/// Evaluate 3D spline with an argument.
///
/// Vector analogue of `eval_spline1` with 3D control points.
pub fn eval_spline3(&self, id: SplineRef3<T>, arg: T, env: &mut Environment<T>) -> [T; 3]
    where T: Float,
          f64: Cast<T>
{
    use fns::Spline::*;
    use vecmath::vec3_add as add;
    use vecmath::vec3_scale as scale;
    match self[id] {
        Line(a, b) => {
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            let _1 = T::one();
            add(scale(a, _1 - arg), scale(b, arg))
        }
        QuadraticBezier(a, b, c) => {
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            let c = self.eval3(c, env);
            let _1 = T::one();
            let ab = add(scale(a, _1 - arg), scale(b, arg));
            let bc = add(scale(b, _1 - arg), scale(c, arg));
            add(scale(ab, _1 - arg), scale(bc, arg))
        }
        // Same reduced "cubic" blend as eval_spline1 (lerp of ab and cd).
        CubicBezier(a, b, c, d) => {
            let a = self.eval3(a, env);
            let b = self.eval3(b, env);
            let c = self.eval3(c, env);
            let d = self.eval3(d, env);
            let _1 = T::one();
            let ab = add(scale(a, _1 - arg), scale(b, arg));
            let cd = add(scale(c, _1 - arg), scale(d, arg));
            add(scale(ab, _1 - arg), scale(cd, arg))
        }
        Segment(f, a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            let _1 = T::one();
            let t = a * (_1 - arg) + b * arg;
            self.eval_spline3(f, t, env)
        }
        // The surface parameter is 2D, hence the local vec2 aliases that
        // shadow the vec3 ones above.
        OnSurface(f, a, b) => {
            use vecmath::vec2_add as add;
            use vecmath::vec2_scale as scale;
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            let _1 = T::one();
            let t = add(scale(a, _1 - arg), scale(b, arg));
            self.eval_surface3(f, t, env)
        }
        // Boundary walk of the unit parameter square (see eval_spline1).
        Contour(f) => {
            let _025: T = 0.25.cast();
            let _4: T = 4.0.cast();
            let _0: T = 0.0.cast();
            let _05: T = 0.5.cast();
            let _1: T = 1.0.cast();
            let _075 = 0.75.cast();
            if arg < _025 {self.eval_surface3(f, [_4 * arg, _0], env)}
            else if arg < _05 {self.eval_surface3(f, [_1, _4 * (arg - _025)], env)}
            else if arg < _075 {self.eval_surface3(f, [_1 - _4 * (arg - _05), _1], env)}
            else {self.eval_surface3(f, [_0, _1 - _4 * (arg - _075)], env)}
        }
    }
}
/// Evaluate 4D spline with an argument.
/// Evaluate 4D spline with an argument.
///
/// Vector analogue of `eval_spline1` with 4D control points.
pub fn eval_spline4(&self, id: SplineRef4<T>, arg: T, env: &mut Environment<T>) -> [T; 4]
    where T: Float,
          f64: Cast<T>
{
    use fns::Spline::*;
    use vecmath::vec4_add as add;
    use vecmath::vec4_scale as scale;
    match self[id] {
        Line(a, b) => {
            let a = self.eval4(a, env);
            let b = self.eval4(b, env);
            let _1 = T::one();
            add(scale(a, _1 - arg), scale(b, arg))
        }
        QuadraticBezier(a, b, c) => {
            let a = self.eval4(a, env);
            let b = self.eval4(b, env);
            let c = self.eval4(c, env);
            let _1 = T::one();
            let ab = add(scale(a, _1 - arg), scale(b, arg));
            let bc = add(scale(b, _1 - arg), scale(c, arg));
            add(scale(ab, _1 - arg), scale(bc, arg))
        }
        // Same reduced "cubic" blend as eval_spline1 (lerp of ab and cd).
        CubicBezier(a, b, c, d) => {
            let a = self.eval4(a, env);
            let b = self.eval4(b, env);
            let c = self.eval4(c, env);
            let d = self.eval4(d, env);
            let _1 = T::one();
            let ab = add(scale(a, _1 - arg), scale(b, arg));
            let cd = add(scale(c, _1 - arg), scale(d, arg));
            add(scale(ab, _1 - arg), scale(cd, arg))
        }
        Segment(f, a, b) => {
            let a = self.eval1(a, env);
            let b = self.eval1(b, env);
            let _1 = T::one();
            let t = a * (_1 - arg) + b * arg;
            self.eval_spline4(f, t, env)
        }
        // The surface parameter is 2D, hence the local vec2 aliases that
        // shadow the vec4 ones above.
        OnSurface(f, a, b) => {
            use vecmath::vec2_add as add;
            use vecmath::vec2_scale as scale;
            let a = self.eval2(a, env);
            let b = self.eval2(b, env);
            let _1 = T::one();
            let t = add(scale(a, _1 - arg), scale(b, arg));
            self.eval_surface4(f, t, env)
        }
        // Boundary walk of the unit parameter square (see eval_spline1).
        Contour(f) => {
            let _025: T = 0.25.cast();
            let _4: T = 4.0.cast();
            let _0: T = 0.0.cast();
            let _05: T = 0.5.cast();
            let _1: T = 1.0.cast();
            let _075 = 0.75.cast();
            if arg < _025 {self.eval_surface4(f, [_4 * arg, _0], env)}
            else if arg < _05 {self.eval_surface4(f, [_1, _4 * (arg - _025)], env)}
            else if arg < _075 {self.eval_surface4(f, [_1 - _4 * (arg - _05), _1], env)}
            else {self.eval_surface4(f, [_0, _1 - _4 * (arg - _075)], env)}
        }
    }
}
/// Evaluate 1D surface with an argument.
///
/// `arg` is the surface parameter `[u, v]`, normally in `[0, 1] x [0, 1]`.
pub fn eval_surface1(
    &self,
    id: SurfaceRef1<T>,
    arg: [T; 2],
    env: &mut Environment<T>
) -> T
    where T: Float,
          f64: Cast<T>
{
    use fns::Surface::*;

    match self[id] {
        // Bilinear interpolation of the four corner values.
        Rect(ref rect) => {
            let a = self.eval1(rect[0], env);
            let b = self.eval1(rect[1], env);
            let c = self.eval1(rect[2], env);
            let d = self.eval1(rect[3], env);
            let _1 = T::one();
            let ab = a * (_1 - arg[0]) + b * arg[0];
            let cd = c * (_1 - arg[0]) + d * arg[0];
            ab * (_1 - arg[1]) + cd * arg[1]
        }
        // Blend two splines: `ab` sampled at `u`, `cd` sampled at `v`.
        Lerp(ab, cd) => {
            let ab = self.eval_spline1(ab, arg[0], env);
            let cd = self.eval_spline1(cd, arg[1], env);
            let _1 = T::one();
            ab * (_1 - arg[1]) + cd * arg[1]
        }
        // Weighted blend of two edge interpolations; `smooth` biases
        // the weights toward an even mix.
        CurvedQuad {smooth, ab, cd, ac, bd} => {
            let smooth = self.eval1(smooth, env);
            let _1 = T::one();
            let _0 = T::zero();
            let _05: T = 0.5.cast();
            let _4: T = 4.0.cast();
            let abx = self.eval_spline1(ab, arg[1], env);
            let cdx = self.eval_spline1(cd, arg[1], env);
            let acx = self.eval_spline1(ac, arg[0], env);
            let bdx = self.eval_spline1(bd, arg[0], env);
            let w0 = _4 * (arg[0] - _05) * (arg[0] - _05) + smooth;
            let w1 = _4 * (arg[1] - _05) * (arg[1] - _05) + smooth;
            let a = abx + (cdx - abx) * arg[0];
            let b = acx + (bdx - acx) * arg[1];
            // Fix: check for a degenerate weight sum BEFORE normalizing.
            // The old code normalized first, which made the zero-sum
            // branch unreachable (0/0 yields NaN, and NaN == 0 is false),
            // so the degenerate case produced NaN instead of the average.
            let w_sum = w0 + w1;
            if w_sum == _0 {
                (a + b) * _05
            } else {
                // Normalize weights.
                let (w0, w1) = (w0 / w_sum, w1 / w_sum);
                if w0 == _1 {a}
                else if w1 == _1 {b}
                else {a * w0 + b * w1}
            }
        }
        // 1D projection of a circle: only the sine component contributes.
        Circle(center, radius) => {
            let center = self.eval1(center, env);
            let radius = self.eval1(radius, env);
            let two_pi = 6.283185307179586.cast();
            let angle = arg[0] * two_pi;
            center + radius * arg[1] * angle.sin()
        }
    }
}
/// Evaluate 2D surface with an argument.
///
/// `arg` is the surface parameter `[u, v]`, normally in `[0, 1] x [0, 1]`.
pub fn eval_surface2(
    &self,
    id: SurfaceRef2<T>,
    arg: [T; 2], env: &mut Environment<T>
) -> [T; 2]
    where T: Float,
          f64: Cast<T>
{
    use fns::Surface::*;
    use vecmath::vec2_add as add;
    use vecmath::vec2_sub as sub;
    use vecmath::vec2_scale as scale;

    match self[id] {
        // Bilinear interpolation of the four corner points.
        Rect(ref rect) => {
            let a = self.eval2(rect[0], env);
            let b = self.eval2(rect[1], env);
            let c = self.eval2(rect[2], env);
            let d = self.eval2(rect[3], env);
            let _1 = T::one();
            let ab = add(scale(a, _1 - arg[0]), scale(b, arg[0]));
            let cd = add(scale(c, _1 - arg[0]), scale(d, arg[0]));
            add(scale(ab, _1 - arg[1]), scale(cd, arg[1]))
        }
        // Blend two splines: `ab` sampled at `u`, `cd` sampled at `v`.
        Lerp(ab, cd) => {
            let ab = self.eval_spline2(ab, arg[0], env);
            let cd = self.eval_spline2(cd, arg[1], env);
            let _1 = T::one();
            add(scale(ab, _1 - arg[1]), scale(cd, arg[1]))
        }
        // Weighted blend of two edge interpolations; `smooth` biases
        // the weights toward an even mix.
        CurvedQuad {smooth, ab, cd, ac, bd} => {
            let smooth = self.eval1(smooth, env);
            let _1 = T::one();
            let _0 = T::zero();
            let _05: T = 0.5.cast();
            let _4: T = 4.0.cast();
            let abx = self.eval_spline2(ab, arg[1], env);
            let cdx = self.eval_spline2(cd, arg[1], env);
            let acx = self.eval_spline2(ac, arg[0], env);
            let bdx = self.eval_spline2(bd, arg[0], env);
            let w0 = _4 * (arg[0] - _05) * (arg[0] - _05) + smooth;
            let w1 = _4 * (arg[1] - _05) * (arg[1] - _05) + smooth;
            let a = add(abx, scale(sub(cdx, abx), arg[0]));
            let b = add(acx, scale(sub(bdx, acx), arg[1]));
            // Fix: check for a degenerate weight sum BEFORE normalizing.
            // The old code normalized first, which made the zero-sum
            // branch unreachable (0/0 yields NaN, and NaN == 0 is false),
            // so the degenerate case produced NaN instead of the average.
            let w_sum = w0 + w1;
            if w_sum == _0 {
                scale(add(a, b), _05)
            } else {
                // Normalize weights.
                let (w0, w1) = (w0 / w_sum, w1 / w_sum);
                if w0 == _1 {a}
                else if w1 == _1 {b}
                else {add(scale(a, w0), scale(b, w1))}
            }
        }
        // Point on a circle: `u` is the angle fraction, `v` scales the radius.
        Circle(center, radius) => {
            let center = self.eval2(center, env);
            let radius = self.eval1(radius, env);
            let two_pi = 6.283185307179586.cast();
            let angle = arg[0] * two_pi;
            [
                center[0] + radius * arg[1] * angle.cos(),
                center[1] + radius * arg[1] * angle.sin()
            ]
        }
    }
}
/// Evaluate 3D surface with an argument.
///
/// `arg` is the surface parameter `[u, v]`, normally in `[0, 1] x [0, 1]`.
pub fn eval_surface3(
    &self,
    id: SurfaceRef3<T>,
    arg: [T; 2],
    env: &mut Environment<T>
) -> [T; 3]
    where T: Float,
          f64: Cast<T>
{
    use fns::Surface::*;
    use vecmath::vec3_add as add;
    use vecmath::vec3_sub as sub;
    use vecmath::vec3_scale as scale;

    match self[id] {
        // Bilinear interpolation of the four corner points.
        Rect(ref rect) => {
            let a = self.eval3(rect[0], env);
            let b = self.eval3(rect[1], env);
            let c = self.eval3(rect[2], env);
            let d = self.eval3(rect[3], env);
            let _1 = T::one();
            let ab = add(scale(a, _1 - arg[0]), scale(b, arg[0]));
            let cd = add(scale(c, _1 - arg[0]), scale(d, arg[0]));
            add(scale(ab, _1 - arg[1]), scale(cd, arg[1]))
        }
        // Blend two splines: `ab` sampled at `u`, `cd` sampled at `v`.
        Lerp(ab, cd) => {
            let ab = self.eval_spline3(ab, arg[0], env);
            let cd = self.eval_spline3(cd, arg[1], env);
            let _1 = T::one();
            add(scale(ab, _1 - arg[1]), scale(cd, arg[1]))
        }
        // Weighted blend of two edge interpolations; `smooth` biases
        // the weights toward an even mix.
        CurvedQuad {smooth, ab, cd, ac, bd} => {
            let smooth = self.eval1(smooth, env);
            let _1 = T::one();
            let _0 = T::zero();
            let _05: T = 0.5.cast();
            let _4: T = 4.0.cast();
            let abx = self.eval_spline3(ab, arg[1], env);
            let cdx = self.eval_spline3(cd, arg[1], env);
            let acx = self.eval_spline3(ac, arg[0], env);
            let bdx = self.eval_spline3(bd, arg[0], env);
            let w0 = _4 * (arg[0] - _05) * (arg[0] - _05) + smooth;
            let w1 = _4 * (arg[1] - _05) * (arg[1] - _05) + smooth;
            let a = add(abx, scale(sub(cdx, abx), arg[0]));
            let b = add(acx, scale(sub(bdx, acx), arg[1]));
            // Fix: check for a degenerate weight sum BEFORE normalizing.
            // The old code normalized first, which made the zero-sum
            // branch unreachable (0/0 yields NaN, and NaN == 0 is false),
            // so the degenerate case produced NaN instead of the average.
            let w_sum = w0 + w1;
            if w_sum == _0 {
                scale(add(a, b), _05)
            } else {
                // Normalize weights.
                let (w0, w1) = (w0 / w_sum, w1 / w_sum);
                if w0 == _1 {a}
                else if w1 == _1 {b}
                else {add(scale(a, w0), scale(b, w1))}
            }
        }
        // Circle in the xy-plane; the z component stays at the center.
        Circle(center, radius) => {
            let center = self.eval3(center, env);
            let radius = self.eval1(radius, env);
            let two_pi = 6.283185307179586.cast();
            let angle = arg[0] * two_pi;
            [
                center[0] + radius * arg[1] * angle.cos(),
                center[1] + radius * arg[1] * angle.sin(),
                center[2]
            ]
        }
    }
}
/// Evaluate 4D surface with an argument.
///
/// `arg` is the surface parameter `[u, v]`, normally in `[0, 1] x [0, 1]`.
pub fn eval_surface4(
    &self,
    id: SurfaceRef4<T>,
    arg: [T; 2],
    env: &mut Environment<T>
) -> [T; 4]
    where T: Float,
          f64: Cast<T>
{
    use fns::Surface::*;
    use vecmath::vec4_add as add;
    use vecmath::vec4_sub as sub;
    use vecmath::vec4_scale as scale;

    match self[id] {
        // Bilinear interpolation of the four corner points.
        Rect(ref rect) => {
            let a = self.eval4(rect[0], env);
            let b = self.eval4(rect[1], env);
            let c = self.eval4(rect[2], env);
            let d = self.eval4(rect[3], env);
            let _1 = T::one();
            let ab = add(scale(a, _1 - arg[0]), scale(b, arg[0]));
            let cd = add(scale(c, _1 - arg[0]), scale(d, arg[0]));
            add(scale(ab, _1 - arg[1]), scale(cd, arg[1]))
        }
        // Blend two splines: `ab` sampled at `u`, `cd` sampled at `v`.
        Lerp(ab, cd) => {
            let ab = self.eval_spline4(ab, arg[0], env);
            let cd = self.eval_spline4(cd, arg[1], env);
            let _1 = T::one();
            add(scale(ab, _1 - arg[1]), scale(cd, arg[1]))
        }
        // Weighted blend of two edge interpolations; `smooth` biases
        // the weights toward an even mix.
        CurvedQuad {smooth, ab, cd, ac, bd} => {
            let smooth = self.eval1(smooth, env);
            let _1 = T::one();
            let _0 = T::zero();
            let _05: T = 0.5.cast();
            let _4: T = 4.0.cast();
            let abx = self.eval_spline4(ab, arg[1], env);
            let cdx = self.eval_spline4(cd, arg[1], env);
            let acx = self.eval_spline4(ac, arg[0], env);
            let bdx = self.eval_spline4(bd, arg[0], env);
            let w0 = _4 * (arg[0] - _05) * (arg[0] - _05) + smooth;
            let w1 = _4 * (arg[1] - _05) * (arg[1] - _05) + smooth;
            let a = add(abx, scale(sub(cdx, abx), arg[0]));
            let b = add(acx, scale(sub(bdx, acx), arg[1]));
            // Fix: check for a degenerate weight sum BEFORE normalizing.
            // The old code normalized first, which made the zero-sum
            // branch unreachable (0/0 yields NaN, and NaN == 0 is false),
            // so the degenerate case produced NaN instead of the average.
            let w_sum = w0 + w1;
            if w_sum == _0 {
                scale(add(a, b), _05)
            } else {
                // Normalize weights.
                let (w0, w1) = (w0 / w_sum, w1 / w_sum);
                if w0 == _1 {a}
                else if w1 == _1 {b}
                else {add(scale(a, w0), scale(b, w1))}
            }
        }
        // Circle in the xy-plane; the z and w components stay at the center.
        Circle(center, radius) => {
            let center = self.eval4(center, env);
            let radius = self.eval1(radius, env);
            let two_pi = 6.283185307179586.cast();
            let angle = arg[0] * two_pi;
            [
                center[0] + radius * arg[1] * angle.cos(),
                center[1] + radius * arg[1] * angle.sin(),
                center[2],
                center[3]
            ]
        }
    }
}
/// Evaluates color.
///
/// Resolves the color referenced by `id` and converts it into the
/// requested color space.
pub fn eval_color(
    &self,
    id: ptr::Color,
    space: color::ColorSpace,
    env: &mut Environment<T>
) -> Color
    where T: Float + Cast<f32>, f64: Cast<T>
{
    use color::gamma_linear_to_srgb;
    use fns::Color::*;
    use color::ColorSpace::*;

    match self[id] {
        // Literal color data: convert out of linear space on demand.
        Data(data) => {
            if let SRGB = space {
                gamma_linear_to_srgb(data)
            } else {
                data
            }
        }
        // Time-driven color: sample the spline at the evaluated time.
        Time1(f, t) => {
            let time = self.eval1(t, env);
            self.eval_color_spline(f, time, space, env)
        }
    }
}
/// Evaluates color spline.
///
/// Samples the spline at parameter `arg` and returns the result in the
/// requested color space.
pub fn eval_color_spline(
    &self,
    id: ptr::ColorSpline,
    arg: T,
    space: color::ColorSpace,
    env: &mut Environment<T>
) -> Color
    where T: Float + Cast<f32>, f64: Cast<T>
{
    use color::gamma_linear_to_srgb;
    use color::ColorSpace::*;
    use fns::ColorSpline::*;
    use vecmath::vec4_add as add;
    use vecmath::vec4_scale as scale;

    let t: f32 = arg.cast();
    match self[id] {
        Lerp(from, to) => {
            // Blend the endpoints in linear space before any gamma
            // conversion.
            let from = self.eval_color(from, Linear, env);
            let to = self.eval_color(to, Linear, env);
            let blended = add(scale(from, 1.0 - t), scale(to, t));
            if let SRGB = space {
                gamma_linear_to_srgb(blended)
            } else {
                blended
            }
        }
    }
}
}
|
use std::collections::HashSet;
use std::mem;
use std::path::PathBuf;
use std::thread;
use std::time::Duration;
use std::time::Instant;
use std::vec;
use clap;
use futures::Future;
use futures_cpupool::CpuPool;
use num_cpus;
use output::Output;
use output::OutputJob;
use convert::utils::*;
use misc::*;
use misc::args::ClapSubCommandRzbackupArgs;
use zbackup::data::*;
use zbackup::disk_format::*;
use zbackup::repository::*;
use zbackup::repository_core::*;
/// Rewrites bundles in the repository so that each one holds a consistent
/// number of chunks.
///
/// Runs balancing passes in a loop, committing a checkpoint after each
/// pass, until nothing remains to balance.  When `cluster_backups` is set,
/// chunks referenced by backups are grouped separately from other chunks.
/// Returns `Ok (true)` on completion, or an error string on failure.
pub fn balance_bundles (
	output: & Output,
	arguments: & BalanceBundlesArguments,
) -> Result <bool, String> {

	// minimum chunks a bundle may hold, derived from the configured fill
	// factor (a percentage of chunks-per-bundle)

	let minimum_chunk_count: u64 =
		arguments.chunks_per_bundle * arguments.fill_factor / 100;

	let repository_core;
	let backup_chunk_ids;

	if arguments.cluster_backups {

		// open repository

		let repository =
			string_result_with_prefix (
				|| format! (
					"Error opening repository {}: ",
					arguments.repository_path.to_string_lossy ()),
				Repository::open (
					& output,
					arguments.repository_config.clone (),
					& arguments.repository_path,
					arguments.password_file_path.clone ()),
			) ?;

		repository_core =
			repository.core ().clone ();

		// load indexes

		repository.load_indexes (
			output,
		) ?;

		// get list of backup files

		let backup_files =
			scan_backups (
				output,
				& arguments.repository_path,
			) ?;

		// get a list of chunks used by backups

		backup_chunk_ids =
			get_recursive_chunks (
				output,
				& repository,
				& backup_files,
			) ?;

	} else {

		// open repository

		repository_core =
			string_result_with_prefix (
				|| format! (
					"Error opening repository {}: ",
					arguments.repository_path.to_string_lossy ()),
				RepositoryCore::open (
					& output,
					& arguments.repository_path,
					arguments.password_file_path.clone ()),
			) ?;

		// clustering not requested, so no backup chunks to keep together

		backup_chunk_ids =
			HashSet::new ()

	};

	// create cpu pool

	let num_threads =
		(num_cpus::get () - 1) * 5 / 3 + 1;

	let cpu_pool =
		CpuPool::new (
			num_threads);

	loop {

		{

			// begin transaction

			let atomic_file_writer =
				AtomicFileWriter::new (
					output,
					& arguments.repository_path,
					Some (arguments.sleep_time),
				) ?;

			// get list of index files

			let old_index_ids_and_sizes = (
				scan_index_files_with_sizes (
					& arguments.repository_path)
			) ?;

			output.message_format (
				format_args! (
					"Found {} index files",
					old_index_ids_and_sizes.len ()));

			// read indexes and discard any which are balanced

			let mut unbalanced_indexes: Vec <(IndexId, Vec <RawIndexEntry>)> =
				Vec::new ();

			let mut new_bundles_total: u64 = 0;

			read_indexes_find_unbalanced (
				output,
				& repository_core,
				& arguments,
				minimum_chunk_count,
				& old_index_ids_and_sizes,
				& backup_chunk_ids,
				& mut unbalanced_indexes,
				& mut new_bundles_total,
			) ?;

			// do nothing if there is only one unbalanced bundle

			if count_unbalanced_bundles (
				minimum_chunk_count,
				arguments.chunks_per_bundle,
				& backup_chunk_ids,
				& unbalanced_indexes,
			) < 2 {

				output_message! (
					output,
					"Nothing to do");

				break;

			}

			// balance bundles; a true result means the pass finished and
			// no further passes are required

			if balance_bundles_real (
				output,
				& cpu_pool,
				num_threads,
				& repository_core,
				& atomic_file_writer,
				& arguments,
				minimum_chunk_count,
				& backup_chunk_ids,
				unbalanced_indexes,
				new_bundles_total,
			) ? {
				break;
			}

		}

		// sleep a while

		if arguments.sleep_time != Duration::from_secs (0) {

			let output_job =
				output_job_start! (
					output,
					"Sleeping");

			thread::sleep (
				arguments.sleep_time);

			output_job.complete ();

		}

	}

	// return

	Ok (true)

}
/// Reads every index file and collects those containing unbalanced bundles.
///
/// An index entry counts as unbalanced when its bundle mixes backup and
/// non-backup chunks, or when its chunk count falls outside the configured
/// bounds.  Affected indexes are appended to `unbalanced_indexes`, and the
/// number of new bundles required is stored in `new_bundles_total`.
fn read_indexes_find_unbalanced (
	output: & Output,
	repository_core: & RepositoryCore,
	arguments: & BalanceBundlesArguments,
	minimum_chunk_count: u64,
	old_index_ids_and_sizes: & Vec <(IndexId, u64)>,
	backup_chunk_ids: & HashSet <ChunkId>,
	unbalanced_indexes: & mut Vec <(IndexId, Vec <RawIndexEntry>)>,
	new_bundles_total: & mut u64,
) -> Result <(), String> {

	let output_job =
		output_job_start! (
			output,
			"Loading indexes");

	let total_index_size =
		old_index_ids_and_sizes.iter ().map (
			|& (_, old_index_size)|
			old_index_size
		).sum ();

	let mut seen_bundle_ids: HashSet <BundleId> =
		HashSet::new ();

	let mut read_index_size: u64 = 0;
	let mut unbalanced_chunks_count: u64 = 0;

	for & (
		old_index_id,
		old_index_size,
	) in old_index_ids_and_sizes.iter () {

		output_job.progress (
			read_index_size,
			total_index_size);

		let old_index_path =
			repository_core.index_path (
				old_index_id);

		let old_index_entries =
			index_read_path (
				& old_index_path,
				repository_core.encryption_key (),
			) ?;

		// sanity check: a bundle must not appear in more than one index

		for & RawIndexEntry {
			index_bundle_header: ref old_index_bundle_header,
			..
		} in old_index_entries.iter () {

			if seen_bundle_ids.contains (
				& old_index_bundle_header.bundle_id ()) {

				return Err (
					format! (
						"Duplicated bundle id in index: {}",
						old_index_bundle_header.bundle_id ()));

			}

			seen_bundle_ids.insert (
				old_index_bundle_header.bundle_id (),
			);

		}

		// count the chunks in this index which belong to unbalanced bundles

		let old_index_unbalanced_chunks_count =
			old_index_entries.iter ().map (
				|& RawIndexEntry {
					index_bundle_header: ref _old_index_bundle_header,
					bundle_info: ref old_index_bundle_info,
				}| {

				let old_index_backup_chunk_ids: HashSet <ChunkId> =
					old_index_bundle_info.chunks ().map (
						|old_index_bundle_info_chunk|
						old_index_bundle_info_chunk.chunk_id ()
					).collect ();

				let old_index_num_backup_chunks =
					old_index_backup_chunk_ids.intersection (
						backup_chunk_ids,
					).count () as u64;

				(
					old_index_num_backup_chunks,
					old_index_bundle_info.num_chunks ()
						- old_index_num_backup_chunks,
				)

			}).filter (
				|& (
					old_index_backup_chunk_count,
					old_index_non_backup_chunk_count,
				)| {

				let old_index_total_chunk_count =
					old_index_backup_chunk_count
					+ old_index_non_backup_chunk_count;

				// unbalanced when backup and non-backup chunks are mixed,
				// or when the bundle's size is out of bounds

				(
					old_index_backup_chunk_count > 0
					&& old_index_non_backup_chunk_count > 0
				) || (
					old_index_total_chunk_count
						< minimum_chunk_count
					|| old_index_total_chunk_count
						> arguments.chunks_per_bundle
				)

			}).map (
				|(backup_chunk_count, non_backup_chunk_count)|
				backup_chunk_count + non_backup_chunk_count
			).sum ();

		if old_index_unbalanced_chunks_count > 0 {

			unbalanced_indexes.push (
				(
					old_index_id,
					old_index_entries,
				)
			);

		}

		unbalanced_chunks_count +=
			old_index_unbalanced_chunks_count;

		read_index_size +=
			old_index_size;

	}

	// number of new bundles needed, rounding up

	* new_bundles_total =
		(unbalanced_chunks_count + arguments.chunks_per_bundle - 1)
			/ arguments.chunks_per_bundle;

	output_job.complete ();

	output_message! (
		output,
		"Found {} chunks in {} {} to balance into {} bundles",
		unbalanced_chunks_count,
		unbalanced_indexes.len (),
		if unbalanced_indexes.len () == 1 { "index" } else { "indexes" },
		new_bundles_total);

	Ok (())

}
/// Counts the distinct bundles which a balance pass would rewrite.
///
/// A bundle counts when it mixes backup and non-backup chunks, or when its
/// chunk count is below `minimum_chunk_count` or above `maximum_chunk_count`.
fn count_unbalanced_bundles (
	minimum_chunk_count: u64,
	maximum_chunk_count: u64,
	backup_chunk_ids: & HashSet <ChunkId>,
	unbalanced_indexes: & [(IndexId, Vec <RawIndexEntry>)],
) -> u64 {

	let unbalanced_bundle_ids: HashSet <BundleId> =
		unbalanced_indexes.iter ().flat_map (
			|& (ref _index_id, ref index_entries)|
			index_entries.iter ().filter (
				|&& RawIndexEntry {
					ref bundle_info,
					..
				}| {

				let bundle_backup_chunk_ids: HashSet <ChunkId> =
					bundle_info.chunks ().map (
						|bundle_info_chunk|
						bundle_info_chunk.chunk_id ()
					).collect ();

				let bundle_num_backup_chunks =
					bundle_backup_chunk_ids.intersection (
						backup_chunk_ids,
					).count () as u64;

				let bundle_num_non_backup_chunks =
					bundle_info.num_chunks ()
					- bundle_num_backup_chunks;

				// same unbalanced criteria as the index scan

				(
					bundle_num_backup_chunks > 0
					&& bundle_num_non_backup_chunks > 0
				) || (
					bundle_info.num_chunks () < minimum_chunk_count
					|| bundle_info.num_chunks () > maximum_chunk_count
				)

			}).map (
				|& RawIndexEntry {
					ref index_bundle_header,
					..
				}|
				index_bundle_header.bundle_id ()
			)
		).collect ();

	// de-duplicated via the hash set

	unbalanced_bundle_ids.len () as u64

}
/// Performs one balancing pass over the unbalanced indexes.
///
/// Reads unbalanced bundles, regroups their chunks into new bundles of the
/// configured size, writes a new index, and commits via the atomic file
/// writer.  Returns `Ok (true)` when every planned bundle was written, or
/// `Ok (false)` when the pass stopped early at the checkpoint time.
///
/// Fix: the commit progress message was misspelled ("Comitting changes");
/// it now reads "Committing changes".
fn balance_bundles_real (
	output: & Output,
	cpu_pool: & CpuPool,
	max_tasks: usize,
	repository_core: & RepositoryCore,
	atomic_file_writer: & AtomicFileWriter,
	arguments: & BalanceBundlesArguments,
	minimum_chunk_count: u64,
	backup_chunk_ids: & HashSet <ChunkId>,
	unbalanced_indexes: Vec <(IndexId, Vec <RawIndexEntry>)>,
	new_bundles_total: u64,
) -> Result <bool, String> {

	let output_job =
		output_job_start! (
			output,
			"Balancing bundles");

	let start_time =
		Instant::now ();

	let checkpoint_time =
		start_time + arguments.checkpoint_time;

	// background task results

	enum Task {

		ReadBundle {
			output_job: OutputJob,
			chunks: Vec <(ChunkId, Vec <u8>)>,
		},

		WriteBundle {
			output_job: OutputJob,
			index_entry: RawIndexEntry,
		},

	}

	// mutable state shared by the controller closures

	struct State {
		new_bundles_count: u64,
		pending_backup_chunks: Vec <(ChunkId, Vec <u8>)>,
		pending_non_backup_chunks: Vec <(ChunkId, Vec <u8>)>,
		pending_index_entries: Vec <RawIndexEntry>,
		index_iterator: vec::IntoIter <(IndexId, Vec <RawIndexEntry>)>,
		index_entry_iterator: vec::IntoIter <RawIndexEntry>,
	}

	let mut state = State {
		new_bundles_count: 0,
		pending_backup_chunks: Vec::new (),
		pending_non_backup_chunks: Vec::new (),
		pending_index_entries: Vec::new (),
		index_iterator: unbalanced_indexes.into_iter (),
		index_entry_iterator: Vec::new ().into_iter (),
	};

	// concurrent operation

	concurrent_controller (
		output,
		max_tasks,
		& mut state,

		|state| {

			// write bundles

			if (
				state.pending_backup_chunks.len ()
					>= arguments.chunks_per_bundle as usize

				|| state.pending_non_backup_chunks.len ()
					>= arguments.chunks_per_bundle as usize

			) {

				// take the first chunks-per-bundle chunks, leaving the
				// remainder pending

				let mut bundle_chunks;

				if state.pending_backup_chunks.len ()
					>= arguments.chunks_per_bundle as usize {

					bundle_chunks =
						state.pending_backup_chunks.split_off (
							arguments.chunks_per_bundle as usize);

					mem::swap (
						& mut bundle_chunks,
						& mut state.pending_backup_chunks);

				} else {

					bundle_chunks =
						state.pending_non_backup_chunks.split_off (
							arguments.chunks_per_bundle as usize);

					mem::swap (
						& mut bundle_chunks,
						& mut state.pending_non_backup_chunks);

				};

				let repository_core = repository_core.clone ();
				let atomic_file_writer = atomic_file_writer.clone ();

				let output_job_write_bundle =
					output_job_start! (
						output,
						"Writing bundle {} of {}",
						state.new_bundles_count + 1,
						new_bundles_total);

				let task = (
					cpu_pool.spawn_fn (move || {

						flush_bundle (
							& output_job_write_bundle,
							& repository_core,
							atomic_file_writer,
							& bundle_chunks,
						).map (
							|index_entry|

							Task::WriteBundle {
								output_job: output_job_write_bundle,
								index_entry: index_entry
							}

						)

					}).boxed ());

				state.new_bundles_count += 1;

				return Some (task);

			}

			// read bundles

			if checkpoint_time <= Instant::now () {
				return None;
			}

			loop {

				if let Some (RawIndexEntry {
					index_bundle_header,
					bundle_info,
				}) = state.index_entry_iterator.next () {

					let bundle_backup_chunk_ids: HashSet <ChunkId> =
						bundle_info.chunks ().map (
							|bundle_info_chunk|
							bundle_info_chunk.chunk_id ()
						).collect ();

					let bundle_num_backup_chunks =
						bundle_backup_chunk_ids.intersection (
							& backup_chunk_ids,
						).count () as u64;

					let bundle_num_non_backup_chunks =
						bundle_info.num_chunks ()
						- bundle_num_backup_chunks;

					// already-balanced bundles keep their index entry as-is

					if (
						bundle_num_backup_chunks == 0
						|| bundle_num_non_backup_chunks == 0
					) && (
						bundle_info.num_chunks ()
							>= minimum_chunk_count
						&& bundle_info.num_chunks ()
							<= arguments.chunks_per_bundle
					) {

						state.pending_index_entries.push (
							RawIndexEntry {
								index_bundle_header: index_bundle_header,
								bundle_info: bundle_info,
							}
						);

					} else {

						// schedule the old bundle for deletion and read its
						// chunks in the background

						let bundle_path =
							repository_core.bundle_path (
								index_bundle_header.bundle_id ());

						atomic_file_writer.delete (
							bundle_path.clone ());

						let encryption_key =
							repository_core.encryption_key ();

						let output_job_read_bundle =
							output_job_start! (
								output,
								"Reading bundle {}",
								index_bundle_header.bundle_id ());

						return Some (
							cpu_pool.spawn_fn (move || {

								let bundle_chunks =
									bundle_read_path (
										& bundle_path,
										encryption_key,
									) ?;

								Ok (Task::ReadBundle {
									output_job: output_job_read_bundle,
									chunks: bundle_chunks,
								})

							}).boxed ());

					}

				} else if let Some ((index_id, index_entries)) =
					state.index_iterator.next () {

					// move on to the next unbalanced index, deleting it

					atomic_file_writer.delete (
						repository_core.index_path (
							index_id));

					state.index_entry_iterator =
						index_entries.into_iter ();

				} else {

					return None;

				}

			}

		},

		|state, task_value| {

			// process background task

			match task_value {

				Task::ReadBundle {
					output_job: output_job_read_bundle,
					chunks: bundle_chunks,
				} => {

					output_job_read_bundle.remove ();

					// sort chunks into the backup and non-backup queues

					for (
						bundle_chunk_id,
						bundle_chunk_data,
					) in bundle_chunks {

						if backup_chunk_ids.contains (
							& bundle_chunk_id,
						) {

							state.pending_backup_chunks.push (
								(
									bundle_chunk_id,
									bundle_chunk_data,
								),
							);

						} else {

							state.pending_non_backup_chunks.push (
								(
									bundle_chunk_id,
									bundle_chunk_data,
								),
							);

						}

					}

				},

				Task::WriteBundle {
					output_job: output_job_write_bundle,
					index_entry,
				} => {

					output_job_write_bundle.remove ();

					state.pending_index_entries.push (
						index_entry);

				},

			}

			Ok (())

		},

	) ?;

	output.unpause ();

	// write final bundle

	let mut pending_chunks: Vec <(ChunkId, Vec <u8>)> =
		state.pending_backup_chunks.into_iter ().chain (
			state.pending_non_backup_chunks,
		).collect ();

	// note: new_bundles_total is at least one whenever this function is
	// called, so the subtraction cannot underflow

	if state.new_bundles_count == new_bundles_total - 1 {

		let output_job_final_bundle =
			output_job_start! (
				output,
				"Writing bundle {} of {}",
				state.new_bundles_count + 1,
				new_bundles_total);

		state.pending_index_entries.push (
			flush_bundle (
				& output_job_final_bundle,
				& repository_core,
				atomic_file_writer.clone (),
				& pending_chunks,
			) ?
		);

		pending_chunks.clear ();

		output_job_final_bundle.remove ();

		state.new_bundles_count += 1;

	}

	output_job_replace! (
		output_job,
		"Balanced {} out of {} bundles",
		state.new_bundles_count,
		new_bundles_total);

	// perform checkpoint

	if state.new_bundles_count < new_bundles_total {

		if ! pending_chunks.is_empty () {

			let output_job_checkpoint =
				output_job_start! (
					output,
					"Writing remaining chunks for checkpoint");

			state.pending_index_entries.push (
				flush_bundle (
					& output_job_checkpoint,
					& repository_core,
					atomic_file_writer.clone (),
					& pending_chunks,
				) ?
			);

			output_job_checkpoint.remove ();

		}

		// keep index entries for bundles not yet processed

		for index_entry in state.index_entry_iterator {

			state.pending_index_entries.push (
				index_entry);

		}

	}

	// write index

	flush_index (
		output,
		& repository_core,
		& atomic_file_writer,
		& state.pending_index_entries,
	) ?;

	// commit changes

	{

		let output_job_commit =
			output_job_start! (
				output,
				"Committing changes");

		atomic_file_writer.commit () ?;

		output_job_commit.remove ();

	}

	// return

	Ok (state.new_bundles_count == new_bundles_total)

}
/// Writes a single new bundle containing `bundle_chunks` and returns the
/// index entry which describes it.
///
/// The bundle gets a fresh random id, is created through the atomic file
/// writer, and progress is reported to `output_job` as chunks are written.
fn flush_bundle (
	output_job: & OutputJob,
	repository_core: & RepositoryCore,
	atomic_file_writer: AtomicFileWriter,
	bundle_chunks: & Vec <(ChunkId, Vec <u8>)>,
) -> Result <RawIndexEntry, String> {

	let new_bundle_id =
		BundleId::random ();

	let new_bundle_path =
		repository_core.bundle_path (
			new_bundle_id);

	let mut new_bundle_file =
		atomic_file_writer.create (
			new_bundle_path,
		) ?;

	let total_chunks =
		bundle_chunks.len () as u64;

	// write the bundle, reporting progress per chunk

	let new_index_bundle_info =
		bundle_write_direct (
			& mut new_bundle_file,
			repository_core.encryption_key (),
			& bundle_chunks,
			move |chunks_written| {
				output_job.progress (
					chunks_written,
					total_chunks)
			}
		) ?;

	let new_index_bundle_header =
		DiskIndexBundleHeader::new (
			new_bundle_id);

	Ok (RawIndexEntry {
		index_bundle_header: new_index_bundle_header,
		bundle_info: new_index_bundle_info,
	})

}
/// Writes a new index file containing `new_index_entries`.
///
/// Does nothing when there are no entries.  The index gets a fresh random
/// id and is created through the atomic file writer.
fn flush_index (
	output: & Output,
	repository_core: & RepositoryCore,
	atomic_file_writer: & AtomicFileWriter,
	new_index_entries: & Vec <RawIndexEntry>,
) -> Result <(), String> {

	if new_index_entries.is_empty () {
		return Ok (());
	}

	let output_job =
		output_job_start! (
			output,
			"Writing index");

	let new_index_id =
		IndexId::random ();

	let new_index_path =
		repository_core.index_path (
			new_index_id);

	let mut new_index_file =
		atomic_file_writer.create (
			new_index_path,
		) ?;

	index_write_direct (
		& mut new_index_file,
		repository_core.encryption_key (),
		& new_index_entries,
	) ?;

	output_job.remove ();

	Ok (())

}
// Command wiring for the `balance-bundles` subcommand: the typed argument
// struct, the clap definition, argument parsing/validation, and the action.
command! (

	name = balance_bundles,
	export = balance_bundles_command,

	// typed arguments collected from the command line

	arguments = BalanceBundlesArguments {
		repository_path: PathBuf,
		password_file_path: Option <PathBuf>,
		repository_config: RepositoryConfig,
		chunks_per_bundle: u64,
		fill_factor: u64,
		checkpoint_time: Duration,
		sleep_time: Duration,
		cluster_backups: bool,
	},

	// clap command line definition

	clap_subcommand = {

		clap::SubCommand::with_name ("balance-bundles")
			.about ("rewrites bundles so they are a consistent size")

			.arg (
				clap::Arg::with_name ("repository")
					.long ("repository")
					.value_name ("REPOSITORY")
					.required (true)
					.help ("Path to the repository, used to obtain encryption key")
			)

			.arg (
				clap::Arg::with_name ("password-file")
					.long ("password-file")
					.value_name ("PASSWORD-FILE")
					.required (false)
					.help ("Path to the password file")
			)

			.arg (
				clap::Arg::with_name ("chunks-per-bundle")
					.long ("chunks-per-bundle")
					.value_name ("CHUNKS-PER-BUNDLE")
					.default_value ("256")
					.help ("Chunks per bundle")
			)

			.arg (
				clap::Arg::with_name ("fill-factor")
					.long ("fill-factor")
					.value_name ("FILL-FACTOR")
					.default_value ("25")
					.help ("Minimum fill factor as percentage")
			)

			.arg (
				clap::Arg::with_name ("checkpoint-time")
					.long ("checkpoint-time")
					.value_name ("CHECKPOINT-TIME")
					.default_value ("10 minutes")
					.help ("Time between checkpoints")
			)

			.arg (
				clap::Arg::with_name ("sleep-time")
					.long ("sleep-time")
					.value_name ("SLEEP-TIME")
					.default_value ("10 seconds")
					.help ("Sleep time on every checkpoint")
			)

			.arg (
				clap::Arg::with_name ("cluster-backups")
					.long ("cluster-backups")
					.help ("Cluster chunks required to expand backups")
			)

			.repository_config_args ()

	},

	// convert clap matches into the typed arguments, with validation

	clap_arguments_parse = |clap_matches| {

		let arguments = BalanceBundlesArguments {

			repository_path:
				args::path_required (
					& clap_matches,
					"repository"),

			password_file_path:
				args::path_optional (
					& clap_matches,
					"password-file"),

			repository_config:
				args::repository_config (
					clap_matches),

			chunks_per_bundle:
				args::u64_required (
					& clap_matches,
					"chunks-per-bundle"),

			fill_factor:
				args::u64_required (
					& clap_matches,
					"fill-factor"),

			checkpoint_time:
				args::duration_required (
					& clap_matches,
					"checkpoint-time"),

			sleep_time:
				args::duration_required (
					& clap_matches,
					"sleep-time"),

			cluster_backups:
				args::bool_flag (
					& clap_matches,
					"cluster-backups"),

		};

		// the fill factor is a percentage of chunks-per-bundle

		if arguments.fill_factor > 100 {

			args::error_exit (
				format! (
					"Value of --fill-factor must be between 0 and 100"));

		}

		arguments

	},

	action = |output, arguments| {
		balance_bundles (output, arguments)
	},

);
// ex: noet ts=4 filetype=rust
|
/// Counts all direct and indirect orbits in the map.
///
/// Each pair is `(center, orbiter)`; every pair contributes one direct
/// orbit plus one indirect orbit for each ancestor of its center.
///
/// Fix: the original re-scanned the whole map linearly for every ancestor
/// step (accidentally quadratic); an orbiter-to-center map built once makes
/// each step an O(1) lookup.
fn count_orbits(map: &[(&str, &str)]) -> usize {
    // Index each orbiter by its direct center.
    let parents: std::collections::HashMap<&str, &str> = map
        .iter()
        .map(|&(center, orbiter)| (orbiter, center))
        .collect();
    let mut count = 0;
    for &(mut target, _) in map {
        // One direct orbit, then walk the ancestor chain.
        count += 1;
        while let Some(&t) = parents.get(target) {
            count += 1;
            target = t;
        }
    }
    count
}
/// Returns the chain of centers that `start` orbits, nearest first.
///
/// Walks upward from `start` until no pair lists the current object as an
/// orbiter.
fn list_orbits<'a>(map: &[(&'a str, &'a str)], start: &str) -> Vec<&'a str> {
    let mut chain = Vec::new();
    let mut current = start;
    loop {
        match map.iter().find(|&&(_, orbiter)| orbiter == current) {
            Some(&(center, _)) => {
                chain.push(center);
                current = center;
            }
            None => break,
        }
    }
    chain
}
fn main() {
let input = include_str!("../input/day6.txt");
// right orbits left
let orbit_list = input.lines().map(|x| {
let mut s = x.split(')');
(s.next().unwrap(), s.next().unwrap())
}).collect::<Vec<_>>();
let you_orbits = list_orbits(&orbit_list, "YOU");
let san_orbits = list_orbits(&orbit_list, "SAN");
// Find the first point which is shared between YOU and SAN
let shared_point = you_orbits.iter().find(|x| san_orbits.iter().find(|y| x == y).is_some()).expect("No possible route");
// Get the position of that point in each list and add them together
let distance = you_orbits.iter().position(|x| x == shared_point).unwrap() + san_orbits.iter().position(|x| x == shared_point).unwrap();
println!("distance: {}", distance);
} |
#[allow(unused_imports)]
use super::prelude::*;
use super::intcode::IntcodeDevice;
/// Parsed puzzle input: a ready-to-run intcode device.
type Input = IntcodeDevice;
pub fn input_generator(input: &str) -> Input {
input.parse().expect("Error parsing the IntcodeDevice")
}
/// Counts how many points of the 50x50 grid report attraction.
pub fn part1(input: &Input) -> u32 {
    let mut device = input.clone();
    let mut attracted = 0;
    for y in 0..50 {
        for x in 0..50 {
            if is_attracted(&mut device, input, x, y) {
                attracted += 1;
            }
        }
    }
    attracted
}
/// Finds the closest position where a 100x100 square fits inside the beam
/// and encodes its top-left corner as `10000 * x + y`.
pub fn part2(input: &Input) -> i64 {
    let mut device = input.clone();
    let (mut x, mut y) = (0, 0);
    loop {
        if !is_attracted(&mut device, input, x, y + 99) {
            // Bottom-left corner misses the beam: slide right.
            x += 1;
        } else if !is_attracted(&mut device, input, x + 99, y) {
            // Top-right corner misses the beam: slide down.
            y += 1;
        } else {
            return 10000 * x + y;
        }
    }
}
/// Runs a fresh copy of the program with coordinates `(x, y)` as input and
/// interprets the device's report: 0 = not attracted, 1 = attracted.
fn is_attracted(device: &mut IntcodeDevice, base_device: &IntcodeDevice, x: i64, y: i64) -> bool {
    device.reset(base_device);
    device.input.push_back(x);
    device.input.push_back(y);
    device.execute();
    let report = device
        .output
        .pop_back()
        .expect("Intcode device didn't report anything");
    if report == 0 {
        false
    } else if report == 1 {
        true
    } else {
        panic!("Invalid report from intcode device")
    }
}
|
use futures::{future, Future, Stream};
use hyper::http::uri::InvalidUri;
use hyper::Uri;
use hyper::{self, Body, Client, Request};
use hyper_tls::HttpsConnector;
use std::io::{self, Error, ErrorKind};
use tokio_core::reactor::Core;
use url::form_urlencoded;
/// Errors produced while constructing or using the Pushover client.
#[derive(Debug)]
pub enum ClientError {
    /// Wrapped I/O error (also used for request failures in `push`).
    Error(Error),
    /// TLS connector initialization failure.
    HyperTlsError(hyper_tls::Error),
    /// The endpoint URI failed to parse.
    UriError(InvalidUri),
}
impl From<Error> for ClientError {
fn from(error: Error) -> Self {
ClientError::Error(error)
}
}
impl From<hyper_tls::Error> for ClientError {
    /// Wraps a TLS setup error.
    fn from(error: hyper_tls::Error) -> Self {
        Self::HyperTlsError(error)
    }
}
impl From<InvalidUri> for ClientError {
fn from(error: InvalidUri) -> Self {
ClientError::UriError(error)
}
}
/// Blocking client for the Pushover messages endpoint.
pub struct PushoverClient<'a> {
    // reactor core which drives the hyper client
    core: Core,
    // hyper client configured with an HTTPS connector
    client: Client<HttpsConnector<hyper::client::HttpConnector>>,
    // API token, sent as the "token" form field
    key: &'a str,
    // messages endpoint URI
    uri: Uri,
}
impl<'a> PushoverClient<'a> {
    /// Builds a client for the Pushover messages endpoint.
    ///
    /// Fails if the endpoint URI does not parse, the reactor core cannot be
    /// created, or the TLS connector cannot be initialized.
    pub fn from(key: &'a str) -> Result<Self, ClientError> {
        let uri = "https://api.pushover.net/1/messages.json".parse()?;
        let core = Core::new()?;
        let https = HttpsConnector::new(4)?;
        let client = Client::builder().build(https);

        // Fix: store `key` directly instead of `&key` — the original
        // created a needless `&&str` that only compiled via deref coercion.
        Ok(PushoverClient {
            core,
            client,
            key,
            uri,
        })
    }

    /// Form-encodes the user key, API token and message into a request body.
    fn make_body(&self, user: &str, message: &str) -> Body {
        let str_body = form_urlencoded::Serializer::new(String::new())
            .append_pair("user", user)
            .append_pair("token", self.key)
            .append_pair("message", message)
            .finish();

        Body::from(str_body)
    }

    /// Sends `message` to `user`, blocking until the request completes.
    ///
    /// Prints the response body to stdout when it can be read; body read
    /// failures are ignored.
    pub fn push(&mut self, user: &str, message: &str) -> io::Result<()> {
        // Fix: pass `user`/`message` directly instead of `&user`/`&message`
        // (needless extra borrows).
        let req = Request::builder()
            .uri(self.uri.clone())
            .method("POST")
            .body(self.make_body(user, message))
            .unwrap();

        // The first `core.run` drives the request and yields a future for
        // the collected response body; the second run (below) resolves it.
        let work = self
            .client
            .request(req)
            .map(|res| {
                res.into_body()
                    .fold(Vec::new(), |mut v, chunk| {
                        v.extend(&chunk[..]);
                        future::ok::<_, hyper::Error>(v)
                    })
                    .and_then(|chunks| {
                        let s = String::from_utf8(chunks).unwrap();
                        future::ok::<_, hyper::Error>(s)
                    })
            })
            .map_err(|err| Error::new(ErrorKind::Other, err));

        let body_work = self.core.run(work)?;

        if let Ok(resp_body) = self.core.run(body_work) {
            println!("{}", resp_body);
        }

        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The constructor should succeed for any key string.
    #[test]
    fn test_ctor() {
        let client = PushoverClient::from("my key");
        // Fix: `assert!` instead of the anti-idiom `assert_eq!(…, true)`.
        assert!(client.is_ok());
    }
}
|
/// EditUserOption edit user options
///
/// Request body for editing a user; all fields except `email` are optional
/// and left unchanged when `None`.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct EditUserOption {
    pub active: Option<bool>,
    pub admin: Option<bool>,
    pub allow_create_organization: Option<bool>,
    pub allow_git_hook: Option<bool>,
    pub allow_import_local: Option<bool>,
    /// Required field; the builder's typestate enforces that it is set.
    pub email: String,
    pub full_name: Option<String>,
    pub location: Option<String>,
    pub login_name: Option<String>,
    pub max_repo_creation: Option<i64>,
    pub must_change_password: Option<bool>,
    pub password: Option<String>,
    pub prohibit_login: Option<bool>,
    pub source_id: Option<i64>,
    pub website: Option<String>,
}
impl EditUserOption {
    /// Create a builder for this object.
    ///
    /// The `MissingEmail` marker prevents conversion into an
    /// `EditUserOption` until `email` has been set.
    #[inline]
    pub fn builder() -> EditUserOptionBuilder<crate::generics::MissingEmail> {
        EditUserOptionBuilder {
            body: Default::default(),
            _email: core::marker::PhantomData,
        }
    }

    /// Create a builder for the admin edit-user API call.
    ///
    /// Both the `username` path parameter and the `email` body field must
    /// be supplied before the request can be built (presumably a PATCH
    /// call, judging by the builder's name — confirm against the API spec).
    #[inline]
    pub fn admin_edit_user() -> EditUserOptionPatchBuilder<crate::generics::MissingUsername, crate::generics::MissingEmail> {
        EditUserOptionPatchBuilder {
            inner: Default::default(),
            _param_username: core::marker::PhantomData,
            _email: core::marker::PhantomData,
        }
    }
}
/// Conversion from a completed builder.
///
/// Implemented as `From` instead of the original hand-written `Into`: the
/// standard blanket impl still provides `Into<EditUserOption>` for the
/// builder, and `EditUserOption::from(builder)` also becomes available.
impl From<EditUserOptionBuilder<crate::generics::EmailExists>> for EditUserOption {
    fn from(builder: EditUserOptionBuilder<crate::generics::EmailExists>) -> Self {
        builder.body
    }
}
/// Conversion from a completed patch builder.
///
/// Implemented as `From` instead of the original hand-written `Into`: the
/// standard blanket impl still provides `Into<EditUserOption>` for the
/// builder, and `EditUserOption::from(builder)` also becomes available.
impl From<EditUserOptionPatchBuilder<crate::generics::UsernameExists, crate::generics::EmailExists>> for EditUserOption {
    fn from(builder: EditUserOptionPatchBuilder<crate::generics::UsernameExists, crate::generics::EmailExists>) -> Self {
        builder.inner.body
    }
}
/// Builder for [`EditUserOption`](./struct.EditUserOption.html) object.
#[derive(Debug, Clone)]
pub struct EditUserOptionBuilder<Email> {
    // Accumulated request body.
    body: self::EditUserOption,
    // Type-state marker: `MissingEmail` until `email()` is called, then
    // `EmailExists` (which unlocks the Into<EditUserOption> conversion).
    _email: core::marker::PhantomData<Email>,
}
impl<Email> EditUserOptionBuilder<Email> {
    // Except for `email`, every method below is a pass-through setter:
    // it stores `Some(value)` in the body and returns the builder by value
    // for chaining.
    #[inline]
    pub fn active(mut self, value: impl Into<bool>) -> Self {
        self.body.active = Some(value.into());
        self
    }
    #[inline]
    pub fn admin(mut self, value: impl Into<bool>) -> Self {
        self.body.admin = Some(value.into());
        self
    }
    #[inline]
    pub fn allow_create_organization(mut self, value: impl Into<bool>) -> Self {
        self.body.allow_create_organization = Some(value.into());
        self
    }
    #[inline]
    pub fn allow_git_hook(mut self, value: impl Into<bool>) -> Self {
        self.body.allow_git_hook = Some(value.into());
        self
    }
    #[inline]
    pub fn allow_import_local(mut self, value: impl Into<bool>) -> Self {
        self.body.allow_import_local = Some(value.into());
        self
    }
    /// Set the required `email` field, advancing the type-state from
    /// `Email` to `EmailExists`.
    #[inline]
    pub fn email(mut self, value: impl Into<String>) -> EditUserOptionBuilder<crate::generics::EmailExists> {
        self.body.email = value.into();
        // Only the zero-sized PhantomData type parameter differs between
        // source and target, so the layouts coincide in practice.
        // NOTE(review): this struct is not #[repr(transparent)]/#[repr(C)],
        // so the transmute relies on unspecified layout — a by-field move
        // would be strictly sound.
        unsafe { std::mem::transmute(self) }
    }
    #[inline]
    pub fn full_name(mut self, value: impl Into<String>) -> Self {
        self.body.full_name = Some(value.into());
        self
    }
    #[inline]
    pub fn location(mut self, value: impl Into<String>) -> Self {
        self.body.location = Some(value.into());
        self
    }
    #[inline]
    pub fn login_name(mut self, value: impl Into<String>) -> Self {
        self.body.login_name = Some(value.into());
        self
    }
    #[inline]
    pub fn max_repo_creation(mut self, value: impl Into<i64>) -> Self {
        self.body.max_repo_creation = Some(value.into());
        self
    }
    #[inline]
    pub fn must_change_password(mut self, value: impl Into<bool>) -> Self {
        self.body.must_change_password = Some(value.into());
        self
    }
    #[inline]
    pub fn password(mut self, value: impl Into<String>) -> Self {
        self.body.password = Some(value.into());
        self
    }
    #[inline]
    pub fn prohibit_login(mut self, value: impl Into<bool>) -> Self {
        self.body.prohibit_login = Some(value.into());
        self
    }
    #[inline]
    pub fn source_id(mut self, value: impl Into<i64>) -> Self {
        self.body.source_id = Some(value.into());
        self
    }
    #[inline]
    pub fn website(mut self, value: impl Into<String>) -> Self {
        self.body.website = Some(value.into());
        self
    }
}
/// Builder created by [`EditUserOption::admin_edit_user`](./struct.EditUserOption.html#method.admin_edit_user) method for a `PATCH` operation associated with `EditUserOption`.
// repr(transparent) over the single non-ZST field makes the type-state
// transmutes in the impl below layout-safe.
#[repr(transparent)]
#[derive(Debug, Clone)]
pub struct EditUserOptionPatchBuilder<Username, Email> {
    // All real data lives in the monomorphic container.
    inner: EditUserOptionPatchBuilderContainer,
    // Type-state markers: "username path param set" / "email body field set".
    _param_username: core::marker::PhantomData<Username>,
    _email: core::marker::PhantomData<Email>,
}
// Monomorphic storage shared by every type-state of the PATCH builder.
#[derive(Debug, Default, Clone)]
struct EditUserOptionPatchBuilderContainer {
    // Request body, serialized as JSON by `modify()`.
    body: self::EditUserOption,
    // `{username}` path parameter; `None` until `username()` is called.
    param_username: Option<String>,
}
impl<Username, Email> EditUserOptionPatchBuilder<Username, Email> {
    /// username of user to edit
    ///
    /// Advances the type-state from `Username` to `UsernameExists`.
    #[inline]
    pub fn username(mut self, value: impl Into<String>) -> EditUserOptionPatchBuilder<crate::generics::UsernameExists, Email> {
        self.inner.param_username = Some(value.into());
        // Only the zero-sized marker parameter changes and the struct is
        // #[repr(transparent)] over the container, so source and target
        // share one layout; this is a type-state-only cast.
        unsafe { std::mem::transmute(self) }
    }
    // The remaining methods (except `email`) are pass-through setters for
    // the optional body fields.
    #[inline]
    pub fn active(mut self, value: impl Into<bool>) -> Self {
        self.inner.body.active = Some(value.into());
        self
    }
    #[inline]
    pub fn admin(mut self, value: impl Into<bool>) -> Self {
        self.inner.body.admin = Some(value.into());
        self
    }
    #[inline]
    pub fn allow_create_organization(mut self, value: impl Into<bool>) -> Self {
        self.inner.body.allow_create_organization = Some(value.into());
        self
    }
    #[inline]
    pub fn allow_git_hook(mut self, value: impl Into<bool>) -> Self {
        self.inner.body.allow_git_hook = Some(value.into());
        self
    }
    #[inline]
    pub fn allow_import_local(mut self, value: impl Into<bool>) -> Self {
        self.inner.body.allow_import_local = Some(value.into());
        self
    }
    /// Set the required `email` field, advancing the type-state from
    /// `Email` to `EmailExists`. Same layout-preserving cast as `username`.
    #[inline]
    pub fn email(mut self, value: impl Into<String>) -> EditUserOptionPatchBuilder<Username, crate::generics::EmailExists> {
        self.inner.body.email = value.into();
        unsafe { std::mem::transmute(self) }
    }
    #[inline]
    pub fn full_name(mut self, value: impl Into<String>) -> Self {
        self.inner.body.full_name = Some(value.into());
        self
    }
    #[inline]
    pub fn location(mut self, value: impl Into<String>) -> Self {
        self.inner.body.location = Some(value.into());
        self
    }
    #[inline]
    pub fn login_name(mut self, value: impl Into<String>) -> Self {
        self.inner.body.login_name = Some(value.into());
        self
    }
    #[inline]
    pub fn max_repo_creation(mut self, value: impl Into<i64>) -> Self {
        self.inner.body.max_repo_creation = Some(value.into());
        self
    }
    #[inline]
    pub fn must_change_password(mut self, value: impl Into<bool>) -> Self {
        self.inner.body.must_change_password = Some(value.into());
        self
    }
    #[inline]
    pub fn password(mut self, value: impl Into<String>) -> Self {
        self.inner.body.password = Some(value.into());
        self
    }
    #[inline]
    pub fn prohibit_login(mut self, value: impl Into<bool>) -> Self {
        self.inner.body.prohibit_login = Some(value.into());
        self
    }
    #[inline]
    pub fn source_id(mut self, value: impl Into<i64>) -> Self {
        self.inner.body.source_id = Some(value.into());
        self
    }
    #[inline]
    pub fn website(mut self, value: impl Into<String>) -> Self {
        self.inner.body.website = Some(value.into());
        self
    }
}
// Only the fully-specified builder (username AND email present) is
// sendable — the type-states make an incomplete request unrepresentable.
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for EditUserOptionPatchBuilder<crate::generics::UsernameExists, crate::generics::EmailExists> {
    type Output = crate::user::User;

    const METHOD: http::Method = http::Method::PATCH;

    /// Operation path with the `{username}` segment substituted.
    /// The `expect` cannot fire in practice: `UsernameExists` guarantees
    /// `username()` was called.
    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        format!("/admin/users/{username}", username=self.inner.param_username.as_ref().expect("missing parameter username?")).into()
    }

    /// Attach the accumulated body as the JSON payload of the request.
    fn modify(&self, req: Client::Request) -> Result<Client::Request, crate::client::ApiError<Client::Response>> {
        use crate::client::Request;
        Ok(req
        .json(&self.inner.body))
    }
}
// Typed accessors for documented response headers of this operation.
impl crate::client::ResponseWrapper<crate::user::User, EditUserOptionPatchBuilder<crate::generics::UsernameExists, crate::generics::EmailExists>> {
    /// Value of the `message` response header, if present
    /// (lossy UTF-8 decoded).
    #[inline]
    pub fn message(&self) -> Option<String> {
        self.headers.get("message").and_then(|v| String::from_utf8_lossy(v.as_ref()).parse().ok())
    }

    /// Value of the `url` response header, if present
    /// (lossy UTF-8 decoded).
    #[inline]
    pub fn url(&self) -> Option<String> {
        self.headers.get("url").and_then(|v| String::from_utf8_lossy(v.as_ref()).parse().ok())
    }
}
|
use chrono::{DateTime, Utc};
/// Strategy that decides whether (and when) a failed task is retried.
pub trait RetryPolicy {
    /// Determine if a task should be retried according to a retry policy.
    ///
    /// `n_past_retries` is the number of retries already attempted for
    /// this task.
    fn should_retry(&self, n_past_retries: u32) -> RetryDecision;
}
/// Outcome of evaluating a retry policy for a failed task.
///
/// Derives added so callers can log, compare, and freely copy decisions;
/// `DateTime<Utc>` implements `Debug`, `Clone`, `Copy`, `PartialEq` and
/// `Eq`, so all derives are satisfied.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RetryDecision {
    /// Retry after the specified timestamp.
    Retry { execute_after: DateTime<Utc> },
    /// Give up.
    DoNotRetry,
}
|
use crate::errors::ConnectorXError;
#[cfg(feature = "src_oracle")]
use crate::sources::oracle::OracleDialect;
use fehler::{throw, throws};
use log::{debug, trace, warn};
use sqlparser::ast::{
BinaryOperator, Expr, Function, FunctionArg, Ident, ObjectName, Query, Select, SelectItem,
SetExpr, Statement, TableAlias, TableFactor, TableWithJoins, Value,
};
use sqlparser::dialect::Dialect;
use sqlparser::parser::Parser;
#[cfg(feature = "src_oracle")]
use std::any::Any;
/// A user SQL query, tagged by whether it is still the raw user text or
/// has already been wrapped in a subquery by one of the rewriters below.
#[derive(Debug, Clone)]
pub enum CXQuery<Q = String> {
    Naked(Q),   // The query directly comes from the user
    Wrapped(Q), // The user query is already wrapped in a subquery
}
impl<Q: std::fmt::Display> std::fmt::Display for CXQuery<Q> {
    /// Both variants render as the bare query text; the tag is not shown.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            CXQuery::Naked(query) | CXQuery::Wrapped(query) => write!(f, "{}", query),
        }
    }
}
impl<Q: AsRef<str>> CXQuery<Q> {
    /// Borrow the underlying query text, whichever variant this is.
    pub fn as_str(&self) -> &str {
        match self {
            CXQuery::Naked(inner) | CXQuery::Wrapped(inner) => inner.as_ref(),
        }
    }
}
impl From<&str> for CXQuery {
    /// A plain string slice becomes a user-supplied (naked) query.
    fn from(s: &str) -> Self {
        CXQuery::Naked(s.to_owned())
    }
}
impl From<&&str> for CXQuery {
    /// A double reference delegates to the `&str` conversion.
    fn from(s: &&str) -> Self {
        CXQuery::from(*s)
    }
}
impl From<&String> for CXQuery {
    /// Clones the owned string into a naked query.
    fn from(s: &String) -> Self {
        CXQuery::Naked(s.to_owned())
    }
}
impl From<&CXQuery> for CXQuery {
    /// Deep copy of an existing query, tag included.
    fn from(q: &CXQuery) -> Self {
        q.to_owned()
    }
}
impl CXQuery<String> {
    /// Build a `Naked` query from anything string-like.
    pub fn naked<Q: AsRef<str>>(q: Q) -> Self {
        let owned = q.as_ref().to_owned();
        CXQuery::Naked(owned)
    }
}
impl<Q: AsRef<str>> AsRef<str> for CXQuery<Q> {
    /// Same view as `as_str`: the payload text without the tag.
    fn as_ref(&self) -> &str {
        match self {
            CXQuery::Naked(inner) | CXQuery::Wrapped(inner) => inner.as_ref(),
        }
    }
}
impl<Q> CXQuery<Q> {
    /// Apply `f` to the payload, preserving the Naked/Wrapped tag.
    pub fn map<F, U>(&self, f: F) -> CXQuery<U>
    where
        F: Fn(&Q) -> U,
    {
        match self {
            CXQuery::Naked(q) => CXQuery::Naked(f(q)),
            CXQuery::Wrapped(q) => CXQuery::Wrapped(f(q)),
        }
    }
}
impl<Q, E> CXQuery<Result<Q, E>> {
    /// Transpose `CXQuery<Result<Q, E>>` into `Result<CXQuery<Q>, E>`,
    /// keeping the Naked/Wrapped tag on success.
    pub fn result(self) -> Result<CXQuery<Q>, E> {
        match self {
            CXQuery::Naked(q) => q.map(CXQuery::Naked),
            CXQuery::Wrapped(q) => q.map(CXQuery::Wrapped),
        }
    }
}
// wrap a query into a derived table
//
// Produces `SELECT <projection> FROM (<query>) [<tmp_tab_name>] [WHERE <selection>]`.
// Side effect: the input `query`'s WITH clause is cleared — CTEs are
// hoisted onto the new outer query so they stay in scope.
fn wrap_query(
    query: &mut Query,
    projection: Vec<SelectItem>,
    selection: Option<Expr>,
    tmp_tab_name: &str,
) -> Statement {
    let with = query.with.clone();
    query.with = None;
    // An empty name means "no alias" — needed by dialects (e.g. Oracle)
    // that reject aliases on derived tables.
    let alias = if tmp_tab_name.is_empty() {
        None
    } else {
        Some(TableAlias {
            name: Ident {
                value: tmp_tab_name.into(),
                quote_style: None,
            },
            columns: vec![],
        })
    };
    Statement::Query(Box::new(Query {
        with, // hoisted CTEs from the inner query
        body: SetExpr::Select(Box::new(Select {
            distinct: false,
            top: None,
            projection,
            from: vec![TableWithJoins {
                relation: TableFactor::Derived {
                    lateral: false,
                    subquery: Box::new(query.clone()),
                    alias,
                },
                joins: vec![],
            }],
            lateral_views: vec![],
            selection,
            group_by: vec![],
            cluster_by: vec![],
            distribute_by: vec![],
            sort_by: vec![],
            having: None,
        })),
        order_by: vec![],
        limit: None,
        offset: None,
        fetch: None,
    }))
}
/// Convenience downcast from a parsed `Statement` to its inner `Query`.
trait StatementExt {
    /// `Some(&Query)` when the statement is `Statement::Query`, else `None`.
    fn as_query(&self) -> Option<&Query>;
}
impl StatementExt for Statement {
    fn as_query(&self) -> Option<&Query> {
        // Only query statements carry an inner Query; everything else
        // (DDL, DML, ...) yields None.
        if let Statement::Query(query) = self {
            Some(query)
        } else {
            None
        }
    }
}
/// Convenience mutable downcast from a `Query` body to its `Select`.
trait QueryExt {
    /// `Some(&mut Select)` when the body is `SetExpr::Select`, else `None`
    /// (e.g. for UNION/VALUES bodies).
    fn as_select_mut(&mut self) -> Option<&mut Select>;
}
impl QueryExt for Query {
    fn as_select_mut(&mut self) -> Option<&mut Select> {
        // Set operations (UNION etc.) and VALUES bodies yield None.
        if let SetExpr::Select(ref mut select) = self.body {
            Some(select)
        } else {
            None
        }
    }
}
/// Rewrite `sql` into a `SELECT COUNT(*)` over the original query so the
/// total row count can be fetched before partitioning.
///
/// Oracle gets a purely textual rewrite (no `AS` alias support); for
/// other dialects the query is parsed and rewritten on the AST. If
/// parsing fails, the function falls back to textual wrapping and only
/// logs the parse error.
#[throws(ConnectorXError)]
pub fn count_query<T: Dialect>(sql: &CXQuery<String>, dialect: &T) -> CXQuery<String> {
    trace!("Incoming query: {}", sql);

    const COUNT_TMP_TAB_NAME: &str = "CXTMPTAB_COUNT";
    #[allow(unused_mut)]
    let mut table_alias = COUNT_TMP_TAB_NAME;

    // HACK: Some dialect (e.g. Oracle) does not support "AS" for alias
    #[cfg(feature = "src_oracle")]
    if dialect.type_id() == (OracleDialect {}.type_id()) {
        // table_alias = "";
        return CXQuery::Wrapped(format!(
            "SELECT COUNT(*) FROM ({}) {}",
            sql.as_str(),
            COUNT_TMP_TAB_NAME
        ));
    }

    let tsql = match sql.map(|sql| Parser::parse_sql(dialect, sql)).result() {
        Ok(ast) => {
            // Projection for the outer query: a bare `count(*)`.
            let projection = vec![SelectItem::UnnamedExpr(Expr::Function(Function {
                name: ObjectName(vec![Ident {
                    value: "count".to_string(),
                    quote_style: None,
                }]),
                args: vec![FunctionArg::Unnamed(Expr::Wildcard)],
                over: None,
                distinct: false,
            }))];
            let ast_count: Statement = match ast {
                CXQuery::Naked(ast) => {
                    // Only a single statement can be count-wrapped.
                    if ast.len() != 1 {
                        throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
                    }
                    let mut query = ast[0]
                        .as_query()
                        .ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
                        .clone();
                    if query.offset.is_none() {
                        query.order_by = vec![]; // mssql offset must appear with order by
                    }
                    let select = query
                        .as_select_mut()
                        .ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?;
                    // SORT BY cannot change the row count; drop it.
                    select.sort_by = vec![];
                    wrap_query(&mut query, projection, None, table_alias)
                }
                CXQuery::Wrapped(ast) => {
                    if ast.len() != 1 {
                        throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
                    }
                    let mut query = ast[0]
                        .as_query()
                        .ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
                        .clone();
                    let select = query
                        .as_select_mut()
                        .ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?;
                    // Already wrapped once: just swap the projection
                    // for count(*) instead of nesting another subquery.
                    select.projection = projection;
                    Statement::Query(Box::new(query))
                }
            };
            format!("{}", ast_count)
        }
        Err(e) => {
            warn!("parser error: {:?}, manually compose query string", e);
            format!(
                "SELECT COUNT(*) FROM ({}) as {}",
                sql.as_str(),
                COUNT_TMP_TAB_NAME
            )
        }
    };

    debug!("Transformed count query: {}", tsql);
    CXQuery::Wrapped(tsql)
}
/// Rewrite `sql` so it returns at most one row (`LIMIT 1`), used to probe
/// the result schema cheaply. Falls back to textual `{} LIMIT 1` when the
/// dialect parser cannot handle the query.
#[throws(ConnectorXError)]
pub fn limit1_query<T: Dialect>(sql: &CXQuery<String>, dialect: &T) -> CXQuery<String> {
    trace!("Incoming query: {}", sql);

    let limited = match Parser::parse_sql(dialect, sql.as_str()) {
        Err(e) => {
            // Unparsable query: compose the limited query textually.
            warn!("parser error: {:?}, manually compose query string", e);
            format!("{} LIMIT 1", sql.as_str())
        }
        Ok(mut statements) => {
            // Exactly one statement is supported.
            if statements.len() != 1 {
                throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
            }
            match &mut statements[0] {
                Statement::Query(query) => {
                    query.limit = Some(Expr::Value(Value::Number("1".to_string(), false)));
                }
                _ => throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string())),
            };
            format!("{}", statements[0])
        }
    };

    debug!("Transformed limit 1 query: {}", limited);
    CXQuery::Wrapped(limited)
}
/// Oracle-specific LIMIT-1 rewrite: classic Oracle syntax has no `LIMIT`,
/// so the query is wrapped textually with `WHERE rownum = 1`.
/// The commented-out code below is a disabled AST-based variant.
#[throws(ConnectorXError)]
#[cfg(feature = "src_oracle")]
pub fn limit1_query_oracle(sql: &CXQuery<String>) -> CXQuery<String> {
    trace!("Incoming oracle query: {}", sql);

    CXQuery::Wrapped(format!("SELECT * FROM ({}) WHERE rownum = 1", sql))

    // let ast = Parser::parse_sql(&OracleDialect {}, sql.as_str())?;
    // if ast.len() != 1 {
    //     throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
    // }
    // let ast_part: Statement;
    // let mut query = ast[0]
    //     .as_query()
    //     .ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
    //     .clone();
    // let selection = Expr::BinaryOp {
    //     left: Box::new(Expr::CompoundIdentifier(vec![Ident {
    //         value: "rownum".to_string(),
    //         quote_style: None,
    //     }])),
    //     op: BinaryOperator::Eq,
    //     right: Box::new(Expr::Value(Value::Number("1".to_string(), false))),
    // };
    // ast_part = wrap_query(&mut query, vec![SelectItem::Wildcard], Some(selection), "");
    // let tsql = format!("{}", ast_part);
    // debug!("Transformed limit 1 query: {}", tsql);
    // CXQuery::Wrapped(tsql)
}
/// Wrap `sql` so only rows with `lower <= col < upper` are returned — one
/// partition of a single-integer-column range split. Falls back to
/// textual composition when the dialect parser cannot handle the query.
#[throws(ConnectorXError)]
pub fn single_col_partition_query<T: Dialect>(
    sql: &str,
    col: &str,
    lower: i64,
    upper: i64,
    dialect: &T,
) -> String {
    trace!("Incoming query: {}", sql);

    const PART_TMP_TAB_NAME: &str = "CXTMPTAB_PART";
    #[allow(unused_mut)]
    let mut table_alias = PART_TMP_TAB_NAME;

    // Partition column, qualified as CXTMPTAB_PART.<col>.
    #[allow(unused_mut)]
    let mut cid = Box::new(Expr::CompoundIdentifier(vec![
        Ident {
            value: PART_TMP_TAB_NAME.to_string(),
            quote_style: None,
        },
        Ident {
            value: col.to_string(),
            quote_style: None,
        },
    ]));

    // HACK: Some dialect (e.g. Oracle) does not support "AS" for alias
    #[cfg(feature = "src_oracle")]
    if dialect.type_id() == (OracleDialect {}.type_id()) {
        return format!("SELECT * FROM ({}) CXTMPTAB_PART WHERE CXTMPTAB_PART.{} >= {} AND CXTMPTAB_PART.{} < {}", sql, col, lower, col, upper);
        // table_alias = "";
        // cid = Box::new(Expr::Identifier(Ident {
        //     value: col.to_string(),
        //     quote_style: None,
        // }));
    }

    let tsql = match Parser::parse_sql(dialect, sql) {
        Ok(ast) => {
            // Only a single statement can be partitioned.
            if ast.len() != 1 {
                throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
            }
            let mut query = ast[0]
                .as_query()
                .ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
                .clone();
            let select = query
                .as_select_mut()
                .ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
                .clone();
            let ast_part: Statement;

            // lower <= col
            let lb = Expr::BinaryOp {
                left: Box::new(Expr::Value(Value::Number(lower.to_string(), false))),
                op: BinaryOperator::LtEq,
                right: cid.clone(),
            };
            // col < upper
            let ub = Expr::BinaryOp {
                left: cid,
                op: BinaryOperator::Lt,
                right: Box::new(Expr::Value(Value::Number(upper.to_string(), false))),
            };
            // lower <= col AND col < upper
            let selection = Expr::BinaryOp {
                left: Box::new(lb),
                op: BinaryOperator::And,
                right: Box::new(ub),
            };

            if query.limit.is_none() && select.top.is_none() && !query.order_by.is_empty() {
                // order by in a partition query does not make sense because partition is unordered.
                // clear the order by because mssql does not support order by in a derived table.
                // also order by in the derived table does not make any difference.
                query.order_by.clear();
            }

            ast_part = wrap_query(
                &mut query,
                vec![SelectItem::Wildcard],
                Some(selection),
                table_alias,
            );
            format!("{}", ast_part)
        }
        Err(e) => {
            warn!("parser error: {:?}, manually compose query string", e);
            format!("SELECT * FROM ({}) AS CXTMPTAB_PART WHERE CXTMPTAB_PART.{} >= {} AND CXTMPTAB_PART.{} < {}", sql, col, lower, col, upper)
        }
    };

    debug!("Transformed single column partition query: {}", tsql);
    tsql
}
/// Build a query returning `MIN(col)` and `MAX(col)` over `sql`'s result,
/// used to establish the numeric range before partitioning. Falls back to
/// textual composition when the dialect parser cannot handle the query.
#[throws(ConnectorXError)]
pub fn get_partition_range_query<T: Dialect>(sql: &str, col: &str, dialect: &T) -> String {
    trace!("Incoming query: {}", sql);
    const RANGE_TMP_TAB_NAME: &str = "CXTMPTAB_RANGE";
    #[allow(unused_mut)]
    let mut table_alias = RANGE_TMP_TAB_NAME;

    // Aggregate argument: the qualified column CXTMPTAB_RANGE.<col>.
    #[allow(unused_mut)]
    let mut args = vec![FunctionArg::Unnamed(Expr::CompoundIdentifier(vec![
        Ident {
            value: RANGE_TMP_TAB_NAME.to_string(),
            quote_style: None,
        },
        Ident {
            value: col.to_string(),
            quote_style: None,
        },
    ]))];

    // HACK: Some dialect (e.g. Oracle) does not support "AS" for alias
    #[cfg(feature = "src_oracle")]
    if dialect.type_id() == (OracleDialect {}.type_id()) {
        return format!(
            "SELECT MIN({}.{}) as min, MAX({}.{}) as max FROM ({}) {}",
            RANGE_TMP_TAB_NAME, col, RANGE_TMP_TAB_NAME, col, sql, RANGE_TMP_TAB_NAME
        );
        // table_alias = "";
        // args = vec![FunctionArg::Unnamed(Expr::Identifier(Ident {
        //     value: col.to_string(),
        //     quote_style: None,
        // }))];
    }

    let tsql = match Parser::parse_sql(dialect, sql) {
        Ok(ast) => {
            // Only a single statement is supported.
            if ast.len() != 1 {
                throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
            }
            let mut query = ast[0]
                .as_query()
                .ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
                .clone();
            let ast_range: Statement;
            // Ordering cannot affect MIN/MAX; drop it.
            query.order_by = vec![];
            let projection = vec![
                SelectItem::UnnamedExpr(Expr::Function(Function {
                    name: ObjectName(vec![Ident {
                        value: "min".to_string(),
                        quote_style: None,
                    }]),
                    args: args.clone(),
                    over: None,
                    distinct: false,
                })),
                SelectItem::UnnamedExpr(Expr::Function(Function {
                    name: ObjectName(vec![Ident {
                        value: "max".to_string(),
                        quote_style: None,
                    }]),
                    args,
                    over: None,
                    distinct: false,
                })),
            ];
            ast_range = wrap_query(&mut query, projection, None, table_alias);
            format!("{}", ast_range)
        }
        Err(e) => {
            warn!("parser error: {:?}, manually compose query string", e);
            format!(
                "SELECT MIN({}.{}) as min, MAX({}.{}) as max FROM ({}) AS {}",
                RANGE_TMP_TAB_NAME, col, RANGE_TMP_TAB_NAME, col, sql, RANGE_TMP_TAB_NAME
            )
        }
    };
    debug!("Transformed partition range query: {}", tsql);
    tsql
}
/// Like `get_partition_range_query`, but returns two separate queries
/// `(min_query, max_query)` — for sources that cannot evaluate both
/// aggregates in one statement. Falls back to textual composition on
/// parse failure.
#[throws(ConnectorXError)]
pub fn get_partition_range_query_sep<T: Dialect>(
    sql: &str,
    col: &str,
    dialect: &T,
) -> (String, String) {
    trace!("Incoming query: {}", sql);
    const RANGE_TMP_TAB_NAME: &str = "CXTMPTAB_RANGE";

    let (sql_min, sql_max) = match Parser::parse_sql(dialect, sql) {
        Ok(ast) => {
            // Only a single statement is supported.
            if ast.len() != 1 {
                throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
            }
            let mut query = ast[0]
                .as_query()
                .ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
                .clone();
            let ast_range_min: Statement;
            let ast_range_max: Statement;
            // Ordering cannot affect MIN/MAX; drop it.
            query.order_by = vec![];
            // Projection `min(CXTMPTAB_RANGE.<col>)`.
            let min_proj = vec![SelectItem::UnnamedExpr(Expr::Function(Function {
                name: ObjectName(vec![Ident {
                    value: "min".to_string(),
                    quote_style: None,
                }]),
                args: vec![FunctionArg::Unnamed(Expr::CompoundIdentifier(vec![
                    Ident {
                        value: RANGE_TMP_TAB_NAME.to_string(),
                        quote_style: None,
                    },
                    Ident {
                        value: col.to_string(),
                        quote_style: None,
                    },
                ]))],
                over: None,
                distinct: false,
            }))];
            // Projection `max(CXTMPTAB_RANGE.<col>)`.
            let max_proj = vec![SelectItem::UnnamedExpr(Expr::Function(Function {
                name: ObjectName(vec![Ident {
                    value: "max".to_string(),
                    quote_style: None,
                }]),
                args: vec![FunctionArg::Unnamed(Expr::CompoundIdentifier(vec![
                    Ident {
                        value: RANGE_TMP_TAB_NAME.into(),
                        quote_style: None,
                    },
                    Ident {
                        value: col.into(),
                        quote_style: None,
                    },
                ]))],
                over: None,
                distinct: false,
            }))];
            // The min wrapper works on a clone; the max wrapper consumes
            // the original mutable query.
            ast_range_min = wrap_query(&mut query.clone(), min_proj, None, RANGE_TMP_TAB_NAME);
            ast_range_max = wrap_query(&mut query, max_proj, None, RANGE_TMP_TAB_NAME);
            (format!("{}", ast_range_min), format!("{}", ast_range_max))
        }
        Err(e) => {
            warn!("parser error: {:?}, manually compose query string", e);
            (
                format!(
                    "SELECT MIN({}.{}) as min FROM ({}) AS {}",
                    RANGE_TMP_TAB_NAME, col, sql, RANGE_TMP_TAB_NAME
                ),
                format!(
                    "SELECT MAX({}.{}) as max FROM ({}) AS {}",
                    RANGE_TMP_TAB_NAME, col, sql, RANGE_TMP_TAB_NAME
                ),
            )
        }
    };
    debug!(
        "Transformed separated partition range query: {}, {}",
        sql_min, sql_max
    );
    (sql_min, sql_max)
}
|
//! Path Stroking
//!
//! # Example
//!
//! // Input Path
//! let mut path = agg::Path::new();
//! path.move_to( 0.0, 0.0);
//! path.line_to(100.0, 100.0);
//! path.line_to(200.0, 50.0);
//!
//! // Stroke
//! let mut stroke = agg::Stroke::new( path );
//! stroke.width(2.5);
//! stroke.line_cap(agg::LineCap::Square);
//! stroke.line_join(agg::LineJoin::Miter);
//! stroke.miter_limit(5.0);
//!
//! // Draw
//! let mut ras = agg::RasterizerScanline::new();
//! ras.add_path(&stroke);
//!
use crate::paths::PathCommand;
use crate::paths::Vertex;
use crate::paths::len;
use crate::paths::cross;
use crate::paths::split;
use crate::VertexSource;
use std::f64::consts::PI;
/// Line End or Cap Style
///
/// How the ends of an open stroked path are finished: flush with the
/// endpoint (`Butt`), extended past it (`Square`), or closed with a
/// semicircular arc (`Round`). See `calc_cap` for the geometry.
#[derive(Debug,Copy,Clone,PartialEq)]
pub enum LineCap {
    Butt, Square, Round
}
/// Lines Join Style on the outside
///
/// NOTE: `MiterAccurate` and `None` are accepted by the type but reset
/// to `Miter` by `Stroke::line_join`.
#[derive(Debug,Copy,Clone,PartialEq)]
pub enum LineJoin {
    Miter, MiterRevert, Round, Bevel, MiterRound, MiterAccurate, None,
}
/// Lines Join Style on the inside
///
/// Applied at the concave side of a corner; see `calc_join` for how each
/// variant is constructed.
#[derive(Debug,Copy,Clone,PartialEq)]
pub enum InnerJoin {
    Bevel, Miter, Jag, Round
}
/// Stroke for Paths and Vertex Sources
///
/// **Missing:** fn shorten() [private]
///
#[derive(Debug)]
pub struct Stroke<T: VertexSource> {
    /// Source of Vertices
    source: T,
    /// Half-width of the line in pixels (`width()` stores `width / 2.0`);
    /// can be negative. Default 0.5
    width: f64,
    /// Absolute value of the half-width in pixels, 0.5
    width_abs: f64,
    /// Minimum Limit to determine if segments are almost co-linear, 0.5/1024
    width_eps: f64,
    /// Sign of the width, +1.0
    width_sign: f64,
    /// Maximum Length of miter at segment intersection, in half-width
    /// units; `new()` initializes this to 4.0
    miter_limit: f64,
    /// Maximum Length of the inner miter at segment intersections, 1.01
    inner_miter_limit: f64,
    /// Approximation scale, 1.0
    approx_scale: f64,
    /// Line Cap Style
    line_cap: LineCap,
    /// Line Join Style
    line_join: LineJoin,
    /// Line Join Style, Inner Angle
    inner_join: InnerJoin,
}
impl<T> VertexSource for Stroke<T> where T: VertexSource {
    /// Produce the stroked outline by delegating to `stroke()`
    /// (defined elsewhere in this file).
    fn xconvert(&self) -> Vec<Vertex<f64>> {
        self.stroke()
    }
}
// Index of the previous vertex, wrapping around a closed ring of $n points.
macro_rules! prev {
    ($i:expr, $n:expr) => ( ($i + $n - 1) % $n )
}
// Identity index; $n is accepted (and ignored) for symmetry with prev!/next!.
macro_rules! curr {
    ($i:expr, $n:expr) => ( $i )
}
// Index of the next vertex, wrapping around a closed ring of $n points.
macro_rules! next {
    ($i:expr, $n:expr) => ( ($i + 1) % $n )
}
impl<T> Stroke<T> where T: VertexSource {
/// Create a new Stroke from a Vertex Source
///
/// Defaults: half-width 0.5 (with matching absolute value and positive
/// sign), miter limit 4.0, inner miter limit 1.01, approximation scale
/// 1.0, butt caps, and miter joins inside and out.
pub fn new(source: T) -> Self {
    Stroke {
        source,
        width: 0.5,
        width_abs: 0.5,
        width_eps: 0.5 / 1024.0,
        width_sign: 1.0,
        miter_limit: 4.0,
        inner_miter_limit: 1.01,
        approx_scale: 1.0,
        line_cap: LineCap::Butt,
        line_join: LineJoin::Miter,
        inner_join: InnerJoin::Miter,
    }
}
/// Set the Stroke Width
///
/// Stores half of `width` (offsets are applied on both sides of the
/// center line) along with its absolute value and sign.
/// NOTE(review): `width_eps` keeps its constructor value of 0.5/1024 and
/// is never rescaled here — confirm whether it should track the width.
pub fn width(&mut self, width: f64) {
    self.width = width / 2.0;
    self.width_abs = self.width.abs();
    self.width_sign = if self.width < 0.0 { -1.0 } else { 1.0 };
}
/// Set Line cap style
///
/// Available options are
/// - `Butt`
/// - `Square`
/// - `Round`
pub fn line_cap(&mut self, line_cap: LineCap) {
    self.line_cap = line_cap;
}
/// Set Line Join style
///
/// Available options are
/// - `Miter`
/// - `MiterRevert`
/// - `RoundJoin`
/// - `Bevel`
/// - `MiterRound`
///
/// Variants of `MiterAccurate` and `None` are not available and will
/// be reset to `Miter`
///
pub fn line_join(&mut self, line_join: LineJoin) {
    self.line_join = match line_join {
        // Unsupported variants fall back to a plain miter join.
        LineJoin::MiterAccurate | LineJoin::None => LineJoin::Miter,
        supported => supported,
    };
}
/// Set Inner Join style
///
/// Available options are
/// - `Bevel`
/// - `Miter`
/// - `Jag`
/// - `Round`
pub fn inner_join(&mut self, inner_join: InnerJoin) {
    self.inner_join = inner_join;
}
/// Set miter limit
///
/// Maximum miter length at a join, expressed in half-width units;
/// beyond it the join falls back per the `LineJoin` style (see
/// `calc_miter`).
pub fn miter_limit(&mut self, miter_limit: f64) {
    self.miter_limit = miter_limit;
}
// Set miter limit theta
//pub fn miter_limit_theta(&mut self, miter_limit_theta: f64) {
//    self.miter_limit_theta = miter_limit_theta;
//}
/// Set inner miter limit
///
/// Analogue of `miter_limit` for the concave side of joins.
pub fn inner_miter_limit(&mut self, inner_miter_limit: f64) {
    self.inner_miter_limit = inner_miter_limit;
}
/// Set approximation scale
///
/// Larger values produce more vertices in round caps/joins (see the
/// angular-step formulas in `calc_cap`/`calc_arc`).
pub fn approximation_scale(&mut self, scale: f64) {
    self.approx_scale = scale;
}
/// Calculate Line End Cap
///
/// Emits the cap vertices for endpoint `v0` of segment `v0 -> v1`,
/// offset perpendicular to the segment by the half-width.
/// NOTE(review): divides by the segment length — coincident `v0`/`v1`
/// would yield NaN offsets; presumably callers filter zero-length
/// segments (confirm).
fn calc_cap(&self, v0: &Vertex<f64>, v1: &Vertex<f64>) -> Vec<Vertex<f64>> {
    let mut out = vec![];
    let dx = v1.x-v0.x;
    let dy = v1.y-v0.y;
    let len = (dx*dx + dy*dy).sqrt();
    // Perpendicular offset components, scaled to the half-width.
    let dx1 = self.width * dy / len;
    let dy1 = self.width * dx / len;
    match self.line_cap {
        LineCap::Square => {
            // Also shift the two cap points along the segment direction.
            let dx2 = dy1 * self.width_sign;
            let dy2 = dx1 * self.width_sign;
            out.push(Vertex::line_to(v0.x - dx1 - dx2, v0.y + dy1 - dy2));
            out.push(Vertex::line_to(v0.x + dx1 - dx2, v0.y - dy1 - dy2));
        },
        LineCap::Butt => {
            // Flush cap: just the two perpendicular offset points.
            out.push(Vertex::line_to(v0.x - dx1, v0.y + dy1));
            out.push(Vertex::line_to(v0.x + dx1, v0.y - dy1));
        },
        LineCap::Round => {
            // Angular step derived from the width and the flatness
            // tolerance 0.125/approx_scale; n points span the semicircle.
            let da = 2.0 * (self.width_abs / (self.width_abs + 0.125 / self.approx_scale)).acos();
            let n = (PI / da) as usize;
            let da = PI / (n + 1) as f64;
            out.push(Vertex::line_to(v0.x - dx1, v0.y + dy1));
            if self.width_sign > 0.0 {
                // Positive width: sweep the arc in increasing angle.
                let mut a1 = dy1.atan2(-dx1);
                a1 += da;
                for _i in 0 .. n {
                    out.push(Vertex::line_to(v0.x + a1.cos() * self.width,
                                             v0.y + a1.sin() * self.width));
                    a1 += da;
                }
            } else {
                // Negative width: start from the mirrored angle and
                // sweep the opposite way.
                let mut a1 = (-dy1).atan2(dx1);
                a1 -= da;
                for _i in 0 .. n {
                    out.push(Vertex::line_to(v0.x + a1.cos() * self.width,
                                             v0.y + a1.sin() * self.width));
                    a1 -= da;
                }
            }
            out.push(Vertex::line_to(v0.x + dx1, v0.y - dy1));
        }
    }
    out
}
/// Calculate an Arc
///
/// Returns Vertices representing the Arc centered at (`x`,`y`), swept
/// from offset (`dx1`,`dy1`) to offset (`dx2`,`dy2`) at the half-width
/// radius. Sweep direction follows the sign of the width.
fn calc_arc(&self, x: f64, y: f64, dx1: f64, dy1: f64, dx2: f64, dy2: f64) -> Vec<Vertex<f64>> {
    let mut out = vec![];
    // Starting and Ending Angle
    let mut a1 = (dy1 * self.width_sign).atan2(dx1 * self.width_sign);
    let mut a2 = (dy2 * self.width_sign).atan2(dx2 * self.width_sign);
    // Angular step from the flatness tolerance 0.125/approx_scale.
    let mut da = 2.0 * (self.width_abs / (self.width_abs + 0.125 / self.approx_scale)).acos();
    out.push(Vertex::line_to(x + dx1, y + dy1));
    // Positive Line Width
    if self.width_sign > 0.0 {
        // Require a1 > a2
        if a1 > a2 {
            a2 += 2.0 * PI;
        }
        // Number of points in Arc
        let n = ((a2 - a1) / da) as i64;
        // Arc Increment in radians
        da = (a2 - a1) / (n + 1) as f64;
        // Increment from original angle as a1 is at initial point
        a1 += da;
        // Create Arc
        for _ in 0 .. n {
            out.push(Vertex::line_to(x + a1.cos() * self.width,
                                     y + a1.sin() * self.width));
            a1 += da;
        }
    } else {
        // Negative Line Width
        // Require: a2 < a1
        if a1 < a2 {
            a2 -= 2.0 * PI;
        }
        // Number of point in Arc
        let n = ((a1 - a2) / da) as i64;
        // Arc Increment in radians
        da = (a1 - a2) / (n + 1) as f64;
        // Decrement from original angle as a1 is at initial point
        a1 -= da;
        // Create Arc
        for _ in 0 .. n {
            out.push(Vertex::line_to(x + a1.cos() * self.width,
                                     y + a1.sin() * self.width));
            a1 -= da;
        }
    }
    // Add Last Point
    out.push(Vertex::line_to(x + dx2, y + dy2));
    out
}
/// Calculate a Miter Join
///
/// Return the Miter Join for 3 points
///
/// `dx1`,`dy1` / `dx2`,`dy2` are the perpendicular half-width offsets of
/// the incoming (`p0->p1`) and outgoing (`p1->p2`) segments; `mlimit` is
/// the miter limit in half-width units; `dbevel` is the bevel distance
/// used to interpolate toward the intersection when the limit is
/// exceeded; `join` selects the fallback style in that case.
fn calc_miter(&self,
              p0: &Vertex<f64>,
              p1: &Vertex<f64>,
              p2: &Vertex<f64>,
              dx1: f64, dy1: f64, dx2: f64, dy2: f64,
              join: LineJoin, mlimit: f64, dbevel: f64)
              -> Vec<Vertex<f64>>{
    let mut out = vec![];
    let mut xi = p1.x;
    let mut yi = p1.y;
    let mut di = 1.0;
    // Miter limit expressed as an absolute distance.
    let lim = self.width_abs * mlimit;
    let mut miter_limit_exceeded = true; // Assume the worst
    let mut intersection_failed = true; // Assume the worst
    // Find the Intersection between the two points
    //
    //   a--b-p
    //  0  1  c
    //  -----  \
    //       \  \
    //        \2 d
    if let Some((xit,yit)) = self.calc_intersection(p0.x + dx1, p0.y - dy1, // a
                                                    p1.x + dx1, p1.y - dy1, // b
                                                    p1.x + dx2, p1.y - dy2, // c
                                                    p2.x + dx2, p2.y - dy2) { // d
        // Calculation of the intersection succeeded
        xi = xit;
        yi = yit;
        let pz = Vertex::line_to(xi,yi); // Intersection point
        di = len(p1,&pz); // Distance from p1 to p
        if di <= lim {
            // Inside the miter limit - Simplest case
            out.push(Vertex::line_to(xi, yi));
            miter_limit_exceeded = false;
        }
        intersection_failed = false;
    } else {
        // Calculation of the intersection failed, most probably
        // the three points lie one straight line.
        // First check if v0 and v2 lie on the opposite sides of vector:
        // (v1.x, v1.y) -> (v1.x+dx1, v1.y-dy1), that is, the perpendicular
        // to the line determined by vertices v0 and v1.
        // This condition determines whether the next line segments continues
        // the previous one or goes back.
        //----------------
        let x2 = p1.x + dx1;
        let y2 = p1.y - dy1;
        let pz = Vertex::line_to(x2,y2);
        if (cross(&p0, &p1, &pz) < 0.0) == (cross(&p1, &p2, &pz) < 0.0) {
            // This case means that the next segment continues
            // the previous one (straight line)
            //-----------------
            out.push(Vertex::line_to(p1.x + dx1, p1.y - dy1));
            miter_limit_exceeded = false;
        }
    }
    if miter_limit_exceeded {
        // Miter limit exceeded
        //------------------------
        match join {
            LineJoin::MiterRevert => {
                // For the compatibility with SVG, PDF, etc,
                // we use a simple bevel join instead of
                // "smart" bevel
                out.push(Vertex::line_to(p1.x + dx1, p1.y - dy1));
                out.push(Vertex::line_to(p1.x + dx2, p1.y - dy2));
            },
            LineJoin::Round => out.extend( self.calc_arc(p1.x, p1.y, dx1, -dy1, dx2, -dy2)),
            _ => { // default
                // If no miter-revert, calculate new dx1, dy1, dx2, dy2
                //----------------
                if intersection_failed {
                    // No intersection: extend each offset point along its
                    // own segment direction by the (signed) limit.
                    let mlimit = mlimit * self.width_sign;
                    out.push(Vertex::line_to(p1.x + dx1 + dy1 * mlimit,
                                             p1.y - dy1 + dx1 * mlimit));
                    out.push(Vertex::line_to(p1.x + dx2 - dy2 * mlimit,
                                             p1.y - dy2 - dx2 * mlimit));
                } else {
                    // "Smart" bevel: move each bevel point toward the
                    // intersection, clipped at the miter limit.
                    let x1 = p1.x + dx1;
                    let y1 = p1.y - dy1;
                    let x2 = p1.x + dx2;
                    let y2 = p1.y - dy2;
                    let di = (lim - dbevel) / (di - dbevel);
                    out.push(Vertex::line_to(x1 + (xi - x1) * di,
                                             y1 + (yi - y1) * di));
                    out.push(Vertex::line_to(x2 + (xi - x2) * di,
                                             y2 + (yi - y2) * di));
                }
            }
        }
    }
    out
}
/// Calculate Intersection of two lines
///
/// Parallel Lines are returned as `None`, otherwise the Intersection
/// (`px`,`py`) is returned
///
/// [Line-Line Intersection at Wikipedia](https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection#Given_two_points_on_each_line)
///
/// Lines are specified as pairs of points
/// - (`ax`, `ay`) -> (`bx`, `by`)
/// - (`cx`, `cy`) -> (`dx`, `dy`)
///
/// The intersection is defined at
///```text
///   px = ax + t (bx-ax)
///   py = ay + t (by-ay)
///```
/// where
///```text
///        (ay-cy)(dx-cx) - (ax-cx)(dy-cy)
///   t = ----------------------------------
///        (bx-ax)(dy-cy) - (by-ay)(dx-cx)
///```
fn calc_intersection(&self,
                     ax: f64, ay: f64, bx: f64, by: f64,
                     cx: f64, cy: f64, dx: f64, dy: f64)
                     -> Option<(f64, f64)> {
    const INTERSECTION_EPSILON: f64 = 1.0e-30;
    let numerator = (ay - cy) * (dx - cx) - (ax - cx) * (dy - cy);
    let denominator = (bx - ax) * (dy - cy) - (by - ay) * (dx - cx);
    // A vanishing denominator means the lines are parallel or co-linear.
    if denominator.abs() < INTERSECTION_EPSILON {
        None
    } else {
        // Parametric position along the first line, then the point itself.
        let t = numerator / denominator;
        Some((ax + t * (bx - ax), ay + t * (by - ay)))
    }
}
/// Calculate the Join of Two Line Segments
///
/// [SVG Line Joins](https://www.w3.org/TR/SVG/painting.html#LineJoin)
///
/// `p0 -> p1` and `p1 -> p2` are the two segments being joined at `p1`.
/// Returns the vertices that trace the stroke outline around the joint.
///
/// # Panics
/// Panics when either segment has zero length (coincident input points).
fn calc_join(&self,
             p0: &Vertex<f64>,
             p1: &Vertex<f64>,
             p2: &Vertex<f64>) -> Vec<Vertex<f64>> {
    let mut out = vec![];
    let len1 = len(p1,p0);
    let len2 = len(p2,p1);
    if len1 == 0.0 {
        panic!("Same point between p0,p1 {:?} {:?}", p0,p1);
    }
    if len2 == 0.0 {
        panic!("Same point between p1,p2 {:?} {:?}", p1,p2);
    }
    // Distance, perpendicular from line: each (dx,dy) pair is the stroke
    // offset normal to its segment, scaled to `self.width`.
    // NOTE: dx is derived from delta-y and dy from delta-x; vertices are
    // emitted as (x + dx, y - dy).
    let dx1 = self.width * (p1.y-p0.y) / len1;
    let dy1 = self.width * (p1.x-p0.x) / len1;
    let dx2 = self.width * (p2.y-p1.y) / len2;
    let dy2 = self.width * (p2.x-p1.x) / len2;
    // Cross Product of the three points: its sign relative to the stroke
    // width's sign decides inner vs outer join.
    let cp = cross(p0, p1, p2);
    if cp != 0.0 && cp.is_sign_positive() == self.width.is_sign_positive() {
        // Inner Join
        let mut limit = if len1 < len2 {
            len1 / self.width_abs
        } else {
            len2 / self.width_abs
        };
        // Enforce Minimum Miter Limit
        if limit < self.inner_miter_limit {
            limit = self.inner_miter_limit;
        }
        // Construct Joins
        match self.inner_join {
            // Simple Bevel Join
            InnerJoin::Bevel => {
                out.push(Vertex::line_to(p1.x + dx1, p1.y - dy1));
                out.push(Vertex::line_to(p1.x + dx2, p1.y - dy2));
            },
            InnerJoin::Miter => {
                out.extend(self.calc_miter(p0, p1, p2, dx1, dy1, dx2, dy2, LineJoin::MiterRevert, limit, 0.0));
            }
            InnerJoin::Jag |
            InnerJoin::Round => {
                // Squared distance between the two offset points; if it is
                // smaller than both (squared) segment lengths, a miter is safe.
                let cp = (dx1-dx2).powi(2) + (dy1-dy2).powi(2);
                if cp < len1.powi(2) && cp < len2.powi(2) {
                    out.extend(self.calc_miter(p0,p1,p2, dx1, dy1, dx2, dy2, LineJoin::MiterRevert, limit, 0.0));
                } else {
                    if self.inner_join == InnerJoin::Jag {
                        // Jag: dip through the joint vertex itself.
                        out.push(Vertex::line_to(p1.x + dx1, p1.y - dy1));
                        out.push(Vertex::line_to(p1.x,       p1.y      ));
                        out.push(Vertex::line_to(p1.x + dx2, p1.y - dy2));
                    }
                    if self.inner_join == InnerJoin::Round {
                        // Round: dip through the joint and sweep an arc
                        // between the two offset directions.
                        out.push(Vertex::line_to(p1.x + dx1, p1.y - dy1));
                        out.push(Vertex::line_to(p1.x,       p1.y      ));
                        out.extend(self.calc_arc(p1.x, p1.y, dx2, -dy2, dx1, -dy1));
                        out.push(Vertex::line_to(p1.x,       p1.y      ));
                        out.push(Vertex::line_to(p1.x + dx2, p1.y - dy2));
                    }
                }
            }
        }
    } else {
        // Outer Join
        let dx = (dx1 + dx2) / 2.0;
        let dy = (dy1 + dy2) / 2.0;
        // Distance from p1 to the midpoint of the two bevel points.
        let dbevel = (dx*dx + dy*dy).sqrt();
        if (self.line_join == LineJoin::Round || self.line_join == LineJoin::Bevel) && self.approx_scale * (self.width_abs - dbevel) < self.width_eps {
            // This is an optimization that reduces the number of points
            // in cases of almost collinear segments. If there's no
            // visible difference between bevel and miter joins we'd rather
            // use miter join because it adds only one point instead of two.
            //
            // Here we calculate the middle point between the bevel points
            // and then, the distance between v1 and this middle point.
            // At outer joins this distance always less than stroke width,
            // because it's actually the height of an isosceles triangle of
            // v1 and its two bevel points. If the difference between this
            // width and this value is small (no visible bevel) we can
            // add just one point.
            //
            // The constant in the expression makes the result approximately
            // the same as in round joins and caps. You can safely comment
            // out this entire "if".
            //-------------------
            if let Some((dx,dy)) =
                self.calc_intersection(p0.x + dx1, p0.y - dy1,
                                       p1.x + dx1, p1.y - dy1,
                                       p1.x + dx2, p1.y - dy2,
                                       p2.x + dx2, p2.y - dy2) {
                out.push(Vertex::line_to(dx, dy));
            } else {
                // Parallel offset lines: fall back to the first offset point.
                out.push(Vertex::line_to(p1.x + dx1, p1.y - dy1));
            }
            return out ;
        }
        match self.line_join {
            LineJoin::Miter |
            LineJoin::MiterRevert |
            LineJoin::MiterRound =>
                out.extend(self.calc_miter(p0,p1,p2, dx1,dy1,dx2,dy2,
                                           self.line_join,
                                           self.miter_limit,
                                           dbevel)),
            LineJoin::Round => out.extend(
                self.calc_arc(p1.x, p1.y, dx1, -dy1, dx2, -dy2)
            ),
            LineJoin::Bevel => {
                out.push(Vertex::line_to(p1.x + dx1, p1.y - dy1));
                out.push(Vertex::line_to(p1.x + dx2, p1.y - dy2));
            },
            // NOTE(review): None and MiterAccurate intentionally emit nothing here.
            LineJoin::None | LineJoin::MiterAccurate => {},
        }
    }
    out
}
/// Stroke the Vertex Source
///
/// There is lots of logic here and probably overly complex
///
/// For every sub-path (delimited by MoveTo commands) this traces the
/// stroke outline: caps + joins along the forward direction, then caps +
/// joins along the reversed direction, each side closed into a polygon.
fn stroke(&self) -> Vec<Vertex<f64>> {
    let mut all_out = vec![];
    // Get vertices from Vertex Source
    let v0 = &self.source.xconvert();
    // Split and loop along unique paths, ended by MoveTo's
    let pairs = split(&v0);
    for (m1,m2) in pairs {
        let mut outf = vec![];
        // Clean the current path, return new path
        let v = clean_path(&v0[m1..=m2]);
        if v.len() <= 1 {
            continue;
        }
        // Check for Closed Path Element
        let closed = is_path_closed(&v);
        // Ignore Closed Tag Element (the trailing Close vertex)
        let n = if closed { v.len() - 1 } else { v.len() };
        // Closed paths join every vertex; open paths skip the end points
        // (those get caps instead of joins).
        let (n1,n2) = if closed { (0, n) } else { (1,n-1) };
        // Forward Path
        if ! closed {
            outf.extend( self.calc_cap(&v[0], &v[1]) ); // Start Cap
        }
        for i in n1 .. n2 { // Forward Path
            outf.extend(
                self.calc_join(&v[prev!(i,n)], &v[curr!(i,n)], &v[next!(i,n)])
            );
        }
        if closed {
            // Close the polygon
            let n = outf.len();
            let last = outf[n-1];
            outf.push( Vertex::close_polygon(last.x, last.y) );
        }
        // Backward Path (traced with prev/next swapped so joins face the
        // other side of the stroke)
        let mut outb = vec![];
        if ! closed {
            outb.extend( self.calc_cap(&v[n-1], &v[n-2]) ); // End Cap
        }
        for i in (n1 .. n2).rev() { // Backward Path
            outb.extend(
                self.calc_join(&v[next!(i,n)], &v[curr!(i,n)], &v[prev!(i,n)])
            );
        }
        if closed {
            // Set first point as a MoveTo: closed strokes produce two
            // separate outline polygons (outer and inner).
            outb[0].cmd = PathCommand::MoveTo;
            // Close the polygon, using the last point
            let n = outb.len();
            let last = outb[n-1];
            outb.push( Vertex::close_polygon(last.x, last.y) );
        } else {
            // Close the polygon, using the last point
            let n = outb.len();
            let last = outb[n-1];
            outb.push( Vertex::close_polygon(last.x, last.y) );
        }
        // Set First point as MoveTo
        outf[0].cmd = PathCommand::MoveTo;
        // Combine Forward and Backward Paths
        outf.extend(outb);
        // Add to Path Collection
        all_out.extend(outf);
    }
    all_out
}
}
/// Converts a vertex source into a dashed version of itself.
pub struct Dash<S: VertexSource> {
    source: S,              // underlying path to be dashed
    dashes: Vec<f64>,       // alternating dash/gap lengths (even = dash, odd = gap)
    total_dash_len: f64,    // sum of all dash + gap lengths
    dash_start: f64,        // offset into the dash pattern at path start
    shorten: f64,           // amount to shorten the path by (set via `shorten()`)
    closed: bool,           // treat source path as closed when walking segments
}
impl<S> VertexSource for Dash<S> where S: VertexSource {
    /// Produce the dashed vertex stream.
    fn xconvert(&self) -> Vec<Vertex<f64>> {
        self.draw()
    }
}
impl<S> Dash<S> where S: VertexSource {
    /// Wrap a vertex source with an (initially empty) dash pattern.
    pub fn new(source: S) -> Self {
        Self {
            dashes: vec![], source,
            total_dash_len: 0.0, dash_start: 0.0,
            shorten: 0.0,
            closed: false,
        }
    }
    /// Clear the dash pattern (and its cached total length).
    pub fn remove_all_dashed(&mut self) {
        self.dashes.clear();
        self.total_dash_len = 0.0;
    }
    /// Append a dash/gap pair to the pattern; non-positive values are ignored.
    pub fn add_dash(&mut self, length: f64, gap: f64) {
        if length <= 0.0 || gap <= 0.0 {
            return;
        }
        self.total_dash_len += length + gap;
        self.dashes.push( length );
        self.dashes.push( gap );
    }
    /// Set the path-shortening amount.
    pub fn shorten(&mut self, shorten: f64) {
        self.shorten = shorten;
    }
    /// Set the starting offset into the dash pattern.
    pub fn dash_start(&mut self, start: f64) {
        self.dash_start = start;
        // NOTE(review): `calc_dash_start` takes `&self` and its return value
        // is discarded here, so this call has no effect; `draw()` recomputes
        // the start position itself. Looks like dead code — confirm.
        self.calc_dash_start( start.abs() );
    }
    /// Walk `ds` units into the dash pattern, wrapping around, and return
    /// `(offset within the current dash entry, index of that entry)`.
    fn calc_dash_start(&self, ds: f64) -> (f64,usize) {
        let mut curr_dash = 0;
        let mut curr_dash_start = 0.0;
        let mut ds = ds;
        while ds > 0.0 {
            if ds > self.dashes[curr_dash] {
                ds -= self.dashes[curr_dash];
                curr_dash += 1;
                curr_dash_start = 0.0;
                if curr_dash >= self.dashes.len() {
                    curr_dash = 0; // wrap to the start of the pattern
                }
            } else {
                curr_dash_start = ds;
                ds = 0.0;
            }
        }
        (curr_dash_start, curr_dash)
    }
    /// Clear the dash pattern and the closed flag.
    /// NOTE(review): unlike `remove_all_dashed`, this leaves
    /// `total_dash_len` stale — confirm whether that is intentional.
    pub fn remove_all(&mut self) {
        self.dashes.clear();
        self.closed = false;
    }
    /// Generate the dashed vertex stream by walking the source segments and
    /// alternating LineTo (dash) / MoveTo (gap) as the pattern advances.
    fn draw(&self) -> Vec<Vertex<f64>> {
        let mut out = vec![];
        let src = self.source.xconvert();
        // Need at least one segment and one dash/gap pair.
        if src.len() < 2 || self.dashes.len() < 2 {
            return out;
        }
        let mut i = 0;
        let mut v1 = src[i];
        i += 1;
        let mut v2 = src[i];
        let (mut x, mut y) = (v1.x, v1.y);
        out.push( Vertex::move_to(x, y) );
        // Length of the Current Segment
        let mut curr_rest = len(&v1,&v2);
        let (mut curr_dash_start, mut curr_dash) =
            if self.dash_start >= 0.0 {
                self.calc_dash_start(self.dash_start)
            } else {
                // NOTE(review): a negative dash_start is passed through as a
                // negative offset into entry 0 — confirm intended semantics.
                (self.dash_start, 0)
            };
        loop {
            // Remaining length of the current dash/gap entry.
            let dash_rest = self.dashes[curr_dash] - curr_dash_start;
            // Even entries are dashes (draw), odd entries are gaps (move).
            let cmd = if curr_dash % 2 == 1 {
                Vertex::move_to
            } else {
                Vertex::line_to
            };
            if curr_rest > dash_rest {
                // Dash fits within the line segment
                curr_rest -= dash_rest;
                curr_dash += 1;
                if curr_dash >= self.dashes.len() {
                    curr_dash = 0;
                }
                curr_dash_start = 0.0;
                // Interpolate the dash boundary point along v1->v2.
                x = v2.x - (v2.x - v1.x) * curr_rest / len(&v1,&v2);
                y = v2.y - (v2.y - v1.y) * curr_rest / len(&v1,&v2);
            } else {
                // Dash is longer than line segment
                curr_dash_start += curr_rest;
                x = v2.x;
                y = v2.y;
                i += 1;
                v1 = v2;
                if self.closed {
                    // Closed paths allow one extra wrap back to src[0].
                    // NOTE(review): `i > src.len()` (vs `>=` below) — confirm
                    // the off-by-one is the intended wrap behavior.
                    if i > src.len() {
                        out.push(cmd(x,y));
                        break;
                    } else {
                        let n = if i >= src.len() { 0 } else { i };
                        v2 = src[n];
                    }
                } else {
                    if i >= src.len() {
                        out.push(cmd(x,y));
                        break;
                    } else {
                        v2 = src[i];
                    }
                }
                curr_rest = len(&v1,&v2);
            }
            out.push( cmd(x,y) );
        }
        out
    }
}
/// Check if a path is closed
///
/// A path is considered closed when any of its vertices carries the
/// `PathCommand::Close` command.
fn is_path_closed(verts: &[Vertex<f64>]) -> bool {
    verts.iter().any(|v| v.cmd == PathCommand::Close)
}
/// Remove repeated vertices
///
/// Repeated vertices are defined with a distance <= 1e-6
///
/// For closed paths this also strips trailing LineTo vertices that
/// coincide with the first vertex.
fn clean_path(v: &[Vertex<f64>]) -> Vec<Vertex<f64>>{
    let mut mark = vec![];
    if ! v.is_empty() {
        mark.push(0);
    }
    // Find indices of LineTo vertices far enough away from the last point.
    // All other vertices are included unconditionally.
    for i in 1 .. v.len() {
        match v[i].cmd {
            PathCommand::LineTo => {
                if len(&v[i-1],&v[i]) >= 1e-6 {
                    mark.push(i);
                }
            },
            _ => mark.push(i),
        }
    }
    if mark.is_empty() {
        return vec![]
    }
    // Collect only "ok" vertices
    let mut out : Vec<_> = mark.into_iter().map(|i| v[i]).collect();
    // Return if path is not closed
    if ! is_path_closed(&out) {
        return out;
    }
    // Path is closed: drop trailing LineTo vertices that duplicate the
    // first point so the close does not create a zero-length segment.
    let first = out[0];
    loop {
        // Get Last LineTo Command
        // NOTE(review): `last_line_to` panics here when no LineTo remains,
        // i.e. when every LineTo coincides with the first vertex — confirm
        // callers can never produce such a path.
        let i = match last_line_to(&out) {
            Some(i) => i,
            None => panic!("Missing Last Line To"),
        };
        let last = out[i];
        // If last point and first are **NOT** the same, done
        if len(&first, &last) >= 1e-6 {
            break;
        }
        // If **SAME** point, remove last Vertex and continue
        out.remove(i);
    }
    out
}
/// Return the index of the last `LineTo` vertex in the slice.
///
/// Index 0 is never considered a match (the first vertex of a path is its
/// `MoveTo`), matching the original scan which stopped before index 0.
/// Returns `None` for an empty slice or when no `LineTo` is present.
fn last_line_to(v: &[Vertex<f64>]) -> Option<usize> {
    // Scan from the back. `(1..v.len())` is simply empty when
    // `v.len() <= 1`, so this no longer underflows on an empty slice the
    // way the previous `let mut i = v.len()-1;` did.
    (1..v.len()).rev().find(|&i| v[i].cmd == PathCommand::LineTo)
}
|
extern crate pcap_file;
use pcap_file::pcap::{PacketHeader, PcapHeader, PcapReader, PcapWriter};
static DATA: &[u8; 1455] = include_bytes!("little_endian.pcap");
#[test]
fn read() {
    let reader = PcapReader::new(&DATA[..]).unwrap();
    // Expected size: 24-byte global header, then for each packet a
    // 16-byte packet header plus its captured payload.
    let total: usize = 24
        + reader
            .map(|pkt| 16 + pkt.unwrap().data.len())
            .sum::<usize>();
    assert_eq!(total, DATA.len());
}
#[test]
fn read_write() {
    // Round-trip: read every packet and write it back out, then the
    // serialized bytes must match the original capture exactly.
    let reader = PcapReader::new(&DATA[..]).unwrap();
    let header = reader.header;
    let mut writer = PcapWriter::with_header(header, Vec::new()).unwrap();
    for pkt in reader {
        writer.write_packet(&pkt.unwrap()).unwrap();
    }
    let written = writer.into_writer();
    assert_eq!(&DATA[..], &written[..]);
}
#[test]
fn big_endian() {
    let data = include_bytes!("big_endian.pcap");
    // Global header test: the big-endian magic is read back normalized
    // to 0xa1b2c3d4, and the endianness is reported as Big.
    let mut pcap_reader = PcapReader::new(&data[..]).unwrap();
    let header = PcapHeader {
        magic_number: 0xa1b2c3d4,
        version_major: 2,
        version_minor: 4,
        ts_correction: 0,
        ts_accuracy: 0,
        snaplen: 0xffff,
        datalink: pcap_file::DataLink::ETHERNET,
    };
    assert_eq!(pcap_reader.header, header);
    assert_eq!(pcap_reader.header.endianness(), pcap_file::Endianness::Big);
    // Packet header test: first packet's fixture values.
    let packet = pcap_reader.next().unwrap().unwrap();
    let pkt_hdr = PacketHeader {
        ts_sec: 0x4fa11b29,
        ts_nsec: 152630000,
        incl_len: 0x62,
        orig_len: 0x62,
    };
    assert_eq!(packet.header, pkt_hdr);
}
#[test]
fn little_endian() {
    let data = include_bytes!("little_endian.pcap");
    // Global header test: little-endian captures carry the byte-swapped
    // magic 0xd4c3b2a1 and report Endianness::Little.
    let mut pcap_reader = PcapReader::new(&data[..]).unwrap();
    let header = pcap_file::pcap::PcapHeader {
        magic_number: 0xd4c3b2a1,
        version_major: 2,
        version_minor: 4,
        ts_correction: 0,
        ts_accuracy: 0,
        snaplen: 0x1000,
        datalink: pcap_file::DataLink::ETHERNET,
    };
    assert_eq!(pcap_reader.header, header);
    assert_eq!(pcap_reader.header.endianness(), pcap_file::Endianness::Little);
    // Packet header test: first packet's fixture values.
    let packet = pcap_reader.next().unwrap().unwrap();
    let pkt_hdr = PacketHeader {
        ts_sec: 0x4f633248,
        ts_nsec: 0x0,
        incl_len: 0x75,
        orig_len: 0x75,
    };
    assert_eq!(packet.header, pkt_hdr);
}
use algebra::biginteger::{BigInteger, BigInteger256};
use num_bigint::BigUint;
use std::cmp::Ordering::{Equal, Greater, Less};
use std::convert::TryInto;
// Geometry of a 256-bit integer stored as little-endian 64-bit limbs.
const BIGINT256_NUM_BITS: i32 = 256;
const BIGINT256_LIMB_BITS: i32 = 64;
const BIGINT256_LIMB_BYTES: i32 = BIGINT256_LIMB_BITS / 8;
// Ceiling division: limbs needed to hold NUM_BITS (= 4).
const BIGINT256_NUM_LIMBS: i32 =
    (BIGINT256_NUM_BITS + BIGINT256_LIMB_BITS - 1) / BIGINT256_LIMB_BITS;
const BIGINT256_NUM_BYTES: usize = (BIGINT256_NUM_LIMBS as usize) * 8;
/// OCaml-facing wrapper around `BigInteger256` (a `[u64; 4]` limb array).
#[derive(Copy, Clone)]
pub struct CamlBigint256(pub BigInteger256);
/// Pointer to a `CamlBigint256` living in an OCaml custom block.
pub type CamlBigint256Ptr = ocaml::Pointer<CamlBigint256>;
/// Raw C-ABI comparison hook registered with the OCaml runtime (see the
/// `ocaml::custom!` block below). Maps Rust `Ordering` to OCaml's
/// -1 / 0 / 1 comparison convention.
extern "C" fn caml_bigint_256_compare_raw(x: ocaml::Value, y: ocaml::Value) -> libc::c_int {
    let x: CamlBigint256Ptr = ocaml::FromValue::from_value(x);
    let y: CamlBigint256Ptr = ocaml::FromValue::from_value(y);
    match x.as_ref().0.cmp(&y.as_ref().0) {
        Less => -1,
        Equal => 0,
        Greater => 1,
    }
}
impl From<&CamlBigint256> for BigUint {
    /// View the limb array as raw little-endian bytes and build a `BigUint`.
    fn from(x: &CamlBigint256) -> BigUint {
        let x_ = (x.0).0.as_ptr() as *const u8;
        // SAFETY: the limb array is exactly BIGINT256_NUM_BYTES (4 * 8)
        // bytes of initialized memory, and a u8 view of u64 limbs has no
        // alignment requirement, so this byte slice is valid for its
        // lifetime (borrowed from `x`).
        let x_ = unsafe { std::slice::from_raw_parts(x_, BIGINT256_NUM_BYTES) };
        num_bigint::BigUint::from_bytes_le(x_)
    }
}
impl From<&BigUint> for CamlBigint256 {
fn from(x: &BigUint) -> CamlBigint256 {
let mut bytes = x.to_bytes_le();
bytes.resize(BIGINT256_NUM_BYTES, 0);
let limbs = bytes.as_ptr();
let limbs = limbs as *const [u64; BIGINT256_NUM_LIMBS as usize];
let limbs = unsafe { &(*limbs) };
CamlBigint256(BigInteger256(*limbs))
}
}
impl std::fmt::Display for CamlBigint256 {
    /// Format via the decimal rendering of the equivalent `BigUint`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        BigUint::from(self).fmt(f)
    }
}
// Register CamlBigint256 as an OCaml custom block, wiring in the raw
// comparison function defined above.
ocaml::custom!(CamlBigint256 {
    compare: caml_bigint_256_compare_raw,
});
/// Parse a numeral string in the given base into a 256-bit integer.
///
/// `_len` is supplied by the OCaml caller but unused; `BigUint::parse_bytes`
/// determines the length from the slice itself.
///
/// # Errors
/// Returns `Invalid_argument` when the bytes are not a valid numeral in
/// `base`.
#[ocaml::func]
pub fn caml_bigint_256_of_numeral(
    s: &[u8],
    _len: u32,
    base: u32,
) -> Result<CamlBigint256, ocaml::Error> {
    match BigUint::parse_bytes(s, base) {
        Some(data) => Ok((&data).into()),
        None => Err(ocaml::Error::invalid_argument("caml_bigint_256_of_numeral")
            .err()
            .unwrap()),
    }
}
/// Parse a base-10 string into a 256-bit integer.
/// Same contract as `caml_bigint_256_of_numeral` with `base = 10`.
///
/// # Errors
/// Returns `Invalid_argument` when the bytes are not a valid decimal numeral.
#[ocaml::func]
pub fn caml_bigint_256_of_decimal_string(s: &[u8]) -> Result<CamlBigint256, ocaml::Error> {
    match BigUint::parse_bytes(s, 10) {
        Some(data) => Ok((&data).into()),
        None => Err(
            ocaml::Error::invalid_argument("caml_bigint_256_of_decimal_string")
                .err()
                .unwrap(),
        ),
    }
}
/// Number of 64-bit limbs in the representation, exposed to OCaml.
#[ocaml::func]
pub fn caml_bigint_256_num_limbs() -> ocaml::Int {
    BIGINT256_NUM_LIMBS.try_into().unwrap()
}
/// Bytes per limb, exposed to OCaml.
#[ocaml::func]
pub fn caml_bigint_256_bytes_per_limb() -> ocaml::Int {
    BIGINT256_LIMB_BYTES.try_into().unwrap()
}
/// Integer (floor) division of two 256-bit values.
/// NOTE(review): `BigUint` division panics on a zero divisor — confirm the
/// OCaml side guards against `y == 0`.
#[ocaml::func]
pub fn caml_bigint_256_div(x: CamlBigint256Ptr, y: CamlBigint256Ptr) -> CamlBigint256 {
    let res: BigUint = BigUint::from(x.as_ref()) / BigUint::from(y.as_ref());
    (&res).into()
}
/// Three-way comparison exposed as a regular OCaml function.
/// Same logic as `caml_bigint_256_compare_raw` (the custom-block hook);
/// keep the two in sync.
#[ocaml::func]
pub fn caml_bigint_256_compare(x: CamlBigint256Ptr, y: CamlBigint256Ptr) -> ocaml::Int {
    match x.as_ref().0.cmp(&y.as_ref().0) {
        Less => -1,
        Equal => 0,
        Greater => 1,
    }
}
/// Print the decimal rendering to stdout (debug helper for the OCaml side).
#[ocaml::func]
pub fn caml_bigint_256_print(x: CamlBigint256Ptr) {
    println!("{}", BigUint::from(x.as_ref()));
}
/// Decimal string rendering, returned to OCaml.
#[ocaml::func]
pub fn caml_bigint_256_to_string(x: CamlBigint256Ptr) -> String {
    BigUint::from(x.as_ref()).to_string()
}
/// Test bit `i` of the 256-bit value.
///
/// # Errors
/// Returns `Invalid_argument` when `i` is negative (fails the `usize`
/// conversion).
#[ocaml::func]
pub fn caml_bigint_256_test_bit(x: CamlBigint256Ptr, i: ocaml::Int) -> Result<bool, ocaml::Error> {
    match i.try_into() {
        Ok(i) => Ok(x.as_ref().0.get_bit(i)),
        Err(_) => Err(ocaml::Error::invalid_argument("caml_bigint_256_test_bit")
            .err()
            .unwrap()),
    }
}
/// Copy the raw little-endian byte image of the value into a fresh OCaml
/// string (bytes) allocation.
#[ocaml::func]
pub fn caml_bigint_256_to_bytes(x: CamlBigint256Ptr) -> ocaml::Value {
    let len = std::mem::size_of::<CamlBigint256>();
    // SAFETY: caml_alloc_string allocates `len` writable bytes on the OCaml
    // heap; the source is a live CamlBigint256 of exactly `len` bytes, and
    // the two regions cannot overlap (fresh allocation).
    let str = unsafe { ocaml::sys::caml_alloc_string(len) };
    unsafe {
        core::ptr::copy_nonoverlapping(x.as_ptr() as *const u8, ocaml::sys::string_val(str), len);
    }
    ocaml::Value(str)
}
/// Reconstruct a `CamlBigint256` from its raw little-endian byte image
/// (the inverse of `caml_bigint_256_to_bytes`).
///
/// # Errors
/// Fails when the input is not exactly `size_of::<CamlBigint256>()` bytes.
#[ocaml::func]
pub fn caml_bigint_256_of_bytes(x: &[u8]) -> Result<CamlBigint256, ocaml::Error> {
    let len = std::mem::size_of::<CamlBigint256>();
    if x.len() != len {
        ocaml::Error::failwith("caml_bigint_256_of_bytes")?;
    };
    // SAFETY: the length check above guarantees `x` provides exactly `len`
    // readable bytes. `read_unaligned` is required because a byte slice has
    // no alignment guarantee; the previous `*(ptr as *const CamlBigint256)`
    // dereference was undefined behavior whenever the slice was not 8-byte
    // aligned. CamlBigint256 is Copy/plain-old-data, so a bytewise read is
    // a valid value.
    let x = unsafe { std::ptr::read_unaligned(x.as_ptr() as *const CamlBigint256) };
    Ok(x)
}
|
use std::cell::RefCell;
use std::ops::Range;
use nom::branch::alt;
use nom::bytes::complete::{take, take_till1, take_while};
use nom::character::complete::{anychar, char};
use nom::combinator::{all_consuming, map, not, recognize, rest, verify};
use nom::sequence::{delimited, preceded, terminated};
/// Input span carrying byte offsets plus shared error-collection state.
type LocatedSpan<'a> = nom_locate::LocatedSpan<&'a str, State<'a>>;
type IResult<'a, T> = nom::IResult<LocatedSpan<'a>, T>;
/// Convert a span into the byte range it covers in the original source.
trait ToRange {
    fn to_range(&self) -> Range<usize>;
}
impl<'a> ToRange for LocatedSpan<'a> {
    fn to_range(&self) -> Range<usize> {
        let start = self.location_offset();
        let end = start + self.fragment().len();
        start..end
    }
}
/// A parse error: the source byte range it covers plus a message.
#[derive(Debug)]
struct Error(Range<usize>, String);
/// Error sink threaded through every span; RefCell because nom parsers
/// take the span by value and clone it freely.
#[derive(Clone, Debug)]
struct State<'a>(&'a RefCell<Vec<Error>>);
impl<'a> State<'a> {
    /// Record an error without aborting the parse.
    pub fn report_error(&self, error: Error) {
        self.0.borrow_mut().push(error);
    }
}
/// Error-recovery combinator: run `parser`, and on a recoverable failure
/// record `error_msg` against the failing position and succeed with `None`
/// instead of propagating the error. This keeps the overall parse total.
///
/// NOTE(review): the `nom::Err::Error((input, _))` tuple patterns assume
/// the default tuple error type of nom 5.x — confirm against Cargo.toml
/// before upgrading nom.
fn expect<'a, F, E, T>(parser: F, error_msg: E) -> impl Fn(LocatedSpan<'a>) -> IResult<Option<T>>
where
    F: Fn(LocatedSpan<'a>) -> IResult<T>,
    E: ToString,
{
    move |input| match parser(input) {
        Ok((remaining, out)) => Ok((remaining, Some(out))),
        Err(nom::Err::Error((input, _))) | Err(nom::Err::Failure((input, _))) => {
            let err = Error(input.to_range(), error_msg.to_string());
            input.extra.report_error(err);
            // Recover: consume nothing, yield None.
            Ok((input, None))
        }
        // Err::Incomplete is not recoverable here; pass it through.
        Err(err) => Err(err),
    }
}
/// An identifier token's text.
#[derive(Debug)]
struct Ident(String);
/// Minimal expression AST; `Error` marks a recovered parse failure.
#[derive(Debug)]
enum Expr {
    Ident(Ident),
    Paren(Box<Expr>),
    Error,
}
/// Parse an identifier: first char is ASCII-alphabetic or `_`, the rest
/// are ASCII-alphanumeric or one of `_ - '`.
fn ident(input: LocatedSpan) -> IResult<Expr> {
    let first = verify(anychar, |c| c.is_ascii_alphabetic() || *c == '_');
    let rest = take_while(|c: char| c.is_ascii_alphanumeric() || "_-'".contains(c));
    // `recognize` returns the whole matched span rather than its pieces.
    let ident = recognize(preceded(first, rest));
    map(ident, |span: LocatedSpan| {
        Expr::Ident(Ident(span.fragment().to_string()))
    })(input)
}
/// Parse a parenthesised expression, recovering (via `expect`) from both a
/// missing inner expression and a missing closing `)`.
fn paren(input: LocatedSpan) -> IResult<Expr> {
    let paren = delimited(
        char('('),
        expect(expr, "expected expression after `(`"),
        expect(char(')'), "missing `)`"),
    );
    // A recovered-away inner expression becomes an Expr::Error node.
    map(paren, |inner| {
        Expr::Paren(Box::new(inner.unwrap_or(Expr::Error)))
    })(input)
}
/// Fallback parser: consume a run of unexpected characters (stopping
/// before `)` so an enclosing `paren` can still close), report it, and
/// produce an `Expr::Error` placeholder.
fn error(input: LocatedSpan) -> IResult<Expr> {
    map(take_till1(|c| c == ')'), |span: LocatedSpan| {
        let err = Error(span.to_range(), format!("unexpected `{}`", span.fragment()));
        span.extra.report_error(err);
        Expr::Error
    })(input)
}
/// Any expression; the trailing `error` alternative makes this (almost)
/// always succeed.
fn expr(input: LocatedSpan) -> IResult<Expr> {
    alt((paren, ident, error))(input)
}
/// Whole-input parser: one expression (or an Error node for empty input),
/// then report-and-consume any trailing characters so parsing never fails.
fn source_file(input: LocatedSpan) -> IResult<Expr> {
    let expr = alt((expr, map(take(0usize), |_| Expr::Error)));
    terminated(expr, preceded(expect(not(anychar), "expected EOF"), rest))(input)
}
/// Parse `source` into an AST plus every error recovered along the way.
/// Thanks to the recovery combinators the parser itself cannot fail, hence
/// the `expect`.
fn parse(source: &str) -> (Expr, Vec<Error>) {
    let errors = RefCell::new(Vec::new());
    let input = LocatedSpan::new_extra(source, State(&errors));
    let (_, expr) = all_consuming(source_file)(input).expect("parser cannot fail");
    (expr, errors.into_inner())
}
fn main() {
    // Exercise the parser on a mix of valid and deliberately broken inputs.
    let samples = ["foo", "(foo)", "(foo))", "(%", "(", "%", "()", ""];
    for input in samples.iter() {
        println!("{:7} {:?}", input, parse(input));
    }
}
|
use lazy_static::lazy_static;
use envconfig::Envconfig;
/// Aggregated application settings, built once from the environment.
#[derive(Debug)]
pub struct Settings {
    pub rpc: crate::rpc::Config,
    pub service: crate::srv::Config,
    pub db: crate::db::Config,
    pub nats: Nats,
    pub redis: Redis,
}
/// NATS connection settings (from `NATS_URL`).
#[derive(Debug, Clone, Envconfig)]
pub struct Nats {
    #[envconfig(from = "NATS_URL")]
    pub url: String,
}
/// Redis connection settings (from `REDIS_URL`).
#[derive(Debug, Clone, Envconfig)]
pub struct Redis {
    #[envconfig(from = "REDIS_URL")]
    pub url: String,
}
lazy_static! {
    /// Global settings, initialised on first access. `.env` is loaded
    /// best-effort first (`dotenv().ok()` ignores a missing file).
    static ref SETTINGS: Settings = {
        dotenv::dotenv().ok();
        Settings::init()
    };
}
impl Settings {
    /// Build the settings bundle by reading every component config from
    /// the process environment.
    ///
    /// # Panics
    /// Panics when a required variable is missing or malformed. Each call
    /// site uses `expect` with a section-specific message so a startup
    /// failure names the offending config instead of a bare `unwrap`
    /// source location.
    fn init() -> Self {
        Settings {
            rpc: crate::rpc::Config::init_from_env()
                .expect("invalid RPC config in environment"),
            service: crate::srv::Config::init_from_env()
                .expect("invalid service config in environment"),
            db: crate::db::Config::init_from_env()
                .expect("invalid DB config in environment"),
            nats: Nats::init_from_env()
                .expect("invalid NATS config in environment (NATS_URL)"),
            redis: Redis::init_from_env()
                .expect("invalid Redis config in environment (REDIS_URL)"),
        }
    }
    /// Access the lazily-initialised global settings.
    pub fn get() -> &'static Self {
        &SETTINGS
    }
}
|
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate hdk;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate holochain_json_derive;
mod definitions;
use definitions::{ Definition, Catalog, valid_definition, valid_base_and_target };
use hdk::{
entry_definition::ValidatingEntryType,
error::ZomeApiResult,
holochain_core_types::{
dna::entry_types::Sharing,
entry::Entry,
link::{
link_data::LinkData,
LinkMatch
}
},
api::{
link_entries,
get_links,
entry_address,
},
holochain_persistence_api::{
cas::content::Address,
},
holochain_json_api::{
json::JsonString,
error::JsonError
}
};
use std::convert::TryInto;
/// Entry type for `Definition`: create-only (modify/delete rejected),
/// validated by `valid_definition`.
fn definition_entry () -> ValidatingEntryType {
    entry! {
        name: "Definition",
        description: "Definitions that make up the Set Match Games system",
        sharing: Sharing::Public,
        validation_package: || {
            hdk::ValidationPackageDefinition::Entry
        },
        validation: |validation_data: hdk::EntryValidationData<Definition>| {
            // Only Create is allowed; Modify/Delete fall into the catch-all.
            let definition = match validation_data {
                hdk::EntryValidationData::Create{
                    entry,
                    validation_data: _
                } => entry,
                _ => return Err("Cannot modify or delete Definitions".to_string())
            };
            valid_definition(&definition)
        }
    }
}
/// Entry type for `Catalog`: always-valid entries that anchor links to all
/// definitions of a given type. Link validation checks both ends exist and
/// match via `valid_base_and_target`.
fn catalog_entry () -> ValidatingEntryType {
    entry! {
        name: "Catalog",
        description: "Entries that link to all definitions of a type",
        sharing: Sharing::Public,
        validation_package: || {
            hdk::ValidationPackageDefinition::Entry
        },
        validation: |_validation_data: hdk::EntryValidationData<Catalog>| {
            Ok(())
        },
        links: [
            to! {
                "Definition",
                link_type: "Catalog",
                validation_package : || {
                    hdk::ValidationPackageDefinition::Entry
                },
                validation: |validation_data: hdk::LinkValidationData| {
                    if let hdk::LinkValidationData::LinkAdd{
                        link: LinkData{link: link_, .. },
                        validation_data: _
                    } = validation_data {
                        // check that base and target exist
                        let base = handle_get_catalog(link_.base().to_owned())?;
                        let target = handle_get_definition(link_.target().to_owned())?;
                        return valid_base_and_target(&base, &target);
                    } else {
                        // LinkRemove is the other type that can be found here, but it isn't implemented.
                        return Err("Cannot remove links at this time.".to_string());
                    }
                }
            }
        ]
    }
}
/// Commit a `Definition` entry and link it from its type's catalog.
///
/// The catalog entry is (re-)committed idempotently, and the
/// catalog -> definition link is only added when it does not already
/// exist. Returns the address of the committed definition.
///
/// # Errors
/// Propagates any commit/link/lookup failure from the HDK.
fn handle_create_definition(definition: Definition) -> ZomeApiResult<Address> {
    let new_entry = Entry::App("Definition".into(), definition.clone().into());
    let address: Address = hdk::commit_entry(&new_entry)?;
    hdk::debug(format!("handle_create_definition({:?})", address))?;
    // Propagate catalog-creation failures with `?` instead of panicking —
    // the previous `.unwrap()` aborted the zome call on any HDK error.
    let catalog_address: Address = match definition {
        Definition::Game{..} => handle_create_catalog("Game Catalog", "Game")?,
        Definition::Format{..} => handle_create_catalog("Format Catalog", "Format")?,
        Definition::Component{..} => handle_create_catalog("Component Catalog", "Component")?,
    };
    // check that this link hasn't already been made
    let links: Vec<Address> = get_links(&catalog_address, LinkMatch::Exactly("Catalog"), LinkMatch::Any)?.addresses();
    if links.contains(&address) {
        return Ok(address)
    };
    link_entries(&catalog_address, &address, "Catalog", "")?;
    Ok(address)
}
/// Commit (idempotently) the catalog entry for the given name and type,
/// returning its address.
fn handle_create_catalog(name_str: &str, type_str: &str) -> ZomeApiResult<Address> {
    let entry = Entry::App(
        "Catalog".into(),
        Catalog {
            name: name_str.to_string(),
            type_: type_str.to_string(),
        }
        .into(),
    );
    let address: Address = hdk::commit_entry(&entry)?;
    Ok(address)
}
/// Fetch and decode a `Definition` entry by address.
fn handle_get_definition(address: Address) -> ZomeApiResult<Definition> {
    match hdk::get_entry(&address) {
        Ok(Some(Entry::App(_, api_result))) => Ok(api_result.try_into()?),
        // Missing entries and lookup errors collapse into one message.
        _ => Err(String::from("No definition found").into())
    }
}
/// Compute the deterministic address an entry would be stored at.
fn handle_get_entry_address(entry: Entry) -> ZomeApiResult<Address> {
    Ok(entry_address(&entry)?)
}
// TODO consider removing
/// Fetch and decode a `Catalog` entry by address.
fn handle_get_catalog(address: Address) -> ZomeApiResult<Catalog> {
    match hdk::get_entry(&address) {
        Ok(Some(Entry::App(_, api_result))) => Ok(api_result.try_into()?),
        _ => Err(String::from("No definition catalog found").into())
    }
}
/// List the definition addresses linked from the named catalog.
///
/// The catalog is located by recomputing its deterministic entry address
/// from (name, type) — no lookup of the catalog entry itself is needed.
fn handle_get_catalog_links(catalog_type: String, catalog_name: String) -> ZomeApiResult<Vec<Address>> {
    let catalog = Catalog {
        name: catalog_name.clone(),
        type_: catalog_type
    };
    let catalog_entry = Entry::App("Catalog".into(), catalog.into());
    let address: Address = entry_address(&catalog_entry)?;
    let links: Vec<Address> = get_links(&address, LinkMatch::Exactly("Catalog"), LinkMatch::Any)?.addresses();
    Ok(links)
}
fn handle_get_all_definitions_of_type(catalog_type: String) -> ZomeApiResult<Vec<Definition>> {
let catalog_name: String = catalog_type.clone() + " Catalog";
let links: Vec<Address> = handle_get_catalog_links(catalog_type, catalog_name)?;
let definitions: Vec<Definition> = links.into_iter().map(|address| {
handle_get_definition(address).unwrap()
}).collect();
Ok(definitions)
}
fn handle_get_definitions_from_catalog(catalog_type: String, catalog_name: String) -> ZomeApiResult<Vec<Definition>> {
let links: Vec<Address> = handle_get_catalog_links(catalog_type, catalog_name)?;
let definitions: Vec<Definition> = links.into_iter().map(|address| {
handle_get_definition(address).unwrap()
}).collect();
Ok(definitions)
}
// Zome definition: registers the two entry types and exposes the handler
// functions above as public zome functions (all under the `hc_public` trait).
define_zome! {
    entries: [
       definition_entry(),
       catalog_entry()
    ]
    init: || { Ok(()) }
    validate_agent: |validation_data : EntryValidationData::<AgentId>| {
        Ok(())
    }
    functions: [
        create_definition: {
            inputs: |definition: Definition|,
            outputs: |address: ZomeApiResult<Address>|,
            handler: handle_create_definition
        }
        /*
        create_catalog: {
            inputs: |catalog: Catalog|,
            outputs: |address: ZomeApiResult<Address>|,
            handler: handle_create_catalog
        }
        */
        get_definition: {
            inputs: |address: Address|,
            outputs: |definition: ZomeApiResult<Definition>|,
            handler: handle_get_definition
        }
        get_entry_address: {
            inputs: |entry: Entry|,
            outputs: |address: ZomeApiResult<Address>|,
            handler: handle_get_entry_address
        }
        get_catalog: {
            inputs: |address: Address|,
            outputs: |definition: ZomeApiResult<Catalog>|,
            handler: handle_get_catalog
        }
        get_catalog_links: {
            inputs: |catalog_type: String, catalog_name: String|,
            outputs: |linked_addresses: ZomeApiResult<Vec<Address>>|,
            handler: handle_get_catalog_links
        }
        get_all_definitions_of_type: {
            inputs: |catalog_type: String|,
            outputs: |linked_definitions: ZomeApiResult<Vec<Definition>>|,
            handler: handle_get_all_definitions_of_type
        }
        get_definitions_from_catalog: {
            inputs: |catalog_type: String, catalog_name: String|,
            outputs: |linked_definitions: ZomeApiResult<Vec<Definition>>|,
            handler: handle_get_definitions_from_catalog
        }
    ]
    traits: {
        hc_public [
        create_definition,
        // create_catalog,
        get_definition,
        get_entry_address,
        get_catalog,
        get_catalog_links,
        get_all_definitions_of_type,
        get_definitions_from_catalog
        ]
    }
}
|
use log::error;
pub mod mem_map;
mod game_pad;
mod vram;
mod wram;
use self::game_pad::GamePad;
use self::vram::Vram;
use self::wram::Wram;
use self::mem_map::*;
use super::{Mmu, Rom};
/// Flat memory map dispatching reads/writes to ROM, work RAM, video RAM
/// and the game pad, by address window (see `mem_map`).
pub struct BasicMMU {
    rom: Rom,
    wram: Wram,
    vram: Vram,
    game_pad: GamePad,
}
impl BasicMMU {
    /// Build an MMU around anything convertible into a `Rom`; the RAM
    /// banks and game pad start in their default (new) state.
    pub fn new<T: Into<Rom>>(rom: T) -> BasicMMU {
        let rom = rom.into();
        BasicMMU {
            rom,
            wram: Wram::new(),
            vram: Vram::new(),
            game_pad: GamePad::new(),
        }
    }
}
impl Mmu for BasicMMU {
fn read_byte(&self, addr: u16) -> u8 {
match addr {
ROM_START...ROM_END => self.rom.read_byte(addr - ROM_START),
WRAM_START...WRAM_END => self.wram.read_byte(addr - WRAM_START),
VRAM_START...VRAM_END => self.vram.read_byte(addr - VRAM_START),
_ => panic!("Unrecognized Address: 0x{:04x}", addr),
}
}
fn write_byte(&mut self, addr: u16, value: u8) {
match addr {
ROM_START...ROM_END => error!("Attempting to write to ROM"),
WRAM_START...WRAM_END => self.wram.write_byte(addr - WRAM_START, value),
VRAM_START...VRAM_END => self.vram.write_byte(addr - VRAM_START, value),
_ => panic!("Unrecognized Address: 0x{:04x}", addr),
}
}
fn rom_len(&self) -> usize {
self.rom.len()
}
}
|
// How many page pointers fit in one disk sector.
const POINTERS_IN_NODE: u64 = disk::SECTOR_SIZE / page::POINTER_SIZE;
/// On-disk array of `T` rooted at a single page pointer.
/// `PhantomData` records the element type without storing one.
struct Array<T> {
    root: page::Pointer,
    len: u64,
    _phantom: PhantomData<T>,
}
// NOTE(review): this impl is an unfinished draft and does not compile as
// written: `impl Array<T>` is missing its generic parameter list
// (`impl<T> Array<T>`), `for_each`/`for_each_idx` never return `Ok(())`,
// and the recursive call at the bottom passes no arguments. Left untouched
// pending the author's intent.
impl Array<T> {
    // A leaf node when all pointers fit in a single sector-sized node.
    fn is_leaf(&self) -> bool {
        self.len <= POINTERS_IN_NODE
    }
    // Apply `f` to each (index, pointer) pair in `range`.
    fn for_each<F>(&self, fs: &fs::State, range: Range, f: F) -> Result<(), alloc::Error>
        where F: Fn(usize, page::Pointer) -> Result<(), alloc::Error> {
        self.for_each_idx(fs, range.start, range, f)
    }
    fn for_each_idx<F>(&self, fs: &fs::State, idx: usize, range: Range, f: F) -> Result<(), alloc::Error>
        where F: Fn(usize, page::Pointer) -> Result<(), alloc::Error> {
        if self.is_leaf() {
            // Leaf: pointers are packed little-endian in the root page.
            let buf = fs.read(self.root)?;
            for i in range {
                f(idx + i, little_endian::read(&buf[i * page::POINTER_SIZE..]))?;
            }
        } else {
            let max_child = (self.len + POINTERS_IN_NODE / 2) / POINTERS_IN_NODE;
            self.for_each_idx(idx, range.start..cmp::min(range.end, range.start + POINTERS_IN_NODE), f)?;
            for i in (POINTERS_IN_NODE..range.end).step_by(max_child) {
                // NOTE(review): incomplete recursive call — no arguments.
                self.for_each_idx()
            }
        }
    }
}
// NOTE(review): malformed/unfinished — presumably meant to be
// `impl<T: Object + From<page::Pointer>> Object for T`, and `gc_visit`
// has an empty body despite returning a `Result`. Does not compile as-is.
impl<T: Object + From<page::Pointer>> Object {
    fn gc_visit(&self, fs: &fs::State) -> Result<(), alloc::Error> {
    }
}
|
use friday_error::FridayError;
use std::time::Duration;
/// Outcome of a Karta step: keep going with `A`, retry with `A` after the
/// given delay, or stop entirely.
#[derive(Eq, PartialEq, PartialOrd, Ord)]
pub enum Status<A> {
    Continue(A),
    Retry(A, Duration),
    Exit
}
// Karta is the swedish word for map
// Each Karta will guide a user towards Friday.
pub trait Karta {
    // Used for error logging etc
    fn name(&self) -> String;
    // This is an approximate time since if other Kartas
    // takes up much time this cannot be guaranteed.
    fn time_to_clue(&self) -> Duration;
    // The clue can be anything - e.g say we ping a site
    // or try to receive access to wifi via bluetooth or whatever
    fn clue(&mut self) -> Result<(), FridayError>;
}
|
use std::collections::{BTreeMap, BinaryHeap, VecDeque};
use std::vec::Vec;
/// A limit specification.
///
/// Ordering follows the derive's variant order: every `Limit(n)` compares
/// less than `Infinite`, and limits compare by `n`.
#[derive(PartialEq, PartialOrd, Eq, Ord, Copy, Clone, Debug)]
pub enum Capacity {
    Limit(usize),
    Infinite,
}
impl ::std::str::FromStr for Capacity {
    type Err = ::std::num::ParseIntError;
    /// Parse `"inf"`, `"infinite"` or `"∞"` as `Infinite`; anything else
    /// is parsed as a `usize` limit.
    ///
    /// # Errors
    /// Returns the underlying `ParseIntError` when the input is neither an
    /// infinity keyword nor a valid `usize`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // A slice lookup avoids the needless heap allocation of the
        // previous `vec![..].into_iter().any(..)` (clippy: useless_vec).
        if ["inf", "infinite", "∞"].contains(&s) {
            Ok(Capacity::Infinite)
        } else {
            s.parse().map(Capacity::Limit)
        }
    }
}
/// A generic work agenda: a container with some enqueue/dequeue policy.
pub trait Agenda {
    type Item;
    /// Insert an item; implementations with bounded capacity return the
    /// item displaced by the insertion (or the rejected item itself).
    /// The parameter is now named: anonymous trait-method parameters like
    /// the previous `fn enqueue(&mut self, Self::Item)` were removed in
    /// the 2018 edition.
    fn enqueue(&mut self, item: Self::Item) -> Option<Self::Item>;
    /// Remove and return the next item, if any.
    fn dequeue(&mut self) -> Option<Self::Item>;
    /// Borrow the next item without removing it.
    fn peek_next(&self) -> Option<&Self::Item>;
    /// `true` when the agenda holds no items.
    fn is_empty(&self) -> bool;
}
/// Items that carry an ordering weight (priority).
pub trait Weighted {
    type Weight;
    /// The weight used for priority ordering.
    fn get_weight(&self) -> Self::Weight;
}
/// A capacity-bounded sequence agenda backed by a `Vec`.
pub struct Queue<I> {
    data: Vec<I>,       // items in insertion order
    capacity: Capacity  // maximum number of retained items
}
impl<I> Agenda for Queue<I> {
    type Item = I;
    /// Push to the back; when over capacity, evict and return the oldest
    /// (front) item.
    /// NOTE(review): `Vec::remove(0)` is O(n) per eviction; a `VecDeque`
    /// backing store would make this O(1) — confirm before changing the
    /// struct layout.
    fn enqueue(&mut self, item: I) -> Option<I> {
        self.data.push(item);
        if Capacity::Limit(self.data.len()) <= self.capacity {
            None
        } else {
            Some(self.data.remove(0))
        }
    }
    /// Pop from the back (most recently enqueued).
    /// NOTE(review): together with the push-to-back enqueue this is LIFO
    /// order despite the `Queue` name — presumably intentional (agenda =
    /// depth-first); confirm against callers.
    fn dequeue(&mut self) -> Option<I> {
        self.data.pop()
    }
    /// Borrow the item `dequeue` would return next (the back).
    fn peek_next(&self) -> Option<&I> {
        self.data.last()
    }
    fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
}
impl<T> Queue<T> {
    /// Restrict the queue to hold at most `c` items.
    pub fn set_capacity(&mut self, c: usize) {
        self.capacity = Capacity::Limit(c);
    }
    /// Create an empty, unbounded queue.
    pub fn new() -> Self {
        Self {
            data: Vec::new(),
            capacity: Capacity::Infinite,
        }
    }
}
impl<T> ::std::iter::FromIterator<T> for Queue<T> {
    /// Build an unbounded queue holding the iterator's items in order.
    fn from_iter<I>(iter: I) -> Self
    where
        I: IntoIterator<Item = T>,
    {
        Self {
            data: iter.into_iter().collect(),
            capacity: Capacity::Infinite,
        }
    }
}
// #[derive(Debug, PartialEq, Eq)]
/// A capacity-bounded priority queue: items grouped by weight in a
/// `BTreeMap`, smallest weight dequeued first.
pub struct PriorityQueue<I>
where
    I: Weighted,
{
    data: BTreeMap<I::Weight, Vec<I>>, // The values should always be non-empty.
    capacity: Capacity,
    size: usize,                       // total item count across all buckets
    last_key: Option<I::Weight>,       // largest key w.r.t. Ord
}
impl<I> ::std::iter::FromIterator<I> for PriorityQueue<I>
where
    I: Weighted,
    I::Weight: Ord + Clone
{
    /// Build an unbounded priority queue from the iterator's items.
    fn from_iter<It>(iter: It) -> Self
    where
        It: IntoIterator<Item = I>
    {
        let mut queue = PriorityQueue::new(Capacity::Infinite);
        iter.into_iter().for_each(|item| {
            // Unbounded, so enqueue never displaces anything.
            queue.enqueue(item);
        });
        queue
    }
}
impl<I> PriorityQueue<I>
where
    I: Weighted
{
    /// Total number of items currently held.
    pub fn size(&self) -> usize {
        self.size
    }
    /// The configured capacity bound.
    pub fn capacity(&self) -> Capacity {
        self.capacity
    }
    /// `true` when the queue holds exactly its (finite) capacity.
    /// Always `false` for `Capacity::Infinite`.
    pub fn is_at_capacity(&self) -> bool {
        Capacity::Limit(self.size) == self.capacity
    }
}
impl<I> Agenda for PriorityQueue<I>
where
    I: Weighted,
    I::Weight: Ord + Clone,
{
    type Item = I;
    /// Insert by weight. Under capacity the item is always accepted; at
    /// capacity it is accepted only if it outranks (is smaller than) the
    /// current worst item, which is then evicted and returned. Otherwise
    /// the rejected item itself is returned.
    fn enqueue(&mut self, item: I) -> Option<I> {
        let priority = item.get_weight();
        if Capacity::Limit(self.size) < self.capacity {
            self.enqueue_unchecked(priority, item);
            None
        } else if &priority <
                   self.last_key.as_ref().expect(
            "[ERROR] `last_key` should not be `None` when the queue is non-empty.",
        )
        {
            self.enqueue_unchecked(priority, item);
            self.drop_last()
        } else {
            Some(item)
        }
    }
    /// Remove and return an item from the smallest-weight bucket (FIFO
    /// within the bucket), deleting the bucket once emptied.
    /// NOTE(review): this removes the *first* item of the bucket while
    /// `peek_next` borrows the bucket's *last* item — for equal-weight
    /// items peek and dequeue disagree; confirm which is intended.
    fn dequeue(&mut self) -> Option<I> {
        // Inner match extracts the item and flags an emptied bucket's key;
        // outer match removes that key outside the mutable borrow.
        match match self.data.iter_mut().next() {
            Some((k, v)) => {
                let res = if v.len() > 0 {
                    self.size -= 1;
                    if self.size == 0 {
                        self.last_key = None;
                    }
                    Some(v.remove(0))
                } else {
                    None
                };
                let key = if v.is_empty() { Some(k.clone()) } else { None };
                (key, res)
            }
            None => (None, None),
        } {
            (Some(k), res) => {
                self.data.remove(&k);
                res
            }
            (None, res) => res,
        }
    }
    /// Borrow an item from the smallest-weight bucket (its last element).
    fn peek_next(&self) -> Option<&I> {
        self.data.values().next().and_then(|v| v.last())
    }
    fn is_empty(&self) -> bool {
        self.size == 0
    }
}
impl<I> PriorityQueue<I>
where
    I: Weighted,
    I::Weight: Ord,
{
    /// Create an empty priority queue with the given capacity.
    ///
    /// # Panics
    /// Panics on `Capacity::Limit(0)` — a zero-capacity queue would make
    /// the enqueue invariants (non-empty `last_key` at capacity) unsound.
    pub fn new(capacity: Capacity) -> PriorityQueue<I> {
        assert!(capacity > Capacity::Limit(0));
        PriorityQueue {
            data: BTreeMap::new(),
            capacity,
            size: 0,
            last_key: None,
        }
    }
}
impl<I> PriorityQueue<I>
where
    I: Weighted,
    I::Weight: Ord + Clone,
{
    /// Shrink the capacity to `capacity`, evicting worst-ranked items
    /// until the queue fits. Returns the evicted items in eviction order
    /// reversed (i.e. best-of-the-evicted first).
    pub fn set_capacity(&mut self, capacity: usize) -> Vec<I> {
        self.capacity = Capacity::Limit(capacity);
        let mut res = Vec::new();
        while Capacity::Limit(self.size) > self.capacity {
            // TODO optimise to remove entire key-value-pairs at a time
            res.push(self.drop_last().expect(
                "[ERROR] `last_key` should not be `None` when the queue is non-empty.",
            ));
        }
        res.reverse();
        res
    }
    /// Insert without any capacity check, maintaining `last_key` and `size`.
    fn enqueue_unchecked(&mut self, priority: I::Weight, item: I) {
        self.data
            .entry(priority.clone())
            .or_insert_with(Vec::new)
            .push(item);
        // last_key tracks the largest (worst) weight seen.
        self.last_key = match self.last_key {
            Some(ref lk) if priority < *lk => Some(lk.clone()),
            _ => Some(priority),
        };
        self.size += 1;
    }
    /// Remove and return one item from the worst (largest-weight) bucket,
    /// updating `last_key` when the bucket empties.
    fn drop_last(&mut self) -> Option<I> {
        match self.last_key.clone() {
            Some(key) => {
                let mut vec = self.data.remove(&key).expect(
                    "[ERROR] `last_key` should only hold keys that occur in `data`.",
                );
                let item = vec.pop().expect(
                    "[ERROR] `data` should not contain empty `Vec`tors.",
                );
                if !vec.is_empty() {
                    // Bucket still has items: put it back unchanged.
                    self.data.insert(key.clone(), vec);
                } else {
                    // Bucket emptied: the new worst key is the map's last.
                    self.last_key = self.data.keys().next_back().cloned();
                }
                self.size -= 1;
                Some(item)
            }
            None => None,
        }
    }
}
impl<I: Ord> Agenda for BinaryHeap<I> {
    type Item = I;
    /// Pushes onto the heap; a heap never rejects, so this is always `None`.
    fn enqueue(&mut self, item: Self::Item) -> Option<Self::Item> {
        BinaryHeap::push(self, item);
        None
    }
    /// Pops the greatest element of the heap.
    fn dequeue(&mut self) -> Option<Self::Item> {
        BinaryHeap::pop(self)
    }
    /// Borrows the greatest element without removing it.
    fn peek_next(&self) -> Option<&Self::Item> {
        BinaryHeap::peek(self)
    }
    /// Delegates to the heap's own emptiness check.
    fn is_empty(&self) -> bool {
        BinaryHeap::is_empty(self)
    }
}
impl<I> Agenda for Vec<I> {
    type Item = I;
    /// Appends to the back; a `Vec` never rejects, so this is always `None`.
    fn enqueue(&mut self, item: Self::Item) -> Option<Self::Item> {
        Vec::push(self, item);
        None
    }
    /// Removes from the back — together with `enqueue` this behaves as a
    /// LIFO stack.
    fn dequeue(&mut self) -> Option<Self::Item> {
        Vec::pop(self)
    }
    /// Borrows the element `dequeue` would return next.
    fn peek_next(&self) -> Option<&Self::Item> {
        self.as_slice().last()
    }
    /// Delegates to the slice emptiness check.
    fn is_empty(&self) -> bool {
        self.as_slice().is_empty()
    }
}
impl<I> Agenda for VecDeque<I> {
    type Item = I;
    /// Pushes onto the back; a deque never rejects, so this is always `None`.
    fn enqueue(&mut self, item: Self::Item) -> Option<Self::Item> {
        VecDeque::push_back(self, item);
        None
    }
    /// Pops from the front — together with `enqueue` this behaves as a
    /// FIFO queue.
    fn dequeue(&mut self) -> Option<Self::Item> {
        VecDeque::pop_front(self)
    }
    /// Borrows the front element without removing it.
    fn peek_next(&self) -> Option<&Self::Item> {
        VecDeque::front(self)
    }
    /// Delegates to the deque's own emptiness check.
    fn is_empty(&self) -> bool {
        VecDeque::is_empty(self)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// `Capacity` ordering: a finite limit compares below a larger limit,
    /// and every finite limit compares below `Infinite`.
    #[test]
    fn capacity() {
        for (i, j) in vec![(0, 1), (9, 10), (0, 100)] {
            assert!(Capacity::Limit(i) < Capacity::Limit(j));
            assert!(Capacity::Limit(i) < Capacity::Infinite);
            assert!(Capacity::Limit(j) < Capacity::Infinite);
        }
    }
    /// Exercises enqueue/eviction, capacity changes and drain order of a
    /// bounded `PriorityQueue` whose items are weighted by their `char`.
    #[test]
    fn test_bounded_priority_queue() {
        #[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
        struct Item(char);
        impl Weighted for Item {
            type Weight = char;
            // The wrapped character doubles as the item's weight.
            fn get_weight(&self) -> char {
                match *self {
                    Item(c) => c,
                }
            }
        }
        let mut q = PriorityQueue::new(Capacity::Limit(5));
        assert_eq!(q.size(), 0);
        assert!(q.is_empty());
        assert_eq!(q.enqueue(Item('i')), None);
        assert_eq!(q.peek_next(), Some(&Item('i')));
        assert_eq!(q.size(), 1);
        assert_eq!(q.enqueue(Item('h')), None);
        assert_eq!(q.peek_next(), Some(&Item('h')));
        assert_eq!(q.size(), 2);
        assert_eq!(q.enqueue(Item('g')), None);
        assert_eq!(q.peek_next(), Some(&Item('g')));
        assert_eq!(q.size(), 3);
        assert_eq!(q.enqueue(Item('a')), None);
        assert_eq!(q.peek_next(), Some(&Item('a')));
        assert_eq!(q.size(), 4);
        assert_eq!(q.enqueue(Item('f')), None);
        assert_eq!(q.peek_next(), Some(&Item('a')));
        assert_eq!(q.size(), 5);
        assert!(q.is_at_capacity());
        // 'i' is the heaviest stored item, so admitting 'e' evicts it.
        assert_eq!(q.enqueue(Item('e')), Some(Item('i')));
        assert_eq!(q.set_capacity(7), vec![]);
        assert_eq!(q.enqueue(Item('c')), None);
        assert_eq!(q.enqueue(Item('b')), None);
        // Shrinking back to 5 evicts the two heaviest, returned in
        // ascending weight order.
        assert_eq!(q.set_capacity(5), vec![(Item('g')), (Item('h'))]);
        assert_eq!(q.size(), 5);
        assert!(q.is_at_capacity());
        assert_eq!(q.dequeue(), Some(Item('a')));
        assert_eq!(q.dequeue(), Some(Item('b')));
        assert_eq!(q.dequeue(), Some(Item('c')));
        assert_eq!(q.dequeue(), Some(Item('e')));
        assert_eq!(q.dequeue(), Some(Item('f')));
        assert_eq!(q.dequeue(), None);
    }
}
use crate::errors::UErr;
use crate::output::p2s;
use crate::registry::{GCRootsTx, Register};
use crate::scan::Scanner;
use crate::statistics::{Statistics, StatsMsg, StatsTx};
use crate::storepaths::{Cache, Lookup, StorePaths};
use crate::App;
use anyhow::{Context, Result};
use ignore::{self, DirEntry, WalkParallel, WalkState};
use std::io::{self, ErrorKind};
use std::os::unix::fs::MetadataExt;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::channel;
use std::sync::Arc;
/// Shared, cloneable state handed to every directory-walker thread.
#[derive(Clone, Debug)]
struct ProcessingContext {
    // Device id of the start directory; entries on other devices are skipped.
    startdev: u64,
    // Store-path cache consulted before scanning and updated afterwards.
    cache: Arc<Cache>,
    // Scanner used on cache misses.
    scanner: Arc<Scanner>,
    // Channel to the statistics collector.
    stats: StatsTx,
    // Channel to the GC-roots registry.
    gc: GCRootsTx,
    // Set to `true` to request that the whole walk terminates.
    abort: Arc<AtomicBool>,
}
impl ProcessingContext {
    /// Assembles the shared walker state from the application configuration.
    fn create(app: &App, stats: &mut Statistics, gc: GCRootsTx) -> Result<Self> {
        Ok(Self {
            startdev: app.start_meta()?.dev(),
            cache: Arc::new(app.cache()?),
            scanner: Arc::new(app.scanner()?),
            stats: stats.tx(),
            gc,
            abort: Arc::new(AtomicBool::new(false)),
        })
    }
    /// Scans a single DirEntry.
    ///
    /// The cache is queried first. Results (scanned or cached) are sent to the registry and
    /// statistics collector.
    fn scan_entry(&self, dent: DirEntry) -> Result<WalkState> {
        let mut sp = match self.cache.lookup(dent) {
            Lookup::Dir(sp) | Lookup::Hit(sp) => sp,
            Lookup::Miss(d) => self.scanner.find_paths(d)?,
        };
        if let Some(err) = sp.error() {
            if err.is_partial() {
                // Partial results are still usable: log, count, continue.
                warn!("{}", err);
                self.stats.send(StatsMsg::SoftError).unwrap();
            } else {
                return Err(err.clone().into());
            }
        }
        // Never cross into another filesystem.
        if sp.metadata()?.dev() != self.startdev {
            return Ok(WalkState::Skip);
        }
        self.cache.insert(&mut sp).context(UErr::WalkAbort)?;
        self.stats.send(StatsMsg::Scan((&sp).into())).unwrap();
        if !sp.is_empty() {
            self.gc.send(sp).unwrap();
        }
        Ok(WalkState::Continue)
    }
    /// Walks through a directory hierarchy and processes each found DirEntry.
    ///
    /// Returns the cache on success, or `UErr::WalkAbort` when any worker
    /// requested an abort.
    fn walk(self, walker: WalkParallel) -> Result<Arc<Cache>> {
        walker.run(|| {
            let pctx = self.clone();
            Box::new(move |res: Result<DirEntry, ignore::Error>| {
                res.map_err(From::from)
                    .and_then(|dent| pctx.scan_entry(dent))
                    .unwrap_or_else(|err| {
                        // Decide per error type whether to abort the whole
                        // walk or only count a soft error and continue.
                        if let Some(UErr::WalkAbort) = err.downcast_ref::<UErr>() {
                            error!("Traversal error: {:#}", &err);
                            pctx.abort.store(true, Ordering::SeqCst);
                            return WalkState::Quit;
                        } else if let Some(UErr::FiletypeUnknown) = err.downcast_ref::<UErr>() {
                            // ignore & continue
                            return WalkState::Continue;
                        } else if let Some(e) = err.downcast_ref::<ignore::Error>() {
                            error!("Traversal failure: {:#}", &e);
                            pctx.abort.store(true, Ordering::SeqCst);
                            return WalkState::Quit;
                        } else if let Some(e) = err.downcast_ref::<io::Error>() {
                            if e.kind() == ErrorKind::PermissionDenied {
                                error!("I/O error: {:#}", &err);
                                pctx.abort.store(true, Ordering::SeqCst);
                                return WalkState::Quit;
                            }
                        }
                        warn!("{:#}", &err);
                        pctx.stats.send(StatsMsg::SoftError).unwrap();
                        WalkState::Continue
                    })
            })
        });
        if self.abort.load(Ordering::SeqCst) {
            Err(UErr::WalkAbort.into())
        } else {
            Ok(self.cache)
        }
    }
}
/// Creates threads, starts parallel scanning and collects results.
///
/// The parallel walker and the statistics receiver run inside a crossbeam
/// scope while the GC-roots registry drains its channel on the current
/// thread; on success the registry and cache are committed (unless running
/// in no-register mode) and the collected statistics are returned.
pub fn spawn_threads(app: &App, gcroots: &mut dyn Register) -> Result<Statistics> {
    let mut stats = app.statistics();
    let (gc_tx, gc_rx) = channel::<StorePaths>();
    let mut cache = crossbeam::scope(|sc| -> Result<Arc<Cache>> {
        let pctx = ProcessingContext::create(app, &mut stats, gc_tx)?;
        let walker = app.walker()?.build_parallel();
        info!("{}: Scouting {}", crate_name!(), p2s(&app.opt.startdir));
        let walk_hdl = sc.spawn(|_| pctx.walk(walker));
        sc.spawn(|_| stats.receive_loop());
        // Runs on this thread until every gc_tx sender has been dropped.
        gcroots.register_loop(gc_rx);
        walk_hdl.join().expect("subthread panic")
    })
    .expect("thread panic")?;
    if app.register {
        gcroots.commit(&app.exectx)?;
        // don't touch cache if in no-register mode
        Arc::get_mut(&mut cache)
            .expect("dangling cache references (all threads terminated?)")
            .commit(&app.exectx)?;
        cache.log_statistics();
    }
    stats.log_summary(&app.opt.startdir);
    Ok(stats)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::registry;
    use crate::registry::tests::{fake_gc, FakeGCRoots};
    use crate::tests::{app, assert_eq_vecs, FIXTURES};
    use ignore::WalkBuilder;
    use std::fs;
    use std::fs::{create_dir, set_permissions, File, Permissions};
    use std::io::Write;
    use std::os::unix::fs::{symlink, PermissionsExt};
    use std::path::{Path, PathBuf};
    use std::sync::mpsc::channel;
    use tempfile::TempDir;
    use users::mock::{MockUsers, User};
    use users::os::unix::UserExt;
    // helper functions
    /// Creates the file at `path` and fills it with `contents`.
    fn wfile<P: AsRef<Path>>(path: P, contents: &str) {
        let mut file = File::create(path).unwrap();
        file.write_all(contents.as_bytes()).unwrap();
    }
    /// Walks whatever a given WalkBuilder builds and collects path relative to the fixtures dir.
    /// Entries that fail to resolve are silently skipped; the result is sorted.
    pub fn walk2vec(wb: &WalkBuilder, prefix: &Path) -> Vec<PathBuf> {
        let mut paths = vec![];
        let prefix = prefix.canonicalize().unwrap();
        for r in wb.build() {
            if let Ok(dent) = r {
                let p = dent.path().strip_prefix(&prefix).unwrap();
                paths.push(p.to_owned());
            }
        }
        paths.sort();
        paths
    }
    struct TestDir {
        temp: TempDir,
    }
    /// Create and remove directory for running tests. Provides an easy way to execute setup code.
    impl TestDir {
        // `setup` populates the freshly created temp directory.
        fn new<F>(setup: F) -> Self
        where
            F: FnOnce(&Path),
        {
            let temp = TempDir::new().unwrap();
            setup(&temp.path());
            Self { temp }
        }
        fn path(&self) -> &Path {
            self.temp.path()
        }
    }
    impl Drop for TestDir {
        /// Set read/exec bits everywhere -- else TempDir's cleanup might fail
        fn drop(&mut self) {
            for entry in fs::read_dir(self.temp.path()).unwrap() {
                if let Ok(f) = entry {
                    set_permissions(f.path(), Permissions::from_mode(0o755)).ok();
                }
            }
        }
    }
    /// Scanning fixture `dir1` registers exactly the known store references.
    #[test]
    fn walk_fixture_dir1() {
        let mut gcroots = fake_gc();
        let stats = spawn_threads(&app("dir1"), &mut gcroots).unwrap();
        assert_eq_vecs(
            gcroots.registered,
            |s| s.to_owned(),
            &[
                "dir1/duplicated|010yd8jls8w4vcnql4zhjbnyp2yay5pl-bash-4.4-p5",
                "dir1/notignored|00n9gkswhqdgbhgs7lnz2ckqxphavjr8-ChasingBottoms-1.3.1.2.drv",
                "dir1/notignored|00y6xgsdpjx3fyz4v7k5lwivi28yqd9f-initrd-fsinfo.drv",
                "dir1/proto-http.la|9w3ci6fskmz3nw27fb68hybfa5v1r33f-libidn-1.33",
                "dir1/proto-http.la|knvydciispmr4nr2rxg0iyyff3n1v4ax-gcc-6.2.0-lib",
                "dir1/script.zip|9v78r3afqy9xn9zwdj9wfys6sk3vc01d-coreutils-8.31",
                "dir1/six.py|1b4i3gm31j1ipfbx1v9a3hhgmp2wvyyw-python2.7-six-1.9.0",
            ],
        );
        assert_eq!(stats.softerrors, 0);
    }
    /// A file without read permission must abort the whole scan.
    #[test]
    fn harderror_on_unreadable_file() {
        let t = TestDir::new(|p| {
            let f = p.join("unreadable_file");
            wfile(&f, "/nix/store/dxscwf37hgq0xafs54h0c8xx47vg6d5g-n");
            set_permissions(&f, Permissions::from_mode(0o000)).unwrap();
        });
        assert!(spawn_threads(&app(t.path()), &mut FakeGCRoots::new(t.path())).is_err());
    }
    /// A directory that cannot be listed (exec-only) must abort the scan.
    #[test]
    fn harderror_on_unreadable_dir() {
        let t = TestDir::new(|p| {
            let d = p.join("unreadable_dir");
            create_dir(&d).unwrap();
            wfile(
                &d.join("file3"),
                "/nix/store/5hg176hhc19mg8vm2rg3lv2j3vlj166b-m",
            );
            set_permissions(&d, Permissions::from_mode(0o111)).unwrap();
        });
        assert!(spawn_threads(&app(t.path()), &mut FakeGCRoots::new(t.path())).is_err());
    }
    /// A directory without any permissions must abort the scan.
    #[test]
    fn harderror_on_traversable_dir() {
        let t = TestDir::new(|p| {
            let d = p.join("untraversable_dir");
            create_dir(&d).unwrap();
            set_permissions(&d, Permissions::from_mode(0o000)).unwrap();
        });
        assert!(spawn_threads(&app(t.path()), &mut FakeGCRoots::new(t.path())).is_err());
    }
    /// A symlink to a missing target is not an error.
    #[test]
    fn ignore_dangling_link() {
        let t = TestDir::new(|p| {
            symlink(p.join("no/where"), p.join("symlink")).unwrap();
        });
        let stats = spawn_threads(&app(t.path()), &mut FakeGCRoots::new(t.path())).unwrap();
        assert_eq!(stats.softerrors, 0);
    }
    /// A truncated archive produces exactly one soft error, not a hard abort.
    #[test]
    fn softfail_on_broken_zip_archive() {
        let t = TestDir::new(|p| {
            fs::write(
                p.join("broken.zip"),
                &fs::read(&*FIXTURES.join("dir1/script.zip")).unwrap()[..200],
            )
            .unwrap()
        });
        let stats = spawn_threads(&app(t.path()), &mut FakeGCRoots::new(t.path())).unwrap();
        assert_eq!(stats.softerrors, 1);
    }
    /// Mutually recursive symlinks and a self-link must not loop the walker.
    #[test]
    fn walk_infiniteloop() {
        let t = TempDir::new().unwrap();
        let p = t.path();
        create_dir(p.join("dir1")).unwrap();
        create_dir(p.join("dir2")).unwrap();
        symlink("../dir2/file2", p.join("dir1/file1")).unwrap();
        symlink("../dir1/file1", p.join("dir2/file2")).unwrap();
        symlink(".", p.join("recursive")).unwrap();
        let mut gcroots = registry::tests::FakeGCRoots::new(p);
        let stats = spawn_threads(&app(p), &mut gcroots).unwrap();
        assert_eq!(gcroots.registered.len(), 0);
        assert_eq!(stats.softerrors, 0);
    }
    /// Entries on a different device than `startdev` are skipped.
    #[test]
    fn should_not_cross_devices() {
        let app = app("dir1");
        let (tx, _) = channel::<StorePaths>();
        let mut pctx = ProcessingContext::create(&app, &mut app.statistics(), tx).unwrap();
        // Force a device id that can never match the real one.
        pctx.startdev = 0;
        let dent = app.walker().unwrap().build().next().unwrap().unwrap();
        assert_eq!(WalkState::Skip, pctx.scan_entry(dent).unwrap());
    }
    /// Override patterns exclude the matching fixture entries from the walk.
    #[test]
    fn walk_should_obey_exclude() {
        let mut app = app(".");
        app.overrides = vec![
            "!dir1".to_owned(),
            "!lftp*".to_owned(),
            "!cache*".to_owned(),
        ];
        assert_eq!(
            vec![
                "",
                "dir2",
                "dir2/ignored",
                "dir2/link",
                "miniegg-1-py3.5.egg",
            ]
            .into_iter()
            .map(PathBuf::from)
            .collect::<Vec<_>>(),
            walk2vec(&app.walker().unwrap(), &*FIXTURES)
        );
    }
    /// A `.userscan-ignore` file in the (mocked) home directory is honoured.
    #[test]
    fn walk_should_obey_excludefile() {
        let t = TempDir::new().unwrap();
        let p = t.path();
        let mut users = MockUsers::with_current_uid(100);
        users.add_user(User::new(100, "johndoe", 100).with_home_dir(&*p.to_string_lossy()));
        let app = app(p);
        wfile(p.join(".userscan-ignore"), "file2\n*.jpg\ndata*\n");
        for f in vec!["file1", "file2", "pic.jpg", "data.json"] {
            File::create(p.join(f)).unwrap();
        }
        let walker = app
            .walker()
            .and_then(|wb| crate::add_dotexclude(wb, &users))
            .unwrap();
        assert_eq!(
            vec!["", ".userscan-ignore", "file1"]
                .into_iter()
                .map(PathBuf::from)
                .collect::<Vec<_>>(),
            walk2vec(&walker, p)
        );
    }
}
|
// Copyright 2020 Shift Cryptosecurity AG
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate log;
pub use hidapi::HidApi;
use futures::prelude::*;
use futures::task::SpawnError;
use hidapi::{HidDevice, HidError};
use std::io;
use std::pin::Pin;
use std::sync::mpsc;
use std::sync::{Arc, Mutex};
use std::task::{Context, Poll, Waker};
use thiserror::Error;
#[cfg(test)]
mod tests {
    /// Sanity check that the test harness builds and runs.
    #[test]
    fn it_works() {
        let sum = 2 + 2;
        assert_eq!(sum, 4);
    }
}
/// Top-level error type of this crate, aggregating the failure sources of
/// the underlying HID transport.
#[derive(Error, Debug)]
pub enum Error {
    /// An error reported by the hidapi library.
    #[error("libhid failed")]
    HidApi(#[from] HidError),
    /// A plain I/O failure.
    #[error("io failed")]
    Io(#[from] io::Error),
    /// Spawning onto a futures executor failed.
    #[error("spawn failed")]
    Spawn(#[from] SpawnError),
}
/// Read-side state machine: `Idle` means no read has been requested from
/// the reader thread; `Busy` means a waker has been queued and a report is
/// pending or partially buffered.
enum ReadState {
    Idle,
    Busy,
}
/// State shared between the public `Device` handle and its reader thread.
struct DeviceInner {
    // Raw HID handle; locked by both the reader thread and `poll_write`.
    device: Arc<Mutex<HidDevice>>,
    // Join handle of the background reader; taken exactly once on drop.
    read_thread: Option<std::thread::JoinHandle<()>>,
    // Current position in the read state machine.
    rstate: ReadState,
    data_rx: mpsc::Receiver<Option<[u8; 64]>>, // One message per read
    req_tx: Option<mpsc::Sender<Waker>>, // One message per expected read
    // Report bytes that did not fit into the caller's buffer last time.
    buffer: Option<[u8; 64]>,
    // Offset of the first unserved byte in `buffer`.
    buffer_pos: usize,
}
/// Async handle to a HID device; clones share the same underlying state.
pub struct Device {
    // store an Option so that `close` works
    inner: Option<Arc<Mutex<DeviceInner>>>,
}
impl Clone for Device {
    /// Produces another handle to the same underlying device; only the
    /// reference count of the shared inner state is bumped.
    fn clone(&self) -> Self {
        Device {
            inner: self.inner.clone(),
        }
    }
}
impl Drop for Device {
    /// Shuts the reader thread down by closing the waker channel, then
    /// joins it before the shared state is released.
    fn drop(&mut self) {
        debug!("dropping hid connection");
        if let Some(inner) = self.inner.take() {
            if let Ok(mut guard) = inner.lock() {
                // Take the waker queue and drop it so that the reader thread finishes
                let req_tx = guard.req_tx.take();
                drop(req_tx);
                // Wait for the reader thread to finish
                // NOTE(review): this join can block indefinitely if the
                // reader thread is parked inside a blocking `read` — confirm
                // the intended shutdown path.
                match guard.read_thread.take() {
                    Some(jh) => match jh.join() {
                        Ok(_) => info!("device read thread joined"),
                        Err(_) => error!("failed to join device read thread"),
                    },
                    None => error!("already joined"),
                }
            } else {
                error!("Failed to take lock on device");
            }
        } else {
            error!("there was no inner");
        }
    }
}
impl Device {
    /// Wraps a blocking `HidDevice` and spawns a background reader thread.
    ///
    /// The reader thread blocks on `req_rx`: every `Waker` received is a
    /// request for exactly one 64-byte report, which is delivered through
    /// `data_tx` and acknowledged by waking the task. Dropping the `req_tx`
    /// side (see `Drop`) terminates the thread.
    pub fn new(device: HidDevice) -> Result<Self, Error> {
        let (data_tx, data_rx) = mpsc::channel();
        let (req_tx, req_rx) = mpsc::channel::<Waker>();
        // set non-blocking so that we can ignore spurious wakeups.
        //device.set_blocking_mode(false);
        // Must be accessed from both the inner thread and the async write path
        let device = Arc::new(Mutex::new(device));
        let jh = std::thread::spawn({
            let device = Arc::clone(&device);
            move || {
                loop {
                    // Wait for read request
                    debug!("waiting for request");
                    let waker = match req_rx.recv() {
                        Ok(waker) => waker,
                        Err(_e) => {
                            // All senders gone: the Device was dropped.
                            info!("No more wakers, shutting down");
                            return;
                        }
                    };
                    debug!("Got notified");
                    match device.lock() {
                        Ok(guard) => {
                            let mut buf = [0u8; 64];
                            //match guard.read_timeout(&mut buf[..], 1000) {
                            match guard.read(&mut buf[..]) {
                                Err(e) => {
                                    // Device error: close the data channel so
                                    // the poll side sees a disconnect, wake it,
                                    // and stop the thread.
                                    error!("hidapi failed: {}", e);
                                    drop(data_tx);
                                    waker.wake_by_ref();
                                    break;
                                }
                                Ok(len) => {
                                    if len == 0 {
                                        // Zero-length read: report "no data"
                                        // and wait for the next request.
                                        data_tx.send(None).unwrap();
                                        waker.wake_by_ref();
                                        continue;
                                    }
                                    debug!("Read data");
                                    if let Err(e) = data_tx.send(Some(buf)) {
                                        error!("Sending internally: {}", e);
                                        break;
                                    }
                                    waker.wake_by_ref();
                                }
                            }
                        }
                        Err(e) => {
                            error!("Broken lock: {:?}", e);
                            return;
                        }
                    }
                }
            }
        });
        Ok(Device {
            inner: Some(Arc::new(Mutex::new(DeviceInner {
                device,
                read_thread: Some(jh),
                rstate: ReadState::Idle,
                data_rx,
                req_tx: Some(req_tx),
                buffer: None,
                buffer_pos: 0,
            }))),
        })
    }
}
impl AsyncWrite for Device {
    /// Writes `buf` to the device in HID reports of at most 64 bytes each.
    ///
    /// Every report is prefixed with a zero report-ID byte as hidapi
    /// requires, and short chunks are zero-padded to the full report size.
    /// The write happens synchronously under the device mutex, so this
    /// never returns `Poll::Pending`.
    fn poll_write(
        mut self: Pin<&mut Self>,
        _cx: &mut Context,
        mut buf: &[u8],
    ) -> Poll<Result<usize, io::Error>> {
        let len = buf.len();
        if self.inner.is_none() {
            return Poll::Ready(Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "Cannot poll a closed device",
            )));
        }
        // NOTE(review): an empty `buf` still emits one all-zero report before
        // returning — confirm callers never issue empty writes.
        loop {
            let max_len = usize::min(64, buf.len());
            // The hidapi API requires that you put the report ID in the first byte.
            // If you don't use report IDs you must put a 0 there.
            let mut buf_with_report_id = [0u8; 1 + 64];
            buf_with_report_id[1..1 + max_len].copy_from_slice(&buf[..max_len]);
            debug!("Will write {:?}", &buf_with_report_id[..]);
            match self.inner.as_mut().unwrap().lock() {
                Ok(guard) => match guard.device.lock() {
                    Ok(guard) => {
                        guard
                            .write(&buf_with_report_id[..])
                            .map_err(|_| io::Error::new(io::ErrorKind::Other, "hidapi failed"))?;
                        debug!("Wrote: {:?}", &buf[0..max_len]);
                    }
                    Err(e) => error!("{:?}", e),
                },
                Err(e) => {
                    return Poll::Ready(Err(io::Error::new(
                        io::ErrorKind::Other,
                        format!("Mutex broken: {:?}", e),
                    )))
                }
            }
            // Advance past the chunk just written.
            buf = &buf[max_len..];
            if buf.is_empty() {
                // BUG FIX: this log used `buf.len()`/`buf` *after* the slice
                // had been exhausted, so it always printed "0: []". Report
                // the total number of bytes actually written instead.
                debug!("Wrote total {} bytes", len);
                return Poll::Ready(Ok(len));
            }
        }
    }
    /// Writes go straight to the device, so flushing is a no-op.
    fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Result<(), io::Error>> {
        Poll::Ready(Ok(()))
    }
    // TODO cleanup read thread...
    /// Closes the device by dropping the shared inner state (see `Drop`).
    fn poll_close(mut self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Result<(), io::Error>> {
        let this: &mut Self = &mut self;
        // take the device and drop it
        let _device = this.inner.take();
        Poll::Ready(Ok(()))
    }
}
// Will always read out 64 bytes. Make sure to read out all bytes to avoid trailing bytes in next
// readout.
// Will store all bytes that did not fit in provided buffer and give them next time.
impl AsyncRead for Device {
    /// Polls for one 64-byte report, serving it across multiple calls when
    /// the caller's buffer is smaller than the report.
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context,
        buf: &mut [u8],
    ) -> Poll<Result<usize, io::Error>> {
        if self.inner.is_none() {
            return Poll::Ready(Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "Cannot poll a closed device",
            )));
        }
        let mut this =
            self.inner.as_mut().unwrap().lock().map_err(|e| {
                io::Error::new(io::ErrorKind::Other, format!("Mutex broken: {:?}", e))
            })?;
        loop {
            let waker = cx.waker().clone();
            match this.rstate {
                ReadState::Idle => {
                    // Hand the reader thread a waker; it performs exactly one
                    // blocking read per waker received (see `Device::new`).
                    debug!("Sending waker");
                    if let Some(req_tx) = &mut this.req_tx {
                        if let Err(_e) = req_tx.send(waker) {
                            error!("failed to send waker");
                        }
                    } else {
                        return Poll::Ready(Err(io::Error::new(
                            io::ErrorKind::InvalidData,
                            "Failed internal send",
                        )));
                    }
                    this.rstate = ReadState::Busy;
                }
                ReadState::Busy => {
                    // First send any bytes from the previous readout
                    if let Some(inner_buf) = this.buffer.take() {
                        // NOTE(review): `len` is clamped against the full
                        // report length rather than the bytes remaining past
                        // `buffer_pos`; a large `buf` with `buffer_pos > 0`
                        // looks like it could slice out of bounds — confirm.
                        let len = usize::min(buf.len(), inner_buf.len());
                        let inner_slice = &inner_buf[this.buffer_pos..this.buffer_pos + len];
                        let buf_slice = &mut buf[..len];
                        buf_slice.copy_from_slice(inner_slice);
                        // Check if there is more data left
                        if this.buffer_pos + inner_slice.len() < inner_buf.len() {
                            this.buffer = Some(inner_buf);
                            this.buffer_pos += inner_slice.len();
                        } else {
                            this.rstate = ReadState::Idle;
                        }
                        return Poll::Ready(Ok(len));
                    }
                    // Second try to receive more bytes
                    let vec = match this.data_rx.try_recv() {
                        Ok(Some(vec)) => vec,
                        Ok(None) => {
                            // end of stream?
                            // NOTE(review): this returns Pending while staying
                            // `Busy` and without queueing a new waker, so
                            // nothing appears able to wake the task again —
                            // confirm intended.
                            return Poll::Pending;
                        }
                        Err(e) => match e {
                            mpsc::TryRecvError::Disconnected => {
                                return Poll::Ready(Err(io::Error::new(
                                    io::ErrorKind::Other,
                                    "Inner channel dead",
                                )));
                            }
                            mpsc::TryRecvError::Empty => {
                                // A waker is already queued with the reader
                                // thread; it will wake us when data arrives.
                                return Poll::Pending;
                            }
                        },
                    };
                    debug!("Read data {:?}", &vec[..]);
                    let len = usize::min(vec.len(), buf.len());
                    let buf_slice = &mut buf[..len];
                    let vec_slice = &vec[..len];
                    buf_slice.copy_from_slice(vec_slice);
                    if len < vec.len() {
                        // If bytes did not fit in buf, store bytes for next readout
                        this.buffer = Some(vec);
                        this.buffer_pos = 0;
                    } else {
                        this.rstate = ReadState::Idle;
                    }
                    debug!("returning {}", len);
                    return Poll::Ready(Ok(len));
                }
            };
        }
    }
}
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use juno::ast::*;
use juno::gen_js;
use juno::hparser;
/// Generates JS source for `node` and returns it as a `String`.
fn do_gen(node: &Node, pretty: gen_js::Pretty) -> String {
    use juno::gen_js::*;
    let mut buf = Vec::<u8>::new();
    generate(&mut buf, node, pretty).unwrap();
    String::from_utf8(buf).expect("Invalid UTF-8 output in test")
}
/// Wraps `kind` in a `Node` carrying a dummy (all-zero) source range.
fn node(kind: NodeKind) -> NodePtr {
    let start = SourceLoc { line: 0, col: 0 };
    let end = SourceLoc { line: 0, col: 0 };
    NodePtr::new(Node {
        range: SourceRange { file: 0, start, end },
        kind,
    })
}
/// Asserts that `src1` survives a parse → generate → parse round trip:
/// the regenerated source must parse to an AST whose JSON dump equals the
/// original's, for both pretty and compact generation.
fn test_roundtrip(src1: &str) {
    use juno::ast::*;
    for pretty in &[gen_js::Pretty::Yes, gen_js::Pretty::No] {
        let ast1 = hparser::parse(src1).unwrap();
        let mut dump: Vec<u8> = vec![];
        dump_json(&mut dump, &ast1, juno::ast::Pretty::Yes).unwrap();
        let ast1_json = String::from_utf8(dump).expect("Invalid UTF-8 output in test");
        // Regenerate source from the AST and parse it again.
        let src2 = do_gen(&ast1, *pretty);
        let ast2 = hparser::parse(&src2).unwrap_or_else(|_| {
            panic!(
                "Invalid JS generated: Pretty={:?}\nOriginal Source:\n{}\nGenerated Source:\n{}",
                pretty, &src1, &src2,
            )
        });
        let mut dump: Vec<u8> = vec![];
        dump_json(&mut dump, &ast2, juno::ast::Pretty::Yes).unwrap();
        let ast2_json = String::from_utf8(dump).expect("Invalid UTF-8 output in test");
        // Compare JSON dumps rather than ASTs so a mismatch yields a
        // readable diff.
        assert_eq!(
            ast1_json, ast2_json,
            "AST mismatch: Pretty={:?}\nOriginal Source:\n{}\nGenerated Source:\n{}",
            pretty, &src1, &src2
        );
    }
}
/// Literal generation: null, string escapes and numbers via direct AST
/// construction, plus round trips for booleans, regexps and template
/// literals (including surrogate pairs).
#[test]
fn test_literals() {
    use NodeKind::*;
    assert_eq!(
        do_gen(&node(NullLiteral), gen_js::Pretty::Yes).trim(),
        "null"
    );
    assert_eq!(
        do_gen(
            &node(StringLiteral {
                value: juno::ast::StringLiteral {
                    str: vec!['A' as u16, 0x1234u16, '\t' as u16],
                }
            },),
            gen_js::Pretty::Yes
        )
        .trim(),
        r#""A\u1234\t""#,
    );
    assert_eq!(
        do_gen(&node(NumericLiteral { value: 1.0 },), gen_js::Pretty::Yes).trim(),
        "1"
    );
    test_roundtrip("1");
    test_roundtrip("\"abc\"");
    // Lone and paired surrogates must survive the round trip.
    test_roundtrip(r#" "\ud800" "#);
    test_roundtrip(r#" "\ud83d\udcd5" "#);
    test_roundtrip("true");
    test_roundtrip("false");
    test_roundtrip("/abc/");
    test_roundtrip("/abc/gi");
    // NOTE(review): this "/abc/gi" case (and the `abc \ud800 def` template
    // below) appears twice — presumably accidental duplicates.
    test_roundtrip("/abc/gi");
    test_roundtrip(r#" `abc` "#);
    test_roundtrip(r#" `abc\ndef` "#);
    test_roundtrip(
        r#" `abc
def` "#,
    );
    test_roundtrip(r#" `abc \ud800 def` "#);
    test_roundtrip(r#" `abc \ud800 def` "#);
    test_roundtrip(r#" `\ud83d\udcd5` "#);
    test_roundtrip(r#" `escape backtick: \` should work` "#);
    test_roundtrip(r#" `😹` "#);
}
/// Plain identifiers and private class names must round-trip unchanged.
#[test]
fn test_identifier() {
    for src in vec!["foo", "class C { #foo() {} }"] {
        test_roundtrip(src);
    }
}
/// Binary operators: direct AST generation plus precedence round trips
/// (`**` right-associativity, `in`/`instanceof`, parenthesized terms).
#[test]
fn test_binop() {
    use NodeKind::*;
    assert_eq!(
        do_gen(
            &node(BinaryExpression {
                left: node(NullLiteral),
                operator: BinaryExpressionOperator::Plus,
                right: node(NullLiteral),
            }),
            gen_js::Pretty::Yes
        )
        .trim(),
        "null + null"
    );
    test_roundtrip("1 + 1");
    test_roundtrip("1 * 2 + (3 + 4)");
    test_roundtrip("1 ** 2 ** 3 ** 4");
    test_roundtrip("1 in 2 + (2 - 4) / 3");
    test_roundtrip("1 instanceof 2 + (2 - 4) / 3");
}
/// Ternaries, chained and parenthesized, plus `a?.3:.4` where `?.` must
/// not be lexed as optional chaining.
#[test]
fn test_conditional() {
    for src in vec![
        "a ? b : c",
        "a ? b : c ? d : e",
        "(a ? b : c) ? d : e",
        "a ? b : (c ? d : e)",
        "a?.3:.4",
    ] {
        test_roundtrip(src);
    }
}
/// `var` declarations with one and with multiple declarators.
#[test]
fn test_vars() {
    for src in vec!["var x=3;", "var x=3, y=4;"] {
        test_roundtrip(src);
    }
}
/// Function declarations, destructured/defaulted parameters, generators,
/// async functions and the arrow-function forms.
#[test]
fn test_functions() {
    for src in vec![
        "function foo() {}",
        "function foo(x, y) {}",
        "function foo(x, y=3) {}",
        "function foo([x, y], {z}) {}",
        "function foo([x, y] = [1,2], {z:q}) {}",
        "function foo() { return this; }",
        "function *foo() {}",
        "function *foo() { yield 1; }",
        "function *foo() { yield* f(); }",
        "async function foo() {}",
        "async function foo() { await f(); }",
        "async function *foo() {}",
        "async function *foo() { await f(); yield 1; }",
        "x => 3",
        "(x) => 3",
        "(x,y) => 3",
        "x => {3}",
        "async x => {3}",
        "async (x,y) => {3}",
    ] {
        test_roundtrip(src);
    }
}
/// Call expressions: plain, optional (`?.`), `new`, and dynamic `import`.
#[test]
fn test_calls() {
    for src in vec![
        "f();",
        "f(1);",
        "f(1, 2);",
        "(f?.(1, 2))(3);",
        "f?.(1, 2)?.(3)(5);",
        "new f();",
        "new f(1);",
        "import('foo')",
    ] {
        test_roundtrip(src);
    }
}
/// Statement forms: loops (including `for await`), control transfer,
/// labels, `switch`, `try`/`catch`/`finally` and `if`/`else`.
#[test]
fn test_statements() {
    test_roundtrip("while (1) {}");
    test_roundtrip("while (1) { fn(); }");
    test_roundtrip("while (1) fn();");
    test_roundtrip("for (;;) { fn(); }");
    test_roundtrip("for (;;) fn();");
    test_roundtrip("for (x;;) { fn(); }");
    test_roundtrip("for (;x;) { fn(); }");
    test_roundtrip("for (;;x) { fn(); }");
    test_roundtrip("for (var x=1;x<10;++x) { fn(); }");
    test_roundtrip("for (x in y) { fn(); }");
    test_roundtrip("for (var x of y) { fn(); }");
    test_roundtrip(
        "async () => {
            for await (x of y) { fn(); }
        }",
    );
    test_roundtrip("do {fn();} while (1)");
    test_roundtrip("debugger");
    test_roundtrip("{fn(); fn();}");
    test_roundtrip("for (;;) { break; }");
    test_roundtrip("for (;;) { continue; }");
    test_roundtrip("function f() { return; }");
    test_roundtrip("function f() { return 3; }");
    test_roundtrip(
        "switch(x) {
            case 1:
                break;
            case 2:
            case 3:
                break;
            default:
                break;
        }",
    );
    test_roundtrip("a: var x = 3;");
    test_roundtrip(
        "try {
            fn();
        } catch {
            fn();
        }",
    );
    test_roundtrip(
        "try {
            fn();
        } catch (e) {
            fn();
        }",
    );
    test_roundtrip(
        "try {
            fn();
        } catch (e) {
            fn();
        } finally {
            fn();
        }",
    );
    test_roundtrip("if (x) {fn();}");
    test_roundtrip("if (x) {fn();} else {fn();}");
    test_roundtrip("if (x) fn(); else fn();");
}
/// Logical operators, including the `??` cases that require parentheses
/// when mixed with `||`.
#[test]
fn test_logical() {
    for src in vec![
        "a && b || c",
        "a || b && c",
        "(a || b) && c",
        "(a || b) ?? c",
        "(a ?? b) || c",
    ] {
        test_roundtrip(src);
    }
}
/// Comma (sequence) expressions in initializer and argument position.
#[test]
fn test_sequences() {
    for src in vec!["var x = (1, 2, 3);", "foo((1, 2, 3), 4);"] {
        test_roundtrip(src);
    }
}
/// Object literals: empty, plain and computed keys, shorthand methods
/// and spread properties.
#[test]
fn test_objects() {
    test_roundtrip("({ })");
    test_roundtrip(
        "({
            a: 1,
            [x]: 1,
            fn() {},
            ...from,
        })",
    );
}
/// Array literals, including spread elements in all positions.
#[test]
fn test_arrays() {
    for src in vec![
        "([])",
        "var x = [1, 2, 3, ...from]",
        "var x = [1, 2, 3, ...from, 4, 5, 6]",
    ] {
        test_roundtrip(src);
    }
}
/// Plain, chained, compound and logical assignment operators, plus
/// assignment to a member expression.
#[test]
fn test_assignment() {
    for src in vec![
        "x = 1",
        "x = y = 1",
        "x += 1",
        "x -= 1",
        "x *= 1",
        "x /= 1",
        "x **= 1",
        "x |= 1",
        "x &= 1",
        "x ||= 1",
        "x &&= 1",
        "x ??= 1",
        "foo()[1] = 1",
    ] {
        test_roundtrip(src);
    }
}
/// Unary operators, including nested forms that need separating
/// parentheses or spaces.
#[test]
fn test_unary() {
    for src in vec![
        "+x",
        "-x",
        "!x",
        "~x",
        "-(-x)",
        "+!-x",
        "delete x",
        "typeof x",
    ] {
        test_roundtrip(src);
    }
}
/// Prefix and postfix increment/decrement, including forms that must not
/// fuse into other operators when printed.
#[test]
fn test_update() {
    for src in vec!["x++", "x--", "++x", "--x", "--(-x)", "+x++"] {
        test_roundtrip(src);
    }
}
/// Member access, optional chaining (with the parenthesized forms that
/// terminate a chain) and `new.target`.
#[test]
fn test_members() {
    for src in vec![
        "a.b",
        "a.b.c",
        "a?.b",
        "a?.[b]",
        "(a?.b).c",
        "a?.b().c",
        "(a?.b()).c",
        "a?.().b",
        "a?.().b",
        "a?.b?.c?.()",
        "(a?.b?.c?.()).d",
        "(a?.b?.c?.())?.d",
        "(a?.b?.c?.())(d)",
        "(a?.b?.c?.())?.(d)",
        "class C { constructor() { new.target; } }",
    ] {
        test_roundtrip(src);
    }
}
/// Class declarations and class expressions: inheritance, public/private
/// fields and methods, static methods, accessors and empty class elements.
#[test]
fn test_classes() {
    test_roundtrip("class C {}");
    test_roundtrip("class C extends D {}");
    test_roundtrip(
        "class C extends D {
            prop1;
            #prop2;
            constructor() {}
            a() {}
            #b() {}
            c(x, y) {}
            static d() {}
        }",
    );
    test_roundtrip(
        "var cls = (class C extends D {
            prop1;
            #prop2;
            constructor() {}
            a() {}
            #b() {}
            c(x, y) {}
            static d() {}
            get e() {}
            set e(v) {}
            ;
        })",
    );
}
/// `import` statements: default, named, namespace and aliased specifiers,
/// bare imports, import assertions and multiple imports per file.
#[test]
fn test_import() {
    test_roundtrip("import x from 'foo'");
    test_roundtrip("import x, {y} from 'foo'");
    test_roundtrip("import * as Foo from 'foo'");
    test_roundtrip("import x, {y as z, a as b} from 'foo'");
    test_roundtrip("import {a, b, c} from 'foo'");
    test_roundtrip("import 'foo';");
    test_roundtrip("import 'foo' assert {kind: 'json'};");
    test_roundtrip(
        "
        import 'foo';
        import 'bar';
        ",
    );
}
/// The main `export` statement forms: declarations, default export,
/// renamed specifiers and re-export.
#[test]
fn test_export() {
    for src in vec![
        "export var x = 3;",
        "export function foo() {}",
        "export default function foo() {}",
        "export {x as y};",
        "export * from 'foo';",
    ] {
        test_roundtrip(src);
    }
}
/// Flow type annotations: `&` binds tighter than `|`, so the intersection
/// inside a union is printed without parentheses.
#[test]
fn test_types() {
    use NodeKind::*;
    assert_eq!(
        do_gen(
            &node(UnionTypeAnnotation {
                types: vec![
                    node(NumberTypeAnnotation),
                    node(IntersectionTypeAnnotation {
                        types: vec![node(BooleanTypeAnnotation), node(StringTypeAnnotation),]
                    })
                ]
            }),
            gen_js::Pretty::Yes
        )
        .trim(),
        "number | boolean & string"
    );
}
|
// svd2rust-generated SYSCFG register block. Field order mirrors the hardware
// memory map (hence `#[repr(C)]`): each `_reservedN` byte array pads the gap
// so the following register sits at the offset stated in its `#[doc]` line.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    _reserved0: [u8; 0x04],
    #[doc = "0x04 - peripheral mode configuration register"]
    pub pmcr: PMCR,
    #[doc = "0x08 - external interrupt configuration register 1"]
    pub exticr1: EXTICR1,
    #[doc = "0x0c - external interrupt configuration register 2"]
    pub exticr2: EXTICR2,
    #[doc = "0x10 - external interrupt configuration register 3"]
    pub exticr3: EXTICR3,
    #[doc = "0x14 - external interrupt configuration register 4"]
    pub exticr4: EXTICR4,
    #[doc = "0x18 - Timer break lockup register"]
    pub cfgr: CFGR,
    _reserved6: [u8; 0x04],
    #[doc = "0x20 - compensation cell control/status register"]
    pub cccsr: CCCSR,
    #[doc = "0x24 - SYSCFG compensation cell value register"]
    pub ccvr: CCVR,
    #[doc = "0x28 - SYSCFG compensation cell code register"]
    pub cccr: CCCR,
    _reserved9: [u8; 0x04],
    #[doc = "0x30 - ADC2 internal input alternate connection"]
    pub adc2alt: ADC2ALT,
    _reserved10: [u8; 0xf0],
    #[doc = "0x124 - SYSCFG package register"]
    pub pkgr: PKGR,
    _reserved11: [u8; 0x01d8],
    #[doc = "0x300 - SYSCFG user register 0"]
    pub ur0: UR0,
    _reserved12: [u8; 0x04],
    #[doc = "0x308 - SYSCFG user register 2"]
    pub ur2: UR2,
    #[doc = "0x30c - SYSCFG user register 3"]
    pub ur3: UR3,
    #[doc = "0x310 - SYSCFG user register 4"]
    pub ur4: UR4,
    #[doc = "0x314 - SYSCFG user register 5"]
    pub ur5: UR5,
    #[doc = "0x318 - SYSCFG user register 6"]
    pub ur6: UR6,
    #[doc = "0x31c - SYSCFG user register 7"]
    pub ur7: UR7,
    _reserved18: [u8; 0x0c],
    #[doc = "0x32c - SYSCFG user register 11"]
    pub ur11: UR11,
    #[doc = "0x330 - SYSCFG user register 12"]
    pub ur12: UR12,
    #[doc = "0x334 - SYSCFG user register 13"]
    pub ur13: UR13,
    #[doc = "0x338 - SYSCFG user register 14"]
    pub ur14: UR14,
    #[doc = "0x33c - SYSCFG user register 15"]
    pub ur15: UR15,
    #[doc = "0x340 - SYSCFG user register 16"]
    pub ur16: UR16,
    #[doc = "0x344 - SYSCFG user register 17"]
    pub ur17: UR17,
    #[doc = "0x348 - SYSCFG user register 18"]
    pub ur18: UR18,
}
// svd2rust-generated accessor type aliases and per-register submodules.
// Do not edit by hand; regenerate from the SVD instead.
#[doc = "PMCR (rw) register accessor: peripheral mode configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pmcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pmcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`pmcr`]
module"]
pub type PMCR = crate::Reg<pmcr::PMCR_SPEC>;
#[doc = "peripheral mode configuration register"]
pub mod pmcr;
#[doc = "EXTICR1 (rw) register accessor: external interrupt configuration register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`exticr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`exticr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`exticr1`]
module"]
pub type EXTICR1 = crate::Reg<exticr1::EXTICR1_SPEC>;
#[doc = "external interrupt configuration register 1"]
pub mod exticr1;
#[doc = "EXTICR2 (rw) register accessor: external interrupt configuration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`exticr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`exticr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`exticr2`]
module"]
pub type EXTICR2 = crate::Reg<exticr2::EXTICR2_SPEC>;
#[doc = "external interrupt configuration register 2"]
pub mod exticr2;
#[doc = "EXTICR3 (rw) register accessor: external interrupt configuration register 3\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`exticr3::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`exticr3::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`exticr3`]
module"]
pub type EXTICR3 = crate::Reg<exticr3::EXTICR3_SPEC>;
#[doc = "external interrupt configuration register 3"]
pub mod exticr3;
#[doc = "EXTICR4 (rw) register accessor: external interrupt configuration register 4\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`exticr4::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`exticr4::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`exticr4`]
module"]
pub type EXTICR4 = crate::Reg<exticr4::EXTICR4_SPEC>;
#[doc = "external interrupt configuration register 4"]
pub mod exticr4;
#[doc = "CCCSR (rw) register accessor: compensation cell control/status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cccsr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cccsr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cccsr`]
module"]
pub type CCCSR = crate::Reg<cccsr::CCCSR_SPEC>;
#[doc = "compensation cell control/status register"]
pub mod cccsr;
#[doc = "CCVR (r) register accessor: SYSCFG compensation cell value register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccvr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ccvr`]
module"]
pub type CCVR = crate::Reg<ccvr::CCVR_SPEC>;
#[doc = "SYSCFG compensation cell value register"]
pub mod ccvr;
#[doc = "CCCR (rw) register accessor: SYSCFG compensation cell code register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cccr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cccr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cccr`]
module"]
pub type CCCR = crate::Reg<cccr::CCCR_SPEC>;
#[doc = "SYSCFG compensation cell code register"]
pub mod cccr;
#[doc = "PKGR (r) register accessor: SYSCFG package register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pkgr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`pkgr`]
module"]
pub type PKGR = crate::Reg<pkgr::PKGR_SPEC>;
#[doc = "SYSCFG package register"]
pub mod pkgr;
#[doc = "UR0 (r) register accessor: SYSCFG user register 0\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur0::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur0`]
module"]
pub type UR0 = crate::Reg<ur0::UR0_SPEC>;
#[doc = "SYSCFG user register 0"]
pub mod ur0;
#[doc = "UR2 (rw) register accessor: SYSCFG user register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ur2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur2`]
module"]
pub type UR2 = crate::Reg<ur2::UR2_SPEC>;
#[doc = "SYSCFG user register 2"]
pub mod ur2;
#[doc = "UR3 (rw) register accessor: SYSCFG user register 3\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur3::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ur3::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur3`]
module"]
pub type UR3 = crate::Reg<ur3::UR3_SPEC>;
#[doc = "SYSCFG user register 3"]
pub mod ur3;
#[doc = "UR4 (r) register accessor: SYSCFG user register 4\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur4::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur4`]
module"]
pub type UR4 = crate::Reg<ur4::UR4_SPEC>;
#[doc = "SYSCFG user register 4"]
pub mod ur4;
#[doc = "UR5 (r) register accessor: SYSCFG user register 5\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur5::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur5`]
module"]
pub type UR5 = crate::Reg<ur5::UR5_SPEC>;
#[doc = "SYSCFG user register 5"]
pub mod ur5;
#[doc = "UR6 (r) register accessor: SYSCFG user register 6\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur6::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur6`]
module"]
pub type UR6 = crate::Reg<ur6::UR6_SPEC>;
#[doc = "SYSCFG user register 6"]
pub mod ur6;
#[doc = "UR7 (r) register accessor: SYSCFG user register 7\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur7::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur7`]
module"]
pub type UR7 = crate::Reg<ur7::UR7_SPEC>;
#[doc = "SYSCFG user register 7"]
pub mod ur7;
#[doc = "UR11 (r) register accessor: SYSCFG user register 11\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur11::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur11`]
module"]
pub type UR11 = crate::Reg<ur11::UR11_SPEC>;
#[doc = "SYSCFG user register 11"]
pub mod ur11;
#[doc = "UR12 (r) register accessor: SYSCFG user register 12\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur12::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur12`]
module"]
pub type UR12 = crate::Reg<ur12::UR12_SPEC>;
#[doc = "SYSCFG user register 12"]
pub mod ur12;
#[doc = "UR13 (r) register accessor: SYSCFG user register 13\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur13::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur13`]
module"]
pub type UR13 = crate::Reg<ur13::UR13_SPEC>;
#[doc = "SYSCFG user register 13"]
pub mod ur13;
#[doc = "UR14 (rw) register accessor: SYSCFG user register 14\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur14::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ur14::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur14`]
module"]
pub type UR14 = crate::Reg<ur14::UR14_SPEC>;
#[doc = "SYSCFG user register 14"]
pub mod ur14;
#[doc = "UR15 (r) register accessor: SYSCFG user register 15\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur15::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur15`]
module"]
pub type UR15 = crate::Reg<ur15::UR15_SPEC>;
#[doc = "SYSCFG user register 15"]
pub mod ur15;
#[doc = "UR16 (r) register accessor: SYSCFG user register 16\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur16::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur16`]
module"]
pub type UR16 = crate::Reg<ur16::UR16_SPEC>;
#[doc = "SYSCFG user register 16"]
pub mod ur16;
#[doc = "UR17 (r) register accessor: SYSCFG user register 17\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur17::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur17`]
module"]
pub type UR17 = crate::Reg<ur17::UR17_SPEC>;
#[doc = "SYSCFG user register 17"]
pub mod ur17;
#[doc = "CFGR (rw) register accessor: Timer break lockup register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cfgr`]
module"]
pub type CFGR = crate::Reg<cfgr::CFGR_SPEC>;
#[doc = "Timer break lockup register"]
pub mod cfgr;
#[doc = "ADC2ALT (rw) register accessor: ADC2 internal input alternate connection\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`adc2alt::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`adc2alt::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`adc2alt`]
module"]
pub type ADC2ALT = crate::Reg<adc2alt::ADC2ALT_SPEC>;
#[doc = "ADC2 internal input alternate connection"]
pub mod adc2alt;
#[doc = "UR18 (r) register accessor: SYSCFG user register 18\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur18::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ur18`]
module"]
pub type UR18 = crate::Reg<ur18::UR18_SPEC>;
#[doc = "SYSCFG user register 18"]
pub mod ur18;
|
struct Solution;
impl Solution {
    /// Count the palindromic substrings of `s`.
    ///
    /// Expands around each of the `2n - 1` possible palindrome centers:
    /// every character (odd-length palindromes) and every gap between
    /// adjacent characters (even-length palindromes).
    pub fn count_substrings(s: String) -> i32 {
        let bytes = s.as_bytes();
        let len = bytes.len() as i32;
        let mut total = 0;
        for center in 0..(2 * len - 1) {
            // Center k maps to the pair (k / 2, k / 2 + k % 2): even k is a
            // single-character center, odd k the gap between two characters.
            let mut left = center / 2;
            let mut right = left + center % 2;
            // Grow outward while the ends still match.
            while left >= 0 && right < len && bytes[left as usize] == bytes[right as usize] {
                total += 1;
                left -= 1;
                right += 1;
            }
        }
        total
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // LeetCode example 1: "abc" has only the three single-char palindromes.
    #[test]
    fn test_count_substrings1() {
        assert_eq!(Solution::count_substrings("abc".to_string()), 3);
    }
    // LeetCode example 2: "aaa" -> "a" x3, "aa" x2, "aaa" x1 = 6.
    #[test]
    fn test_count_substrings2() {
        assert_eq!(Solution::count_substrings("aaa".to_string()), 6);
    }
}
|
use crate::hittable::{HitRecord, Hittable};
use crate::ray::Ray;
use std::ops::Range;
use std::rc::Rc;
/// A collection of hittable objects that is itself `Hittable`: a ray is
/// tested against every contained object and the closest hit wins.
pub struct HittableList {
    // `Rc` so the same object can be shared between lists/scenes.
    objects: Vec<Rc<dyn Hittable>>,
}
impl HittableList {
pub fn new() -> Self {
Self {
objects: Vec::new(),
}
}
pub fn add(&mut self, object: Rc<dyn Hittable>) {
self.objects.push(object);
}
}
impl Hittable for HittableList {
    /// Test `ray` against every object in the list, returning the hit with
    /// the smallest `t` inside `t_range` (or `None` if nothing is hit).
    /// The upper bound shrinks as closer hits are found, so later objects
    /// can only win by being nearer.
    fn hit(&self, ray: &Ray, t_range: Range<f64>) -> Option<HitRecord> {
        self.objects.iter().fold(None, |nearest, object| {
            let limit = nearest.as_ref().map_or(t_range.end, |hit: &HitRecord| hit.t);
            object.hit(ray, t_range.start..limit).or(nearest)
        })
    }
}
|
#![cfg(test)]
use crate::content::PostItem;
use crate::content::SeriesItem;
use crate::paths::AbsPath;
use crate::site::{Site, SiteOptions};
use crate::site_url::{HrefUrl, ImgUrl};
use crate::util::{load_templates, ParsedFile, ParsedFiles};
use camino::Utf8Path;
use camino::Utf8PathBuf;
use eyre::Result;
use hotwatch::Event;
use lazy_static::lazy_static;
use regex::Regex;
use std::fs;
use std::path::PathBuf;
use tempfile::TempDir;
use tera::Tera;
use thiserror::Error;
/// A fully built site rendered into temporary directories, used as a test
/// fixture. Both `TempDir`s are deleted on drop unless `persist`/`skip_clean`
/// is called.
pub struct TestSite {
    pub site: Site,
    pub output_dir: TempDir,
    pub input_dir: TempDir,
}
impl TestSite {
    /// Write `content` to a new file under the input dir and notify the site
    /// with a `Create` event, mimicking the file watcher.
    pub fn create_file(&mut self, file: &str, content: &str) -> Result<()> {
        let path = self.input_dir.path().join(file);
        fs::write(&path, content)?;
        self.site.file_changed(Event::Create(path))
    }
    /// Replace `from` with `to` inside an existing input file, then notify
    /// the site with a `Write` event.
    pub fn change_file(&mut self, file: &str, from: &str, to: &str) -> Result<()> {
        let path = self.input_dir.path().join(file);
        let content = fs::read_to_string(&path)?.replace(from, to);
        fs::write(&path, content)?;
        self.site.file_changed(Event::Write(path))
    }
    /// Rename an input file on disk and notify the site with a `Rename` event.
    pub fn rename_file(&mut self, from: &str, to: &str) -> Result<()> {
        let from = self.input_dir.path().join(from);
        let to = self.input_dir.path().join(to);
        fs::rename(&from, &to)?;
        self.site.file_changed(Event::Rename(from, to))
    }
    /// Look up a loaded post by its source file name.
    pub fn find_post<'a>(&'a self, file: &str) -> Option<&'a PostItem> {
        self.site.content.find_post(file)
    }
    /// Look up a loaded series by its source file name.
    pub fn find_series<'a>(&'a self, file: &str) -> Option<&'a SeriesItem> {
        self.site.content.find_series(file)
    }
    /// Absolute path of `file` inside the rendered output dir.
    pub fn output_path(&self, file: &str) -> AbsPath {
        AbsPath::from_path_buf(self.output_dir.path().join(file))
    }
    /// Read a rendered output file as a string (eyre error on failure).
    pub fn output_content(&self, file: &str) -> Result<String> {
        let path = self.output_path(file);
        let content = fs::read_to_string(&path)?;
        Ok(content)
    }
    /// Like `output_content` but surfaces the raw `std::io` error.
    pub fn read_file_to_string(&self, file: &str) -> std::io::Result<String> {
        fs::read_to_string(self.output_path(file))
    }
    /// Delete an input file and notify the site with a `Remove` event.
    pub fn remove_file(&mut self, file: &str) -> Result<()> {
        let path = self.input_dir.path().join(file);
        fs::remove_file(&path)?;
        self.site.file_changed(Event::Remove(path))
    }
    /// Persist the input and output dir, allowing us to inspect them
    /// after test has finished.
    #[allow(dead_code)]
    pub fn persist(self) -> (PathBuf, PathBuf) {
        // `into_path` disarms the TempDir cleanup-on-drop.
        let TestSite {
            output_dir,
            input_dir,
            ..
        } = self;
        (input_dir.into_path(), output_dir.into_path())
    }
    /// Persist both dirs and print where they live, for manual inspection.
    #[allow(dead_code)]
    pub fn skip_clean(self) {
        let (input_dir, output_dir) = self.persist();
        println!(
            "Skipping cleaning\n input: {}\n output: {}",
            input_dir.display(),
            output_dir.display()
        );
    }
}
/// Builder for a [`TestSite`]; currently the only knob is whether drafts
/// are included when the site content is loaded.
pub struct TestSiteBuilder {
    pub include_drafts: bool,
}
impl TestSiteBuilder {
    /// Copy the checked-in `test-site` into a fresh temp input dir, load and
    /// fully render the site into a temp output dir, and return the fixture.
    pub fn build(self) -> Result<TestSite> {
        let (output_dir, output_path) = AbsPath::new_tempdir()?;
        let (input_dir, input_path) = AbsPath::new_tempdir()?;
        // content_only: copy the contents of `test-site`, not the dir itself.
        fs_extra::dir::copy(
            "test-site",
            &input_path,
            &fs_extra::dir::CopyOptions {
                content_only: true,
                ..Default::default()
            },
        )?;
        let site = Site::load_content(SiteOptions {
            output_dir: output_path,
            input_dir: input_path,
            clear_output_dir: true,
            include_drafts: self.include_drafts,
        })?;
        site.render_all()?;
        // The TempDirs move into TestSite so they live as long as the fixture.
        Ok(TestSite {
            site,
            output_dir,
            input_dir,
        })
    }
}
/// Return the process-wide, lazily loaded Tera templates.
///
/// Panics if the templates fail to load or parse (acceptable in test code).
pub fn templates() -> &'static Tera {
    lazy_static! {
        static ref TEMPLATES: Tera = load_templates("templates/*.html").unwrap();
    }
    &TEMPLATES
}
/// Validation failures for one generated HTML file; the lifetime borrows
/// link/fragment strings from the parsed file being checked.
#[derive(Error, Debug)]
pub enum GeneratedFileError<'a> {
    /// File does not start with `<!DOCTYPE html>`.
    #[error("missing doctype")]
    MissingDocType,
    /// An unexpanded markdown reference link (`[text][ref]`) survived rendering.
    #[error("broken link `{0}`")]
    BrokenLink(&'a str),
    /// An internal href points at a page that was not generated.
    #[error("url not found `{0}`")]
    UrlNotFound(String),
    /// An internal img src points at a file missing from the output dir.
    #[error("img not found `{0}`")]
    LocalImgNotFound(String),
    /// A `#fragment` href has no matching anchor in the same file.
    #[error("fragment not found `{0}`")]
    LocalFragmentNotFound(&'a str),
    /// A fragment on a cross-page link has no matching anchor in the target.
    #[error("fragment `{0}` not found in `{1}`")]
    OtherFragmentNotFound(String, Utf8PathBuf),
}
/// Validate one generated HTML file against the complete set of generated
/// files: doctype present, no leftover markdown reference links, and every
/// internal link, fragment, and image resolvable.
pub fn check_file<'a>(
    file: &'a ParsedFile,
    files: &'a ParsedFiles,
    output_dir: &Utf8Path,
) -> Vec<GeneratedFileError<'a>> {
    lazy_static! {
        // Matches an unexpanded markdown reference link like `[text][ref]`.
        static ref BROKEN_LINK: Regex = Regex::new(r"\[[^[\]]]+]\[[^[\]]]*]").unwrap();
    }
    let mut errors = Vec::new();
    if !file.content.starts_with("<!DOCTYPE html>") {
        errors.push(GeneratedFileError::MissingDocType);
    }
    for bad_link in BROKEN_LINK.find_iter(&file.content) {
        // FIXME these gives false positives when they're inside a code block.
        // Maybe find start/end of all code blocks, and then only add them if they're outside?
        // For now just ignore the offending file.
        if !file
            .path
            .ends_with("rewriting_my_blog_in_rust_for_fun_and_profit/index.html")
        {
            errors.push(GeneratedFileError::BrokenLink(bad_link.as_str()));
        }
    }
    // Sorted for deterministic error ordering across runs.
    let mut links: Vec<&HrefUrl> = file.links.iter().collect();
    links.sort();
    for link in links {
        match link {
            HrefUrl::Internal(ref internal) => {
                let output_file = internal.output_file(output_dir);
                // Just skip image links for now, handle errors in img check below.
                // It's not -exactly- the same, but it's good enough for me as I don't
                // add image links manually.
                if internal.is_img() {
                    continue;
                }
                let external_ref = match files.get(&output_file) {
                    Some(file) => file,
                    None => {
                        errors.push(GeneratedFileError::UrlNotFound(internal.href().to_string()));
                        continue;
                    }
                };
                // If the link carries a fragment, the target page must
                // contain a matching anchor.
                if let Some(fragment) = internal.fragment() {
                    let fragment = format!("#{fragment}");
                    if !external_ref.fragments.contains(&fragment) {
                        errors.push(GeneratedFileError::OtherFragmentNotFound(
                            fragment,
                            external_ref.path.clone(),
                        ));
                    }
                }
            }
            // A bare `#fragment` must resolve within this same file.
            HrefUrl::Fragment(ref fragment) => {
                if !file.fragments.contains(fragment) {
                    errors.push(GeneratedFileError::LocalFragmentNotFound(fragment));
                }
            }
            // External links are not checked here (no network access).
            HrefUrl::External(_) => {}
        }
    }
    // Internal images only need to exist on disk in the output dir.
    let mut imgs: Vec<&ImgUrl> = file.imgs.iter().collect();
    imgs.sort();
    for img in imgs {
        match img {
            ImgUrl::Internal(ref internal) => {
                let output_file = internal.output_file(output_dir);
                if !output_file.exists() {
                    errors.push(GeneratedFileError::LocalImgNotFound(
                        internal.href().to_string(),
                    ));
                }
            }
            ImgUrl::External(_) => {}
        }
    }
    errors
}
|
use crate::error::Result;
use crate::shared::{FileSystemResource, InstallActionKind, Name, PackageKind, Platform};
use crate::APP_NAME;
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fs::read_dir;
use std::io::Write;
use std::path::PathBuf;
use std::str::FromStr;
// ------------------------------------------------------------------------------------------------
// Public Types
// ------------------------------------------------------------------------------------------------
///
/// A Package is the unit of installation, provided by a configured `Installer`. It therefore has
/// a name, platform match, and package kind.
///
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct Package {
    // Validated during deserialization by `Name::deserialize`.
    #[serde(deserialize_with = "Name::deserialize")]
    name: Name,
    // `None` means the package applies to every platform.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    platform: Option<Platform>,
    // Omitted from the serialized form when it is the default kind.
    #[serde(default, skip_serializing_if = "is_default")]
    kind: PackageKind,
}
///
/// The kinds of actions a package set can perform; either a list of packages to install, *or* a
/// map of actions to script strings.
///
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
#[serde(deny_unknown_fields, untagged, rename_all = "kebab-case")]
pub enum PackageSetActions {
    /// Install a list of packages via the configured installers.
    Packages {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        packages: Vec<Package>,
    },
    /// Run a script string per install action kind instead of packages.
    Scripts {
        #[serde(default, skip_serializing_if = "HashMap::is_empty")]
        scripts: HashMap<InstallActionKind, String>,
    },
}
///
/// A Package set brings together a set of package actions, with additional actions such as linking
/// files, adding an env-file, and run before/after script strings.
///
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub struct PackageSet {
    // Not serialized; populated from the file location by `Readable::read`.
    #[serde(skip)]
    path: PathBuf,
    #[serde(deserialize_with = "Name::deserialize")]
    name: Name,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    description: Option<String>,
    // `None` means the set applies to every platform.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    platform: Option<Platform>,
    #[serde(default, skip_serializing_if = "is_default")]
    optional: bool,
    // Environment variables made available to script strings.
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    env_vars: HashMap<String, String>,
    // Script string executed before any other action.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    run_before: Option<String>,
    #[serde(default, skip_serializing_if = "PackageSetActions::is_empty")]
    actions: PackageSetActions,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    env_file: Option<String>,
    // Source file (relative to the set's dir) -> link target.
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    link_files: HashMap<String, String>,
    // Script string executed after all other actions.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    run_after: Option<String>,
}
///
/// Package set groups are simply directories in the package repository.
///
#[derive(Clone, Debug)]
pub struct PackageSetGroup {
    // The group's directory; its file name doubles as the group name.
    path: PathBuf,
    // Package sets found in the directory, sorted by name after reading.
    package_sets: Vec<PackageSet>,
}
///
/// The package repository is a directory that contains package groups, which in turn contain
/// package sets.
///
#[derive(Clone, Debug)]
pub struct PackageRepository {
    // Root directory of the repository on disk.
    path: PathBuf,
    // Groups discovered under `path`.
    package_set_groups: Vec<PackageSetGroup>,
}
///
/// The name of the repository directory.
///
pub const REPOSITORY_DIR: &str = "repository";
///
/// A trait implemented by things read from the file system.
///
pub trait Readable {
    /// Read and construct an instance from the file or directory at `path`.
    fn read(path: &PathBuf) -> Result<Self>
    where
        Self: Sized;
}
///
/// A trait implemented by things that may be serialized to Writers.
///
pub trait Writeable<W: Write>: Serialize {
    /// Serialize `self` to the writer `w` as YAML.
    fn write(&self, w: &mut W) -> Result<()> {
        serde_yaml::to_writer(w, self)?;
        Ok(())
    }
}
// ------------------------------------------------------------------------------------------------
// Implementations
// ------------------------------------------------------------------------------------------------
impl Default for PackageSetActions {
fn default() -> Self {
Self::Packages {
packages: Default::default(),
}
}
}
impl From<Vec<Package>> for PackageSetActions {
fn from(packages: Vec<Package>) -> Self {
Self::Packages { packages }
}
}
impl PackageSetActions {
pub fn is_empty(&self) -> bool {
match self {
PackageSetActions::Packages { packages } => packages.is_empty(),
PackageSetActions::Scripts { scripts } => scripts.is_empty(),
}
}
}
// ------------------------------------------------------------------------------------------------
impl<W: Write> Writeable<W> for Package {}
impl Package {
    /// Construct a new package instance.
    pub fn new(name: Name, platform: Option<Platform>, kind: PackageKind) -> Self {
        Self {
            name,
            platform,
            kind,
        }
    }
    /// Return this package's name.
    pub fn name(&self) -> &Name {
        &self.name
    }
    /// Return `true` if this package is intended for the current platform, else `false`.
    pub fn is_platform_match(&self) -> bool {
        Platform::CURRENT.is_match(&self.platform)
    }
    /// Return the platform this package is intended for, `None` implies all.
    pub fn platform(&self) -> Platform {
        // Cloning the Option clones the inner Platform directly; the former
        // `as_ref().cloned()` was a redundant two-step equivalent.
        self.platform.clone().unwrap_or_default()
    }
    /// Return the kind of installer required for this package.
    pub fn kind(&self) -> &PackageKind {
        &self.kind
    }
}
// ------------------------------------------------------------------------------------------------
impl Readable for PackageSet {
    /// Read and deserialize a package set from the YAML file at `path`,
    /// then record `path` on the result (the field is `#[serde(skip)]`).
    fn read(path: &PathBuf) -> Result<Self> {
        debug!("PackageSet::read: reading package set file {:?}", path);
        let value = std::fs::read_to_string(path)?;
        let mut result: PackageSet = serde_yaml::from_str(&value)?;
        result.path = path.clone();
        trace!("read package_set: {:?}", result);
        Ok(result)
    }
}
impl<W: Write> Writeable<W> for PackageSet {}
impl PackageSet {
    /// Return this package set's name.
    pub fn name(&self) -> &Name {
        &self.name
    }
    /// Return the path from which this package set was loaded.
    pub fn path(&self) -> &PathBuf {
        &self.path
    }
    /// Return the description of this package set, if one has been provided.
    pub fn description(&self) -> &Option<String> {
        &self.description
    }
    /// Return `true` if this package is intended for the current platform, else `false`.
    pub fn is_platform_match(&self) -> bool {
        Platform::CURRENT.is_match(&self.platform)
    }
    /// Return the platform this package is intended for, `None` implies all.
    pub fn platform(&self) -> Platform {
        // Cloning the Option clones the inner Platform directly; the former
        // `as_ref().cloned()` was a redundant two-step equivalent.
        self.platform.clone().unwrap_or_default()
    }
    /// Return `true` if this package set is optional, else `false`.
    pub fn is_optional(&self) -> bool {
        self.optional
    }
    /// Return any environment variables the package set has declared for use in script strings.
    pub fn env_vars(&self) -> &HashMap<String, String> {
        &self.env_vars
    }
    /// Return `true` if this package set has any actions, either package or script string.
    pub fn has_actions(&self) -> bool {
        // Delegate to `PackageSetActions::is_empty` instead of duplicating
        // its match over the two variants.
        !self.actions.is_empty()
    }
    /// Return the actions required by this package set.
    pub fn actions(&self) -> &PackageSetActions {
        &self.actions
    }
    /// Return all the packages to install for this package set, or `None` if script strings have
    /// been provided instead.
    pub fn packages(&self) -> Option<impl Iterator<Item = &Package>> {
        match &self.actions {
            PackageSetActions::Packages { packages } => Some(packages.iter()),
            PackageSetActions::Scripts { .. } => None,
        }
    }
    /// Return all the script strings to execute for this package set, or `None` if packages have
    /// been provided instead.
    pub fn scripts(&self) -> Option<&HashMap<InstallActionKind, String>> {
        match &self.actions {
            PackageSetActions::Packages { .. } => None,
            PackageSetActions::Scripts { scripts } => Some(scripts),
        }
    }
    /// Return the name of an environment file to link, if one was provided.
    pub fn env_file(&self) -> &Option<String> {
        &self.env_file
    }
    /// Return the path to the environment file to link, if one was provided.
    pub fn env_file_path(&self) -> Option<PathBuf> {
        self.env_file.as_ref().map(PathBuf::from)
    }
    /// Return a map of file names to link.
    pub fn link_files(&self) -> &HashMap<String, String> {
        &self.link_files
    }
    /// Return a map of file paths to link, resolving sources against this
    /// set's directory.
    pub fn link_file_paths(&self) -> Vec<(PathBuf, PathBuf)> {
        self.link_files
            .iter()
            .map(|(src, tgt)| (self.path.join(src), PathBuf::from(tgt)))
            .collect()
    }
    /// Return the script string to run before any other action, if one was provided.
    pub fn run_before(&self) -> &Option<String> {
        &self.run_before
    }
    /// Return the script string to run after any other action, if one was provided.
    pub fn run_after(&self) -> &Option<String> {
        &self.run_after
    }
}
// ------------------------------------------------------------------------------------------------
lazy_static! {
    // Splits a group directory name into an optional numeric ordering prefix
    // (capture 1, e.g. "10-") and the display part (capture 2).
    static ref PSG_NAME: Regex = Regex::new(r#"^([0-9]+\-)?(.*)$"#).unwrap();
}
impl Readable for PackageSetGroup {
    /// Read every package set found in the group directory `path`.
    ///
    /// Two layouts are accepted: a `*.yml` file directly in the directory,
    /// or a subdirectory containing a `package-set.yml` file. Anything else
    /// is ignored. The resulting sets are sorted by name.
    fn read(path: &PathBuf) -> Result<Self> {
        debug!("PackageSetGroup::read: reading dir {:?}", path);
        let mut group = PackageSetGroup {
            path: path.clone(),
            package_sets: Default::default(),
        };
        let yaml_extension = OsStr::new("yml");
        for dir_entry in read_dir(path)? {
            let set_path = dir_entry?.path();
            // Option 1. Any file in this directory, "*.yml" that is package-set itself.
            if set_path.is_file() && set_path.extension() == Some(yaml_extension) {
                // `Vec::push` returns `()`; binding it with `let _ =` was noise.
                group.package_sets.push(PackageSet::read(&set_path)?);
            }
            // Option 2. A directory, which contains a file named "package-set.yml"
            else if set_path.is_dir() {
                let set_path = set_path.join("package-set.yml");
                if set_path.is_file() {
                    group.package_sets.push(PackageSet::read(&set_path)?);
                }
            } else {
                debug!("PackageSetGroup::read: ignoring {:?}", set_path);
            }
        }
        group.package_sets.sort_by_key(|ps| ps.name().clone());
        Ok(group)
    }
}
impl PackageSetGroup {
    /// Return the name of this package set group, this is derived from the path of the group's
    /// directory.
    ///
    /// Panics if the directory name is not a valid `Name`.
    pub fn name(&self) -> Name {
        Name::from_str(&*self.path.file_name().unwrap().to_string_lossy())
            .expect("Invalid name format!")
    }
    /// Return a display name for this package set group, this is derived from the path of the
    /// group's directory with any numeric prefix removed and any '-' characters replaced with
    /// spaces.
    pub fn display_name(&self) -> String {
        let name = self.name().to_string();
        // Capture 2 of PSG_NAME is the name without any numeric ordering
        // prefix; fall back to the full name rather than unwrapping.
        let stripped = PSG_NAME
            .captures(&name)
            .and_then(|captures| captures.get(2))
            .map(|part| part.as_str().to_string())
            .unwrap_or(name);
        stripped.replace('-', " ")
    }
    /// Return the path to this package set group.
    pub fn path(&self) -> &PathBuf {
        &self.path
    }
    /// Return an iterator over all the package sets in this group.
    pub fn package_sets(&self) -> impl Iterator<Item = &PackageSet> {
        self.package_sets.iter()
    }
    /// Return `true` if this group has a package set named `name`, else `false`.
    pub fn has_package_set(&self, name: &Name) -> bool {
        self.package_set(name).is_some()
    }
    /// Return the package set named `name`, if one is present.
    pub fn package_set(&self, name: &Name) -> Option<&PackageSet> {
        // Use the accessor for consistency with the rest of this module.
        self.package_sets.iter().find(|ps| ps.name() == name)
    }
}
// ------------------------------------------------------------------------------------------------
lazy_static! {
    // Directory names inside the repository that are never treated as package
    // set groups (VCS metadata and the repo's own config/local content dirs).
    static ref RESERVED_REPO_NAMES: Vec<&'static str> = vec![".git", ".config", ".local"];
}
impl FileSystemResource for PackageRepository {
    /// Default repository location: the per-user config dir for this app plus
    /// the repository sub-directory.
    fn default_path() -> PathBuf {
        xdirs::config_dir_for(APP_NAME)
            .unwrap()
            .join(REPOSITORY_DIR)
    }
    /// Open a repository rooted at `repository_path`, reading every
    /// non-reserved sub-directory as a `PackageSetGroup`.
    fn open_from(repository_path: PathBuf) -> Result<Self> {
        info!(
            "PackageRepository::actual_open: reading all package data from {:?}",
            &repository_path
        );
        let mut package_set_groups: Vec<PackageSetGroup> = Default::default();
        for dir_entry in read_dir(&repository_path)? {
            let group_path = dir_entry?.path();
            if group_path.is_dir() {
                trace!(
                    "PackageRepository::actual_open: found possible group dir {:?} -> {:?}",
                    &group_path,
                    group_path.file_name(),
                );
                // NOTE(review): `to_str().unwrap()` panics on non-UTF-8 directory
                // names — consider `to_string_lossy` if such names can occur.
                let dir_name = group_path.file_name().unwrap().to_str().unwrap();
                if RESERVED_REPO_NAMES.contains(&dir_name) {
                    debug!(
                        "PackageRepository::actual_open: some files are always ignored ({:?}).",
                        group_path
                    );
                } else {
                    package_set_groups.push(PackageSetGroup::read(&group_path)?);
                }
            }
        }
        // Stable, name-sorted group ordering.
        package_set_groups.sort_by_key(|psg| psg.name());
        Ok(PackageRepository {
            path: repository_path,
            package_set_groups,
        })
    }
}
impl PackageRepository {
    /// Path of the configuration directory kept inside the repository.
    pub fn default_config_path() -> PathBuf {
        Self::default_path().join(".config")
    }
    /// Path of the local-content directory kept inside the repository.
    pub fn default_local_path() -> PathBuf {
        Self::default_path().join(".local")
    }
    /// Root directory of this repository.
    pub fn path(&self) -> &PathBuf {
        &self.path
    }
    /// `true` when the repository contains no package set groups at all.
    pub fn is_empty(&self) -> bool {
        self.package_set_groups.is_empty()
    }
    /// Iterate over every package set group in the repository.
    pub fn groups(&self) -> impl Iterator<Item = &PackageSetGroup> {
        self.package_set_groups.iter()
    }
    /// `true` when a group named `name` exists in this repository.
    pub fn has_group(&self, name: &Name) -> bool {
        self.package_set_groups
            .iter()
            .any(|group| &group.name() == name)
    }
    /// Look up the group named `name`, if any.
    pub fn group(&self, name: &Name) -> Option<&PackageSetGroup> {
        self.package_set_groups
            .iter()
            .find(|group| &group.name() == name)
    }
}
// ------------------------------------------------------------------------------------------------
// Private Functions
// ------------------------------------------------------------------------------------------------
/// `true` when `t` equals its type's `Default` value; used by serde
/// `skip_serializing_if` style predicates.
fn is_default<T: Default + PartialEq>(t: &T) -> bool {
    *t == T::default()
}
pub mod builders {
use crate::error::{ErrorKind, Result};
use crate::shared::builders::Builder;
use crate::shared::packages::PackageSetActions;
use crate::shared::{
InstallActionKind, Name, Package, PackageKind, PackageSet, PackageSetGroup, Platform,
};
use std::collections::HashMap;
use std::path::PathBuf;
// ---------------------------------------------------------------------------------------------
// Public Types
// --------------------------------------------------------------------------------------------
    ///
    /// Provides a fluent interface for programmatic creation of [`Package`](../struct.Package.html)
    /// instances.
    ///
    #[derive(Clone, Debug)]
    pub struct PackageBuilder(Package);
    ///
    /// Provides a fluent interface for programmatic creation of
    /// [`PackageSet`](../struct.PackageSet.html) instances.
    ///
    #[derive(Clone, Debug)]
    pub struct PackageSetBuilder(PackageSet);
    ///
    /// Provides a fluent interface for programmatic creation of
    /// [`PackageSetGroup`](../struct.PackageSetGroup.html) instances.
    ///
    #[derive(Clone, Debug)]
    pub struct PackageSetGroupBuilder(PackageSetGroup);
// --------------------------------------------------------------------------------------------
// Implementations
// --------------------------------------------------------------------------------------------
    // Wrap an existing `Package` for further modification.
    impl From<Package> for PackageBuilder {
        fn from(package: Package) -> Self {
            Self(package)
        }
    }
    // Unwrap the built `Package`, consuming the builder.
    impl From<PackageBuilder> for Package {
        fn from(builder: PackageBuilder) -> Self {
            builder.0
        }
    }
    impl Builder for PackageBuilder {
        type Inner = Package;
        // Clone so the builder can keep being used after `build`.
        fn build(&mut self) -> Self::Inner {
            self.0.clone()
        }
    }
impl PackageBuilder {
/// Create a new instance, all instances must be named.
pub fn named(name: Name) -> Self {
Self(Package {
name,
platform: None,
kind: Default::default(),
})
}
/// Adds a platform constraint, this package is only installed on the provided platform.
pub fn for_platform(&mut self, platform: Platform) -> &mut Self {
self.0.platform = Some(platform);
self
}
/// Adds a platform constraint, this package is only installed on macos.
pub fn for_macos_only(&mut self) -> &mut Self {
self.for_platform(Platform::Macos)
}
/// Adds a platform constraint, this package is only installed on linux.
pub fn for_linux_only(&mut self) -> &mut Self {
self.for_platform(Platform::Macos)
}
/// This package has no platform constraint, it should install anywhere.
pub fn for_any_platform(&mut self) -> &mut Self {
self.0.platform = None;
self
}
/// Sets the kind of package installer to use.
pub fn of_kind(&mut self, kind: PackageKind) -> &mut Self {
self.0.kind = kind;
self
}
/// This package uses the platform's default installer
pub fn using_default_installer(&mut self) -> &mut Self {
self.of_kind(PackageKind::Default)
}
/// This package uses the platform's application installer
pub fn using_application_installer(&mut self) -> &mut Self {
self.of_kind(PackageKind::Application)
}
/// This package uses the specified language's installer
pub fn using_language_installer(&mut self, language: &Name) -> &mut Self {
self.of_kind(PackageKind::Language(language.clone()))
}
}
// --------------------------------------------------------------------------------------------
    // Wrap an existing `PackageSet` for further modification.
    impl From<PackageSet> for PackageSetBuilder {
        fn from(package: PackageSet) -> Self {
            Self(package)
        }
    }
    // Unwrap the built `PackageSet`, consuming the builder.
    impl From<PackageSetBuilder> for PackageSet {
        fn from(builder: PackageSetBuilder) -> Self {
            builder.0
        }
    }
    impl Builder for PackageSetBuilder {
        type Inner = PackageSet;
        // Clone so the builder can keep being used after `build`.
        fn build(&mut self) -> Self::Inner {
            self.0.clone()
        }
    }
impl PackageSetBuilder {
/// Create a new instance, all instances must be named.
pub fn named(name: Name) -> Self {
Self(PackageSet {
path: Default::default(),
name,
description: None,
platform: None,
optional: false,
env_vars: Default::default(),
run_before: None,
actions: Default::default(),
env_file: None,
link_files: Default::default(),
run_after: None,
})
}
/// Set the path that this package set was loaded from.
pub fn path(&mut self, path: PathBuf) -> &mut Self {
self.0.path = path;
self
}
/// Add a description of this package set.
pub fn description(&mut self, description: &str) -> &mut Self {
self.0.description = Some(description.to_string());
self
}
/// Adds a platform constraint, this package is only installed on the provided platform.
pub fn for_platform(&mut self, platform: Platform) -> &mut Self {
self.0.platform = Some(platform);
self
}
/// Adds a platform constraint, this package is only installed on macos.
pub fn for_macos_only(&mut self) -> &mut Self {
self.for_platform(Platform::Macos)
}
/// Adds a platform constraint, this package is only installed on linux.
pub fn for_linux_only(&mut self) -> &mut Self {
self.for_platform(Platform::Macos)
}
/// This package has no platform constraint, it should install anywhere.
pub fn for_any_platform(&mut self) -> &mut Self {
self.0.platform = None;
self
}
/// Make this an optional package set.
pub fn optional(&mut self) -> &mut Self {
self.0.optional = true;
self
}
/// Make this a required (the default) package set.
pub fn required(&mut self) -> &mut Self {
self.0.optional = false;
self
}
/// Set the key/values to use as additional tool/environment variables.
pub fn env_vars(&mut self, env_vars: HashMap<String, String>) -> &mut Self {
self.0.env_vars = env_vars;
self
}
/// Add a new tool/environment variable.
pub fn env_var(&mut self, key: &str, value: &str) -> &mut Self {
let _ = self.0.env_vars.insert(key.to_string(), value.to_string());
self
}
/// Add a run-before script string.
pub fn run_before(&mut self, script_string: &str) -> &mut Self {
self.0.run_before = Some(script_string.to_string());
self
}
/// Set the set of actions, whether package or script.
pub fn actions(&mut self, actions: PackageSetActions) -> &mut Self {
self.0.actions = actions;
self
}
/// This sets the internal actions to expect packages, not script strings.
pub fn with_package_actions(&mut self) -> &mut Self {
self.actions(PackageSetActions::Packages {
packages: Default::default(),
})
}
/// Set the list of packages, this is not additive.
pub fn package_actions(&mut self, packages: &[Package]) -> &mut Self {
self.actions(PackageSetActions::Packages {
packages: packages.to_vec(),
})
}
/// Add a package to the list of packages, this is additive.
pub fn add_package_action(&mut self, package: Package) -> Result<&mut Self> {
match &mut self.0.actions {
PackageSetActions::Packages { packages } => {
packages.push(package);
Ok(self)
}
PackageSetActions::Scripts { .. } => Err(ErrorKind::InvalidBuilderState.into()),
}
}
/// This sets the internal actions to expect script strings, not packages.
pub fn with_script_actions(&mut self) -> &mut Self {
self.actions(PackageSetActions::Scripts {
scripts: Default::default(),
})
}
/// Set the map of script strings from the array of tuples, this is not additive.
pub fn script_actions_list(
&mut self,
scripts: &[(InstallActionKind, String)],
) -> &mut Self {
self.script_actions(scripts.iter().cloned().collect())
}
/// Set the map of script strings, this is not additive.
pub fn script_actions(&mut self, scripts: HashMap<InstallActionKind, String>) -> &mut Self {
self.actions(PackageSetActions::Scripts { scripts })
}
/// Add a specific script string for the given action.
pub fn add_script_action(
&mut self,
kind: InstallActionKind,
script_string: &str,
) -> Result<&mut Self> {
match &mut self.0.actions {
PackageSetActions::Packages { .. } => Err(ErrorKind::InvalidBuilderState.into()),
PackageSetActions::Scripts { scripts } => {
let _ = scripts.insert(kind, script_string.to_string());
Ok(self)
}
}
}
/// Add a specific script string for the install action.
pub fn add_install_script_action(&mut self, script_string: &str) -> Result<&mut Self> {
self.add_script_action(InstallActionKind::Install, script_string)
}
/// Add a specific script string for the update action.
pub fn add_update_script_action(&mut self, script_string: &str) -> Result<&mut Self> {
self.add_script_action(InstallActionKind::Update, script_string)
}
/// Add a specific script string for the uninstall action.
pub fn add_uninstall_script_action(&mut self, script_string: &str) -> Result<&mut Self> {
self.add_script_action(InstallActionKind::Uninstall, script_string)
}
/// Add a specific script string for the link-files action.
pub fn add_link_files_script_action(&mut self, script_string: &str) -> Result<&mut Self> {
self.add_script_action(InstallActionKind::LinkFiles, script_string)
}
/// Set the name of a file to be treated as an 'env-file'.
pub fn env_file(&mut self, file_name: &str) -> &mut Self {
self.0.env_file = Some(file_name.to_string());
self
}
/// Set the map of source to target link files.
pub fn link_files(&mut self, link_files: HashMap<String, String>) -> &mut Self {
self.0.link_files = link_files;
self
}
/// Add a source and target to the map of link files
pub fn add_link_file(&mut self, repo_file_name: &str, local_fs_name: &str) -> &mut Self {
let _ = self
.0
.link_files
.insert(repo_file_name.to_string(), local_fs_name.to_string());
self
}
/// Add a run-after script string.
pub fn run_after(&mut self, script_string: &str) -> &mut Self {
self.0.run_after = Some(script_string.to_string());
self
}
}
// --------------------------------------------------------------------------------------------
    // A builder over an empty group with a default (empty) path.
    impl Default for PackageSetGroupBuilder {
        fn default() -> Self {
            Self(PackageSetGroup {
                path: Default::default(),
                package_sets: Default::default(),
            })
        }
    }
    // Wrap an existing `PackageSetGroup` for further modification.
    impl From<PackageSetGroup> for PackageSetGroupBuilder {
        fn from(package: PackageSetGroup) -> Self {
            Self(package)
        }
    }
    // Unwrap the built `PackageSetGroup`, consuming the builder.
    impl From<PackageSetGroupBuilder> for PackageSetGroup {
        fn from(builder: PackageSetGroupBuilder) -> Self {
            builder.0
        }
    }
    impl Builder for PackageSetGroupBuilder {
        type Inner = PackageSetGroup;
        // Clone so the builder can keep being used after `build`.
        fn build(&mut self) -> Self::Inner {
            self.0.clone()
        }
    }
impl PackageSetGroupBuilder {
/// Create a new instance with the given source path.
pub fn new_in(path: PathBuf) -> Self {
Self(PackageSetGroup {
path,
package_sets: vec![],
})
}
/// Add all package sets to the group, this is not additive.
pub fn package_sets(&mut self, package_sets: &[PackageSet]) {
self.0.package_sets = package_sets.to_vec()
}
/// Add a package set to the group, this is additive.
pub fn add_package_set(&mut self, package_set: PackageSet) {
self.0.package_sets.push(package_set)
}
}
}
|
/*
https://projecteuler.net
Starting with 1 and spiralling anticlockwise in the following way, a
square spiral with side length 7 is formed.
37 36 35 34 33 32 31
38 17 16 15 14 13 30
39 18 5 4 3 12 29
40 19 6 1 2 11 28
41 20 7 8 9 10 27
42 21 22 23 24 25 26
43 44 45 46 47 48 49
It is interesting to note that the odd squares lie along the bottom
right diagonal, but what is more interesting is that 8 out of the 13
numbers lying along both diagonals are prime; that is, a ratio of 8/13
≈ 62%.
If one complete new layer is wrapped around the spiral above, a square
spiral with side length 9 will be formed. If this process is
continued, what is the side length of the square spiral for which the
ratio of primes along both diagonals first falls below 10%?
NOTES:
First cut:
Solution: 26241
Elapsed time: 214,633,819 us
*/
/// Project Euler 58: grow the number spiral one ring at a time, tracking how
/// many diagonal (corner) values are prime, and return the side length of the
/// first spiral whose diagonal prime ratio drops below 10%.
fn solve() -> u64 {
    let mut count = 0_u64; // count of corner primes
    let mut total = 1_u64; // total count of corners (includes the center)
    let mut last = 1_u64; // initial value
    let mut sz = 0; // this is the size to add to get to the next corner
    loop {
        // Increment the corner size
        sz += 2;
        // iterate over the corners
        for _ in 0..4 { // This could be unwrapped?
            last += sz;
            if primes::is_prime(last) {
                count += 1;
            }
        }
        total += 4;
        // find the fraction of primes
        let fraction = count as f64 / total as f64;
        //println!("{} / {} = {}", count, total, fraction);
        if fraction < 0.1 {
            // After k rings sz == 2k, so the current side length is sz + 1.
            return sz + 1;
        }
    }
}
/// Run the solver, then print the solution and the elapsed wall-clock time in
/// microseconds, formatted with ',' as a thousands separator.
fn main() {
    let start_time = std::time::Instant::now();
    let sol = solve();
    let elapsed = start_time.elapsed().as_micros();
    println!("\nSolution: {}", sol);
    // Build the comma-grouped representation from the least significant
    // three-digit group outward.
    let mut remain = elapsed;
    let mut s = String::new();
    if remain == 0 {
        s.push('0');
    }
    while remain > 0 {
        let group = remain % 1000;
        remain /= 1000;
        if remain > 0 {
            // Interior groups are zero-padded to three digits.
            s = format!(",{:03}", group) + &s;
        } else {
            // The leading group carries no padding.
            s = format!("{}", group) + &s;
        }
    }
    // BUG FIX: output previously read "Elasped time".
    println!("Elapsed time: {} us", s);
}
|
use crate::{
qjs,
system::{create_dir_all, Path},
Artifact, BoxedFuture, Diagnostics, Input, Mut, Output, ParallelSend, ParallelSync, Ref,
Result, Set, Time, WeakArtifact, WeakSet,
};
use derive_deref::Deref;
use either::Either;
use futures::future::FutureExt;
use serde::Serialize;
use std::{
fmt::{Display, Formatter, Result as FmtResult},
hash::{Hash, Hasher},
iter::once,
};
/// The unique identifier of rule
/// (derived from a hash of the rule's outputs; see `Rule::from_api`).
pub type RuleId = u64;
/// The rule processing state
#[derive(Clone, Copy, Debug, Serialize)]
#[serde(rename_all = "lowercase")]
#[repr(u32)]
pub enum RuleState {
    Processed,
    Scheduled,
    Processing,
}
// A freshly-created rule starts in the `Processed` state.
impl Default for RuleState {
    fn default() -> Self {
        Self::Processed
    }
}
// Lowercase names, matching the serde `rename_all = "lowercase"` form.
impl Display for RuleState {
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        match self {
            RuleState::Processed => "processed",
            RuleState::Scheduled => "scheduled",
            RuleState::Processing => "processing",
        }
        .fmt(fmt)
    }
}
/// The builder interface
pub trait RuleApi: ParallelSend + ParallelSync {
    /// Get the list of inputs
    fn inputs(&self) -> Vec<Artifact<Input>>;
    /// Get the list of outputs
    fn outputs(&self) -> Vec<Artifact<Output>>;
    /// Run rule
    fn invoke(self: Ref<Self>) -> BoxedFuture<Result<Diagnostics>>;
}
/// A shared handle to a rule; cloning is cheap (`Ref` clone).
#[derive(Clone)]
pub struct Rule(Ref<Internal>);
// Shared rule data: identity, mutable state/diagnostics, and the API object.
struct Internal {
    id: RuleId,
    state: Mut<RuleState>,
    diagnostics: Mut<Diagnostics>,
    api: Ref<dyn RuleApi>,
}
// Equality and hashing are by rule id only, so rules can live in hash sets.
impl PartialEq for Rule {
    fn eq(&self, other: &Self) -> bool {
        self.0.id == other.0.id
    }
}
impl Eq for Rule {}
impl Hash for Rule {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.id.hash(state);
    }
}
// Renders as "Rule #<id>".
impl Display for Rule {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        "Rule #".fmt(f)?;
        self.0.id.fmt(f)
    }
}
impl Rule {
    /// Wrap a `RuleApi` object in a `Rule` handle. The rule id is an FxHash
    /// over the rule's outputs, so two rules producing the same outputs share
    /// an identity.
    pub fn from_api(api: Ref<dyn RuleApi>) -> Self {
        let mut hasher = fxhash::FxHasher::default();
        for output in api.outputs() {
            output.hash(&mut hasher);
        }
        let id = hasher.finish();
        let state = Mut::new(RuleState::default());
        let diagnostics = Mut::new(Diagnostics::default());
        Self(Ref::new(Internal {
            id,
            api,
            state,
            diagnostics,
        }))
    }
    /// The rule's identity (hash of its outputs).
    pub fn id(&self) -> RuleId {
        self.0.id
    }
    /// Current processing state (copied out under a read lock).
    pub fn state(&self) -> RuleState {
        *self.0.state.read()
    }
    /// `true` when every input artifact is up to date (or there are none).
    pub fn ready_inputs(&self) -> bool {
        let inputs = self.0.api.inputs();
        inputs.is_empty() || !inputs.into_iter().any(|input| input.outdated())
    }
    /// Mark the rule as scheduled for processing.
    pub fn schedule(&self) {
        *self.0.state.write() = RuleState::Scheduled;
    }
    /// Run the rule: create output directories, invoke the API, record
    /// diagnostics, and stamp outputs with the completion time.
    ///
    /// Errors if directory creation or invocation fails, or if the collected
    /// diagnostics report a failure.
    pub async fn process(&self) -> Result<()> {
        {
            *self.0.state.write() = RuleState::Processing;
        }
        // Ensure the parent directory of every output exists before invoking.
        for output in self.0.api.outputs() {
            if let Some(dir) = Path::new(output.name()).parent() {
                if !dir.is_dir().await {
                    create_dir_all(dir).await?;
                }
            }
        }
        let diagnostics = self.0.api.clone().invoke().await?;
        let is_failed = diagnostics.is_failed();
        {
            *self.0.diagnostics.write() = diagnostics;
        }
        if is_failed {
            // `format!` with no interpolation was pointless (clippy
            // `useless_format`); a plain owned String is identical in type.
            Err("Failed processing rule".to_string())?;
        }
        // Stamp all outputs with a single shared completion time.
        let time = Time::now();
        for output in self.0.api.outputs() {
            output.set_time(time);
        }
        {
            *self.0.state.write() = RuleState::Processed;
        }
        Ok(())
    }
}
// Backing data for a rule with no action: just inputs and (weakly-held) outputs.
pub struct NoInternal {
    inputs: Mut<Set<Artifact<Input>>>,
    outputs: WeakSet<WeakArtifact<Output>>,
}
impl Drop for NoInternal {
    fn drop(&mut self) {
        // Trace drops to help debug artifact/rule lifetime issues.
        log::debug!("NoRule::drop");
    }
}
/// A rule that performs no work; `invoke` yields empty diagnostics.
#[derive(Clone, Deref)]
#[repr(transparent)]
pub struct NoRule(Ref<NoInternal>);
impl Display for NoRule {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        "NoRule".fmt(f)
    }
}
impl NoRule {
    // Re-wrap this rule's internals as a generic `Rule` handle.
    fn to_dyn(&self) -> Rule {
        Rule::from_api(self.0.clone())
    }
    /// Build a no-op rule and register it as the producing rule of each output.
    pub fn new_raw(inputs: Set<Artifact<Input>>, outputs: WeakSet<WeakArtifact<Output>>) -> Self {
        let inputs = Mut::new(inputs);
        let this = Self(Ref::new(NoInternal { inputs, outputs }));
        log::debug!("NoRule::new");
        {
            // Each output artifact holds a back-reference to its rule.
            let rule = this.to_dyn();
            for output in &this.0.outputs {
                output.set_rule(rule.clone());
            }
        }
        this
    }
}
impl RuleApi for NoInternal {
    fn inputs(&self) -> Vec<Artifact<Input>> {
        self.inputs.read().iter().cloned().collect()
    }
    fn outputs(&self) -> Vec<Artifact<Output>> {
        // Upgrades the weak set; artifacts that were dropped are skipped.
        self.outputs.iter().collect()
    }
    fn invoke(self: Ref<Self>) -> BoxedFuture<Result<Diagnostics>> {
        // No work to do: immediately succeed with empty diagnostics.
        async { Ok(Diagnostics::default()) }.boxed_local()
    }
}
// Backing data for a rule whose action is a persisted JavaScript function.
#[derive(qjs::HasRefs)]
pub struct JsInternal {
    inputs: Mut<Set<Artifact<Input>>>,
    outputs: WeakSet<WeakArtifact<Output>>,
    #[quickjs(has_refs)]
    function: qjs::Persistent<qjs::Function<'static>>,
    context: qjs::Context,
}
// SAFETY(review): these assert that the persisted function and context may be
// moved/shared across threads under the "parallel" feature — confirm against
// the qjs crate's threading guarantees; nothing in this file proves it.
#[cfg(feature = "parallel")]
unsafe impl Send for JsInternal {}
#[cfg(feature = "parallel")]
unsafe impl Sync for JsInternal {}
impl Drop for JsInternal {
    fn drop(&mut self) {
        // Trace drops to help debug artifact/rule lifetime issues.
        log::debug!("JsRule::drop");
    }
}
/// A rule backed by a JavaScript function executed in a quickjs context.
#[derive(Clone, Deref, qjs::HasRefs)]
#[repr(transparent)]
pub struct JsRule(#[quickjs(has_refs)] Ref<JsInternal>);
impl Display for JsRule {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        "JsRule".fmt(f)
    }
}
impl JsRule {
    // Re-wrap this rule's internals as a generic `Rule` handle.
    fn to_dyn(&self) -> Rule {
        Rule::from_api(self.0.clone())
    }
    /// Build a JS-backed rule and register it as the producing rule of each
    /// output artifact.
    pub fn new_raw(
        inputs: Set<Artifact<Input>>,
        outputs: WeakSet<WeakArtifact<Output>>,
        function: qjs::Persistent<qjs::Function<'static>>,
        context: qjs::Context,
    ) -> Self {
        let inputs = Mut::new(inputs);
        let this = Self(Ref::new(JsInternal {
            inputs,
            outputs,
            function,
            context,
        }));
        log::debug!("JsRule::new");
        {
            // Each output artifact holds a back-reference to its rule.
            let rule = this.to_dyn();
            for output in &this.0.outputs {
                output.set_rule(rule.clone());
            }
        }
        this
    }
}
impl RuleApi for JsInternal {
    fn inputs(&self) -> Vec<Artifact<Input>> {
        self.inputs.read().iter().cloned().collect()
    }
    fn outputs(&self) -> Vec<Artifact<Output>> {
        // Upgrades the weak set; artifacts that were dropped are skipped.
        self.outputs.iter().collect()
    }
    fn invoke(self: Ref<Self>) -> BoxedFuture<Result<Diagnostics>> {
        // Clone what the future needs so it can be 'static.
        let function = self.function.clone();
        let context = self.context.clone();
        let this = JsRule(self);
        async move {
            // Restore the persisted function into the context and call it with
            // the rule itself as `this`; the JS side returns a promise.
            let promise: qjs::Promise<_> =
                context.with(|ctx| function.restore(ctx)?.call((qjs::This(this),)))?;
            Ok(promise.await?)
        }
        .boxed_local()
    }
}
// JavaScript bindings. Every item here is processed by the `qjs::bind` proc
// macro; the repeated `#[quickjs(rename = "Rule")]` functions are overloads of
// a single JS-visible `Rule` constructor, resolved by argument shape.
#[qjs::bind(module, public)]
#[quickjs(bare)]
mod js {
    pub use super::*;
    #[quickjs(rename = "AnyRule")]
    impl Rule {
        // JS cannot construct a bare Rule directly.
        pub fn new() -> Self {
            unimplemented!();
        }
        #[quickjs(get, enumerable)]
        pub fn inputs(&self) -> Vec<Artifact<Input>> {
            self.0.api.inputs()
        }
        #[quickjs(get, enumerable)]
        pub fn outputs(&self) -> Vec<Artifact<Output>> {
            self.0.api.outputs()
        }
        #[quickjs(rename = "toString")]
        pub fn to_string_js(&self) -> String {
            self.to_string()
        }
    }
    // Overload: Rule(inputs, outputs, function).
    #[quickjs(rename = "Rule")]
    pub fn rule_js1<'js>(
        inputs: Either<Set<Artifact<Input>>, Artifact<Input>>,
        outputs: Either<Set<Artifact<Output>>, Artifact<Output>>,
        function: qjs::Persistent<qjs::Function<'static>>,
        ctx: qjs::Ctx<'js>,
    ) -> JsRule {
        JsRule::new_(
            function,
            qjs::Opt(Some(outputs)),
            qjs::Opt(Some(inputs)),
            ctx,
        )
    }
    // Overload: Rule(function, outputs, inputs).
    #[quickjs(rename = "Rule")]
    pub fn rule_js2<'js>(
        function: qjs::Persistent<qjs::Function<'static>>,
        outputs: Either<Set<Artifact<Output>>, Artifact<Output>>,
        inputs: Either<Set<Artifact<Input>>, Artifact<Input>>,
        ctx: qjs::Ctx<'js>,
    ) -> JsRule {
        JsRule::new_(
            function,
            qjs::Opt(Some(outputs)),
            qjs::Opt(Some(inputs)),
            ctx,
        )
    }
    // Overload: Rule(function[, outputs[, inputs]]).
    #[quickjs(rename = "Rule")]
    pub fn rule_js3<'js>(
        function: qjs::Persistent<qjs::Function<'static>>,
        outputs: qjs::Opt<Either<Set<Artifact<Output>>, Artifact<Output>>>,
        inputs: qjs::Opt<Either<Set<Artifact<Input>>, Artifact<Input>>>,
        ctx: qjs::Ctx<'js>,
    ) -> JsRule {
        JsRule::new_(function, outputs, inputs, ctx)
    }
    // Overload without a function: Rule(inputs[, outputs]) -> no-op rule.
    #[quickjs(rename = "Rule")]
    pub fn rule_no1<'js>(
        inputs: Either<Set<Artifact<Input>>, Artifact<Input>>,
        outputs: qjs::Opt<Either<Set<Artifact<Output>>, Artifact<Output>>>,
    ) -> NoRule {
        NoRule::new_(outputs, qjs::Opt(Some(inputs)))
    }
    // Overload without a function: Rule([outputs[, inputs]]) -> no-op rule.
    #[quickjs(rename = "Rule")]
    pub fn rule_no2<'js>(
        outputs: qjs::Opt<Either<Set<Artifact<Output>>, Artifact<Output>>>,
        inputs: qjs::Opt<Either<Set<Artifact<Input>>, Artifact<Input>>>,
    ) -> NoRule {
        NoRule::new_(outputs, inputs)
    }
    #[quickjs(rename = "NoRule")]
    impl NoRule {
        #[quickjs(rename = "new")]
        pub fn new(
            inputs: Either<Set<Artifact<Input>>, Artifact<Input>>,
            outputs: qjs::Opt<Either<Set<Artifact<Output>>, Artifact<Output>>>,
        ) -> Self {
            Self::new_(outputs, qjs::Opt(Some(inputs)))
        }
        // Normalizes single artifacts/sets/absent values, then delegates to
        // `new_raw`.
        #[quickjs(rename = "new")]
        pub fn new_(
            outputs: qjs::Opt<Either<Set<Artifact<Output>>, Artifact<Output>>>,
            inputs: qjs::Opt<Either<Set<Artifact<Input>>, Artifact<Input>>>,
        ) -> Self {
            let inputs = inputs
                .0
                .map(|inputs| inputs.either(|inputs| inputs, |input| once(input).collect()))
                .unwrap_or_default();
            let outputs = outputs
                .0
                .map(|outputs| {
                    outputs.either(
                        |outputs| outputs.into_iter().collect(),
                        |output| once(output).collect(),
                    )
                })
                .unwrap_or_default();
            Self::new_raw(inputs, outputs)
        }
        #[quickjs(get, enumerable)]
        pub fn inputs(&self) -> Vec<Artifact<Input>> {
            self.0.inputs.read().iter().cloned().collect()
        }
        #[quickjs(rename = "inputs", set)]
        pub fn set_inputs(&self, inputs: Either<Set<Artifact<Input>>, Artifact<Input>>) {
            *self.0.inputs.write() = inputs.either(|inputs| inputs, |input| once(input).collect());
        }
        #[quickjs(get, enumerable)]
        pub fn outputs(&self) -> Vec<Artifact<Output>> {
            self.0.outputs.iter().collect()
        }
        #[quickjs(rename = "toString")]
        pub fn to_string_js(&self) -> String {
            self.to_string()
        }
    }
    #[quickjs(rename = "FnRule", has_refs)]
    impl JsRule {
        pub fn new<'js>(
            inputs: Either<Set<Artifact<Input>>, Artifact<Input>>,
            outputs: Either<Set<Artifact<Output>>, Artifact<Output>>,
            function: qjs::Persistent<qjs::Function<'static>>,
            ctx: qjs::Ctx<'js>,
        ) -> Self {
            Self::new_(
                function,
                qjs::Opt(Some(outputs)),
                qjs::Opt(Some(inputs)),
                ctx,
            )
        }
        // Normalizes single artifacts/sets/absent values, then delegates to
        // `new_raw` with the current JS context.
        #[quickjs(rename = "new")]
        pub fn new_<'js>(
            function: qjs::Persistent<qjs::Function<'static>>,
            outputs: qjs::Opt<Either<Set<Artifact<Output>>, Artifact<Output>>>,
            inputs: qjs::Opt<Either<Set<Artifact<Input>>, Artifact<Input>>>,
            ctx: qjs::Ctx<'js>,
        ) -> Self {
            let context = qjs::Context::from_ctx(ctx).unwrap();
            let inputs = inputs
                .0
                .map(|inputs| inputs.either(|inputs| inputs, |input| once(input).collect()))
                .unwrap_or_default();
            let outputs = outputs
                .0
                .map(|outputs| {
                    outputs.either(
                        |outputs| outputs.into_iter().collect(),
                        |output| once(output).collect(),
                    )
                })
                .unwrap_or_default();
            Self::new_raw(inputs, outputs, function, context)
        }
        #[quickjs(get, enumerable)]
        pub fn inputs(&self) -> Vec<Artifact<Input>> {
            self.0.inputs.read().iter().cloned().collect()
        }
        #[quickjs(rename = "inputs", set)]
        pub fn set_inputs(&self, inputs: Either<Set<Artifact<Input>>, Artifact<Input>>) {
            *self.0.inputs.write() = inputs.either(|inputs| inputs, |input| once(input).collect());
        }
        #[quickjs(get, enumerable)]
        pub fn outputs(&self) -> Vec<Artifact<Output>> {
            self.0.outputs.iter().collect()
        }
        #[quickjs(rename = "toString")]
        pub fn to_string_js(&self) -> String {
            self.to_string()
        }
    }
}
|
// A minimal stand-in for libcore built with `#![no_core]`; only the pieces
// needed by its consumer are provided (currently just `marker` and an empty
// prelude). NOTE(review): uses long-unstable features — tied to an old nightly.
#![no_core]
#![feature(lang_items)]
#![feature(fundamental)]
#![feature(no_core)]
#![feature(optin_builtin_traits)]
#![feature(unboxed_closures)]
#![crate_name = "core"]
#![crate_type = "rlib"]
pub mod marker;
// mod ops;
// mod option;
// mod code;
pub mod prelude { pub mod v1 { } }
|
// `no_std` program linking directly against libc; directory access goes
// through hand-declared `opendir`/`readdir` since the libc crate used here
// does not expose them. NOTE(review): relies on pre-1.0 features — old nightly.
#![cfg(not(test))]
#![feature(lang_items)]
#![feature(start)]
#![feature(libc)]
#![feature(core)]
#![feature(no_std)]
#![no_std]
extern crate libc;
#[macro_use(assert, panic)]
extern crate core;
use libc::{dirent_t, c_int, c_char, DIR};
use libc::{closedir, getopt, strlen};
use core::str::{StrExt, from_utf8};
use core::mem::size_of;
use core::slice::from_raw_parts;
mod linkedlist;
use linkedlist::LinkedList;
extern {
    pub fn opendir(dirname: *const c_char) -> *mut DIR;
    pub fn readdir(dir: *mut DIR) -> *mut dirent_t;
}
extern {
    // getopt's out-parameter for option arguments.
    static optarg: *const u8;
    // static optind: c_int;
    // static optopt: *const u8;
}
// A named package and the path it was found under.
struct Package {
    name: &'static str,
    path: &'static str,
}
// XXX: ugly hack, fix this
// Computes the byte offset of dirent's d_name field by summing the sizes of
// the preceding fields — assumes a specific dirent_t layout; TODO confirm
// against the target platform's struct definition.
#[inline]
fn get_d_name(entry: *const dirent_t) -> *const u8 {
    let offset = size_of::<u32>() + size_of::<u16>() + size_of::<u8>() * 2;
    return (entry as usize + offset) as *const u8;
}
// Thin wrapper over libc getopt, converting the returned int to a char
// (a getopt return of -1 becomes a char the caller re-checks as `as i8`).
#[inline]
fn getopt_wrap(argc: isize, argv: *const *const u8, expr: &str) -> char {
    unsafe {return getopt(argc as i32, argv as *mut *const i8,
                          expr.as_ptr() as *const i8) as u8 as char};
}
// View a NUL-terminated C string as a &str; invalid UTF-8 yields "".
#[inline]
fn cstring2str(s: *const u8) -> &'static str {
    return unsafe{from_utf8(from_raw_parts(s, strlen(s as *const i8) as usize))
                  .ok().unwrap_or("")};
}
// Entry point: parse -c <dir> (other flags just print "usage"), then scan the
// config directory. NOTE(review): clearly work-in-progress — the readdir loop
// below drains entries without using them (the Package push is commented out),
// and `packages` is never read.
#[start]
fn main(_argc: isize, _argv: *const *const u8) -> isize {
    let mut ch = getopt_wrap(_argc, _argv, "c:d:f:LmnNp:P:sSqX");
    let mut config_dir = "/etc/config";
    while ch as i8 != -1 {
        unsafe { match ch {
            'c' => {config_dir = cstring2str(optarg);}
            _ => {libc::puts("usage".as_ptr() as *const i8);}
        } }
        ch = getopt_wrap(_argc, _argv, "c:d:f:LmnNp:P:sSqX");
    };
    // if _argc - optind as isize + 1 < 2 {
    //     // TODO: print usage
    //     return 0;
    // }
    let packages: LinkedList<Package> = LinkedList::new();
    unsafe {
        // SAFETY(review): config_dir is a &str, not guaranteed NUL-terminated;
        // passing it to opendir as a C string is only safe for the literal
        // default — confirm for -c supplied values.
        let dir = opendir(config_dir.as_ptr() as *const i8);
        if dir.is_null() {
            libc::puts("directory not found".as_ptr() as *const i8);
            return -1;
        }
        let mut entry = readdir(dir);
        // Drains the directory stream; the first entry is read before the loop
        // and each subsequent read overwrites `entry` unused.
        while !entry.is_null() {
            entry = readdir(dir);
        }
        //let package =
        //packages.push(Package{name: cstring2str(get_d_name(entry)), path: ""});
        closedir(dir);
    }
    0
}
// Minimal language items required when building without std: stack-overflow
// handler, exception personality, and a panic handler that simply spins.
#[lang = "stack_exhausted"] extern fn stack_exhausted() {}
#[lang = "eh_personality" ] extern fn eh_personality() {}
#[lang = "panic_fmt"]
extern fn panic_fmt(_args: &core::fmt::Arguments,
                    _file: &str, _line: usize) -> ! {
    loop {}
}
|
use serde::{de, ser};
use serde_json::Number;
/// Schema version marker; only version 1 is currently supported (see the
/// `Deserialize` impl below).
#[derive(Debug, PartialEq)]
pub struct Version {
    value: u8,
}
impl Version {
    /// Returns schema version value
    pub fn value(&self) -> u8 {
        self.value
    }
}
// The default (and only) schema version is 1.
impl Default for Version {
    fn default() -> Version {
        Version { value: 1 }
    }
}
impl<'de> de::Deserialize<'de> for Version {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
let v = Number::deserialize(deserializer)?;
if v.as_u64() != Some(1) {
Err(de::Error::custom("unsuppored version number"))
} else {
Ok(Version { value: 1 })
}
}
}
// Serializes as the bare number, mirroring the numeric deserialization above.
impl ser::Serialize for Version {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        serializer.serialize_u8(self.value())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Default version is 1.
    #[test]
    fn defaults_to_one() {
        assert_eq!(Version::default().value(), 1);
    }
    // The literal 1 parses successfully.
    #[test]
    fn one() {
        let v: Version = serde_yaml::from_str("1").unwrap();
        assert_eq!(v.value(), 1);
    }
    // Any other number is rejected.
    #[test]
    fn fail_on_unsupported_version() {
        let v: Result<Version, _> = serde_yaml::from_str("2");
        assert!(v.is_err());
        let v: Result<Version, _> = serde_yaml::from_str("0");
        assert!(v.is_err());
    }
    // A quoted string is not a number, even if it spells "1".
    #[test]
    fn fail_on_string() {
        let v: Result<Version, _> = serde_yaml::from_str("'1'");
        assert!(v.is_err());
    }
}
|
// One submodule per block type (these names match pcapng block kinds —
// NOTE(review): confirm), each re-exported here for a flat public API.
pub(crate) mod interface_description;
pub use interface_description::*;
pub(crate) mod section_header;
pub use section_header::*;
pub(crate) mod enhanced_packet;
pub use enhanced_packet::*;
pub(crate) mod simple_packet;
pub use simple_packet::*;
pub(crate) mod common;
pub use common::*;
pub(crate) mod name_resolution;
pub use name_resolution::*;
pub(crate) mod interface_statistics;
pub use interface_statistics::*;
pub(crate) mod systemd_journal_export;
pub use systemd_journal_export::*;
pub(crate) mod packet;
pub use packet::*;
|
#![allow(unreachable_patterns)]
use std::fmt;
extern crate lalrpop_util;
#[macro_use] extern crate enum_methods;
pub mod ast;
pub mod tok;
pub mod parser;
// Marker trait for SQL dialects.
// SAFETY(review): the trait is `unsafe` but the invariant implementors must
// uphold is not stated here — document it where the trait is consumed.
pub unsafe trait SyntaxTrait { }
/// The PostgreSQL dialect marker.
pub struct Postgres;
unsafe impl SyntaxTrait for Postgres { }
|
// NOTE(review): this looks like svd2rust-generated register access code for
// the APB1FZR1 debug-freeze register; prefer regenerating from the SVD over
// hand-editing these aliases.
#[doc = "Register `APB1FZR1` reader"]
pub type R = crate::R<APB1FZR1_SPEC>;
#[doc = "Register `APB1FZR1` writer"]
pub type W = crate::W<APB1FZR1_SPEC>;
#[doc = "Field `DBG_TIM2_STOP` reader - TIM2 counter stopped when core is halted"]
pub type DBG_TIM2_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM2_STOP` writer - TIM2 counter stopped when core is halted"]
pub type DBG_TIM2_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_TIM6_STOP` reader - TIM6 counter stopped when core is halted"]
pub type DBG_TIM6_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM6_STOP` writer - TIM6 counter stopped when core is halted"]
pub type DBG_TIM6_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_TIM7_STOP` reader - TIM7 counter stopped when core is halted"]
pub type DBG_TIM7_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM7_STOP` writer - TIM7 counter stopped when core is halted"]
pub type DBG_TIM7_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_RTC_STOP` reader - RTC counter stopped when core is halted"]
pub type DBG_RTC_STOP_R = crate::BitReader;
#[doc = "Field `DBG_RTC_STOP` writer - RTC counter stopped when core is halted"]
pub type DBG_RTC_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_WWDG_STOP` reader - Window watchdog counter stopped when core is halted"]
pub type DBG_WWDG_STOP_R = crate::BitReader;
#[doc = "Field `DBG_WWDG_STOP` writer - Window watchdog counter stopped when core is halted"]
pub type DBG_WWDG_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_IWDG_STOP` reader - Independent watchdog counter stopped when core is halted"]
pub type DBG_IWDG_STOP_R = crate::BitReader;
#[doc = "Field `DBG_IWDG_STOP` writer - Independent watchdog counter stopped when core is halted"]
pub type DBG_IWDG_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_I2C1_STOP` reader - I2C1 SMBUS timeout counter stopped when core is halted"]
pub type DBG_I2C1_STOP_R = crate::BitReader;
#[doc = "Field `DBG_I2C1_STOP` writer - I2C1 SMBUS timeout counter stopped when core is halted"]
pub type DBG_I2C1_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_I2C2_STOP` reader - I2C2 SMBUS timeout counter stopped when core is halted"]
pub type DBG_I2C2_STOP_R = crate::BitReader;
#[doc = "Field `DBG_I2C2_STOP` writer - I2C2 SMBUS timeout counter stopped when core is halted"]
pub type DBG_I2C2_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_I2C3_STOP` reader - I2C3 SMBUS timeout counter stopped when core is halted"]
pub type DBG_I2C3_STOP_R = crate::BitReader;
#[doc = "Field `DBG_I2C3_STOP` writer - I2C3 SMBUS timeout counter stopped when core is halted"]
pub type DBG_I2C3_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_CAN_STOP` reader - bxCAN stopped when core is halted"]
pub type DBG_CAN_STOP_R = crate::BitReader;
#[doc = "Field `DBG_CAN_STOP` writer - bxCAN stopped when core is halted"]
pub type DBG_CAN_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_LPTIM1_STOP` reader - LPTIM1 counter stopped when core is halted"]
pub type DBG_LPTIM1_STOP_R = crate::BitReader;
#[doc = "Field `DBG_LPTIM1_STOP` writer - LPTIM1 counter stopped when core is halted"]
pub type DBG_LPTIM1_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// NOTE(review): generated reader accessors for APB1FZR1. Each method extracts one bit
// at its documented position from the raw register value (`self.bits`).
impl R {
    #[doc = "Bit 0 - TIM2 counter stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_tim2_stop(&self) -> DBG_TIM2_STOP_R {
        DBG_TIM2_STOP_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 4 - TIM6 counter stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_tim6_stop(&self) -> DBG_TIM6_STOP_R {
        DBG_TIM6_STOP_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - TIM7 counter stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_tim7_stop(&self) -> DBG_TIM7_STOP_R {
        DBG_TIM7_STOP_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 10 - RTC counter stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_rtc_stop(&self) -> DBG_RTC_STOP_R {
        DBG_RTC_STOP_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - Window watchdog counter stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_wwdg_stop(&self) -> DBG_WWDG_STOP_R {
        DBG_WWDG_STOP_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - Independent watchdog counter stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_iwdg_stop(&self) -> DBG_IWDG_STOP_R {
        DBG_IWDG_STOP_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 21 - I2C1 SMBUS timeout counter stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_i2c1_stop(&self) -> DBG_I2C1_STOP_R {
        DBG_I2C1_STOP_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - I2C2 SMBUS timeout counter stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_i2c2_stop(&self) -> DBG_I2C2_STOP_R {
        DBG_I2C2_STOP_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - I2C3 SMBUS timeout counter stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_i2c3_stop(&self) -> DBG_I2C3_STOP_R {
        DBG_I2C3_STOP_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 25 - bxCAN stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_can_stop(&self) -> DBG_CAN_STOP_R {
        DBG_CAN_STOP_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 31 - LPTIM1 counter stopped when core is halted"]
    #[inline(always)]
    pub fn dbg_lptim1_stop(&self) -> DBG_LPTIM1_STOP_R {
        DBG_LPTIM1_STOP_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// NOTE(review): generated writer proxies for APB1FZR1. Each method returns a typed
// bit-writer positioned at the field's bit offset (the const generic argument).
impl W {
    #[doc = "Bit 0 - TIM2 counter stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim2_stop(&mut self) -> DBG_TIM2_STOP_W<APB1FZR1_SPEC, 0> {
        DBG_TIM2_STOP_W::new(self)
    }
    #[doc = "Bit 4 - TIM6 counter stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim6_stop(&mut self) -> DBG_TIM6_STOP_W<APB1FZR1_SPEC, 4> {
        DBG_TIM6_STOP_W::new(self)
    }
    #[doc = "Bit 5 - TIM7 counter stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim7_stop(&mut self) -> DBG_TIM7_STOP_W<APB1FZR1_SPEC, 5> {
        DBG_TIM7_STOP_W::new(self)
    }
    #[doc = "Bit 10 - RTC counter stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_rtc_stop(&mut self) -> DBG_RTC_STOP_W<APB1FZR1_SPEC, 10> {
        DBG_RTC_STOP_W::new(self)
    }
    #[doc = "Bit 11 - Window watchdog counter stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_wwdg_stop(&mut self) -> DBG_WWDG_STOP_W<APB1FZR1_SPEC, 11> {
        DBG_WWDG_STOP_W::new(self)
    }
    #[doc = "Bit 12 - Independent watchdog counter stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_iwdg_stop(&mut self) -> DBG_IWDG_STOP_W<APB1FZR1_SPEC, 12> {
        DBG_IWDG_STOP_W::new(self)
    }
    #[doc = "Bit 21 - I2C1 SMBUS timeout counter stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i2c1_stop(&mut self) -> DBG_I2C1_STOP_W<APB1FZR1_SPEC, 21> {
        DBG_I2C1_STOP_W::new(self)
    }
    #[doc = "Bit 22 - I2C2 SMBUS timeout counter stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i2c2_stop(&mut self) -> DBG_I2C2_STOP_W<APB1FZR1_SPEC, 22> {
        DBG_I2C2_STOP_W::new(self)
    }
    #[doc = "Bit 23 - I2C3 SMBUS timeout counter stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i2c3_stop(&mut self) -> DBG_I2C3_STOP_W<APB1FZR1_SPEC, 23> {
        DBG_I2C3_STOP_W::new(self)
    }
    #[doc = "Bit 25 - bxCAN stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_can_stop(&mut self) -> DBG_CAN_STOP_W<APB1FZR1_SPEC, 25> {
        DBG_CAN_STOP_W::new(self)
    }
    #[doc = "Bit 31 - LPTIM1 counter stopped when core is halted"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_lptim1_stop(&mut self) -> DBG_LPTIM1_STOP_W<APB1FZR1_SPEC, 31> {
        DBG_LPTIM1_STOP_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // SAFETY contract (caller-upheld): the raw value bypasses the typed field writers,
    // so the caller must ensure it is a valid bit pattern for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// NOTE(review): register spec marker type; the bitmaps and reset value of 0 below are
// taken directly from the generated definitions — do not tweak by hand.
#[doc = "Debug MCU APB1 freeze register1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1fzr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1fzr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct APB1FZR1_SPEC;
impl crate::RegisterSpec for APB1FZR1_SPEC {
    // Backing storage width: this is a 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`apb1fzr1::R`](R) reader structure"]
impl crate::Readable for APB1FZR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apb1fzr1::W`](W) writer structure"]
impl crate::Writable for APB1FZR1_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APB1FZR1 to value 0"]
impl crate::Resettable for APB1FZR1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use z80::Z80;
/*
** XOR r|$xx|(hl)
** Condition Bits: R000
** Clocks:
** r: 1
** $xx: 2
** (hl): 2
*/
/// Executes the XOR opcode family: `A ^= operand`.
///
/// The operand is selected by `op`: immediate byte (0xEE), memory at HL (0xAE),
/// or one of the registers A/B/C/D/E/H/L (0xAF/0xA8..0xAD). Per the header
/// comment ("Condition Bits: R000"), all flags are reset and only Z is derived
/// from the result.
pub fn xor(z80: &mut Z80, op: u8) {
    let val = match op {
        // XOR $xx: the immediate operand follows the opcode; advance PC past it.
        0xEE => {
            z80.r.pc += 1;
            z80.mmu.rb(z80.r.pc - 1)
        },
        // XOR (hl): operand is the byte at the address in HL.
        0xAE => z80.mmu.rb(z80.r.get_hl()),
        0xAF => z80.r.a,
        0xA8 => z80.r.b,
        0xA9 => z80.r.c,
        0xAA => z80.r.d,
        0xAB => z80.r.e,
        0xAC => z80.r.h,
        0xAD => z80.r.l,
        // Unreachable when dispatch is correct; XOR with 0 leaves A unchanged.
        _ => 0
    };
    // All flags cleared first; Z is then set iff the result is zero.
    // (The original set_zero(false) else-branch was redundant: clear_flags()
    // has already left the flags in the cleared state.)
    z80.r.clear_flags();
    z80.r.a ^= val;
    z80.r.set_zero(z80.r.a == 0);
    // Immediate and (hl) variants cost 2 cycles, register variants 1
    // (matches the "Clocks" table in the header comment).
    z80.set_register_clock(if op == 0xEE || op == 0xAE { 2 } else { 1 });
}
|
// Copyright 2014 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Device extension. Allows creating a renderer or converting into
//! a single-threaded wrapper.
use std::ops;
use device;
use render::{batch, Renderer};
use render::ext::factory::RenderFactory;
use render::shade::ShaderParam;
/// A convenient wrapper suitable for single-threaded operation.
pub struct Graphics<D: device::Device, F> {
    /// Graphics device.
    pub device: D,
    /// Resource factory.
    pub factory: F,
    /// Renderer front-end.
    pub renderer: Renderer<D::Resources, D::CommandBuffer>,
    /// Hidden batch context.
    // Kept private on purpose; callers reach it through the Deref/DerefMut
    // impls on Graphics rather than touching the field directly.
    context: batch::Context<D::Resources>,
}
// Deref to the hidden batch context so its methods can be called directly on a
// Graphics value (e.g. graphics.make_batch(...) style usage).
impl<D: device::Device, F> ops::Deref for Graphics<D, F> {
    type Target = batch::Context<D::Resources>;
    fn deref(&self) -> &batch::Context<D::Resources> {
        &self.context
    }
}
impl<D: device::Device, F> ops::DerefMut for Graphics<D, F> {
    fn deref_mut(&mut self) -> &mut batch::Context<D::Resources> {
        &mut self.context
    }
}
impl<D: device::Device, F: device::Factory<D::Resources>> Graphics<D, F> {
    /// Clear the output with given `ClearData`.
    pub fn clear<O: ::Output<D::Resources>>(&mut self,
                 data: ::ClearData, mask: ::Mask, out: &O) {
        self.renderer.clear(data, mask, out)
    }
    /// Draw a `RefBatch` batch.
    pub fn draw<'a,
        T: ShaderParam<Resources = D::Resources>,
        O: ::Output<D::Resources>,
    >(
        &'a mut self, batch: &'a batch::RefBatch<T>, out: &O)
        -> Result<(), ::DrawError<batch::OutOfBounds>>
    {
        // Note the split borrow: renderer is borrowed mutably while context is
        // borrowed immutably as part of the (batch, context) pair.
        self.renderer.draw(&(batch, &self.context), out)
    }
    /// Draw a `CoreBatch` batch.
    pub fn draw_core<'a,
        T: ShaderParam<Resources = D::Resources>,
        O: ::Output<D::Resources>,
    >(
        &'a mut self, core: &'a batch::CoreBatch<T>, slice: &'a ::Slice<D::Resources>,
        params: &'a T, out: &O) -> Result<(), ::DrawError<batch::OutOfBounds>>
    {
        // context.bind packages the core batch with its slice and parameters
        // into a drawable value before handing it to the renderer.
        self.renderer.draw(&self.context.bind(core, slice, params), out)
    }
    /// Submit the internal command buffer and reset for the next frame.
    pub fn end_frame(&mut self) {
        self.device.submit(self.renderer.as_buffer());
        self.renderer.reset();
    }
    /// Cleanup resources after the frame.
    pub fn cleanup(&mut self) {
        self.device.after_frame();
        self.factory.cleanup();
    }
}
/// Backend extension trait for convenience methods
pub trait DeviceExt<D: device::Device, F> {
    /// Convert to single-threaded wrapper
    // The `mut` binding was dropped from this signature: binding patterns in
    // trait methods without bodies are deprecated (rustc lint
    // `patterns_in_fns_without_body`) and `mut` says nothing to callers or
    // implementors — an impl may still bind `mut self` locally.
    fn into_graphics(self) -> Graphics<D, F>;
}
impl<
    D: device::Device,
    F: device::Factory<D::Resources>,
> DeviceExt<D, F> for (D, F) {
    /// Consume the (device, factory) pair and assemble the single-threaded
    /// `Graphics` wrapper around a freshly created renderer and batch context.
    fn into_graphics(self) -> Graphics<D, F> {
        let (device, mut factory) = self;
        let renderer = factory.create_renderer();
        Graphics {
            device: device,
            factory: factory,
            renderer: renderer,
            context: batch::Context::new(),
        }
    }
}
|
//! Utilities related to RDF.
use std::str::FromStr;
use std::fmt;
use std::fmt::{Display, Formatter};
use language_tags;
/// Error type returned when trying to parse an invalid IRI.
// Returned by Iri::new (currently never, but the signature reserves it).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct InvalidIriError {
    /// The invalid IRI string that we tried to parse.
    pub attempted_iri: String
}
/// Error type returned when trying to parse an invalid language tag.
// Returned by LangTag::new when the language_tags crate rejects the input.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct InvalidLangTagError {
    /// The invalid tag string that we tried to parse.
    pub attempted_tag: String
}
/// Represents an IRI.
///
/// Note: This type's implementations of [`Ord`](std::cmp::Ord) and
/// [`PartialOrd`](std::cmp::PartialOrd) have little semantic meaning, and exist mainly for use with
/// collections that require an ordered element type. Additionally, the implementations of
/// [`Eq`](std::cmp::Eq) and [`PartialEq`](std::cmp::PartialEq) will always treat two IRIs as
/// different if their text is different.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Iri {
    // TODO: Constructor(s) should guarantee this is a valid IRI. They currently do not.
    // Equality/ordering are purely textual (derived on this String); no IRI
    // normalization is performed anywhere in this module.
    iri: String
}
impl Iri {
    /// Constructs an IRI from the given string. Currently, this function never returns an error,
    /// but future versions will hopefully reject invalid IRI strings.
    pub fn new(iri: String) -> Result<Self, InvalidIriError> {
        // No validation is performed yet; the string is stored verbatim.
        let wrapped = Iri { iri: iri };
        Ok(wrapped)
    }
}
impl Display for Iri {
    /// Writes the raw IRI text, unmodified.
    fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
        write!(f, "{}", self.iri)
    }
}
/// Represents a language tag for an RDF literal.
///
/// Note: This type's implementations of [`Ord`](std::cmp::Ord) and
/// [`PartialOrd`](std::cmp::PartialOrd) have little semantic meaning, and exist mainly for use with
/// collections that require an ordered element type.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct LangTag {
    /// Constructor(s) should guarantee this is a valid, canonicalized language tag string. All
    /// semantically equivalent language tags should have the same string.
    // Canonicalization happens in LangTag::new via the language_tags crate, so the
    // derived equality/ordering on this String is meaningful for equivalent tags.
    tag: String
}
impl LangTag {
    /// Constructs a language tag from the given string. Returns an error if the string is not a
    /// valid language tag.
    pub fn new(tag: String) -> Result<Self, InvalidLangTagError> {
        // Parsing, validation and canonicalization are all delegated to the
        // language_tags crate; we only store the canonical text.
        let parse_result = language_tags::LanguageTag::from_str(&tag);
        match parse_result {
            Ok(parsed) => {
                let canonical = parsed.canonicalize().to_string();
                Ok(LangTag { tag: canonical })
            }
            Err(_) => Err(InvalidLangTagError { attempted_tag: tag }),
        }
    }
}
impl Display for LangTag {
    /// Writes the canonicalized tag text.
    fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
        write!(f, "{}", self.tag)
    }
}
// TODO: Figure out if lang_tagged_literal_data_type and non_lang_tagged_literal_default_data_type
// can be replaced with static constants. This would probably help with the efficiency of
// Literal::with_data_type. The main difficulty is that their initialization requires heap
// allocation.
/// Generates the data type IRI that automatically applies to all language-tagged RDF literals.
pub fn lang_tagged_literal_data_type() -> Iri {
    // Iri::new currently always succeeds, so unwrap cannot panic here.
    let text = "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString";
    Iri::new(text.to_string()).unwrap()
}
/// Generates the default data type IRI for non-language-tagged RDF literals.
pub fn non_lang_tagged_literal_default_data_type() -> Iri {
    let text = "http://www.w3.org/2001/XMLSchema#string";
    Iri::new(text.to_string()).unwrap()
}
/// Represents an RDF literal.
///
/// Note: This type's implementations of [`Ord`](std::cmp::Ord) and
/// [`PartialOrd`](std::cmp::PartialOrd) have little semantic meaning, and exist mainly for use with
/// collections that require an ordered element type. Additionally, the implementations of
/// [`Eq`](std::cmp::Eq) and [`PartialEq`](std::cmp::PartialEq) do not take any data-type-specific
/// equivalence into account. For example, literals with value "0" and "0.0" are considered
/// different even if they have a floating point data type.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Literal {
    /// Literal value text.
    value: String,
    /// IRI identifying the data type. Must be the same IRI as returned by
    /// [`lang_tagged_literal_data_type`](self::lang_tagged_literal_data_type) if and only if a
    /// language tag is present.
    // The constructors below uphold this invariant; the fields are private so it
    // cannot be broken from outside.
    data_type: Iri,
    /// Optional language tag.
    lang_tag: Option<LangTag>
}
impl Literal {
/// Constructs a literal with the given text value, no language tag, and the default data type
/// for non-language-tagged literals.
pub fn new(value: String) -> Self {
Literal {
value,
data_type: non_lang_tagged_literal_default_data_type(),
lang_tag: None
}
}
/// Constructs a literal with the given text value and language tag. The data type will be the
/// type required for language-tagged literals.
pub fn with_lang_tag(value: String, lang_tag: LangTag) -> Self {
Literal {
value,
data_type: lang_tagged_literal_data_type(),
lang_tag: Some(lang_tag)
}
}
/// Constructs a literal with the given text value, no language tag, and the given data type.
/// Returns [`None`](std::option::Option::None) if the provided data type requires a language
/// tag.
pub fn with_data_type(value: String, data_type: Iri) -> Option<Self> {
if data_type == lang_tagged_literal_data_type() {
None
}
else {
Some(Literal {
value,
data_type,
lang_tag: None
})
}
}
/// Gets the literal value text.
pub fn value(&self) -> &str {
&self.value
}
/// Gets the data type IRI.
pub fn data_type(&self) -> &Iri {
&self.data_type
}
/// Gets the language tag, or [`None`](std::option::Option::None) if there is no language tag.
pub fn lang_tag(&self) -> Option<&LangTag> {
match &self.lang_tag {
None => None,
Some(tag) => Some(&tag)
}
}
} |
use ckb_error::{Error, ErrorKind};
use failure::Fail;
use std::fmt::Display;
// Errors produced by DAO (deposit/withdraw accounting) calculations.
// NOTE(review): the Fail/Display derives come from the failure crate and a helper
// derive respectively; variant semantics are inferred from their names — confirm
// against the call sites in the DAO calculator.
#[derive(Fail, Debug, PartialEq, Clone, Eq, Display)]
pub enum DaoError {
    InvalidHeader,
    InvalidOutPoint,
    InvalidDaoFormat,
    Overflow,
    ZeroC,
}
// Wrap a DaoError into the shared ckb Error type under the Dao error kind.
impl From<DaoError> for Error {
    fn from(error: DaoError) -> Self {
        error.context(ErrorKind::Dao).into()
    }
}
|
use {Document, Error, Revision};
use action::query_keys::*;
use document::WriteDocumentResponse;
use transport::{JsonResponse, JsonResponseDecoder, Request, StatusCode, Transport};
/// Action that updates (HTTP PUT) an existing document on the server, yielding
/// the document's new revision on success.
pub struct UpdateDocument<'a, T>
where
    T: Transport + 'a,
{
    // Transport used to send the request.
    transport: &'a T,
    // The document (path, revision and content) to write.
    doc: &'a Document,
}
impl<'a, T> UpdateDocument<'a, T>
where
    T: Transport + 'a,
{
    #[doc(hidden)]
    pub fn new(transport: &'a T, doc: &'a Document) -> Self {
        UpdateDocument {
            transport: transport,
            doc: doc,
        }
    }
    /// Executes the action: builds the PUT request, sends it, and decodes the
    /// response into the document's new revision.
    pub fn run(mut self) -> Result<Revision, Error> {
        self.transport.send(
            try!(self.make_request()),
            JsonResponseDecoder::new(handle_response),
        )
    }
    // Builds the PUT request: document path, JSON accept header, the current
    // revision as the `rev` query parameter, and the document as the JSON body.
    fn make_request(&mut self) -> Result<Request, Error> {
        self.transport
            .put(self.doc.path().iter())
            .with_accept_json()
            .with_query(RevisionQueryKey, self.doc.revision())
            .with_json_content(&self.doc)
    }
}
}
fn handle_response(response: JsonResponse) -> Result<Revision, Error> {
match response.status_code() {
StatusCode::Created => {
let body: WriteDocumentResponse = try!(response.decode_content());
Ok(body.revision)
}
StatusCode::Conflict => Err(Error::document_conflict(&response)),
StatusCode::NotFound => Err(Error::not_found(&response)),
StatusCode::Unauthorized => Err(Error::unauthorized(&response)),
_ => Err(Error::server_response(&response)),
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use {Error, Revision, serde_json};
    use document::DocumentBuilder;
    use transport::{JsonResponseBuilder, MockTransport, StatusCode, Transport};
    // Verifies the PUT request carries the document path, the `rev` query
    // parameter and the JSON body.
    #[test]
    fn make_request_default() {
        let transport = MockTransport::new();
        let doc = DocumentBuilder::new(
            "/foo/bar",
            Revision::parse("1-1234567890abcdef1234567890abcdef").unwrap(),
        ).build_content(|x| x.insert("field_1", 42).insert("field_2", "hello"))
            .unwrap();
        let request_content = serde_json::builder::ObjectBuilder::new()
            .insert("field_1", 42)
            .insert("field_2", "hello")
            .build();
        let expected = transport
            .put(vec!["foo", "bar"])
            .with_accept_json()
            .with_query_literal("rev", "1-1234567890abcdef1234567890abcdef")
            .with_json_content(&request_content)
            .unwrap();
        let got = {
            let mut action = UpdateDocument::new(&transport, &doc);
            action.make_request().unwrap()
        };
        assert_eq!(expected, got);
    }
    #[test]
    fn handle_response_created() {
        let response = JsonResponseBuilder::new(StatusCode::Created)
            .with_json_content_raw(
                r#"{"ok":true,"id":"bar","rev":"1-1234567890abcdef1234567890abcdef"}"#,
            )
            .unwrap();
        let expected = Revision::parse("1-1234567890abcdef1234567890abcdef").unwrap();
        let got = super::handle_response(response).unwrap();
        assert_eq!(expected, got);
    }
    #[test]
    fn handle_response_conflict() {
        let response = JsonResponseBuilder::new(StatusCode::Conflict)
            .with_json_content_raw(
                r#"{"error":"conflict","reason":"Document update conflict."}"#,
            )
            .unwrap();
        match super::handle_response(response) {
            Err(Error::DocumentConflict(ref error_response))
                if error_response.error() == "conflict" && error_response.reason() == "Document update conflict." => (),
            // `x @ _` was a redundant pattern binding; plain `x` is equivalent.
            x => unexpected_result!(x),
        }
    }
    #[test]
    fn handle_response_not_found() {
        let response = JsonResponseBuilder::new(StatusCode::NotFound)
            .with_json_content_raw(r#"{"error":"not_found","reason":"no_db_file"}"#)
            .unwrap();
        match super::handle_response(response) {
            Err(Error::NotFound(ref error_response))
                if error_response.error() == "not_found" && error_response.reason() == "no_db_file" => (),
            x => unexpected_result!(x),
        }
    }
    #[test]
    fn handle_response_unauthorized() {
        let response = JsonResponseBuilder::new(StatusCode::Unauthorized)
            .with_json_content_raw(
                r#"{"error":"unauthorized","reason":"Authentication required."}"#,
            )
            .unwrap();
        match super::handle_response(response) {
            Err(Error::Unauthorized(ref error_response))
                if error_response.error() == "unauthorized" && error_response.reason() == "Authentication required." =>
                (),
            x => unexpected_result!(x),
        }
    }
}
|
/// Prints the number 12 (without a trailing newline).
fn main() {
    // Renamed from `_number`: a leading underscore conventionally marks a
    // binding as intentionally unused, but this one is printed below.
    let number = 12;
    print!("{}", number);
}
|
use azure_core::headers::CommonStorageResponseHeaders;
use azure_core::prelude::*;
use azure_storage::xml::read_xml;
use bytes::Bytes;
use http::response::Response;
use std::convert::TryInto;
// Public, cleaned-up view of a "list queues" response (built from the internal
// XML representation in the TryFrom impl below).
#[derive(Debug, Clone)]
pub struct ListQueuesResponse {
    pub common_storage_response_headers: CommonStorageResponseHeaders,
    pub service_endpoint: String,
    pub prefix: Option<String>,
    // this seems duplicate :S
    pub marker: Option<String>,
    pub max_results: Option<u32>,
    pub queues: Vec<Queue>,
    // None when the service returned an empty <NextMarker /> (no more pages).
    pub next_marker: Option<NextMarker>,
}
impl ListQueuesResponse {
    // Continuation token accessor for paging.
    // NOTE(review): returns &Option<_> rather than Option<&_>; kept as-is since
    // changing it would break callers.
    pub fn next_marker(&self) -> &Option<NextMarker> {
        &self.next_marker
    }
}
// Wire-format mirror of the XML EnumerationResults payload; the serde renames
// match the element names exercised in the `try_parse` test below.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ListQueuesResponseInternal {
    #[serde(rename = "ServiceEndpoint")]
    pub service_endpoint: String,
    #[serde(rename = "Prefix")]
    pub prefix: Option<String>,
    #[serde(rename = "Marker")]
    pub marker: Option<String>,
    #[serde(rename = "MaxResults")]
    pub max_results: Option<u32>,
    #[serde(rename = "Queues")]
    pub queues: Queues,
    #[serde(rename = "NextMarker")]
    pub next_marker: Option<String>,
}
// Wrapper for the <Queues><Queue>...</Queue></Queues> nesting in the XML.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Queues {
    #[serde(rename = "Queue")]
    pub queues: Option<Vec<Queue>>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Queue {
    #[serde(rename = "Name")]
    pub name: String,
    #[serde(rename = "Metadata")]
    pub metadata: Option<std::collections::HashMap<String, String>>,
}
impl std::convert::TryFrom<&Response<Bytes>> for ListQueuesResponse {
    type Error = crate::Error;
    /// Parses the XML body of a "list queues" HTTP response into the public
    /// `ListQueuesResponse`, extracting the common storage headers as well.
    fn try_from(response: &Response<Bytes>) -> Result<Self, Self::Error> {
        let headers = response.headers();
        let body = response.body();
        debug!("headers == {:?}", headers);
        debug!("body == {:#?}", body);
        // Renamed from the shadowing `response` binding for clarity.
        let parsed: ListQueuesResponseInternal = read_xml(body)?;
        Ok(ListQueuesResponse {
            common_storage_response_headers: headers.try_into()?,
            service_endpoint: parsed.service_endpoint,
            prefix: parsed.prefix,
            marker: parsed.marker,
            max_results: parsed.max_results,
            queues: parsed.queues.queues.unwrap_or_default(),
            // An empty <NextMarker /> deserializes as Some(""); Option::filter
            // collapses it to None (replaces the previous mutate-then-map dance).
            next_marker: parsed
                .next_marker
                .filter(|next_marker| !next_marker.is_empty())
                .map(|next_marker| next_marker.into()),
        })
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Parses a sample EnumerationResults payload and checks both queues survive.
    // NOTE(review): this test deserializes with serde_xml_rs while the TryFrom
    // impl uses azure_storage::xml::read_xml — confirm the two agree.
    #[test]
    fn try_parse() {
        let range = "<?xml version=\"1.0\" encoding=\"utf-8\"?><EnumerationResults ServiceEndpoint=\"https://azureskdforrust.queue.core.windows.net/\"><Prefix>a</Prefix><MaxResults>2</MaxResults><Queues><Queue><Name>azureiscool</Name></Queue><Queue><Name>azurerocks</Name></Queue></Queues><NextMarker /></EnumerationResults>";
        let response: ListQueuesResponseInternal = serde_xml_rs::from_str(range).unwrap();
        assert_eq!(response.queues.queues.unwrap().len(), 2);
    }
}
|
//!
use crate::{dev::*, interfaces::Conn, types::MTU};
use std::sync::{Arc, Mutex};
// ///
// pub const OFFSET_BYTES: usize = 4;
// ///
// pub const HEADER_LENGTH: usize = 40;
/// Represents a running TUN interface.
///
/// ## What is TUN, what is Wireguard, and why do we use them?
///
/// flow:
/// ? (why) initialized with a core.Dialer and core.Listener
/// on startup:
/// init handler loop
/// for each yg.Conn in Listener.await
/// _wrap each in TunConn (driving adapter)
/// close existing ones (by asking Tun.has_conn)
/// save it
/// ? yconn.subscribe(TunnConn._read).or_timeout()
/// in reality, yg.Conn drains an inner buffer
/// gives it to TunConn._read
/// calls tunWriter.writeFrom
/// calls iface.write
///
/// start a tunReader (actor)
/// waits for packet delivered by TUN device (iface)
/// tunAdapter._handle_packet (sends packet to TunConn)
/// finds cached TunConn from Tun
/// or calls ygg.Dialer.Dial to create a ygg.Conn
/// then wraps it as TunConn
/// calls TunConn.writefrom (._write)
/// creates FlowKeyMessage
/// ygg.Conn.WriteFrom
/// then if packet too big:
/// tunWriter.writeFrom(ICMP packet)
///
/// start the ckr
///
/// ???? is a Port
/// ? Handle<IncomingConnection>
/// ? spawns TunConn `for yg.Conn in Listener.await`
/// ? stores `Addr<TunConn>` by remote address and subnet
#[async_trait::async_trait]
pub trait TunAdapter<C: Core>
where
    Self: Actor,
{
    // const IPV6_HEADER_LEN: u8 = 40;
    /// Connection type this adapter wraps individual peer links in
    /// (presumably one per yg.Conn — see the flow notes above; confirm).
    type Conn: TunConn<C, Self>;
}
///
/// An actor that adapts a yg.Conn to a TUN interface connection with a remote peer.
/// Polling polls the internal yg.Conn,
/// pulling from a readBuffer (created upon dialing).
///
/// created:
///     - upon dialing
///
/// ???? is a Port
///     ? Handle<...>
// Marker trait for now: no methods yet; implementations are tied to a specific
// Core and the TunAdapter that spawned them.
pub trait TunConn<C: Core, T: TunAdapter<C>>
where
    Self: Actor,
{
}
/// Represents the underlying, platform-specific TUN interface.
pub trait TunInterface: Sized {
    /// Read half produced by [`split`](Self::split).
    type Reader: AsyncRead;
    /// Write half produced by [`split`](Self::split); also an actor.
    type Writer: Actor + AsyncWrite;
    // TODO: set interface name
    /// Opens the platform TUN device.
    fn open() -> Result<Self, Error>;
    /// The OS-level name of the interface (e.g. assigned by the platform).
    fn name(&self) -> &str;
    // fn mtu(&self) -> MTU;
    /// Splits the device into independent read and write halves.
    fn split(self) -> (Self::Reader, Self::Writer);
}
// Actor messages used by the TUN adapter.
pub mod messages {
    /// Notifies the adapter that a new incoming connection is available.
    #[xactor::message(result = "()")]
    #[derive(Debug)]
    pub struct IncomingConnection;
    // #[xactor::message(result = "()")]
    // #[derive(Debug)]
    // pub struct Packet;
}
|
// NOTE(review): generated register accessors for VCTR33 (svd2rust-style pattern:
// crate::R/W wrappers plus one BitReader/BitWriter marker pair per bit B1056..B1087).
// Regenerate from the SVD rather than editing by hand.
#[doc = "Register `VCTR33` reader"]
pub type R = crate::R<VCTR33_SPEC>;
#[doc = "Register `VCTR33` writer"]
pub type W = crate::W<VCTR33_SPEC>;
#[doc = "Field `B1056` reader - B1056"]
pub type B1056_R = crate::BitReader;
#[doc = "Field `B1056` writer - B1056"]
pub type B1056_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1057` reader - B1057"]
pub type B1057_R = crate::BitReader;
#[doc = "Field `B1057` writer - B1057"]
pub type B1057_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1058` reader - B1058"]
pub type B1058_R = crate::BitReader;
#[doc = "Field `B1058` writer - B1058"]
pub type B1058_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1059` reader - B1059"]
pub type B1059_R = crate::BitReader;
#[doc = "Field `B1059` writer - B1059"]
pub type B1059_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1060` reader - B1060"]
pub type B1060_R = crate::BitReader;
#[doc = "Field `B1060` writer - B1060"]
pub type B1060_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1061` reader - B1061"]
pub type B1061_R = crate::BitReader;
#[doc = "Field `B1061` writer - B1061"]
pub type B1061_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1062` reader - B1062"]
pub type B1062_R = crate::BitReader;
#[doc = "Field `B1062` writer - B1062"]
pub type B1062_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1063` reader - B1063"]
pub type B1063_R = crate::BitReader;
#[doc = "Field `B1063` writer - B1063"]
pub type B1063_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1064` reader - B1064"]
pub type B1064_R = crate::BitReader;
#[doc = "Field `B1064` writer - B1064"]
pub type B1064_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1065` reader - B1065"]
pub type B1065_R = crate::BitReader;
#[doc = "Field `B1065` writer - B1065"]
pub type B1065_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1066` reader - B1066"]
pub type B1066_R = crate::BitReader;
#[doc = "Field `B1066` writer - B1066"]
pub type B1066_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1067` reader - B1067"]
pub type B1067_R = crate::BitReader;
#[doc = "Field `B1067` writer - B1067"]
pub type B1067_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1068` reader - B1068"]
pub type B1068_R = crate::BitReader;
#[doc = "Field `B1068` writer - B1068"]
pub type B1068_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1069` reader - B1069"]
pub type B1069_R = crate::BitReader;
#[doc = "Field `B1069` writer - B1069"]
pub type B1069_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1070` reader - B1070"]
pub type B1070_R = crate::BitReader;
#[doc = "Field `B1070` writer - B1070"]
pub type B1070_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1071` reader - B1071"]
pub type B1071_R = crate::BitReader;
#[doc = "Field `B1071` writer - B1071"]
pub type B1071_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1072` reader - B1072"]
pub type B1072_R = crate::BitReader;
#[doc = "Field `B1072` writer - B1072"]
pub type B1072_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1073` reader - B1073"]
pub type B1073_R = crate::BitReader;
#[doc = "Field `B1073` writer - B1073"]
pub type B1073_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1074` reader - B1074"]
pub type B1074_R = crate::BitReader;
#[doc = "Field `B1074` writer - B1074"]
pub type B1074_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1075` reader - B1075"]
pub type B1075_R = crate::BitReader;
#[doc = "Field `B1075` writer - B1075"]
pub type B1075_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1076` reader - B1076"]
pub type B1076_R = crate::BitReader;
#[doc = "Field `B1076` writer - B1076"]
pub type B1076_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1077` reader - B1077"]
pub type B1077_R = crate::BitReader;
#[doc = "Field `B1077` writer - B1077"]
pub type B1077_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1078` reader - B1078"]
pub type B1078_R = crate::BitReader;
#[doc = "Field `B1078` writer - B1078"]
pub type B1078_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1079` reader - B1079"]
pub type B1079_R = crate::BitReader;
#[doc = "Field `B1079` writer - B1079"]
pub type B1079_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1080` reader - B1080"]
pub type B1080_R = crate::BitReader;
#[doc = "Field `B1080` writer - B1080"]
pub type B1080_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1081` reader - B1081"]
pub type B1081_R = crate::BitReader;
#[doc = "Field `B1081` writer - B1081"]
pub type B1081_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1082` reader - B1082"]
pub type B1082_R = crate::BitReader;
#[doc = "Field `B1082` writer - B1082"]
pub type B1082_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1083` reader - B1083"]
pub type B1083_R = crate::BitReader;
#[doc = "Field `B1083` writer - B1083"]
pub type B1083_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1084` reader - B1084"]
pub type B1084_R = crate::BitReader;
#[doc = "Field `B1084` writer - B1084"]
pub type B1084_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1085` reader - B1085"]
pub type B1085_R = crate::BitReader;
#[doc = "Field `B1085` writer - B1085"]
pub type B1085_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1086` reader - B1086"]
pub type B1086_R = crate::BitReader;
#[doc = "Field `B1086` writer - B1086"]
pub type B1086_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1087` reader - B1087"]
pub type B1087_R = crate::BitReader;
#[doc = "Field `B1087` writer - B1087"]
pub type B1087_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
// Machine-generated (svd2rust) read accessors: bNNNN() extracts bit
// (NNNN - 1056) of the 32-bit register value. Do not hand-edit; a
// regeneration from the SVD would clobber changes.
#[doc = "Bit 0 - B1056"]
#[inline(always)]
pub fn b1056(&self) -> B1056_R {
B1056_R::new((self.bits & 1) != 0)
}
#[doc = "Bit 1 - B1057"]
#[inline(always)]
pub fn b1057(&self) -> B1057_R {
B1057_R::new(((self.bits >> 1) & 1) != 0)
}
#[doc = "Bit 2 - B1058"]
#[inline(always)]
pub fn b1058(&self) -> B1058_R {
B1058_R::new(((self.bits >> 2) & 1) != 0)
}
#[doc = "Bit 3 - B1059"]
#[inline(always)]
pub fn b1059(&self) -> B1059_R {
B1059_R::new(((self.bits >> 3) & 1) != 0)
}
#[doc = "Bit 4 - B1060"]
#[inline(always)]
pub fn b1060(&self) -> B1060_R {
B1060_R::new(((self.bits >> 4) & 1) != 0)
}
#[doc = "Bit 5 - B1061"]
#[inline(always)]
pub fn b1061(&self) -> B1061_R {
B1061_R::new(((self.bits >> 5) & 1) != 0)
}
#[doc = "Bit 6 - B1062"]
#[inline(always)]
pub fn b1062(&self) -> B1062_R {
B1062_R::new(((self.bits >> 6) & 1) != 0)
}
#[doc = "Bit 7 - B1063"]
#[inline(always)]
pub fn b1063(&self) -> B1063_R {
B1063_R::new(((self.bits >> 7) & 1) != 0)
}
#[doc = "Bit 8 - B1064"]
#[inline(always)]
pub fn b1064(&self) -> B1064_R {
B1064_R::new(((self.bits >> 8) & 1) != 0)
}
#[doc = "Bit 9 - B1065"]
#[inline(always)]
pub fn b1065(&self) -> B1065_R {
B1065_R::new(((self.bits >> 9) & 1) != 0)
}
#[doc = "Bit 10 - B1066"]
#[inline(always)]
pub fn b1066(&self) -> B1066_R {
B1066_R::new(((self.bits >> 10) & 1) != 0)
}
#[doc = "Bit 11 - B1067"]
#[inline(always)]
pub fn b1067(&self) -> B1067_R {
B1067_R::new(((self.bits >> 11) & 1) != 0)
}
#[doc = "Bit 12 - B1068"]
#[inline(always)]
pub fn b1068(&self) -> B1068_R {
B1068_R::new(((self.bits >> 12) & 1) != 0)
}
#[doc = "Bit 13 - B1069"]
#[inline(always)]
pub fn b1069(&self) -> B1069_R {
B1069_R::new(((self.bits >> 13) & 1) != 0)
}
#[doc = "Bit 14 - B1070"]
#[inline(always)]
pub fn b1070(&self) -> B1070_R {
B1070_R::new(((self.bits >> 14) & 1) != 0)
}
#[doc = "Bit 15 - B1071"]
#[inline(always)]
pub fn b1071(&self) -> B1071_R {
B1071_R::new(((self.bits >> 15) & 1) != 0)
}
#[doc = "Bit 16 - B1072"]
#[inline(always)]
pub fn b1072(&self) -> B1072_R {
B1072_R::new(((self.bits >> 16) & 1) != 0)
}
#[doc = "Bit 17 - B1073"]
#[inline(always)]
pub fn b1073(&self) -> B1073_R {
B1073_R::new(((self.bits >> 17) & 1) != 0)
}
#[doc = "Bit 18 - B1074"]
#[inline(always)]
pub fn b1074(&self) -> B1074_R {
B1074_R::new(((self.bits >> 18) & 1) != 0)
}
#[doc = "Bit 19 - B1075"]
#[inline(always)]
pub fn b1075(&self) -> B1075_R {
B1075_R::new(((self.bits >> 19) & 1) != 0)
}
#[doc = "Bit 20 - B1076"]
#[inline(always)]
pub fn b1076(&self) -> B1076_R {
B1076_R::new(((self.bits >> 20) & 1) != 0)
}
#[doc = "Bit 21 - B1077"]
#[inline(always)]
pub fn b1077(&self) -> B1077_R {
B1077_R::new(((self.bits >> 21) & 1) != 0)
}
#[doc = "Bit 22 - B1078"]
#[inline(always)]
pub fn b1078(&self) -> B1078_R {
B1078_R::new(((self.bits >> 22) & 1) != 0)
}
#[doc = "Bit 23 - B1079"]
#[inline(always)]
pub fn b1079(&self) -> B1079_R {
B1079_R::new(((self.bits >> 23) & 1) != 0)
}
#[doc = "Bit 24 - B1080"]
#[inline(always)]
pub fn b1080(&self) -> B1080_R {
B1080_R::new(((self.bits >> 24) & 1) != 0)
}
#[doc = "Bit 25 - B1081"]
#[inline(always)]
pub fn b1081(&self) -> B1081_R {
B1081_R::new(((self.bits >> 25) & 1) != 0)
}
#[doc = "Bit 26 - B1082"]
#[inline(always)]
pub fn b1082(&self) -> B1082_R {
B1082_R::new(((self.bits >> 26) & 1) != 0)
}
#[doc = "Bit 27 - B1083"]
#[inline(always)]
pub fn b1083(&self) -> B1083_R {
B1083_R::new(((self.bits >> 27) & 1) != 0)
}
#[doc = "Bit 28 - B1084"]
#[inline(always)]
pub fn b1084(&self) -> B1084_R {
B1084_R::new(((self.bits >> 28) & 1) != 0)
}
#[doc = "Bit 29 - B1085"]
#[inline(always)]
pub fn b1085(&self) -> B1085_R {
B1085_R::new(((self.bits >> 29) & 1) != 0)
}
#[doc = "Bit 30 - B1086"]
#[inline(always)]
pub fn b1086(&self) -> B1086_R {
B1086_R::new(((self.bits >> 30) & 1) != 0)
}
#[doc = "Bit 31 - B1087"]
#[inline(always)]
pub fn b1087(&self) -> B1087_R {
B1087_R::new(((self.bits >> 31) & 1) != 0)
}
}
impl W {
// Machine-generated (svd2rust) write proxies: bNNNN() returns a
// `BitWriter` positioned at bit (NNNN - 1056). Do not hand-edit.
#[doc = "Bit 0 - B1056"]
#[inline(always)]
#[must_use]
pub fn b1056(&mut self) -> B1056_W<VCTR33_SPEC, 0> {
B1056_W::new(self)
}
#[doc = "Bit 1 - B1057"]
#[inline(always)]
#[must_use]
pub fn b1057(&mut self) -> B1057_W<VCTR33_SPEC, 1> {
B1057_W::new(self)
}
#[doc = "Bit 2 - B1058"]
#[inline(always)]
#[must_use]
pub fn b1058(&mut self) -> B1058_W<VCTR33_SPEC, 2> {
B1058_W::new(self)
}
#[doc = "Bit 3 - B1059"]
#[inline(always)]
#[must_use]
pub fn b1059(&mut self) -> B1059_W<VCTR33_SPEC, 3> {
B1059_W::new(self)
}
#[doc = "Bit 4 - B1060"]
#[inline(always)]
#[must_use]
pub fn b1060(&mut self) -> B1060_W<VCTR33_SPEC, 4> {
B1060_W::new(self)
}
#[doc = "Bit 5 - B1061"]
#[inline(always)]
#[must_use]
pub fn b1061(&mut self) -> B1061_W<VCTR33_SPEC, 5> {
B1061_W::new(self)
}
#[doc = "Bit 6 - B1062"]
#[inline(always)]
#[must_use]
pub fn b1062(&mut self) -> B1062_W<VCTR33_SPEC, 6> {
B1062_W::new(self)
}
#[doc = "Bit 7 - B1063"]
#[inline(always)]
#[must_use]
pub fn b1063(&mut self) -> B1063_W<VCTR33_SPEC, 7> {
B1063_W::new(self)
}
#[doc = "Bit 8 - B1064"]
#[inline(always)]
#[must_use]
pub fn b1064(&mut self) -> B1064_W<VCTR33_SPEC, 8> {
B1064_W::new(self)
}
#[doc = "Bit 9 - B1065"]
#[inline(always)]
#[must_use]
pub fn b1065(&mut self) -> B1065_W<VCTR33_SPEC, 9> {
B1065_W::new(self)
}
#[doc = "Bit 10 - B1066"]
#[inline(always)]
#[must_use]
pub fn b1066(&mut self) -> B1066_W<VCTR33_SPEC, 10> {
B1066_W::new(self)
}
#[doc = "Bit 11 - B1067"]
#[inline(always)]
#[must_use]
pub fn b1067(&mut self) -> B1067_W<VCTR33_SPEC, 11> {
B1067_W::new(self)
}
#[doc = "Bit 12 - B1068"]
#[inline(always)]
#[must_use]
pub fn b1068(&mut self) -> B1068_W<VCTR33_SPEC, 12> {
B1068_W::new(self)
}
#[doc = "Bit 13 - B1069"]
#[inline(always)]
#[must_use]
pub fn b1069(&mut self) -> B1069_W<VCTR33_SPEC, 13> {
B1069_W::new(self)
}
#[doc = "Bit 14 - B1070"]
#[inline(always)]
#[must_use]
pub fn b1070(&mut self) -> B1070_W<VCTR33_SPEC, 14> {
B1070_W::new(self)
}
#[doc = "Bit 15 - B1071"]
#[inline(always)]
#[must_use]
pub fn b1071(&mut self) -> B1071_W<VCTR33_SPEC, 15> {
B1071_W::new(self)
}
#[doc = "Bit 16 - B1072"]
#[inline(always)]
#[must_use]
pub fn b1072(&mut self) -> B1072_W<VCTR33_SPEC, 16> {
B1072_W::new(self)
}
#[doc = "Bit 17 - B1073"]
#[inline(always)]
#[must_use]
pub fn b1073(&mut self) -> B1073_W<VCTR33_SPEC, 17> {
B1073_W::new(self)
}
#[doc = "Bit 18 - B1074"]
#[inline(always)]
#[must_use]
pub fn b1074(&mut self) -> B1074_W<VCTR33_SPEC, 18> {
B1074_W::new(self)
}
#[doc = "Bit 19 - B1075"]
#[inline(always)]
#[must_use]
pub fn b1075(&mut self) -> B1075_W<VCTR33_SPEC, 19> {
B1075_W::new(self)
}
#[doc = "Bit 20 - B1076"]
#[inline(always)]
#[must_use]
pub fn b1076(&mut self) -> B1076_W<VCTR33_SPEC, 20> {
B1076_W::new(self)
}
#[doc = "Bit 21 - B1077"]
#[inline(always)]
#[must_use]
pub fn b1077(&mut self) -> B1077_W<VCTR33_SPEC, 21> {
B1077_W::new(self)
}
#[doc = "Bit 22 - B1078"]
#[inline(always)]
#[must_use]
pub fn b1078(&mut self) -> B1078_W<VCTR33_SPEC, 22> {
B1078_W::new(self)
}
#[doc = "Bit 23 - B1079"]
#[inline(always)]
#[must_use]
pub fn b1079(&mut self) -> B1079_W<VCTR33_SPEC, 23> {
B1079_W::new(self)
}
#[doc = "Bit 24 - B1080"]
#[inline(always)]
#[must_use]
pub fn b1080(&mut self) -> B1080_W<VCTR33_SPEC, 24> {
B1080_W::new(self)
}
#[doc = "Bit 25 - B1081"]
#[inline(always)]
#[must_use]
pub fn b1081(&mut self) -> B1081_W<VCTR33_SPEC, 25> {
B1081_W::new(self)
}
#[doc = "Bit 26 - B1082"]
#[inline(always)]
#[must_use]
pub fn b1082(&mut self) -> B1082_W<VCTR33_SPEC, 26> {
B1082_W::new(self)
}
#[doc = "Bit 27 - B1083"]
#[inline(always)]
#[must_use]
pub fn b1083(&mut self) -> B1083_W<VCTR33_SPEC, 27> {
B1083_W::new(self)
}
#[doc = "Bit 28 - B1084"]
#[inline(always)]
#[must_use]
pub fn b1084(&mut self) -> B1084_W<VCTR33_SPEC, 28> {
B1084_W::new(self)
}
#[doc = "Bit 29 - B1085"]
#[inline(always)]
#[must_use]
pub fn b1085(&mut self) -> B1085_W<VCTR33_SPEC, 29> {
B1085_W::new(self)
}
#[doc = "Bit 30 - B1086"]
#[inline(always)]
#[must_use]
pub fn b1086(&mut self) -> B1086_W<VCTR33_SPEC, 30> {
B1086_W::new(self)
}
#[doc = "Bit 31 - B1087"]
#[inline(always)]
#[must_use]
pub fn b1087(&mut self) -> B1087_W<VCTR33_SPEC, 31> {
B1087_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
// Unsafe because an arbitrary bit pattern may not be meaningful for
// every field of the register (standard svd2rust contract).
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "MPCBBx vector register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`vctr33::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`vctr33::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct VCTR33_SPEC;
impl crate::RegisterSpec for VCTR33_SPEC {
// Register is accessed as a 32-bit word.
type Ux = u32;
}
#[doc = "`read()` method returns [`vctr33::R`](R) reader structure"]
impl crate::Readable for VCTR33_SPEC {}
#[doc = "`write(|w| ..)` method takes [`vctr33::W`](W) writer structure"]
impl crate::Writable for VCTR33_SPEC {
// Both bitmaps are zero: no field needs a fixed one/zero written to be
// modified (see the svd2rust generic register API).
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets VCTR33 to value 0xffff_ffff"]
impl crate::Resettable for VCTR33_SPEC {
const RESET_VALUE: Self::Ux = 0xffff_ffff;
}
|
#[doc = "Register `SECWM1R_PRG` reader"]
pub type R = crate::R<SECWM1R_PRG_SPEC>;
#[doc = "Register `SECWM1R_PRG` writer"]
pub type W = crate::W<SECWM1R_PRG_SPEC>;
#[doc = "Field `SECWM1_STRT` reader - Bank1 security WM area 1 start sector"]
pub type SECWM1_STRT_R = crate::FieldReader;
#[doc = "Field `SECWM1_STRT` writer - Bank1 security WM area 1 start sector"]
pub type SECWM1_STRT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
#[doc = "Field `SECWM1_END` reader - Bank1 security WM area 1 end sector"]
pub type SECWM1_END_R = crate::FieldReader;
#[doc = "Field `SECWM1_END` writer - Bank1 security WM area 1 end sector"]
pub type SECWM1_END_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
impl R {
// Machine-generated (svd2rust): two 7-bit fields at bit offsets 0 and 16.
#[doc = "Bits 0:6 - Bank1 security WM area 1 start sector"]
#[inline(always)]
pub fn secwm1_strt(&self) -> SECWM1_STRT_R {
SECWM1_STRT_R::new((self.bits & 0x7f) as u8)
}
#[doc = "Bits 16:22 - Bank1 security WM area 1 end sector"]
#[inline(always)]
pub fn secwm1_end(&self) -> SECWM1_END_R {
SECWM1_END_R::new(((self.bits >> 16) & 0x7f) as u8)
}
}
impl W {
#[doc = "Bits 0:6 - Bank1 security WM area 1 start sector"]
#[inline(always)]
#[must_use]
pub fn secwm1_strt(&mut self) -> SECWM1_STRT_W<SECWM1R_PRG_SPEC, 0> {
SECWM1_STRT_W::new(self)
}
#[doc = "Bits 16:22 - Bank1 security WM area 1 end sector"]
#[inline(always)]
#[must_use]
pub fn secwm1_end(&mut self) -> SECWM1_END_W<SECWM1R_PRG_SPEC, 16> {
SECWM1_END_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
// Unsafe because an arbitrary bit pattern may not be meaningful for
// every field of the register (standard svd2rust contract).
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "FLASH security watermark for Bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`secwm1r_prg::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`secwm1r_prg::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SECWM1R_PRG_SPEC;
impl crate::RegisterSpec for SECWM1R_PRG_SPEC {
// Register is accessed as a 32-bit word.
type Ux = u32;
}
#[doc = "`read()` method returns [`secwm1r_prg::R`](R) reader structure"]
impl crate::Readable for SECWM1R_PRG_SPEC {}
#[doc = "`write(|w| ..)` method takes [`secwm1r_prg::W`](W) writer structure"]
impl crate::Writable for SECWM1R_PRG_SPEC {
// Both bitmaps are zero: no field needs a fixed one/zero written to be
// modified (see the svd2rust generic register API).
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SECWM1R_PRG to value 0"]
impl crate::Resettable for SECWM1R_PRG_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
//! This is a classy "Forward" rendering pipeline.
//! It supports all materials except for `Inverse` blending.
//! It renders everything with a single pass, ordering opaque objects
//! first front-to-back, and then blended ones back-to-front on top.
//! TODO: apply actual lights in the shader. Currently hard-coded.
use std::marker::PhantomData;
use gfx;
use gfx_phase;
mod pipe;
pub use self::pipe::{order, Pipeline};
/// A short typedef for the phase: a cached `gfx_phase` phase pairing
/// this pipeline's [`Technique`] with its material and view-info types.
pub type Phase<R> = gfx_phase::CachedPhase<R,
::Material<R>,
::view::Info<f32>,
Technique<R>,
>;
mod param {
#![allow(missing_docs)]
use gfx::shade::TextureParam;
use gfx::handle::Buffer;
/// Per-light data uploaded into the `b_Lights` shader buffer by
/// `Technique::update`.
#[derive(Debug, Clone, Copy)]
pub struct Light {
// Position as packed by `Technique::update` from the light's position.
pub position: [f32; 4],
pub color: [f32; 4],
// Attenuation coefficients; their meaning depends on the light's
// `Attenuation` variant (see the match in `Technique::update`).
pub attenuation: [f32; 4],
}
// `gfx_parameters!` generates a `Struct` type whose fields bind to the
// shader parameters named on the left of each `@`.
gfx_parameters!( Struct {
u_Transform@ mvp: [[f32; 4]; 4],
u_WorldTransform@ world: [[f32; 4]; 4],
u_NormalRotation@ normal: [[f32; 3]; 3],
u_Color@ color: [f32; 4],
u_Ambient@ ambient: [f32; 4],
t_Diffuse@ texture: TextureParam<R>,
u_AlphaTest@ alpha_test: f32,
u_LightMask@ light_mask: [i32; 4], //TODO: u32
b_Lights@ lights: Buffer<R, Light>,
});
}
/// Typedef for the ordering function.
pub type OrderFun<R> = gfx_phase::OrderFun<f32, Kernel, param::Struct<R>>;
// Capacity of the dynamic light buffer allocated in `Technique::new`.
const MAX_LIGHTS: usize = 256; // must be in sync with the shaders
// Embedded GLSL sources: plain and textured Phong variants.
const PHONG_VS : &'static [u8] = include_bytes!("../../gpu/phong.glslv");
const PHONG_FS : &'static [u8] = include_bytes!("../../gpu/phong.glslf");
const PHONG_TEX_VS: &'static [u8] = include_bytes!("../../gpu/phong_tex.glslv");
const PHONG_TEX_FS: &'static [u8] = include_bytes!("../../gpu/phong_tex.glslf");
/// Pipeline creation error.
#[derive(Clone, Debug, PartialEq)]
pub enum Error {
/// Failed to create a texture.
Texture(gfx::tex::TextureError),
/// Failed to link a program.
Program(gfx::ProgramError),
}
// The `From` impls below let `Technique::new` propagate both failure
// kinds via `try!`.
impl From<gfx::tex::TextureError> for Error {
fn from(e: gfx::tex::TextureError) -> Error {
Error::Texture(e)
}
}
impl From<gfx::ProgramError> for Error {
fn from(e: gfx::ProgramError) -> Error {
Error::Program(e)
}
}
/// The core technique of the pipeline.
pub struct Technique<R: gfx::Resources> {
// Phong program without texturing.
program: gfx::handle::Program<R>,
// Phong program with a diffuse texture (selected when the material has
// a texture and the mesh has an `a_Tex0` attribute; see `test`).
program_textured: gfx::handle::Program<R>,
// Depth-tested draw states, one per supported blend preset.
state_add: gfx::DrawState,
state_alpha: gfx::DrawState,
state_opaque: gfx::DrawState,
state_multiply: gfx::DrawState,
// GPU-side uniform buffer with room for MAX_LIGHTS `param::Light`s.
light_buf: gfx::handle::Buffer<R, param::Light>,
/// The default texture used for materials that don't have it.
pub default_texture: gfx::handle::Texture<R>,
/// The light color of non-lit areas.
pub ambient_color: gfx::ColorValue,
/// Active lights.
pub lights: Vec<super::Light<f32>>,
}
impl<R: gfx::Resources> Technique<R> {
/// Create a new technique.
///
/// Links both Phong program variants, derives the blend-specific draw
/// states from a shared depth-tested base state, and allocates the
/// dynamic light buffer.
pub fn new<F: gfx::Factory<R>>(factory: &mut F)
-> Result<Technique<R>, Error> {
use gfx::traits::FactoryExt;
// 1x1 opaque-white fallback texture for untextured materials.
let texture = try!(factory.create_texture_rgba8_static(1, 1, &[0xFFFFFFFF]));
let prog0 = try!(factory.link_program(PHONG_VS, PHONG_FS));
let prog1 = try!(factory.link_program(PHONG_TEX_VS, PHONG_TEX_FS));
let state = gfx::DrawState::new().depth(
gfx::state::Comparison::LessEqual,
true
);
Ok(Technique {
program: prog0,
program_textured: prog1,
state_add: state.clone().blend(gfx::BlendPreset::Add),
state_alpha: state.clone().blend(gfx::BlendPreset::Alpha),
state_multiply: state.clone().blend(gfx::BlendPreset::Multiply),
state_opaque: state,
light_buf: factory.create_buffer_dynamic(MAX_LIGHTS, gfx::BufferRole::Uniform),
default_texture: texture,
ambient_color: [0.1, 0.1, 0.1, 0.0],
lights: Vec::new(),
})
}
/// Update the light buffer before drawing.
///
/// Light `i` is written at buffer offset `i + 1`: slot 0 stays unused
/// so a zero byte in `u_LightMask` can mean "no light" (`fix_params`
/// packs 1-based indices).
/// NOTE(review): `self.lights` is not clamped to MAX_LIGHTS here —
/// confirm callers never exceed it.
pub fn update<S: gfx::Stream<R>>(&self, stream: &mut S) {
use cgmath::FixedArray;
for (i, lit) in self.lights.iter().enumerate() {
use super::light::Attenuation::*;
let par = param::Light {
position: *lit.position.as_fixed(),
color: lit.color,
// Encode each attenuation model into the same vec4 layout the
// shader reads.
attenuation: match lit.attenuation {
Constant { intensity } => [1.0 / intensity, 0.0, 0.0, 0.0],
Quadratic { k0, k1, k2 } => [k0, k1, k2, 0.0],
Spherical { intensity, distance } => [
1.0 / intensity,
2.0 / distance,
1.0 / (distance * distance),
0.0],
},
};
stream.access().0.update_buffer(self.light_buf.raw(), &[par], i+1).unwrap()
}
}
}
/// Kernel of the technique, defining what program needs to be used.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct Kernel {
// Whether the textured program variant is used.
textured: bool,
// The material's transparency mode; selects the draw state in `compile`.
transparency: ::Transparency,
}
impl<R: gfx::Resources> gfx_phase::Technique<R, ::Material<R>, ::view::Info<f32>> for Technique<R> {
type Kernel = Kernel;
type Params = param::Struct<R>;
// Decide whether this technique can draw (mesh, material); `Invert`
// blending is the one unsupported mode (see module docs).
fn test(&self, mesh: &gfx::Mesh<R>, mat: &::Material<R>) -> Option<Kernel> {
if mat.transparency == ::Transparency::Blend(gfx::BlendPreset::Invert) {
return None
}
Some(Kernel {
// Texturing requires both a material texture and mesh UVs.
textured: mat.texture.is_some() &&
mesh.attributes.iter().find(|a| a.name == "a_Tex0").is_some(),
transparency: mat.transparency,
})
}
// Pick the program, a default-initialized parameter block, and the
// draw state matching the kernel's transparency mode.
fn compile<'a>(&'a self, kernel: Kernel)
-> gfx_phase::TechResult<'a, R, param::Struct<R>> {
use ::Transparency::*;
( if kernel.textured {
&self.program_textured
} else {
&self.program
},
param::Struct {
mvp: [[0.0; 4]; 4],
world: [[0.0; 4]; 4],
normal: [[0.0; 3]; 3],
color: [0.0; 4],
ambient: [0.0; 4],
texture: (self.default_texture.clone(), None),
// Map a 0-255 cutout threshold to the shader's 0.0-1.0 range.
alpha_test: if let Cutout(v) = kernel.transparency {
v as f32 / 255 as f32
}else { 0.0 },
light_mask: [0; 4],
lights: self.light_buf.clone(),
_r: PhantomData,
},
match kernel.transparency {
Blend(gfx::BlendPreset::Add) => &self.state_add,
Blend(gfx::BlendPreset::Alpha) => &self.state_alpha,
Blend(gfx::BlendPreset::Multiply) => &self.state_multiply,
_ => &self.state_opaque,
},
None,
)
}
// Fill per-entity shader parameters from the material and view info.
fn fix_params(&self, mat: &::Material<R>, space: &::view::Info<f32>,
params: &mut param::Struct<R>) {
use cgmath::FixedArray;
params.mvp = *space.mx_vertex.as_fixed();
params.world = *space.mx_world.as_fixed();
params.normal = *space.mx_normal.as_fixed();
params.color = mat.color;
params.ambient = self.ambient_color;
// Pack the 1-based indices of active lights into light_mask: 8 bits
// per index, four indices per i32 element, so at most 16 lights fit.
// When `bit` reaches 32, `bit & 0x1F` resets it to 0 and `bit >> 5`
// advances `element` by one.
// NOTE(review): with more than 16 active lights, `element` reaches 4
// and `mask[element]` would panic — confirm callers bound the count.
params.light_mask = self.lights.iter().enumerate().fold(
(0, 0, [0; 4]), |(mut bit, element, mut mask), (i, lit)| {
//TODO: frustum intersect with entity
if lit.active {
mask[element] |= ((i + 1) as i32) << bit;
bit += 8;
(bit & 0x1F, element + (bit >> 5), mask)
} else { (bit, element, mask) }
}
).2;
if let Some(ref tex) = mat.texture {
params.texture = tex.clone();
}
}
}
|
//! An API for auto-generating and obtaining copies of metadata cache
use crate::repository::{Ebuild, Repository};
use ::crypto::{digest::Digest, md5::Md5};
use ::directories::ProjectDirs;
use ::lru::LruCache;
use ::std::{
borrow::ToOwned,
fmt,
fs::{self, File},
io::{ErrorKind, Read},
os::unix::process::ExitStatusExt,
panic,
path::{Path, PathBuf},
process::Command,
result::Result::{Err, Ok},
str,
string::String,
};
use ::tempfile::{Builder, TempDir};
mod cacheentry;
mod md5cachedir;
use cacheentry::CacheEntry;
use md5cachedir::Md5CacheDir;
/// Caches Gentoo ebuild metadata, layering in-memory LRU caches over an
/// on-disk, egencache-generated md5-cache.
pub struct MetaDataCache {
// The repository being queried.
r: Repository,
// NOTE(review): `ebuild_md5_cache` and `ebuild_cache` are not
// referenced by any method in this impl — confirm whether they are
// still intended to be used.
ebuild_md5_cache: LruCache<String, String>,
// eclass name -> md5 digest of the eclass file (see `get_eclass_md5`).
eclass_md5_cache: LruCache<String, String>,
ebuild_cache: LruCache<String, CacheEntry>,
// Reader over the repo's metadata/md5-cache directory (plus any
// fallback directories registered via `add_fallback_caches`).
ebuild_md5_cache_dir: Md5CacheDir,
// Per-repository writable cache directory under the user cache dir.
cache_dir: PathBuf,
// Scratch directory, removed when the cache is dropped.
temp_dir: TempDir,
}
impl MetaDataCache {
    /// Build a cache layered over `r`: in-memory LRU caches for digests,
    /// the repo's on-disk md5-cache directory, a per-repo writable cache
    /// dir under the user cache directory, and a scratch temp dir.
    ///
    /// Panics if the project/cache directories or the repo layout cannot
    /// be resolved.
    pub fn new(r: Repository) -> Self {
        let pd =
            ProjectDirs::from("io.github.kentfredric", "", "grease-util")
                .unwrap();
        let c = MetaDataCache {
            r: r.to_owned(),
            ebuild_md5_cache: LruCache::new(500),
            eclass_md5_cache: LruCache::new(500),
            ebuild_cache: LruCache::new(100),
            ebuild_md5_cache_dir: Md5CacheDir::new(
                r.get_dir("metadata/md5-cache").unwrap(),
                None,
            ),
            cache_dir: pd.cache_dir().join(r.name().unwrap()),
            temp_dir: Builder::new()
                .prefix("grease-util-")
                .rand_bytes(7)
                .tempdir()
                .unwrap(),
        };
        c.ensure_cache_dir();
        c
    }
    /// Register extra md5-cache directories to consult when the primary
    /// one misses.
    pub(crate) fn add_fallback_caches(&mut self, dirs: Vec<PathBuf>) {
        self.ebuild_md5_cache_dir.add_children(dirs)
    }
    /// Ensure `cache_dir` exists and is a directory, creating it on
    /// first use. Panics if it is unusable or exists as a non-directory.
    fn ensure_cache_dir(&self) {
        match self.cache_dir.metadata() {
            Err(e) => match e.kind() {
                ErrorKind::NotFound => {
                    fs::create_dir_all(&self.cache_dir).unwrap();
                },
                _ => panic!("Cache directory is not usable: {}", e),
            },
            Ok(m) => {
                if !m.is_dir() {
                    panic!(
                        "Cache directory {:?} exists but is not a dir",
                        &self.cache_dir
                    );
                }
            },
        }
    }
    /// Hex MD5 digest of the file at `path`, read in 8 KiB chunks.
    ///
    /// Panics on open/read errors. (Previously a read error was treated
    /// like EOF, silently producing the digest of a truncated file.)
    fn md5_file(&self, path: &Path) -> String {
        let mut f = File::open(path).unwrap();
        let mut buf = [0; 8 * 1024];
        let mut md5 = Md5::new();
        loop {
            let len = f.read(&mut buf[..]).unwrap();
            if len == 0 {
                break;
            }
            md5.input(&buf[..len]);
        }
        md5.result_str()
    }
    /// MD5 of the named eclass, memoized in `eclass_md5_cache`.
    fn get_eclass_md5(&mut self, name: &str) -> &String {
        let my_name = name.to_owned();
        if !self.eclass_md5_cache.contains(&my_name) {
            let p = self.r.eclass_path(&my_name).unwrap();
            self.eclass_md5_cache.put(my_name.to_owned(), self.md5_file(&p));
        }
        self.eclass_md5_cache.get(&my_name).unwrap()
    }
    /// Metadata for `ebuild`, regenerating the on-disk cache on a miss.
    fn get_cache_for(&mut self, ebuild: Ebuild) -> Option<CacheEntry> {
        // (The previously computed `cache_key` here was unused.)
        match self.get_disk_cache_for(ebuild.to_owned()) {
            None => {
                self.generate_cache_for(ebuild.to_owned());
                self.get_disk_cache_for(ebuild)
            },
            other => other,
        }
    }
    /// Minimal repos.conf snippet pointing portage at `repo` as `name`.
    fn cache_config(&self, name: &str, repo: &str) -> String {
        format!(
            "[DEFAULT]
main-repo = {name}
[{name}]
location = {repo}
sync-type = rsync
sync-uri = rsync://invalid
",
            // Key was previously misspelled "syc-uri"; portage expects
            // "sync-uri" when sync-type is set.
            name = name,
            repo = repo,
        )
    }
    /// Shell out to `egencache` to (re)build the md5 metadata cache for
    /// the category/package of `ebuild`.
    ///
    /// Panics if egencache cannot be spawned or exits unsuccessfully.
    fn generate_cache_for(&mut self, ebuild: Ebuild) {
        let repo_name = self.r.name().unwrap();
        let target = format!(
            "{category}/{package}",
            category = ebuild.category(),
            package = ebuild.pn()
        );
        let mut job = Command::new("egencache")
            .args(&[
                "--repo",
                &repo_name,
                "--repositories-configuration",
                &self.cache_config(
                    &repo_name,
                    self.r.path().to_str().unwrap(),
                ),
                "--cache-dir",
            ])
            .arg(self.cache_dir.as_os_str())
            .args(&["--tolerant", "--jobs", "3", "--update", &target])
            .spawn()
            .expect("Can't start egencache");
        let exit = job.wait().expect("Failed to wait for egencache");
        match exit.code() {
            // 130 = 128 + SIGINT: the operator interrupted the run.
            Some(130) => panic!("egencache exited by sigint"),
            Some(0) => (),
            Some(s) => panic!("egencache exited with value {}", s),
            None => match exit.signal() {
                None => panic!("egencache killed by unknown signal"),
                Some(s) => panic!("egencache killed by signal {}", s),
            },
        }
    }
    /// Find a valid on-disk cache entry for `ebuild`, or `None`.
    ///
    /// An entry is valid when its recorded ebuild md5 matches the ebuild
    /// file on disk AND every recorded eclass md5 matches the current
    /// eclass file.
    fn get_disk_cache_for(&mut self, ebuild: Ebuild) -> Option<CacheEntry> {
        let cache_key = format!("{}/{}", ebuild.category(), ebuild.pf());
        let cache_leaves = self.ebuild_md5_cache_dir.get_iter(&cache_key);
        // The ebuild digest is the same for every candidate leaf; compute
        // it once instead of once per iteration.
        let ebuild_md5 = self.md5_file(&ebuild.path());
        'leaf: for leaf in cache_leaves {
            let cache_entry = CacheEntry::read_from(&leaf);
            if ebuild_md5 != cache_entry.md5() {
                continue;
            }
            for (eclass, emd5) in cache_entry.eclasses() {
                // Compare the *recorded eclass* digest against the current
                // eclass file. (Previously this compared the ebuild digest,
                // so every entry that listed eclasses was rejected.)
                if emd5 != *self.get_eclass_md5(&eclass) {
                    continue 'leaf;
                }
            }
            return Some(cache_entry);
        }
        None
    }
}
impl fmt::Debug for MetaDataCache {
    /// Summarized debug view: the large in-memory LRU caches are omitted.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Struct name was previously the copy-paste placeholder "Foo".
        f.debug_struct("MetaDataCache")
            .field("r", &self.r)
            .field("cache_dir", &self.cache_dir)
            .field("temp_dir", &self.temp_dir)
            .finish()
    }
}
#[test]
fn test_get_md5() {
    // Smoke test: digest a fixed set of eclasses repeatedly so the
    // second and later passes exercise the eclass md5 LRU cache.
    // NOTE(review): requires a populated /usr/portage checkout, so this
    // test is environment-dependent.
    let eclasses = vec![
        "alternatives",
        "ant-tasks",
        "apache-2",
        "apache-module",
        "aspell-dict-r1",
        "autotools",
        "autotools-multilib",
        "autotools-utils",
        "base",
        "bash-completion-r1",
        "bazel",
        "bsdmk",
        "bzr",
        "cannadic",
        "cargo",
        "cdrom",
        "check-reqs",
        "chromium-2",
        "cmake-multilib",
        "cmake-utils",
        "common-lisp-3",
        "cron",
        "cuda",
        "cvs",
        "darcs",
        "db",
        "db-use",
        "depend.apache",
        "desktop",
        "distutils-r1",
        "dotnet",
        "eapi7-ver",
        "elisp-common",
        "elisp",
        "emboss-r2",
        "epatch",
        "epunt-cxx",
        "estack",
        "eutils",
        "fcaps",
        "fdo-mime",
        "findlib",
        "fixheadtails",
        "flag-o-matic",
        "font-ebdftopcf",
        "font",
        "fortran-2",
        "fox",
        "freebsd",
        "freedict",
        "games",
        "games-mods",
        "ghc-package",
        "git-2",
        "git-r3",
        "gkrellm-plugin",
        "gnome2",
        "gnome2-utils",
        "gnome.org",
        "gnome-python-common-r1",
        "gnuconfig",
        "gnustep-2",
        "gnustep-base",
        "golang-base",
        "golang-build",
        "golang-vcs",
        "golang-vcs-snapshot",
        "gstreamer",
        "haskell-cabal",
        "java-ant-2",
        "java-osgi",
        "java-pkg-2",
        "java-pkg-opt-2",
        "java-pkg-simple",
        "java-utils-2",
        "java-virtuals-2",
        "java-vm-2",
        "kde5",
        "kde5-functions",
        "kde5-meta-pkg",
        "kernel-2",
        "kodi-addon",
        "l10n",
        "latex-package",
        "leechcraft",
        "libretro-core",
        "libtool",
        "linux-info",
        "linux-mod",
        "llvm",
        "ltprune",
        "mate-desktop.org",
        "mate",
        "mercurial",
        "meson",
        "mono",
        "mono-env",
        "mount-boot",
        "mozconfig-v6.52",
        "mozconfig-v6.60",
        "mozcoreconf-v4",
        "mozcoreconf-v5",
        "mozcoreconf-v6",
        "mozextension",
        "mozlinguas-v2",
        "multibuild",
        "multilib-build",
        "multilib",
        "multilib-minimal",
        "multiprocessing",
        "myspell",
        "myspell-r2",
        "mysql-cmake",
        "mysql_fx",
        "mysql-multilib-r1",
        "mysql-v2",
        "netsurf",
        "ninja-utils",
        "nsplugins",
        "nvidia-driver",
        "oasis",
        "obs-download",
        "obs-service",
        "office-ext-r1",
        "opam",
        "openib",
        "out-of-source",
        "pam",
        "pax-utils",
        "perl-app",
        "perl-functions",
        "perl-module",
        "php-ext-pecl-r3",
        "php-ext-source-r2",
        "php-ext-source-r3",
        "php-pear-r2",
        "portability",
        "postgres",
        "postgres-multi",
        "prefix",
        "preserve-libs",
        "python-any-r1",
        "python-r1",
        "python-single-r1",
        "python-utils-r1",
        "qmail",
        "qmake-utils",
        "qt5-build",
        "readme.gentoo",
        "readme.gentoo-r1",
        "rebar",
        "ros-catkin",
        "rpm",
        "ruby-fakegem",
        "ruby-ng",
        "ruby-ng-gnome2",
        "ruby-single",
        "ruby-utils",
        "rust-toolchain",
        "s6",
        "savedconfig",
        "scons-utils",
        "selinux-policy-2",
        "sgml-catalog",
        "ssl-cert",
        "stardict",
        "subversion",
        "sword-module",
        "systemd",
        "texlive-common",
        "texlive-module",
        "tmpfiles",
        "toolchain-autoconf",
        "toolchain-binutils",
        "toolchain",
        "toolchain-funcs",
        "toolchain-glibc",
        "twisted-r1",
        "udev",
        "unpacker",
        "user",
        "vala",
        "vcs-clean",
        "vcs-snapshot",
        "vdr-plugin-2",
        "versionator",
        "vim-doc",
        "vim-plugin",
        "vim-spell",
        "virtualx",
        "waf-utils",
        "webapp",
        "wxwidgets",
        "xdg",
        "xdg-utils",
        "xemacs-elisp-common",
        "xemacs-elisp",
        "xemacs-packages",
        "xfconf",
        "xorg-2",
        "xorg-3",
    ];
    let r = Repository::new(Path::new("/usr/portage"));
    let mut mc = MetaDataCache::new(r);
    // The loop count is deliberately > 1 so cached lookups are hit.
    for _ in 1..20 {
        for v in &eclasses {
            drop(mc.get_eclass_md5(v));
        }
    }
    drop(mc);
    // The trailing `panic!("done")` (a hack to force test output to be
    // displayed) made this test fail unconditionally; removed so the
    // test reports success when it completes.
}
|
/// A JSON Patch "move" operation.
///
/// Removes the value at a specified location and adds it to the target location.
///
/// [More Info](https://tools.ietf.org/html/rfc6902#section-4.4)
// The serde derives map the RFC 6902 member names "path" and "from"
// directly from the field names below.
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct OpMove {
/// A string containing a JSON-Pointer value that references a location within
/// the target document (the "target location") where the operation is
/// performed.
pub path: String,
/// A string containing a JSON-Pointer value that references the location in
/// the target document to move the value from.
pub from: String,
}
|
extern crate image;
extern crate rand;
extern crate rayon;
use std::env;
use std::path::Path;
use std::fs::File;
use std::io::Write;
use image::{Pixel, ImageBuffer, Rgba};
use rand::Rng;
use rayon::prelude::*;
/// Number of triangles placed in the final SVG approximation.
const NUM_TRIANGLES: usize = 10;
/// Alpha (0-255) applied to every generated triangle.
const TRANSPARENCY: u8 = 230;
/// Random candidate triangles evaluated per placed triangle.
const N_ITERS: usize = 10_000;
/// Side length of the downscaled working image the search runs on.
const DOWNSCALE: u32 = 64;
// RGBA byte image used as both the target and the working buffer.
type Image = ImageBuffer<Rgba<u8>, Vec<u8>>;
// RGBA color as raw bytes.
type Color = [u8; 4];
/// Integer pixel coordinate.
#[derive(Debug, Clone, Copy)]
struct Point {
x: i32,
y: i32,
}
/// A triangle given by its three vertices.
#[derive(Debug, Clone, Copy)]
struct Triangle {
a: Point,
b: Point,
c: Point,
}
/// A triangle plus the RGBA color it is filled with.
#[derive(Debug, Clone, Copy)]
struct ColorTriangle {
triangle: Triangle,
color: Color,
}
// Deref to the inner Triangle so geometry methods (`contains`,
// `bounding`) can be called directly on a ColorTriangle.
impl std::ops::Deref for ColorTriangle {
type Target = Triangle;
fn deref(&self) -> &Self::Target {
&self.triangle
}
}
impl Triangle {
    /// Produce a random triangle roughly covering a `width` x `height`
    /// canvas; vertices may overhang each edge by up to a fifth of the
    /// corresponding canvas dimension, and `a.x < b.x` always holds.
    fn random(width: u32, height: u32) -> Self {
        const PAD: i32 = 1;
        let (w, h) = (width as i32, height as i32);
        let (over_x, over_y) = (w / 5, h / 5);
        let mut rng = rand::thread_rng();
        let a = Point {
            x: rng.gen_range(-over_x, w - PAD),
            y: rng.gen_range(-over_y, h + over_x),
        };
        let b = Point {
            x: rng.gen_range(a.x + PAD, w + over_x),
            y: rng.gen_range(-over_y, h - PAD),
        };
        let c = Point {
            x: rng.gen_range(a.x, b.x),
            y: rng.gen_range(b.y, h),
        };
        Self { a, b, c }
    }
    /// True when `point` lies inside (or on an edge of) this triangle.
    /// All three edge functions must be non-negative, which assumes
    /// counter-clockwise winding; clockwise triangles contain nothing.
    fn contains(&self, point: Point) -> bool {
        // 2D cross product of (b - a) with (c - a).
        let edge = |a: Point, b: Point, c: Point| -> i32 {
            (b.x - a.x) * (c.y - a.y) - (b.y - a.y) * (c.x - a.x)
        };
        edge(self.a, self.b, point) >= 0
            && edge(self.b, self.c, point) >= 0
            && edge(self.c, self.a, point) >= 0
    }
    /// Bounding box `(min_x, min_y, max_x, max_y)` of the triangle,
    /// clamped to the `w` x `h` image rectangle.
    fn bounding(&self, w: u32, h: u32) -> (i32, i32, i32, i32) {
        use std::cmp::{max, min};
        let lo_x = min(self.a.x, min(self.b.x, self.c.x));
        let lo_y = min(self.a.y, min(self.b.y, self.c.y));
        let hi_x = max(0, max(self.a.x, max(self.b.x, self.c.x)));
        let hi_y = max(0, max(self.a.y, max(self.b.y, self.c.y)));
        (
            max(0, lo_x),
            max(0, lo_y),
            min(hi_x, w as i32),
            min(hi_y, h as i32),
        )
    }
}
/// Top-level failure modes of the program.
#[derive(Debug)]
enum Error {
/// No input file name was given on the command line.
MissingInput,
/// `next_triangle` found no candidate covering any pixel.
TriangulationFailed,
IoError(::std::io::Error),
ImageError(image::ImageError),
}
/// A triangle-based approximation of an image, renderable as SVG.
struct Svg {
// Solid background color (the source image's average color).
background: Color,
// Triangles in paint order: earlier entries are painted first.
triangles: Vec<ColorTriangle>,
// Output dimensions in pixels.
width: u32,
height: u32,
}
impl Svg {
    /// Write this drawing as an SVG document to `filename`.
    ///
    /// Returns any I/O error from creating or writing the file. (It
    /// previously `unwrap`ped the create call despite returning Result.)
    fn save(&self, filename: &AsRef<Path>) -> Result<(), ::std::io::Error> {
        // Format a color as "#rrggbb". Components must be zero-padded:
        // plain "{:x}" turned e.g. (0xff, 0x0a, 0x03) into the malformed
        // "#fa3", which SVG renderers misread as shorthand notation.
        fn color_to_hex(c: Color) -> String {
            format!("#{:02x}{:02x}{:02x}", c[0], c[1], c[2])
        }
        let mut s = String::new();
        s.push_str(&format!(
            r##"<?xml version="1.0" standalone="no"?>
<svg viewBox = "0 0 {} {}" version = "1.1" xmlns="http://www.w3.org/2000/svg">
<rect x="0" y="0" width="{0}" height="{1}" fill="{}"/>{}"##,
            self.width,
            self.height,
            color_to_hex(self.background),
            '\n'
        ));
        // One <polygon> per triangle; alpha is carried via fill-opacity.
        for triangle in &self.triangles {
            s.push_str(&format!(
                " <polygon points=\"{} {}, {} {}, {} {}\" fill=\"{}\" fill-opacity=\"{}\" />\n",
                triangle.a.x,
                triangle.a.y,
                triangle.b.x,
                triangle.b.y,
                triangle.c.x,
                triangle.c.y,
                color_to_hex(triangle.color),
                triangle.color[3] as f32 / 255.0
            ));
        }
        s.push_str("</svg>\n");
        // Propagate creation failure instead of panicking.
        let mut f = File::create(filename)?;
        f.write_all(s.as_bytes())
    }
    /// Scale every triangle by (`sx`, `sy`), e.g. to map coordinates
    /// from the downscaled search image back to the original size.
    fn scale(&mut self, sx: f32, sy: f32) {
        for triangle in self.triangles.iter_mut() {
            let t = &mut triangle.triangle;
            t.a.x = (t.a.x as f32 * sx) as i32;
            t.b.x = (t.b.x as f32 * sx) as i32;
            t.c.x = (t.c.x as f32 * sx) as i32;
            t.a.y = (t.a.y as f32 * sy) as i32;
            t.b.y = (t.b.y as f32 * sy) as i32;
            t.c.y = (t.c.y as f32 * sy) as i32;
        }
    }
}
/// Find the triangle that most reduces the squared error between
/// `current_image` and `target_image`.
///
/// Evaluates `N_ITERS` random candidates in parallel. Each candidate is
/// colored with the average target color over the pixels it covers and
/// scored by the change in per-channel squared error it would cause
/// (negative = improvement); the minimum-score candidate wins. Returns
/// `None` when no candidate covered any pixel.
fn next_triangle(target_image: &Image, current_image: &Image) -> Option<ColorTriangle> {
    (0..N_ITERS)
        .into_par_iter()
        .flat_map(|_i| {
            let (w, h) = target_image.dimensions();
            let triangle = Triangle::random(w, h);
            let (x0, y0, x1, y1) = triangle.bounding(w, h);
            // Rough capacity hint only (perimeter-ish, not area); the
            // Vec grows as needed. Leftover debug printing for oversized
            // candidates has been removed.
            let cap = (y1 - y0) as usize + (x1 - x0) as usize;
            let mut pixels = Vec::with_capacity(cap);
            let mut avg_pixel = [0, 0, 0];
            // Collect covered pixels and accumulate the target color sums.
            for y in y0..y1 {
                for x in x0..x1 {
                    if triangle.contains(Point { x, y }) {
                        let p = target_image.get_pixel(x as u32, y as u32).channels();
                        avg_pixel[0] += p[0] as usize;
                        avg_pixel[1] += p[1] as usize;
                        avg_pixel[2] += p[2] as usize;
                        pixels.push((x as u32, y as u32));
                    }
                }
            }
            // Degenerate, off-screen, or wrongly-wound triangles cover
            // nothing and are discarded.
            if pixels.is_empty() {
                return None;
            }
            avg_pixel[0] /= pixels.len();
            avg_pixel[1] /= pixels.len();
            avg_pixel[2] /= pixels.len();
            let color = [
                avg_pixel[0] as u8,
                avg_pixel[1] as u8,
                avg_pixel[2] as u8,
                TRANSPARENCY,
            ];
            // Score = sum over covered pixels of (new squared error -
            // old squared error) after alpha-blending `color` on top.
            let score = {
                let mut s = 0isize;
                let c = *Rgba::from_slice(&color);
                for &(x, y) in &pixels {
                    let target = target_image.get_pixel(x, y);
                    let before = *current_image.get_pixel(x, y);
                    let old_error = {
                        (target[0] as i16 - before[0] as i16).pow(2) as isize +
                        (target[1] as i16 - before[1] as i16).pow(2) as isize +
                        (target[2] as i16 - before[2] as i16).pow(2) as isize
                    };
                    let after = {
                        let mut a = before;
                        a.blend(&c);
                        a
                    };
                    let new_error = {
                        (target[0] as i16 - after[0] as i16).pow(2) as isize +
                        (target[1] as i16 - after[1] as i16).pow(2) as isize +
                        (target[2] as i16 - after[2] as i16).pow(2) as isize
                    };
                    s += new_error - old_error;
                }
                s // / pixels.len() as isize
            };
            Some((score, ColorTriangle { triangle, color }))
        })
        .min_by_key(|&(score, _)| score)
        .map(|(_s, triangle)| triangle)
}
/// Approximate `image` with NUM_TRIANGLES colored triangles over its
/// average color, searching on a DOWNSCALE x DOWNSCALE copy and scaling
/// the resulting triangles back to full resolution.
fn triangulate(image: Image) -> Result<Svg, Error> {
// Mean RGB of `img`, with alpha forced to 255.
fn avg_color(img: &Image) -> Color {
let n = {
let (w, h) = img.dimensions();
(w * h) as usize
};
let mut c = [0; 4];
for (_x, _y, p) in img.enumerate_pixels() {
c[0] += p.data[0] as usize;
c[1] += p.data[1] as usize;
c[2] += p.data[2] as usize;
}
[(c[0] / n) as u8, (c[1] / n) as u8, (c[2] / n) as u8, 255]
}
// Overwrite every pixel of `img` with `color`.
fn fill_with(img: &mut Image, color: Color) {
let c = *Rgba::from_slice(&color);
for (_x, _y, p) in img.enumerate_pixels_mut() {
*p = c;
}
}
// Alpha-blend `triangle` onto `image` within its clamped bounding box.
fn rasterize_triangle(image: &mut Image, triangle: ColorTriangle) {
let (w, h) = image.dimensions();
let (x0, y0, x1, y1) = triangle.bounding(w, h);
let color = Rgba::from_slice(&triangle.color);
for y in y0..y1 {
for x in x0..x1 {
if triangle.contains(Point { x, y }) {
let mut p = image.get_pixel_mut(x as u32, y as u32);
p.blend(color);
}
}
}
}
let (w, h) = image.dimensions();
// NOTE(review): resizing to DOWNSCALE x DOWNSCALE ignores the aspect
// ratio; the final scale() stretches coordinates back, so shapes map
// correctly, but sampling density differs per axis — confirm intended.
let downsampled =
image::imageops::resize(&image, DOWNSCALE, DOWNSCALE, image::FilterType::Nearest);
// NOTE(review): `buffer` keeps the ORIGINAL dimensions while the search
// runs in downsampled coordinates: only its top-left DOWNSCALE-sized
// region is scored/painted, and rasterize_triangle clamps triangles
// against the full size. This works, but shrinking the buffer would
// also change the clamping bounds — confirm before changing.
let mut buffer = image.clone();
let background_color = avg_color(&buffer);
fill_with(&mut buffer, background_color);
let mut svg = Svg {
background: background_color,
triangles: vec![],
width: image.width(),
height: image.height(),
};
// Greedy loop: place the single best candidate NUM_TRIANGLES times,
// updating the working buffer after each placement.
for _ in 0..NUM_TRIANGLES {
let triangle = next_triangle(&downsampled, &buffer).ok_or(
Error::TriangulationFailed,
)?;
rasterize_triangle(&mut buffer, triangle);
svg.triangles.push(triangle);
}
// Map triangle coordinates from search space back to full resolution.
let scale_x = w as f32 / DOWNSCALE as f32;
let scale_y = h as f32 / DOWNSCALE as f32;
svg.scale(scale_x, scale_y);
Ok(svg)
}
fn do_stuff() -> Result<(), Error> {
let filename = env::args().nth(1).ok_or(Error::MissingInput)?;
let image: Image = image::open(&filename)
.map_err(|e| Error::ImageError(e))?
.to_rgba();
let triangulated = triangulate(image)?;
triangulated
.save(&format!("out-{}.svg", filename))
.map_err(|e| Error::IoError(e))?;
Ok(())
}
fn main() {
    // Bug fix: the error arm is perfectly reachable (e.g. a missing CLI
    // argument yields Error::MissingInput), so `unreachable!()` panicked on
    // ordinary user error. Report the failure and exit non-zero instead.
    if let Err(e) = do_stuff() {
        eprintln!("error: {:?}", e);
        std::process::exit(1);
    }
}
|
use crate::*;
use std::sync::Arc;
use std::{fs::File, io, io::prelude::*, path::PathBuf};
use tracing;
/// Ping result processor that appends every result as one row of a CSV file.
pub struct PingResultProcessorCsvLogger {
    // Settings shared by all processors (e.g. quiet level).
    common_config: Arc<PingResultProcessorCommonConfig>,
    // Path of the CSV file; retained only for diagnostics in error messages.
    log_path: PathBuf,
    // Open handle that the header and rows are written to.
    log_file: File,
}
impl PingResultProcessorCsvLogger {
    #[tracing::instrument(name = "Creating ping result csv logger", level = "debug")]
    pub fn new(common_config: Arc<PingResultProcessorCommonConfig>, log_path_buf: &PathBuf) -> PingResultProcessorCsvLogger {
        PingResultProcessorCsvLogger {
            common_config,
            log_path: log_path_buf.clone(),
            log_file: rnp_utils::create_log_file(log_path_buf),
        }
    }

    /// Serializes one ping result as a CSV row and appends it to the log file.
    ///
    /// Bug fix: uses `write_all` instead of `write` — `write` may legally
    /// perform a short write and silently drop part of a record.
    fn log_result_as_csv(&mut self, ping_result: &PingResult) -> io::Result<()> {
        let log_content = ping_result.format_as_csv_string();
        self.log_file.write_all(log_content.as_bytes())?;
        self.log_file.write_all("\n".as_bytes())?;
        Ok(())
    }
}
impl PingResultProcessor for PingResultProcessorCsvLogger {
    fn name(&self) -> &'static str {
        "CsvLogger"
    }

    fn config(&self) -> &PingResultProcessorCommonConfig {
        self.common_config.as_ref()
    }

    /// Writes the CSV header row. Called once before any result is processed.
    fn initialize(&mut self) {
        // Write CSV header. Bug fix: `write_all` instead of `write`, which
        // may legally perform a short write and drop part of the header.
        // `unwrap_or_else` builds the panic message lazily, only on failure
        // (clippy::expect_fun_call).
        self.log_file
            .write_all("UtcTime,WorkerId,Protocol,TargetIp,TargetPort,SourceIp,SourcePort,IsWarmup,IsSucceeded,RttInMs,IsTimedOut,PreparationError,PingError,HandshakeError,DisconnectError\n".as_bytes())
            .unwrap_or_else(|e| {
                panic!(
                    "Failed to write logs to csv file! Path = {}, Error = {}",
                    self.log_path.display(),
                    e
                )
            });
    }

    fn process_ping_result(&mut self, ping_result: &PingResult) {
        self.log_result_as_csv(ping_result).unwrap_or_else(|e| {
            panic!("Failed to write logs to csv file! Path = {}, Error = {}", self.log_path.display(), e)
        });
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ping_result_processors::ping_result_processor_test_common;
    use crate::PingResultDto;
    use chrono::{TimeZone, Utc};
    use pretty_assertions::assert_eq;

    /// Baseline DTO shared by every expected record. The original test spelled
    /// out all 17 fields six times; each record below overrides only the
    /// fields that differ, via struct-update syntax.
    fn sample_dto() -> PingResultDto {
        PingResultDto {
            utc_time: Utc.ymd(2021, 7, 6).and_hms_milli(9, 10, 11, 12),
            worker_id: 1,
            protocol: "TCP".to_string(),
            target_ip: "1.2.3.4".parse().unwrap(),
            target_port: 443,
            source_ip: "5.6.7.8".parse().unwrap(),
            source_port: 8080,
            is_warmup: false,
            is_succeeded: false,
            is_timed_out: false,
            rtt_in_ms: 0f64,
            preparation_error: "".to_string(),
            ping_error: "".to_string(),
            handshake_error: "".to_string(),
            disconnect_error: "".to_string(),
        }
    }

    #[test]
    fn ping_result_process_csv_logger_should_work() {
        let test_log_file_path = "tests_data\\test_log.csv";
        let mut processor: Box<dyn PingResultProcessor + Send + Sync> = Box::new(PingResultProcessorCsvLogger::new(
            Arc::new(PingResultProcessorCommonConfig { quiet_level: RNP_QUIET_LEVEL_NO_OUTPUT }),
            &PathBuf::from(test_log_file_path),
        ));
        ping_result_processor_test_common::run_ping_result_processor_with_test_samples(&mut processor);

        // Read the file back through the csv crate so the written bytes are
        // validated end-to-end, not just the in-memory formatting.
        let mut actual_logged_records = Vec::new();
        {
            let mut csv_reader = csv::Reader::from_path(test_log_file_path).unwrap();
            for result in csv_reader.deserialize() {
                let actual_record: PingResultDto = result.unwrap();
                actual_logged_records.push(actual_record);
            }
        }

        assert_eq!(
            vec![
                // Warmup, succeeded, 10 ms.
                PingResultDto { is_warmup: true, is_succeeded: true, rtt_in_ms: 10f64, ..sample_dto() },
                // Timed out at 1000 ms.
                PingResultDto { is_timed_out: true, rtt_in_ms: 1000f64, ..sample_dto() },
                // Succeeded but handshake aborted.
                PingResultDto { is_succeeded: true, rtt_in_ms: 20f64, handshake_error: "connect aborted".to_string(), ..sample_dto() },
                // Succeeded but disconnect timed out.
                PingResultDto { is_succeeded: true, rtt_in_ms: 20f64, disconnect_error: "disconnect timeout".to_string(), ..sample_dto() },
                // Failed with a ping error.
                PingResultDto { ping_error: "connect failed".to_string(), ..sample_dto() },
                // Failed before pinging (preparation error).
                PingResultDto { preparation_error: "address in use".to_string(), ..sample_dto() },
            ],
            actual_logged_records,
        );
    }
}
|
#![allow(dead_code)]
//! A radio button widget with any widget as children
use std::marker::PhantomData;
use druid::theme;
use druid::widget::{Align, Flex, WidgetExt};
use druid::{
kurbo::{Point, Rect, Size},
LifeCycle, LifeCycleCtx,
};
use druid::{
BoxConstraints, Data, Env, Event, EventCtx, LayoutCtx, PaintCtx, RenderContext, UnitPoint,
UpdateCtx, Widget, WidgetPod,
};
use crate::theme_ext::{PAINTR_TOGGLE_FOREGROND, PAINTR_TOGGLE_OFF, PAINTR_TOGGLE_ON};
/// A group of radio buttons
#[derive(Debug, Clone)]
pub struct RadioGroup<T: Data + PartialEq + 'static> {
    // Zero-sized marker: the group itself holds no state; `new` returns the
    // fully composed widget tree instead of a `RadioGroup` value.
    phantom: PhantomData<T>,
}
impl<T: Data + PartialEq + Copy + 'static> RadioGroup<T> {
    /// Given a vector of `(widget, enum_variant)` tuples, create a group of
    /// Radio buttons laid out in a single column.
    pub fn new(variants: impl IntoIterator<Item = (Box<dyn Widget<T>>, T)>) -> impl Widget<T> {
        let mut column = Flex::column();
        for (child_widget, value) in variants {
            // Recolor each button's foreground depending on whether its
            // variant is the currently selected one.
            let button = Radio::new(child_widget, value)
                .env_scope(move |env: &mut Env, data: &T| {
                    let fg = if *data != value {
                        env.get(PAINTR_TOGGLE_OFF)
                    } else {
                        env.get(PAINTR_TOGGLE_ON)
                    };
                    env.set(PAINTR_TOGGLE_FOREGROND, fg);
                })
                .padding(5.0);
            column.add_child(button);
        }
        column
    }
}
/// A single radio button
pub struct Radio<T: Data + PartialEq> {
    // The value this button represents; it is "selected" when `data == variant`.
    variant: T,
    // The button's visual content — any widget, owned via a WidgetPod.
    child: WidgetPod<T, Box<dyn Widget<T>>>,
}
impl<T: Data + PartialEq + 'static> Radio<T> {
    /// Create a lone Radio button from label text and an enum variant
    pub fn new(w: impl Widget<T> + 'static, variant: T) -> impl Widget<T> {
        let child = WidgetPod::new(w).boxed();
        // Left-align the button vertically inside whatever space it is given.
        Align::vertical(UnitPoint::LEFT, Self { variant, child })
    }
}
impl<T: Data + PartialEq> Widget<T> for Radio<T> {
    fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) {
        self.child.lifecycle(ctx, event, data, env);
        // The hover highlight depends on hot state, so repaint when it flips.
        if let LifeCycle::HotChanged(_) = event {
            ctx.request_paint();
        }
    }

    fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut T, _env: &Env) {
        match event {
            Event::MouseDown(_) => {
                ctx.set_active(true);
                ctx.request_paint();
            }
            Event::MouseUp(_) => {
                if ctx.is_active() {
                    ctx.set_active(false);
                    // Standard click semantics: only commit the selection if
                    // the release happened while still over this widget.
                    if ctx.is_hot() {
                        *data = self.variant.clone();
                    }
                    ctx.request_paint();
                }
            }
            _ => (),
        }
    }

    fn update(&mut self, ctx: &mut UpdateCtx, old_data: &T, data: &T, env: &Env) {
        self.child.update(ctx, data, env);
        if !old_data.same(data) {
            ctx.request_paint();
        }
    }

    fn layout(
        &mut self,
        layout_ctx: &mut LayoutCtx,
        bc: &BoxConstraints,
        data: &T,
        env: &Env,
    ) -> Size {
        // Fix: pass `bc` directly — the original passed `&bc`
        // (a `&&BoxConstraints`) and relied on deref coercion.
        let size = self.child.layout(layout_ctx, bc, data, env);
        self.child.set_layout_rect(
            layout_ctx,
            data,
            env,
            Rect::from_origin_size(Point::ORIGIN, size),
        );
        bc.constrain(size)
    }

    fn paint(&mut self, paint_ctx: &mut PaintCtx, data: &T, env: &Env) {
        let size = paint_ctx.size();
        // Lighter border while hovered, darker otherwise.
        let border_color = if paint_ctx.is_hot() {
            env.get(theme::BORDER_LIGHT)
        } else {
            env.get(theme::BORDER_DARK)
        };
        let rt = Rect::from_origin_size(Point::ORIGIN, size);
        // Check if data enum matches our variant
        if *data == self.variant {
            paint_ctx.fill(rt, &env.get(theme::LABEL_COLOR));
        }
        paint_ctx.stroke(rt, &border_color, 1.0);
        // Paint the text label
        self.child.paint(paint_ctx, data, env);
    }
}
|
//! The Group Policy Object Editor stores registry-based configuration settings in two Registry.pol files, stored in folders
//! under the `<drive>:\Windows\System32\GroupPolicy\` folder. One file contains computer settings and the other file contains
//! user settings. The Group Policy Object Editor saves the settings to these files on exit, and imports the settings on
//! startup.
//!
//! This library does not and will never depend on any Windows-specific libraries and will therefore work on any system.
#[macro_use]
extern crate nom;
#[macro_use]
extern crate lazy_static;
pub mod v1;
pub use self::v1::*;
/// First part of header, equivalent to "PReg" in little-endian.
/// (Bytes 0x50 'P', 0x52 'R', 0x65 'e', 0x67 'g' read as a LE `u32`.)
pub const REGFILE_SIGNATURE: u32 = 0x67655250;
|
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot of the register taken at read time.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32-bit value accumulated by the field proxies before the store.
    bits: u32,
}
impl super::INTSET {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write cycle: snapshot the register, give the closure a
        // reader over the old value and a writer pre-seeded with it, then
        // store the writer's final bits back in a single write.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the reset value, so any
        // field the closure does not touch is written as its reset state.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
/// Generates a single-bit field-reader type: a `bool` wrapper exposing the
/// standard svd2rust accessors (`bit`, `bit_is_clear`, `bit_is_set`).
///
/// The seventeen reader types below were previously seventeen byte-identical
/// hand-expanded struct + impl pairs; the macro keeps them in one place
/// without changing any type name, method, or behavior.
macro_rules! bit_field_reader {
    ($name:ident) => {
        #[doc = r" Value of the field"]
        pub struct $name {
            bits: bool,
        }
        impl $name {
            #[doc = r" Value of the field as raw bits"]
            #[inline]
            pub fn bit(&self) -> bool {
                self.bits
            }
            #[doc = r" Returns `true` if the bit is clear (0)"]
            #[inline]
            pub fn bit_is_clear(&self) -> bool {
                !self.bit()
            }
            #[doc = r" Returns `true` if the bit is set (1)"]
            #[inline]
            pub fn bit_is_set(&self) -> bool {
                self.bit()
            }
        }
    };
}
bit_field_reader!(B2MSHUTDNR);
bit_field_reader!(B2MACTIVER);
bit_field_reader!(B2MSLEEPR);
bit_field_reader!(CQERRR);
bit_field_reader!(CQUPDR);
bit_field_reader!(CQPAUSEDR);
bit_field_reader!(DERRR);
bit_field_reader!(DCMPR);
bit_field_reader!(BLECSSTATR);
bit_field_reader!(BLECIRQR);
bit_field_reader!(ICMDR);
bit_field_reader!(IACCR);
bit_field_reader!(B2MSTR);
bit_field_reader!(FOVFLR);
bit_field_reader!(FUNDFLR);
bit_field_reader!(THRR);
bit_field_reader!(CMDCMPR);
#[doc = r" Proxy"]
pub struct _B2MSHUTDNW<'a> {
w: &'a mut W,
}
impl<'a> _B2MSHUTDNW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _B2MACTIVEW<'a> {
w: &'a mut W,
}
impl<'a> _B2MACTIVEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 15;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _B2MSLEEPW<'a> {
w: &'a mut W,
}
impl<'a> _B2MSLEEPW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 14;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _CQERRW<'a> {
w: &'a mut W,
}
impl<'a> _CQERRW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 13;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _CQUPDW<'a> {
w: &'a mut W,
}
impl<'a> _CQUPDW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 12;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _CQPAUSEDW<'a> {
w: &'a mut W,
}
impl<'a> _CQPAUSEDW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 11;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _DERRW<'a> {
w: &'a mut W,
}
impl<'a> _DERRW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 10;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _DCMPW<'a> {
w: &'a mut W,
}
impl<'a> _DCMPW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 9;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _BLECSSTATW<'a> {
w: &'a mut W,
}
impl<'a> _BLECSSTATW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _BLECIRQW<'a> {
w: &'a mut W,
}
impl<'a> _BLECIRQW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _ICMDW<'a> {
w: &'a mut W,
}
impl<'a> _ICMDW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _IACCW<'a> {
w: &'a mut W,
}
impl<'a> _IACCW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _B2MSTW<'a> {
w: &'a mut W,
}
impl<'a> _B2MSTW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FOVFLW<'a> {
w: &'a mut W,
}
impl<'a> _FOVFLW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FUNDFLW<'a> {
w: &'a mut W,
}
impl<'a> _FUNDFLW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _THRW<'a> {
w: &'a mut W,
}
impl<'a> _THRW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _CMDCMPW<'a> {
w: &'a mut W,
}
impl<'a> _CMDCMPW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 16 - Revision A: The B2M_STATE from the BLE Core transitioned into shutdown state Revision B: Falling BLE Core Status signal. Asserted when the BLE_STATUS signal from the BLE Core is de-asserted (1 -> 0)"]
#[inline]
pub fn b2mshutdn(&self) -> B2MSHUTDNR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) != 0
};
B2MSHUTDNR { bits }
}
#[doc = "Bit 15 - Revision A: The B2M_STATE from the BLE Core transitioned into the active state Revision B: Falling BLE Core IRQ signal. Asserted when the BLE_IRQ signal from the BLE Core is de-asserted (1 -> 0)"]
#[inline]
pub fn b2mactive(&self) -> B2MACTIVER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 15;
((self.bits >> OFFSET) & MASK as u32) != 0
};
B2MACTIVER { bits }
}
#[doc = "Bit 14 - The B2M_STATE from the BLE Core transitioned into the sleep state"]
#[inline]
pub fn b2msleep(&self) -> B2MSLEEPR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 14;
((self.bits >> OFFSET) & MASK as u32) != 0
};
B2MSLEEPR { bits }
}
#[doc = "Bit 13 - Command queue error during processing. When an error occurs, the system will stop processing and halt operations to allow software to take recovery actions"]
#[inline]
pub fn cqerr(&self) -> CQERRR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 13;
((self.bits >> OFFSET) & MASK as u32) != 0
};
CQERRR { bits }
}
#[doc = "Bit 12 - Command queue write operation executed a register write with the register address bit 0 set to 1. The low address bits in the CQ address fields are unused and bit 0 can be used to trigger an interrupt to indicate when this register write is performed by the CQ operation."]
#[inline]
pub fn cqupd(&self) -> CQUPDR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 12;
((self.bits >> OFFSET) & MASK as u32) != 0
};
CQUPDR { bits }
}
#[doc = "Bit 11 - Command queue is paused due to an active event enabled in the PAUSEEN register. The interrupt is posted when the event is enabled within the PAUSEEN register, the mask is active in the CQIRQMASK field and the event occurs."]
#[inline]
pub fn cqpaused(&self) -> CQPAUSEDR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 11;
((self.bits >> OFFSET) & MASK as u32) != 0
};
CQPAUSEDR { bits }
}
#[doc = "Bit 10 - DMA Error encountered during the processing of the DMA command. The DMA error could occur when the memory access specified in the DMA operation is not available or incorrectly specified."]
#[inline]
pub fn derr(&self) -> DERRR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 10;
((self.bits >> OFFSET) & MASK as u32) != 0
};
DERRR { bits }
}
#[doc = "Bit 9 - DMA Complete. Processing of the DMA operation has completed and the DMA submodule is returned into the idle state"]
#[inline]
pub fn dcmp(&self) -> DCMPR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 9;
((self.bits >> OFFSET) & MASK as u32) != 0
};
DCMPR { bits }
}
#[doc = "Bit 8 - BLE Core SPI Status interrupt. Asserted when the SPI_STATUS signal from the BLE Core is asserted, indicating that SPI writes can be done to the BLE Core. Transfers to the BLE Core should only be done when this signal is high."]
#[inline]
pub fn blecsstat(&self) -> BLECSSTATR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) != 0
};
BLECSSTATR { bits }
}
#[doc = "Bit 7 - BLE Core IRQ signal. Asserted when the BLE_IRQ signal from the BLE Core is asserted, indicating the availability of read data from the BLE Core."]
#[inline]
pub fn blecirq(&self) -> BLECIRQR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u32) != 0
};
BLECIRQR { bits }
}
#[doc = "Bit 6 - illegal command interrupt. Asserted when a command is written when an active command is in progress."]
#[inline]
pub fn icmd(&self) -> ICMDR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
};
ICMDR { bits }
}
#[doc = "Bit 5 - illegal FIFO access interrupt. Asserted when there is a overflow or underflow event"]
#[inline]
pub fn iacc(&self) -> IACCR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) != 0
};
IACCR { bits }
}
#[doc = "Bit 4 - B2M State change interrupt. Asserted on any change in the B2M_STATE signal from the BLE Core."]
#[inline]
pub fn b2mst(&self) -> B2MSTR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
};
B2MSTR { bits }
}
#[doc = "Bit 3 - Write FIFO Overflow interrupt. This occurs when software tries to write to a full fifo. The current operation does not stop."]
#[inline]
pub fn fovfl(&self) -> FOVFLR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FOVFLR { bits }
}
#[doc = "Bit 2 - Read FIFO Underflow interrupt. Asserted when a pop operation is done to a empty read FIFO."]
#[inline]
pub fn fundfl(&self) -> FUNDFLR {
    // Test bit 2 of the raw register value.
    FUNDFLR {
        bits: (self.bits & (1 << 2)) != 0,
    }
}
#[doc = "Bit 1 - FIFO Threshold interrupt. For write operations, asserted when the number of free bytes in the write FIFO equals or exceeds the WTHR field. For read operations, asserted when the number of valid bytes in the read FIFO equals of exceeds the value set in the RTHR field."]
#[inline]
pub fn thr(&self) -> THRR {
    // Test bit 1 of the raw register value.
    THRR {
        bits: (self.bits & (1 << 1)) != 0,
    }
}
#[doc = "Bit 0 - Command Complete interrupt. Asserted when the current operation has completed. For repeated commands, this will only be asserted when the final repeated command is completed."]
#[inline]
pub fn cmdcmp(&self) -> CMDCMPR {
    // Test bit 0 of the raw register value.
    CMDCMPR {
        bits: (self.bits & 1) != 0,
    }
}
}
// Write-side accessors for the interrupt register. Each method returns a
// one-shot field proxy that mutably borrows this writer; the proxy's
// setter mutates `self.bits` and hands the borrow back for chaining.
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    // `unsafe` because arbitrary bit patterns may violate field invariants.
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 16 - Revision A: The B2M_STATE from the BLE Core transitioned into shutdown state Revision B: Falling BLE Core Status signal. Asserted when the BLE_STATUS signal from the BLE Core is de-asserted (1 -> 0)"]
    #[inline]
    pub fn b2mshutdn(&mut self) -> _B2MSHUTDNW {
        _B2MSHUTDNW { w: self }
    }
    #[doc = "Bit 15 - Revision A: The B2M_STATE from the BLE Core transitioned into the active state Revision B: Falling BLE Core IRQ signal. Asserted when the BLE_IRQ signal from the BLE Core is de-asserted (1 -> 0)"]
    #[inline]
    pub fn b2mactive(&mut self) -> _B2MACTIVEW {
        _B2MACTIVEW { w: self }
    }
    #[doc = "Bit 14 - The B2M_STATE from the BLE Core transitioned into the sleep state"]
    #[inline]
    pub fn b2msleep(&mut self) -> _B2MSLEEPW {
        _B2MSLEEPW { w: self }
    }
    #[doc = "Bit 13 - Command queue error during processing. When an error occurs, the system will stop processing and halt operations to allow software to take recovery actions"]
    #[inline]
    pub fn cqerr(&mut self) -> _CQERRW {
        _CQERRW { w: self }
    }
    #[doc = "Bit 12 - Command queue write operation executed a register write with the register address bit 0 set to 1. The low address bits in the CQ address fields are unused and bit 0 can be used to trigger an interrupt to indicate when this register write is performed by the CQ operation."]
    #[inline]
    pub fn cqupd(&mut self) -> _CQUPDW {
        _CQUPDW { w: self }
    }
    #[doc = "Bit 11 - Command queue is paused due to an active event enabled in the PAUSEEN register. The interrupt is posted when the event is enabled within the PAUSEEN register, the mask is active in the CQIRQMASK field and the event occurs."]
    #[inline]
    pub fn cqpaused(&mut self) -> _CQPAUSEDW {
        _CQPAUSEDW { w: self }
    }
    #[doc = "Bit 10 - DMA Error encountered during the processing of the DMA command. The DMA error could occur when the memory access specified in the DMA operation is not available or incorrectly specified."]
    #[inline]
    pub fn derr(&mut self) -> _DERRW {
        _DERRW { w: self }
    }
    #[doc = "Bit 9 - DMA Complete. Processing of the DMA operation has completed and the DMA submodule is returned into the idle state"]
    #[inline]
    pub fn dcmp(&mut self) -> _DCMPW {
        _DCMPW { w: self }
    }
    #[doc = "Bit 8 - BLE Core SPI Status interrupt. Asserted when the SPI_STATUS signal from the BLE Core is asserted, indicating that SPI writes can be done to the BLE Core. Transfers to the BLE Core should only be done when this signal is high."]
    #[inline]
    pub fn blecsstat(&mut self) -> _BLECSSTATW {
        _BLECSSTATW { w: self }
    }
    #[doc = "Bit 7 - BLE Core IRQ signal. Asserted when the BLE_IRQ signal from the BLE Core is asserted, indicating the availability of read data from the BLE Core."]
    #[inline]
    pub fn blecirq(&mut self) -> _BLECIRQW {
        _BLECIRQW { w: self }
    }
    #[doc = "Bit 6 - illegal command interrupt. Asserted when a command is written when an active command is in progress."]
    #[inline]
    pub fn icmd(&mut self) -> _ICMDW {
        _ICMDW { w: self }
    }
    #[doc = "Bit 5 - illegal FIFO access interrupt. Asserted when there is a overflow or underflow event"]
    #[inline]
    pub fn iacc(&mut self) -> _IACCW {
        _IACCW { w: self }
    }
    #[doc = "Bit 4 - B2M State change interrupt. Asserted on any change in the B2M_STATE signal from the BLE Core."]
    #[inline]
    pub fn b2mst(&mut self) -> _B2MSTW {
        _B2MSTW { w: self }
    }
    #[doc = "Bit 3 - Write FIFO Overflow interrupt. This occurs when software tries to write to a full fifo. The current operation does not stop."]
    #[inline]
    pub fn fovfl(&mut self) -> _FOVFLW {
        _FOVFLW { w: self }
    }
    #[doc = "Bit 2 - Read FIFO Underflow interrupt. Asserted when a pop operation is done to a empty read FIFO."]
    #[inline]
    pub fn fundfl(&mut self) -> _FUNDFLW {
        _FUNDFLW { w: self }
    }
    #[doc = "Bit 1 - FIFO Threshold interrupt. For write operations, asserted when the number of free bytes in the write FIFO equals or exceeds the WTHR field. For read operations, asserted when the number of valid bytes in the read FIFO equals of exceeds the value set in the RTHR field."]
    #[inline]
    pub fn thr(&mut self) -> _THRW {
        _THRW { w: self }
    }
    #[doc = "Bit 0 - Command Complete interrupt. Asserted when the current operation has completed. For repeated commands, this will only be asserted when the final repeated command is completed."]
    #[inline]
    pub fn cmdcmp(&mut self) -> _CMDCMPW {
        _CMDCMPW { w: self }
    }
}
|
mod routing;
pub(crate) use self::routing::CoordIndex;
|
/// Returns the n-th Ulam number (1-indexed): 1, 2, 3, 4, 6, 8, 11, 13, ...
///
/// An Ulam number (after the first two, 1 and 2) is the smallest integer
/// larger than the previous member that can be written as the sum of two
/// distinct earlier members in exactly one way.
///
/// Panics if `n == 0`.
fn ulam(n: usize) -> usize {
    // `seq` holds the Ulam numbers found so far; `counts[i]` counts how many
    // ways the integer i + 1 can be written as a sum of two distinct members
    // of `seq` (exactly one way => it is the next Ulam number).
    let mut seq = vec![1, 2];
    let mut counts = vec![1, 1];
    let mut latest = 2;
    while seq.len() < n {
        // The next Ulam number is bounded by latest + second-largest member.
        let bound = latest + seq[seq.len() - 2];
        counts.resize(bound, 0);
        // Record the sums of the newest member with every earlier member.
        for &earlier in &seq[..seq.len() - 1] {
            counts[latest + earlier - 1] += 1;
        }
        // The first integer past `latest` with exactly one representation
        // becomes the next Ulam number.
        for idx in latest..counts.len() {
            if counts[idx] == 1 {
                latest = idx + 1;
                seq.push(latest);
                break;
            }
        }
    }
    seq[n - 1]
}
fn main() {
use std::time::Instant;
let start = Instant::now();
let mut n = 1;
while n <= 100000 {
println!("Ulam({}) = {}", n, ulam(n));
n *= 10;
}
println!("Elapsed time: {:.2?}", start.elapsed());
} |
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// FormulaAndFunctionEventQueryGroupBy : List of objects used to group by.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FormulaAndFunctionEventQueryGroupBy {
    /// Event facet.
    #[serde(rename = "facet")]
    pub facet: String,
    /// Number of groups to return.
    #[serde(rename = "limit", skip_serializing_if = "Option::is_none")]
    pub limit: Option<i64>,
    /// Sort settings for the group-by; omitted from JSON when `None`.
    #[serde(rename = "sort", skip_serializing_if = "Option::is_none")]
    pub sort: Option<Box<crate::models::FormulaAndFunctionEventQueryGroupBySort>>,
}
impl FormulaAndFunctionEventQueryGroupBy {
    /// List of objects used to group by.
    ///
    /// Builds a group-by for the given event facet; `limit` and `sort`
    /// start out unset (and are therefore omitted when serialized).
    pub fn new(facet: String) -> FormulaAndFunctionEventQueryGroupBy {
        FormulaAndFunctionEventQueryGroupBy {
            limit: None,
            sort: None,
            facet,
        }
    }
}
|
use std::future::Future;
/// Spawns `task` onto the executor appropriate for the compile target:
/// the tokio runtime on native targets, the browser's single-threaded
/// executor on wasm32.
pub fn spawn<F>(task: F)
where
    F: Send + Future<Output = ()> + 'static,
{
    cfg_if! {
        if #[cfg(not(target_arch = "wasm32"))] {
            // Native: hand the future to tokio. NOTE(review): assumes a
            // tokio runtime is already running — `tokio::spawn` panics
            // outside a runtime context.
            tokio::spawn(task);
        } else {
            // wasm32: no OS threads; run the future on the local executor.
            use wasm_bindgen_futures::spawn_local;
            spawn_local(task);
        }
    }
}
|
/// Build script: compiles the StreamingFast Ethereum protobuf definitions
/// into Rust sources placed under `src/protobuf` (checked-in output rather
/// than OUT_DIR).
fn main() {
    tonic_build::configure()
        .out_dir("src/protobuf")
        .format(cfg!(debug_assertions)) // Release build environments might not have rustfmt installed
        .compile(&["proto/codec.proto"], &["proto"])
        .expect("Failed to compile StreamingFast Ethereum proto(s)");
}
|
use async_graphql::{Context, EmptyMutation, EmptySubscription, Object, Schema, SimpleObject};
use async_graphql_warp::graphql;
use std::convert::Infallible;
use warp::{Filter, Reply};
// GraphQL object exposed by this federation subgraph. Plain `//` comments
// are used on purpose: doc comments would become schema descriptions.
#[derive(SimpleObject)]
struct Product {
    upc: String,
    name: String,
    price: i32,
}
// Root query type; `extends` marks it as a federation extension of the
// gateway's Query type.
struct Query;
#[Object(extends)]
impl Query {
    // Returns the product list stored in the schema's context data.
    async fn top_products<'a>(&self, ctx: &'a Context<'_>) -> &'a Vec<Product> {
        ctx.data_unchecked::<Vec<Product>>()
    }
    // Federation entity resolver: lets the gateway look a Product up by its
    // `upc` key when resolving references from other subgraphs.
    #[graphql(entity)]
    async fn find_product_by_upc<'a>(
        &self,
        ctx: &'a Context<'_>,
        upc: String,
    ) -> Option<&'a Product> {
        let hats = ctx.data_unchecked::<Vec<Product>>();
        hats.iter().find(|product| product.upc == upc)
    }
}
/// Serves this subgraph on 0.0.0.0:4002: builds a schema seeded with a
/// static product catalog and answers GraphQL requests as JSON via warp.
#[tokio::main]
async fn main() {
    // Static catalog injected into the schema as context data; resolvers
    // read it back with `ctx.data_unchecked::<Vec<Product>>()`.
    let hats = vec![
        Product {
            upc: "top-1".to_string(),
            name: "Trilby".to_string(),
            price: 11,
        },
        Product {
            upc: "top-2".to_string(),
            name: "Fedora".to_string(),
            price: 22,
        },
        Product {
            upc: "top-3".to_string(),
            name: "Boater".to_string(),
            price: 33,
        },
    ];
    let schema = Schema::build(Query, EmptyMutation, EmptySubscription)
        .data(hats)
        .finish();
    // Each incoming request is executed against the schema; the result is
    // serialized to JSON and returned (errors are encoded in the response).
    warp::serve(graphql(schema).and_then(
        |(schema, request): (
            Schema<Query, EmptyMutation, EmptySubscription>,
            async_graphql::Request,
        )| async move {
            Ok::<_, Infallible>(warp::reply::json(&schema.execute(request).await).into_response())
        },
    ))
    .run(([0, 0, 0, 0], 4002))
    .await;
}
|
// svd2rust-generated accessor plumbing for the CPSCTL register.
#[doc = "Reader of register CPSCTL"]
pub type R = crate::R<u32, super::CPSCTL>;
#[doc = "Writer for register CPSCTL"]
pub type W = crate::W<u32, super::CPSCTL>;
#[doc = "Register CPSCTL `reset()`'s with value 0"]
impl crate::ResetValue for super::CPSCTL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `CPS_RDY`"]
pub type CPS_RDY_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CPS_RDY`"]
pub struct CPS_RDY_W<'a> {
    w: &'a mut W,
}
impl<'a> CPS_RDY_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Replace bit 8 of the register image with `value`.
        let cleared = self.w.bits & !(1 << 8);
        self.w.bits = cleared | ((value as u32) << 8);
        self.w
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
#[doc = "Reader of field `CPS_EN`"]
pub type CPS_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CPS_EN`"]
pub struct CPS_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> CPS_EN_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Replace bit 0 of the register image with `value`.
        let cleared = self.w.bits & !1;
        self.w.bits = cleared | (value as u32);
        self.w
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
impl R {
    #[doc = "Bit 8 - I/O compensation cell is really or not"]
    #[inline(always)]
    pub fn cps_rdy(&self) -> CPS_RDY_R {
        // Bit 8 of the register value.
        CPS_RDY_R::new((self.bits & (1 << 8)) != 0)
    }
    #[doc = "Bit 0 - I/O compensation cell enable"]
    #[inline(always)]
    pub fn cps_en(&self) -> CPS_EN_R {
        // Bit 0 of the register value.
        CPS_EN_R::new((self.bits & 1) != 0)
    }
}
// Write-side accessors: each returns a field proxy borrowing this writer.
// NOTE(review): "really" in the generated description below is likely a
// typo for "ready" in the source SVD (the field is CPS_RDY) — confirm
// against the vendor SVD before changing the generated text.
impl W {
    #[doc = "Bit 8 - I/O compensation cell is really or not"]
    #[inline(always)]
    pub fn cps_rdy(&mut self) -> CPS_RDY_W {
        CPS_RDY_W { w: self }
    }
    #[doc = "Bit 0 - I/O compensation cell enable"]
    #[inline(always)]
    pub fn cps_en(&mut self) -> CPS_EN_W {
        CPS_EN_W { w: self }
    }
}
|
use x86_64::structures::idt::{ InterruptDescriptorTable, ExceptionStackFrame };
use x86_64::structures::tss::TaskStateSegment;
use x86_64::structures::gdt::{ GlobalDescriptorTable, Descriptor, SegmentSelector };
use x86_64::structures::idt::PageFaultErrorCode;
use x86_64::VirtAddr;
use lazy_static::lazy_static;
use spin::Mutex;
pub mod pic;
/// Interrupt Stack Table slot used for the double-fault handler's
/// dedicated stack (configured in the TSS below).
pub const DOUBLE_FAULT_IST_INDEX: u16 = 0;
lazy_static! {
    // Task State Segment providing a known-good stack for double faults.
    static ref TSS: TaskStateSegment = {
        let mut tss = TaskStateSegment::new();
        tss.interrupt_stack_table[DOUBLE_FAULT_IST_INDEX as usize] = {
            const STACK_SIZE: usize = 4096;
            // Backing storage for the IST stack.
            // NOTE(review): no guard page — an overflow of this stack
            // silently corrupts whatever is placed adjacent to it.
            static mut STACK: [u8; STACK_SIZE] = [0; STACK_SIZE];
            let stack_start = VirtAddr::from_ptr(unsafe { &STACK });
            // x86 stacks grow downward, so the TSS entry is the top (end).
            let stack_end = stack_start + STACK_SIZE;
            stack_end
        };
        tss
    };
    // Hook that lets the caller of `init_interrupts` register extra
    // handlers before the IDT is finalized and loaded.
    static ref IDT_prepare: Mutex<fn(&mut InterruptDescriptorTable)> = Mutex::new(|_func: &mut InterruptDescriptorTable| {});
    pub static ref IDT: InterruptDescriptorTable = {
        let mut idt = InterruptDescriptorTable::new();
        idt.breakpoint.set_handler_fn(breakpoint_handler);
        idt.page_fault.set_handler_fn(page_fault_handler);
        unsafe {
            // SAFETY: DOUBLE_FAULT_IST_INDEX names the IST slot that is
            // populated with a valid stack in TSS above.
            idt.double_fault.set_handler_fn(double_fault_handler)
                .set_stack_index(DOUBLE_FAULT_IST_INDEX);
        }
        pic::prepare(&mut idt);
        // Run the caller-supplied customization hook last so it can
        // override defaults.
        IDT_prepare.lock()(&mut idt);
        idt
    };
    // GDT plus the selectors needed to reload CS and the TSS afterwards.
    static ref GDT: (GlobalDescriptorTable, Selectors) = {
        let mut gdt = GlobalDescriptorTable::new();
        let code_selector = gdt.add_entry(Descriptor::kernel_code_segment());
        let tss_selector = gdt.add_entry(Descriptor::tss_segment(&TSS));
        (gdt, Selectors { code_selector, tss_selector })
    };
}
/// Segment selectors produced while building the GDT, kept so that
/// `init_interrupts` can reload CS and the TSS after `GDT.0.load()`.
struct Selectors {
    code_selector: SegmentSelector,
    tss_selector: SegmentSelector,
}
/// Initializes GDT, TSS, IDT and the PICs, then enables interrupts.
///
/// `func` is stored and invoked while the IDT is being built, allowing the
/// caller to install additional handlers. Order matters: the hook must be
/// stored *before* `IDT.load()` triggers the lazy IDT initialization.
pub fn init_interrupts(func: fn(&mut InterruptDescriptorTable)) {
    use x86_64::instructions::segmentation::set_cs;
    use x86_64::instructions::tables::load_tss;
    GDT.0.load();
    unsafe {
        // SAFETY: the selectors come from the GDT that was just loaded.
        set_cs(GDT.1.code_selector);
        load_tss(GDT.1.tss_selector);
    }
    *IDT_prepare.lock() = func;
    // First access to IDT evaluates the lazy_static initializer (which
    // calls the hook stored above), then loads the table.
    IDT.load();
    // SAFETY: remapping/initializing the PICs requires port I/O.
    unsafe { pic::PICS.lock().initialize() };
    x86_64::instructions::interrupts::enable();
}
/// Handler for the `int3` breakpoint exception: logs the stack frame and
/// returns, resuming execution after the breakpoint.
extern "x86-interrupt" fn breakpoint_handler(stack_frame: &mut ExceptionStackFrame)
{
    debug!("EXCEPTION: BREAKPOINT (DEFAULT (not assigned))\n{:#?}", stack_frame);
}
/// Double-fault handler, running on the dedicated IST stack. Logs the
/// frame and then invokes `eop!()` — presumably an end-of-processing /
/// halt macro defined elsewhere in this crate (double faults must not
/// return).
extern "x86-interrupt" fn double_fault_handler(
    stack_frame: &mut ExceptionStackFrame, _error_code: u64)
{
    debug!("EXCEPTION: DOUBLE FAULT\n{:#?}", stack_frame);
    eop!()
}
/// Page-fault handler: logs the error code, the faulting address (from
/// CR2) and the stack frame, then halts via `eop!()` instead of resuming.
extern "x86-interrupt" fn page_fault_handler(
    stack_frame: &mut ExceptionStackFrame, error_code: PageFaultErrorCode)
{
    use x86_64::registers::control::Cr2;
    debug!("ERROR CODE:\n {:?}", error_code);
    // CR2 holds the virtual address whose access caused the fault.
    debug!("ACCESSED ADDRESS: {:?}", Cr2::read());
    debug!("EXCEPTION: PAGE FAULT\n{:#?}", stack_frame);
    eop!()
}
#![cfg(test)]
extern crate rand;
use self::rand::Rng;
use self::rand::distributions::uniform::SampleUniform;
use ::*;
use std::ops::Neg;
// Number of randomized trials per property test.
const NUM_TESTS: u16 = 10000;
// Coordinate magnitude bound for integer test points/voxels.
const RANGE: isize = 500;
// Coordinate magnitude bound for floating-point test voxels.
const RANGE_FLOAT: f32 = 500.0;
/// Returns a new vector containing the elements of `points` in reverse
/// order; the input slice is left untouched.
pub fn reverse_slice<T: Clone>(points: &[T]) -> Vec<T> {
    let mut reversed = points.to_vec();
    reversed.reverse();
    reversed
}
/// Generates a random 2-D point with both coordinates drawn uniformly
/// from `[-range, range)`.
///
/// NOTE(review): `Point<T>` comes from the crate root via `use ::*`;
/// a plain tuple is returned, so it is presumably a `(T, T)` alias.
fn random_point<T>(rng: &mut rand::rngs::ThreadRng, range: T) -> Point<T>
where
    T: SampleUniform + PartialOrd + Neg<Output = T> + Copy,
{
    (rng.gen_range(-range..range), rng.gen_range(-range..range))
}
/// Generates a random 3-D voxel with each coordinate drawn uniformly
/// from `[-range, range)`; see `random_point` for the tuple-alias caveat.
fn random_voxel<T>(rng: &mut rand::rngs::ThreadRng, range: T) -> Voxel<T>
where
    T: SampleUniform + PartialOrd + Neg<Output = T> + Copy,
{
    (
        rng.gen_range(-range..range),
        rng.gen_range(-range..range),
        rng.gen_range(-range..range),
    )
}
/// Property test: tracing a supercover line from A to B yields exactly
/// the reverse of tracing from B to A, for random point pairs.
#[test]
fn supercover_symmetrical() {
    let supercover = |a, b| Supercover::new(a, b).collect::<Vec<_>>();
    let mut rng = rand::thread_rng();
    for _ in 0..NUM_TESTS {
        let start = random_point(&mut rng, RANGE);
        let end = random_point(&mut rng, RANGE);
        assert_eq!(
            supercover(start, end),
            reverse_slice(&supercover(end, start))
        );
    }
}
/// Documents that 2-D Bresenham is NOT symmetrical: `#[should_panic]`
/// expects at least one random pair to fail the reversal property.
#[test]
#[should_panic]
fn bresenham_not_symmetrical() {
    let bresenham = |a, b| Bresenham::new(a, b).collect::<Vec<_>>();
    let mut rng = rand::thread_rng();
    for _ in 0..NUM_TESTS {
        let start = random_point(&mut rng, RANGE);
        let end = random_point(&mut rng, RANGE);
        assert_eq!(bresenham(start, end), reverse_slice(&bresenham(end, start)));
    }
}
/// Documents that 3-D Bresenham is NOT symmetrical either; see
/// `bresenham_not_symmetrical` for the `#[should_panic]` rationale.
#[test]
#[should_panic]
fn bresenham_3d_not_symmetrical() {
    let bresenham_3d = |a, b| Bresenham3d::new(a, b).collect::<Vec<_>>();
    let mut rng = rand::thread_rng();
    for _ in 0..NUM_TESTS {
        let start = random_voxel(&mut rng, RANGE);
        let end = random_voxel(&mut rng, RANGE);
        assert_eq!(
            bresenham_3d(start, end),
            reverse_slice(&bresenham_3d(end, start))
        );
    }
}
/// Documents that `WalkVoxels` is NOT symmetrical: as with the Bresenham
/// tests above, `#[should_panic]` expects at least one random pair to
/// fail the reversal property.
///
/// Renamed from `walk_voxels_symmetrical`: the old name contradicted the
/// `#[should_panic]` attribute and the `*_not_symmetrical` naming used by
/// the sibling tests.
#[test]
#[should_panic]
fn walk_voxels_not_symmetrical() {
    use VoxelOrigin::Center;
    let walk_voxels = |a, b| WalkVoxels::<_, i16>::new(a, b, &Center).collect::<Vec<_>>();
    let mut rng = rand::thread_rng();
    for _ in 0..NUM_TESTS {
        let start = random_voxel(&mut rng, RANGE_FLOAT);
        let end = random_voxel(&mut rng, RANGE_FLOAT);
        assert_eq!(
            walk_voxels(start, end),
            reverse_slice(&walk_voxels(end, start))
        );
    }
}
|
// svd2rust-generated accessor aliases for the ADC_CALFACT2 register.
#[doc = "Register `ADC_CALFACT2` reader"]
pub type R = crate::R<ADC_CALFACT2_SPEC>;
#[doc = "Register `ADC_CALFACT2` writer"]
pub type W = crate::W<ADC_CALFACT2_SPEC>;
#[doc = "Field `LINCALFACT` reader - LINCALFACT"]
pub type LINCALFACT_R = crate::FieldReader<u32>;
#[doc = "Field `LINCALFACT` writer - LINCALFACT"]
// 30-bit field writer; the bit offset O is supplied at the use site.
pub type LINCALFACT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 30, O, u32>;
impl R {
    #[doc = "Bits 0:29 - LINCALFACT"]
    #[inline(always)]
    pub fn lincalfact(&self) -> LINCALFACT_R {
        // Mask off the low 30 bits holding the linearity calibration factor.
        let value = self.bits & 0x3fff_ffff;
        LINCALFACT_R::new(value)
    }
}
// Write-side accessors for ADC_CALFACT2.
impl W {
    #[doc = "Bits 0:29 - LINCALFACT"]
    #[inline(always)]
    #[must_use]
    pub fn lincalfact(&mut self) -> LINCALFACT_W<ADC_CALFACT2_SPEC, 0> {
        LINCALFACT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // `unsafe`: arbitrary bit patterns may violate field invariants.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "ADC calibration factor register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`adc_calfact2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`adc_calfact2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ADC_CALFACT2_SPEC;
impl crate::RegisterSpec for ADC_CALFACT2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`adc_calfact2::R`](R) reader structure"]
impl crate::Readable for ADC_CALFACT2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`adc_calfact2::W`](W) writer structure"]
impl crate::Writable for ADC_CALFACT2_SPEC {
    // No write-1-to-clear / write-0-to-modify fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets ADC_CALFACT2 to value 0"]
impl crate::Resettable for ADC_CALFACT2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use futures::{StreamExt, TryStreamExt, executor::block_on};
use handlebars::Handlebars;
use std::path::PathBuf;
use std::error::Error;
use std::fs;
use std::collections::HashMap;
use serde::{
Serialize,
Deserialize
};
use serde_yaml;
use kube::{
api::{Api, WatchEvent, PostParams},
Client,
runtime::Informer
};
use kube_derive::CustomResource;
use k8s_openapi::api::batch::v1::Job;
use structopt::StructOpt;
/// Spec of the `Application` custom resource
/// (`application-operator.github.io/v1alpha1`, namespaced); the derive
/// generates the `Application` kind used throughout this operator.
#[derive(CustomResource, Serialize, Deserialize, Default, Clone, Debug)]
#[kube(
    group = "application-operator.github.io",
    kind = "Application",
    version = "v1alpha1",
    namespaced
)]
pub struct ApplicationSpec {
    // Logical application name, forwarded into the Job template.
    application: String,
    // Target environment identifier, forwarded into the Job template.
    environment: String,
    // Version to roll out; also part of the generated job name.
    version: String
}
/// Parse a single key-value pair
/// https://github.com/TeXitoi/structopt/blob/master/examples/keyvalue.rs
///
/// Splits `s` at the first `=` and parses each half into the requested
/// types; errors if no `=` is present or if either half fails to parse.
fn parse_key_val<T, U>(s: &str) -> Result<(T, U), Box<dyn Error>>
where
    T: std::str::FromStr,
    T::Err: Error + 'static,
    U: std::str::FromStr,
    U::Err: Error + 'static,
{
    let (key, value) = s
        .split_once('=')
        .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;
    Ok((key.parse()?, value.parse()?))
}
/// Kubernetes operator that watches Application resources and creates
/// deployment Jobs rendered from a Handlebars template.
// NOTE: the previous doc comment ("Search for a pattern in a file and
// display the lines that contain it.") was copy-pasted from the structopt
// examples; structopt surfaces this text in `--help`, so it must describe
// this binary.
#[derive(StructOpt)]
struct Cli {
    // Namespace in which deployment Jobs are created.
    #[structopt(short = "n", long = "namespace", default_value = "applications")]
    namespace: String,
    // Container image used for the deployment Job.
    #[structopt(short = "i", long = "image", env = "IMAGE")]
    image: String,
    // Command executed inside the Job container.
    #[structopt(short = "c", long = "command", default_value = "/bin/deploy")]
    command: String,
    // Service account the Job runs under.
    #[structopt(short = "s", long = "service-account", default_value = "default")]
    service_account: String,
    // Handlebars template describing the Job manifest.
    #[structopt(short = "t", long = "template", parse(from_os_str))]
    template: PathBuf,
    // Extra KEY=value pairs forwarded to the template context.
    #[structopt(short = "e", long = "extra-vars", parse(try_from_str = parse_key_val), number_of_values = 1)]
    extra: Vec<(String, String)>,
}
/// Variables handed to the Handlebars Job template; field names here are
/// the placeholder names available inside the template.
#[derive(Serialize)]
struct TemplateVars {
    application: String,
    environment: String,
    version: String,
    command: String,
    // Unique, RFC 1123-safe Job name built in `ensure_application`.
    job_name: String,
    namespace: String,
    service_account: String,
    image: String,
    // Extra KEY=value pairs collected from `--extra-vars`.
    extra: HashMap<String,String>,
    // Name of the Application resource that triggered this Job.
    resource_name: String,
}
/// Sanitizes a version string for use inside an RFC 1123 name segment:
/// `.` and `_` become `-`, the result is truncated to at most `length`
/// bytes (kept whole when truncation is not possible), and trailing
/// hyphens are stripped.
fn version_to_rfc1123(version: String, length: usize) -> String {
    let sanitized = version.replace('.', "-").replace('_', "-");
    // `get` yields None when `length` is out of range (or not a char
    // boundary); fall back to the full string in that case.
    let truncated = match sanitized.get(..length) {
        Some(prefix) => prefix,
        None => sanitized.as_str(),
    };
    truncated.trim_end_matches('-').to_string()
}
/// Renders the Job template for `application` and submits it to the
/// cluster. A 409 Conflict (Job already exists) is treated as success;
/// any other API error panics.
///
/// Panics if the Application lacks a name/namespace, if CONFIG_VERSION is
/// unset, or if template reading/rendering/parsing fails.
fn ensure_application(client: Client, application: &Application, opts: &Cli) {
    let name = application.metadata.name.as_ref().expect("Application must have a name");
    let version = application.spec.version.clone();
    let config_version = std::env::var("CONFIG_VERSION").expect("CONFIG_VERSION environment variable must be set");
    let namespace = application.metadata.namespace.as_ref().expect(&format!("Application {} must be namespaced", name));
    let template_vars = TemplateVars {
        application: application.spec.application.clone(),
        environment: application.spec.environment.clone(),
        version: version.clone(),
        command: opts.command.clone(),
        namespace: namespace.to_string(),
        // Truncate both version components so the Job name stays within
        // Kubernetes name-length limits.
        job_name: format!("{}-{}-{}", name, version_to_rfc1123(config_version, 16), version_to_rfc1123(version, 16)),
        service_account: opts.service_account.clone(),
        image: opts.image.clone(),
        extra: opts.extra.clone().into_iter().collect(),
        resource_name: application.metadata.name.clone().unwrap(),
    };
    println!(
        "Ensuring that application {:?} in environment {:?} has version {:?}",
        template_vars.application,
        template_vars.environment,
        template_vars.version
    );
    let reg = Handlebars::new();
    let contents = fs::read_to_string(&opts.template)
        .expect("Something went wrong reading the file");
    // Render the Handlebars template, then parse the result as a Job manifest.
    let application_job : Job = serde_yaml::from_str(&reg.render_template(&contents, &template_vars).unwrap()).unwrap();
    let pp = PostParams::default();
    let jobs: Api<Job> = Api::namespaced(client, &namespace);
    // NOTE(review): this fn is reached from async `main` via `handle`;
    // `block_on` here blocks the executor thread — consider making this
    // function async instead.
    let created = block_on(jobs.create(&pp, &application_job));
    match created {
        Ok(_) => (),
        // 409 = the Job already exists, which means this version was
        // already ensured; anything else is unexpected.
        Err(kube::Error::Api(ae)) => assert_eq!(ae.code, 409, "Couldn't create job: {:?}", ae.message),
        Err(e) => panic!("Couldn't create job {:?}", e), // any other case is probably bad
    }
}
/// Dispatches a single watch event: Added/Modified trigger a deployment
/// Job via `ensure_application`; Deleted is only logged (no cleanup).
fn handle(client: Client, opts: &Cli, event: WatchEvent<Application>) -> anyhow::Result<()> {
    // This will receive events each time something
    match event {
        WatchEvent::Added(application) | WatchEvent::Modified(application) => {
            ensure_application(client, &application, &opts);
        },
        WatchEvent::Deleted(application) => {
            println!("Deleted an application {:?}", application.metadata.name)
        }
        _ => {
            // Bookmark/Error events are ignored.
            println!("another event")
        }
    }
    Ok(())
}
/// Operator entry point: watches `Application` resources in the configured
/// namespace forever, handing each event to `handle`. Returns (with an
/// error) only if the client setup or the watch stream fails.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Create a new client
    let client = Client::try_default().await?;
    let cli = &Cli::from_args();
    // Set a namespace. We're just hard-coding for now.
    let namespace = &cli.namespace;
    // Describe the CRD we're working with.
    // This is basically the fields from our CRD definition.
    let applications: Api<Application> = Api::namespaced(client.clone(), &namespace);
    let inform = Informer::new(applications);
    // Create our informer and start listening.
    loop {
        // Each poll yields a stream of watch events; drain it, then poll again.
        let mut events = inform.poll().await?.boxed();
        while let Some(event) = events.try_next().await? {
            handle(client.clone(), cli, event)?;
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Covers truncation, separator replacement, and trailing-hyphen
    // stripping in version_to_rfc1123.
    #[test]
    fn version_to_rfc1123_test() {
        assert_eq!("hello-world".to_string(), version_to_rfc1123("hello-world".to_string(), 20));
        assert_eq!("hello-worl".to_string(), version_to_rfc1123("hello-world".to_string(), 10));
        assert_eq!("hello".to_string(), version_to_rfc1123("hello-world".to_string(), 6));
        assert_eq!("hello-1234".to_string(), version_to_rfc1123("hello.1234".to_string(), 10));
        assert_eq!("hello".to_string(), version_to_rfc1123("hello-----".to_string(), 10));
    }
}
|
use file;
/// Entry point for Day 5: runs both parts against the worked example from
/// the puzzle statement, then against the real input from `Day5.txt`.
pub fn run() {
    let inputs = file::read_inputs("Day5.txt");
    // Example maze "0 3 0 1 -3": expected 5 steps (part 1), 10 (part 2).
    println!("Steps {}", solve(&"0\n3\n0\n1\n-3".to_string(), part1));
    println!("Steps {}", solve(&"0\n3\n0\n1\n-3".to_string(), part2));
    println!("Steps {}", solve(&inputs, part1));
    println!("Steps {}", solve(&inputs, part2));
}
/// Simulates the jump maze: starting at offset list index 0, repeatedly
/// jump by the value at the current index, applying `modify` to that
/// value after each use. Returns how many jumps it takes to land outside
/// the list.
fn solve(inputs: &String, modify: fn(&mut i32)) -> u32 {
    let mut offsets: Vec<i32> = inputs
        .lines()
        .filter(|line| !line.is_empty())
        .map(|line| line.parse::<i32>().unwrap())
        .collect();
    let mut index: i32 = 0;
    let mut steps: u32 = 0;
    while index >= 0 && (index as usize) < offsets.len() {
        let jump = offsets[index as usize];
        // Mutate the offset we just used (the rule differs per part).
        modify(&mut offsets[index as usize]);
        index += jump;
        steps += 1;
    }
    steps
}
/// Part 1 rule: after a jump, the offset just used increases by one.
fn part1(item: &mut i32) {
    *item += 1;
}
/// Part 2 rule: offsets of three or more decrease by one after use;
/// smaller offsets increase by one.
fn part2(item: &mut i32) {
    *item += if *item >= 3 { -1 } else { 1 };
}
use std::borrow::Borrow;
use std::env;
use std::ffi::{CStr, OsStr, OsString};
use std::fs::OpenOptions;
use std::os::unix::prelude::*;
use std::path::PathBuf;
use bstr::BString;
use clap::{App, Arg};
use regex::bytes::Regex;
use slog::*;
use slog_async::OverflowStrategy;
use dracut_install::{install_files_ldd, install_modules, modalias_list, RunContext};
//use itertools::Itertools;
// Package name and version baked in from Cargo metadata at compile time.
const VERSION: &str = env!("CARGO_PKG_VERSION");
const NAME: &str = env!("CARGO_PKG_NAME");
/// Crate-wide result type with a boxed, thread-safe error.
pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + 'static + Send + Sync>>;
/// CLI entry point: parses arguments into a `RunContext` and delegates to
/// `do_main`, exiting non-zero on error.
///
/// Bug fixes: the context previously read `matches.is_present("hmac")`,
/// `is_present("createdir")` and `is_present("resolvedeps")` — argument
/// names that were never defined, so clap v2 always returned `false` and
/// the defined flags `--fips`, `--dir` and `--ldd` were dead. They now map
/// to the defined arguments whose help text matches those behaviors.
fn main() {
    let after_help = format!(
        r#"Example usage:
        {NAME} -D DESTROOTDIR [OPTION]... -a SOURCE...
        or: {NAME} -D DESTROOTDIR [OPTION]... SOURCE DEST
        or: {NAME} -D DESTROOTDIR [OPTION]... -m KERNELMODULE [KERNELMODULE …]
        Install SOURCE to DEST in DESTROOTDIR with all needed dependencies.
        KERNELMODULE can have one of the formats:
        * <absolute path> with a leading /
        * =<kernel subdir>[/<kernel subdir>…] like '=drivers/hid'
        * <module name>"#,
        NAME = NAME
    );
    let app = App::new(NAME)
        .version(VERSION)
        .after_help(after_help.as_ref())
        .arg(
            Arg::with_name("debug")
                .long("debug")
                .help("Show debug output"),
        )
        .arg(
            Arg::with_name("verbose")
                .short("v")
                .long("verbose")
                .help("Show more output")
                .multiple(true),
        )
        .arg(
            Arg::with_name("version")
                .long("version")
                .help("Show package version")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("dir")
                .short("d")
                .long("dir")
                .help("SOURCE is a directory")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("ldd")
                .short("l")
                .long("ldd")
                .help("Also install shebang executables and libraries")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("resolvelazy")
                .short("R")
                .long("resolvelazy")
                .help("Only install shebang executables and libraries for all SOURCE files")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("optional")
                .short("o")
                .long("optional")
                .help("If SOURCE does not exist, do not fail")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("all")
                .short("a")
                .long("all")
                .help("Install all SOURCE arguments to <DESTROOTDIR>")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("hostonly")
                .short("H")
                .long("hostonly")
                .help("Mark all SOURCE files as hostonly")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("module")
                .short("m")
                .long("module")
                .help("Install kernel modules, instead of files")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("fips")
                .short("f")
                .long("fips")
                .help("Also install all '.SOURCE.hmac' files")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("modalias")
                .long("modalias")
                .help("Only generate module list from /sys/devices modalias list")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("silent")
                .long("silent")
                .help("Don't display error messages for kernel module install")
                .takes_value(false)
                .required(false),
        )
        .arg(
            Arg::with_name("destrootdir")
                .short("D")
                .long("destrootdir")
                .value_name("DESTROOTDIR")
                .help("Install all files to <DESTROOTDIR> as the root")
                .takes_value(true)
                .required(false),
        )
        .arg(
            Arg::with_name("logdir")
                .short("L")
                .long("logdir")
                .value_name("DIR")
                .help("Log files, which were installed from the host to <DIR>")
                .takes_value(true)
                .required(false),
        )
        .arg(
            Arg::with_name("mod-filter-path")
                .short("p")
                .long("mod-filter-path")
                .value_name("REGEXP")
                .help("Filter kernel modules by path <REGEXP>")
                .takes_value(true)
                .required(false),
        )
        .arg(
            Arg::with_name("mod-filter-nopath")
                .short("P")
                .long("mod-filter-nopath")
                .value_name("REGEXP")
                .help("Exclude kernel modules by path <REGEXP>")
                .takes_value(true)
                .required(false),
        )
        .arg(
            Arg::with_name("mod-filter-symbol")
                .short("s")
                .long("mod-filter-symbol")
                .value_name("REGEXP")
                .help("Filter kernel modules by symbol <REGEXP>")
                .takes_value(true)
                .required(false),
        )
        .arg(
            Arg::with_name("mod-filter-nosymbol")
                .short("S")
                .long("mod-filter-nosymbol")
                .value_name("REGEXP")
                .help("Exclude kernel modules by symbol <REGEXP>")
                .takes_value(true)
                .required(false),
        )
        .arg(
            Arg::with_name("mod-filter-noname")
                .short("N")
                .long("mod-filter-noname")
                .value_name("REGEXP")
                .help("Exclude kernel modules by name <REGEXP>")
                .takes_value(true)
                .required(false),
        )
        .arg(
            Arg::with_name("kerneldir")
                .long("kerneldir")
                .value_name("DIR")
                .help("Specify the kernel module directory")
                .takes_value(true)
                .required(false),
        )
        .arg(
            Arg::with_name("firmwaredirs")
                .long("firmwaredirs")
                .value_name("DIRS")
                .help("Specify the firmware directory search path with : separation")
                .takes_value(true)
                .required(false),
        )
        .arg(Arg::from_usage("<arg>... 'files, directories or kernel modules'").required(false));
    let matches = app.get_matches();
    let mut ctx: RunContext = RunContext {
        destrootdir: if let Some(dir) = matches.value_of_os("destrootdir") {
            PathBuf::from(dir)
        } else {
            // Fall back to $DESTROOTDIR; --modalias needs no destination.
            let /* mut */ dest_root_dir = match env::var_os("DESTROOTDIR") {
                None => {
                    if matches.is_present("modalias") {
                        OsString::from("")
                    } else {
                        eprintln!("DESTROOTDIR is unset and no --destrootdir given");
                        std::process::exit(1);
                    }
                }
                Some(d) => d
            };
            PathBuf::from(dest_root_dir)
        },
        all: matches.is_present("all"),
        // FIX: was is_present("hmac") — an undefined arg name, always false.
        // --fips (-f) is the flag that requests '.SOURCE.hmac' companions.
        hmac: matches.is_present("fips"),
        // FIX: was is_present("createdir") — undefined. --dir (-d) declares
        // SOURCE to be a directory (to be created in the destination root).
        createdir: matches.is_present("dir"),
        optional: matches.is_present("optional"),
        silent: matches.is_present("silent"),
        module: matches.is_present("module"),
        modalias: matches.is_present("modalias"),
        resolvelazy: matches.is_present("resolvelazy"),
        // FIX: was is_present("resolvedeps") — undefined. --ldd (-l)
        // requests dependency (shebang/library) resolution.
        resolvedeps: matches.is_present("ldd"),
        hostonly: matches.is_present("hostonly"),
        loglevel: if matches.is_present("debug") {
            Level::Debug
        } else {
            match matches.occurrences_of("verbose") {
                0 => Level::Warning,
                1 => Level::Info,
                2 => Level::Debug,
                3 => Level::Trace,
                _ => Level::Trace,
            }
        },
        kerneldir: matches.value_of_os("kerneldir").map(OsString::from),
        logdir: matches.value_of_os("logdir").map(OsString::from),
        mod_filter_path: matches.value_of_os("mod-filter-path").map(|s| {
            let s = s.to_string_lossy();
            Regex::new(s.borrow()).unwrap_or_else(|e| {
                eprintln!("filter path '{:?}' a regexp: {}", s, e);
                std::process::exit(1)
            })
        }),
        mod_filter_nopath: matches.value_of_os("mod-filter-nopath").map(|s| {
            let s = s.to_string_lossy();
            Regex::new(s.borrow()).unwrap_or_else(|e| {
                eprintln!("filter nopath '{:?}' not a regexp: {}", s, e);
                std::process::exit(1)
            })
        }),
        mod_filter_symbol: matches.value_of_os("mod-filter-symbol").map(|s| {
            let s = s.to_string_lossy();
            Regex::new(s.borrow()).unwrap_or_else(|e| {
                eprintln!("filter symbol '{:?}' not a regexp: {}", s, e);
                std::process::exit(1)
            })
        }),
        mod_filter_nosymbol: matches.value_of_os("mod-filter-nosymbol").map(|s| {
            let s = s.to_string_lossy();
            Regex::new(s.borrow()).unwrap_or_else(|e| {
                eprintln!("filter nosymbol '{:?}' not a regexp: {}", s, e);
                std::process::exit(1)
            })
        }),
        mod_filter_noname: matches.value_of_os("mod-filter-noname").map(|s| {
            let s = s.to_string_lossy();
            Regex::new(s.borrow()).unwrap_or_else(|e| {
                eprintln!("filter noname '{:?}' not a regexp: {}", s, e);
                std::process::exit(1)
            })
        }),
        // Split the ':'-separated search paths as raw bytes (paths need not
        // be valid UTF-8 on Unix).
        firmwaredirs: matches
            .value_of_os("firmwaredirs")
            .map(OsStr::as_bytes)
            .map(BString::from)
            .map(|s| {
                s.split(|b| *b == b':')
                    .map(OsStr::from_bytes)
                    .map(OsString::from)
                    .collect::<Vec<_>>()
            })
            .unwrap_or_default(),
        pathdirs: env::var_os("PATH")
            .iter()
            .map(OsString::as_os_str)
            .map(OsStr::as_bytes)
            .map(BString::from)
            .map(|s| {
                s.split(|b| *b == b':')
                    .map(OsStr::from_bytes)
                    .map(OsString::from)
                    .collect::<Vec<_>>()
            })
            .next()
            .unwrap_or_default(),
        // Placeholder; do_main installs the real logger.
        logger: slog::Logger::root(slog::Discard, o!()),
    };
    let files = match matches.values_of_os("arg") {
        Some(v) => v.map(OsString::from).collect::<Vec<_>>(),
        None => Vec::<OsString>::new(),
    };
    if let Err(e) = do_main(&mut ctx, &files) {
        match ctx.loglevel {
            Level::Debug | Level::Trace => {
                error!(ctx.logger, "{:?}", e);
            }
            _ => {
                error!(ctx.logger, "Error: {}", e);
            }
        }
        // Drop the context first so the async logger flushes before exit.
        drop(ctx);
        std::process::exit(1);
    }
}
/// Runs the installer proper: sets up logging (to a per-PID file under
/// `logdir`, or the terminal), fills in the kernel module directory from
/// `uname` when unset, then dispatches to modalias listing, kernel-module
/// install, or plain file install.
fn do_main(ctx: &mut RunContext, args: &[OsString]) -> Result<()> {
    // Setup logging
    if let Some(dir) = &ctx.logdir {
        let logfile_path = PathBuf::from(dir).join(format!("{}.log", unsafe { libc::getpid() }));
        // FIX: create the log file if missing. The path embeds our PID, so
        // it never pre-exists and plain `append(true)` always failed with
        // NotFound.
        let logfile = OpenOptions::new()
            .create(true)
            .append(true)
            .open(logfile_path)?;
        let decorator = slog_term::PlainDecorator::new(logfile);
        let drain = slog_term::CompactFormat::new(decorator)
            .build()
            .filter_level(ctx.loglevel)
            .fuse();
        let drain = slog_async::Async::new(drain)
            .overflow_strategy(OverflowStrategy::Block)
            .build()
            .fuse();
        ctx.logger = Logger::root(drain, o!());
    } else {
        let decorator = slog_term::TermDecorator::new().build();
        let drain = slog_term::FullFormat::new(decorator)
            .use_original_order()
            .build()
            .filter_level(ctx.loglevel)
            .fuse();
        let drain = slog_async::Async::new(drain)
            .overflow_strategy(OverflowStrategy::Block)
            .build()
            .fuse();
        ctx.logger = Logger::root(drain, o!());
    }
    // Get running kernel version, if kerneldir is unset
    if ctx.kerneldir.is_none() {
        // FIX: zero-initialize instead of `uninit().assume_init()` —
        // materializing an uninitialized `utsname` is undefined behavior
        // even though `uname` overwrites it. All-zero bytes are a valid
        // `utsname` (it is plain char arrays).
        let mut utsname: libc::utsname = unsafe { std::mem::MaybeUninit::zeroed().assume_init() };
        let ret = unsafe { libc::uname(&mut utsname) };
        if ret == -1 {
            return Err(Box::from(std::io::Error::last_os_error()));
        }
        ctx.kerneldir = Some({
            let mut s = OsString::from("/lib/modules/");
            // SAFETY: `uname` NUL-terminates `release` on success.
            s.push(OsStr::from_bytes(
                unsafe { CStr::from_ptr(&utsname.release as *const libc::c_char) }.to_bytes(),
            ));
            s
        });
    }
    if ctx.modalias {
        // --modalias: just print the module list derived from /sys/devices.
        for m in modalias_list()? {
            println!("{:?}", m);
        }
        return Ok(());
    }
    if ctx.module {
        install_modules(ctx, args)
    } else {
        install_files_ldd(ctx, args)
    }
}
|
#![deny(missing_docs)]
//! simplehttpserver is a re-implementation of Python's SimpleHTTPServer module in Rust. It's a
//! small utility that spawns a small HTTP server, serving static files from the current directory,
//! and is useful for serving HTML pages in a server context.
//!
//! By default, simplehttpserver runs on port 8000.
extern crate rocket;
extern crate rocket_contrib;
extern crate serde;
use std::env;
use std::fs;
use std::path::PathBuf;
use std::collections::HashMap;
use rocket::Outcome;
use rocket::request::{Request, FromRequest};
use rocket::response::{self, Responder, NamedFile};
use rocket_contrib::Template;
use serde::ser::{Serialize, Serializer, SerializeSeq};
/// A custom response enum, that serves either a template or a static file depending on the path
/// that's requested.
pub enum Response {
    /// A listing is handled by a template renderer.
    Listing(Template),
    /// A static file that can be served directly.
    File(NamedFile),
}
impl<'r> Responder<'r> for Response {
    /// Forwards the Response enumerated type to the specific response handler.
    /// Each variant simply delegates to its wrapped value's own `Responder`.
    fn respond_to(self, req: &Request) -> response::Result<'r> {
        match self {
            Response::Listing(t) => Template::respond_to(t, req),
            Response::File(f) => NamedFile::respond_to(f, req),
        }
    }
}
/// A custom context value that makes it easier to construct a context to pass to handlebars-rust
/// for rendering the template.
enum ContextValue {
    /// A generic string type.
    String(String),
    /// A list of strings, used for generating the directory listing.
    FileNameList(Vec<String>),
}
impl Serialize for ContextValue {
    /// Converts a ContextValue into serde-compatible data forms, so it can be
    /// accessed by the template rendering engine: strings serialize as plain
    /// strings, file-name lists as sequences.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match *self {
            ContextValue::String(ref value) => serializer.serialize_str(value),
            ContextValue::FileNameList(ref names) => {
                let mut seq = serializer.serialize_seq(Some(names.len()))?;
                for name in names {
                    seq.serialize_element(name)?;
                }
                seq.end()
            }
        }
    }
}
/// A response guard that collects information from the Request object that cannot normally be
/// obtained through the view handler and passes it on to the actual view handler. This guard will
/// always return successfully.
pub struct Directory {
    /// The relative name of the current directory that's displayed on the page.
    name: String,
    /// A PathBuf representing the directory that was requested.
    path: PathBuf,
}
impl<'a, 'r> FromRequest<'a, 'r> for Directory {
    #[doc(hidden)]
    type Error = ();
    /// Generates a Directory response guard from a Request object.
    ///
    /// Resolves the request URI against the server's current working
    /// directory. Always returns `Outcome::Success`.
    /// (Fix: removed a leftover debug `println!` that printed every URI to
    /// stdout on each request.)
    fn from_request(request: &'a Request<'r>) -> rocket::request::Outcome<Directory, ()> {
        let uri = request.uri().as_str();
        let mut target = env::current_dir().unwrap();
        // Skip the leading '/' so the remainder joins as a relative path.
        if uri.len() > 1 {
            // NOTE(review): the raw URI is pushed unchecked, so a request
            // containing `..` segments could escape the served directory
            // (path traversal). Consider sanitising the path components.
            target.push(&uri[1..]);
        }
        Outcome::Success(Directory {
            name: String::from(uri),
            path: target,
        })
    }
}
/// A handler for both the root path and any other path. This is necessary because the Rocket route
/// handler for /<path> will not match /, so two handlers are necessary.
///
/// Directories render the `index` template with a sorted file listing;
/// anything else is served directly as a static file.
///
/// # Panics
/// Panics if the requested path does not exist or cannot be read (metadata,
/// read_dir, non-UTF-8 file name, or open failure) — e.g. a 404-style request
/// currently crashes the handler thread.
pub fn generic_handler(directory: Directory) -> Response {
    // Fix: borrow the path for the metadata call instead of cloning the whole
    // PathBuf — `fs::metadata` only needs `AsRef<Path>`.
    if fs::metadata(&directory.path).unwrap().is_dir() {
        let mut context: HashMap<&str, ContextValue> = HashMap::new();
        context.insert("name", ContextValue::String(directory.name));
        let mut filelist: Vec<String> = vec![];
        let names = fs::read_dir(directory.path).unwrap();
        for name in names {
            filelist.push(String::from(name.unwrap().file_name().to_str().unwrap()));
        }
        filelist.sort();
        context.insert("files", ContextValue::FileNameList(filelist));
        Response::Listing(Template::render("index", context))
    } else {
        Response::File(NamedFile::open(directory.path).unwrap())
    }
}
|
pub(crate) mod factory;
|
use std::io::{stdin, print, println};
use std::io::stdio::flush;
use std::io::BufferedReader;
// Prints `num` spaces (used to centre the rows of the triangular board).
// NOTE: this file is written in a pre-1.0 Rust dialect (`int`, `range`,
// `print`); it will not compile with a modern toolchain.
fn print_spaces(num:int) {
    for _ in range(0, num) {
        print(" ");
    }
}
// Draws the 15-hole triangular peg board (5 rows). Occupied holes are drawn
// as a small box around the index (`|nn|`), empty holes show just the index.
// Each row takes three print passes: box top edge, numbers, box bottom edge.
pub fn triangle_print(board:&[bool]) {
    for line in range(0, 5) {
        // Indentation shrinks as the rows widen, keeping the triangle centred.
        let prefix_spaces = (4 - line) * 4;
        // Index of the first hole in this row: 0, 1, 3, 6, 10 (triangular numbers).
        let start = (line * (line + 1)) / 2;
        print_spaces(prefix_spaces);
        for index in range(start, line + start + 1) {
            match board[index] {
                false => print(" "),
                true => print(" __ ")
            };
            print(" ");
        }
        println("");
        print_spaces(prefix_spaces);
        for index in range(start, line + start + 1) {
            match board[index] {
                false => print!(" {:2d} ", index),
                true => print!("|{:2d}|", index)
            };
            print(" ");
        }
        println("");
        print_spaces(prefix_spaces);
        for index in range(start, line + start + 1) {
            match board[index] {
                false => print(" "),
                true => print("|__|")
            };
            print(" ");
        }
        println("");
    }
}
// Prompts repeatedly until the user enters a valid peg number (0 to 14,
// inclusive) or the word "end". Returns Some(peg) for a valid number and
// None when the user types "end"; empty lines re-prompt silently.
fn get_pegnum<T: Reader>(prompt:&str, reader:&mut BufferedReader<T>) -> Option<int> {
    loop {
        print(prompt);
        flush();
        match reader.read_line().ok().unwrap().trim() {
            "" => { continue; },
            "end" => { return None },
            num => {
                // Bug fix: the previous version called `.unwrap()` on the
                // parse result, so any non-numeric input crashed the program
                // instead of showing the usage message and re-prompting.
                match from_str::<int>(num) {
                    Some(n) if n >= 0 && n <= 14 => { return Some(n); }
                    _ => {
                        println("You must enter either a peg number (0 to 14, inclusive)");
                        println("or the word \" end\". (without the quotes)");
                        continue;
                    }
                }
            }
        }
    }
}
// Interactively sets up the board: clears all 15 holes, then repeatedly shows
// the board and toggles the peg at each index the user enters, until the user
// types "end".
pub fn triangle_input(board:&mut [bool]) {
    for index in range(0, 15) {
        board[index] = false;
    }
    let mut reader = ~stdin();
    loop {
        triangle_print(board);
        println("");
        println("Enter a number to insert/remove a peg from that space, or enter the");
        println("word \"end\" when the board is set up.");
        println("");
        match get_pegnum("Input: ", reader) {
            Some(peg_num) => {
                // Toggle: insert if the hole is empty, remove if occupied.
                board[peg_num] = !board[peg_num];
            },
            None => {return;}
        }
    }
}
|
use crate::ast::Connective;
use crate::parse;
use std::collections::{HashMap, HashSet, VecDeque};
#[derive(Debug, Clone)]
// One node of the tableau proof tree.
struct Node {
    // Signed formulas placed on this node: (fact id, formula, expected truth value).
    connectives: Vec<(FactId, Connective, bool)>,
    // Set once a contradiction was found on the branch through this node.
    closed: bool,
}
#[derive(Debug, Clone)]
// A parent→child link in the proof tree, labelled with the fact whose rule
// application produced the child node.
struct Edge {
    // Node whose expansion created this edge.
    origin_node: NodeId,
    // The signed formula that was expanded.
    fact: FactId,
    // Child node created by the expansion.
    to: NodeId,
}
#[derive(Debug, Clone, Copy)]
// Index into `Tableau::nodes`.
struct NodeId(usize);
#[derive(Debug, Clone, Copy)]
// Monotonically assigned id of a signed formula added to the tableau.
struct FactId(usize);
#[derive(Debug, Clone)]
// A candidate expansion step produced by `Knowlage::generate_queue`.
enum QueueEntry {
    // (repeater index, fact id, instantiated formula, expected truth,
    //  substituted constant, whether the constant is newly introduced).
    Repeated(usize, FactId, Connective, bool, String, bool),
    // (queue index, fact id, formula, expected truth).
    Standard(usize, FactId, Connective, bool),
}
impl QueueEntry {
    /// Borrows the entry's formula, its expected truth value, and — for
    /// repeated (quantifier) entries — the constant that was substituted in.
    fn extract(&self) -> (&Connective, bool, Option<&str>) {
        match self {
            QueueEntry::Repeated(_, _, con, expect, replaced, _) => {
                (con, *expect, Some(replaced))
            }
            QueueEntry::Standard(_, _, con, expect) => (con, *expect, None),
        }
    }
}
#[derive(Debug, Clone)]
// Per-branch prover state. (The name is a pre-existing typo of "Knowledge";
// kept unchanged because it is used throughout the file.)
struct Knowlage {
    // Truth value assumed so far for each formula on the current branch.
    facts: HashMap<Connective, bool>,
    // Signed formulas still waiting to be expanded.
    queue: VecDeque<(FactId, Connective, bool)>,
    // Ground constants seen so far on the branch.
    known_constants: HashSet<String>,
    // Quantified formulas re-instantiated for every constant:
    // (fact id, bound variable, body, expected truth, constants already used).
    repeaters: Vec<(FactId, String, Connective, bool, HashSet<String>)>,
}
impl Knowlage {
    /// Creates an empty branch state, pre-seeded with the constants that
    /// already occur in the initial formulas.
    fn new(known_constants: HashSet<String>) -> Knowlage {
        Knowlage {
            queue: Default::default(),
            facts: Default::default(),
            known_constants,
            repeaters: vec![],
        }
    }
    /// Lists every possible next expansion: all queued facts plus, for each
    /// repeater, one instantiation per known constant it has not run on yet.
    /// The `usize` in each entry indexes back into `queue` / `repeaters`.
    fn generate_queue(&self) -> Vec<QueueEntry> {
        let queued = self
            .queue
            .iter()
            .cloned()
            .enumerate()
            .map(|(i, (fact_id, connective, expect))| {
                QueueEntry::Standard(i, fact_id, connective, expect)
            });
        let repeated = self.repeaters.iter().enumerate().flat_map(
            |(i, (fact_id, to_repalce, con, expect, ran_on))| {
                // if self.known_constants.is_empty() {
                // vec![QueueEntry::Repeated(i, *fact_id, con.clone(), *expect, to_repalce.to_string(), true)]
                // } else {
                self.known_constants
                    .difference(&ran_on)
                    .map(move |constant| {
                        QueueEntry::Repeated(
                            i,
                            *fact_id,
                            con.substitude(&to_repalce, &constant),
                            *expect,
                            constant.to_string(),
                            false,
                        )
                    })
                // }
            },
        );
        queued.chain(repeated).collect()
    }
    /// Commits to one entry from `generate_queue`: removes it from the queue
    /// (Standard) or marks the constant as used on its repeater (Repeated).
    /// Returns (fact id, formula, expected truth, was-repeated).
    fn process_queue_entry(&mut self, entry: QueueEntry) -> (FactId, Connective, bool, bool) {
        match entry {
            QueueEntry::Repeated(index, fact_id, connective, expect, constant, introduce_constant) => {
                if introduce_constant {
                    self.known_constants.insert(constant.to_string());
                }
                if let Some(repeater) = self.repeaters.get_mut(index) {
                    // Field 4 is the set of constants this repeater has run on.
                    repeater.4.insert(constant);
                } else {
                    panic!("repeater did not exists");
                }
                (fact_id, connective, expect, true)
            }
            QueueEntry::Standard(index, fact_id, connective, expect) => {
                self.queue.remove(index);
                (fact_id, connective, expect, false)
            }
        }
    }
    /// Picks the cheapest next expansion (lower score = higher priority):
    /// 0 = a rule with an immediately-contradicted subformula (closes fast),
    /// 1 = atomic facts, 2/3 = non-branching before branching implication,
    /// 100 = everything else. Returns None when nothing is left to expand.
    fn pop(&mut self) -> Option<(FactId, Connective, bool, bool)> {
        let queue = self.generate_queue();
        // NOTE(review): `queue.clone()` looks redundant — `queue` is only
        // referenced afterwards by the commented-out debug print below.
        let entry = queue.clone().into_iter().min_by_key(|entry| {
            let (con, expect, repeated) = entry.extract();
            // True if the branch already holds the opposite truth value.
            let contra = |con, expect: bool| self.facts.get(con).cloned() == Some(!expect);
            match (con, expect, repeated) {
                (Connective::Var(_), _, _) => 1,
                (Connective::And(a, b), true, _) if contra(a, true) || contra(b, true) => 0,
                (Connective::Or(a, b), false, _) if contra(a, false) || contra(b, false) => 0,
                (Connective::And(_, _), true, _) |
                (Connective::Or(_, _), false, _) => 2,
                (Connective::Implicate(a, b), false, _) if contra(a, true) || contra(b, false) => 0,
                (Connective::Implicate(_, _), false, _) => 3,
                (Connective::Implicate(a, b), true, _) if contra(a, false) || contra(b, true) => 0,
                _ => 100
            }
        })?;
        // let entry = queue.into_iter().next()?;
        // let pretty_entry = |entry: &QueueEntry| {
        // match entry {
        // QueueEntry::Standard(_, _, con, expect) => format!("S({}: {})", con.pretty(), expect),
        // QueueEntry::Repeated(_, _, con, expect, _, _) => format!("R({}: {})", con.pretty(), expect),
        // }
        // };
        // println!("{:?}, choose {:?}", queue.iter().map(pretty_entry).collect::<Vec<_>>().join(", "), pretty_entry(&entry));
        Some(self.process_queue_entry(entry))
    }
    /// Records `connective = expect` on the branch. Ok(true) if the fact is
    /// new, Ok(false) if already known with the same value, Err(Closes) when
    /// it contradicts an existing fact.
    fn fact(&mut self, connective: Connective, expect: bool) -> Result<bool, FactResult> {
        if let Some(fact) = self.facts.get(&connective) {
            if expect == *fact {
                Ok(false)
            } else {
                Err(FactResult::Closes)
            }
        } else {
            self.facts.insert(connective, expect);
            Ok(true)
        }
    }
    /// Schedules a signed formula for later expansion.
    fn queue(&mut self, fact_id: FactId, connective: Connective, expect: bool) {
        self.queue.push_back((fact_id, connective, expect));
    }
    /// Registers a quantified formula that must be instantiated with every
    /// known constant (starts with an empty "ran on" set).
    fn add_repeater(
        &mut self,
        fact_id: FactId,
        to_repalce: String,
        connective: Connective,
        expect: bool,
    ) {
        self.repeaters
            .push((fact_id, to_repalce, connective, expect, HashSet::new()))
    }
    /// Makes a constant available for repeater instantiation.
    fn register_constant(&mut self, constant: String) {
        self.known_constants.insert(constant);
    }
}
#[derive(Debug, Clone, PartialEq)]
// Signal that adding a fact contradicted an existing one, closing the branch.
enum FactResult {
    Closes,
}
#[derive(Debug, Clone)]
// A semantic tableau: builds and expands the proof tree on construction.
pub struct Tableau {
    // Next FactId to hand out.
    facts_counter: usize,
    // Next fresh constant number (C0, C1, ...).
    constant_counter: usize,
    // Number of expansion steps performed (hard-capped to stay finite).
    process_counter: usize,
    // Knowledge of the branch currently being expanded.
    knowlage: Knowlage,
    // Saved knowledge snapshots, pushed/popped around branching rules.
    knowlage_stack: Vec<Knowlage>,
    // All nodes of the proof tree.
    nodes: Vec<Node>,
    // Parent→child links labelled with the expanded fact.
    edges: Vec<Edge>,
}
impl Tableau {
    /// Builds the tableau for the given signed starting formulas and runs the
    /// expansion to completion, subject to the hard limits in `process_next`.
    pub fn new(start: Vec<(Connective, bool)>) -> Tableau {
        // Seed the constant set with every variable/constant name appearing
        // in the starting formulas.
        let known_constants: HashSet<_> = start
            .iter()
            .flat_map(|(con, _)| con.all_variables().into_iter())
            .collect();
        let mut tableau = Tableau {
            facts_counter: 0,
            constant_counter: 0,
            process_counter: 0,
            knowlage: Knowlage::new(known_constants),
            knowlage_stack: vec![],
            nodes: vec![],
            edges: vec![],
        };
        let (staring_node_id, node) = tableau.alloc_node(start.clone());
        let connectives = node.connectives.clone();
        let result = tableau.queue_facts(connectives);
        tableau.process_next(staring_node_id, result);
        tableau
    }
    /// Adds each signed formula as a fact and, when new, queues it for
    /// expansion. Stops early with Err(Closes) on the first contradiction.
    fn queue_facts(
        &mut self,
        connectives: impl IntoIterator<Item = (FactId, Connective, bool)>,
    ) -> Result<(), FactResult> {
        for (fact_id, con, expect) in connectives {
            if self.knowlage.fact(con.clone(), expect)? {
                self.knowlage.queue(fact_id, con, expect);
            }
        }
        Ok(())
    }
    /// Takes the next expansion step from the branch knowledge, if any.
    fn pop_queue(&mut self) -> Option<(FactId, Connective, bool, bool)> {
        self.knowlage.pop()
    }
    /// Records that expanding `fact` at `origin_node` produced node `to`.
    fn create_edge(&mut self, origin_node: NodeId, fact: FactId, to: NodeId) {
        self.edges.push(Edge {
            origin_node,
            fact,
            to,
        });
    }
    /// Snapshots the branch knowledge before exploring one side of a branch.
    fn save_knowlage(&mut self) {
        let save_point = self.knowlage.clone();
        self.knowlage_stack.push(save_point);
    }
    /// Restores the snapshot taken by the matching `save_knowlage`.
    fn restore_knowlage(&mut self) {
        assert!(self.knowlage_stack.len() > 0);
        self.knowlage = self.knowlage_stack.pop().expect("knowlage_stack was empty");
    }
    /// Creates a tree node holding the given signed formulas, assigning each
    /// a fresh FactId.
    fn alloc_node(
        &mut self,
        connectives: impl IntoIterator<Item = (Connective, bool)>,
    ) -> (NodeId, &Node) {
        let connectives = connectives
            .into_iter()
            .map(|(con, expect)| {
                let fact_id = self.facts_counter;
                self.facts_counter += 1;
                (FactId(fact_id), con.clone(), expect)
            })
            .collect();
        let node_id = NodeId(self.nodes.len());
        self.nodes.push(Node {
            connectives,
            closed: false,
        });
        (node_id, &self.nodes[node_id.0])
    }
    /// Mints a fresh constant (C0, C1, ...) and registers it on the branch.
    fn alloc_constant(&mut self) -> String {
        let con = format!("C{}", self.constant_counter);
        self.knowlage.register_constant(con.clone());
        self.constant_counter += 1;
        // println!("{:?}", self.constant_counter);
        con
    }
    /// Advances the expansion below `from`. Returns false when a hard limit
    /// (steps, fresh constants, facts) is hit — these bounds keep the search
    /// finite even for first-order problems that would not terminate.
    fn process_next(&mut self, from: NodeId, last_result: Result<(), FactResult>) -> bool {
        if self.process_counter >= 300 {
            // println!("Exeed process limit");
            return false;
        }
        if self.constant_counter >= 100 {
            return false;
        }
        if self.facts_counter >= 100 {
            // println!("more then 100 facts");
            return false;
        }
        self.process_counter += 1;
        match last_result {
            Ok(()) => {
                if let Some((fact, con, expect, was_repeated)) = self.pop_queue() {
                    if was_repeated {
                        // Repeater instances are already fully instantiated;
                        // just append them to the branch as-is.
                        self.straight(from, fact, vec![(con, expect)]);
                    } else {
                        self.process(from, fact, con, expect);
                    }
                } else {
                    // println!("Queue was empty!");
                }
            }
            Err(FactResult::Closes) => {
                // A contradiction was found: mark the branch closed.
                self.nodes[from.0].closed = true;
            }
        }
        true
    }
    /// Applies the tableau rule matching the signed formula: non-branching
    /// rules go through `straight`, branching rules through `branch`;
    /// quantifiers either instantiate a fresh constant or register a
    /// repeater, depending on the sign.
    fn process(
        &mut self,
        from: NodeId,
        fact_id: FactId,
        connective: Connective,
        expect: bool,
    ) -> bool {
        match connective {
            // Atoms carry no structure to expand; continue with the queue.
            Connective::Var(_) | Connective::Predicate(_, _) => self.process_next(from, Ok(())),
            Connective::And(left, right) => {
                if expect {
                    self.straight(from, fact_id, vec![(*left, true), (*right, true)])
                } else {
                    self.branch(from, fact_id, vec![(*left, false)], vec![(*right, false)])
                }
            }
            Connective::Or(left, right) => {
                if expect {
                    self.branch(from, fact_id, vec![(*left, true)], vec![(*right, true)])
                } else {
                    self.straight(from, fact_id, vec![(*left, false), (*right, false)])
                }
            }
            Connective::Implicate(left, right) => {
                if expect {
                    self.branch(from, fact_id, vec![(*left, false)], vec![(*right, true)])
                } else {
                    self.straight(from, fact_id, vec![(*left, true), (*right, false)])
                }
            }
            Connective::Biimplicate(left, right) => {
                // Both sides false / both sides true (when expected true),
                // or the two mixed cases (when expected false).
                if expect {
                    self.branch(
                        from,
                        fact_id,
                        vec![(*left.clone(), false), (*right.clone(), false)],
                        vec![(*left, true), (*right, true)],
                    )
                } else {
                    self.branch(
                        from,
                        fact_id,
                        vec![(*left.clone(), false), (*right.clone(), true)],
                        vec![(*left, true), (*right, false)],
                    )
                }
            }
            Connective::Not(con) => self.straight(from, fact_id, vec![(*con, !expect)]),
            Connective::Exists(var, con) => {
                if expect {
                    // Witness with a brand-new constant (delta rule).
                    let new_const = self.alloc_constant();
                    let new_con = con.substitude(&var, &new_const);
                    self.straight(from, fact_id, vec![(new_con, expect)])
                } else {
                    // Must hold for every constant: register a repeater.
                    self.knowlage
                        .add_repeater(fact_id, var.clone(), *con.clone(), expect);
                    self.straight(from, fact_id, vec![])
                }
            }
            Connective::ForAll(var, con) => {
                if expect {
                    // Must hold for every constant: register a repeater.
                    self.knowlage
                        .add_repeater(fact_id, var.clone(), *con.clone(), expect);
                    self.straight(from, fact_id, vec![])
                } else {
                    // Counterexample with a brand-new constant.
                    let new_const = self.alloc_constant();
                    let new_con = con.substitude(&var, &new_const);
                    self.straight(from, fact_id, vec![(new_con, expect)])
                }
            }
        }
    }
    /// Non-branching expansion: appends one child node with `cons` (or, for
    /// an empty list, just continues processing at `from`).
    fn straight(&mut self, from: NodeId, fact_id: FactId, cons: Vec<(Connective, bool)>) -> bool {
        if !cons.is_empty() {
            let (node_id, node) = self.alloc_node(cons);
            let connectives = node.connectives.clone();
            self.create_edge(from, fact_id, node_id);
            let result = self.queue_facts(connectives);
            self.process_next(node_id, result);
        } else {
            self.process_next(from, Ok(()));
        }
        true
    }
    /// Branching expansion: explores the `left` and `right` alternatives
    /// independently, snapshotting/restoring knowledge around each side.
    fn branch(
        &mut self,
        from: NodeId,
        fact_id: FactId,
        left: Vec<(Connective, bool)>,
        right: Vec<(Connective, bool)>,
    ) -> bool {
        self.save_knowlage();
        self.straight(from, fact_id, left);
        self.restore_knowlage();
        self.save_knowlage();
        self.straight(from, fact_id, right);
        self.restore_knowlage();
        true
    }
    /// Renders the proof tree in Graphviz dot format. Node labels list the
    /// node's signed formulas (1-based fact ids); closed branches get an "x".
    pub fn generate_dot(&self) -> String {
        let header = "digraph A {\n\t";
        let footer = "\t\n}";
        let nodes = self
            .nodes
            .iter()
            .enumerate()
            .map(|(id, node)| {
                let mut label = node
                    .connectives
                    .iter()
                    .map(|(id, con, expect)| {
                        format!(r#"[{}] {}: {}"#, id.0 + 1, con.pretty(), expect)
                    })
                    .collect::<Vec<_>>()
                    .join("\\n");
                if node.closed {
                    label += "\\nx";
                }
                format!(r#"{} [label="{}"];"#, id, label)
            })
            .collect::<Vec<_>>()
            .join("\n\t");
        let edges = self
            .edges
            .iter()
            .map(|edge| {
                format!(
                    "{} -> {} [label=\"{}\"];",
                    edge.origin_node.0,
                    edge.to.0,
                    edge.fact.0 + 1
                )
            })
            .collect::<Vec<_>>()
            .join("\n\t");
        format!("{}{}\n\n\t{}{}", header, nodes, edges, footer)
    }
}
#[allow(unused)]
/// Test helper: parses `src` and builds a tableau for the single signed
/// formula `(src, expect)`. Panics on a parse error.
fn run(src: &str, expect: bool) -> Tableau {
    let connective = parse::parse(src).unwrap();
    Tableau::new(vec![(connective, expect)])
}
#[test]
// Smoke test: builds a tableau for a propositional knowledge base and dumps
// it as Graphviz dot. It only verifies that construction does not panic;
// the printed output is meant to be inspected by hand.
fn simple_or() {
    // println!("{}", run("P(a) | b", false).generate_dot());
    // println!("{}", run("a | (b | a)", false).generate_dot());
    // println!("{}", run("a | (b & a)", false).generate_dot());
    // println!("{}", run("a > b & (x = !a)", false).generate_dot());
    println!(
        "{}",
        run(
            "(b ∨ h) ∧ (h → ¬b) ∧ (¬h → ¬a) ∧ (l → k) ∧ (k → ¬b ∧ ¬a)",
            false
        )
        .generate_dot()
    );
    // println!("{}", run(".a (a | b)", false).generate_dot());
    // println!("{}", run(".a a > b | c", false).generate_dot());
    // assert!(false);
}
// let con = parse::parse(r#"(A & !B) & (B & !A) = (A & B) & !(A | B)"#).unwrap();
#[test]
// NOTE(review): this test ends in `unimplemented!()` and therefore always
// fails — presumably left in deliberately so the dot output is shown when
// running the test suite. Confirm before shipping.
fn other_test() {
    let a = parse::parse(r#"(\x(\y(P(x,y) > P(y,x))))"#).unwrap();
    // let b = parse::parse(r#"(\x(\y(P(x,y) = P(y,x))))"#).unwrap();
    println!(
        "{}",
        Tableau::new(vec![(a, true)]).generate_dot()
    );
    unimplemented!();
}
// #[test]
// fn other_equiv_test() {
// let a = parse::parse("(\\x(.y(P(x, y))))").unwrap();
// let b = parse::parse("(.z(P(z, z)))").unwrap();
// // let c = parse::parse("!q").unwrap();
// println!(
// "{}",
// Tableau::new(vec![(a, true), (b, false)]).generate_dot()
// );
// // unimplemented!();
// }
// #[test]
// fn fails_in_browser_index_out_of_bounds() {
// println!("start");
// println!("{}", run("(\\x(.y (P(y,x)))) > a", false).generate_dot());
// println!("end");
// }
|
use embedded_time::TimeError;
use crate::network::Error as NetworkError;
use crate::services::data::Error as DataServiceError;
use core::cell::{BorrowError, BorrowMutError};
#[derive(Debug, PartialEq)]
// Errors shared across layers: `core::cell` borrow failures, timeouts and
// `embedded_time` errors.
pub enum GenericError {
    // Wraps `core::cell::BorrowError` (see the From impl below).
    BorrowError,
    // Wraps `core::cell::BorrowMutError` (see the From impl below).
    BorrowMutError,
    Timeout,
    // Wraps an `embedded_time::TimeError`.
    Time(TimeError),
    Unsupported,
}
// Conversions so `?` can lift core-cell borrow failures and time errors into
// `GenericError`. The borrow errors carry no payload worth preserving.
impl From<BorrowMutError> for GenericError {
    fn from(_: BorrowMutError) -> Self {
        GenericError::BorrowMutError
    }
}
impl From<BorrowError> for GenericError {
    fn from(_: BorrowError) -> Self {
        GenericError::BorrowError
    }
}
impl From<TimeError> for GenericError {
    fn from(e: TimeError) -> Self {
        GenericError::Time(e)
    }
}
#[derive(Debug, PartialEq)]
#[non_exhaustive]
// Top-level crate error: flattens network-, service- and generic errors into
// one type so callers match on a single enum.
pub enum Error {
    // General device errors
    BaudDetection,
    Busy,
    Uninitialized,
    StateTimeout,
    // Network errors
    Network(NetworkError),
    // Service specific errors
    DataService(DataServiceError),
    // Generic shared errors, e.g. from `core::`
    Generic(GenericError),
    // Placeholder; combined with #[non_exhaustive] it forces downstream
    // matches to keep a wildcard arm.
    _Unknown,
}
impl From<DataServiceError> for Error {
fn from(e: DataServiceError) -> Self {
// Unwrap generic and network errors
match e {
DataServiceError::Generic(g) => Error::Generic(g),
DataServiceError::Network(g) => Error::Network(g),
_ => Error::DataService(e),
}
}
}
impl From<NetworkError> for Error {
fn from(e: NetworkError) -> Self {
// Unwrap generic errors
match e {
NetworkError::Generic(g) => Error::Generic(g),
_ => Error::Network(e),
}
}
}
// Route core-cell and time errors through `GenericError` (via its From impls)
// into the top-level `Error`.
impl From<TimeError> for Error {
    fn from(e: TimeError) -> Self {
        Error::Generic(e.into())
    }
}
impl From<BorrowMutError> for Error {
    fn from(e: BorrowMutError) -> Self {
        Error::Generic(e.into())
    }
}
impl From<BorrowError> for Error {
    fn from(e: BorrowError) -> Self {
        Error::Generic(e.into())
    }
}
|
// svd2rust-generated register API for RCC AHB3LPENR (AHB3 peripheral
// low-power clock enable register). Do not edit by hand; regenerate instead.
#[doc = "Register `AHB3LPENR` reader"]
pub type R = crate::R<AHB3LPENR_SPEC>;
#[doc = "Register `AHB3LPENR` writer"]
pub type W = crate::W<AHB3LPENR_SPEC>;
#[doc = "Field `MDMALPEN` reader - MDMA Clock Enable During CSleep Mode"]
pub type MDMALPEN_R = crate::BitReader<MDMALPEN_A>;
#[doc = "MDMA Clock Enable During CSleep Mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
// Two-state field value; discriminants mirror the hardware bit values.
pub enum MDMALPEN_A {
    #[doc = "0: The selected clock is disabled during csleep mode"]
    Disabled = 0,
    #[doc = "1: The selected clock is enabled during csleep mode"]
    Enabled = 1,
}
impl From<MDMALPEN_A> for bool {
#[inline(always)]
fn from(variant: MDMALPEN_A) -> Self {
variant as u8 != 0
}
}
// Read-side helpers for the MDMALPEN bit (shared by all fields of this
// register through the `pub use ... as ..._R` re-exports below).
impl MDMALPEN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> MDMALPEN_A {
        // A clear bit reads as Disabled, a set bit as Enabled.
        match self.bits {
            false => MDMALPEN_A::Disabled,
            true => MDMALPEN_A::Enabled,
        }
    }
    #[doc = "The selected clock is disabled during csleep mode"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == MDMALPEN_A::Disabled
    }
    #[doc = "The selected clock is enabled during csleep mode"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == MDMALPEN_A::Enabled
    }
}
// Write-side proxy for the MDMALPEN bit; `O` is the bit offset within the
// register. Also re-used for every other field via the writer re-exports.
#[doc = "Field `MDMALPEN` writer - MDMA Clock Enable During CSleep Mode"]
pub type MDMALPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, MDMALPEN_A>;
impl<'a, REG, const O: u8> MDMALPEN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "The selected clock is disabled during csleep mode"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(MDMALPEN_A::Disabled)
    }
    #[doc = "The selected clock is enabled during csleep mode"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(MDMALPEN_A::Enabled)
    }
}
#[doc = "Field `DMA2DLPEN` reader - DMA2D Clock Enable During CSleep Mode"]
pub use MDMALPEN_R as DMA2DLPEN_R;
#[doc = "Field `FLITFLPEN` reader - FLITF Clock Enable During CSleep Mode"]
pub use MDMALPEN_R as FLITFLPEN_R;
#[doc = "Field `FMCLPEN` reader - FMC Peripheral Clocks Enable During CSleep Mode"]
pub use MDMALPEN_R as FMCLPEN_R;
#[doc = "Field `OCTOSPI1LPEN` reader - OCTOSPI1 and OCTOSPI1 delay block enable during CSleep Mode"]
pub use MDMALPEN_R as OCTOSPI1LPEN_R;
#[doc = "Field `SDMMC1LPEN` reader - SDMMC1 and SDMMC1 Delay Clock Enable During CSleep Mode"]
pub use MDMALPEN_R as SDMMC1LPEN_R;
#[doc = "Field `OCTOSPI2LPEN` reader - OCTOSPI2 and OCTOSPI2 delay block enable during CSleep Mode"]
pub use MDMALPEN_R as OCTOSPI2LPEN_R;
#[doc = "Field `IOMNGRLPEN` reader - OCTOSPI IO manager enable during CSleep Mode"]
pub use MDMALPEN_R as IOMNGRLPEN_R;
#[doc = "Field `OTFD1LPEN` reader - OTFDEC1 enable during CSleep Mode"]
pub use MDMALPEN_R as OTFD1LPEN_R;
#[doc = "Field `OTFD2LPEN` reader - OTFDEC2 enable during CSleep Mode"]
pub use MDMALPEN_R as OTFD2LPEN_R;
#[doc = "Field `D1DTCM1LPEN` reader - D1DTCM1 Block Clock Enable During CSleep mode"]
pub use MDMALPEN_R as D1DTCM1LPEN_R;
#[doc = "Field `DTCM2LPEN` reader - D1 DTCM2 Block Clock Enable During CSleep mode"]
pub use MDMALPEN_R as DTCM2LPEN_R;
#[doc = "Field `ITCMLPEN` reader - D1ITCM Block Clock Enable During CSleep mode"]
pub use MDMALPEN_R as ITCMLPEN_R;
#[doc = "Field `AXISRAMLPEN` reader - AXISRAM Block Clock Enable During CSleep mode"]
pub use MDMALPEN_R as AXISRAMLPEN_R;
#[doc = "Field `DMA2DLPEN` writer - DMA2D Clock Enable During CSleep Mode"]
pub use MDMALPEN_W as DMA2DLPEN_W;
#[doc = "Field `FLITFLPEN` writer - FLITF Clock Enable During CSleep Mode"]
pub use MDMALPEN_W as FLITFLPEN_W;
#[doc = "Field `FMCLPEN` writer - FMC Peripheral Clocks Enable During CSleep Mode"]
pub use MDMALPEN_W as FMCLPEN_W;
#[doc = "Field `OCTOSPI1LPEN` writer - OCTOSPI1 and OCTOSPI1 delay block enable during CSleep Mode"]
pub use MDMALPEN_W as OCTOSPI1LPEN_W;
#[doc = "Field `SDMMC1LPEN` writer - SDMMC1 and SDMMC1 Delay Clock Enable During CSleep Mode"]
pub use MDMALPEN_W as SDMMC1LPEN_W;
#[doc = "Field `OCTOSPI2LPEN` writer - OCTOSPI2 and OCTOSPI2 delay block enable during CSleep Mode"]
pub use MDMALPEN_W as OCTOSPI2LPEN_W;
#[doc = "Field `IOMNGRLPEN` writer - OCTOSPI IO manager enable during CSleep Mode"]
pub use MDMALPEN_W as IOMNGRLPEN_W;
#[doc = "Field `OTFD1LPEN` writer - OTFDEC1 enable during CSleep Mode"]
pub use MDMALPEN_W as OTFD1LPEN_W;
#[doc = "Field `OTFD2LPEN` writer - OTFDEC2 enable during CSleep Mode"]
pub use MDMALPEN_W as OTFD2LPEN_W;
#[doc = "Field `D1DTCM1LPEN` writer - D1DTCM1 Block Clock Enable During CSleep mode"]
pub use MDMALPEN_W as D1DTCM1LPEN_W;
#[doc = "Field `DTCM2LPEN` writer - D1 DTCM2 Block Clock Enable During CSleep mode"]
pub use MDMALPEN_W as DTCM2LPEN_W;
#[doc = "Field `ITCMLPEN` writer - D1ITCM Block Clock Enable During CSleep mode"]
pub use MDMALPEN_W as ITCMLPEN_W;
#[doc = "Field `AXISRAMLPEN` writer - AXISRAM Block Clock Enable During CSleep mode"]
pub use MDMALPEN_W as AXISRAMLPEN_W;
// Read accessors, one per register field; each extracts a single bit at the
// offset stated in its doc attribute.
impl R {
    #[doc = "Bit 0 - MDMA Clock Enable During CSleep Mode"]
    #[inline(always)]
    pub fn mdmalpen(&self) -> MDMALPEN_R {
        MDMALPEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 4 - DMA2D Clock Enable During CSleep Mode"]
    #[inline(always)]
    pub fn dma2dlpen(&self) -> DMA2DLPEN_R {
        DMA2DLPEN_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 8 - FLITF Clock Enable During CSleep Mode"]
    #[inline(always)]
    pub fn flitflpen(&self) -> FLITFLPEN_R {
        FLITFLPEN_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 12 - FMC Peripheral Clocks Enable During CSleep Mode"]
    #[inline(always)]
    pub fn fmclpen(&self) -> FMCLPEN_R {
        FMCLPEN_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 14 - OCTOSPI1 and OCTOSPI1 delay block enable during CSleep Mode"]
    #[inline(always)]
    pub fn octospi1lpen(&self) -> OCTOSPI1LPEN_R {
        OCTOSPI1LPEN_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 16 - SDMMC1 and SDMMC1 Delay Clock Enable During CSleep Mode"]
    #[inline(always)]
    pub fn sdmmc1lpen(&self) -> SDMMC1LPEN_R {
        SDMMC1LPEN_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 19 - OCTOSPI2 and OCTOSPI2 delay block enable during CSleep Mode"]
    #[inline(always)]
    pub fn octospi2lpen(&self) -> OCTOSPI2LPEN_R {
        OCTOSPI2LPEN_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 21 - OCTOSPI IO manager enable during CSleep Mode"]
    #[inline(always)]
    pub fn iomngrlpen(&self) -> IOMNGRLPEN_R {
        IOMNGRLPEN_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - OTFDEC1 enable during CSleep Mode"]
    #[inline(always)]
    pub fn otfd1lpen(&self) -> OTFD1LPEN_R {
        OTFD1LPEN_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - OTFDEC2 enable during CSleep Mode"]
    #[inline(always)]
    pub fn otfd2lpen(&self) -> OTFD2LPEN_R {
        OTFD2LPEN_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 28 - D1DTCM1 Block Clock Enable During CSleep mode"]
    #[inline(always)]
    pub fn d1dtcm1lpen(&self) -> D1DTCM1LPEN_R {
        D1DTCM1LPEN_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - D1 DTCM2 Block Clock Enable During CSleep mode"]
    #[inline(always)]
    pub fn dtcm2lpen(&self) -> DTCM2LPEN_R {
        DTCM2LPEN_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - D1ITCM Block Clock Enable During CSleep mode"]
    #[inline(always)]
    pub fn itcmlpen(&self) -> ITCMLPEN_R {
        ITCMLPEN_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - AXISRAM Block Clock Enable During CSleep mode"]
    #[inline(always)]
    pub fn axisramlpen(&self) -> AXISRAMLPEN_R {
        AXISRAMLPEN_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Write accessors, one per register field; each returns a bit-writer proxy
// parameterized on the field's bit offset.
impl W {
    #[doc = "Bit 0 - MDMA Clock Enable During CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn mdmalpen(&mut self) -> MDMALPEN_W<AHB3LPENR_SPEC, 0> {
        MDMALPEN_W::new(self)
    }
    #[doc = "Bit 4 - DMA2D Clock Enable During CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn dma2dlpen(&mut self) -> DMA2DLPEN_W<AHB3LPENR_SPEC, 4> {
        DMA2DLPEN_W::new(self)
    }
    #[doc = "Bit 8 - FLITF Clock Enable During CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn flitflpen(&mut self) -> FLITFLPEN_W<AHB3LPENR_SPEC, 8> {
        FLITFLPEN_W::new(self)
    }
    #[doc = "Bit 12 - FMC Peripheral Clocks Enable During CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn fmclpen(&mut self) -> FMCLPEN_W<AHB3LPENR_SPEC, 12> {
        FMCLPEN_W::new(self)
    }
    #[doc = "Bit 14 - OCTOSPI1 and OCTOSPI1 delay block enable during CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn octospi1lpen(&mut self) -> OCTOSPI1LPEN_W<AHB3LPENR_SPEC, 14> {
        OCTOSPI1LPEN_W::new(self)
    }
    #[doc = "Bit 16 - SDMMC1 and SDMMC1 Delay Clock Enable During CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn sdmmc1lpen(&mut self) -> SDMMC1LPEN_W<AHB3LPENR_SPEC, 16> {
        SDMMC1LPEN_W::new(self)
    }
    #[doc = "Bit 19 - OCTOSPI2 and OCTOSPI2 delay block enable during CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn octospi2lpen(&mut self) -> OCTOSPI2LPEN_W<AHB3LPENR_SPEC, 19> {
        OCTOSPI2LPEN_W::new(self)
    }
    #[doc = "Bit 21 - OCTOSPI IO manager enable during CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn iomngrlpen(&mut self) -> IOMNGRLPEN_W<AHB3LPENR_SPEC, 21> {
        IOMNGRLPEN_W::new(self)
    }
    #[doc = "Bit 22 - OTFDEC1 enable during CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn otfd1lpen(&mut self) -> OTFD1LPEN_W<AHB3LPENR_SPEC, 22> {
        OTFD1LPEN_W::new(self)
    }
    #[doc = "Bit 23 - OTFDEC2 enable during CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn otfd2lpen(&mut self) -> OTFD2LPEN_W<AHB3LPENR_SPEC, 23> {
        OTFD2LPEN_W::new(self)
    }
    #[doc = "Bit 28 - D1DTCM1 Block Clock Enable During CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn d1dtcm1lpen(&mut self) -> D1DTCM1LPEN_W<AHB3LPENR_SPEC, 28> {
        D1DTCM1LPEN_W::new(self)
    }
    #[doc = "Bit 29 - D1 DTCM2 Block Clock Enable During CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn dtcm2lpen(&mut self) -> DTCM2LPEN_W<AHB3LPENR_SPEC, 29> {
        DTCM2LPEN_W::new(self)
    }
    #[doc = "Bit 30 - D1ITCM Block Clock Enable During CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn itcmlpen(&mut self) -> ITCMLPEN_W<AHB3LPENR_SPEC, 30> {
        ITCMLPEN_W::new(self)
    }
    #[doc = "Bit 31 - AXISRAM Block Clock Enable During CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn axisramlpen(&mut self) -> AXISRAMLPEN_W<AHB3LPENR_SPEC, 31> {
        AXISRAMLPEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "RCC AHB3 Sleep Clock Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb3lpenr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb3lpenr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct AHB3LPENR_SPEC;
impl crate::RegisterSpec for AHB3LPENR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ahb3lpenr::R`](R) reader structure"]
impl crate::Readable for AHB3LPENR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ahb3lpenr::W`](W) writer structure"]
impl crate::Writable for AHB3LPENR_SPEC {
    // No fields need a fixed 0/1 written to stay unmodified on `modify`.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets AHB3LPENR to value 0"]
impl crate::Resettable for AHB3LPENR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use itertools::Itertools;
#[derive(Debug)]
// A purchasable shop item; its bonuses are added to the player's stats when
// equipped.
struct Item {
    // Gold cost of the item.
    cost: u32,
    // Attack bonus granted when equipped.
    damage: u32,
    // Defence bonus granted when equipped.
    armour: u32,
}
#[derive(Debug)]
// A combatant (the player or the boss). Hit points are signed so they can go
// below zero during a fight.
struct Player {
    hp: i32,
    damage: u32,
    armour: u32,
}
impl Item {
fn weapon(cost: u32, damage: u32) -> Self {
Item {
cost,
damage,
armour: 0,
}
}
fn armour(cost: u32, armour: u32) -> Self {
Item {
cost,
damage: 0,
armour,
}
}
fn ring(cost: u32, damage: u32, armour: u32) -> Self {
Item {
cost,
damage,
armour,
}
}
}
impl Player {
fn new(hp: i32, damage: u32, armour: u32) -> Self {
Player { hp, damage, armour }
}
pub fn equip_item(&mut self, item: &Item) {
self.damage += item.damage;
self.armour += item.armour;
}
fn get_damage_against(&self, enemy: &Player) -> u32 {
std::cmp::max(1, self.damage.saturating_sub(enemy.armour))
}
}
/// Simulate the fight to the death; the player always swings first.
/// Per-hit damage is constant for the whole fight, so it is computed once
/// up front.
fn does_player_wins(mut player: Player, mut enemy: Player) -> bool {
    let hit_on_enemy = player.get_damage_against(&enemy) as i32;
    let hit_on_player = enemy.get_damage_against(&player) as i32;
    loop {
        enemy.hp -= hit_on_enemy;
        if enemy.hp <= 0 {
            break true;
        }
        player.hp -= hit_on_player;
        if player.hp <= 0 {
            break false;
        }
    }
}
/// Least gold that still wins the fight. Every loadout is one weapon, one
/// armour entry, and two entries from `rings` (the caller supplies dummy
/// zero-cost rings to model "fewer than two rings").
/// Panics if no loadout wins, matching the original `unwrap` behaviour.
fn part_1(weapons: &[Item], armours: &[Item], rings: &[Item]) -> u32 {
    let mut best: Option<u32> = None;
    for weapon in weapons {
        for armour in armours {
            for pair in rings.iter().combinations(2) {
                let mut player = Player::new(100, 0, 0);
                player.equip_item(weapon);
                player.equip_item(armour);
                player.equip_item(pair[0]);
                player.equip_item(pair[1]);
                let boss = Player::new(103, 9, 2);
                if does_player_wins(player, boss) {
                    let spent = weapon.cost + armour.cost + pair[0].cost + pair[1].cost;
                    best = Some(best.map_or(spent, |b| b.min(spent)));
                }
            }
        }
    }
    best.unwrap()
}
/// Most gold that can be spent while still losing the fight; the mirror
/// image of `part_1`. Panics if every loadout wins, matching the original
/// `unwrap` behaviour.
fn part_2(weapons: &[Item], armours: &[Item], rings: &[Item]) -> u32 {
    let mut worst: Option<u32> = None;
    for weapon in weapons {
        for armour in armours {
            for pair in rings.iter().combinations(2) {
                let mut player = Player::new(100, 0, 0);
                player.equip_item(weapon);
                player.equip_item(armour);
                player.equip_item(pair[0]);
                player.equip_item(pair[1]);
                let boss = Player::new(103, 9, 2);
                if !does_player_wins(player, boss) {
                    let spent = weapon.cost + armour.cost + pair[0].cost + pair[1].cost;
                    worst = Some(worst.map_or(spent, |w| w.max(spent)));
                }
            }
        }
    }
    worst.unwrap()
}
fn main() {
    // Shop inventory from the puzzle description: weapon(cost, damage).
    let weapons = vec![
        Item::weapon(8, 4),
        Item::weapon(10, 5),
        Item::weapon(25, 6),
        Item::weapon(40, 7),
        Item::weapon(74, 8),
    ];
    // armour(cost, armour); the free zero-stat entry models "no armour".
    let armours = vec![
        Item::armour(0, 0),
        Item::armour(13, 1),
        Item::armour(31, 2),
        Item::armour(53, 3),
        Item::armour(75, 4),
        Item::armour(102, 5),
    ];
    // ring(cost, damage, armour). Two zero-cost dummy rings are included so
    // that `combinations(2)` in part_1/part_2 also covers the "no ring" and
    // "one ring" loadouts.
    let rings = vec![
        Item::ring(0, 0, 0),
        Item::ring(0, 0, 0),
        Item::ring(25, 1, 0),
        Item::ring(50, 2, 0),
        Item::ring(100, 3, 0),
        Item::ring(20, 0, 1),
        Item::ring(40, 0, 2),
        Item::ring(80, 0, 3),
    ];
    println!("First puzzle: {}", part_1(&weapons, &armours, &rings));
    println!("Second puzzle: {}", part_2(&weapons, &armours, &rings));
}
#[cfg(test)]
mod day21 {
    use super::*;
    #[test]
    fn test_part_1() {
        // A stronger, tougher player beats a weak boss...
        let boss = Player::new(12, 7, 2);
        let player = Player::new(8, 5, 5);
        assert_eq!(true, does_player_wins(player, boss));
        // ...but this particular 148-gold loadout loses to the real boss.
        let boss = Player::new(103, 9, 2);
        let mut player = Player::new(100, 0, 0);
        player.equip_item(&Item { cost: 10, damage: 5, armour: 0 });
        player.equip_item(&Item { cost: 13, damage: 0, armour: 1 });
        player.equip_item(&Item { cost: 25, damage: 1, armour: 0 });
        player.equip_item(&Item { cost: 100, damage: 3, armour: 0 });
        assert_eq!(false, does_player_wins(player, boss));
    }
    #[test]
    fn test_player_get_damage_against() {
        // Armour exceeding damage still lets the minimum 1 point through.
        let boss = Player::new(100, 100, 100);
        let player = Player::new(10, 10, 10);
        assert_eq!(1, player.get_damage_against(&boss));
        assert_eq!(90, boss.get_damage_against(&player));
    }
}
|
#![feature(alloc)]
#![feature(heap_api)]
#![feature(core_intrinsics)]
#![feature(shared)]
#![feature(get_type_id)]
extern crate alloc;
#[macro_use]
extern crate impl_any;
pub mod gc;
pub mod lang;
|
//! # PostgreSQL Driver
mod models;
mod schema;
use crate::core::{Csrf, Key, Service, User};
use crate::driver;
use crate::driver::Error;
use chrono::{DateTime, Utc};
use diesel::prelude::*;
use diesel::r2d2::ConnectionManager;
embed_migrations!("migrations/postgres");
/// PostgreSQL driver backed by an r2d2 connection pool.
#[derive(Clone)]
pub struct Driver {
    // Cloning the driver clones the pool handle, not the connections.
    pool: r2d2::Pool<ConnectionManager<PgConnection>>,
}
/// A connection checked out of the pool.
type PooledConnection = r2d2::PooledConnection<ConnectionManager<PgConnection>>;
impl Driver {
    /// Build a connection pool of at most `max_connections` for
    /// `database_url` and run any pending embedded migrations before
    /// returning the driver.
    pub fn initialise(database_url: &str, max_connections: u32) -> Result<Self, Error> {
        let manager = ConnectionManager::<PgConnection>::new(database_url);
        let pool = r2d2::Pool::builder()
            .max_size(max_connections)
            .build(manager)
            .map_err(Error::R2d2)?;
        let driver = Driver { pool };
        driver.run_migrations()?;
        Ok(driver)
    }
    /// Check a connection out of the pool.
    fn connection(&self) -> Result<PooledConnection, Error> {
        self.pool.get().map_err(Error::R2d2)
    }
    /// Apply the migrations embedded at compile time by the
    /// `embed_migrations!("migrations/postgres")` invocation above.
    fn run_migrations(&self) -> Result<(), Error> {
        let connection = self.connection()?;
        embedded_migrations::run(&connection).map_err(Error::DieselMigrations)
    }
}
impl driver::Driver for Driver {
    /// Clone this driver behind a trait object; the clone shares the pool.
    fn box_clone(&self) -> Box<driver::Driver> {
        Box::new((*self).clone())
    }
    /// Page of key ids strictly below `lt`, ascending, at most `limit`
    /// rows, optionally restricted to one service.
    fn key_list_where_id_lt(
        &self,
        lt: i64,
        limit: i64,
        service_id_mask: Option<i64>,
    ) -> Result<Vec<i64>, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        match service_id_mask {
            Some(service_id_mask) => auth_key
                .select(key_id)
                .filter(service_id.eq(service_id_mask).and(key_id.lt(lt)))
                .limit(limit)
                .order(key_id.asc())
                .load::<i64>(&conn),
            None => auth_key
                .select(key_id)
                .filter(key_id.lt(lt))
                .limit(limit)
                .order(key_id.asc())
                .load::<i64>(&conn),
        }
        .map_err(Error::Diesel)
    }
    /// Page of key ids strictly above `gt`; mirror of
    /// `key_list_where_id_lt`.
    fn key_list_where_id_gt(
        &self,
        gt: i64,
        limit: i64,
        service_id_mask: Option<i64>,
    ) -> Result<Vec<i64>, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        match service_id_mask {
            Some(service_id_mask) => auth_key
                .select(key_id)
                .filter(service_id.eq(service_id_mask).and(key_id.gt(gt)))
                .limit(limit)
                .order(key_id.asc())
                .load::<i64>(&conn),
            None => auth_key
                .select(key_id)
                .filter(key_id.gt(gt))
                .limit(limit)
                .order(key_id.asc())
                .load::<i64>(&conn),
        }
        .map_err(Error::Diesel)
    }
    /// Insert a new key row, timestamping created/updated with "now", and
    /// return the stored record.
    fn key_create(
        &self,
        name: &str,
        value: &str,
        key_service_id: Option<i64>,
        key_user_id: Option<i64>,
    ) -> Result<Key, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        let now = Utc::now();
        // Note: this `value` binding shadows the `value: &str` parameter,
        // which is captured as `key_value` just above.
        let value = models::AuthKeyInsert {
            created_at: &now,
            updated_at: &now,
            key_name: name,
            key_value: value,
            service_id: key_service_id,
            user_id: key_user_id,
        };
        diesel::insert_into(auth_key)
            .values(&value)
            .get_result::<models::AuthKey>(&conn)
            .map_err(Error::Diesel)
            .map(Into::into)
    }
    /// Look up a key by primary id; `Ok(None)` when it does not exist.
    fn key_read_by_id(&self, id: i64) -> Result<Option<Key>, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        auth_key
            .filter(key_id.eq(id))
            .get_result::<models::AuthKey>(&conn)
            .map(|key| Some(key.into()))
            // Translate diesel's NotFound into Ok(None); other errors propagate.
            .or_else(|err| match err {
                diesel::result::Error::NotFound => Ok(None),
                _ => Err(Error::Diesel(err)),
            })
    }
    /// Find the (oldest) key owned by `key_user_id` within `key_service_id`;
    /// `Ok(None)` when none exists.
    fn key_read_by_user_id(
        &self,
        key_service_id: i64,
        key_user_id: i64,
    ) -> Result<Option<Key>, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        auth_key
            .filter(user_id.eq(key_user_id).and(service_id.eq(key_service_id)))
            // TODO(refactor): Better method to handle multiple keys?
            .order(created_at.asc())
            .get_result::<models::AuthKey>(&conn)
            .map(|key| Some(key.into()))
            .or_else(|err| match err {
                diesel::result::Error::NotFound => Ok(None),
                _ => Err(Error::Diesel(err)),
            })
    }
    /// Root keys are keys bound to neither a service nor a user.
    fn key_read_by_root_value(&self, value: &str) -> Result<Option<Key>, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        auth_key
            .filter(
                key_value
                    .eq(value)
                    .and(service_id.is_null())
                    .and(user_id.is_null()),
            )
            .get_result::<models::AuthKey>(&conn)
            .map(|key| Some(key.into()))
            .or_else(|err| match err {
                diesel::result::Error::NotFound => Ok(None),
                _ => Err(Error::Diesel(err)),
            })
    }
    /// Service keys have a service id but no user id.
    fn key_read_by_service_value(&self, value: &str) -> Result<Option<Key>, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        auth_key
            .filter(
                key_value
                    .eq(value)
                    .and(service_id.is_not_null())
                    .and(user_id.is_null()),
            )
            .get_result::<models::AuthKey>(&conn)
            .map(|key| Some(key.into()))
            .or_else(|err| match err {
                diesel::result::Error::NotFound => Ok(None),
                _ => Err(Error::Diesel(err)),
            })
    }
    /// User keys have both a user id and the given service id.
    fn key_read_by_user_value(
        &self,
        key_service_id: i64,
        value: &str,
    ) -> Result<Option<Key>, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        auth_key
            .filter(
                key_value
                    .eq(value)
                    .and(service_id.eq(key_service_id).and(user_id.is_not_null())),
            )
            .get_result::<models::AuthKey>(&conn)
            .map(|key| Some(key.into()))
            .or_else(|err| match err {
                diesel::result::Error::NotFound => Ok(None),
                _ => Err(Error::Diesel(err)),
            })
    }
    /// Rename a key (when `name` is Some) and bump its `updated_at`.
    fn key_update_by_id(&self, id: i64, name: Option<&str>) -> Result<Key, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        let now = chrono::Utc::now();
        let value = models::AuthKeyUpdate {
            updated_at: &now,
            key_name: name,
        };
        diesel::update(auth_key.filter(key_id.eq(id)))
            .set(&value)
            .get_result::<models::AuthKey>(&conn)
            .map_err(Error::Diesel)
            .map(Into::into)
    }
    /// Delete one key by id; returns the number of rows removed (0 or 1).
    fn key_delete_by_id(&self, id: i64) -> Result<usize, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        diesel::delete(auth_key.filter(key_id.eq(id)))
            .execute(&conn)
            .map_err(Error::Diesel)
    }
    /// Delete every root key (no service id and no user id); returns the
    /// number of rows removed.
    fn key_delete_root(&self) -> Result<usize, Error> {
        use crate::driver::postgres::schema::auth_key::dsl::*;
        let conn = self.connection()?;
        diesel::delete(auth_key.filter(service_id.is_null().and(user_id.is_null())))
            .execute(&conn)
            .map_err(Error::Diesel)
    }
    /// Page of service ids strictly below `lt`, ascending, at most `limit`.
    fn service_list_where_id_lt(&self, lt: i64, limit: i64) -> Result<Vec<i64>, Error> {
        use crate::driver::postgres::schema::auth_service::dsl::*;
        let conn = self.connection()?;
        auth_service
            .select(service_id)
            .filter(service_id.lt(lt))
            .limit(limit)
            .order(service_id.asc())
            .load::<i64>(&conn)
            .map_err(Error::Diesel)
    }
    /// Page of service ids strictly above `gt`; mirror of the `lt` variant.
    fn service_list_where_id_gt(&self, gt: i64, limit: i64) -> Result<Vec<i64>, Error> {
        use crate::driver::postgres::schema::auth_service::dsl::*;
        let conn = self.connection()?;
        auth_service
            .select(service_id)
            .filter(service_id.gt(gt))
            .limit(limit)
            .order(service_id.asc())
            .load::<i64>(&conn)
            .map_err(Error::Diesel)
    }
    /// Insert a new service row, timestamped "now", returning the record.
    fn service_create(&self, name: &str, url: &str) -> Result<Service, Error> {
        use crate::driver::postgres::schema::auth_service::dsl::*;
        let conn = self.connection()?;
        let now = Utc::now();
        let value = models::AuthServiceInsert {
            created_at: &now,
            updated_at: &now,
            service_name: name,
            service_url: url,
        };
        diesel::insert_into(auth_service)
            .values(&value)
            .get_result::<models::AuthService>(&conn)
            .map_err(Error::Diesel)
            .map(Into::into)
    }
    /// Look up a service by id; `Ok(None)` when it does not exist.
    fn service_read_by_id(&self, id: i64) -> Result<Option<Service>, Error> {
        use crate::driver::postgres::schema::auth_service::dsl::*;
        let conn = self.connection()?;
        auth_service
            .filter(service_id.eq(id))
            .get_result::<models::AuthService>(&conn)
            .map(|service| Some(service.into()))
            .or_else(|err| match err {
                diesel::result::Error::NotFound => Ok(None),
                _ => Err(Error::Diesel(err)),
            })
    }
    /// Rename a service (when `name` is Some) and bump its `updated_at`.
    fn service_update_by_id(&self, id: i64, name: Option<&str>) -> Result<Service, Error> {
        use crate::driver::postgres::schema::auth_service::dsl::*;
        let conn = self.connection()?;
        let now = chrono::Utc::now();
        let value = models::AuthServiceUpdate {
            updated_at: &now,
            service_name: name,
        };
        diesel::update(auth_service.filter(service_id.eq(id)))
            .set(&value)
            .get_result::<models::AuthService>(&conn)
            .map_err(Error::Diesel)
            .map(Into::into)
    }
    /// Delete one service by id; returns the number of rows removed.
    fn service_delete_by_id(&self, id: i64) -> Result<usize, Error> {
        use crate::driver::postgres::schema::auth_service::dsl::*;
        let conn = self.connection()?;
        diesel::delete(auth_service.filter(service_id.eq(id)))
            .execute(&conn)
            .map_err(Error::Diesel)
    }
    /// Page of user ids strictly below `lt`, ascending, at most `limit`.
    fn user_list_where_id_lt(&self, lt: i64, limit: i64) -> Result<Vec<i64>, Error> {
        use crate::driver::postgres::schema::auth_user::dsl::*;
        let conn = self.connection()?;
        auth_user
            .select(user_id)
            .filter(user_id.lt(lt))
            .limit(limit)
            .order(user_id.asc())
            .load::<i64>(&conn)
            .map_err(Error::Diesel)
    }
    /// Page of user ids strictly above `gt`; mirror of the `lt` variant.
    fn user_list_where_id_gt(&self, gt: i64, limit: i64) -> Result<Vec<i64>, Error> {
        use crate::driver::postgres::schema::auth_user::dsl::*;
        let conn = self.connection()?;
        auth_user
            .select(user_id)
            .filter(user_id.gt(gt))
            .limit(limit)
            .order(user_id.asc())
            .load::<i64>(&conn)
            .map_err(Error::Diesel)
    }
    /// Insert a new user row, timestamped "now", returning the record.
    /// `password_hash`/`password_revision` are optional (passwordless users).
    fn user_create(
        &self,
        name: &str,
        email: &str,
        active: bool,
        password_hash: Option<&str>,
        password_revision: Option<i64>,
    ) -> Result<User, Error> {
        use crate::driver::postgres::schema::auth_user::dsl::*;
        let conn = self.connection()?;
        let now = Utc::now();
        let value = models::AuthUserInsert {
            created_at: &now,
            updated_at: &now,
            user_name: name,
            user_active: active,
            user_email: email,
            user_password_hash: password_hash,
            user_password_revision: password_revision,
        };
        diesel::insert_into(auth_user)
            .values(&value)
            .get_result::<models::AuthUser>(&conn)
            .map_err(Error::Diesel)
            .map(Into::into)
    }
    /// Look up a user by id; `Ok(None)` when it does not exist.
    fn user_read_by_id(&self, id: i64) -> Result<Option<User>, Error> {
        use crate::driver::postgres::schema::auth_user::dsl::*;
        let conn = self.connection()?;
        auth_user
            .filter(user_id.eq(id))
            .get_result::<models::AuthUser>(&conn)
            .map(|user| Some(user.into()))
            .or_else(|err| match err {
                diesel::result::Error::NotFound => Ok(None),
                _ => Err(Error::Diesel(err)),
            })
    }
    /// Look up a user by email; `Ok(None)` when it does not exist.
    fn user_read_by_email(&self, email: &str) -> Result<Option<User>, Error> {
        use crate::driver::postgres::schema::auth_user::dsl::*;
        let conn = self.connection()?;
        auth_user
            .filter(user_email.eq(email))
            .get_result::<models::AuthUser>(&conn)
            .map(|user| Some(user.into()))
            .or_else(|err| match err {
                diesel::result::Error::NotFound => Ok(None),
                _ => Err(Error::Diesel(err)),
            })
    }
    /// Update name and/or active flag (None fields untouched) and bump
    /// `updated_at`.
    fn user_update_by_id(
        &self,
        id: i64,
        name: Option<&str>,
        active: Option<bool>,
    ) -> Result<User, Error> {
        use crate::driver::postgres::schema::auth_user::dsl::*;
        let conn = self.connection()?;
        let now = chrono::Utc::now();
        let value = models::AuthUserUpdate {
            updated_at: &now,
            user_name: name,
            user_active: active,
        };
        diesel::update(auth_user.filter(user_id.eq(id)))
            .set(&value)
            .get_result::<models::AuthUser>(&conn)
            .map_err(Error::Diesel)
            .map(Into::into)
    }
    /// Overwrite a user's password hash and revision; returns the number of
    /// rows updated (0 when the id does not exist).
    fn user_update_password_by_id(
        &self,
        id: i64,
        password_hash: &str,
        password_revision: i64,
    ) -> Result<usize, Error> {
        use crate::driver::postgres::schema::auth_user::dsl::*;
        let conn = self.connection()?;
        let now = chrono::Utc::now();
        diesel::update(auth_user.filter(user_id.eq(id)))
            .set((
                updated_at.eq(now),
                user_password_hash.eq(password_hash),
                user_password_revision.eq(password_revision),
            ))
            .execute(&conn)
            .map_err(Error::Diesel)
    }
    /// Delete one user by id; returns the number of rows removed.
    fn user_delete_by_id(&self, id: i64) -> Result<usize, Error> {
        use crate::driver::postgres::schema::auth_user::dsl::*;
        let conn = self.connection()?;
        diesel::delete(auth_user.filter(user_id.eq(id)))
            .execute(&conn)
            .map_err(Error::Diesel)
    }
    /// Insert a CSRF token row for a service, timestamped "now".
    fn csrf_create(&self, key: &str, value: &str, csrf_service_id: i64) -> Result<Csrf, Error> {
        use crate::driver::postgres::schema::auth_csrf::dsl::*;
        let conn = self.connection()?;
        let now = Utc::now();
        // Shadows the `value: &str` parameter, captured as `csrf_value` above.
        let value = models::AuthCsrfInsert {
            created_at: &now,
            csrf_key: key,
            csrf_value: value,
            service_id: csrf_service_id,
        };
        diesel::insert_into(auth_csrf)
            .values(&value)
            .get_result::<models::AuthCsrf>(&conn)
            .map_err(Error::Diesel)
            .map(Into::into)
    }
    /// Look up a CSRF token by key; `Ok(None)` when it does not exist.
    fn csrf_read_by_key(&self, key: &str) -> Result<Option<Csrf>, Error> {
        use crate::driver::postgres::schema::auth_csrf::dsl::*;
        let conn = self.connection()?;
        auth_csrf
            .filter(csrf_key.eq(key))
            .get_result::<models::AuthCsrf>(&conn)
            .map(|csrf| Some(csrf.into()))
            .or_else(|err| match err {
                diesel::result::Error::NotFound => Ok(None),
                _ => Err(Error::Diesel(err)),
            })
    }
    /// Delete one CSRF token by key; returns the number of rows removed.
    fn csrf_delete_by_key(&self, key: &str) -> Result<usize, Error> {
        use crate::driver::postgres::schema::auth_csrf::dsl::*;
        let conn = self.connection()?;
        diesel::delete(auth_csrf.filter(csrf_key.eq(key)))
            .execute(&conn)
            .map_err(Error::Diesel)
    }
    /// Bulk-expire tokens: delete every row created at or before the given
    /// instant (`le`, i.e. <=); returns the number of rows removed.
    fn csrf_delete_by_created_at(&self, csrf_created_at: &DateTime<Utc>) -> Result<usize, Error> {
        use crate::driver::postgres::schema::auth_csrf::dsl::*;
        let conn = self.connection()?;
        diesel::delete(auth_csrf.filter(created_at.le(csrf_created_at)))
            .execute(&conn)
            .map_err(Error::Diesel)
    }
}
|
/// Tri-state result of evaluating a condition.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Status {
    Met,
    NotMet,
    /// Cannot be decided from the available information.
    Unknown,
}
/// The attribute a condition inspects.
/// NOTE(review): variant names suggest some household/eligibility screening
/// domain — confirm against the consuming code, which is outside this view.
#[derive(Debug, Clone, Copy)]
pub enum Field {
    Age,
    County,
    ChildrenCount,
    Income,
    SingleParent,
}
/// The comparison a condition applies to a field's value.
#[derive(Debug)]
pub enum Type {
    Boolean(bool),
    // Assumed (low, high) bounds; inclusivity not visible here — confirm at
    // the comparison site.
    IntRange(u32, u32),
    IntEquals(u32),
    StringEquals(String),
}
|
extern crate exitfailure;
extern crate l_compilator;
/// Entry point: run the compiler's `App`, converting any error into an
/// `ExitFailure` so failures print nicely and set a non-zero exit code.
fn main() -> Result<(), exitfailure::ExitFailure> {
    Ok(l_compilator::App::run()?)
}
|
use std::io;
use std::io::prelude::*;
use std::cmp;
/// Advent-of-Code day-2 style spreadsheet checksums, read from stdin.
/// Part 1 sums each row's (max - min); part 2 sums, for each row, the
/// quotient of the one pair where one value evenly divides the other.
fn main() {
    let stdin = io::stdin();
    let mut minmax_sum = 0; // part 1
    let mut divides_sum = 0; // part 2
    for line in stdin.lock().lines() {
        // Parse a whitespace-separated row of numbers.
        // `split_whitespace` already ignores leading/trailing whitespace,
        // and `parse` replaces the verbose `u32::from_str_radix(x, 10)`.
        let line = line.unwrap();
        let row: Vec<u32> = line
            .split_whitespace()
            .map(|x| x.parse().unwrap())
            .collect();
        // Fix: a blank input line used to panic on min()/max() unwrap.
        if row.is_empty() {
            continue;
        }
        let min = row.iter().min().unwrap();
        let max = row.iter().max().unwrap();
        minmax_sum += max - min;
        // All (x, y) pairs in the row; only one pair divides evenly, so stop
        // scanning y once it is found for this x.
        for (i, x) in row.iter().enumerate() {
            for y in &row[i + 1..] {
                let small = cmp::min(x, y);
                let large = cmp::max(x, y);
                if large % small == 0 {
                    divides_sum += large / small;
                    break;
                }
            }
        }
    }
    println!("{}", minmax_sum);
    println!("{}", divides_sum);
}
|
use std::any::Any;
use std::cell::Cell;
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
use std::marker::PhantomData;
use std::mem;
use std::ops::Deref;
use std::ptr;
use std::rc::Rc;
use num_bigint::BigInt;
use num_complex::Complex64;
use num_traits::{One, Zero};
use crate::bytecode;
use crate::exceptions;
use crate::frame::Scope;
use crate::function::{IntoPyNativeFunc, PyFuncArgs};
use crate::obj::objbool;
use crate::obj::objbuiltinfunc::PyBuiltinFunction;
use crate::obj::objbytearray;
use crate::obj::objbytes;
use crate::obj::objclassmethod;
use crate::obj::objcode;
use crate::obj::objcode::PyCodeRef;
use crate::obj::objcomplex::{self, PyComplex};
use crate::obj::objdict::{self, PyDict, PyDictRef};
use crate::obj::objellipsis;
use crate::obj::objenumerate;
use crate::obj::objfilter;
use crate::obj::objfloat::{self, PyFloat};
use crate::obj::objframe;
use crate::obj::objfunction::{self, PyFunction, PyMethod};
use crate::obj::objgenerator;
use crate::obj::objint::{self, PyInt, PyIntRef};
use crate::obj::objiter;
use crate::obj::objlist::{self, PyList};
use crate::obj::objmap;
use crate::obj::objmemory;
use crate::obj::objmodule::{self, PyModule};
use crate::obj::objnone::{self, PyNone, PyNoneRef};
use crate::obj::objobject;
use crate::obj::objproperty;
use crate::obj::objproperty::PropertyBuilder;
use crate::obj::objrange;
use crate::obj::objset::{self, PySet};
use crate::obj::objslice;
use crate::obj::objstaticmethod;
use crate::obj::objstr;
use crate::obj::objsuper;
use crate::obj::objtuple::{self, PyTuple, PyTupleRef};
use crate::obj::objtype::{self, PyClass, PyClassRef};
use crate::obj::objweakproxy;
use crate::obj::objweakref;
use crate::obj::objzip;
use crate::vm::VirtualMachine;
/* Python objects and references.
Okay, so each python object is itself represented by a struct (PyObject). Each
python object can have several references to it (PyObjectRef). These
references are Rc (reference counting) rust smart pointers. So when
all references are destroyed, the object itself also can be cleaned up.
Basically reference counting, but then done by rust.
*/
/*
* Good reference: https://github.com/ProgVal/pythonvm-rust/blob/master/src/objects/mod.rs
*/
/// The `PyObjectRef` is one of the most used types. It is a reference to a
/// python object. A single python object can have multiple references, and
/// this reference counting is accounted for by this type. Use the `.clone()`
/// method to create a new reference and increment the amount of references
/// to the python object by 1.
pub type PyObjectRef = Rc<PyObject<dyn PyObjectPayload>>;
/// Use this type for functions which return a python object or an exception.
/// Both the python object and the python exception are `PyObjectRef` types
/// since exceptions are also python objects.
pub type PyResult<T = PyObjectRef> = Result<T, PyObjectRef>; // A valid value, or an exception
/// For attributes we do not use a dict, but a hashmap. This is probably
/// faster, unordered, and only supports strings as keys.
pub type PyAttributes = HashMap<String, PyObjectRef>;
impl fmt::Display for PyObject<dyn PyObjectPayload> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Classes render as "type object 'X'" or "'X' object"; modules as
        // "module 'X'"; anything else falls through to "'<class>' object".
        if let Some(PyClass { ref name, .. }) = self.payload::<PyClass>() {
            let type_name = self.class().name.clone();
            // We don't have access to a vm, so just assume that if its parent's name
            // is type, it's a type
            if type_name == "type" {
                return write!(f, "type object '{}'", name);
            } else {
                return write!(f, "'{}' object", type_name);
            }
        }
        if let Some(PyModule { ref name, .. }) = self.payload::<PyModule>() {
            return write!(f, "module '{}'", name);
        }
        write!(f, "'{}' object", self.class().name)
    }
}
/// The interpreter context: one handle to every built-in class object plus
/// the shared singletons (`None`, `Ellipsis`, `NotImplemented`, `True`,
/// `False`). Built once by `PyContext::new` and then cloned out via the
/// accessor methods below.
#[derive(Debug)]
pub struct PyContext {
    pub bytes_type: PyClassRef,
    pub bytesiterator_type: PyClassRef,
    pub bytearray_type: PyClassRef,
    pub bytearrayiterator_type: PyClassRef,
    pub bool_type: PyClassRef,
    pub classmethod_type: PyClassRef,
    pub code_type: PyClassRef,
    pub dict_type: PyClassRef,
    pub ellipsis_type: PyClassRef,
    pub enumerate_type: PyClassRef,
    pub filter_type: PyClassRef,
    pub float_type: PyClassRef,
    pub frame_type: PyClassRef,
    pub frozenset_type: PyClassRef,
    pub generator_type: PyClassRef,
    pub int_type: PyClassRef,
    pub iter_type: PyClassRef,
    pub complex_type: PyClassRef,
    // Cached bool singletons (bool subclasses int, hence PyIntRef).
    pub true_value: PyIntRef,
    pub false_value: PyIntRef,
    pub list_type: PyClassRef,
    pub listiterator_type: PyClassRef,
    pub dictkeyiterator_type: PyClassRef,
    pub dictvalueiterator_type: PyClassRef,
    pub dictitemiterator_type: PyClassRef,
    pub dictkeys_type: PyClassRef,
    pub dictvalues_type: PyClassRef,
    pub dictitems_type: PyClassRef,
    pub map_type: PyClassRef,
    pub memoryview_type: PyClassRef,
    // Shared singleton instances.
    pub none: PyNoneRef,
    pub ellipsis: PyEllipsisRef,
    pub not_implemented: PyNotImplementedRef,
    pub tuple_type: PyClassRef,
    pub tupleiterator_type: PyClassRef,
    pub set_type: PyClassRef,
    pub staticmethod_type: PyClassRef,
    pub super_type: PyClassRef,
    pub str_type: PyClassRef,
    pub range_type: PyClassRef,
    pub rangeiterator_type: PyClassRef,
    pub slice_type: PyClassRef,
    pub type_type: PyClassRef,
    pub zip_type: PyClassRef,
    pub function_type: PyClassRef,
    pub builtin_function_or_method_type: PyClassRef,
    pub property_type: PyClassRef,
    pub readonly_property_type: PyClassRef,
    pub module_type: PyClassRef,
    pub bound_method_type: PyClassRef,
    pub weakref_type: PyClassRef,
    pub weakproxy_type: PyClassRef,
    pub object: PyClassRef,
    pub exceptions: exceptions::ExceptionZoo,
}
/// Create a fresh class named `name` with the single base `base` and an
/// empty attribute table. Panics (unwrap) if class creation fails.
pub fn create_type(name: &str, type_type: &PyClassRef, base: &PyClassRef) -> PyClassRef {
    let dict = PyAttributes::new();
    objtype::new(type_type.clone(), name, vec![base.clone()], dict).unwrap()
}
pub type PyNotImplementedRef = PyRef<PyNotImplemented>;
/// Payload of the `NotImplemented` singleton (zero-sized marker).
#[derive(Debug)]
pub struct PyNotImplemented;
impl PyValue for PyNotImplemented {
    fn class(vm: &VirtualMachine) -> PyClassRef {
        // The singleton's own class is NotImplementedType.
        vm.ctx.not_implemented().class()
    }
}
pub type PyEllipsisRef = PyRef<PyEllipsis>;
/// Payload of the `Ellipsis` (`...`) singleton (zero-sized marker).
#[derive(Debug)]
pub struct PyEllipsis;
impl PyValue for PyEllipsis {
    fn class(vm: &VirtualMachine) -> PyClassRef {
        vm.ctx.ellipsis_type.clone()
    }
}
/// Bootstrap the circular `type`/`object` pair and return `(type, object)`.
fn init_type_hierarchy() -> (PyClassRef, PyClassRef) {
    // `type` inherits from `object`
    // and both `type` and `object are instances of `type`.
    // to produce this circular dependency, we need an unsafe block.
    // (and yes, this will never get dropped. TODO?)
    let (type_type, object_type) = unsafe {
        // Both objects are first created with an uninitialized `typ` field,
        // which is patched in below via raw-pointer writes.
        // NOTE(review): `mem::uninitialized` is deprecated and UB for
        // non-trivial types — consider migrating to `MaybeUninit`.
        let object_type = PyObject {
            typ: mem::uninitialized(), // !
            dict: None,
            payload: PyClass {
                name: String::from("object"),
                mro: vec![],
                subclasses: RefCell::new(vec![]),
                attributes: RefCell::new(PyAttributes::new()),
            },
        }
        .into_ref();
        let type_type = PyObject {
            typ: mem::uninitialized(), // !
            dict: None,
            payload: PyClass {
                name: String::from("type"),
                // `type` has `object` in its mro.
                mro: vec![object_type.clone().downcast().unwrap()],
                subclasses: RefCell::new(vec![]),
                attributes: RefCell::new(PyAttributes::new()),
            },
        }
        .into_ref();
        // Patch both `typ` fields to point at `type`, closing the cycle.
        let object_type_ptr = PyObjectRef::into_raw(object_type.clone()) as *mut PyObject<PyClass>;
        let type_type_ptr = PyObjectRef::into_raw(type_type.clone()) as *mut PyObject<PyClass>;
        let type_type: PyClassRef = type_type.downcast().unwrap();
        let object_type: PyClassRef = object_type.downcast().unwrap();
        ptr::write(&mut (*object_type_ptr).typ, type_type.clone());
        ptr::write(&mut (*type_type_ptr).typ, type_type.clone());
        (type_type, object_type)
    };
    // Register `type` as a (weak) subclass of `object`.
    object_type
        .subclasses
        .borrow_mut()
        .push(objweakref::PyWeak::downgrade(&type_type.as_object()));
    (type_type, object_type)
}
// Basic objects:
impl PyContext {
pub fn new() -> Self {
let (type_type, object_type) = init_type_hierarchy();
let dict_type = create_type("dict", &type_type, &object_type);
let module_type = create_type("module", &type_type, &object_type);
let classmethod_type = create_type("classmethod", &type_type, &object_type);
let staticmethod_type = create_type("staticmethod", &type_type, &object_type);
let function_type = create_type("function", &type_type, &object_type);
let builtin_function_or_method_type =
create_type("builtin_function_or_method", &type_type, &object_type);
let property_type = create_type("property", &type_type, &object_type);
let readonly_property_type = create_type("readonly_property", &type_type, &object_type);
let super_type = create_type("super", &type_type, &object_type);
let weakref_type = create_type("ref", &type_type, &object_type);
let weakproxy_type = create_type("weakproxy", &type_type, &object_type);
let generator_type = create_type("generator", &type_type, &object_type);
let bound_method_type = create_type("method", &type_type, &object_type);
let str_type = create_type("str", &type_type, &object_type);
let list_type = create_type("list", &type_type, &object_type);
let listiterator_type = create_type("list_iterator", &type_type, &object_type);
let dictkeys_type = create_type("dict_keys", &type_type, &object_type);
let dictvalues_type = create_type("dict_values", &type_type, &object_type);
let dictitems_type = create_type("dict_items", &type_type, &object_type);
let dictkeyiterator_type = create_type("dict_keyiterator", &type_type, &object_type);
let dictvalueiterator_type = create_type("dict_valueiterator", &type_type, &object_type);
let dictitemiterator_type = create_type("dict_itemiterator", &type_type, &object_type);
let set_type = create_type("set", &type_type, &object_type);
let frozenset_type = create_type("frozenset", &type_type, &object_type);
let int_type = create_type("int", &type_type, &object_type);
let float_type = create_type("float", &type_type, &object_type);
let frame_type = create_type("frame", &type_type, &object_type);
let complex_type = create_type("complex", &type_type, &object_type);
let bytes_type = create_type("bytes", &type_type, &object_type);
let bytesiterator_type = create_type("bytes_iterator", &type_type, &object_type);
let bytearray_type = create_type("bytearray", &type_type, &object_type);
let bytearrayiterator_type = create_type("bytearray_iterator", &type_type, &object_type);
let tuple_type = create_type("tuple", &type_type, &object_type);
let tupleiterator_type = create_type("tuple_iterator", &type_type, &object_type);
let iter_type = create_type("iter", &type_type, &object_type);
let enumerate_type = create_type("enumerate", &type_type, &object_type);
let filter_type = create_type("filter", &type_type, &object_type);
let map_type = create_type("map", &type_type, &object_type);
let zip_type = create_type("zip", &type_type, &object_type);
let bool_type = create_type("bool", &type_type, &int_type);
let memoryview_type = create_type("memoryview", &type_type, &object_type);
let code_type = create_type("code", &type_type, &int_type);
let range_type = create_type("range", &type_type, &object_type);
let rangeiterator_type = create_type("range_iterator", &type_type, &object_type);
let slice_type = create_type("slice", &type_type, &object_type);
let exceptions = exceptions::ExceptionZoo::new(&type_type, &object_type);
fn create_object<T: PyObjectPayload>(payload: T, cls: &PyClassRef) -> PyRef<T> {
PyRef {
obj: PyObject::new(payload, cls.clone(), None),
_payload: PhantomData,
}
}
let none_type = create_type("NoneType", &type_type, &object_type);
let none = create_object(PyNone, &none_type);
let ellipsis_type = create_type("EllipsisType", &type_type, &object_type);
let ellipsis = create_object(PyEllipsis, &ellipsis_type);
let not_implemented_type = create_type("NotImplementedType", &type_type, &object_type);
let not_implemented = create_object(PyNotImplemented, ¬_implemented_type);
let true_value = create_object(PyInt::new(BigInt::one()), &bool_type);
let false_value = create_object(PyInt::new(BigInt::zero()), &bool_type);
let context = PyContext {
bool_type,
memoryview_type,
bytearray_type,
bytearrayiterator_type,
bytes_type,
bytesiterator_type,
code_type,
complex_type,
classmethod_type,
int_type,
float_type,
frame_type,
staticmethod_type,
list_type,
listiterator_type,
dictkeys_type,
dictvalues_type,
dictitems_type,
dictkeyiterator_type,
dictvalueiterator_type,
dictitemiterator_type,
set_type,
frozenset_type,
true_value,
false_value,
tuple_type,
tupleiterator_type,
iter_type,
ellipsis_type,
enumerate_type,
filter_type,
map_type,
zip_type,
dict_type,
none,
ellipsis,
not_implemented,
str_type,
range_type,
rangeiterator_type,
slice_type,
object: object_type,
function_type,
builtin_function_or_method_type,
super_type,
property_type,
readonly_property_type,
generator_type,
module_type,
bound_method_type,
weakref_type,
weakproxy_type,
type_type,
exceptions,
};
objtype::init(&context);
objlist::init(&context);
objset::init(&context);
objtuple::init(&context);
objobject::init(&context);
objdict::init(&context);
objfunction::init(&context);
objstaticmethod::init(&context);
objclassmethod::init(&context);
objgenerator::init(&context);
objint::init(&context);
objfloat::init(&context);
objcomplex::init(&context);
objbytes::init(&context);
objbytearray::init(&context);
objproperty::init(&context);
objmemory::init(&context);
objstr::init(&context);
objrange::init(&context);
objslice::init(&context);
objsuper::init(&context);
objtuple::init(&context);
objiter::init(&context);
objellipsis::init(&context);
objenumerate::init(&context);
objfilter::init(&context);
objmap::init(&context);
objzip::init(&context);
objbool::init(&context);
objcode::init(&context);
objframe::init(&context);
objweakref::init(&context);
objweakproxy::init(&context);
objnone::init(&context);
objmodule::init(&context);
exceptions::init(&context);
context
}
    // ------------------------------------------------------------------
    // Accessors: each returns a fresh reference (clone of the Rc-backed
    // handle) to the corresponding built-in class or singleton.
    // ------------------------------------------------------------------
    pub fn bytearray_type(&self) -> PyClassRef {
        self.bytearray_type.clone()
    }
    pub fn bytearrayiterator_type(&self) -> PyClassRef {
        self.bytearrayiterator_type.clone()
    }
    pub fn bytes_type(&self) -> PyClassRef {
        self.bytes_type.clone()
    }
    pub fn bytesiterator_type(&self) -> PyClassRef {
        self.bytesiterator_type.clone()
    }
    pub fn code_type(&self) -> PyClassRef {
        self.code_type.clone()
    }
    pub fn complex_type(&self) -> PyClassRef {
        self.complex_type.clone()
    }
    pub fn dict_type(&self) -> PyClassRef {
        self.dict_type.clone()
    }
    pub fn float_type(&self) -> PyClassRef {
        self.float_type.clone()
    }
    pub fn frame_type(&self) -> PyClassRef {
        self.frame_type.clone()
    }
    pub fn int_type(&self) -> PyClassRef {
        self.int_type.clone()
    }
    pub fn list_type(&self) -> PyClassRef {
        self.list_type.clone()
    }
    pub fn listiterator_type(&self) -> PyClassRef {
        self.listiterator_type.clone()
    }
    pub fn module_type(&self) -> PyClassRef {
        self.module_type.clone()
    }
    pub fn set_type(&self) -> PyClassRef {
        self.set_type.clone()
    }
    pub fn range_type(&self) -> PyClassRef {
        self.range_type.clone()
    }
    pub fn rangeiterator_type(&self) -> PyClassRef {
        self.rangeiterator_type.clone()
    }
    pub fn slice_type(&self) -> PyClassRef {
        self.slice_type.clone()
    }
    pub fn frozenset_type(&self) -> PyClassRef {
        self.frozenset_type.clone()
    }
    pub fn bool_type(&self) -> PyClassRef {
        self.bool_type.clone()
    }
    pub fn memoryview_type(&self) -> PyClassRef {
        self.memoryview_type.clone()
    }
    pub fn tuple_type(&self) -> PyClassRef {
        self.tuple_type.clone()
    }
    pub fn tupleiterator_type(&self) -> PyClassRef {
        self.tupleiterator_type.clone()
    }
    pub fn iter_type(&self) -> PyClassRef {
        self.iter_type.clone()
    }
    pub fn enumerate_type(&self) -> PyClassRef {
        self.enumerate_type.clone()
    }
    pub fn filter_type(&self) -> PyClassRef {
        self.filter_type.clone()
    }
    pub fn map_type(&self) -> PyClassRef {
        self.map_type.clone()
    }
    pub fn zip_type(&self) -> PyClassRef {
        self.zip_type.clone()
    }
    pub fn str_type(&self) -> PyClassRef {
        self.str_type.clone()
    }
    pub fn super_type(&self) -> PyClassRef {
        self.super_type.clone()
    }
    pub fn function_type(&self) -> PyClassRef {
        self.function_type.clone()
    }
    pub fn builtin_function_or_method_type(&self) -> PyClassRef {
        self.builtin_function_or_method_type.clone()
    }
    pub fn property_type(&self) -> PyClassRef {
        self.property_type.clone()
    }
    pub fn readonly_property_type(&self) -> PyClassRef {
        self.readonly_property_type.clone()
    }
    pub fn classmethod_type(&self) -> PyClassRef {
        self.classmethod_type.clone()
    }
    pub fn staticmethod_type(&self) -> PyClassRef {
        self.staticmethod_type.clone()
    }
    pub fn generator_type(&self) -> PyClassRef {
        self.generator_type.clone()
    }
    pub fn bound_method_type(&self) -> PyClassRef {
        self.bound_method_type.clone()
    }
    pub fn weakref_type(&self) -> PyClassRef {
        self.weakref_type.clone()
    }
    pub fn weakproxy_type(&self) -> PyClassRef {
        self.weakproxy_type.clone()
    }
    pub fn type_type(&self) -> PyClassRef {
        self.type_type.clone()
    }
    // Singleton accessors return plain object references.
    pub fn none(&self) -> PyObjectRef {
        self.none.clone().into_object()
    }
    pub fn ellipsis(&self) -> PyObjectRef {
        self.ellipsis.clone().into_object()
    }
    pub fn not_implemented(&self) -> PyObjectRef {
        self.not_implemented.clone().into_object()
    }
    pub fn object(&self) -> PyClassRef {
        self.object.clone()
    }
pub fn new_int<T: Into<BigInt>>(&self, i: T) -> PyObjectRef {
PyObject::new(PyInt::new(i), self.int_type(), None)
}
pub fn new_float(&self, value: f64) -> PyObjectRef {
PyObject::new(PyFloat::from(value), self.float_type(), None)
}
pub fn new_complex(&self, value: Complex64) -> PyObjectRef {
PyObject::new(PyComplex::from(value), self.complex_type(), None)
}
pub fn new_str(&self, s: String) -> PyObjectRef {
PyObject::new(objstr::PyString { value: s }, self.str_type(), None)
}
pub fn new_bytes(&self, data: Vec<u8>) -> PyObjectRef {
PyObject::new(objbytes::PyBytes::new(data), self.bytes_type(), None)
}
pub fn new_bytearray(&self, data: Vec<u8>) -> PyObjectRef {
PyObject::new(
objbytearray::PyByteArray::new(data),
self.bytearray_type(),
None,
)
}
pub fn new_bool(&self, b: bool) -> PyObjectRef {
if b {
self.true_value.clone().into_object()
} else {
self.false_value.clone().into_object()
}
}
/// Allocate a new `tuple` from the given elements.
pub fn new_tuple(&self, elements: Vec<PyObjectRef>) -> PyObjectRef {
    PyObject::new(PyTuple::from(elements), self.tuple_type(), None)
}
/// Allocate a new `list` from the given elements.
pub fn new_list(&self, elements: Vec<PyObjectRef>) -> PyObjectRef {
    PyObject::new(PyList::from(elements), self.list_type(), None)
}
/// Allocate a new, empty `set`.
pub fn new_set(&self) -> PyObjectRef {
    // Initialized empty, as calling __hash__ is required for adding each object to the set
    // which requires a VM context - this is done in the objset code itself.
    PyObject::new(PySet::default(), self.set_type(), None)
}
/// Allocate a new, empty `dict` as a typed reference.
pub fn new_dict(&self) -> PyDictRef {
    // Freshly created with a dict payload, so the downcast cannot fail.
    PyObject::new(PyDict::default(), self.dict_type(), None)
        .downcast()
        .unwrap()
}
/// Create a new class named `name` deriving from `base`.
pub fn new_class(&self, name: &str, base: PyClassRef) -> PyClassRef {
    // An empty attribute table with a single valid base cannot fail.
    objtype::new(self.type_type(), name, vec![base], PyAttributes::new()).unwrap()
}
/// Create a fresh top-level scope backed by a new, empty dict.
pub fn new_scope(&self) -> Scope {
    Scope::new(None, self.new_dict())
}
/// Create a module object with the given name and attribute dict.
pub fn new_module(&self, name: &str, dict: PyDictRef) -> PyObjectRef {
    PyObject::new(
        PyModule {
            name: name.to_string(),
        },
        self.module_type.clone(),
        Some(dict),
    )
}
/// Wrap a native Rust function as a builtin-function object.
pub fn new_rustfunc<F, T, R>(&self, f: F) -> PyObjectRef
where
    F: IntoPyNativeFunc<T, R>,
{
    PyObject::new(
        PyBuiltinFunction::new(f.into_func()),
        self.builtin_function_or_method_type(),
        None,
    )
}
/// Create a read-only property backed by the native getter `f`.
pub fn new_property<F, I, V>(&self, f: F) -> PyObjectRef
where
    F: IntoPyNativeFunc<I, V>,
{
    PropertyBuilder::new(self).add_getter(f).create()
}
/// Wrap a compiled bytecode object as a typed code-object reference.
pub fn new_code_object(&self, code: bytecode::CodeObject) -> PyCodeRef {
    // Freshly created with a code payload, so the downcast cannot fail.
    PyObject::new(objcode::PyCode::new(code), self.code_type(), None)
        .downcast()
        .unwrap()
}
/// Create a Python function object from compiled code plus its closure
/// scope and (keyword-only) default arguments.
pub fn new_function(
    &self,
    code_obj: PyCodeRef,
    scope: Scope,
    defaults: Option<PyTupleRef>,
    kw_only_defaults: Option<PyDictRef>,
) -> PyObjectRef {
    // Functions always carry a fresh __dict__, unlike most builtins.
    PyObject::new(
        PyFunction::new(code_obj, scope, defaults, kw_only_defaults),
        self.function_type(),
        Some(self.new_dict()),
    )
}
/// Bind `function` to `object`, producing a bound-method object.
pub fn new_bound_method(&self, function: PyObjectRef, object: PyObjectRef) -> PyObjectRef {
    PyObject::new(
        PyMethod::new(object, function),
        self.bound_method_type(),
        None,
    )
}
/// Create a plain instance of `class` with an optional __dict__.
pub fn new_instance(&self, class: PyClassRef, dict: Option<PyDictRef>) -> PyObjectRef {
    PyObject {
        typ: class,
        dict,
        payload: objobject::PyInstance,
    }
    .into_ref()
}
/// Convert a compile-time `bytecode::Constant` into a runtime object.
///
/// Tuples are unwrapped recursively; `None`/`Ellipsis` map to the
/// interned singletons.
pub fn unwrap_constant(&self, value: &bytecode::Constant) -> PyObjectRef {
    match *value {
        bytecode::Constant::Integer { ref value } => self.new_int(value.clone()),
        bytecode::Constant::Float { ref value } => self.new_float(*value),
        bytecode::Constant::Complex { ref value } => self.new_complex(*value),
        bytecode::Constant::String { ref value } => self.new_str(value.clone()),
        bytecode::Constant::Bytes { ref value } => self.new_bytes(value.clone()),
        // `bool` is `Copy`: deref instead of cloning (clippy::clone_on_copy),
        // matching the Float/Complex arms above.
        bytecode::Constant::Boolean { ref value } => self.new_bool(*value),
        bytecode::Constant::Code { ref code } => {
            self.new_code_object(*code.clone()).into_object()
        }
        bytecode::Constant::Tuple { ref elements } => {
            let elements = elements
                .iter()
                .map(|value| self.unwrap_constant(value))
                .collect();
            self.new_tuple(elements)
        }
        bytecode::Constant::None => self.none(),
        bytecode::Constant::Ellipsis => self.ellipsis(),
    }
}
}
/// `PyContext::default()` simply delegates to `PyContext::new`.
impl Default for PyContext {
    fn default() -> Self {
        PyContext::new()
    }
}
/// This is an actual python object. It consists of a `typ` which is the
/// python class, and carries some rust payload optionally. This rust
/// payload can be a rust float or rust int in case of float and int objects.
pub struct PyObject<T>
where
    T: ?Sized + PyObjectPayload,
{
    // The object's Python class.
    pub typ: PyClassRef,
    pub dict: Option<PyDictRef>, // __dict__ member
    // Concrete or type-erased Rust payload backing this object.
    pub payload: T,
}
impl PyObject<dyn PyObjectPayload> {
    /// Attempt to downcast this reference to a subclass.
    ///
    /// If the downcast fails, the original ref is returned in as `Err` so
    /// another downcast can be attempted without unnecessary cloning.
    ///
    /// Note: The returned `Result` is _not_ a `PyResult`, even though the
    /// types are compatible.
    pub fn downcast<T: PyObjectPayload>(self: Rc<Self>) -> Result<PyRef<T>, PyObjectRef> {
        if !self.payload_is::<T>() {
            return Err(self);
        }
        Ok(PyRef {
            obj: self,
            _payload: PhantomData,
        })
    }
}
/// A reference to a Python object.
///
/// Note that a `PyRef<T>` can only deref to a shared / immutable reference.
/// It is the payload type's responsibility to handle (possibly concurrent)
/// mutability with locks or concurrent data structures if required.
///
/// A `PyRef<T>` can be directly returned from a built-in function to handle
/// situations (such as when implementing in-place methods such as `__iadd__`)
/// where a reference to the same object must be returned.
#[derive(Debug)]
pub struct PyRef<T> {
    // invariant: this obj must always have payload of type T
    obj: PyObjectRef,
    _payload: PhantomData<T>,
}
// Manual impl: `#[derive(Clone)]` would wrongly require `T: Clone`, but
// only the reference is cloned here, never the payload.
impl<T> Clone for PyRef<T> {
    fn clone(&self) -> Self {
        Self {
            obj: self.obj.clone(),
            _payload: PhantomData,
        }
    }
}
impl<T: PyValue> PyRef<T> {
    /// Borrow the underlying untyped object reference.
    pub fn as_object(&self) -> &PyObjectRef {
        &self.obj
    }
    /// Consume this typed ref, yielding the untyped object reference.
    pub fn into_object(self) -> PyObjectRef {
        self.obj
    }
    /// The Python class of the referenced object, as a typed ref.
    pub fn typ(&self) -> PyClassRef {
        let obj = self.obj.class().into_object();
        PyRef {
            obj,
            _payload: PhantomData,
        }
    }
}
impl<T: PyValue> Deref for PyRef<T> {
    type Target = T;
    /// Borrow the payload; the `PyRef` invariant guarantees the payload
    /// is a `T`, so the `expect` can only fire on an internal bug.
    fn deref(&self) -> &T {
        self.obj.payload().expect("unexpected payload for type")
    }
}
impl<T> TryFromObject for PyRef<T>
where
    T: PyValue,
{
    /// Checked conversion: the object must be an instance of `T`'s class,
    /// otherwise a `TypeError` naming both types is raised.
    fn try_from_object(vm: &VirtualMachine, obj: PyObjectRef) -> PyResult<Self> {
        if !objtype::isinstance(&obj, &T::class(vm)) {
            let class = T::class(vm);
            let expected_type = vm.to_pystr(&class)?;
            let actual_type = vm.to_pystr(&obj.class())?;
            return Err(vm.new_type_error(format!(
                "Expected type {}, not {}",
                expected_type, actual_type,
            )));
        }
        Ok(PyRef {
            obj,
            _payload: PhantomData,
        })
    }
}
/// A typed ref is already an object; returning it can never fail.
impl<T> IntoPyObject for PyRef<T> {
    fn into_pyobject(self, _vm: &VirtualMachine) -> PyResult {
        Ok(self.obj)
    }
}
/// Borrow the untyped object reference out of a typed ref.
impl<'a, T: PyValue> From<&'a PyRef<T>> for &'a PyObjectRef {
    fn from(obj: &'a PyRef<T>) -> Self {
        obj.as_object()
    }
}
/// Consume a typed ref into the untyped object reference.
impl<T: PyValue> From<PyRef<T>> for PyObjectRef {
    fn from(obj: PyRef<T>) -> Self {
        obj.into_object()
    }
}
/// Delegates `Display` to the payload. The `expect` can only fire if the
/// `PyRef` payload-type invariant is violated (a bug elsewhere).
///
/// The `fmt::Display` bound was previously declared twice (once inline,
/// once in the `where` clause); the redundant inline bound is removed.
impl<T> fmt::Display for PyRef<T>
where
    T: PyValue + fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let value: &T = self.obj.payload().expect("unexpected payload for type");
        fmt::Display::fmt(value, f)
    }
}
/// Identity comparison (Python `is`) via the object's address.
pub trait IdProtocol {
    fn get_id(&self) -> usize;
    /// True when `self` and `other` are the very same object.
    fn is<T>(&self, other: &T) -> bool
    where
        T: IdProtocol,
    {
        self.get_id() == other.get_id()
    }
}
/// Uninhabited payload type, used only to give `PyObject<Never>` a
/// concrete type for the pointer cast below; never instantiated.
#[derive(Debug)]
enum Never {}
impl PyValue for Never {
    fn class(_vm: &VirtualMachine) -> PyClassRef {
        // `Never` has no values, so this can never be called at runtime.
        unreachable!()
    }
}
impl<T: ?Sized + PyObjectPayload> IdProtocol for PyObject<T> {
    fn get_id(&self) -> usize {
        // Cast through a concrete payload type so a fat (trait-object)
        // pointer collapses to its thin data pointer before becoming usize.
        self as *const _ as *const PyObject<Never> as usize
    }
}
/// `Rc` delegates identity to the pointed-to object.
impl<T: ?Sized + IdProtocol> IdProtocol for Rc<T> {
    fn get_id(&self) -> usize {
        (**self).get_id()
    }
}
/// Typed refs share identity with the underlying object.
impl<T: PyObjectPayload> IdProtocol for PyRef<T> {
    fn get_id(&self) -> usize {
        self.obj.get_id()
    }
}
/// Access to an object's Python class.
pub trait TypeProtocol {
    fn class(&self) -> PyClassRef;
}
impl TypeProtocol for PyObjectRef {
    fn class(&self) -> PyClassRef {
        (**self).class()
    }
}
impl<T> TypeProtocol for PyObject<T>
where
    T: ?Sized + PyObjectPayload,
{
    fn class(&self) -> PyClassRef {
        // Cheap: PyClassRef is a reference-counted handle.
        self.typ.clone()
    }
}
impl<T> TypeProtocol for PyRef<T> {
    fn class(&self) -> PyClassRef {
        self.obj.typ.clone()
    }
}
/// Subscript access (`obj[key]`) via `__getitem__` / `__setitem__` /
/// `__delitem__`.
pub trait ItemProtocol {
    fn get_item<T: IntoPyObject>(&self, key: T, vm: &VirtualMachine) -> PyResult;
    fn set_item<T: IntoPyObject>(
        &self,
        key: T,
        value: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult;
    fn del_item<T: IntoPyObject>(&self, key: T, vm: &VirtualMachine) -> PyResult;
    /// Like `get_item`, but maps a raised `KeyError` to `Ok(None)`;
    /// any other exception is propagated unchanged.
    fn get_item_option<T: IntoPyObject>(
        &self,
        key: T,
        vm: &VirtualMachine,
    ) -> PyResult<Option<PyObjectRef>> {
        match self.get_item(key, vm) {
            Ok(value) => Ok(Some(value)),
            Err(exc) => {
                if objtype::isinstance(&exc, &vm.ctx.exceptions.key_error) {
                    Ok(None)
                } else {
                    Err(exc)
                }
            }
        }
    }
}
/// Generic objects dispatch subscripting to the dunder methods.
impl ItemProtocol for PyObjectRef {
    fn get_item<T: IntoPyObject>(&self, key: T, vm: &VirtualMachine) -> PyResult {
        vm.call_method(self, "__getitem__", key.into_pyobject(vm)?)
    }
    fn set_item<T: IntoPyObject>(
        &self,
        key: T,
        value: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult {
        vm.call_method(self, "__setitem__", vec![key.into_pyobject(vm)?, value])
    }
    fn del_item<T: IntoPyObject>(&self, key: T, vm: &VirtualMachine) -> PyResult {
        vm.call_method(self, "__delitem__", key.into_pyobject(vm)?)
    }
}
/// Minimal buffer protocol: whether the underlying buffer is writable.
pub trait BufferProtocol {
    fn readonly(&self) -> bool;
}
impl BufferProtocol for PyObjectRef {
    // NOTE(review): reporting `bytes` as writable (false) and
    // `bytearray`/`memoryview` as read-only (true) looks inverted
    // relative to CPython buffer semantics — confirm whether callers
    // rely on this before changing it.
    fn readonly(&self) -> bool {
        match self.class().name.as_str() {
            "bytes" => false,
            "bytearray" | "memoryview" => true,
            // Any non buffer-like type here is a caller bug.
            _ => panic!("Bytes-Like type expected not {:?}", self),
        }
    }
}
/// Debug output delegates to the payload's own `Debug` impl.
impl fmt::Debug for PyObject<dyn PyObjectPayload> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[PyObj {:?}]", &self.payload)
    }
}
/// An iterable Python object.
///
/// `PyIterable` implements `FromArgs` so that a built-in function can accept
/// an object that is required to conform to the Python iterator protocol.
///
/// PyIterable can optionally perform type checking and conversions on iterated
/// objects using a generic type parameter that implements `TryFromObject`.
pub struct PyIterable<T = PyObjectRef> {
    // The bound `__iter__` method of the source object.
    method: PyObjectRef,
    _item: std::marker::PhantomData<T>,
}
impl<T> PyIterable<T> {
    /// Returns an iterator over this sequence of objects.
    ///
    /// This operation may fail if an exception is raised while invoking the
    /// `__iter__` method of the iterable object.
    pub fn iter<'a>(&self, vm: &'a VirtualMachine) -> PyResult<PyIterator<'a, T>> {
        // Call the stored bound `__iter__` with no arguments.
        let iter_obj = vm.invoke(
            self.method.clone(),
            PyFuncArgs {
                args: vec![],
                kwargs: vec![],
            },
        )?;
        Ok(PyIterator {
            vm,
            obj: iter_obj,
            _item: std::marker::PhantomData,
        })
    }
}
/// Rust-side iterator adapter driving a Python iterator object.
pub struct PyIterator<'a, T> {
    vm: &'a VirtualMachine,
    obj: PyObjectRef,
    _item: std::marker::PhantomData<T>,
}
impl<'a, T> Iterator for PyIterator<'a, T>
where
    T: TryFromObject,
{
    type Item = PyResult<T>;
    /// Drive the Python iterator: call `__next__` until `StopIteration`.
    fn next(&mut self) -> Option<Self::Item> {
        match self.vm.call_method(&self.obj, "__next__", vec![]) {
            Ok(value) => Some(T::try_from_object(self.vm, value)),
            // `StopIteration` terminates the stream normally.
            Err(err) if objtype::isinstance(&err, &self.vm.ctx.exceptions.stop_iteration) => None,
            // Any other exception is surfaced to the consumer as an item.
            Err(err) => Some(Err(err)),
        }
    }
}
/// Accept anything with `__iter__`; fall back to the sequence protocol
/// (`__getitem__`) via `PySequenceIterator`; otherwise raise `TypeError`.
impl<T> TryFromObject for PyIterable<T>
where
    T: TryFromObject,
{
    fn try_from_object(vm: &VirtualMachine, obj: PyObjectRef) -> PyResult<Self> {
        if let Ok(method) = vm.get_method(obj.clone(), "__iter__") {
            Ok(PyIterable {
                method,
                _item: std::marker::PhantomData,
            })
        } else if vm.get_method(obj.clone(), "__getitem__").is_ok() {
            Self::try_from_object(
                vm,
                objiter::PySequenceIterator {
                    position: Cell::new(0),
                    // Last use of `obj` on this path: move it instead of
                    // cloning (fixes a redundant clone).
                    obj,
                }
                .into_ref(vm)
                .into_object(),
            )
        } else {
            Err(vm.new_type_error(format!("'{}' object is not iterable", obj.class().name)))
        }
    }
}
/// Identity conversion: every object trivially "is" a `PyObjectRef`.
impl TryFromObject for PyObjectRef {
    fn try_from_object(_vm: &VirtualMachine, obj: PyObjectRef) -> PyResult<Self> {
        Ok(obj)
    }
}
/// Python `None` converts to Rust `None`; any other object must convert
/// to `T` and becomes `Some`.
impl<T: TryFromObject> TryFromObject for Option<T> {
    fn try_from_object(vm: &VirtualMachine, obj: PyObjectRef) -> PyResult<Self> {
        if !vm.get_none().is(&obj) {
            return T::try_from_object(vm, obj).map(Some);
        }
        Ok(None)
    }
}
/// Allows coercion of a types into PyRefs, so that we can write functions that can take
/// refs, pyobject refs or basic types.
pub trait TryIntoRef<T> {
    fn try_into_ref(self, vm: &VirtualMachine) -> PyResult<PyRef<T>>;
}
/// A typed ref converts to itself without any checking.
impl<T> TryIntoRef<T> for PyRef<T> {
    fn try_into_ref(self, _vm: &VirtualMachine) -> PyResult<PyRef<T>> {
        Ok(self)
    }
}
/// Untyped refs go through the checked `TryFromObject` conversion.
impl<T> TryIntoRef<T> for PyObjectRef
where
    T: PyValue,
{
    fn try_into_ref(self, vm: &VirtualMachine) -> PyResult<PyRef<T>> {
        TryFromObject::try_from_object(vm, self)
    }
}
/// Implemented by any type that can be created from a Python object.
///
/// Any type that implements `TryFromObject` is automatically `FromArgs`, and
/// so can be accepted as a argument to a built-in function.
pub trait TryFromObject: Sized {
    /// Attempt to convert a Python object to a value of this type.
    fn try_from_object(vm: &VirtualMachine, obj: PyObjectRef) -> PyResult<Self>;
}
/// Implemented by any type that can be returned from a built-in Python function.
///
/// `IntoPyObject` has a blanket implementation for any built-in object payload,
/// and should be implemented by many primitive Rust types, allowing a built-in
/// function to simply return a `bool` or a `usize` for example.
pub trait IntoPyObject {
    fn into_pyobject(self, vm: &VirtualMachine) -> PyResult;
}
/// An object reference is returned as-is; this can never fail.
impl IntoPyObject for PyObjectRef {
    fn into_pyobject(self, _vm: &VirtualMachine) -> PyResult {
        Ok(self)
    }
}
/// A `PyResult` flattens: an `Ok` payload is converted, an `Err` is
/// propagated unchanged.
impl<T> IntoPyObject for PyResult<T>
where
    T: IntoPyObject,
{
    fn into_pyobject(self, vm: &VirtualMachine) -> PyResult {
        self.and_then(|res| T::into_pyobject(res, vm))
    }
}
// Allows a built-in function to return any built-in object payload without
// explicitly implementing `IntoPyObject`.
impl<T> IntoPyObject for T
where
    T: PyValue + Sized,
{
    fn into_pyobject(self, vm: &VirtualMachine) -> PyResult {
        // Wrap the payload in a fresh object of its canonical class.
        Ok(PyObject::new(self, T::class(vm), None))
    }
}
impl<T> PyObject<T>
where
    T: Sized + PyObjectPayload,
{
    /// Allocate a new object with the given payload, class and optional
    /// `__dict__`, returning a reference-counted handle.
    pub fn new(payload: T, typ: PyClassRef, dict: Option<PyDictRef>) -> PyObjectRef {
        PyObject { typ, dict, payload }.into_ref()
    }
    // Move this object into a reference object, transferring ownership.
    pub fn into_ref(self) -> PyObjectRef {
        Rc::new(self)
    }
}
impl PyObject<dyn PyObjectPayload> {
    /// Downcast the payload to `T`, or `None` if it has a different type.
    #[inline]
    pub fn payload<T: PyObjectPayload>(&self) -> Option<&T> {
        self.payload.as_any().downcast_ref()
    }
    /// Cheap type test: is the payload exactly a `T`?
    #[inline]
    pub fn payload_is<T: PyObjectPayload>(&self) -> bool {
        self.payload.as_any().is::<T>()
    }
}
/// A Rust value that can live inside a Python object as its payload.
pub trait PyValue: fmt::Debug + Sized + 'static {
    // Set to true for payloads whose instances always carry a __dict__.
    const HAVE_DICT: bool = false;
    /// The canonical Python class for this payload type.
    fn class(vm: &VirtualMachine) -> PyClassRef;
    /// Wrap this value in a new object of its canonical class.
    fn into_ref(self, vm: &VirtualMachine) -> PyRef<Self> {
        PyRef {
            obj: PyObject::new(self, Self::class(vm), None),
            _payload: PhantomData,
        }
    }
    /// Wrap this value as an instance of `cls`, which must be a subclass
    /// of the canonical class; raises `TypeError` otherwise.
    fn into_ref_with_type(self, vm: &VirtualMachine, cls: PyClassRef) -> PyResult<PyRef<Self>> {
        let class = Self::class(vm);
        if objtype::issubclass(&cls, &class) {
            // Exact instances of the canonical class only get a __dict__
            // when the payload demands one; subclass instances always do.
            let dict = if !Self::HAVE_DICT && cls.is(&class) {
                None
            } else {
                Some(vm.ctx.new_dict())
            };
            Ok(PyRef {
                obj: PyObject::new(self, cls, dict),
                _payload: PhantomData,
            })
        } else {
            let subtype = vm.to_pystr(&cls.obj)?;
            let basetype = vm.to_pystr(&class.obj)?;
            Err(vm.new_type_error(format!("{} is not a subtype of {}", subtype, basetype)))
        }
    }
}
/// Object-safe payload trait; `as_any` enables runtime downcasting.
pub trait PyObjectPayload: Any + fmt::Debug + 'static {
    fn as_any(&self) -> &dyn Any;
}
// Blanket impl for every payload value. `PyValue` already requires
// `'static`, so the previous explicit `+ 'static` bound was redundant.
impl<T: PyValue> PyObjectPayload for T {
    #[inline]
    fn as_any(&self) -> &dyn Any {
        self
    }
}
/// A value that is one of two alternative types.
pub enum Either<A, B> {
    A(A),
    B(B),
}
/// This allows a builtin method to accept arguments that may be one of two
/// types, raising a `TypeError` if it is neither.
///
/// # Example
///
/// ```
/// use rustpython_vm::VirtualMachine;
/// use rustpython_vm::obj::{objstr::PyStringRef, objint::PyIntRef};
/// use rustpython_vm::pyobject::Either;
///
/// fn do_something(arg: Either<PyIntRef, PyStringRef>, vm: &VirtualMachine) {
/// match arg {
/// Either::A(int)=> {
/// // do something with int
/// }
/// Either::B(string) => {
/// // do something with string
/// }
/// }
/// }
/// ```
impl<A, B> TryFromObject for Either<PyRef<A>, PyRef<B>>
where
    A: PyValue,
    B: PyValue,
{
    /// Try `A` first, then `B`; raise a `TypeError` naming both candidate
    /// types if the object is neither.
    fn try_from_object(vm: &VirtualMachine, obj: PyObjectRef) -> PyResult<Self> {
        obj.downcast::<A>()
            .map(Either::A)
            // `downcast` hands the object back in `Err`, so no clone is
            // needed before the second attempt (fixes a redundant clone).
            .or_else(|obj| obj.downcast::<B>().map(Either::B))
            .map_err(|obj| {
                vm.new_type_error(format!(
                    "must be {} or {}, not {}",
                    A::class(vm),
                    B::class(vm),
                    obj.class()
                ))
            })
    }
}
/// Compile-time metadata (name, docstring) for a Python class.
pub trait PyClassDef {
    const NAME: &'static str;
    const DOC: Option<&'static str> = None;
}
/// A ref to a class-defining payload shares that payload's metadata.
impl<T> PyClassDef for PyRef<T>
where
    T: PyClassDef,
{
    const NAME: &'static str = T::NAME;
    const DOC: Option<&'static str> = T::DOC;
}
/// Glue for types that populate a Python class with their members.
pub trait PyClassImpl: PyClassDef {
    /// Add this type's methods/properties to `class` (generated impl).
    fn impl_extend_class(ctx: &PyContext, class: &PyClassRef);
    /// Extend `class` with the generated members plus `__doc__` if set.
    fn extend_class(ctx: &PyContext, class: &PyClassRef) {
        Self::impl_extend_class(ctx, class);
        if let Some(doc) = Self::DOC {
            class.set_str_attr("__doc__", ctx.new_str(doc.into()));
        }
    }
    /// Create a fresh class deriving from `object` and populate it.
    fn make_class(ctx: &PyContext) -> PyClassRef {
        let py_class = ctx.new_class(Self::NAME, ctx.object());
        Self::extend_class(ctx, &py_class);
        py_class
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Smoke test: constructing the context must not panic.
    #[test]
    fn test_type_type() {
        // TODO: Write this test
        PyContext::new();
    }
}
|
extern crate rand;
use std::io;
use std::cmp::Ordering;
use rand::Rng;
/// A simple demo record used by `main`.
#[derive(Debug)]
struct Person {
    name: String,
    email: String,
    age: u32,
    address: String,
}
impl Person {
    /// Years added by each `add_age` call (was a magic `30` literal).
    const AGE_INCREMENT: u32 = 30;
    /// Increase this person's age by `AGE_INCREMENT` years.
    fn add_age(&mut self) {
        self.age += Self::AGE_INCREMENT;
    }
    /// True when `self` is strictly older than `other`.
    fn older(&self, other: &Person) -> bool {
        self.age > other.age
    }
}
/// Demo entry point: runs the guessing game, then exercises `Person`.
fn main() {
    // Secret number in [1, 100].
    let s_number = rand::thread_rng().gen_range(1, 101);
    let (x, y, z) = guess_game(s_number);
    // guess_game returns the secret three times, so this prints 3 * secret.
    println!("The result value {}", x+y+z);
    println!("helloworld");
    let mut p = Person{
        name: String::from("barryz"),
        email: String::from("barryzxb@gmail.com"),
        age: 28,
        address: String::from("Shanghai China"),
    };
    println!("==========================");
    println!("person is {:#?}", p);
    // modify p, so p must be mutable.
    p.add_age();
    println!("after person's adding, the age is {}", p.age);
    let ap = Person{
        name: String::from("barryz1"),
        email: String::from("barryzxb1@gmail.com"),
        age: 20,
        address: String::from("Shanghai China"),
    };
    if p.older(&ap) {
        println!("{} is older than {}", p.name, ap.name);
    } else {
        println!("{} is younger than {}", p.name, ap.name);
    }
}
/// Interactive guessing loop: reads guesses from stdin until the secret
/// number is hit, then returns the secret three times.
fn guess_game(s_number: u32) -> (u32, u32, u32) {
    println!("The secret number is {}", s_number);
    println!("Guess the number!");
    loop {
        println!("Please input your guess.");
        // mut means mutable
        let mut guess = String::new();
        io::stdin().read_line(&mut guess)
            .expect("Failed to read line");
        // Non-numeric input is reported and the loop retries.
        let guess: u32 = match guess.trim().parse() {
            Ok(num) => num,
            Err(err) => {
                println!("{} occured!", err);
                continue;
            },
        };
        println!("Your guessed: {}", guess);
        match guess.cmp(&s_number) {
            Ordering::Less => println!("Too small!"),
            Ordering::Greater => println!("Too big!"),
            Ordering::Equal => {
                println!("You win!");
                break;
            }
        }
    }
    (s_number, s_number, s_number)
}
|
pub use dencode;
#[cfg(feature = "json")]
pub mod json;
pub mod sbp;
|
use super::types::{ModuleLoader, ModuleResolver};
use super::types::{KEY, MODULE_ID_KEY};
// use duktape::{error::ErrorKind, error::Result, Callable, Context};
use super::internal;
use duktape::prelude::*;
use duktape::Key;
use duktape::{error::ErrorKind, error::Result};
use regex::Regex;
use std::path::Path;
/// Registry of file loaders, protocol resolvers and builtin modules that
/// backs the CommonJS `require` implementation.
pub struct CommonJS {
    pub(crate) loaders: Vec<Loader>,
    resolvers: Vec<Resolver>,
    modules: Vec<Module>,
}
impl CommonJS {
    /// File extensions of all registered loaders.
    pub fn extensions(&self) -> Vec<String> {
        self.loaders.iter().map(|l| l.extension.clone()).collect()
    }
    /// Protocols of all registered resolvers.
    pub fn protocols(&self) -> Vec<String> {
        self.resolvers.iter().map(|r| r.protocol.clone()).collect()
    }
    /// Names of all registered builtin modules.
    pub fn modules(&self) -> Vec<String> {
        self.modules.iter().map(|m| m.name.clone()).collect()
    }
}
// NOTE(review): an empty `Drop` impl does nothing at runtime — confirm it
// is intentional (it does affect drop-check rules) before removing.
impl Drop for CommonJS {
    fn drop(&mut self) {}
}
/// The repository stores itself in the duktape context data keyed by type.
impl Key for CommonJS {
    type Value = Self;
}
lazy_static! {
    // Matches "<protocol>://<path>" require ids, capturing both parts.
    static ref PROTOCOL_RE: Regex =
        Regex::new(r"^([a-zA-Z0-9]+)(?:://)(/?[a-zA-Z0-9\.\-]+(?:/[a-zA-Z0-9\.\-]+)*)$").unwrap();
    // Matches bare absolute ("/…") or relative ("./", "../") file paths.
    static ref FILE_RE: Regex = Regex::new(r"^(?:/|\.\.?/)(?:[^/\\0]+(?:/)?)+$").unwrap();
}
/// A module loader registered for one file extension.
pub(crate) struct Loader {
    pub(crate) extension: String,
    pub(crate) loader: Box<dyn ModuleLoader>,
}
/// A module resolver registered for one URL protocol.
struct Resolver {
    protocol: String,
    resolver: Box<dyn ModuleResolver>,
}
/// A builtin module: its require id and initializer callable.
struct Module {
    name: String,
    module: Box<dyn Callable>,
}
/// Accumulates loaders/resolvers/modules before building a `CommonJS`.
pub struct Builder {
    loaders: Vec<Loader>,
    resolvers: Vec<Resolver>,
    modules: Vec<Module>,
    pub(crate) file_loader: bool,
}
impl Builder {
    /// Creates a new builder with the file loader enabled and no
    /// loaders, resolvers or builtin modules registered.
    pub fn new() -> Builder {
        Builder {
            loaders: Vec::new(),
            resolvers: Vec::new(),
            modules: Vec::new(),
            file_loader: true,
        }
    }
    /// Register a loader for files with the given extension.
    pub fn loader(&mut self, extension: &str, loader: Box<dyn ModuleLoader>) -> &mut Self {
        self.loaders.push(Loader {
            extension: extension.to_owned(),
            // Field-init shorthand (clippy::redundant_field_names).
            loader,
        });
        self
    }
    /// Register a resolver for the given protocol (e.g. "file").
    pub fn resolver<T: AsRef<str>>(
        &mut self,
        protocol: T,
        resolver: Box<dyn ModuleResolver>,
    ) -> &mut Self {
        self.resolvers.push(Resolver {
            protocol: protocol.as_ref().to_owned(),
            resolver,
        });
        self
    }
    /// Register a builtin module; a duplicate id is silently ignored.
    pub fn module<T: 'static>(&mut self, id: &str, module: T) -> &mut Self
    where
        T: Callable,
    {
        // `any` replaces `find(..).is_some()` (clippy::search_is_some).
        if self.modules.iter().any(|m| m.name.as_str() == id) {
            return self;
        }
        self.modules.push(Module {
            name: id.to_string(),
            module: Box::new(module),
        });
        self
    }
    /// Enable or disable the default file loader.
    pub fn file_loader(&mut self, enable: bool) -> &mut Self {
        self.file_loader = enable;
        self
    }
    /// Consume the builder, producing the configured repository.
    pub fn build(self) -> CommonJS {
        CommonJS {
            loaders: self.loaders,
            modules: self.modules,
            resolvers: self.resolvers,
        }
    }
}
/// The `require` callable installed into each module's scope.
pub struct Require;
impl Require {
    /// Load a module
    ///
    /// `id` must already match `PROTOCOL_RE` — the caller checks this
    /// before dispatching here, so the `captures` unwrap is guarded.
    fn load_module<'a>(&self, id: &str, ctx: &'a Context, repo: &CommonJS) -> Result<Object<'a>> {
        let caps = PROTOCOL_RE.captures(id).unwrap();
        let protocol = caps.get(1).unwrap().as_str();
        let idr = caps.get(2).unwrap().as_str();
        // Pick the resolver registered for the id's protocol.
        let resolver = match repo
            .resolvers
            .iter()
            .find(|m| m.protocol.as_str() == protocol)
        {
            Some(resolver) => resolver,
            None => {
                return Err(ErrorKind::TypeError(format!(
                    "could not find resolver for protocol: '{}'",
                    protocol
                ))
                .into());
            }
        };
        // The calling `require` function object carries its module's id;
        // use it as the parent for relative resolution.
        let o: Object = ctx.push_current_function().getp()?;
        let parent = o.get::<_, String>(MODULE_ID_KEY)?;
        let id = match resolver.resolver.resolve(idr, &parent, &repo.extensions()) {
            Ok(id) => id,
            Err(e) => return Err(ErrorKind::TypeError(format!("{}", e)).into()),
        };
        // Serve an already-loaded module from the cache.
        if self.has_cache(ctx, &id)? {
            return self.get_cache(ctx, &id);
        }
        let path = Path::new(&id);
        // Despite the `Ok(id)` binding name, this is the fresh module
        // object pushed for `path`.
        let module = match internal::push_module_object(ctx, &path, false) {
            Ok(id) => id,
            Err(e) => return Err(ErrorKind::Error(format!("{}", e)).into()),
        };
        if path.extension().is_none() {
            bail!(ErrorKind::TypeError(format!(
                "could not infer extension for path {}",
                id
            )));
        }
        let ext = path.extension().unwrap();
        // The loader is chosen by extension; the content comes from the
        // protocol's resolver.
        let loader = match repo.loaders.iter().find(|m| m.extension.as_str() == ext) {
            Some(loader) => loader,
            None => bail!(ErrorKind::Error(format!("no loader for: {:?}", ext))),
        };
        let content = match resolver.resolver.read(&id) {
            Err(e) => bail!(ErrorKind::Error(format!("{}", e))),
            Ok(m) => m,
        };
        match loader.loader.load(ctx, &module, &content) {
            Err(e) => bail!(ErrorKind::Error(format!("{}", e))),
            Ok(_) => Ok(module),
        }
    }
    /// Load a builtin module
    fn load_builtin_module<'a>(
        &self,
        id: &str,
        ctx: &'a Context,
        repo: &CommonJS,
    ) -> Result<Object<'a>> {
        // Find buildin
        let found = repo.modules.iter().find(|m| m.name == id);
        if found.is_none() {
            return Err(ErrorKind::TypeError(format!("could not find module: '{}'", id)).into());
        }
        if self.has_cache(ctx, id)? {
            return self.get_cache(ctx, id);
        }
        let found = found.unwrap();
        let module = internal::push_module_object(ctx, id, false).unwrap();
        module.clone().to_context(ctx)?;
        // If the initializer pushed a value, it becomes the exports.
        let top = ctx.top();
        found.module.call(ctx)?;
        if ctx.top() > top {
            module.set("exports", ctx.getp::<Ref>()?);
        }
        ctx.pop(1);
        Ok(module)
    }
    /// True when `id` is already present in the module cache.
    fn has_cache(&self, ctx: &Context, id: &str) -> Result<bool> {
        let cache = ctx
            .push_global_stash()
            .getp::<Object>()?
            .get::<_, Object>(KEY)?
            .get::<_, Object>("cache")?;
        Ok(cache.has(id))
    }
    /// Fetch a cached module; `id` must be present (see `has_cache`).
    fn get_cache<'a>(&self, ctx: &'a Context, id: &str) -> Result<Object<'a>> {
        let cache = ctx
            .push_global_stash()
            .getp::<Object>()?
            .get::<_, Object>(KEY)?
            .get::<_, Object>("cache")?;
        Ok(cache.get::<_, Object>(id).unwrap())
    }
    /// Store a loaded module object in the cache under `id`.
    fn set_cache(&self, ctx: &Context, id: &str, module: &Object) -> Result<()> {
        let cache = ctx
            .push_global_stash()
            .getp::<Object>()?
            .get::<_, Object>(KEY)?
            .get::<_, Object>("cache")?;
        cache.set(id, module);
        Ok(())
    }
}
impl Callable for Require {
    fn argc(&self) -> i32 {
        1
    }
    /// `require(id)`: resolve and load a module, pushing its `exports`.
    fn call(&self, ctx: &Context) -> Result<i32> {
        if !ctx.is_string(0) {
            return Err(ErrorKind::TypeError("string expected".to_string()).into());
        }
        let mut id: String = ctx.get(0)?;
        ctx.pop(1);
        let common = ctx.data()?.get::<CommonJS>().unwrap();
        // Builtin modules shadow everything else;
        // `any` replaces `find(..).is_some()` (clippy::search_is_some).
        let module = if common.modules.iter().any(|m| m.name == id) {
            self.load_builtin_module(&id, ctx, common)?
        } else {
            // Bare paths are treated as `file://` URLs.
            if FILE_RE.is_match(&id) {
                id = format!("file://{}", id);
            }
            if !PROTOCOL_RE.is_match(&id) {
                return Err(ErrorKind::TypeError(format!("invalid require id: {}", id)).into());
            }
            // Pass `common` directly (it is already a reference), matching
            // the builtin branch above.
            self.load_module(&id, ctx, common)?
        };
        if !module.has("exports") {
            // `format!` without arguments is just an owned string
            // (clippy::useless_format).
            bail!(ErrorKind::TypeError(
                "module does not have a 'exports' field".to_string()
            ));
        }
        self.set_cache(ctx, &id, &module)?;
        module.get::<_, Ref>("exports")?.push();
        Ok(1)
    }
}
// Explicit no-op Drop, mirroring the one on `CommonJS`.
impl Drop for Require {
    fn drop(&mut self) {}
}
/// Build a per-module `require` function bound to `module_id`.
///
/// The function object carries the module id plus the shared `cache` and
/// `main` references taken from the global stash.
pub(crate) fn build_require<'a>(ctx: &'a Context, module_id: &str) -> Result<Function<'a>> {
    let function: Object = ctx.push_function(Require {}).getp()?;
    let stash: Object = ctx.push_global_stash().getp()?;
    let stash: Object = stash.get(KEY)?;
    function
        .set(MODULE_ID_KEY, module_id)
        .set("cache", stash.get::<_, Ref>("cache")?)
        .set("main", stash.get::<_, Ref>("main")?);
    let function: Result<Function> = function.into();
    let mut require = function?;
    require.set_name("require");
    Ok(require)
}
|
/*
* A sample API conforming to the draft standard OGC API - Features - Part 1: Core
*
* This is a sample OpenAPI definition that conforms to the conformance classes \"Core\", \"GeoJSON\", \"HTML\" and \"OpenAPI 3.0\" of the draft standard \"OGC API - Features - Part 1: Core\". This example is a generic OGC API Features definition that uses path parameters to describe all feature collections and all features. The generic OpenAPI definition does not provide any details on the collections or the feature content. This information is only available from accessing the feature collection resources. There is [another example](ogcapi-features-1-example2.yaml) that specifies each collection explicitly.
*
* The version of the OpenAPI document: 1.0.0
* Contact: info@example.org
* Generated by: https://openapi-generator.tech
*/
/// ExtentSpatial : The spatial extent of the features in the collection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExtentSpatial {
    /// One or more bounding boxes that describe the spatial extent of the dataset. In the Core only a single bounding box is supported. Extensions may support additional areas. If multiple areas are provided, the union of the bounding boxes describes the spatial extent.
    #[serde(rename = "bbox", skip_serializing_if = "Option::is_none")]
    pub bbox: Option<Vec<Vec<f32>>>,
    /// Coordinate reference system of the coordinates in the spatial extent (property `bbox`). The default reference system is WGS 84 longitude/latitude. In the Core this is the only supported coordinate reference system. Extensions may support additional coordinate reference systems and add additional enum values.
    #[serde(rename = "crs", skip_serializing_if = "Option::is_none")]
    pub crs: Option<Crs>,
}
impl ExtentSpatial {
    /// The spatial extent of the features in the collection.
    /// Both optional properties start out absent (`None`).
    pub fn new() -> ExtentSpatial {
        ExtentSpatial {
            bbox: None,
            crs: None,
        }
    }
}
/// Coordinate reference system of the coordinates in the spatial extent (property `bbox`). The default reference system is WGS 84 longitude/latitude. In the Core this is the only supported coordinate reference system. Extensions may support additional coordinate reference systems and add additional enum values.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Crs {
    // The only CRS allowed by the Core conformance class: OGC CRS84.
    #[serde(rename = "http://www.opengis.net/def/crs/OGC/1.3/CRS84")]
    HttpWwwOpengisNetDefCrsOGC13CRS84,
}
|
use chrono;
pub type DateTime = chrono::DateTime<chrono::Utc>;
pub mod other;
pub mod private;
pub mod public;
pub mod reqs;
pub mod wsfeed;
|
// NOTE: svd2rust-generated reader/writer type aliases for the DFSDM2_CR1
// register; hand edits here are normally overwritten by regeneration.
#[doc = "Register `DFSDM2_CR1` reader"]
pub type R = crate::R<DFSDM2_CR1_SPEC>;
#[doc = "Register `DFSDM2_CR1` writer"]
pub type W = crate::W<DFSDM2_CR1_SPEC>;
#[doc = "Field `DFEN` reader - DFSDM enable"]
pub type DFEN_R = crate::BitReader;
#[doc = "Field `DFEN` writer - DFSDM enable"]
pub type DFEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `JSWSTART` reader - Start a conversion of the injected group of channels"]
pub type JSWSTART_R = crate::BitReader;
#[doc = "Field `JSWSTART` writer - Start a conversion of the injected group of channels"]
pub type JSWSTART_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `JSYNC` reader - Launch an injected conversion synchronously with the DFSDM0 JSWSTART trigger"]
pub type JSYNC_R = crate::BitReader;
#[doc = "Field `JSYNC` writer - Launch an injected conversion synchronously with the DFSDM0 JSWSTART trigger"]
pub type JSYNC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `JSCAN` reader - Scanning conversion mode for injected conversions"]
pub type JSCAN_R = crate::BitReader;
#[doc = "Field `JSCAN` writer - Scanning conversion mode for injected conversions"]
pub type JSCAN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `JDMAEN` reader - DMA channel enabled to read data for the injected channel group"]
pub type JDMAEN_R = crate::BitReader;
#[doc = "Field `JDMAEN` writer - DMA channel enabled to read data for the injected channel group"]
pub type JDMAEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `JEXTSEL` reader - Trigger signal selection for launching injected conversions"]
pub type JEXTSEL_R = crate::FieldReader;
#[doc = "Field `JEXTSEL` writer - Trigger signal selection for launching injected conversions"]
pub type JEXTSEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
#[doc = "Field `JEXTEN` reader - Trigger enable and trigger edge selection for injected conversions"]
pub type JEXTEN_R = crate::FieldReader;
#[doc = "Field `JEXTEN` writer - Trigger enable and trigger edge selection for injected conversions"]
pub type JEXTEN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `RSWSTART` reader - Software start of a conversion on the regular channel"]
pub type RSWSTART_R = crate::BitReader;
#[doc = "Field `RSWSTART` writer - Software start of a conversion on the regular channel"]
pub type RSWSTART_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RCONT` reader - Continuous mode selection for regular conversions"]
pub type RCONT_R = crate::BitReader;
#[doc = "Field `RCONT` writer - Continuous mode selection for regular conversions"]
pub type RCONT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RSYNC` reader - Launch regular conversion synchronously with DFSDM0"]
pub type RSYNC_R = crate::BitReader;
#[doc = "Field `RSYNC` writer - Launch regular conversion synchronously with DFSDM0"]
pub type RSYNC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RDMAEN` reader - DMA channel enabled to read data for the regular conversion"]
pub type RDMAEN_R = crate::BitReader;
#[doc = "Field `RDMAEN` writer - DMA channel enabled to read data for the regular conversion"]
pub type RDMAEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RCH` reader - Regular channel selection"]
pub type RCH_R = crate::FieldReader;
#[doc = "Field `RCH` writer - Regular channel selection"]
pub type RCH_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `FAST` reader - Fast conversion mode selection for regular conversions"]
pub type FAST_R = crate::BitReader;
#[doc = "Field `FAST` writer - Fast conversion mode selection for regular conversions"]
pub type FAST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AWFSEL` reader - Analog watchdog fast mode select"]
pub type AWFSEL_R = crate::BitReader;
#[doc = "Field `AWFSEL` writer - Analog watchdog fast mode select"]
pub type AWFSEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    // Each accessor shifts the raw 32-bit register value right by the field's
    // bit offset and masks off the field width, so the returned reader holds
    // only that field's bits. Bit positions not exposed here (2, 6..7, 15..16,
    // 20, 22..23, 27..28, 31) are presumably reserved — per the SVD field map.
    #[doc = "Bit 0 - DFSDM enable"]
    #[inline(always)]
    pub fn dfen(&self) -> DFEN_R {
        DFEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Start a conversion of the injected group of channels"]
    #[inline(always)]
    pub fn jswstart(&self) -> JSWSTART_R {
        JSWSTART_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 3 - Launch an injected conversion synchronously with the DFSDM0 JSWSTART trigger"]
    #[inline(always)]
    pub fn jsync(&self) -> JSYNC_R {
        JSYNC_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Scanning conversion mode for injected conversions"]
    #[inline(always)]
    pub fn jscan(&self) -> JSCAN_R {
        JSCAN_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - DMA channel enabled to read data for the injected channel group"]
    #[inline(always)]
    pub fn jdmaen(&self) -> JDMAEN_R {
        JDMAEN_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bits 8:12 - Trigger signal selection for launching injected conversions"]
    #[inline(always)]
    pub fn jextsel(&self) -> JEXTSEL_R {
        // 5-bit field: mask 0x1f.
        JEXTSEL_R::new(((self.bits >> 8) & 0x1f) as u8)
    }
    #[doc = "Bits 13:14 - Trigger enable and trigger edge selection for injected conversions"]
    #[inline(always)]
    pub fn jexten(&self) -> JEXTEN_R {
        // 2-bit field: mask 0x3.
        JEXTEN_R::new(((self.bits >> 13) & 3) as u8)
    }
    #[doc = "Bit 17 - Software start of a conversion on the regular channel"]
    #[inline(always)]
    pub fn rswstart(&self) -> RSWSTART_R {
        RSWSTART_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - Continuous mode selection for regular conversions"]
    #[inline(always)]
    pub fn rcont(&self) -> RCONT_R {
        RCONT_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - Launch regular conversion synchronously with DFSDM0"]
    #[inline(always)]
    pub fn rsync(&self) -> RSYNC_R {
        RSYNC_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 21 - DMA channel enabled to read data for the regular conversion"]
    #[inline(always)]
    pub fn rdmaen(&self) -> RDMAEN_R {
        RDMAEN_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bits 24:26 - Regular channel selection"]
    #[inline(always)]
    pub fn rch(&self) -> RCH_R {
        // 3-bit field: mask 0x7.
        RCH_R::new(((self.bits >> 24) & 7) as u8)
    }
    #[doc = "Bit 29 - Fast conversion mode selection for regular conversions"]
    #[inline(always)]
    pub fn fast(&self) -> FAST_R {
        FAST_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - Analog watchdog fast mode select"]
    #[inline(always)]
    pub fn awfsel(&self) -> AWFSEL_R {
        AWFSEL_R::new(((self.bits >> 30) & 1) != 0)
    }
}
impl W {
    // Each method returns a typed writer proxy parameterised on the register
    // spec and the field's bit offset (the const generic argument below); the
    // actual masking/shifting happens inside the generic writer type.
    #[doc = "Bit 0 - DFSDM enable"]
    #[inline(always)]
    #[must_use]
    pub fn dfen(&mut self) -> DFEN_W<DFSDM2_CR1_SPEC, 0> {
        DFEN_W::new(self)
    }
    #[doc = "Bit 1 - Start a conversion of the injected group of channels"]
    #[inline(always)]
    #[must_use]
    pub fn jswstart(&mut self) -> JSWSTART_W<DFSDM2_CR1_SPEC, 1> {
        JSWSTART_W::new(self)
    }
    #[doc = "Bit 3 - Launch an injected conversion synchronously with the DFSDM0 JSWSTART trigger"]
    #[inline(always)]
    #[must_use]
    pub fn jsync(&mut self) -> JSYNC_W<DFSDM2_CR1_SPEC, 3> {
        JSYNC_W::new(self)
    }
    #[doc = "Bit 4 - Scanning conversion mode for injected conversions"]
    #[inline(always)]
    #[must_use]
    pub fn jscan(&mut self) -> JSCAN_W<DFSDM2_CR1_SPEC, 4> {
        JSCAN_W::new(self)
    }
    #[doc = "Bit 5 - DMA channel enabled to read data for the injected channel group"]
    #[inline(always)]
    #[must_use]
    pub fn jdmaen(&mut self) -> JDMAEN_W<DFSDM2_CR1_SPEC, 5> {
        JDMAEN_W::new(self)
    }
    #[doc = "Bits 8:12 - Trigger signal selection for launching injected conversions"]
    #[inline(always)]
    #[must_use]
    pub fn jextsel(&mut self) -> JEXTSEL_W<DFSDM2_CR1_SPEC, 8> {
        JEXTSEL_W::new(self)
    }
    #[doc = "Bits 13:14 - Trigger enable and trigger edge selection for injected conversions"]
    #[inline(always)]
    #[must_use]
    pub fn jexten(&mut self) -> JEXTEN_W<DFSDM2_CR1_SPEC, 13> {
        JEXTEN_W::new(self)
    }
    #[doc = "Bit 17 - Software start of a conversion on the regular channel"]
    #[inline(always)]
    #[must_use]
    pub fn rswstart(&mut self) -> RSWSTART_W<DFSDM2_CR1_SPEC, 17> {
        RSWSTART_W::new(self)
    }
    #[doc = "Bit 18 - Continuous mode selection for regular conversions"]
    #[inline(always)]
    #[must_use]
    pub fn rcont(&mut self) -> RCONT_W<DFSDM2_CR1_SPEC, 18> {
        RCONT_W::new(self)
    }
    #[doc = "Bit 19 - Launch regular conversion synchronously with DFSDM0"]
    #[inline(always)]
    #[must_use]
    pub fn rsync(&mut self) -> RSYNC_W<DFSDM2_CR1_SPEC, 19> {
        RSYNC_W::new(self)
    }
    #[doc = "Bit 21 - DMA channel enabled to read data for the regular conversion"]
    #[inline(always)]
    #[must_use]
    pub fn rdmaen(&mut self) -> RDMAEN_W<DFSDM2_CR1_SPEC, 21> {
        RDMAEN_W::new(self)
    }
    #[doc = "Bits 24:26 - Regular channel selection"]
    #[inline(always)]
    #[must_use]
    pub fn rch(&mut self) -> RCH_W<DFSDM2_CR1_SPEC, 24> {
        RCH_W::new(self)
    }
    #[doc = "Bit 29 - Fast conversion mode selection for regular conversions"]
    #[inline(always)]
    #[must_use]
    pub fn fast(&mut self) -> FAST_W<DFSDM2_CR1_SPEC, 29> {
        FAST_W::new(self)
    }
    #[doc = "Bit 30 - Analog watchdog fast mode select"]
    #[inline(always)]
    #[must_use]
    pub fn awfsel(&mut self) -> AWFSEL_W<DFSDM2_CR1_SPEC, 30> {
        AWFSEL_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // `unsafe`: the caller is responsible for writing a value that is valid
    // for this register's bit layout (no per-field validation is performed).
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DFSDM control register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dfsdm2_cr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dfsdm2_cr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DFSDM2_CR1_SPEC;
impl crate::RegisterSpec for DFSDM2_CR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`dfsdm2_cr1::R`](R) reader structure"]
impl crate::Readable for DFSDM2_CR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`dfsdm2_cr1::W`](W) writer structure"]
impl crate::Writable for DFSDM2_CR1_SPEC {
    // Both bitmaps are zero: no bits in this register need to be forced to
    // 0/1 during a read-modify-write (i.e. no w1c/w0c-style fields declared).
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DFSDM2_CR1 to value 0"]
impl crate::Resettable for DFSDM2_CR1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! Various model repository for easily interacting with the database.
use super::model::Gossip;
use super::model::Gossiper;
use super::model::NewGossip;
use super::model::NewGossiper;
use super::DatabaseWrapper;
use diesel::prelude::*;
pub struct GossipRepository;
impl GossipRepository {
/// Save a new gossip.
pub fn save(gossip: NewGossip) -> anyhow::Result<()> {
use super::schema::gossips::dsl::*;
DatabaseWrapper::get().run(move |conn| {
match diesel::insert_into(gossips).values(gossip).execute(conn) {
Ok(val) => {
if val == 0 {
Err(anyhow!("no gossip was added"))
} else {
Ok(())
}
}
Err(e) => Err(anyhow!("{}", e)),
}
})
}
/// Find all gossips for a guild id.
pub fn find_all_by_guild_id(guild_id_filter: u64) -> anyhow::Result<Vec<Gossip>> {
use super::schema::gossips::dsl::*;
let guild_id_vec = super::u64_to_vecu8(guild_id_filter);
DatabaseWrapper::get().run(|conn| {
match gossips
.filter(guild_id.eq(guild_id_vec))
.load::<Gossip>(conn)
{
Ok(value) => Ok(value),
Err(e) => Err(anyhow!("{}", e)),
}
})
}
/// Find the latest gossip by guild id.
pub fn find_by_guild_id_ord_added_desc(guild_id_filter: u64) -> anyhow::Result<Option<Gossip>> {
use super::schema::gossips::dsl::*;
let guild_id_vec = super::u64_to_vecu8(guild_id_filter);
DatabaseWrapper::get().run(|conn| {
match gossips
.filter(guild_id.eq(guild_id_vec))
.order(added.desc())
.first::<Gossip>(conn)
.optional()
{
Ok(value) => Ok(value),
Err(e) => Err(anyhow!("{}", e)),
}
})
}
}
/// Repository for reading and updating [`Gossiper`] rows.
pub struct GossiperRepository;
impl GossiperRepository {
    /// Save a new gossiper.
    ///
    /// Returns an error if the insert fails or affects zero rows.
    pub fn save(gossiper: NewGossiper) -> anyhow::Result<()> {
        use super::schema::gossipers::dsl::*;
        DatabaseWrapper::get().run(move |conn| {
            // `execute` yields the number of inserted rows; zero means the
            // insert silently did nothing, which is treated as an error.
            let inserted = diesel::insert_into(gossipers)
                .values(gossiper)
                .execute(conn)
                .map_err(|e| anyhow!("{}", e))?;
            if inserted == 0 {
                bail!("no gossiper was added");
            }
            Ok(())
        })
    }
    /// Find a gossiper by discord id.
    ///
    /// Returns `Ok(None)` when no matching row exists.
    pub fn find_by_discord_id(discord_id_filter: u64) -> anyhow::Result<Option<Gossiper>> {
        use super::schema::gossipers::dsl::*;
        // Discord ids are stored as byte vectors (see `super::u64_to_vecu8`).
        let discord_id_vec = super::u64_to_vecu8(discord_id_filter);
        DatabaseWrapper::get().run(|conn| {
            gossipers
                .filter(discord_id.eq(discord_id_vec))
                .first::<Gossiper>(conn)
                // `optional()` converts diesel's NotFound error into Ok(None).
                .optional()
                .map_err(|e| anyhow!("{}", e))
        })
    }
    /// Update a gossiper's preferred guild id by discord id.
    ///
    /// Fails if the gossiper does not exist or the update affects zero rows.
    pub fn update_preferred_guild_by_discord_id(
        discord_id_filter: u64,
        guild_id: u64,
    ) -> anyhow::Result<()> {
        use super::schema::gossipers::dsl::*;
        let gossiper = match Self::find_by_discord_id(discord_id_filter)? {
            Some(value) => value,
            None => bail!("there is no gossiper to update"),
        };
        DatabaseWrapper::get().run(|conn| {
            let updated = diesel::update(&gossiper)
                .set(preferred_guild.eq(Some(super::u64_to_vecu8(guild_id))))
                .execute(conn)
                .map_err(|e| anyhow!("{}", e))?;
            if updated == 0 {
                bail!("no gossiper was updated");
            }
            Ok(())
        })
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// Top-level error type aggregating every operation-specific error in this
// generated client. The `#[from]` attribute on each variant lets an
// operation's error convert into this enum via `?` at call sites.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    HealthApi_GetServiceStatus(#[from] health_api::get_service_status::Error),
    #[error(transparent)]
    WebPubSub_GenerateClientToken(#[from] web_pub_sub::generate_client_token::Error),
    #[error(transparent)]
    WebPubSub_SendToAll(#[from] web_pub_sub::send_to_all::Error),
    #[error(transparent)]
    WebPubSub_CloseConnection(#[from] web_pub_sub::close_connection::Error),
    #[error(transparent)]
    WebPubSub_ConnectionExists(#[from] web_pub_sub::connection_exists::Error),
    #[error(transparent)]
    WebPubSub_SendToConnection(#[from] web_pub_sub::send_to_connection::Error),
    #[error(transparent)]
    WebPubSub_GroupExists(#[from] web_pub_sub::group_exists::Error),
    #[error(transparent)]
    WebPubSub_SendToGroup(#[from] web_pub_sub::send_to_group::Error),
    #[error(transparent)]
    WebPubSub_AddConnectionToGroup(#[from] web_pub_sub::add_connection_to_group::Error),
    #[error(transparent)]
    WebPubSub_RemoveConnectionFromGroup(#[from] web_pub_sub::remove_connection_from_group::Error),
    #[error(transparent)]
    WebPubSub_UserExists(#[from] web_pub_sub::user_exists::Error),
    #[error(transparent)]
    WebPubSub_SendToUser(#[from] web_pub_sub::send_to_user::Error),
    #[error(transparent)]
    WebPubSub_AddUserToGroup(#[from] web_pub_sub::add_user_to_group::Error),
    #[error(transparent)]
    WebPubSub_RemoveUserFromGroup(#[from] web_pub_sub::remove_user_from_group::Error),
    #[error(transparent)]
    WebPubSub_RemoveUserFromAllGroups(#[from] web_pub_sub::remove_user_from_all_groups::Error),
    #[error(transparent)]
    WebPubSub_GrantPermission(#[from] web_pub_sub::grant_permission::Error),
    #[error(transparent)]
    WebPubSub_RevokePermission(#[from] web_pub_sub::revoke_permission::Error),
    #[error(transparent)]
    WebPubSub_CheckPermission(#[from] web_pub_sub::check_permission::Error),
}
pub mod health_api {
    use super::{models, API_VERSION};
    // HEAD {base_path}/api/health — service availability probe.
    // Attaches a Bearer token only when a token credential is configured;
    // any status other than 200 OK maps to `Error::DefaultResponse`.
    pub async fn get_service_status(operation_config: &crate::OperationConfig) -> std::result::Result<(), get_service_status::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/api/health", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(get_service_status::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::HEAD);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get_service_status::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // The URI is set after the query pairs are appended so they are included.
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get_service_status::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(get_service_status::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(()),
            status_code => Err(get_service_status::Error::DefaultResponse { status_code }),
        }
    }
    // Operation-scoped error type; the shape is shared by every generated
    // operation in this client.
    pub mod get_service_status {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod web_pub_sub {
use super::{models, API_VERSION};
    // POST {base_path}/api/hubs/{hub}/:generateToken — request a client
    // access token. Optional query parameters: userId, role (repeatable),
    // minutesToExpire. Only 200 OK is treated as success; the body is
    // deserialized as `models::ClientTokenResponse`.
    pub async fn generate_client_token(
        operation_config: &crate::OperationConfig,
        hub: &str,
        user_id: Option<&str>,
        role: &[&str],
        minutes_to_expire: Option<i32>,
    ) -> std::result::Result<models::ClientTokenResponse, generate_client_token::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/api/hubs/{}/:generateToken", operation_config.base_path(), hub);
        let mut url = url::Url::parse(url_str).map_err(generate_client_token::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(generate_client_token::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        if let Some(user_id) = user_id {
            url.query_pairs_mut().append_pair("userId", user_id);
        }
        for value in role {
            // NOTE(review): `value` is already a &str; the to_string()
            // round-trip is redundant but harmless (generated code).
            url.query_pairs_mut().append_pair("role", value.to_string().as_str());
        }
        if let Some(minutes_to_expire) = minutes_to_expire {
            url.query_pairs_mut()
                .append_pair("minutesToExpire", minutes_to_expire.to_string().as_str());
        }
        // Body-less POST: Content-Length is set to 0 explicitly.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(generate_client_token::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(generate_client_token::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ClientTokenResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| generate_client_token::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(generate_client_token::Error::DefaultResponse { status_code }),
        }
    }
    // Operation-scoped error type for `generate_client_token`.
    pub mod generate_client_token {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // POST {base_path}/api/hubs/{hub}/:send — broadcast `message`
    // (JSON-serialized) to every connection in the hub, except connection ids
    // listed in `excluded`. Success is 202 Accepted.
    pub async fn send_to_all(
        operation_config: &crate::OperationConfig,
        hub: &str,
        excluded: &[&str],
        message: &str,
    ) -> std::result::Result<(), send_to_all::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/api/hubs/{}/:send", operation_config.base_path(), hub);
        let mut url = url::Url::parse(url_str).map_err(send_to_all::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(send_to_all::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        for value in excluded {
            url.query_pairs_mut().append_pair("excluded", value.to_string().as_str());
        }
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(message).map_err(send_to_all::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(send_to_all::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(send_to_all::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::ACCEPTED => Ok(()),
            status_code => Err(send_to_all::Error::DefaultResponse { status_code }),
        }
    }
    // Operation-scoped error type for `send_to_all`.
    pub mod send_to_all {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // DELETE {base_path}/api/hubs/{hub}/connections/{connection_id} — close a
    // client connection, with an optional `reason` query parameter.
    // Success is 200 OK.
    pub async fn close_connection(
        operation_config: &crate::OperationConfig,
        hub: &str,
        connection_id: &str,
        reason: Option<&str>,
    ) -> std::result::Result<(), close_connection::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/api/hubs/{}/connections/{}", operation_config.base_path(), hub, connection_id);
        let mut url = url::Url::parse(url_str).map_err(close_connection::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(close_connection::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        if let Some(reason) = reason {
            url.query_pairs_mut().append_pair("reason", reason);
        }
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(close_connection::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(close_connection::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(()),
            status_code => Err(close_connection::Error::DefaultResponse { status_code }),
        }
    }
    // Operation-scoped error type for `close_connection`.
    pub mod close_connection {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // HEAD {base_path}/api/hubs/{hub}/connections/{connection_id} —
    // existence check. 200 OK = exists, 404 maps to `Error::NotFound404`,
    // anything else maps to `Error::DefaultResponse`.
    pub async fn connection_exists(
        operation_config: &crate::OperationConfig,
        hub: &str,
        connection_id: &str,
    ) -> std::result::Result<(), connection_exists::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/api/hubs/{}/connections/{}", operation_config.base_path(), hub, connection_id);
        let mut url = url::Url::parse(url_str).map_err(connection_exists::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::HEAD);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(connection_exists::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(connection_exists::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(connection_exists::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(()),
            http::StatusCode::NOT_FOUND => Err(connection_exists::Error::NotFound404 {}),
            status_code => Err(connection_exists::Error::DefaultResponse { status_code }),
        }
    }
    // Operation-scoped error type for `connection_exists`.
    pub mod connection_exists {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            // NOTE(review): "#response_type" in the display string looks like
            // an unexpanded AutoRust template placeholder — confirm upstream.
            #[error("Error response #response_type")]
            NotFound404 {},
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // POST {base_path}/api/hubs/{hub}/connections/{connection_id}/:send —
    // deliver `message` (JSON-serialized) to one specific connection.
    // Success is 202 Accepted.
    pub async fn send_to_connection(
        operation_config: &crate::OperationConfig,
        hub: &str,
        connection_id: &str,
        message: &str,
    ) -> std::result::Result<(), send_to_connection::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/api/hubs/{}/connections/{}/:send",
            operation_config.base_path(),
            hub,
            connection_id
        );
        let mut url = url::Url::parse(url_str).map_err(send_to_connection::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(send_to_connection::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(message).map_err(send_to_connection::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(send_to_connection::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(send_to_connection::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::ACCEPTED => Ok(()),
            status_code => Err(send_to_connection::Error::DefaultResponse { status_code }),
        }
    }
    // Operation-scoped error type for `send_to_connection`.
    pub mod send_to_connection {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // HEAD {base_path}/api/hubs/{hub}/groups/{group} — existence check for a
    // group. 200 OK = exists, 404 maps to `Error::NotFound404`, anything else
    // maps to `Error::DefaultResponse`.
    pub async fn group_exists(
        operation_config: &crate::OperationConfig,
        hub: &str,
        group: &str,
    ) -> std::result::Result<(), group_exists::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/api/hubs/{}/groups/{}", operation_config.base_path(), hub, group);
        let mut url = url::Url::parse(url_str).map_err(group_exists::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::HEAD);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(group_exists::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(group_exists::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(group_exists::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(()),
            http::StatusCode::NOT_FOUND => Err(group_exists::Error::NotFound404 {}),
            status_code => Err(group_exists::Error::DefaultResponse { status_code }),
        }
    }
    // Operation-scoped error type for `group_exists`.
    pub mod group_exists {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            // NOTE(review): "#response_type" in the display string looks like
            // an unexpanded AutoRust template placeholder — confirm upstream.
            #[error("Error response #response_type")]
            NotFound404 {},
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // POST {base_path}/api/hubs/{hub}/groups/{group}/:send — deliver
    // `message` (JSON-serialized) to every connection in the group, except
    // connection ids listed in `excluded`. Success is 202 Accepted.
    pub async fn send_to_group(
        operation_config: &crate::OperationConfig,
        hub: &str,
        group: &str,
        excluded: &[&str],
        message: &str,
    ) -> std::result::Result<(), send_to_group::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/api/hubs/{}/groups/{}/:send", operation_config.base_path(), hub, group);
        let mut url = url::Url::parse(url_str).map_err(send_to_group::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(send_to_group::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        for value in excluded {
            url.query_pairs_mut().append_pair("excluded", value.to_string().as_str());
        }
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(message).map_err(send_to_group::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(send_to_group::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(send_to_group::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::ACCEPTED => Ok(()),
            status_code => Err(send_to_group::Error::DefaultResponse { status_code }),
        }
    }
    // Operation-scoped error type for `send_to_group`.
    pub mod send_to_group {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    // PUT {base_path}/api/hubs/{hub}/groups/{group}/connections/{connection_id}
    // — add a connection to a group. 200 OK = success, 404 maps to
    // `Error::NotFound404`, anything else maps to `Error::DefaultResponse`.
    pub async fn add_connection_to_group(
        operation_config: &crate::OperationConfig,
        hub: &str,
        group: &str,
        connection_id: &str,
    ) -> std::result::Result<(), add_connection_to_group::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/api/hubs/{}/groups/{}/connections/{}",
            operation_config.base_path(),
            hub,
            group,
            connection_id
        );
        let mut url = url::Url::parse(url_str).map_err(add_connection_to_group::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(add_connection_to_group::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(add_connection_to_group::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(add_connection_to_group::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(()),
            http::StatusCode::NOT_FOUND => Err(add_connection_to_group::Error::NotFound404 {}),
            status_code => Err(add_connection_to_group::Error::DefaultResponse { status_code }),
        }
    }
    // Operation-scoped error type for `add_connection_to_group`.
    pub mod add_connection_to_group {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            // NOTE(review): "#response_type" in the display string looks like
            // an unexpanded AutoRust template placeholder — confirm upstream.
            #[error("Error response #response_type")]
            NotFound404 {},
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn remove_connection_from_group(
operation_config: &crate::OperationConfig,
hub: &str,
group: &str,
connection_id: &str,
) -> std::result::Result<(), remove_connection_from_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/api/hubs/{}/groups/{}/connections/{}",
operation_config.base_path(),
hub,
group,
connection_id
);
let mut url = url::Url::parse(url_str).map_err(remove_connection_from_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove_connection_from_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(remove_connection_from_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(remove_connection_from_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => Err(remove_connection_from_group::Error::DefaultResponse { status_code }),
}
}
/// Error types for the `remove_connection_from_group` operation.
pub mod remove_connection_from_group {
    use super::{models, API_VERSION};
    /// Failure modes of the operation, from URL construction through
    /// response handling.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the documented success code.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // The serialize/deserialize variants come from the generated template;
        // this operation sends and receives no JSON body, so they are unused here.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn user_exists(
operation_config: &crate::OperationConfig,
hub: &str,
user_id: &str,
) -> std::result::Result<(), user_exists::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/api/hubs/{}/users/{}", operation_config.base_path(), hub, user_id);
let mut url = url::Url::parse(url_str).map_err(user_exists::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::HEAD);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(user_exists::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(user_exists::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(user_exists::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
http::StatusCode::NOT_FOUND => Err(user_exists::Error::NotFound404 {}),
status_code => Err(user_exists::Error::DefaultResponse { status_code }),
}
}
pub mod user_exists {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn send_to_user(
operation_config: &crate::OperationConfig,
hub: &str,
user_id: &str,
message: &str,
) -> std::result::Result<(), send_to_user::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/api/hubs/{}/users/{}/:send", operation_config.base_path(), hub, user_id);
let mut url = url::Url::parse(url_str).map_err(send_to_user::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(send_to_user::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(message).map_err(send_to_user::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(send_to_user::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(send_to_user::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(()),
status_code => Err(send_to_user::Error::DefaultResponse { status_code }),
}
}
/// Error types for the `send_to_user` operation.
pub mod send_to_user {
    use super::{models, API_VERSION};
    /// Failure modes of the operation, from URL construction through
    /// response handling.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the documented success code.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // Raised when JSON-encoding the message body fails.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn add_user_to_group(
operation_config: &crate::OperationConfig,
hub: &str,
group: &str,
user_id: &str,
) -> std::result::Result<(), add_user_to_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/api/hubs/{}/users/{}/groups/{}",
operation_config.base_path(),
hub,
user_id,
group
);
let mut url = url::Url::parse(url_str).map_err(add_user_to_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(add_user_to_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(add_user_to_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(add_user_to_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
http::StatusCode::NOT_FOUND => Err(add_user_to_group::Error::NotFound404 {}),
status_code => Err(add_user_to_group::Error::DefaultResponse { status_code }),
}
}
pub mod add_user_to_group {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn remove_user_from_group(
operation_config: &crate::OperationConfig,
hub: &str,
group: &str,
user_id: &str,
) -> std::result::Result<(), remove_user_from_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/api/hubs/{}/users/{}/groups/{}",
operation_config.base_path(),
hub,
user_id,
group
);
let mut url = url::Url::parse(url_str).map_err(remove_user_from_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove_user_from_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(remove_user_from_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(remove_user_from_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => Err(remove_user_from_group::Error::DefaultResponse { status_code }),
}
}
/// Error types for the `remove_user_from_group` operation.
pub mod remove_user_from_group {
    use super::{models, API_VERSION};
    /// Failure modes of the operation, from URL construction through
    /// response handling.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the documented success code.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // The serialize/deserialize variants come from the generated template;
        // this operation sends and receives no JSON body, so they are unused here.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn remove_user_from_all_groups(
operation_config: &crate::OperationConfig,
hub: &str,
user_id: &str,
) -> std::result::Result<(), remove_user_from_all_groups::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/api/hubs/{}/users/{}/groups", operation_config.base_path(), hub, user_id);
let mut url = url::Url::parse(url_str).map_err(remove_user_from_all_groups::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove_user_from_all_groups::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(remove_user_from_all_groups::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(remove_user_from_all_groups::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => Err(remove_user_from_all_groups::Error::DefaultResponse { status_code }),
}
}
/// Error types for the `remove_user_from_all_groups` operation.
pub mod remove_user_from_all_groups {
    use super::{models, API_VERSION};
    /// Failure modes of the operation, from URL construction through
    /// response handling.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the documented success code.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // The serialize/deserialize variants come from the generated template;
        // this operation sends and receives no JSON body, so they are unused here.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn grant_permission(
operation_config: &crate::OperationConfig,
hub: &str,
permission: &str,
connection_id: &str,
target_name: Option<&str>,
) -> std::result::Result<(), grant_permission::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/api/hubs/{}/permissions/{}/connections/{}",
operation_config.base_path(),
hub,
permission,
connection_id
);
let mut url = url::Url::parse(url_str).map_err(grant_permission::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(grant_permission::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(target_name) = target_name {
url.query_pairs_mut().append_pair("targetName", target_name);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(grant_permission::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(grant_permission::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => Err(grant_permission::Error::DefaultResponse { status_code }),
}
}
/// Error types for the `grant_permission` operation.
pub mod grant_permission {
    use super::{models, API_VERSION};
    /// Failure modes of the operation, from URL construction through
    /// response handling.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the documented success code.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // The serialize/deserialize variants come from the generated template;
        // this operation sends and receives no JSON body, so they are unused here.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn revoke_permission(
operation_config: &crate::OperationConfig,
hub: &str,
permission: &str,
connection_id: &str,
target_name: Option<&str>,
) -> std::result::Result<(), revoke_permission::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/api/hubs/{}/permissions/{}/connections/{}",
operation_config.base_path(),
hub,
permission,
connection_id
);
let mut url = url::Url::parse(url_str).map_err(revoke_permission::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(revoke_permission::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(target_name) = target_name {
url.query_pairs_mut().append_pair("targetName", target_name);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(revoke_permission::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(revoke_permission::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => Err(revoke_permission::Error::DefaultResponse { status_code }),
}
}
/// Error types for the `revoke_permission` operation.
pub mod revoke_permission {
    use super::{models, API_VERSION};
    /// Failure modes of the operation, from URL construction through
    /// response handling.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the documented success code.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // The serialize/deserialize variants come from the generated template;
        // this operation sends and receives no JSON body, so they are unused here.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn check_permission(
operation_config: &crate::OperationConfig,
hub: &str,
permission: &str,
connection_id: &str,
target_name: Option<&str>,
) -> std::result::Result<(), check_permission::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/api/hubs/{}/permissions/{}/connections/{}",
operation_config.base_path(),
hub,
permission,
connection_id
);
let mut url = url::Url::parse(url_str).map_err(check_permission::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::HEAD);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_permission::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(target_name) = target_name {
url.query_pairs_mut().append_pair("targetName", target_name);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(check_permission::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_permission::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
http::StatusCode::NOT_FOUND => Err(check_permission::Error::NotFound404 {}),
status_code => Err(check_permission::Error::DefaultResponse { status_code }),
}
}
pub mod check_permission {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
|
use Configuration;
// Bytes per pixel in an RGBA buffer (red, green, blue, alpha).
const RGBA_PIXELS_BYTE_COUNT: usize = 4;
// Alpha value for a fully opaque pixel.
const RGBA_FULL_OPAQUE: u8 = 255;
/// Allocates a zero-filled RGBA byte buffer for a `width` x `height` image.
///
/// Fix: the original computed `(width * height)` in `u32` before casting, so
/// large images (e.g. 65536 x 65536) overflowed; both factors are now widened
/// to `usize` first. `vec![0; n]` also replaces the `with_capacity` + `resize`
/// pair with the idiomatic single-allocation form.
fn initialize_vector(width: u32, height: u32) -> Vec<u8> {
    let max_capacity: usize = RGBA_PIXELS_BYTE_COUNT * width as usize * height as usize;
    vec![0; max_capacity]
}
/// Escape-time iteration for the Mandelbrot candidate point c = (cx, cy).
///
/// Returns how many iterations ran before |z|^2 exceeded 4, capped at
/// `iterations` (the cap is returned for points that never escape within the
/// budget). Note the escape test intentionally uses the squared terms from
/// the previous step, exactly as the original loop did.
fn iterate_mandel(cx: f64, cy: f64, iterations: usize) -> usize {
    let mut re = 0.0_f64;
    let mut im = 0.0_f64;
    let mut re_sq = 0.0_f64;
    let mut im_sq = 0.0_f64;
    let mut remaining = iterations;
    while remaining > 0 && re_sq + im_sq <= 4.0 {
        let cross = re * im;
        re_sq = re * re;
        im_sq = im * im;
        // z := z^2 + c, using (re^2 - im^2, 2*re*im).
        re = re_sq - im_sq + cx;
        im = cross + cross + cy;
        remaining -= 1;
    }
    iterations - remaining
}
/// Writes one fully opaque RGBA pixel into `data` starting at byte `position`.
///
/// Generalized from `&mut Vec<u8>` to `&mut [u8]` (clippy `ptr_arg`): the
/// existing `fill_rgb(&mut data, ...)` call sites coerce unchanged, and the
/// function now also accepts plain slices.
///
/// Panics if `position + 3` is out of bounds, as before.
#[inline]
fn fill_rgb(data: &mut [u8], position: usize, r: u8, g: u8, b: u8) {
    data[position] = r;
    data[position + 1] = g;
    data[position + 2] = b;
    // Alpha channel is always fully opaque.
    data[position + 3] = RGBA_FULL_OPAQUE;
}
/// Renders the Mandelbrot set described by `configuration` into an RGBA byte
/// buffer (4 bytes per pixel, indexed by `width * iy + ix`).
///
/// Non-escaping ("interior") points are painted black; escaping points get a
/// 3-step color ramp driven by the logarithm of the escape iteration count.
pub fn get_mandelbrot_set(configuration: &Configuration) -> Vec<u8> {
    let iterations: usize = configuration.iterations;
    let width = configuration.width;
    let height = configuration.height;
    let mut data: Vec<u8> = initialize_vector(width, height);
    let xmin: f64 = configuration.xmin;
    let xmax: f64 = configuration.xmax;
    let ymin: f64 = configuration.ymin;
    let ymax: f64 = configuration.ymax;
    for ix in 0..width {
        for iy in 0..height {
            // Map the pixel to a point in the complex plane.
            // NOTE(review): divides by (width - 1)/(height - 1); a 1-pixel
            // dimension yields NaN coordinates — confirm minimum size upstream.
            let x: f64 = xmin + (xmax - xmin) * ix as f64 / (width - 1) as f64;
            let y: f64 = ymin + (ymax - ymin) * iy as f64 / (height - 1) as f64;
            let i: usize = iterate_mandel(x, y, iterations);
            // Widen before multiplying so the byte offset cannot overflow u32.
            let ppos: usize = 4 * (width as usize * iy as usize + ix as usize);
            // Fix: iterate_mandel never returns more than `iterations`, so the
            // original `i > iterations` test was unreachable and interior
            // points were never painted black; `>=` restores the intent.
            if i >= iterations {
                fill_rgb(&mut data, ppos, 0, 0, 0);
            } else {
                // c lands in 0..=3; each branch's guard keeps the u8
                // multiplications below from overflowing.
                let c: u8 = (3.0 * (i as f64).ln() / (iterations as f64 - 1.0).ln()) as u8;
                match c {
                    c if c < 1 => fill_rgb(&mut data, ppos, 255 * c, 0, 0),
                    c if c < 2 => fill_rgb(&mut data, ppos, 255, 255 * (c - 1), 0),
                    _ => fill_rgb(&mut data, ppos, 255, 255, 255 * (c - 2)),
                }
            }
        }
    }
    data
}
|
use std::fs::File;
use std::io::prelude::*;
use std::collections::HashMap;
use image::{ImageBuffer, Rgba};
use rusttype::{point, Font, Scale};
// FIXME(review): hard-coded sample of US-keyboard characters used to
// pre-render glyphs; it covers no uppercase letters and no non-ASCII text,
// so glyphs outside this set are never cached.
static GLYPH_DATA: &'static str = "qwertyuiop[]asdfghjkl;'zxcvbnm,./1234567890-=+`";
/// Cache of pre-rasterized glyph bitmaps.
pub struct FontEngine{
    // 32x32 RGBA glyph images keyed by (font file name, glyph id);
    // populated by `load_font`.
    pub glyph_list: HashMap<(String, u32), ImageBuffer<Rgba<u8>, Vec<u8>>>
}
#[allow(dead_code)]
impl FontEngine{
    /// Creates an engine with an empty glyph cache.
    pub fn new() -> FontEngine {
        FontEngine { glyph_list: HashMap::new() }
    }
    /// Rasterizes every character of `GLYPH_DATA` from the font file at
    /// `path` into 32x32 RGBA images keyed by (file name, glyph id).
    ///
    /// Best-effort: silently returns without caching anything if the file
    /// cannot be opened, read, or parsed as a font.
    pub fn load_font(&mut self, path: String) {
        use std::path::Path;
        let path = Path::new(&path);
        let mut font_file = match File::open(path) {
            Ok(file) => file,
            Err(_) => return,
        };
        let mut raw = vec![];
        if font_file.read_to_end(&mut raw).is_err() {
            return;
        }
        let font = match Font::from_bytes(raw.as_slice()) {
            Ok(parsed) => parsed,
            Err(_) => return,
        };
        let scale = Scale { x: 32.0, y: 32.0 };
        let origin = point(20.0, 50.0);
        // Cache key prefix: the font's file name (path component, not the full path).
        let font_name = path.file_name().unwrap().to_str().unwrap().to_string();
        for glyph in font.layout(GLYPH_DATA, scale, origin) {
            let mut image: ImageBuffer<Rgba<u8>, Vec<u8>> = ImageBuffer::new(32, 32);
            // Whitespace/empty glyphs have no bounding box and stay blank.
            if glyph.pixel_bounding_box().is_some() {
                glyph.draw(|x, y, v| {
                    // White pixels; coverage `v` becomes the alpha channel.
                    image.put_pixel(x, y, Rgba { data: [255, 255, 255, (v * 255.0) as u8] })
                });
            }
            self.glyph_list.insert((font_name.clone(), glyph.id().0), image);
        }
    }
    /// Looks up a cached glyph image by font file name and glyph id.
    pub fn get(&self, font: String, glyph: u32) -> Option<&ImageBuffer<Rgba<u8>, Vec<u8>>> {
        let key = (font, glyph);
        self.glyph_list.get(&key)
    }
}
|
use std::collections::HashMap;
use exonum::blockchain::Transaction;
use exonum::crypto;
use exonum::crypto::{PublicKey, Signature};
use exonum::messages::Message;
use exonum::storage::Fork;
use prometheus::{Histogram, IntCounter};
use currency::assets::AssetBundle;
use currency::error::Error;
use currency::service::CONFIGURATION;
use currency::status;
use currency::transactions::components::{FeeStrategy, FeesCalculator, ThirdPartyFees};
use currency::transactions::components::permissions;
use currency::wallet;
use currency::SERVICE_ID;
/// Transaction ID: service-local message identifier used as `const ID` in the
/// `message!` declaration of `Exchange` and in permission checks.
pub const EXCHANGE_ID: u16 = 601;
// Offer payload agreed by both parties; the sender additionally signs these
// raw bytes (see `Exchange::verify`). Field notes are inferred from usage in
// this file.
encoding_struct! {
    struct ExchangeOffer {
        // Initiating party; co-signs the raw offer bytes.
        sender: &PublicKey,
        // Assets the sender hands over to the recipient.
        sender_assets: Vec<AssetBundle>,
        // Coins the sender pays to the recipient (see wallet::move_coins in process()).
        sender_value: u64,
        recipient: &PublicKey,
        // Assets the recipient hands over to the sender in return.
        recipient_assets: Vec<AssetBundle>,
        // Encoded FeeStrategy; decoded with FeeStrategy::try_from.
        fee_strategy: u8,
        // Presumably a uniqueness salt distinguishing otherwise identical
        // offers — TODO confirm against the replay-protection logic.
        seed: u64,
        // Free-form metadata attached to the exchange.
        data_info: &str,
    }
}
// Network message wrapping an ExchangeOffer together with the sender's
// detached signature over the offer bytes; the recipient signs the message
// itself (checked via verify_signature in `Transaction::verify`).
message! {
    /// `exchange` transaction.
    struct Exchange {
        const TYPE = SERVICE_ID;
        const ID = EXCHANGE_ID;
        offer: ExchangeOffer,
        sender_signature: &Signature,
    }
}
impl FeesCalculator for Exchange {
    /// Computes the total fee each party would pay for this exchange, without
    /// mutating any wallets.
    ///
    /// Combines the genesis (blockchain) exchange fee with per-asset third
    /// party fees over the assets on both sides of the offer, split between
    /// sender/recipient according to the offer's fee strategy.
    fn calculate_fees(&self, view: &mut Fork) -> Result<HashMap<PublicKey, u64>, Error> {
        let offer = self.offer();
        let genesis_fees = CONFIGURATION.read().unwrap().fees();
        let fees = ThirdPartyFees::new_exchange(
            &*view,
            offer
                .sender_assets()
                .into_iter()
                .chain(offer.recipient_assets().into_iter()),
        )?;
        let fee_strategy =
            FeeStrategy::try_from(offer.fee_strategy()).expect("fee strategy must be valid");
        let mut fees_table = HashMap::new();
        let payers = self.payers(&fee_strategy, genesis_fees.exchange())?;
        for (payer_key, fee) in payers {
            // The genesis fee recipient never pays itself.
            if genesis_fees.recipient() != &payer_key {
                fees_table.insert(payer_key, fee);
            }
        }
        for (receiver_key, fee) in fees.0 {
            let payers = self.payers(&fee_strategy, fee)?;
            for (payer_key, fee) in payers {
                // Likewise, a third-party fee receiver does not pay its own fee.
                if payer_key != receiver_key {
                    *fees_table.entry(payer_key).or_insert(0) += fee;
                }
            }
        }
        Ok(fees_table)
    }
}
impl Exchange {
    /// Splits `fee` between the offer's parties according to `fee_strategy`.
    ///
    /// `Intermediary` is rejected with `Error::InvalidTransaction` — a plain
    /// exchange has no intermediary party.
    fn payers(&self, fee_strategy: &FeeStrategy, fee: u64) -> Result<Vec<(PublicKey, u64)>, Error> {
        let offer = self.offer();
        let payers = match *fee_strategy {
            FeeStrategy::Recipient => vec![(*offer.recipient(), fee)],
            FeeStrategy::Sender => vec![(*offer.sender(), fee)],
            FeeStrategy::RecipientAndSender => {
                // Integer halves: with an odd fee, one unit is simply not charged.
                vec![(*offer.sender(), fee / 2), (*offer.recipient(), fee / 2)]
            }
            FeeStrategy::Intermediary => return Err(Error::InvalidTransaction),
        };
        Ok(payers)
    }
    /// Get raw bytes of the offer.
    pub fn offer_raw(&self) -> Vec<u8> {
        self.offer().raw
    }
    /// Applies the exchange to blockchain state: collects the genesis fee,
    /// then third-party asset fees, then moves coins/assets and persists all
    /// touched wallets.
    ///
    /// NOTE(review): the genesis-fee wallet stores happen before the later
    /// fallible steps; confirm whether a subsequent error is meant to leave
    /// those stores applied to the fork.
    fn process(&self, view: &mut Fork) -> Result<(), Error> {
        info!("Processing tx: {:?}", self);
        let genesis_fees = CONFIGURATION.read().unwrap().fees();
        let offer = self.offer();
        let fee_strategy =
            FeeStrategy::try_from(offer.fee_strategy()).expect("fee strategy must be valid");
        let mut genesis = wallet::Schema(&*view).fetch(genesis_fees.recipient());
        // Collect the blockchain fee. Execution shall not continue if this fails.
        match fee_strategy {
            FeeStrategy::Recipient => {
                let mut recipient = wallet::Schema(&*view).fetch(offer.recipient());
                wallet::move_coins(&mut recipient, &mut genesis, genesis_fees.exchange())?;
                wallet::Schema(&mut *view).store(offer.recipient(), recipient);
            }
            FeeStrategy::Sender => {
                let mut sender = wallet::Schema(&*view).fetch(offer.sender());
                wallet::move_coins(&mut sender, &mut genesis, genesis_fees.exchange())?;
                wallet::Schema(&mut *view).store(offer.sender(), sender);
            }
            FeeStrategy::RecipientAndSender => {
                let mut recipient = wallet::Schema(&*view).fetch(offer.recipient());
                let mut sender = wallet::Schema(&*view).fetch(offer.sender());
                wallet::move_coins(&mut recipient, &mut genesis, genesis_fees.exchange() / 2)?;
                wallet::move_coins(&mut sender, &mut genesis, genesis_fees.exchange() / 2)?;
                wallet::Schema(&mut *view).store(offer.sender(), sender);
                wallet::Schema(&mut *view).store(offer.recipient(), recipient);
            }
            FeeStrategy::Intermediary => return Err(Error::InvalidTransaction),
        }
        wallet::Schema(&mut *view).store(genesis_fees.recipient(), genesis);
        let fees = ThirdPartyFees::new_exchange(
            &*view,
            offer
                .sender_assets()
                .into_iter()
                .chain(offer.recipient_assets().into_iter()),
        )?;
        // Operations below must either all succeed, or return an error without
        // saving anything to the database.
        // Process third party fees.
        let mut updated_wallets = match fee_strategy {
            FeeStrategy::Recipient => fees.collect(view, offer.recipient())?,
            FeeStrategy::Sender => fees.collect(view, offer.sender())?,
            FeeStrategy::RecipientAndSender => {
                fees.collect2(view, offer.sender(), offer.recipient())?
            }
            // Already rejected by the genesis-fee match above.
            FeeStrategy::Intermediary => unreachable!(),
        };
        // Process the main transaction: reuse wallets already touched by fee
        // collection, otherwise fetch fresh copies.
        let mut sender = updated_wallets
            .remove(&offer.sender())
            .unwrap_or_else(|| wallet::Schema(&*view).fetch(&offer.sender()));
        let mut recipient = updated_wallets
            .remove(&offer.recipient())
            .unwrap_or_else(|| wallet::Schema(&*view).fetch(&offer.recipient()));
        // Coins flow sender -> recipient; asset bundles swap in both directions.
        wallet::move_coins(&mut sender, &mut recipient, offer.sender_value())?;
        wallet::move_assets(&mut sender, &mut recipient, &offer.sender_assets())?;
        wallet::move_assets(&mut recipient, &mut sender, &offer.recipient_assets())?;
        updated_wallets.insert(*offer.sender(), sender);
        updated_wallets.insert(*offer.recipient(), recipient);
        // Save changes to the database.
        for (key, wallet) in updated_wallets {
            wallet::Schema(&mut *view).store(&key, wallet);
        }
        Ok(())
    }
}
// Prometheus counters and histogram instrumenting Exchange::verify and
// Exchange::execute.
lazy_static! {
    static ref VERIFY_COUNT: IntCounter = register_int_counter!(
        "dmbc_transaction_exchange_verify_count",
        "Times .verify() was called on a transaction."
    ).unwrap();
    static ref VERIFY_SUCCESS_COUNT: IntCounter = register_int_counter!(
        "dmbc_transaction_exchange_verify_success_count",
        // Fixed typo in the exported metric help text ("successfull").
        "Times verification was successful on a transaction."
    ).unwrap();
    static ref EXECUTE_COUNT: IntCounter = register_int_counter!(
        "dmbc_transaction_exchange_execute_count",
        "Transactions executed."
    ).unwrap();
    static ref EXECUTE_SUCCESS_COUNT: IntCounter = register_int_counter!(
        "dmbc_transaction_exchange_execute_success_count",
        "Times transaction execution reported a success."
    ).unwrap();
    static ref EXECUTE_FINISH_COUNT: IntCounter = register_int_counter!(
        "dmbc_transaction_exchange_execute_finish_count",
        "Times transaction has finished executing without panicking."
    ).unwrap();
    static ref EXECUTE_DURATION: Histogram = register_histogram!(
        "dmbc_transaction_exchange_execute_duration_seconds",
        "Duration of transaction execution."
    ).unwrap();
}
impl Transaction for Exchange {
    /// Stateless validation of the exchange message.
    ///
    /// Checks that sender and recipient differ, that the fee strategy is one
    /// of the supported variants, that both parties are authorized for
    /// `EXCHANGE_ID`, and that both signatures hold: the recipient signs the
    /// whole message, the sender signs only the raw offer bytes.
    fn verify(&self) -> bool {
        VERIFY_COUNT.inc();
        let offer = self.offer();
        let wallets_ok = offer.sender() != offer.recipient();
        let fee_strategy_ok = match FeeStrategy::try_from(offer.fee_strategy()).unwrap() {
            FeeStrategy::Recipient | FeeStrategy::Sender | FeeStrategy::RecipientAndSender => true,
            _ => false,
        };
        // Under fuzzing, skip the permission and signature checks.
        if cfg!(fuzzing) {
            return wallets_ok && fee_strategy_ok;
        }
        if !permissions::is_authorized(EXCHANGE_ID, vec![
            &self.offer().sender(),
            &self.offer().recipient()
        ]) {
            return false;
        }
        // Recipient countersigns the full message; the sender's detached
        // signature covers the raw offer bytes only.
        let recipient_ok = self.verify_signature(offer.recipient());
        let sender_ok = crypto::verify(self.sender_signature(), &offer.raw, offer.sender());
        if wallets_ok && fee_strategy_ok && recipient_ok && sender_ok {
            VERIFY_SUCCESS_COUNT.inc();
            true
        } else {
            false
        }
    }
    /// Executes the transaction, recording Prometheus metrics and persisting
    /// the success/error status under the transaction hash.
    fn execute(&self, view: &mut Fork) {
        EXECUTE_COUNT.inc();
        let timer = EXECUTE_DURATION.start_timer();
        let result = self.process(view);
        if let &Ok(_) = &result {
            EXECUTE_SUCCESS_COUNT.inc();
        }
        // The status is stored whether processing succeeded or failed.
        status::Schema(view).store(self.hash(), result);
        timer.observe_duration();
        EXECUTE_FINISH_COUNT.inc();
    }
}
|
use crate::errors::ApiError;
use crate::models::duels::Duel;
use crate::models::games::{Game, GameResult, NewGame};
use crate::models::matches::Match;
use crate::rating::*;
use crate::schema::duels::dsl::duels as table_duels;
use crate::schema::games::dsl::{
duel_id as col_duel_id, games as table_games, match_id as col_match_id,
};
use crate::schema::matches::dsl::matches as table_matches;
use crate::DbConn;
use diesel::prelude::*;
use rocket::http::Status;
use rocket_contrib::json::{Json, JsonValue};
/**
** Mounted in /matches
**/
/// GET /matches/<id_match>/games — all games recorded for a match, as JSON.
#[get("/<id_match>/games")]
pub fn match_get_all(
    conn: DbConn,
    id_match: i32,
) -> Result<JsonValue, ApiError> {
    // Look up the match first; an unknown id surfaces as a 404 ApiError.
    let found_match = table_matches
        .find(id_match)
        .first::<Match>(&*conn)
        .map_err(|_| ApiError::new("Could not find match", 404))?;
    // Serialize the match's games to a JSON value.
    found_match.get_games(&*conn).map(|games| json!(games))
}
/// PUT /matches/<id_match>/games — record a finished game for a match, then
/// update team and player ratings. Returns 200 on success.
#[put("/<id_match>/games", format = "json", data = "<game_result>")]
pub fn match_place_game(
    conn: DbConn,
    id_match: i32,
    game_result: Json<GameResult>,
) -> Result<Status, ApiError> {
    let new_game = NewGame::new(game_result.into_inner(), Some(id_match), None);
    diesel::insert_into(table_games)
        .values(&new_game)
        .execute(&*conn)
        .map_err(|_| ApiError::new("Could not insert game", 500))
        .and_then(|_| {
            // NOTE(review): this re-fetches by match_id only, with no ordering,
            // so if a match can have several games this may return an older
            // row rather than the one just inserted — confirm intended
            // semantics or order by primary key descending.
            table_games
                .filter(col_match_id.eq(new_game.match_id))
                .first(&*conn)
                .map_err(|_| {
                    ApiError::new("Could not find newly inserted game", 500)
                })
        })
        .and_then(|inserted_game: Game| {
            // Ratings: team ratings first, then the individual players'.
            update_team_ratings(&inserted_game, &*conn)
                .and_then(|(winner, loser)| {
                    match_update_player_ratings(winner, loser, &*conn)
                })
                .and_then(|_| Ok(Status::raw(200)))
        })
}
/**
** Mounted in /duels
**/
/// GET /duels/<id_duel>/games — all games recorded for a duel, as JSON.
#[get("/<id_duel>/games")]
pub fn duel_get_all(conn: DbConn, id_duel: i32) -> Result<JsonValue, ApiError> {
    table_duels
        .find(id_duel)
        .first::<Duel>(&*conn)
        // Fixed copy-pasted error text: this endpoint looks up a duel,
        // not a match.
        .map_err(|_| ApiError::new("Could not find duel", 404))?
        .get_games(&*conn)
        // Map resulting games to json result
        .map(|g| json!(g))
}
/// PUT /duels/<id_duel>/games — record a finished game for a duel, then
/// update both players' ratings. Returns 200 on success.
#[put("/<id_duel>/games", format = "json", data = "<game_result>")]
pub fn duel_place_game(
    conn: DbConn,
    id_duel: i32,
    game_result: Json<GameResult>,
) -> Result<Status, ApiError> {
    let new_game = NewGame::new(game_result.into_inner(), None, Some(id_duel));
    diesel::insert_into(table_games)
        .values(&new_game)
        .execute(&*conn)
        .map_err(|_| ApiError::new("Could not insert game", 500))
        .and_then(|_| {
            // NOTE(review): re-fetches by duel_id only, with no ordering, so a
            // duel with several games may yield an older row rather than the
            // one just inserted — confirm intended semantics.
            table_games
                .filter(col_duel_id.eq(new_game.duel_id))
                .first(&*conn)
                .map_err(|_| {
                    ApiError::new("Could not find newly inserted game", 500)
                })
        })
        .and_then(|inserted_game: Game| {
            duel_update_player_ratings(&inserted_game, &*conn)
                .and_then(|_| Ok(Status::raw(200)))
        })
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.