text stringlengths 8 4.13M |
|---|
use crate::{DocBase, VarType};
// Static documentation text for the Pine `study` built-in.
// `'static` is implied on `const` references, so spelling it out is redundant
// (clippy::redundant_static_lifetimes).
const DESCRIPTION: &str = r#"
The function sets a number of study properties.
"#;

const EXAMPLE: &str = r#"
```pine
study(title='MyScriptStudy')
study(title="MyScriptStudy", shorttitle="MSS", overlay=true)
```
"#;

const ARGUMENT: &str = r#"
**title (string)** study title that would be seen in Indicators widget. Argument IS REQUIRED.
**shorttitle (string)** study short title that would be seen in the chart legend. Argument is optional.
**overlay (bool)** if true the study will be added as an overlay for the main series. If false - it would be added on a separate chart pane. Default is false.
**format (string)** type of formatting study values on the price axis. Possible values are: format.inherit, format.price, format.volume. Default is format.inherit.
"#;
// **precision (int)** number of digits after the floating point for study values on the price axis. Must be a non negative integer and not greater than 16. If omitted, using formatting from parent series. If format is format.inherit and this argument is set, then format becomes format.price.
// **scale (int)** price scale that the indicator should be attached to. Possible values are: scale.right, scale.left, scale.none. Value scale.none can be applied only in combination with 'overlay=true' setting. If omitted, using scale from main series.
// **max_bars_back (int)** Maximum number of bars available for a study for historical reference. This parameter is applied to every built-in or user variable in the script if there is a reference to historical data of a variable in the script code (‘[]’ operator is used). Variable buffer sizes in the Pine Script are typically autodetected. This however is not possible in certain cases which is why the parameter allows a user to manually set the lower bound of this value. NOTE: using of the max_bars_back function instead of the parameter is optimal because it applies to only one variable.
// **linktoseries (bool)** if true then the study will be always on the same pane and same price scale as the main series. Should be used only in combination with 'overlay=true'. Default is false.
/// Build the documentation entry for the built-in `study` function.
pub fn gen_doc() -> Vec<DocBase> {
    vec![DocBase {
        var_type: VarType::Function,
        name: "study",
        signatures: Vec::new(),
        description: DESCRIPTION,
        example: EXAMPLE,
        returns: "",
        arguments: ARGUMENT,
        remarks: "",
        links: "",
    }]
}
|
use crate::estimate::Statistic;
use crate::plot::plotters_backend::{Colors, DEFAULT_FONT, SIZE};
use crate::plot::{FilledCurve, Line, LineCurve, Rectangle as RectangleArea, Size};
use crate::report::BenchmarkId;
use crate::stats::univariate::Sample;
use plotters::data::float::pretty_print_float;
use plotters::prelude::*;
use std::path::PathBuf;
/// Render the absolute-statistic bootstrap-distribution plot for one
/// benchmark as an SVG at `path`.
///
/// Three layers are drawn on one cartesian chart: the bootstrap
/// distribution curve, the shaded confidence-interval area, and a line for
/// the point estimate, followed by a legend in the upper-right corner.
pub fn abs_distribution(
    colors: &Colors,
    id: &BenchmarkId,
    statistic: Statistic,
    size: Option<Size>,
    path: PathBuf,
    x_unit: &str,
    distribution_curve: LineCurve,
    bootstrap_area: FilledCurve,
    point_estimate: Line,
) {
    let root_area = SVGBackend::new(&path, size.unwrap_or(SIZE).into()).into_drawing_area();
    let x_range = plotters::data::fitting_range(distribution_curve.xs.iter());
    let mut y_range = plotters::data::fitting_range(distribution_curve.ys.iter());
    // 10% headroom so the curve does not touch the top of the frame.
    y_range.end *= 1.1;
    let mut chart = ChartBuilder::on(&root_area)
        .margin((5).percent())
        .caption(
            format!("{}:{}", id.as_title(), statistic),
            (DEFAULT_FONT, 20),
        )
        .set_label_area_size(LabelAreaPosition::Left, (5).percent_width().min(60))
        .set_label_area_size(LabelAreaPosition::Bottom, (5).percent_height().min(40))
        .build_cartesian_2d(x_range, y_range)
        .unwrap();
    chart
        .configure_mesh()
        .disable_mesh()
        .x_desc(format!("Average time ({})", x_unit))
        .y_desc("Density (a.u.)")
        .x_label_formatter(&|&v| pretty_print_float(v, true))
        .y_label_formatter(&|&v| pretty_print_float(v, true))
        .draw()
        .unwrap();
    // Layer 1: the bootstrap distribution curve.
    chart
        .draw_series(LineSeries::new(
            distribution_curve.to_points(),
            &colors.current_sample,
        ))
        .unwrap()
        .label("Bootstrap distribution")
        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], &colors.current_sample));
    // Layer 2: shaded area under the curve for the confidence interval.
    chart
        .draw_series(AreaSeries::new(
            bootstrap_area.to_points(),
            0.0,
            colors.current_sample.mix(0.25).filled().stroke_width(3),
        ))
        .unwrap()
        .label("Confidence interval")
        .legend(|(x, y)| {
            Rectangle::new(
                [(x, y - 5), (x + 20, y + 5)],
                colors.current_sample.mix(0.25).filled(),
            )
        });
    // Layer 3: the point estimate drawn as a single path element.
    chart
        .draw_series(std::iter::once(PathElement::new(
            point_estimate.to_line_vec(),
            colors.current_sample.filled().stroke_width(3),
        )))
        .unwrap()
        .label("Point estimate")
        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], &colors.current_sample));
    chart
        .configure_series_labels()
        .position(SeriesLabelPosition::UpperRight)
        .draw()
        .unwrap();
}
/// Render the relative-change bootstrap-distribution plot (comparison
/// against a previous run) as an SVG at `path`.
///
/// Same layers as `abs_distribution` plus a shaded rectangle spanning the
/// noise threshold over the full y-range.
pub fn rel_distribution(
    colors: &Colors,
    id: &BenchmarkId,
    statistic: Statistic,
    size: Option<Size>,
    path: PathBuf,
    distribution_curve: LineCurve,
    confidence_interval: FilledCurve,
    point_estimate: Line,
    noise_threshold: RectangleArea,
) {
    // The x-range comes from the sample extrema rather than fitting_range.
    let xs_ = Sample::new(&distribution_curve.xs);
    let x_min = xs_.min();
    let x_max = xs_.max();
    let y_range = plotters::data::fitting_range(distribution_curve.ys);
    let root_area = SVGBackend::new(&path, size.unwrap_or(SIZE).into()).into_drawing_area();
    let mut chart = ChartBuilder::on(&root_area)
        .margin((5).percent())
        .caption(
            format!("{}:{}", id.as_title(), statistic),
            (DEFAULT_FONT, 20),
        )
        .set_label_area_size(LabelAreaPosition::Left, (5).percent_width().min(60))
        .set_label_area_size(LabelAreaPosition::Bottom, (5).percent_height().min(40))
        // y_range is cloned because it is reused for the noise rectangle below.
        .build_cartesian_2d(x_min..x_max, y_range.clone())
        .unwrap();
    chart
        .configure_mesh()
        .disable_mesh()
        .x_desc("Relative change (%)")
        .y_desc("Density (a.u.)")
        .x_label_formatter(&|&v| pretty_print_float(v, true))
        .y_label_formatter(&|&v| pretty_print_float(v, true))
        .draw()
        .unwrap();
    // Bootstrap distribution curve.
    chart
        .draw_series(LineSeries::new(
            distribution_curve.to_points(),
            &colors.current_sample,
        ))
        .unwrap()
        .label("Bootstrap distribution")
        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], &colors.current_sample));
    // Shaded confidence-interval area.
    chart
        .draw_series(AreaSeries::new(
            confidence_interval.to_points(),
            0.0,
            colors.current_sample.mix(0.25).filled().stroke_width(3),
        ))
        .unwrap()
        .label("Confidence interval")
        .legend(|(x, y)| {
            Rectangle::new(
                [(x, y - 5), (x + 20, y + 5)],
                colors.current_sample.mix(0.25).filled(),
            )
        });
    // Point estimate line.
    chart
        .draw_series(std::iter::once(PathElement::new(
            point_estimate.to_line_vec(),
            colors.current_sample.filled().stroke_width(3),
        )))
        .unwrap()
        .label("Point estimate")
        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], &colors.current_sample));
    // Noise-threshold band across the whole vertical extent of the chart.
    chart
        .draw_series(std::iter::once(Rectangle::new(
            [
                (noise_threshold.left, y_range.start),
                (noise_threshold.right, y_range.end),
            ],
            colors.previous_sample.mix(0.1).filled(),
        )))
        .unwrap()
        .label("Noise threshold")
        .legend(|(x, y)| {
            Rectangle::new(
                [(x, y - 5), (x + 20, y + 5)],
                colors.previous_sample.mix(0.25).filled(),
            )
        });
    chart
        .configure_series_labels()
        .position(SeriesLabelPosition::UpperRight)
        .draw()
        .unwrap();
}
|
use std::io::prelude::*;
use std::str::FromStr;
/// Read one line of whitespace-separated integers from stdin and print them
/// back in descending order, space-separated.
fn main() {
    let stdin = std::io::stdin();
    let line = stdin.lock().lines().next().unwrap().unwrap();
    let mut numbers: Vec<i32> = line
        .split_whitespace()
        .map(|token| i32::from_str(token).unwrap())
        .collect();
    numbers.sort_by(|lhs, rhs| rhs.cmp(lhs));
    let joined = numbers
        .iter()
        .map(|n| n.to_string())
        .collect::<Vec<_>>()
        .join(" ");
    println!("{}", joined);
}
|
use crate::errors::*;
use crate::version::Version;
use bytes::*;
use std::cell::RefCell;
use std::convert::From;
use std::fmt::Display;
use std::mem;
use std::rc::Rc;
// Bolt wire-format marker bytes for strings: TINY packs lengths 0..=15 into
// its low nibble; SMALL/MEDIUM/LARGE are followed by a u8/u16/u32 length.
pub const TINY: u8 = 0x80;
pub const SMALL: u8 = 0xD0;
pub const MEDIUM: u8 = 0xD1;
pub const LARGE: u8 = 0xD2;

/// A Bolt protocol string value (owned UTF-8).
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct BoltString {
    pub value: String,
}
impl BoltString {
pub fn new(value: &str) -> Self {
BoltString {
value: value.to_string(),
}
}
pub fn can_parse(_: Version, input: Rc<RefCell<Bytes>>) -> bool {
let marker = input.borrow()[0];
(TINY..=(TINY | 0x0F)).contains(&marker)
|| marker == SMALL
|| marker == MEDIUM
|| marker == LARGE
}
}
impl Display for BoltString {
    /// Render the raw string value with no quoting or markers.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.value)
    }
}
impl From<&str> for BoltString {
fn from(v: &str) -> Self {
BoltString::new(v)
}
}
impl From<String> for BoltString {
fn from(v: String) -> Self {
BoltString::new(&v)
}
}
impl Into<String> for BoltString {
fn into(self) -> String {
self.value
}
}
impl BoltString {
    /// Serialize to the Bolt wire format: a marker (plus length field for
    /// non-tiny strings) followed by the raw UTF-8 bytes. Marker choice is
    /// by byte length: TINY (<=15, length in the marker's low nibble),
    /// SMALL (u8 length), MEDIUM (u16), LARGE (u32). Longer strings fail
    /// with `StringTooLong`.
    pub fn into_bytes(self, _: Version) -> Result<Bytes> {
        // Capacity upper bound: 1 marker byte + at most 4 length bytes + payload.
        let mut bytes = BytesMut::with_capacity(
            mem::size_of::<u8>() + mem::size_of::<u32>() + self.value.len(),
        );
        match self.value.len() {
            0..=15 => bytes.put_u8(TINY | self.value.len() as u8),
            16..=255 => {
                bytes.put_u8(SMALL);
                bytes.put_u8(self.value.len() as u8);
            }
            256..=65_535 => {
                bytes.put_u8(MEDIUM);
                bytes.put_u16(self.value.len() as u16);
            }
            65_536..=4_294_967_295 => {
                bytes.put_u8(LARGE);
                bytes.put_u32(self.value.len() as u32);
            }
            _ => return Err(Error::StringTooLong),
        };
        bytes.put_slice(self.value.as_bytes());
        Ok(bytes.freeze())
    }

    /// Deserialize a string from `input`, consuming the marker, the length
    /// field (if any) and the UTF-8 payload. Fails on an unrecognized
    /// marker or invalid UTF-8.
    pub fn parse(_: Version, input: Rc<RefCell<Bytes>>) -> Result<BoltString> {
        let mut input = input.borrow_mut();
        let marker = input.get_u8();
        let length = match marker {
            // Tiny string: length is packed into the marker's low nibble.
            0x80..=0x8F => 0x0F & marker as usize,
            SMALL => input.get_u8() as usize,
            MEDIUM => input.get_u16() as usize,
            LARGE => input.get_u32() as usize,
            _ => {
                return Err(Error::InvalidTypeMarker {
                    type_name: "string",
                    marker,
                })
            }
        };
        let byte_array = input.split_to(length).to_vec();
        let string_value =
            std::string::String::from_utf8(byte_array).map_err(Error::DeserializationError)?;
        Ok(string_value.into())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip coverage for each marker class: TINY (0..=15 bytes),
    // SMALL (u8 length), MEDIUM (u16 length) and LARGE (u32 length).

    #[test]
    fn should_serialize_empty_string() {
        let s = BoltString::new("");
        let b: Bytes = s.into_bytes(Version::V4_1).unwrap();
        // An empty string is a bare TINY marker with a zero low nibble.
        assert_eq!(&b[..], Bytes::from_static(&[TINY]));
    }

    #[test]
    fn should_deserialize_empty_string() {
        let input = Rc::new(RefCell::new(Bytes::from_static(&[TINY])));
        let s: BoltString = BoltString::parse(Version::V4_1, input).unwrap();
        assert_eq!(s, "".into());
    }

    #[test]
    fn should_serialize_tiny_string() {
        let s = BoltString::new("a");
        let b: Bytes = s.into_bytes(Version::V4_1).unwrap();
        // 0x81 = TINY | length 1, followed by 'a' (0x61).
        assert_eq!(&b[..], Bytes::from_static(&[0x81, 0x61]));
    }

    #[test]
    fn should_deserialize_tiny_string() {
        let serialized_bytes = Rc::new(RefCell::new(Bytes::from_static(&[0x81, 0x61])));
        let result: BoltString = BoltString::parse(Version::V4_1, serialized_bytes).unwrap();
        assert_eq!(result, "a".into());
    }

    #[test]
    fn should_serialize_small_string() {
        // 16 bytes is the smallest length that needs the SMALL (u8) form.
        let s = BoltString::new(&"a".repeat(16));
        let mut b: Bytes = s.into_bytes(Version::V4_1).unwrap();
        assert_eq!(b.get_u8(), SMALL);
        assert_eq!(b.get_u8(), 0x10);
        assert_eq!(b.len(), 0x10);
        for value in b {
            assert_eq!(value, 0x61);
        }
    }

    #[test]
    fn should_deserialize_small_string() {
        let serialized_bytes = Rc::new(RefCell::new(Bytes::from_static(&[SMALL, 0x01, 0x61])));
        let result: BoltString = BoltString::parse(Version::V4_1, serialized_bytes).unwrap();
        assert_eq!(result, "a".into());
    }

    #[test]
    fn should_serialize_medium_string() {
        // 256 bytes is the smallest length that needs the MEDIUM (u16) form.
        let s = BoltString::new(&"a".repeat(256));
        let mut b: Bytes = s.into_bytes(Version::V4_1).unwrap();
        assert_eq!(b.get_u8(), MEDIUM);
        assert_eq!(b.get_u16(), 0x100);
        assert_eq!(b.len(), 0x100);
        for value in b {
            assert_eq!(value, 0x61);
        }
    }

    #[test]
    fn should_deserialize_medium_string() {
        let serialized_bytes = Rc::new(RefCell::new(Bytes::from_static(&[
            MEDIUM, 0x00, 0x01, 0x61,
        ])));
        let result: BoltString = BoltString::parse(Version::V4_1, serialized_bytes).unwrap();
        assert_eq!(result, "a".into());
    }

    #[test]
    fn should_serialize_large_string() {
        // 65_536 bytes is the smallest length that needs the LARGE (u32) form.
        let s = BoltString::new(&"a".repeat(65_536));
        let mut b: Bytes = s.into_bytes(Version::V4_1).unwrap();
        assert_eq!(b.get_u8(), LARGE);
        assert_eq!(b.get_u32(), 0x10000);
        assert_eq!(b.len(), 0x10000);
        for value in b {
            assert_eq!(value, 0x61);
        }
    }

    #[test]
    fn should_deserialize_large_string() {
        let serialized_bytes = Rc::new(RefCell::new(Bytes::from_static(&[
            LARGE, 0x00, 0x00, 0x00, 0x01, 0x61,
        ])));
        let result: BoltString = BoltString::parse(Version::V4_1, serialized_bytes).unwrap();
        assert_eq!(result, "a".into());
    }
}
|
//! ```elixir
//! # label 1
//! # pushed to stack: ()
//! # returned from call: {:ok, document}
//! # full stack: ({:ok, document})
//! # returns: {:ok, parent}
//! {:ok, old_child} = Lumen.Web.Document.create_element(document, "table")
//! {:ok, parent} = Lumen.Web.Document.create_element(parent_document, "div")
//! :ok = Lumen.Web.Node.append_child(parent, old_child)
//! {:ok, new_child} = Lumen.Web.Document.create_element(document, "ul");
//! {:ok, replaced_child} = Lumen.Web.replace_child(parent, new_child, old_child)
//! ```
use std::convert::TryInto;
use liblumen_alloc::erts::exception;
use liblumen_alloc::erts::process::Process;
use liblumen_alloc::erts::term::prelude::*;
use super::label_2;
// Label 1 of the replace_child scenario: receives `{:ok, parent_document}`
// from the previous frame, validates it, then queues
// `Document.create_element(parent_document, "table")` followed by label 2.
#[native_implemented::label]
fn result(process: &Process, ok_parent_document: Term) -> exception::Result<Term> {
    // The previous step must have produced an `{:ok, parent_document}` tuple.
    assert!(
        ok_parent_document.is_boxed_tuple(),
        "ok_parent_document ({:?}) is not a tuple",
        ok_parent_document
    );
    let ok_parent_document_tuple: Boxed<Tuple> = ok_parent_document.try_into().unwrap();
    assert_eq!(ok_parent_document_tuple.len(), 2);
    assert_eq!(ok_parent_document_tuple[0], Atom::str_to_term("ok"));
    let parent_document = ok_parent_document_tuple[1];
    // The document itself is an opaque resource reference.
    assert!(parent_document.is_boxed_resource_reference());
    let old_child_tag = process.binary_from_str("table");
    // NOTE(review): the bool passed to `with_arguments` presumably controls
    // whether the previous frame's return value is prepended — confirm
    // against sibling labels.
    process.queue_frame_with_arguments(
        liblumen_web::document::create_element_2::frame()
            .with_arguments(false, &[parent_document, old_child_tag]),
    );
    process.queue_frame_with_arguments(label_2::frame().with_arguments(true, &[parent_document]));
    Ok(Term::NONE)
}
|
use amethyst::{
prelude::*,
core::{Transform},
ecs::{Component, NullStorage, Join, ParJoin, Read, Write, ReadStorage, System, WriteStorage, Entities},
};
use crate::physics::Physics;
use crate::charactermove::CharacterMove;
/// Marker component tagging an entity as a simple enemy.
#[derive(Default)]
pub struct SimpleEnemy;

impl Component for SimpleEnemy {
    // NullStorage: the component carries no data, only presence.
    type Storage = NullStorage<Self>;
}
/// System driving `SimpleEnemy` entities.
pub struct SimpleEnemySystem;

impl<'s> System<'s> for SimpleEnemySystem {
    type SystemData = (
        ReadStorage<'s, SimpleEnemy>,
        ReadStorage<'s, Transform>,
        WriteStorage<'s, Physics>,
        ReadStorage<'s, CharacterMove>,
    );

    fn run(&mut self, (simple_enemies, transforms, mut physics, charactermoves): Self::SystemData) {
        // Identify the character position: the entity that has both a
        // Transform and a CharacterMove. `last()` keeps the same winner as
        // the original overwrite-in-a-loop approach.
        let character = (&transforms, &charactermoves)
            .join()
            .map(|(transform, _charactermove)| transform)
            .last();

        // TODO(review): the original body ended with an incomplete
        // `for (mut )` fragment that did not compile; the enemy-movement
        // logic (presumably joining `simple_enemies` with `physics` and
        // steering toward `character`) still needs to be written. The
        // binding below only keeps the prepared data from triggering
        // unused-variable warnings until then.
        let _ = (&simple_enemies, &mut physics, character);
    }
}
|
use criterion::measurement::WallTime;
use criterion::{
criterion_group, criterion_main, Bencher, BenchmarkGroup, BenchmarkId, Criterion, Throughput,
};
use rustpython_compiler::Mode;
use rustpython_parser::parse_program;
use rustpython_vm::{Interpreter, PyResult};
use std::collections::HashMap;
use std::path::Path;
/// Benchmark `source` by running it on CPython (via the `cpython` crate
/// bindings); prints the Python traceback and panics if execution raises.
fn bench_cpython_code(b: &mut Bencher, source: &str) {
    let gil = cpython::Python::acquire_gil();
    let python = gil.python();
    b.iter(|| match python.run(source, None, None) {
        Ok(()) => {}
        Err(e) => {
            e.print(python);
            panic!("Error running source")
        }
    });
}
/// Benchmark `source` on RustPython (compile + execute per iteration).
fn bench_rustpy_code(b: &mut Bencher, name: &str, source: &str) {
    // NOTE: Take long time.
    Interpreter::without_stdlib(Default::default()).enter(|vm| {
        // Note: bench_cpython is both compiling and executing the code.
        // As such we compile the code in the benchmark loop as well.
        b.iter(|| {
            let code = vm.compile(source, Mode::Exec, name.to_owned()).unwrap();
            // Fresh scope each iteration so runs don't observe earlier state.
            let scope = vm.new_scope_with_builtins();
            let res: PyResult = vm.run_code_obj(code.clone(), scope);
            // Treat a Python-level error as a benchmark failure.
            vm.unwrap_pyresult(res);
        })
    })
}
/// Benchmark full execution (compile + run) of one Python source file on
/// both CPython and RustPython within the same group.
///
/// Takes `&str` rather than `&String` (clippy::ptr_arg); existing callers
/// that pass `&String` still work via deref coercion.
pub fn benchmark_file_execution(
    group: &mut BenchmarkGroup<WallTime>,
    name: &str,
    contents: &str,
) {
    group.bench_function(BenchmarkId::new(name, "cpython"), |b| {
        bench_cpython_code(b, contents)
    });
    group.bench_function(BenchmarkId::new(name, "rustpython"), |b| {
        bench_rustpy_code(b, name, contents)
    });
}
/// Benchmark parse-to-AST of one source file on RustPython and CPython.
/// Throughput is reported in bytes of source processed.
pub fn benchmark_file_parsing(group: &mut BenchmarkGroup<WallTime>, name: &str, contents: &str) {
    group.throughput(Throughput::Bytes(contents.len() as u64));
    group.bench_function(BenchmarkId::new("rustpython", name), |b| {
        b.iter(|| parse_program(contents, name).unwrap())
    });
    group.bench_function(BenchmarkId::new("cpython", name), |b| {
        let gil = cpython::Python::acquire_gil();
        let py = gil.python();
        // Prepare the C string and filename object once, outside the hot loop.
        let code = std::ffi::CString::new(contents).unwrap();
        let fname = cpython::PyString::new(py, name);
        b.iter(|| parse_program_cpython(py, &code, &fname))
    });
}
/// Parse `code` into a CPython AST via the C API
/// (`PyParser_ASTFromStringObject`); any raised exception is printed.
/// Used only to benchmark parse speed — the AST is discarded with the arena.
fn parse_program_cpython(
    py: cpython::Python<'_>,
    code: &std::ffi::CStr,
    fname: &cpython::PyString,
) {
    // Declared locally — presumably these arena functions are not exposed
    // by the python3_sys bindings.
    extern "C" {
        fn PyArena_New() -> *mut python3_sys::PyArena;
        fn PyArena_Free(arena: *mut python3_sys::PyArena);
    }
    use cpython::PythonObject;
    let fname = fname.as_object();
    // SAFETY: the GIL is held (witnessed by the `py` token), the arena is
    // checked non-null before use, and it is freed exactly once on exit.
    unsafe {
        let arena = PyArena_New();
        assert!(!arena.is_null());
        let ret = python3_sys::PyParser_ASTFromStringObject(
            code.as_ptr() as _,
            fname.as_ptr(),
            python3_sys::Py_file_input,
            std::ptr::null_mut(),
            arena,
        );
        // A null AST means a parse error; surface the Python exception.
        if ret.is_null() {
            cpython::PyErr::fetch(py).print(py);
        }
        PyArena_Free(arena);
    }
}
/// Benchmark the pystone workload at increasing loop counts (10k..=30k)
/// on both CPython and RustPython.
pub fn benchmark_pystone(group: &mut BenchmarkGroup<WallTime>, contents: String) {
    // Default is 50_000. This takes a while, so reduce it to 30k.
    for idx in (10_000..=30_000).step_by(10_000) {
        // Prepend a LOOPS definition so the script runs `idx` iterations.
        let code_with_loops = format!("LOOPS = {}\n{}", idx, contents);
        let code_str = code_with_loops.as_str();
        group.throughput(Throughput::Elements(idx as u64));
        group.bench_function(BenchmarkId::new("cpython", idx), |b| {
            bench_cpython_code(b, code_str)
        });
        group.bench_function(BenchmarkId::new("rustpython", idx), |b| {
            bench_rustpy_code(b, "pystone", code_str)
        });
    }
}
/// Top-level criterion entry point: loads every file in
/// `./benches/benchmarks/`, then runs the parsing group, the pystone group
/// (when `pystone.py` is present) and the execution group.
pub fn criterion_benchmark(c: &mut Criterion) {
    let benchmark_dir = Path::new("./benches/benchmarks/");
    // Map of file name -> file contents.
    let mut benches = benchmark_dir
        .read_dir()
        .unwrap()
        .map(|entry| {
            let path = entry.unwrap().path();
            (
                path.file_name().unwrap().to_str().unwrap().to_owned(),
                std::fs::read_to_string(path).unwrap(),
            )
        })
        .collect::<HashMap<_, _>>();
    // Benchmark parsing
    let mut parse_group = c.benchmark_group("parse_to_ast");
    for (name, contents) in &benches {
        benchmark_file_parsing(&mut parse_group, name, contents);
    }
    parse_group.finish();
    // Benchmark PyStone
    // `remove` keeps pystone out of the generic execution group below.
    if let Some(pystone_contents) = benches.remove("pystone.py") {
        let mut pystone_group = c.benchmark_group("pystone");
        benchmark_pystone(&mut pystone_group, pystone_contents);
        pystone_group.finish();
    }
    // Benchmark execution
    let mut execution_group = c.benchmark_group("execution");
    for (name, contents) in &benches {
        benchmark_file_execution(&mut execution_group, name, contents);
    }
    execution_group.finish();
}
// Register the benchmark entry point with criterion's harness.
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
|
use hashbrown::{HashMap, HashSet};
use log::*;
use morgan_interface::pubkey::Pubkey;
/// Identifier of a fork.
pub type Fork = u64;

/// Index from account pubkey to that account's per-fork values.
#[derive(Default)]
pub struct AccountsIndex<T> {
    // Per-account list of (fork, value) pairs.
    account_maps: HashMap<Pubkey, Vec<(Fork, T)>>,
    // Forks registered via `add_root`.
    roots: HashSet<Fork>,
    //This value that needs to be stored to recover the index from AppendVec
    pub last_root: Fork,
}
impl<T: Clone> AccountsIndex<T> {
    /// Get an account.
    /// The latest account that appears in `ancestors` or `roots` is returned.
    pub fn get(&self, pubkey: &Pubkey, ancestors: &HashMap<Fork, usize>) -> Option<(&T, Fork)> {
        let list = self.account_maps.get(pubkey)?;
        let mut max = 0;
        let mut rv = None;
        for e in list.iter().rev() {
            // Keep the newest entry that is visible from `ancestors` or rooted.
            if e.0 >= max && (ancestors.contains_key(&e.0) || self.is_root(e.0)) {
                trace!("GET {} {:?}", e.0, ancestors);
                rv = Some((&e.1, e.0));
                max = e.0;
            }
        }
        rv
    }

    /// Insert a new fork.
    /// @retval - The return value contains any squashed accounts that can be
    /// freed from storage, in the same order the original produced them:
    /// first any replaced entry for this same fork, then purged-fork entries.
    pub fn insert(&mut self, fork: Fork, pubkey: &Pubkey, account_info: T) -> Vec<(Fork, T)> {
        // Snapshot before mutably borrowing the map; `last_root` is the only
        // part of `self` that purging depends on (`is_purged` is
        // `fork < self.last_root`), which makes the original swap-out/swap-in
        // dance around `self.is_purged` unnecessary.
        let last_root = self.last_root;
        let fork_vec = self.account_maps.entry(*pubkey).or_default();
        // Replace any previous entry for this same fork (last write wins).
        let mut rv: Vec<(Fork, T)> = fork_vec
            .iter()
            .filter(|(f, _)| *f == fork)
            .cloned()
            .collect();
        fork_vec.retain(|(f, _)| *f != fork);
        // Add the new entry.
        fork_vec.push((fork, account_info));
        // Evict entries on purged forks; return them so storage can be freed.
        rv.extend(fork_vec.iter().filter(|(f, _)| *f < last_root).cloned());
        fork_vec.retain(|(f, _)| *f >= last_root);
        rv
    }

    /// A fork is purged once it is strictly older than the latest root.
    pub fn is_purged(&self, fork: Fork) -> bool {
        fork < self.last_root
    }

    /// Whether `fork` has been registered via `add_root`.
    pub fn is_root(&self, fork: Fork) -> bool {
        self.roots.contains(&fork)
    }

    /// Register `fork` as a root; roots must be added in increasing order
    /// (fork 0 may be the first root).
    pub fn add_root(&mut self, fork: Fork) {
        assert!(
            (self.last_root == 0 && fork == 0) || (fork > self.last_root),
            "new roots must be increasing"
        );
        self.last_root = fork;
        self.roots.insert(fork);
    }

    /// Remove the fork when the storage for the fork is freed
    /// Accounts no longer reference this fork.
    pub fn cleanup_dead_fork(&mut self, fork: Fork) {
        self.roots.remove(&fork);
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use morgan_interface::signature::{Keypair, KeypairUtil};

    #[test]
    fn test_get_empty() {
        let key = Keypair::new();
        let index = AccountsIndex::<bool>::default();
        let ancestors = HashMap::new();
        assert_eq!(index.get(&key.pubkey(), &ancestors), None);
    }

    #[test]
    fn test_insert_no_ancestors() {
        let key = Keypair::new();
        let mut index = AccountsIndex::<bool>::default();
        let gc = index.insert(0, &key.pubkey(), true);
        assert!(gc.is_empty());
        // Not visible: fork 0 is neither an ancestor nor a root.
        let ancestors = HashMap::new();
        assert_eq!(index.get(&key.pubkey(), &ancestors), None);
    }

    #[test]
    fn test_insert_wrong_ancestors() {
        let key = Keypair::new();
        let mut index = AccountsIndex::<bool>::default();
        let gc = index.insert(0, &key.pubkey(), true);
        assert!(gc.is_empty());
        // Ancestors mention fork 1 only; the entry lives on fork 0.
        let ancestors = vec![(1, 1)].into_iter().collect();
        assert_eq!(index.get(&key.pubkey(), &ancestors), None);
    }

    #[test]
    fn test_insert_with_ancestors() {
        let key = Keypair::new();
        let mut index = AccountsIndex::<bool>::default();
        let gc = index.insert(0, &key.pubkey(), true);
        assert!(gc.is_empty());
        let ancestors = vec![(0, 0)].into_iter().collect();
        assert_eq!(index.get(&key.pubkey(), &ancestors), Some((&true, 0)));
    }

    #[test]
    fn test_is_root() {
        let mut index = AccountsIndex::<bool>::default();
        assert!(!index.is_root(0));
        index.add_root(0);
        assert!(index.is_root(0));
    }

    #[test]
    fn test_insert_with_root() {
        let key = Keypair::new();
        let mut index = AccountsIndex::<bool>::default();
        let gc = index.insert(0, &key.pubkey(), true);
        assert!(gc.is_empty());
        // Rooted forks are visible even with empty ancestors.
        let ancestors = vec![].into_iter().collect();
        index.add_root(0);
        assert_eq!(index.get(&key.pubkey(), &ancestors), Some((&true, 0)));
    }

    #[test]
    fn test_is_purged() {
        // Purged means strictly older than the last root.
        let mut index = AccountsIndex::<bool>::default();
        assert!(!index.is_purged(0));
        index.add_root(1);
        assert!(index.is_purged(0));
        index.add_root(2);
        assert!(index.is_purged(1));
    }

    #[test]
    fn test_max_last_root() {
        let mut index = AccountsIndex::<bool>::default();
        index.add_root(1);
        assert_eq!(index.last_root, 1);
    }

    #[test]
    #[should_panic]
    fn test_max_last_root_old() {
        // Roots must be added in increasing order.
        let mut index = AccountsIndex::<bool>::default();
        index.add_root(1);
        index.add_root(0);
    }

    #[test]
    fn test_cleanup_first() {
        let mut index = AccountsIndex::<bool>::default();
        index.add_root(0);
        index.add_root(1);
        index.cleanup_dead_fork(0);
        assert!(index.is_root(1));
        assert!(!index.is_root(0));
    }

    #[test]
    fn test_cleanup_last() {
        //this behavior might be undefined, clean up should only occur on older forks
        let mut index = AccountsIndex::<bool>::default();
        index.add_root(0);
        index.add_root(1);
        index.cleanup_dead_fork(1);
        assert!(!index.is_root(1));
        assert!(index.is_root(0));
    }

    #[test]
    fn test_update_last_wins() {
        // Inserting the same fork twice evicts and returns the old value.
        let key = Keypair::new();
        let mut index = AccountsIndex::<bool>::default();
        let ancestors = vec![(0, 0)].into_iter().collect();
        let gc = index.insert(0, &key.pubkey(), true);
        assert!(gc.is_empty());
        assert_eq!(index.get(&key.pubkey(), &ancestors), Some((&true, 0)));
        let gc = index.insert(0, &key.pubkey(), false);
        assert_eq!(gc, vec![(0, true)]);
        assert_eq!(index.get(&key.pubkey(), &ancestors), Some((&false, 0)));
    }

    #[test]
    fn test_update_new_fork() {
        // Entries on distinct forks coexist; visibility follows ancestors.
        let key = Keypair::new();
        let mut index = AccountsIndex::<bool>::default();
        let ancestors = vec![(0, 0)].into_iter().collect();
        let gc = index.insert(0, &key.pubkey(), true);
        assert!(gc.is_empty());
        let gc = index.insert(1, &key.pubkey(), false);
        assert!(gc.is_empty());
        assert_eq!(index.get(&key.pubkey(), &ancestors), Some((&true, 0)));
        let ancestors = vec![(1, 0)].into_iter().collect();
        assert_eq!(index.get(&key.pubkey(), &ancestors), Some((&false, 1)));
    }

    #[test]
    fn test_update_gc_purged_fork() {
        // Inserting after a newer root garbage-collects the purged fork 0.
        let key = Keypair::new();
        let mut index = AccountsIndex::<bool>::default();
        let gc = index.insert(0, &key.pubkey(), true);
        assert!(gc.is_empty());
        index.add_root(1);
        let gc = index.insert(1, &key.pubkey(), false);
        assert_eq!(gc, vec![(0, true)]);
        let ancestors = vec![].into_iter().collect();
        assert_eq!(index.get(&key.pubkey(), &ancestors), Some((&false, 1)));
    }
}
|
extern crate ted_interface;
extern crate byteorder;
use std::sync::mpsc;
use std::net;
use std::io;
use std::thread;
use std::sync::{Mutex, Arc};
use ted_interface as tint;
use ted_interface::Command;
use ted_interface::Frame;
/// Server-side handle for one connected player.
pub struct Client{
    // Outgoing frames; consumed by this client's send thread.
    pub frame_stream: mpsc::Sender<Frame>,
    // Incoming commands produced by this client's receive thread.
    pub command_stream: mpsc::Receiver<Command>,
    // NOTE(review): None at connect time; assignment site is not in this file.
    pub player_id: Option<usize>
}
/// Accept loop: binds 127.0.0.1:5000 and, for every accepted connection,
/// spawns a send thread and a receive thread wired to fresh channels, then
/// registers the resulting `Client` in the shared list.
pub fn net_thread(client_list: Arc<Mutex<Vec<Client>>>){
    let listener = net::TcpListener::bind("127.0.0.1:5000").expect("Failed to bind socket");
    loop{
        // Ignore transient accept errors and keep serving.
        let istream = match listener.accept(){
            Err(_) => continue,
            Ok((s, _)) => s
        };
        // Clone the socket so send and receive can run on separate threads.
        let ostream = istream.try_clone().unwrap();
        let (command_out, command_in) = mpsc::channel();
        let (frame_out, frame_in) = mpsc::channel();
        thread::spawn(|| send_thread(istream, frame_in));
        thread::spawn(|| receive_thread(ostream, command_out));
        let client = Client{frame_stream: frame_out, command_stream: command_in, player_id: None};
        let mut v = client_list.lock().unwrap();
        v.push(client);
    }
}
/// Create the empty, thread-shared client registry.
pub fn starting_client_list() -> Arc<Mutex<Vec<Client>>> {
    Arc::new(Mutex::new(Vec::new()))
}
fn send_thread(mut tcp_stream: net::TcpStream, frame_stream: mpsc::Receiver<Frame>)->io::Result<()>{
loop{
let mut frames=Vec::new();
let frame=match frame_stream.recv() {
Ok(f)=>f,
Err(e)=>return Err(io::Error::new(io::ErrorKind::UnexpectedEof, e))
};
frames.push(frame);
while let Ok(f)=frame_stream.try_recv(){
frames.push(f);
}
try!(tint::send_frames(&mut tcp_stream,frames));
}
}
fn receive_thread(mut tcp_stream: net::TcpStream, command_stream: mpsc::Sender<Command>)->io::Result<()>{
loop{
let mut cs=try!(tint::read_commands(&mut tcp_stream));
for c in cs.drain(..){
try!(command_stream.send(c).map_err(|e|io::Error::new(io::ErrorKind::Other, e)));
}
}
}
|
//! Code generation for OpenAPI v2.
#[cfg(feature = "cli")]
mod author;
mod emitter;
mod impls;
pub mod object;
mod state;
include!(concat!(env!("OUT_DIR"), "/template.rs"));
pub use self::{
emitter::{EmittedUnit, Emitter},
state::EmitterState,
};
use super::Schema;
use std::{fmt::Debug, marker::PhantomData};
/// Common conflicting keywords in Rust. An underscore will be added
/// to fields using these keywords.
const RUST_KEYWORDS: &[&str] = &[
"as", "break", "const", "continue", "crate", "dyn", "else", "enum", "extern", "false", "fn",
"for", "if", "impl", "in", "let", "loop", "match", "mod", "move", "mut", "pub", "ref",
"return", "self", "Self", "static", "struct", "super", "trait", "true", "type", "unsafe",
"use", "where", "while", "do",
];
/// Default emitter for anything that implements `Schema` trait.
///
/// This doesn't do anything special, as `Emitter` trait methods take
/// care of all the heavy load.
pub struct DefaultEmitter<S> {
    // Shared codegen state handed to the `Emitter` trait machinery.
    state: EmitterState,
    // Marks the schema type this emitter is specialized for without storing one.
    _schema: PhantomData<S>,
}
impl<S> From<EmitterState> for DefaultEmitter<S> {
fn from(state: EmitterState) -> Self {
DefaultEmitter {
state,
_schema: PhantomData,
}
}
}
// Only the state accessor is provided here; all emitting behavior comes
// from the `Emitter` trait's default methods (see the struct docs above
// `DefaultEmitter`).
impl<S: Schema + Debug> Emitter for DefaultEmitter<S> {
    type Definition = S;

    fn state(&self) -> &EmitterState {
        &self.state
    }
}
/// Metadata for generating a crate.
// `non_exhaustive` lets new fields be added without breaking downstream
// struct literals.
#[derive(Debug, Default, Clone)]
#[non_exhaustive]
pub struct CrateMeta {
    /// Name of the crate. If this is not specified, then the name of the
    /// working directory is assumed to be crate name.
    pub name: Option<String>,
    /// Version (defaults to 0.1.0)
    pub version: Option<String>,
    /// List of authors for this crate. Defaults to cargo's defaults.
    pub authors: Option<Vec<String>>,
    /// Whether we're planning to emit a lib, app or module.
    pub mode: EmitMode,
    /// Whether or not to make this a root crate.
    pub no_root: bool,
}
/// Mode of codegen (module, crate or CLI app).
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum EmitMode {
    /// Emit a module to be included in an existing crate.
    Module,
    /// Emit a standalone (lib) crate.
    Crate,
    /// Emit a CLI application.
    App,
}
impl Default for EmitMode {
fn default() -> Self {
// NOTE: Module by default - CLI will specify other modes if needed.
EmitMode::Module
}
}
|
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use bitflags::bitflags;
use crate::lexer::preprocessor::context::PreprocContext;
use crate::lexer::{Lexer, LocToken, Token};
use crate::parser::attributes::{Attributes, AttributesParser};
use crate::parser::declarations::Declarations;
use crate::parser::expression::{ExprNode, ExpressionParser};
use crate::parser::names::{Qualified, QualifiedParser};
use crate::parser::r#type::Type;
bitflags! {
    /// Access/virtual specifiers seen in a base-clause.
    pub struct Specifier: u8 {
        // The bitflags 1.x macro does not accept `pub` on individual flags
        // (visibility is taken from the struct); the generated consts are
        // public anyway.
        const PRIVATE = 0b1;
        const PUBLIC = 0b10;
        const PROTECTED = 0b100;
        const VIRTUAL = 0b1000;
    }
}
impl Specifier {
    /// Accumulate the specifier bit for `tok` into `self`.
    ///
    /// Returns `true` when `tok` was an access/virtual specifier, `false`
    /// for any other token so callers (e.g. the `while spec.from_tok(..)`
    /// scan in `DerivedParser::parse`) know to stop. The original matched a
    /// moved `Token` non-exhaustively and returned `()`, which neither
    /// compiled nor matched its call site.
    pub fn from_tok(&mut self, tok: &Token) -> bool {
        match tok {
            Token::Private => {
                *self |= Specifier::PRIVATE;
                true
            }
            Token::Public => {
                *self |= Specifier::PUBLIC;
                true
            }
            Token::Protected => {
                *self |= Specifier::PROTECTED;
                true
            }
            Token::Virtual => {
                *self |= Specifier::VIRTUAL;
                true
            }
            _ => false,
        }
    }
}
/// The class-key that introduced the type.
#[derive(Clone, Debug, PartialEq)]
pub enum Kind {
    Struct,
    Class,
    Union,
}
impl Kind {
    /// Map a class-key token (`struct`/`class`/`union`) to its `Kind`.
    ///
    /// The original signature `fn from_tok(&tok: Token)` tried to
    /// dereference a non-reference and did not compile; call sites pass
    /// `&tok.tok`, so take `&Token`.
    fn from_tok(tok: &Token) -> Option<Self> {
        match tok {
            Token::Struct => Some(Kind::Struct),
            Token::Class => Some(Kind::Class),
            Token::Union => Some(Kind::Union),
            _ => None,
        }
    }
}
/// One parsed base-specifier of a base-clause.
#[derive(Clone, Debug, PartialEq)]
pub struct Derived {
    pub attributes: Option<Attributes>,
    pub name: Qualified,
    // Access/virtual specifiers (e.g. `public virtual`).
    pub specifier: Specifier,
}
#[derive(Clone, Debug, PartialEq)]
pub struct Class {
pub kind: Kind,
pub attributes: Option<Attributes>,
pub name: Option<Qualified>,
pub final: bool,
pub bases: Vec<Derived>,
pub body: Option<Declarations>,
}
struct DerivedParser<'a, 'b, PC: PreprocContext> {
lexer: &'b mut Lexer<'a, PC>,
}
impl<'a, 'b, PC: PreprocContext> DerivedParser<'a, 'b, PC> {
fn new(lexer: &'b mut Lexer<'a, PC>) -> Self {
Self { lexer }
}
(fn parse(self, tok: Option<LocToken>) -> (Option<LocToken>, Option<Derived>) {
// optional: attributes
let ap = AttributesParser::new(self.lexer);
let (tok, attributes) = ap.parse(tok);
// access-specifier | virtual-specifier
let mut tok = tok.unwrap_or_else(|| self.lexer.next_useful());
let mut spec = Specifier::empty();
while spec.from_tok(&tok.tok) {
tok = self.lexer.next_useful();
}
// class or decltype
let qp = QualifiedParser::new(self.lexer);
let (tok, name) = qp.parse(Some(tok), None);
let name = if let Some(name) = name {
name
} else {
return (tok, None);
};
(tok, Some(Derived {
attributes,
name,
specifiers,
}))
}
}
struct BaseClauseParser<'a, 'b, PC: PreprocContext> {
    lexer: &'b mut Lexer<'a, PC>,
}

impl<'a, 'b, PC: PreprocContext> BaseClauseParser<'a, 'b, PC> {
    fn new(lexer: &'b mut Lexer<'a, PC>) -> Self {
        Self { lexer }
    }

    /// Parse an optional base-clause: `: base-specifier (, base-specifier)*`.
    /// Returns `(lookahead, None)` when the next token is not a colon.
    ///
    /// Fixes from the original: the loop rebound `tok` with iteration-scoped
    /// `let`s, so the lookahead token was dropped on every exit path and the
    /// value returned after the loop was the already-consumed colon token;
    /// also `Vec` has no `empty()` method (`is_empty()`).
    fn parse(self, tok: Option<LocToken>) -> (Option<LocToken>, Option<Vec<Derived>>) {
        let tok = tok.unwrap_or_else(|| self.lexer.next_useful());
        if tok.tok != Token::Colon {
            return (Some(tok), None);
        }
        let mut bases = Vec::new();
        // Carry the lookahead across iterations so it survives the loop.
        let mut tok: Option<LocToken> = None;
        loop {
            let dp = DerivedParser::new(self.lexer);
            let (next, derived) = dp.parse(None);
            tok = next;
            if let Some(derived) = derived {
                bases.push(derived);
            } else {
                break;
            }
            let next = tok.take().unwrap_or_else(|| self.lexer.next_useful());
            if next.tok != Token::Comma {
                // Not a comma: keep it as lookahead for the caller.
                tok = Some(next);
                break;
            }
            // Comma consumed; continue with the next base-specifier.
        }
        if bases.is_empty() {
            (tok, None)
        } else {
            (tok, Some(bases))
        }
    }
}
/// Parser for a class-specifier (class-head plus optional base-clause).
struct ClassParser<'a, 'b, PC: PreprocContext> {
    lexer: &'b mut Lexer<'a, PC>,
}
impl<'a, 'b, PC: PreprocContext> ClassParser<'a, 'b, PC> {
pub(super) fn new(lexer: &'b mut Lexer<'a, PC>) -> Self {
Self { lexer }
}
pub(super) fn parse(self, tok: Option<LocToken>) -> (Option<LocToken>, Option<Class>) {
let tok = tok.unwrap_or_else(|| self.lexer.next_useful());
let kind = if let Some(kind) = Kind::from_tok(&tok.tok) {
kind
} else {
return (Some(tok), None);
}
// optional: attributes
// TODO: alignas
let ap = AttributesParser::new(self.lexer);
let (tok, attributes) = ap.parse(tok);
// name
let qp = QualifiedParser::new(self.lexer);
let (tok, name) = qp.parse(tok, None);
let name = if let Some(name) = name {
name
} else {
unreachable!("Invalid token in clasee definition: {:?}", tok);
};
let tok = tok.unwrap_or_else(|| self.lexer.next_useful());
let (tok, final) = if tok.tok == Token::Final {
(None, true)
} else {
(Some(tok), false)
};
// optional: base-clause
let bcp = BaseClauseParser::new(self.lexer);
let (tok, bases) = bcp.parse(tok);
}
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
// Opaque WinRT runtime-class handles, projected as raw pointers.
pub type LocalLocation = *mut ::core::ffi::c_void;
pub type LocalLocationFinderResult = *mut ::core::ffi::c_void;
// Win32-metadata style enum: a transparent newtype over i32 whose variants
// are associated constants (generated binding — values are ABI-stable).
#[repr(transparent)]
pub struct LocalLocationFinderStatus(pub i32);
impl LocalLocationFinderStatus {
    pub const Success: Self = Self(0i32);
    pub const UnknownError: Self = Self(1i32);
    pub const InvalidCredentials: Self = Self(2i32);
    pub const InvalidCategory: Self = Self(3i32);
    pub const InvalidSearchTerm: Self = Self(4i32);
    pub const InvalidSearchArea: Self = Self(5i32);
    pub const NetworkFailure: Self = Self(6i32);
    pub const NotSupported: Self = Self(7i32);
}
impl ::core::marker::Copy for LocalLocationFinderStatus {}
impl ::core::clone::Clone for LocalLocationFinderStatus {
fn clone(&self) -> Self {
*self
}
}
// Opaque WinRT runtime-class handles, projected as raw pointers.
pub type LocalLocationHoursOfOperationItem = *mut ::core::ffi::c_void;
pub type LocalLocationRatingInfo = *mut ::core::ffi::c_void;
|
use gary_zmq::cluster_api::*;
use chrono::{DateTime, Utc};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
/// Start the ZMQ cluster API over the shared name -> last-seen-time map.
pub fn start(m: Arc<Mutex<HashMap<String, DateTime<Utc>>>>) {
    let api = ZmqClusterApi::new(m);
    api.run();
    // NOTE(review): printed after run() returns — confirm run() is non-blocking.
    println!("Cluster Api Running");
}
|
use crate::prelude::*;
use std::os::raw::c_void;
use std::ptr;
#[repr(C)]
#[derive(Debug, Clone)]
/// FFI mirror of Vulkan's VkDeviceQueueCreateInfo (field names/layout match
/// the C struct, hence the non-snake-case names).
pub struct VkDeviceQueueCreateInfo {
    pub sType: VkStructureType,
    pub pNext: *const c_void,
    pub flags: VkDeviceQueueCreateFlagBits,
    pub queueFamilyIndex: u32,
    pub queueCount: u32,
    // Raw pointer into caller-owned priority storage; the caller must keep
    // it alive for as long as this struct is used.
    pub pQueuePriorities: *const f32,
}
impl VkDeviceQueueCreateInfo {
    /// Build a queue-create info. `queueCount` is taken from the length of
    /// `queue_priorities`; note the struct stores a raw pointer into that
    /// slice, so its backing storage must outlive the returned value.
    pub fn new<T>(flags: T, queue_family_index: u32, queue_priorities: &[f32]) -> Self
    where
        T: Into<VkDeviceQueueCreateFlagBits>,
    {
        let queue_count = queue_priorities.len() as u32;
        Self {
            sType: VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
            pNext: ptr::null(),
            flags: flags.into(),
            queueFamilyIndex: queue_family_index,
            queueCount: queue_count,
            pQueuePriorities: queue_priorities.as_ptr(),
        }
    }
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Write;
use common_expression::types::*;
use common_expression::FromData;
use goldenfile::Mint;
use super::run_ast;
#[test]
fn test_cast() {
    // Golden-file test: every `run_ast` call appends rendered output to
    // tests/it/scalars/testdata/cast.txt, so the call order below matters.
    let mut mint = Mint::new("tests/it/scalars/testdata");
    let file = &mut mint.new_goldenfile("cast.txt").unwrap();
    // Each group runs twice: once for plain CAST, once for TRY_CAST.
    for is_try in [false, true] {
        test_cast_primitive(file, is_try);
        test_cast_to_variant(file, is_try);
        test_cast_number_to_timestamp(file, is_try);
        test_cast_number_to_date(file, is_try);
        test_cast_between_number_and_string(file, is_try);
        test_cast_between_boolean_and_string(file, is_try);
        test_cast_between_string_and_decimal(file, is_try);
        test_cast_between_number_and_boolean(file, is_try);
        test_cast_between_date_and_timestamp(file, is_try);
        test_cast_between_string_and_timestamp(file, is_try);
        // NOTE(review): name lacks the `cast_` prefix used by its siblings.
        test_between_string_and_date(file, is_try);
        test_cast_to_nested_type(file, is_try);
    }
}
// Casts between primitive scalar types; `is_try` swaps CAST for TRY_CAST.
fn test_cast_primitive(file: &mut impl Write, is_try: bool) {
    let prefix = if is_try { "TRY_" } else { "" };
    // Constant scalars, including NULLs and out-of-range 1024 -> UINT8.
    run_ast(file, format!("{prefix}CAST(0 AS UINT8)"), &[]);
    run_ast(file, format!("{prefix}CAST(0 AS UINT8 NULL)"), &[]);
    run_ast(file, format!("{prefix}CAST('str' AS STRING)"), &[]);
    run_ast(file, format!("{prefix}CAST('str' AS STRING NULL)"), &[]);
    run_ast(file, format!("{prefix}CAST(NULL AS UINT8)"), &[]);
    run_ast(file, format!("{prefix}CAST(NULL AS UINT8 NULL)"), &[]);
    run_ast(file, format!("{prefix}CAST(NULL AS STRING)"), &[]);
    run_ast(file, format!("{prefix}CAST(NULL AS STRING NULL)"), &[]);
    run_ast(file, format!("{prefix}CAST(1024 AS UINT8)"), &[]);
    // Integer columns: narrowing and widening conversions.
    run_ast(file, format!("{prefix}CAST(a AS UINT8)"), &[(
        "a",
        UInt16Type::from_data(vec![0u16, 64, 255, 512, 1024]),
    )]);
    run_ast(file, format!("{prefix}CAST(a AS UINT16)"), &[(
        "a",
        Int16Type::from_data(vec![0i16, 1, 2, 3, -4]),
    )]);
    run_ast(file, format!("{prefix}CAST(a AS INT64)"), &[(
        "a",
        Int16Type::from_data(vec![0i16, 1, 2, 3, -4]),
    )]);
    // Integer/float cross casts at unsigned and float extremes.
    run_ast(
        file,
        format!(
            "({prefix}CAST(a AS FLOAT32), {prefix}CAST(a AS INT32), {prefix}CAST(b AS FLOAT32), {prefix}CAST(b AS INT32))"
        ),
        &[
            (
                "a",
                UInt64Type::from_data(vec![
                    0,
                    1,
                    u8::MAX as u64,
                    u16::MAX as u64,
                    u32::MAX as u64,
                    u64::MAX,
                ]),
            ),
            (
                "b",
                Float64Type::from_data(vec![
                    0.0,
                    u32::MAX as f64,
                    u64::MAX as f64,
                    f64::MIN,
                    f64::MAX,
                    f64::INFINITY,
                ]),
            ),
        ],
    );
    // Nested array/tuple casts with values at the Int8/UInt8 boundaries.
    run_ast(
        file,
        format!("{prefix}CAST([[a, b], NULL, NULL] AS Array(Array(Int8)))"),
        &[
            ("a", Int16Type::from_data(vec![0i16, 1, 2, 127, 255])),
            ("b", Int16Type::from_data(vec![0i16, -1, -127, -128, -129])),
        ],
    );
    run_ast(
        file,
        format!("{prefix}CAST((a, b, NULL) AS TUPLE(Int8, UInt8, Boolean NULL))"),
        &[
            ("a", Int16Type::from_data(vec![0i16, 1, 2, 127, 256])),
            ("b", Int16Type::from_data(vec![0i16, 1, -127, -128, -129])),
        ],
    );
    // Float and integer columns into 16-bit targets.
    run_ast(file, format!("{prefix}CAST(a AS INT16)"), &[(
        "a",
        Float64Type::from_data(vec![0.0f64, 1.1, 2.2, 3.3, -4.4]),
    )]);
    run_ast(file, format!("{prefix}CAST(b AS INT16)"), &[(
        "b",
        Int8Type::from_data(vec![0i8, 1, 2, 3, -4]),
    )]);
    run_ast(file, format!("{prefix}CAST(a AS UINT16)"), &[(
        "a",
        Int16Type::from_data(vec![0i16, 1, 2, 3, -4]),
    )]);
    run_ast(file, format!("{prefix}CAST(c AS INT16)"), &[(
        "c",
        Int64Type::from_data(vec![0i64, 11111111111, 2, 3, -4]),
    )]);
}
// Casts of scalars, collections and columns into VARIANT.
fn test_cast_to_variant(file: &mut impl Write, is_try: bool) {
    let prefix = if is_try { "TRY_" } else { "" };
    // Scalars, arrays and timestamps to VARIANT.
    for expr in [
        "CAST(NULL AS VARIANT)",
        "CAST(0 AS VARIANT)",
        "CAST(-1 AS VARIANT)",
        "CAST(1.1 AS VARIANT)",
        "CAST('🍦 が美味しい' AS VARIANT)",
        "CAST([0, 1, 2] AS VARIANT)",
        "CAST([0::VARIANT, 'a'::VARIANT] AS VARIANT)",
        "CAST(to_timestamp(1000000) AS VARIANT)",
        "CAST(false AS VARIANT)",
        "CAST(true AS VARIANT)",
    ] {
        run_ast(file, format!("{prefix}{expr}"), &[]);
    }
    // VARIANT of VARIANT — the prefix applies to both casts.
    run_ast(
        file,
        format!("{prefix}CAST({prefix}CAST('🍦 が美味しい' AS VARIANT) AS VARIANT)"),
        &[],
    );
    // Tuples (including nested ones) to VARIANT.
    for expr in [
        "CAST((1,) AS VARIANT)",
        "CAST((1, 2) AS VARIANT)",
        "CAST((false, true) AS VARIANT)",
        "CAST(('a',) AS VARIANT)",
        "CAST((1, 2, (false, true, ('a',))) AS VARIANT)",
    ] {
        run_ast(file, format!("{prefix}{expr}"), &[]);
    }
    // Nullable string column to VARIANT.
    run_ast(file, format!("{prefix}CAST(a AS VARIANT)"), &[(
        "a",
        StringType::from_data_with_validity(vec!["a", "bc", "def"], vec![true, false, true]),
    )]);
}
// Integer <-> TIMESTAMP casts around the valid range boundaries.
fn test_cast_number_to_timestamp(file: &mut impl Write, is_try: bool) {
    let prefix = if is_try { "TRY_" } else { "" };
    // Constant integers to TIMESTAMP, including both out-of-range extremes.
    for num in [
        "-30610224000000001",
        "-315360000000000",
        "-315360000000",
        "-100",
        "-0",
        "0",
        "100",
        "315360000000",
        "315360000000000",
        "253402300800000000",
    ] {
        run_ast(file, format!("{prefix}CAST({num} AS TIMESTAMP)"), &[]);
    }
    run_ast(file, format!("{prefix}CAST(a AS TIMESTAMP)"), &[(
        "a",
        Int64Type::from_data(vec![
            -315360000000000i64,
            -315360000000,
            -100,
            0,
            100,
            315360000000,
            315360000000000,
        ]),
    )]);
    // Round trip: TO_TIMESTAMP back to INT64.
    for num in [
        "-315360000000000",
        "-315360000000",
        "-100",
        "-0",
        "0",
        "100",
        "315360000000",
        "315360000000000",
    ] {
        run_ast(file, format!("{prefix}CAST(TO_TIMESTAMP({num}) AS INT64)"), &[]);
    }
    run_ast(file, format!("{prefix}CAST(a AS INT64)"), &[(
        "a",
        TimestampType::from_data(vec![
            -315360000000000,
            -315360000000,
            -100,
            0,
            100,
            315360000000,
            315360000000000,
        ]),
    )]);
}
// Integer <-> DATE casts around the valid date range boundaries.
fn test_cast_number_to_date(file: &mut impl Write, is_try: bool) {
    let prefix = if is_try { "TRY_" } else { "" };
    // Constant integers to DATE, including both out-of-range extremes.
    for num in ["-354286", "-354285", "-100", "-0", "0", "100", "2932896", "2932897"] {
        run_ast(file, format!("{prefix}CAST({num} AS DATE)"), &[]);
    }
    run_ast(file, format!("{prefix}CAST(a AS DATE)"), &[(
        "a",
        Int32Type::from_data(vec![-354285, -100, 0, 100, 2932896]),
    )]);
    // Round trip: TO_DATE back to INT64.
    for num in ["-354285", "-100", "-0", "0", "100", "2932896"] {
        run_ast(file, format!("{prefix}CAST(TO_DATE({num}) AS INT64)"), &[]);
    }
    run_ast(file, format!("{prefix}CAST(a AS INT64)"), &[(
        "a",
        DateType::from_data(vec![-354285, -100, 0, 100, 2932896]),
    )]);
}
// Number <-> BOOLEAN casts, constants first, then columns.
fn test_cast_between_number_and_boolean(file: &mut impl Write, is_try: bool) {
    let prefix = if is_try { "TRY_" } else { "" };
    // Constant conversions in both directions.
    for expr in [
        "CAST(0 AS BOOLEAN)",
        "CAST(1 AS BOOLEAN)",
        "CAST(false AS UINT64)",
        "CAST(true AS INT64)",
        "CAST(0.0 AS BOOLEAN)",
        "CAST(1.0 AS BOOLEAN)",
        "CAST(false AS FLOAT32)",
        "CAST(true AS FLOAT64)",
    ] {
        run_ast(file, format!("{prefix}{expr}"), &[]);
    }
    // Column conversions.
    run_ast(file, format!("{prefix}CAST(num AS BOOLEAN)"), &[(
        "num",
        Int64Type::from_data(vec![0i64, -1, 1, 2]),
    )]);
    run_ast(file, format!("{prefix}CAST(num AS BOOLEAN)"), &[(
        "num",
        UInt64Type::from_data(vec![0u64, 1, 2]),
    )]);
    run_ast(file, format!("{prefix}CAST(bool AS UINT64)"), &[(
        "bool",
        BooleanType::from_data(vec![false, true]),
    )]);
    run_ast(file, format!("{prefix}CAST(bool AS INT64)"), &[(
        "bool",
        BooleanType::from_data(vec![false, true]),
    )]);
}
// Number <-> STRING casts, including unparsable and out-of-range strings.
fn test_cast_between_number_and_string(file: &mut impl Write, is_try: bool) {
    let prefix = if is_try { "TRY_" } else { "" };
    // Constant strings to numbers.
    for expr in [
        "CAST('foo' AS UINT64)",
        "CAST('1foo' AS INT32)",
        "CAST('-1' AS UINT64)",
        "CAST('256' AS UINT8)",
        "CAST('1' AS UINT64)",
    ] {
        run_ast(file, format!("{prefix}{expr}"), &[]);
    }
    // String columns to INT64, with and without a validity bitmap.
    run_ast(file, format!("{prefix}CAST(str AS INT64)"), &[(
        "str",
        StringType::from_data(vec![
            "-9223372036854775808",
            "-1",
            "0",
            "1",
            "9223372036854775807",
        ]),
    )]);
    run_ast(file, format!("{prefix}CAST(str AS INT64)"), &[(
        "str",
        StringType::from_data_with_validity(vec!["foo", "foo", "0", "0"], vec![
            true, false, true, false,
        ]),
    )]);
    // Number columns to STRING at the signed/unsigned extremes.
    run_ast(file, format!("{prefix}CAST(num AS STRING)"), &[(
        "num",
        Int64Type::from_data(vec![i64::MIN, -1, 0, 1, i64::MAX]),
    )]);
    run_ast(file, format!("{prefix}CAST(num AS STRING)"), &[(
        "num",
        UInt64Type::from_data(vec![0, 1, u64::MAX]),
    )]);
}
// BOOLEAN <-> STRING casts with assorted truthy/falsy spellings.
fn test_cast_between_boolean_and_string(file: &mut impl Write, is_try: bool) {
    let prefix = if is_try { "TRY_" } else { "" };
    for literal in ["'t'", "'f'", "'0'", "'1'", "'true'", "'false'", "'TRUE'", "'FaLse'"] {
        run_ast(file, format!("{prefix}CAST({literal} AS BOOLEAN)"), &[]);
    }
    run_ast(file, format!("{prefix}CAST(bool AS STRING)"), &[(
        "bool",
        BooleanType::from_data(vec![false, true]),
    )]);
}
// DATE <-> TIMESTAMP casts, including overflowing i64::MAX inputs.
fn test_cast_between_date_and_timestamp(file: &mut impl Write, is_try: bool) {
    let prefix = if is_try { "TRY_" } else { "" };
    run_ast(file, format!("{prefix}CAST(TO_DATE(1) AS TIMESTAMP)"), &[]);
    run_ast(file, format!("{prefix}CAST(TO_TIMESTAMP(1) AS DATE)"), &[]);
    // Timestamp column to DATE.
    let timestamps = TimestampType::from_data(vec![
        -315360000000000,
        -315360000000,
        -100,
        0,
        100,
        315360000000,
        315360000000000,
    ]);
    run_ast(file, format!("{prefix}CAST(a AS DATE)"), &[("a", timestamps)]);
    // Date column to TIMESTAMP, directly and via TO_DATE.
    let dates = DateType::from_data(vec![-354285, -100, 0, 100, 2932896]);
    run_ast(file, format!("{prefix}CAST(a AS TIMESTAMP)"), &[("a", dates)]);
    let days = Int32Type::from_data(vec![-354285, -100, 0, 100, 2932896]);
    run_ast(file, format!("{prefix}CAST(TO_DATE(a) AS TIMESTAMP)"), &[("a", days)]);
    // i64::MAX is out of range for both target types.
    let huge = Int64Type::from_data(vec![i64::MAX]);
    run_ast(file, format!("{prefix}CAST(a AS TIMESTAMP)"), &[("a", huge)]);
    let huge = Int64Type::from_data(vec![i64::MAX]);
    run_ast(file, format!("{prefix}CAST(a AS DATE)"), &[("a", huge)]);
}
// STRING <-> TIMESTAMP conversions via TO_TIMESTAMP and CAST ... AS VARCHAR.
fn test_cast_between_string_and_timestamp(file: &mut impl Write, is_try: bool) {
    let prefix = if is_try { "TRY_" } else { "" };
    // Literals through TO_TIMESTAMP, valid and invalid formats alike.
    for literal in [
        "'2022'",
        "'2022-01'",
        "'2022-01-02'",
        "'A NON-TIMESTMAP STR'",
        "'2022-01-02T03:25:02.868894-07:00'",
        "'2022-01-02 02:00:11'",
        "'2022-01-02T02:00:22'",
        "'2022-01-02T01:12:00-07:00'",
        "'2022-01-02T01'",
    ] {
        run_ast(file, format!("{prefix}TO_TIMESTAMP({literal})"), &[]);
    }
    run_ast(file, format!("{prefix}TO_TIMESTAMP(a)"), &[(
        "a",
        StringType::from_data(vec![
            "2022-01-02",
            "2022-01-02T03:25:02.868894-07:00",
            "2022-01-02 02:00:11",
            "2022-01-02T01:12:00-07:00",
            "2022-01-02T01",
        ]),
    )]);
    // Timestamps formatted back to VARCHAR.
    for num in [
        "-315360000000000",
        "-315360000000",
        "-100",
        "-0",
        "0",
        "100",
        "315360000000",
        "315360000000000",
    ] {
        run_ast(file, format!("{prefix}CAST(TO_TIMESTAMP({num}) AS VARCHAR)"), &[]);
    }
    run_ast(file, format!("{prefix}CAST(a AS VARCHAR)"), &[(
        "a",
        TimestampType::from_data(vec![
            -315360000000000,
            -315360000000,
            -100,
            0,
            100,
            315360000000,
            315360000000000,
        ]),
    )]);
}
// STRING <-> DATE conversions via TO_DATE and CAST ... AS VARCHAR.
fn test_between_string_and_date(file: &mut impl Write, is_try: bool) {
    let prefix = if is_try { "TRY_" } else { "" };
    // Literals through TO_DATE, valid and invalid formats alike.
    for literal in [
        "'2022'",
        "'2022-01'",
        "'2022-01-02'",
        "'A NON-DATE STR'",
        "'2022-01-02T03:25:02.868894-07:00'",
        "'2022-01-02 02:00:11'",
        "'2022-01-02T02:00:22'",
        "'2022-01-02T01:12:00-07:00'",
        "'2022-01-02T01'",
    ] {
        run_ast(file, format!("{prefix}TO_DATE({literal})"), &[]);
    }
    run_ast(file, format!("{prefix}TO_DATE(a)"), &[(
        "a",
        StringType::from_data(vec![
            "2022-01-02",
            "2022-01-02T03:25:02.868894-07:00",
            "2022-01-02 02:00:11",
            "2022-01-02T01:12:00-07:00",
            "2022-01-02T01",
        ]),
    )]);
    // Dates formatted back to VARCHAR.
    for num in ["-354285", "-100", "-0", "0", "100", "2932896"] {
        run_ast(file, format!("{prefix}CAST(TO_DATE({num}) AS VARCHAR)"), &[]);
    }
    run_ast(file, format!("{prefix}CAST(a AS VARCHAR)"), &[(
        "a",
        DateType::from_data(vec![-354285, -100, 0, 100, 2932896]),
    )]);
}
fn test_cast_to_nested_type(file: &mut impl Write, is_try: bool) {
let prefix = if is_try { "TRY_" } else { "" };
run_ast(
file,
format!("{prefix}CAST((1, TRUE) AS Tuple(STRING))"),
&[],
);
run_ast(file, format!("{prefix}CAST(('a',) AS Tuple(INT))"), &[]);
run_ast(
file,
format!("{prefix}CAST(((1, TRUE), 1) AS Tuple(Tuple(INT, INT), INT))"),
&[],
);
run_ast(
file,
format!("{prefix}CAST(TRY_CAST(1 AS INT32) AS INT32)"),
&[],
);
run_ast(
file,
format!("{prefix}CAST(((1, 'a'), 1) AS Tuple(Tuple(INT, INT NULL), INT))"),
&[],
);
run_ast(
file,
format!("{prefix}CAST(((1, 'a'), 1) AS Tuple(Tuple(INT, INT), INT) NULL)"),
&[],
);
run_ast(
file,
format!("{prefix}CAST([(1,TRUE),(2,FALSE)] AS Array(Tuple(INT, INT)))"),
&[],
);
run_ast(
file,
format!("{prefix}CAST([(1,'a'),(2,'a')] AS Array(Tuple(INT, INT)) NULL)"),
&[],
);
run_ast(
file,
format!("{prefix}CAST([(1,'a'),(2,'a')] AS Array(Tuple(INT, INT NULL)))"),
&[],
);
run_ast(
file,
format!("{prefix}CAST([[TRUE], [FALSE, TRUE]] AS Array(Array(INT)))"),
&[],
);
run_ast(
file,
format!("{prefix}CAST([['a'], ['b', 'c']] AS Array(Array(INT) NULL))"),
&[],
);
}
fn test_cast_between_string_and_decimal(file: &mut impl Write, is_try: bool) {
let prefix = if is_try { "TRY_" } else { "" };
run_ast(file, format!("{prefix}CAST('010.010' AS DECIMAL(5,3))"), &[
]);
run_ast(file, format!("{prefix}CAST('010.010' AS DECIMAL(5,4))"), &[
]);
run_ast(file, format!("{prefix}CAST('010.010' AS DECIMAL(5,2))"), &[
]);
run_ast(file, format!("{prefix}CAST('010.010' AS DECIMAL(4,3))"), &[
]);
run_ast(file, format!("{prefix}CAST('010.010' AS DECIMAL(4,2))"), &[
]);
run_ast(
file,
format!("{prefix}CAST('-1010.010' AS DECIMAL(7,3))"),
&[],
);
run_ast(file, format!("{prefix}CAST('00' AS DECIMAL(2,1))"), &[]);
run_ast(file, format!("{prefix}CAST('0.0' AS DECIMAL(2,0))"), &[]);
run_ast(file, format!("{prefix}CAST('.0' AS DECIMAL(1,0))"), &[]);
run_ast(
file,
format!("{prefix}CAST('+1.0e-10' AS DECIMAL(11, 10))"),
&[],
);
run_ast(
file,
format!("{prefix}CAST('-1.0e+10' AS DECIMAL(11, 0))"),
&[],
);
}
|
use std::io;
pub mod bcm2835;
pub mod epd;
use epd::doodle;
use epd::doodle::Color;
/// Simple stdin REPL for drawing on the e-paper frame buffer.
/// Commands: exit, clear, draw, new point/rect; anything else prints help.
pub fn interactive() {
    epd::epd_init();
    let mut buff = epd::Frame::new();
    println!("Welcome to doodle interactive!");
    loop {
        // Read one line and split it into whitespace-separated words.
        let mut line = String::new();
        io::stdin().read_line(&mut line).expect("welp input is broken?");
        let mut words = line.split_whitespace();
        match words.next().unwrap_or("help") {
            "exit" => break,
            "clear" => buff.clear(),
            "draw" => epd::display_color_frame(&buff),
            "new" => interactive_new(&mut words, &mut buff),
            _ => println!("Help:\nCommands {{exit;clear;draw;new {{point(x, y, color);rect(x, y, width, height, color)}}}}"),
        };
    }
    // Put the display back to sleep on exit.
    epd::sleep();
}
/// Handle `new point ...` / `new rect ...`: parse the remaining words and
/// draw into the frame buffer; on any parse failure print a usage message
/// identifying the first missing/invalid argument.
fn interactive_new(command: &mut std::str::SplitWhitespace, buff: &mut epd::Frame) {
    let print_usage = |msg| println!("{}\nUsage: new point {{x, y, color}}|rect {{x, y, width, height, color}} ", msg);
    // Next word as a color name, if recognised.
    let parse_color = |command: &mut std::str::SplitWhitespace| match command.next() {
        Some("white") => Some(Color::White),
        Some("red") => Some(Color::Red),
        Some("black") => Some(Color::Black),
        _ => None,
    };
    // Next word as a signed integer coordinate/size.
    let parse_coord = |command: &mut std::str::SplitWhitespace| {
        command.next().and_then(|n| n.parse::<isize>().ok())
    };
    let shape = match command.next() {
        Some(shape) => shape,
        None => return print_usage("Type not found"),
    };
    match shape {
        "point" => {
            let x = match parse_coord(&mut *command) {
                Some(x) => x,
                None => return print_usage("Can not parse/find the x coordinate."),
            };
            let y = match parse_coord(&mut *command) {
                Some(y) => y,
                None => return print_usage("Can not parse/find the y coordinate."),
            };
            match parse_color(&mut *command) {
                Some(color) => doodle::draw_point(buff, doodle::Point { x, y }, color),
                None => print_usage("Can not parse/find the color."),
            }
        }
        "rect" => {
            let x = match parse_coord(&mut *command) {
                Some(x) => x,
                None => return print_usage("Can not parse/find the x coordinate."),
            };
            let y = match parse_coord(&mut *command) {
                Some(y) => y,
                None => return print_usage("Can not parse/find the y coordinate."),
            };
            let width = match parse_coord(&mut *command) {
                Some(width) => width,
                None => return print_usage("Can not parse/find the width."),
            };
            let height = match parse_coord(&mut *command) {
                Some(height) => height,
                None => return print_usage("Can not parse/find the height."),
            };
            match parse_color(&mut *command) {
                Some(color) => doodle::draw_rect(buff, doodle::Point { x, y }, width, height, color),
                None => print_usage("Can not parse/find the color."),
            }
        }
        _ => print_usage("No such type."),
    }
}
/// Wake the display, draw a rectangle, show it, then sleep again.
/// NOTE(review): name says "white" but the rectangle is Color::Red — confirm intent.
pub fn wake_draw_white_sleep() {
    epd::epd_init();
    let mut frame = epd::Frame::new();
    doodle::draw_rect(&mut frame, doodle::Point { x: 20, y: 70 }, 84, 60, Color::Red);
    epd::display_color_frame(&frame);
    epd::sleep();
}
|
#![allow(dead_code)]
extern crate cgmath;
extern crate embree;
extern crate rand;
extern crate rayon;
extern crate support;
extern crate tobj;
use std::path::Path;
use cgmath::{InnerSpace, Matrix3, Point2, Vector2, Vector3, Vector4};
use embree::{Device, Geometry, IntersectContext, Ray, RayHit, Scene, TriangleMesh};
use rand::prelude::*;
use rayon::prelude::*;
use support::{Camera, AABB};
/// Sample a point inside the unit disk via Shirley's concentric mapping.
pub fn concentric_sample_disk(u: Point2<f32>) -> Point2<f32> {
    // Map the uniform random numbers to $[-1,1]^2$.
    let u_offset: Point2<f32> = u * 2.0f32 - Vector2 { x: 1.0, y: 1.0 };
    // Handle the degeneracy at the origin.
    if u_offset.x == 0.0f32 && u_offset.y == 0.0f32 {
        return Point2 { x: 0.0, y: 0.0 };
    }
    // Apply the concentric mapping: pick the octant by the larger component.
    let (r, theta) = if u_offset.x.abs() > u_offset.y.abs() {
        (
            u_offset.x,
            std::f32::consts::FRAC_PI_4 * (u_offset.y / u_offset.x),
        )
    } else {
        (
            u_offset.y,
            std::f32::consts::FRAC_PI_2 - std::f32::consts::FRAC_PI_4 * (u_offset.x / u_offset.y),
        )
    };
    Point2 {
        x: theta.cos(),
        y: theta.sin(),
    } * r
}
/// Sample a cosine-weighted direction on the upper hemisphere by lifting a
/// concentric disk sample onto the unit sphere (Malley's method).
pub fn cosine_sample_hemisphere(u: Point2<f32>) -> Vector3<f32> {
    let d = concentric_sample_disk(u);
    // Clamp before sqrt to guard against tiny negative round-off.
    let z = (1.0f32 - d.x * d.x - d.y * d.y).max(0.0f32).sqrt();
    Vector3 { x: d.x, y: d.y, z }
}
// "Building an Orthonormal Basis, Revisited" by Duff et al., JCGT, 2017
// http://jcgt.org/published/0006/01/01/
pub struct Frame(Matrix3<f32>);
impl Frame {
pub fn new(n: Vector3<f32>) -> Frame {
let sign = n.z.signum();
let a = -1.0 / (sign + n.z);
let b = n.x * n.y * a;
Frame {
0: Matrix3 {
x: Vector3::new(1.0 + sign * n.x * n.x * a, sign * b, -sign * n.x),
y: Vector3::new(b, sign + n.y * n.y * a, -n.y),
z: n,
},
}
}
pub fn to_world(&self, v: Vector3<f32>) -> Vector3<f32> {
self.0.x * v.x + self.0.y * v.y + self.0.z * v.z
}
pub fn to_local(&self, v: Vector3<f32>) -> Vector3<f32> {
Vector3::new(v.dot(self.0.x), v.dot(self.0.y), v.dot(self.0.z))
}
}
// An example of a custom structure that encapsulates the embree
// committed scene.
pub struct AOIntegrator<'embree> {
    // Camera (can be updated using the update_cam method).
    camera: Camera,
    // A borrowed committed scene.
    // Note: the lifetimes for the borrow and the device are the same,
    // which is fine in our case.
    rtscene: &'embree embree::CommittedScene<'embree>,
    // List of models loaded from tobj.
    models: Vec<tobj::Model>,
    // Mesh ids (to map embree intersections back to the models list).
    mesh_ids: Vec<u32>,
    // Max distance used when computing the AO; None means unbounded.
    max_distance: Option<f32>,
}
impl<'embree> AOIntegrator<'embree> {
    // Update the camera parameter.
    // Returns true if the camera parameter changed.
    pub fn update_cam(&mut self, camera: Camera) -> bool {
        if self.camera == camera {
            false
        } else {
            self.camera = camera;
            true
        }
    }
    // Simple AO computation: returns one ambient-occlusion sample (0.0 or
    // 1.0) for pixel (i, j) using the 2D random sample `u`.
    pub fn render(&self, i: u32, j: u32, u: Point2<f32>) -> f32 {
        // Primary ray through the pixel center.
        let dir = self.camera.ray_dir((i as f32 + 0.5, j as f32 + 0.5));
        let ray = Ray::new(self.camera.pos, dir);
        let mut ray_hit = RayHit::new(ray);
        let mut intersection_ctx = IntersectContext::coherent();
        self.rtscene.intersect(&mut intersection_ctx, &mut ray_hit);
        if ray_hit.hit.hit() {
            let mesh = &self.models[self.mesh_ids[ray_hit.hit.geomID as usize] as usize].mesh;
            // Compute the normal at the intersection point.
            let mut n = {
                if !mesh.normals.is_empty() {
                    // In this case, we will interpolate the normals.
                    // Note that this operation is supported by embree (internal).
                    let prim = ray_hit.hit.primID as usize;
                    let tri = [
                        mesh.indices[prim * 3] as usize,
                        mesh.indices[prim * 3 + 1] as usize,
                        mesh.indices[prim * 3 + 2] as usize,
                    ];
                    // Retrieve the three vertex normals.
                    let na = Vector3::new(
                        mesh.normals[tri[0] * 3],
                        mesh.normals[tri[0] * 3 + 1],
                        mesh.normals[tri[0] * 3 + 2],
                    );
                    let nb = Vector3::new(
                        mesh.normals[tri[1] * 3],
                        mesh.normals[tri[1] * 3 + 1],
                        mesh.normals[tri[1] * 3 + 2],
                    );
                    let nc = Vector3::new(
                        mesh.normals[tri[2] * 3],
                        mesh.normals[tri[2] * 3 + 1],
                        mesh.normals[tri[2] * 3 + 2],
                    );
                    // Interpolate with the barycentric coordinates of the hit.
                    let w = 1.0 - ray_hit.hit.u - ray_hit.hit.v;
                    (na * w + nb * ray_hit.hit.u + nc * ray_hit.hit.v).normalize()
                } else {
                    // As the mesh normal is not provided,
                    // we will use the geometric normal;
                    // fortunately, embree computes this information for us.
                    Vector3::new(ray_hit.hit.Ng_x, ray_hit.hit.Ng_y, ray_hit.hit.Ng_z).normalize()
                }
            };
            // We flip the normal automatically in this case.
            if n.dot(dir) > 0.0 {
                n *= -1.0;
            }
            // Create the local shading frame around the (possibly flipped) normal.
            let frame = Frame::new(n);
            let p = self.camera.pos + dir * ray_hit.ray.tfar;
            // Do cosine-weighted sampling of the outgoing direction;
            // note that we will not evaluate the cosine term from this point
            // as it gets canceled by the PDF.
            let dir = frame.to_world(cosine_sample_hemisphere(u));
            // Launch a second ray from the intersection point.
            let ray = Ray::new(p, dir);
            let mut ray_hit = RayHit::new(ray);
            ray_hit.ray.tnear = 0.00001; // Avoid self intersection
            let mut intersection_ctx = IntersectContext::incoherent();
            self.rtscene.intersect(&mut intersection_ctx, &mut ray_hit);
            if ray_hit.hit.hit() {
                // Occluder found: count it only if it is beyond max_distance.
                match self.max_distance {
                    None => 0.0,
                    Some(t) => {
                        if ray_hit.ray.tfar > t {
                            1.0
                        } else {
                            0.0
                        }
                    }
                }
            } else {
                1.0
            }
        } else {
            0.0
        }
    }
}
/// OBJ AO viewer entry point. Usage: <obj_path> [max_distance].
fn main() {
    let mut display = support::Display::new(512, 512, "OBJ AO Viewer");
    let device = Device::new();
    // Expect <obj_path> [max_distance]
    let args: Vec<_> = std::env::args().collect();
    // Get the distance. If nothing provided
    // use infinity
    let max_distance = match args.len() {
        1 => panic!("Need to provide obj path argument"),
        2 => None,
        3 => {
            let d = args[2]
                .parse::<f32>()
                .expect("Impossible to parse the max distance: need to be float");
            if d <= 0.0 {
                // Pass format args to panic! directly; panic!(format!(...)) is
                // rejected in Rust 2021 (non-literal format string).
                panic!("Max distance need to be more than 0.0 ({})", d);
            }
            Some(d)
        }
        _ => panic!(
            "Too much arguments provided. Only supporting obj path and max distance arguments"
        ),
    };
    // Load the obj
    let (models, _) = tobj::load_obj(&Path::new(&args[1])).unwrap();
    let mut tri_geoms = Vec::new();
    // Scene bounding box, grown as vertices are read (used to frame the view).
    let mut aabb = AABB::default();
    for m in models.iter() {
        let mesh = &m.mesh;
        println!(
            "Mesh has {} triangles and {} verts",
            mesh.indices.len() / 3,
            mesh.positions.len() / 3
        );
        let mut tris =
            TriangleMesh::unanimated(&device, mesh.indices.len() / 3, mesh.positions.len() / 3);
        {
            // Fill embree's vertex and index buffers from the tobj mesh.
            let mut verts = tris.vertex_buffer.map();
            let mut tris = tris.index_buffer.map();
            for i in 0..mesh.positions.len() / 3 {
                aabb = aabb.union_vec(&Vector3::new(
                    mesh.positions[i * 3],
                    mesh.positions[i * 3 + 1],
                    mesh.positions[i * 3 + 2],
                ));
                verts[i] = Vector4::new(
                    mesh.positions[i * 3],
                    mesh.positions[i * 3 + 1],
                    mesh.positions[i * 3 + 2],
                    0.0,
                );
            }
            for i in 0..mesh.indices.len() / 3 {
                tris[i] = Vector3::new(
                    mesh.indices[i * 3],
                    mesh.indices[i * 3 + 1],
                    mesh.indices[i * 3 + 2],
                );
            }
        }
        let mut tri_geom = Geometry::Triangle(tris);
        tri_geom.commit();
        tri_geoms.push(tri_geom);
    }
    display = display.aabb(aabb);
    println!("Commit the scene ... ");
    let mut scene = Scene::new(&device);
    let mut mesh_ids = Vec::with_capacity(models.len());
    for g in tri_geoms.drain(0..) {
        let id = scene.attach_geometry(g);
        mesh_ids.push(id);
    }
    let rtscene = scene.commit();
    // Create my custom object that will compute the ambient occlusion
    let mut scene = AOIntegrator {
        models,
        mesh_ids,
        camera: Camera::look_at(
            Vector3::new(-1.0, 0.0, 0.0),
            Vector3::new(0.0, 0.0, 0.0),
            Vector3::new(0.0, 1.0, 0.0),
            55.0,
            (512, 512),
        ),
        rtscene: &rtscene,
        max_distance,
    };
    // Variables to average the AO computation across frames
    let mut spp = 0;
    let mut img = Vec::new();
    println!("Rendering launched ... ");
    display.run(|image, camera_pose, _| {
        for p in image.iter_mut() {
            *p = 0;
        }
        let img_dims = image.dimensions();
        // Update the camera
        if scene.update_cam(Camera::look_dir(
            camera_pose.pos,
            camera_pose.dir,
            camera_pose.up,
            55.0,
            img_dims,
        )) {
            // If the camera has moved, we clear previously accumulated results.
            spp = 0;
            img.resize((img_dims.0 * img_dims.1) as usize, 0.0);
            for i in &mut img {
                (*i) = 0.0;
            }
        }
        // Render the scene with Rayon. Here each pixel computes 1 spp of AO
        img.par_chunks_mut(image.width() as usize)
            .enumerate()
            .for_each(|(y, row)| {
                let mut rng = rand::thread_rng();
                for (x, p) in row.iter_mut().enumerate() {
                    let u = Point2::new(rng.gen(), rng.gen());
                    // Running average over the accumulated samples
                    (*p) =
                        (*p * spp as f32 + scene.render(x as u32, y as u32, u)) / (spp + 1) as f32;
                }
            });
        spp += 1;
        // Copy the accumulated result inside the image buffer
        let raw_out = image.as_mut();
        raw_out.chunks_mut(3).zip(img.iter()).for_each(|(p, v)| {
            p[0] = (v * 255.0) as u8;
            p[1] = (v * 255.0) as u8;
            p[2] = (v * 255.0) as u8;
        });
    });
}
|
use super::character::*;
use components::*;
/// Assemble the builder for the "guard1" character entity.
pub fn new_guard() -> CharacterBuilder {
    // Sprite sheet used for the guard's walk cycle.
    let guard_render = EntityRender {
        sprite_id: "/dungeon/walk_cycle.png".to_owned(),
        width: 16,
        height: 24,
        start_frame: 88,
        frame: 88,
    };
    let mut guard = CharacterBuilder::new();
    guard.id("guard1");
    guard.render(guard_render);
    guard.t_pos(TilePosition::new(35, 20, 0));
    guard.anim("idle", vec![88]);
    guard.start_state(Box::new(character::WaitState));
    // Walk-cycle animations: one row of four frames per facing direction.
    guard.anim(Facing::Up.name(), vec![80, 81, 82, 83]);
    guard.anim(Facing::Right.name(), vec![84, 85, 86, 87]);
    guard.anim(Facing::Down.name(), vec![88, 89, 90, 91]);
    guard.anim(Facing::Left.name(), vec![92, 93, 94, 95]);
    guard
}
|
pub mod get_field_offset;
pub mod get_field_scale;
pub mod get_field_string_value;
pub mod get_field_type;
|
use super::*;
/// A local pid compared against a number, atom, reference, function or port
/// is never `<` — those term classes sort before pids in Erlang term order.
#[test]
fn with_number_atom_reference_function_or_port_returns_false() {
    run!(
        |arc_process| {
            (
                strategy::term::pid::local(),
                strategy::term::number_atom_reference_function_or_port(arc_process),
            )
        },
        |(left, right)| {
            prop_assert_eq!(result(left, right), false.into());
            Ok(())
        },
    );
}
/// Left pid `Pid::make_term(0, 1)` is not less than the lesser right pid
/// built from `Pid::make_term(0, 0)`.
#[test]
fn with_lesser_local_pid_right_returns_false() {
    is_less_than(|_, _| Pid::make_term(0, 0).unwrap(), false);
}
/// A pid is not less than itself (the right closure returns the left term unchanged).
#[test]
fn with_same_local_pid_right_returns_false() {
    is_less_than(|left, _| left, false);
}
/// A distinct right pid built from the same `(0, 1)` arguments compares equal,
/// so `<` is still false.
#[test]
fn with_same_value_local_pid_right_returns_false() {
    is_less_than(|_, _| Pid::make_term(0, 1).unwrap(), false);
}
/// The right pid built from `Pid::make_term(1, 1)` is greater than the left's
/// `(0, 1)`, so `<` holds.
#[test]
fn with_greater_local_pid_right_returns_true() {
    is_less_than(|_, _| Pid::make_term(1, 1).unwrap(), true);
}
/// A local pid always compares less than an external pid.
#[test]
fn with_external_pid_right_returns_true() {
    is_less_than(
        |_, process| process.external_pid(external_arc_node(), 2, 3).unwrap(),
        true,
    );
}
/// Tuples, maps, lists and bitstrings sort after pids in Erlang term order,
/// so a local pid on the left is always `<` them.
#[test]
fn with_tuple_map_list_or_bitstring_returns_true() {
    run!(
        |arc_process| {
            (
                strategy::term::pid::local(),
                strategy::term::tuple_map_list_or_bitstring(arc_process),
            )
        },
        |(left, right)| {
            prop_assert_eq!(result(left, right), true.into());
            Ok(())
        },
    );
}
/// Shared harness: the left operand is fixed to the local pid built from
/// `Pid::make_term(0, 1)`; `right` builds the right operand, and `expected`
/// is the expected truth of `left < right`.
fn is_less_than<R>(right: R, expected: bool)
where
    R: FnOnce(Term, &Process) -> Term,
{
    super::is_less_than(|_| Pid::make_term(0, 1).unwrap(), right, expected);
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::any::Any;
use std::cmp::Reverse;
use std::collections::BinaryHeap;
use std::collections::VecDeque;
use std::sync::Arc;
use common_arrow::arrow::compute::sort::row::RowConverter as ArrowRowConverter;
use common_arrow::arrow::compute::sort::row::Rows as ArrowRows;
use common_exception::ErrorCode;
use common_exception::Result;
use common_expression::types::DataType;
use common_expression::types::DateType;
use common_expression::types::NumberDataType;
use common_expression::types::NumberType;
use common_expression::types::StringType;
use common_expression::types::TimestampType;
use common_expression::with_number_mapped_type;
use common_expression::DataBlock;
use common_expression::DataSchemaRef;
use common_expression::SortColumnDescription;
use common_pipeline_core::pipe::Pipe;
use common_pipeline_core::pipe::PipeItem;
use common_pipeline_core::processors::port::InputPort;
use common_pipeline_core::processors::port::OutputPort;
use common_pipeline_core::processors::processor::Event;
use common_pipeline_core::processors::processor::ProcessorPtr;
use common_pipeline_core::processors::Processor;
use common_pipeline_core::Pipeline;
use super::sort::Cursor;
use super::sort::RowConverter;
use super::sort::Rows;
use super::sort::SimpleRowConverter;
use super::sort::SimpleRows;
/// Appends a multi-way sort-merge stage to `pipeline`, merging all of the
/// current output ports into a single sorted output port.
///
/// A pipeline with zero outputs is an error; a single output needs no merge
/// and is left untouched.
pub fn try_add_multi_sort_merge(
    pipeline: &mut Pipeline,
    output_schema: DataSchemaRef,
    block_size: usize,
    limit: Option<usize>,
    sort_columns_descriptions: Vec<SortColumnDescription>,
) -> Result<()> {
    if pipeline.is_empty() {
        return Err(ErrorCode::Internal("Cannot resize empty pipe."));
    }
    let output_len = pipeline.output_len();
    if output_len == 0 {
        return Err(ErrorCode::Internal("Cannot resize empty pipe."));
    }
    if output_len == 1 {
        // Already a single stream: nothing to merge.
        return Ok(());
    }
    // One input port per existing output port, all funnelled into one output.
    let inputs_port = (0..output_len)
        .map(|_| InputPort::create())
        .collect::<Vec<_>>();
    let output_port = OutputPort::create();
    let processor = create_processor(
        inputs_port.clone(),
        output_port.clone(),
        output_schema,
        block_size,
        limit,
        sort_columns_descriptions,
    )?;
    pipeline.add_pipe(Pipe::create(inputs_port.len(), 1, vec![PipeItem::create(
        processor,
        inputs_port,
        vec![output_port],
    )]));
    Ok(())
}
/// Builds the concrete `MultiSortMergeProcessor` instantiation for the given
/// sort description.
///
/// With exactly one sort column of a simple type (number, date, timestamp or
/// string) a specialized `SimpleRows`/`SimpleRowConverter` pair is used;
/// every other case falls back to the generic arrow row format
/// (`ArrowRows`/`ArrowRowConverter`).
fn create_processor(
    inputs: Vec<Arc<InputPort>>,
    output: Arc<OutputPort>,
    output_schema: DataSchemaRef,
    block_size: usize,
    limit: Option<usize>,
    sort_columns_descriptions: Vec<SortColumnDescription>,
) -> Result<ProcessorPtr> {
    Ok(if sort_columns_descriptions.len() == 1 {
        // Single sort column: dispatch on its data type to pick a cheap,
        // type-specialized row representation.
        let sort_type = output_schema
            .field(sort_columns_descriptions[0].offset)
            .data_type();
        match sort_type {
            // Expands one arm per concrete numeric type.
            DataType::Number(num_ty) => with_number_mapped_type!(|NUM_TYPE| match num_ty {
                NumberDataType::NUM_TYPE =>
                    ProcessorPtr::create(Box::new(MultiSortMergeProcessor::<
                        SimpleRows<NumberType<NUM_TYPE>>,
                        SimpleRowConverter<NumberType<NUM_TYPE>>,
                    >::create(
                        inputs,
                        output,
                        output_schema,
                        block_size,
                        limit,
                        sort_columns_descriptions,
                    )?)),
            }),
            DataType::Date => ProcessorPtr::create(Box::new(MultiSortMergeProcessor::<
                SimpleRows<DateType>,
                SimpleRowConverter<DateType>,
            >::create(
                inputs,
                output,
                output_schema,
                block_size,
                limit,
                sort_columns_descriptions,
            )?)),
            DataType::Timestamp => ProcessorPtr::create(Box::new(MultiSortMergeProcessor::<
                SimpleRows<TimestampType>,
                SimpleRowConverter<TimestampType>,
            >::create(
                inputs,
                output,
                output_schema,
                block_size,
                limit,
                sort_columns_descriptions,
            )?)),
            DataType::String => ProcessorPtr::create(Box::new(MultiSortMergeProcessor::<
                SimpleRows<StringType>,
                SimpleRowConverter<StringType>,
            >::create(
                inputs,
                output,
                output_schema,
                block_size,
                limit,
                sort_columns_descriptions,
            )?)),
            // Any other single-column type: generic arrow row encoding.
            _ => ProcessorPtr::create(Box::new(MultiSortMergeProcessor::<
                ArrowRows,
                ArrowRowConverter,
            >::create(
                inputs,
                output,
                output_schema,
                block_size,
                limit,
                sort_columns_descriptions,
            )?)),
        }
    } else {
        // Multiple sort columns always use the generic arrow row encoding.
        ProcessorPtr::create(Box::new(MultiSortMergeProcessor::<
            ArrowRows,
            ArrowRowConverter,
        >::create(
            inputs,
            output,
            output_schema,
            block_size,
            limit,
            sort_columns_descriptions,
        )?))
    })
}
/// TransformMultiSortMerge is a processor with multiple input ports;
/// it merges the already-sorted streams from all inputs into one sorted output.
pub struct MultiSortMergeProcessor<R, Converter>
where
    R: Rows,
    Converter: RowConverter<R>,
{
    /// Data from inputs (every input is sorted)
    inputs: Vec<Arc<InputPort>>,
    /// The single merged, sorted output port.
    output: Arc<OutputPort>,
    /// Schema of the blocks pushed to `output`.
    output_schema: DataSchemaRef,
    /// Sort fields' indices in `output_schema`
    sort_field_indices: Vec<usize>,
    // Parameters
    block_size: usize,
    limit: Option<usize>,
    /// For each input port, maintain a dequeue of data blocks.
    blocks: Vec<VecDeque<DataBlock>>,
    /// Maintain a flag for each input denoting if the current cursor has finished
    /// and needs to pull data from input.
    cursor_finished: Vec<bool>,
    /// The accumulated rows for the next output data block.
    ///
    /// Data format: (input_index, block_index, row_index)
    in_progress_rows: Vec<(usize, usize, usize)>,
    /// Heap that yields [`Cursor`] in increasing order.
    heap: BinaryHeap<Reverse<Cursor<R>>>,
    /// If the input port is finished.
    input_finished: Vec<bool>,
    /// Used to convert columns to rows.
    row_converter: Converter,
    /// Current step of the consume/preserve/output state machine.
    state: ProcessorState,
}
impl<R, Converter> MultiSortMergeProcessor<R, Converter>
where
    R: Rows,
    Converter: RowConverter<R>,
{
    /// Creates a merger over `inputs.len()` sorted input ports.
    ///
    /// All per-input bookkeeping (`blocks`, `cursor_finished`, `input_finished`)
    /// is sized to the number of inputs. Cursors start as "finished" so the
    /// first event pulls a block from every input.
    pub fn create(
        inputs: Vec<Arc<InputPort>>,
        output: Arc<OutputPort>,
        output_schema: DataSchemaRef,
        block_size: usize,
        limit: Option<usize>,
        sort_columns_descriptions: Vec<SortColumnDescription>,
    ) -> Result<Self> {
        let input_size = inputs.len();
        let sort_field_indices = sort_columns_descriptions
            .iter()
            .map(|d| d.offset)
            .collect::<Vec<_>>();
        let row_converter = Converter::create(sort_columns_descriptions, output_schema.clone())?;
        Ok(Self {
            inputs,
            output,
            output_schema,
            sort_field_indices,
            block_size,
            limit,
            blocks: vec![VecDeque::with_capacity(2); input_size],
            heap: BinaryHeap::with_capacity(input_size),
            in_progress_rows: vec![],
            cursor_finished: vec![true; input_size],
            input_finished: vec![false; input_size],
            row_converter,
            state: ProcessorState::Consume,
        })
    }
    /// Pulls one data block from every input whose current cursor is exhausted,
    /// recording along the way which input ports are already finished.
    fn get_data_blocks(&mut self) -> Result<Vec<(usize, DataBlock)>> {
        let mut data = Vec::new();
        for (i, input) in self.inputs.iter().enumerate() {
            if input.is_finished() {
                self.input_finished[i] = true;
                continue;
            }
            input.set_need_data();
            // Only pull when the previous block of this input is fully consumed.
            if self.cursor_finished[i] && input.has_data() {
                data.push((i, input.pull_data().unwrap()?));
            }
        }
        Ok(data)
    }
    /// Number of inputs that can still produce rows: either the port itself is
    /// not finished, or its current cursor still has unread rows.
    fn nums_active_inputs(&self) -> usize {
        self.input_finished
            .iter()
            .zip(self.cursor_finished.iter())
            .filter(|(f, c)| !**f || !**c)
            .count()
    }
    /// Drains every remaining row of `cursor` into `in_progress_rows`.
    /// Returns `true` if the row `limit` was reached and output is needed.
    #[inline]
    fn drain_cursor(&mut self, mut cursor: Cursor<R>) -> bool {
        let input_index = cursor.input_index;
        let block_index = self.blocks[input_index].len() - 1;
        while !cursor.is_finished() {
            self.in_progress_rows
                .push((input_index, block_index, cursor.advance()));
            if let Some(limit) = self.limit {
                if self.in_progress_rows.len() == limit {
                    return true;
                }
            }
        }
        // We have read all rows of this block, need to read a new one.
        self.cursor_finished[input_index] = true;
        false
    }
    /// Pops cursors from the heap in ascending row order and appends their rows
    /// to `in_progress_rows` until an output block is due or more input data is
    /// required. Transitions `state` to `Output` when a block should be built.
    fn drain_heap(&mut self) {
        let nums_active_inputs = self.nums_active_inputs();
        let mut need_output = false;
        // Need to pop data to in_progress_rows.
        // Use `>=` because some of the input ports may be finished, but the data is still in the heap.
        while self.heap.len() >= nums_active_inputs && !need_output {
            match self.heap.pop() {
                Some(Reverse(mut cursor)) => {
                    let input_index = cursor.input_index;
                    if self.heap.is_empty() {
                        // If there is no other block in the heap, we can drain the whole block.
                        need_output = self.drain_cursor(cursor);
                    } else {
                        let next_cursor = &self.heap.peek().unwrap().0;
                        // If the last row of current block is smaller than the next cursor,
                        // we can drain the whole block.
                        if cursor.last().le(&next_cursor.current()) {
                            need_output = self.drain_cursor(cursor);
                        } else {
                            let block_index = self.blocks[input_index].len() - 1;
                            while !cursor.is_finished() && cursor.le(next_cursor) {
                                // If the cursor is smaller than the next cursor, don't need to push the cursor back to the heap.
                                self.in_progress_rows.push((
                                    input_index,
                                    block_index,
                                    cursor.advance(),
                                ));
                                if let Some(limit) = self.limit {
                                    if self.in_progress_rows.len() == limit {
                                        need_output = true;
                                        break;
                                    }
                                }
                            }
                            if !cursor.is_finished() {
                                self.heap.push(Reverse(cursor));
                            } else {
                                // We have read all rows of this block, need to read a new one.
                                self.cursor_finished[input_index] = true;
                            }
                        }
                    }
                    // Reach the block size, need to output.
                    if self.in_progress_rows.len() >= self.block_size {
                        need_output = true;
                        break;
                    }
                    if self.cursor_finished[input_index] && !self.input_finished[input_index] {
                        // Correctness: if input is not finished, we need to pull more data,
                        // or we can continue this loop.
                        break;
                    }
                }
                None => {
                    // Special case: self.heap.len() == 0 && nums_active_inputs == 0.
                    // `self.in_progress_rows` cannot be empty.
                    // If reach here, it means that all inputs are finished but `self.heap` is not empty before the while loop.
                    // Therefore, when reach here, data in `self.heap` is all drained into `self.in_progress_rows`.
                    debug_assert!(!self.in_progress_rows.is_empty());
                    self.state = ProcessorState::Output;
                    break;
                }
            }
        }
        if need_output {
            self.state = ProcessorState::Output;
        }
    }
    /// Drain `self.in_progress_rows` to build a output data block.
    fn build_block(&mut self) -> Result<DataBlock> {
        let num_rows = self.in_progress_rows.len();
        debug_assert!(num_rows > 0);
        // Prefix sums map (input_index, block_index) pairs to flat block indices.
        let mut blocks_num_pre_sum = Vec::with_capacity(self.blocks.len());
        let mut len = 0;
        for block in self.blocks.iter() {
            blocks_num_pre_sum.push(len);
            len += block.len();
        }
        // Compute the indices of the output block.
        // Consecutive rows taken from the same block are coalesced into
        // (block, start, count) slices for a single `take` call.
        let first_row = &self.in_progress_rows[0];
        let mut index = blocks_num_pre_sum[first_row.0] + first_row.1;
        let mut start_row_index = first_row.2;
        let mut end_row_index = start_row_index + 1;
        let mut indices = Vec::new();
        for row in self.in_progress_rows.iter().skip(1) {
            let next_index = blocks_num_pre_sum[row.0] + row.1;
            if next_index == index {
                // Within a same block.
                end_row_index += 1;
                continue;
            }
            // next_index != index
            // Record a range in the block.
            indices.push((index, start_row_index, end_row_index - start_row_index));
            // Start to record a new block.
            index = next_index;
            start_row_index = row.2;
            end_row_index = start_row_index + 1;
        }
        indices.push((index, start_row_index, end_row_index - start_row_index));
        let columns = self
            .output_schema
            .fields()
            .iter()
            .enumerate()
            .map(|(col_id, _)| {
                // Collect all rows for a certain column out of all preserved chunks.
                let candidate_cols = self
                    .blocks
                    .iter()
                    .flatten()
                    .map(|block| block.get_by_offset(col_id).clone())
                    .collect::<Vec<_>>();
                DataBlock::take_column_by_slices_limit(&candidate_cols, &indices, None)
            })
            .collect::<Vec<_>>();
        // Clear no need data.
        self.in_progress_rows.clear();
        // A cursor pointing to a new block is created only if the previous block is finished.
        // This means that all blocks except the last one for each input port are drained into the output block.
        // Therefore, the previous blocks can be cleared.
        for blocks in self.blocks.iter_mut() {
            if blocks.len() > 1 {
                blocks.drain(0..(blocks.len() - 1));
            }
        }
        Ok(DataBlock::new(columns, num_rows))
    }
}
#[async_trait::async_trait]
impl<R, Converter> Processor for MultiSortMergeProcessor<R, Converter>
where
    R: Rows + Send + 'static,
    Converter: RowConverter<R> + Send + 'static,
{
    fn name(&self) -> String {
        "MultiSortMerge".to_string()
    }
    fn as_any(&mut self) -> &mut dyn Any {
        self
    }
    /// Drives the state machine from the scheduler side: decides whether to
    /// pull input, run `process`, push a generated block, or finish.
    fn event(&mut self) -> Result<Event> {
        // Downstream is gone: propagate the finish upstream.
        if self.output.is_finished() {
            for input in self.inputs.iter() {
                input.finish();
            }
            return Ok(Event::Finished);
        }
        if !self.output.can_push() {
            return Ok(Event::NeedConsume);
        }
        // The limit has been fully satisfied: shut everything down.
        if let Some(limit) = self.limit {
            if limit == 0 {
                for input in self.inputs.iter() {
                    input.finish();
                }
                self.output.finish();
                return Ok(Event::Finished);
            }
        }
        // A block is ready: push it and decrement the remaining limit
        // (saturating at zero).
        if matches!(self.state, ProcessorState::Generated(_)) {
            if let ProcessorState::Generated(data_block) =
                std::mem::replace(&mut self.state, ProcessorState::Consume)
            {
                self.limit = self.limit.map(|limit| {
                    if data_block.num_rows() > limit {
                        0
                    } else {
                        limit - data_block.num_rows()
                    }
                });
                self.output.push_data(Ok(data_block));
                return Ok(Event::NeedConsume);
            }
        }
        match &self.state {
            ProcessorState::Consume => {
                let data_blocks = self.get_data_blocks()?;
                if !data_blocks.is_empty() {
                    self.state = ProcessorState::Preserve(data_blocks);
                    return Ok(Event::Sync);
                }
                let active_inputs = self.nums_active_inputs();
                if active_inputs == 0 {
                    if !self.heap.is_empty() {
                        // The heap is not drained yet. Need to drain data into in_progress_rows.
                        self.state = ProcessorState::Preserve(vec![]);
                        return Ok(Event::Sync);
                    }
                    if !self.in_progress_rows.is_empty() {
                        // The in_progress_rows is not drained yet. Need to drain data into output.
                        self.state = ProcessorState::Output;
                        return Ok(Event::Sync);
                    }
                    self.output.finish();
                    Ok(Event::Finished)
                } else {
                    // `data_blocks` is empty
                    if !self.heap.is_empty() {
                        // The heap is not drained yet. Need to drain data into in_progress_rows.
                        self.state = ProcessorState::Preserve(vec![]);
                        Ok(Event::Sync)
                    } else {
                        Ok(Event::NeedData)
                    }
                }
            }
            ProcessorState::Output => Ok(Event::Sync),
            _ => Err(ErrorCode::Internal("It's a bug.")),
        }
    }
    /// CPU-side work for the state chosen by `event`: converts newly pulled
    /// blocks to rows and merges them, or materializes the next output block.
    fn process(&mut self) -> Result<()> {
        match std::mem::replace(&mut self.state, ProcessorState::Consume) {
            ProcessorState::Preserve(blocks) => {
                for (input_index, block) in blocks.into_iter() {
                    if block.is_empty() {
                        continue;
                    }
                    // Convert the sort columns to the row format and seed a
                    // heap cursor for this input's new block.
                    let columns = self
                        .sort_field_indices
                        .iter()
                        .map(|i| block.get_by_offset(*i).clone())
                        .collect::<Vec<_>>();
                    let rows = self.row_converter.convert(&columns, block.num_rows())?;
                    let cursor = Cursor::try_create(input_index, rows);
                    self.heap.push(Reverse(cursor));
                    self.cursor_finished[input_index] = false;
                    self.blocks[input_index].push_back(block);
                }
                self.drain_heap();
                Ok(())
            }
            ProcessorState::Output => {
                let block = self.build_block()?;
                self.state = ProcessorState::Generated(block);
                Ok(())
            }
            _ => Err(ErrorCode::Internal("It's a bug.")),
        }
    }
}
// NOTE: in the previous revision each variant's comment sat on the line below
// its variant, visually attaching to the wrong one; they are now proper docs.
enum ProcessorState {
    /// Need to consume data from input.
    Consume,
    /// Need to preserve blocks in memory.
    Preserve(Vec<(usize, DataBlock)>),
    /// Need to generate output block.
    Output,
    /// Need to push output block to output port.
    Generated(DataBlock),
}
|
//! Contains items related to the [`#[sabi_trait]`](macro@crate::sabi_trait) attribute.
#[doc(hidden)]
pub mod reexports {
    // `Deref`/`DerefMut` under double-underscore aliases for generated code.
    pub use std::ops::{Deref as __DerefTrait, DerefMut as __DerefMutTrait};
    pub use crate::{
        marker_type::ErasedObject as __ErasedObject,
        pointer_trait::GetPointerKind as __GetPointerKind,
    };
    // Everything the `#[sabi_trait]` macro expansion needs to reference,
    // gathered under a single path so generated code stays short.
    pub mod __sabi_re {
        pub use abi_stable::{
            erased_types::{
                DynTrait, MakeVTable as MakeDynTraitVTable, VTable_Ref as DynTraitVTable_Ref,
            },
            extern_fn_panic_handling,
            marker_type::{
                NonOwningPhantom, SyncSend, SyncUnsend, UnsafeIgnoredType, UnsyncSend, UnsyncUnsend,
            },
            pointer_trait::{AsMutPtr, AsPtr, CanTransmuteElement, OwnedPointer, TransmuteElement},
            prefix_type::{PrefixRef, PrefixTypeTrait, WithMetadata},
            sabi_trait::{
                robject::RObject,
                vtable::{GetRObjectVTable, RObjectVtable, RObjectVtable_Ref},
            },
            sabi_types::{MovePtr, RMut, RRef},
            std_types::RBox,
            traits::IntoInner,
            utils::take_manuallydrop,
        };
        pub use core_extensions::{utils::transmute_ignore_size, TypeIdentity};
        pub use std::{
            marker::PhantomData,
            mem::{transmute, ManuallyDrop},
            ops::Deref,
            ptr,
        };
    }
}
/// A prelude for modules using `#[sabi_trait]` generated traits/trait objects.
pub mod prelude {
    // Downcasting markers, re-exported so users can name them unqualified.
    pub use crate::type_level::downcasting::{TD_CanDowncast, TD_Opaque};
}
pub use crate::type_level::downcasting::{TD_CanDowncast, TD_Opaque};
#[cfg(any(test, feature = "sabi_trait_examples"))]
pub mod examples;
pub mod doc_examples;
mod robject;
#[doc(hidden)]
pub mod vtable;
#[cfg(test)]
pub mod tests;
#[cfg(all(test, not(feature = "only_new_tests")))]
pub mod test_supertraits;
use std::{
fmt::{Debug, Display},
marker::PhantomData,
};
use self::reexports::__sabi_re::*;
pub use self::robject::{RObject, ReborrowBounds, UneraseError};
use crate::{erased_types::c_functions, marker_type::ErasedObject, sabi_types::MaybeCmp};
|
pub mod codec;
pub mod naming;
|
use std::num::Float;
use vec::{ Vec3, dot };
use ray::{ Ray, Inter };
use scene::Scene;
/// A light source that can be queried for its shading contribution.
pub trait Light {
    /// Returns the `(specular, diffuse)` intensities at `inter` as seen along `ray`.
    fn bright(&self, ray: &Ray, inter: &Inter, scene: &Scene) -> (f64, f64);
}
/// A collection of lights that itself acts as one composite light source.
pub struct Lights<'a> {
    // Owned, heterogeneous set of lights.
    all: Vec<Box<Light + 'a>>,
}
impl<'a> Lights<'a> {
    /// Wraps an existing set of lights.
    pub fn new(all: Vec<Box<Light + 'a>>) -> Lights<'a> {
        Lights { all: all }
    }
    /// Appends another light source.
    pub fn add(&mut self, light: Box<Light + 'a>) {
        self.all.push(light);
    }
    /// Phong-style shading shared by the concrete lights.
    ///
    /// Returns `(specular, diffuse)`, both already scaled by the shadow factor
    /// `s` so fully occluded points receive nothing.
    fn bright_helper(light_pos: Vec3, shin: i32, ray: &Ray, inter: &Inter, scene: &Scene) -> (f64, f64) {
        // l: surface -> light, r: reflection of l about the normal, v: surface -> eye.
        let l = (light_pos - inter.pos).normalize();
        let r = (inter.normal * 2. * dot(l, inter.normal) - l).normalize();
        let v = (ray.pos - inter.pos).normalize();
        let s = scene.shadow(inter.pos, light_pos);
        let diff = s * dot(l, inter.normal).max(0.);
        let spec = s * dot(r, v).max(0.).powi(shin);
        (spec, diff)
    }
}
impl<'a> Light for Lights<'a> {
    /// Sums the `(specular, diffuse)` contributions of every contained light.
    fn bright(&self, ray: &Ray, inter: &Inter, scene: &Scene) -> (f64, f64) {
        let mut spec = 0.;
        let mut diff = 0.;
        for light in self.all.iter() {
            let (s, d) = light.bright(ray, inter, scene);
            spec += s;
            diff += d;
        }
        (spec, diff)
    }
}
/// A point light located at `pos`.
#[allow(dead_code)]
pub struct Bulb {
    pos: Vec3,
    spec: f64, // Specular
    shin: i32, // Shininess
    diff: f64, // Diffuse
}
impl Bulb {
    /// Creates a point light with the given specular/shininess/diffuse coefficients.
    #[allow(dead_code)]
    pub fn new(pos: Vec3, spec: f64, shin: i32, diff: f64) -> Bulb {
        Bulb { pos: pos, spec: spec, shin: shin, diff: diff }
    }
}
impl Light for Bulb {
    /// Phong shading from the bulb's position, scaled by its own coefficients.
    fn bright(&self, ray: &Ray, inter: &Inter, scene: &Scene) -> (f64, f64) {
        let (spec, diff) = Lights::bright_helper(self.pos, self.shin, ray, inter, scene);
        (spec * self.spec, diff * self.diff)
    }
}
/// A directional light shining along `dir` (approximates an infinitely distant source).
#[allow(dead_code)]
pub struct Sun {
    dir: Vec3,
    spec: f64, // Specular
    shin: i32, // Shininess
    diff: f64, // Diffuse
}
impl Sun {
    /// Creates a directional light; `dir` is normalized on construction.
    #[allow(dead_code)]
    pub fn new(dir: Vec3, spec: f64, shin: i32, diff: f64) -> Sun {
        Sun { dir: dir.normalize(), spec: spec, shin: shin, diff: diff }
    }
}
impl Light for Sun {
    fn bright(&self, ray: &Ray, inter: &Inter, scene: &Scene) -> (f64, f64) {
        // Approximate the infinitely distant sun with a point light placed
        // very far away opposite the light direction.
        let pos = self.dir * -1000000.;
        let (spec, diff) = Lights::bright_helper(pos, self.shin, ray, inter, scene);
        (spec * self.spec, diff * self.diff)
    }
}
|
use game::game::Game;
use player::player::Player;
use std::collections::HashMap;
use team::team::Team;
/// Identifier type used to key players and teams.
pub type Id = String;
// TODO: World can only have one game at a time in this configuration, which is
// to see if we can have game have mutable references to the two teams (that are
// owned by world). Is there a way to have multiple concurrent games existing in
// the world, where each team is uniquely owned by at most one extant game? This
// seems like a runtime vs compile-time issue, so probably an Rc or Mutex or
// something?
/// Top-level game state: owns all players and teams, plus the single game in flight.
pub struct World<'a> {
    /// Every known player, keyed by id.
    pub players: HashMap<Id, Player>,
    /// Every known team, keyed by id.
    pub teams: HashMap<Id, Team<'a>>,
    /// The one currently running game (see the TODO above about supporting several).
    pub game: Game<'a>,
}
|
use ::amethyst::assets::*;
use ::amethyst::ecs::*;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
/// Loads asset from the so-called asset packs
/// It caches assets which you can manually load or unload on demand.
///
/// Example:
/// If the folder structure looks like this
/// /assets/base/sprites/player.png
/// /assets/base/sounds/click.ogg
/// /assets/base/models/cube.obj
/// /assets/mod1/sprites/player.png
/// /assets/mod1/sounds/click.ogg
/// /assets/mod2/sounds/click.ogg
///
/// resolve_path("sprites/player.png") -> /assets/mod1/sprites/player.png
/// resolve_path("models/cube.obj") -> /assets/base/models/cube.obj
/// resolve_path("sounds/click.ogg") -> Unknown.
pub struct AssetLoader {
    /// Root directory containing one sub-directory per asset pack.
    base_path: String,
    /// Pack consulted first when resolving a path; other packs override it.
    default_pack: String,
    /// Cached list of discovered pack names (filled by `get_asset_packs`).
    asset_packs: Vec<String>,
}
impl AssetLoader {
    /// Creates a loader rooted at `base_path`, with `default_pack` consulted
    /// first when resolving asset paths. The pack list is discovered eagerly.
    pub fn new(base_path: &str, default_pack: &str) -> Self {
        let mut al = AssetLoader {
            base_path: AssetLoader::sanitize_path_trail_only(base_path),
            default_pack: AssetLoader::sanitize_path(default_pack),
            asset_packs: Vec::new(),
        };
        al.get_asset_packs();
        al
    }
    /// Removes a single trailing `/` from `path`, if present.
    ///
    /// The previous implementation `unwrap`ped the last character and panicked
    /// on an empty string; this version is a no-op for empty input.
    fn sanitize_path_trail_only(path: &str) -> String {
        let mut out = path.to_string();
        if out.ends_with('/') {
            out.pop();
        }
        out
    }
    /// Normalizes a path fragment: backslashes become forward slashes, a
    /// leading `/` and a leftover Windows verbatim prefix (`?/`) are dropped,
    /// and any trailing `/` is removed.
    ///
    /// The previous implementation `unwrap`ped the first/last characters and
    /// panicked on empty or single-character input; this version degrades
    /// gracefully instead.
    fn sanitize_path(path: &str) -> String {
        let mut out = path.replace("\\", "/");
        if out.starts_with('/') {
            out.remove(0);
        }
        if out.starts_with('?') {
            // Remnant of a Windows verbatim prefix (`\\?\`): drop `?` and the
            // separator that followed it, if any.
            out.remove(0);
            if !out.is_empty() {
                out.remove(0);
            }
        }
        if out.ends_with('/') {
            out.pop();
        }
        out
    }
    /// Resolves `path` against every pack: the default pack provides the base
    /// result, and any other pack containing the file overrides it.
    pub fn resolve_path(&self, path: &str) -> Option<String> {
        // Try to get from default path
        let mut res = self.resolve_path_for_pack(path, &self.default_pack);
        // Try to find overrides
        for p in &self.asset_packs {
            if p != &self.default_pack {
                if let Some(r) = self.resolve_path_for_pack(path, &p) {
                    res = Some(r);
                }
            }
        }
        res
    }
    /// Returns the absolute path of `path` inside `pack`, or `None` (with a
    /// warning) when the file does not exist on disk.
    fn resolve_path_for_pack(&self, path: &str, pack: &str) -> Option<String> {
        let mut abs = self.base_path.to_owned() + "/" + pack + "/" + &path.to_owned();
        if cfg!(windows) {
            abs = abs.replace("/", "\\").replace("\\\\?\\", "");
        }
        let path = Path::new(&abs);
        if path.exists() {
            Some(abs.clone())
        } else {
            warn!("Failed to find file at path: {}", abs);
            None
        }
    }
    /// Lazily scans `base_path` for pack directories; subsequent calls return
    /// the cached list.
    pub fn get_asset_packs(&mut self) -> &Vec<String> {
        let mut buf: Option<Vec<String>> = None;
        if self.asset_packs.len() == 0 {
            if let Ok(elems) = fs::read_dir(&self.base_path) {
                buf = Some(
                    elems
                        .map(|e| {
                            // Strip the base path prefix, keep only the pack name.
                            let path = &e.unwrap().path();
                            let tmp = &path.to_str().unwrap()[self.base_path.len()..];
                            AssetLoader::sanitize_path(&tmp)
                        })
                        .collect(),
                );
            } else {
                error!(
                    "Failed to find base_path directory for asset loading: {}",
                    self.base_path
                );
            }
        }
        if let Some(v) = buf {
            self.asset_packs = v;
        }
        &self.asset_packs
    }
    /// Returns the cached handle for `path`, if the asset was loaded before.
    pub fn get_asset_handle<T>(path: &str, ali: &AssetLoaderInternal<T>) -> Option<Handle<T>> {
        ali.assets.get(path).cloned()
    }
    /// Returns the loaded asset for `path`, if its handle is cached and the
    /// storage has finished loading it.
    pub fn get_asset<'a, T>(
        path: &str,
        ali: &AssetLoaderInternal<T>,
        storage: &'a AssetStorage<T>,
    ) -> Option<&'a T>
    where
        T: Asset,
    {
        if let Some(h) = AssetLoader::get_asset_handle::<T>(path, ali) {
            storage.get(&h)
        } else {
            None
        }
    }
    /// Returns the asset for `path`, loading it first when it is not cached.
    pub fn get_asset_or_load<'a, T, F>(
        &mut self,
        path: &str,
        format: F,
        ali: &mut AssetLoaderInternal<T>,
        storage: &'a mut AssetStorage<T>,
        loader: &Loader,
    ) -> Option<&'a T>
    where
        T: Asset,
        F: Format<T::Data> + 'static,
    {
        if let Some(h) = AssetLoader::get_asset_handle::<T>(path, ali) {
            return storage.get(&h);
        }
        if let Some(h) = self.load::<T, F>(path, format, ali, storage, loader) {
            return storage.get(&h);
        }
        None
    }
    /// Loads the asset at `path` (resolving packs/overrides) and caches its
    /// handle; returns the cached handle when already loaded.
    pub fn load<T, F>(
        &self,
        path: &str,
        format: F,
        ali: &mut AssetLoaderInternal<T>,
        storage: &mut AssetStorage<T>,
        loader: &Loader,
    ) -> Option<Handle<T>>
    where
        T: Asset,
        F: Format<T::Data> + 'static,
    {
        if let Some(handle) = AssetLoader::get_asset_handle(path, ali) {
            return Some(handle);
        }
        if let Some(p) = self.resolve_path(path) {
            let handle = loader.load(p, format, (), storage);
            ali.assets.insert(String::from(path), handle.clone());
            return Some(handle);
        }
        None
    }
    /// Only removes the internal Handle<T>. To truly unload the asset, you need to drop all handles that you have to it.
    pub fn unload<T>(path: &str, ali: &mut AssetLoaderInternal<T>) {
        ali.assets.remove(path);
    }
}
// Registers the loader itself as an ECS component.
impl Component for AssetLoader {
    type Storage = VecStorage<Self>;
}
/// Per-asset-type cache of handles, keyed by the (unresolved) asset path.
pub struct AssetLoaderInternal<T> {
    /// Map path to asset handle.
    pub assets: HashMap<String, Handle<T>>,
}
impl<T> Default for AssetLoaderInternal<T> {
    /// Equivalent to [`AssetLoaderInternal::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<T> AssetLoaderInternal<T> {
    /// Creates an empty handle cache.
    pub fn new() -> Self {
        AssetLoaderInternal {
            assets: HashMap::new(),
        }
    }
}
// The handle cache is also an ECS component (one per asset type `T`).
impl<T> Component for AssetLoaderInternal<T>
where
    T: Send + Sync + 'static,
{
    type Storage = VecStorage<Self>;
}
#[cfg(test)]
mod test {
    use crate::*;
    /// Builds a loader over the on-disk test fixtures, with `main` as the default pack.
    fn load_asset_loader() -> AssetLoader {
        AssetLoader::new(
            &format!("{}/test/assets", env!("CARGO_MANIFEST_DIR")),
            "main",
        )
    }
    /// Constructing with stray leading/trailing slashes must not panic.
    #[test]
    fn path_sanitisation() {
        AssetLoader::new(
            &format!("{}/test/assets/", env!("CARGO_MANIFEST_DIR")),
            "/base/",
        );
    }
    /// A file present only in a non-default pack resolves into that pack.
    #[test]
    fn asset_loader_resolve_unique_other() {
        let asset_loader = load_asset_loader();
        assert_eq!(
            asset_loader.resolve_path("config/uniqueother"),
            Some(
                format!(
                    "{}/test/assets/mod1/config/uniqueother",
                    env!("CARGO_MANIFEST_DIR")
                )
                .to_string()
            )
        )
    }
    /// A file overridden by exactly one pack resolves into the overriding pack.
    #[test]
    fn asset_loader_resolve_path_override_single() {
        let asset_loader = load_asset_loader();
        assert_eq!(
            asset_loader.resolve_path("config/ov1"),
            Some(format!("{}/test/assets/mod1/config/ov1", env!("CARGO_MANIFEST_DIR")).to_string())
        )
    }
    /// When every pack provides the file, a non-default pack's override wins
    /// (here the fixture expects mod2).
    #[test]
    fn asset_loader_resolve_path_override_all() {
        let asset_loader = load_asset_loader();
        assert_eq!(
            asset_loader.resolve_path("config/ovall"),
            Some(
                format!(
                    "{}/test/assets/mod2/config/ovall",
                    env!("CARGO_MANIFEST_DIR")
                )
                .to_string()
            )
        )
    }
}
|
pub mod user_controller;
pub mod user_service;
use crate::cache::users::update_cache;
use crate::context::{generate_context, Ctx};
use crate::users::user_controller::{create_user, delete_user, get_user, get_users, update_user};
use futures::future::lazy;
use futures::{future, Future};
use std::boxed::Box;
use thruster::thruster_middleware::query_params::query_params;
use thruster::{middleware, App, MiddlewareChain, MiddlewareReturnValue, Request};
/// Middleware that refreshes the user cache after any mutating request.
///
/// The downstream chain runs first; once it resolves, a background task is
/// spawned to call `update_cache` for every non-GET method. GET requests
/// leave the cache untouched.
fn cache(
    context: Ctx,
    next: impl Fn(Ctx) -> MiddlewareReturnValue<Ctx> + Send + Sync,
) -> MiddlewareReturnValue<Ctx> {
    let ctx_future = next(context).and_then(move |ctx| {
        if ctx.request.method() != "GET" {
            // Fire-and-forget: the response is not delayed by the cache refresh.
            tokio::spawn(lazy(|| {
                update_cache();
                Ok(())
            }));
        }
        future::ok(ctx)
    });
    Box::new(ctx_future)
}
/// Builds the users sub-app: wires the cache and query-param middleware plus
/// the CRUD routes onto a fresh `App`.
pub fn init() -> App<Request, Ctx> {
    let mut subroute = App::<Request, Ctx>::create(generate_context);
    // `cache` wraps every route below; `query_params` parses the query string.
    subroute.use_middleware("/", middleware![Ctx => cache, Ctx => query_params]);
    subroute.get("/", middleware![Ctx => get_users]);
    subroute.post("/", middleware![Ctx => create_user]);
    subroute.get("/:id", middleware![Ctx => get_user]);
    subroute.put("/:id", middleware![Ctx => update_user]);
    subroute.delete("/:id", middleware![Ctx => delete_user]);
    subroute
}
|
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT.
/// Location (DNS `LOC`, RFC 1876) resource data.
#[repr(C, packed)]
pub struct Location
{
	version: u8,
	/// Size of a sphere at this location.
	pub size: LocationCompressedCentimetres,
	/// Horizontal precision.
	pub horizontal_precision: LocationCompressedCentimetres,
	/// Vertical precision.
	pub vertical_precision: LocationCompressedCentimetres,
	/// The latitude of the center of the sphere described by `size()`, expressed as a 32-bit integer, most significant octet first (network standard byte order), in thousandths of a second of arc.
	///
	/// 2^31 represents the equator; numbers above that are north latitude.
	pub unsigned_latitude: [u8; 4],
	/// The longitude of the center of the sphere described by `size()`, expressed as a 32-bit integer, most significant octet first (network standard byte order), in thousandths of a second of arc.
	///
	/// 2^31 represents the prime meridian; numbers above that are east longitude (RFC 1876; the previous comment wrongly repeated the latitude text).
	pub unsigned_longitude: [u8; 4],
	/// The altitude of the center of the sphere described by `size()`, expressed as a 32-bit integer, most significant octet first (network standard byte order), in centimeters, from a base of 100,000m below the WGS 84 reference spheroid used by GPS.
	pub unsigned_altitude: [u8; 4],
}
impl Location
{
	/// Validates the on-the-wire version field.
	///
	/// Only version 0 is defined for the LOC record; any other value is
	/// rejected with `ResourceDataForTypeLOCHasAnIncorrectVersion`.
	#[inline(always)]
	pub(crate) fn version(&self) -> Result<LocationVersion, DnsProtocolError>
	{
		if likely!(self.version == 0)
		{
			Ok(LocationVersion::Version0)
		}
		else
		{
			Err(ResourceDataForTypeLOCHasAnIncorrectVersion(self.version))
		}
	}
}
|
// revisions: base nll
// ignore-compare-mode-nll
//[nll] compile-flags: -Z borrowck=mir
struct Foo {
field: i32
}
impl Foo {
fn foo<'a>(&self, x: &i32) -> &i32 {
x
//[base]~^ ERROR lifetime mismatch
//[nll]~^^ ERROR lifetime may not live long enough
}
}
// No runtime behavior: this UI test only checks the borrowck diagnostics above.
fn main() { }
|
use rune::{Diagnostics, Options, Sources};
use runestick::{Any, AnyObj, Context, Module, Shared, Source, Vm, VmError};
use std::sync::Arc;
/// Passing a Rust value into the VM by mutable reference and then trying to
/// take ownership of it inside a native function must fail with a `VmError`.
#[test]
fn test_reference_error() {
    #[derive(Debug, Default, Any)]
    struct Foo {
        value: i64,
    }
    fn take_it(this: Shared<AnyObj>) -> Result<(), VmError> {
        // NB: this will error, since this is a reference.
        let _ = this.into_ref()?;
        Ok(())
    }
    // Native module exposing `take_it` to the script.
    let mut module = Module::new();
    module.function(&["take_it"], take_it).unwrap();
    let mut context = Context::with_default_modules().unwrap();
    context.install(&module).unwrap();
    let mut sources = Sources::new();
    sources.insert(Source::new(
        "test",
        r#"fn main(number) { take_it(number) }"#,
    ));
    let mut diagnostics = Diagnostics::new();
    let unit = rune::load_sources(
        &context,
        &Options::default(),
        &mut sources,
        &mut diagnostics,
    )
    .unwrap();
    let vm = Vm::new(Arc::new(context.runtime()), Arc::new(unit));
    let mut foo = Foo::default();
    assert_eq!(foo.value, 0);
    // This should error, because we're trying to acquire an `Ref` out of a
    // passed in reference.
    assert!(vm.call(&["main"], (&mut foo,)).is_err());
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![allow(missing_docs)]
use {
failure::{format_err, Error},
fdio::{fdio_sys, ioctl, make_ioctl},
fidl_fuchsia_device::ControllerSynchronousProxy,
fidl_fuchsia_device_test::{CONTROL_DEVICE, DeviceSynchronousProxy, RootDeviceSynchronousProxy},
fuchsia_zircon::{self as zircon, Handle},
rand::{self, Rng},
std::{
fs::{File, OpenOptions},
mem,
os::raw,
path::Path,
},
};
pub const DEV_TEST: &str = CONTROL_DEVICE;
pub const BTHCI_DRIVER_NAME: &str = "/system/driver/bt-hci-fake.so";
// Returns the name of the fake device and a File representing the device on success.
pub fn create_and_bind_device() -> Result<(File, String), Error> {
    let mut rng = rand::thread_rng();
    // Randomize the device name so repeated test runs don't collide.
    let id = format!("bt-hci-{}", rng.gen::<u8>());
    let devpath = create_fake_device(DEV_TEST, id.as_str())?;
    // The device node appears asynchronously; poll for it with a short sleep
    // between attempts instead of busy-spinning through all 100 retries.
    let mut dev = None;
    for _ in 0..100 {
        if let Ok(d) = open_rdwr(&devpath) {
            dev = Some(d);
            break;
        }
        std::thread::sleep(std::time::Duration::from_millis(10));
    }
    let dev = dev.ok_or_else(|| format_err!("could not open {:?}", devpath))?;
    bind_fake_device(&dev)?;
    Ok((dev, id))
}
/// Asks the `RootDevice` at `test_path` to create a test device named
/// `dev_name`, returning the path of the newly created device node.
pub fn create_fake_device(test_path: &str, dev_name: &str) -> Result<String, Error> {
    let control = open_rdwr(test_path)?;
    let mut root = RootDeviceSynchronousProxy::new(fdio::clone_channel(&control)?);
    let (status, devpath) = root.create_device(dev_name, fuchsia_zircon::Time::INFINITE)?;
    fuchsia_zircon::Status::ok(status)?;
    devpath.ok_or_else(|| format_err!("RootDevice.CreateDevice received no devpath?"))
}
/// Requests that the fake bt-hci driver be bound to `device`.
pub fn bind_fake_device(device: &File) -> Result<(), Error> {
    let mut controller = ControllerSynchronousProxy::new(fdio::clone_channel(device)?);
    let status = controller.bind(BTHCI_DRIVER_NAME, fuchsia_zircon::Time::INFINITE)?;
    fuchsia_zircon::Status::ok(status)?;
    Ok(())
}
/// Tells the test device behind `device` to destroy itself.
pub fn destroy_device(device: &File) -> Result<(), Error> {
    let mut dev = DeviceSynchronousProxy::new(fdio::clone_channel(device)?);
    dev.destroy().map_err(Into::into)
}
// Queries the driver name of the bluetooth hci device. This is used to ensure
// the driver is the right driver to be bound to the device.
// TODO(bwb): move out to a generic crate
pub fn get_device_driver_name(device: &File) -> Result<String, Error> {
    let mut controller = ControllerSynchronousProxy::new(fdio::clone_channel(device)?);
    let (status, name) = controller.get_driver_name(fuchsia_zircon::Time::INFINITE)?;
    fuchsia_zircon::Status::ok(status)?;
    name.ok_or_else(|| format_err!("GetDriverName returned no name?"))
}
// Opens the snoop channel of the bt-hci `device` via a raw ioctl.
// TODO(bwb): move out to a generic crate
pub fn open_snoop_channel(device: &File) -> Result<zircon::Handle, Error> {
    let mut handle = zircon::sys::ZX_HANDLE_INVALID;
    // SAFETY: relies on the ioctl writing at most `size_of::<zx_handle_t>()`
    // bytes into `handle`; the input buffer is empty (null pointer, length 0).
    // TODO(review): confirm this matches the IOCTL_BT_HCI_GET_SNOOP_CHANNEL contract.
    unsafe {
        ioctl(
            device,
            IOCTL_BT_HCI_GET_SNOOP_CHANNEL,
            ::std::ptr::null_mut() as *mut raw::c_void,
            0,
            &mut handle as *mut _ as *mut raw::c_void,
            mem::size_of::<zircon::sys::zx_handle_t>(),
        )
        // On success the ioctl filled `handle`; wrap it as an owned Handle.
        .map(|_| Handle::from_raw(handle))
        .map_err(|e| e.into())
    }
}
/// Opens `path` with read and write access.
fn open_rdwr<P: AsRef<Path>>(path: P) -> Result<File, Error> {
    let file = OpenOptions::new().read(true).write(true).open(path)?;
    Ok(file)
}
// Ioctl definition for `open_snoop_channel` above:
// a handle-returning ioctl in the bt-hci family, ordinal 2.
// TODO(bwb): move out to a generic crate
const IOCTL_BT_HCI_GET_SNOOP_CHANNEL: raw::c_int = make_ioctl(
    fdio_sys::IOCTL_KIND_GET_HANDLE,
    fdio_sys::IOCTL_FAMILY_BT_HCI,
    2
);
|
use proptest::prop_assert_eq;
use proptest::strategy::Just;
use crate::erlang::tuple_to_list_1::result;
use crate::test::strategy;
#[test]
fn without_tuple_errors_badarg() {
    // Property: `tuple_to_list/1` on any non-tuple term errors with badarg.
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                strategy::term::is_not_tuple(arc_process),
            )
        },
        |(arc_process, tuple)| {
            prop_assert_is_not_tuple!(result(&arc_process, tuple), tuple);
            Ok(())
        },
    );
}
#[test]
fn with_tuple_returns_list() {
    // Property: a tuple of 0..=3 arbitrary elements converts to the list of
    // the same elements in the same order.
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                proptest::collection::vec(strategy::term(arc_process.clone()), 0..=3),
            )
        },
        |(arc_process, element_vec)| {
            let tuple = arc_process.tuple_from_slice(&element_vec);
            let list = arc_process.list_from_slice(&element_vec);
            prop_assert_eq!(result(&arc_process, tuple), Ok(list));
            Ok(())
        },
    );
}
|
use std::sync::{Arc, Mutex};
use log::{Level, Log, Metadata, Record};
use serde::Deserialize;
use alacritty_config::SerdeReplace as _;
use alacritty_config_derive::{ConfigDeserialize, SerdeReplace};
/// Test enum whose variants deserialize by name via `ConfigDeserialize`;
/// `Nine` is skipped from deserialization and only reachable as the default.
#[derive(ConfigDeserialize, Debug, PartialEq, Eq)]
enum TestEnum {
    One,
    Two,
    Three,
    #[config(skip)]
    Nine(String),
}
impl Default for TestEnum {
    fn default() -> Self {
        Self::Nine(String::from("nine"))
    }
}
/// Top-level test config covering aliases, deprecation, removal, nesting,
/// flattening and enum handling.
#[derive(ConfigDeserialize)]
struct Test {
    /// Deprecated field, also reachable under the `noalias` alias.
    #[config(alias = "noalias")]
    #[config(deprecated = "use field2 instead")]
    field1: usize,
    /// Deprecated but never set by the tests (see the message).
    #[config(deprecated = "shouldn't be hit")]
    field2: String,
    field3: Option<u8>,
    #[doc(hidden)]
    nesting: Test2<usize>,
    /// `Test3`'s fields are deserialized at this struct's level.
    #[config(flatten)]
    flatten: Test3,
    enom_small: TestEnum,
    enom_big: TestEnum,
    #[config(deprecated)]
    enom_error: TestEnum,
    /// Removed option: setting it should only produce a warning.
    #[config(removed = "it's gone")]
    gone: bool,
}
impl Default for Test {
    /// Defaults used for every field missing from the parsed document.
    fn default() -> Self {
        Self {
            field1: 13,
            field2: String::from("field2"),
            field3: Some(23),
            nesting: Test2::default(),
            flatten: Test3::default(),
            enom_small: TestEnum::default(),
            enom_big: TestEnum::default(),
            enom_error: TestEnum::default(),
            gone: false,
        }
    }
}
/// Nested config with a generic field type.
#[derive(ConfigDeserialize, Default)]
struct Test2<T: Default> {
    field1: T,
    field2: Option<usize>,
    /// Skipped: present in the struct but not deserializable.
    #[config(skip)]
    field3: usize,
    #[config(alias = "aliased")]
    field4: u8,
    newtype: NewType,
}
/// Struct whose fields are flattened into `Test`.
#[derive(ConfigDeserialize, Default)]
struct Test3 {
    flatty: usize,
}
/// Newtype deserialized via plain serde and replaceable via `SerdeReplace`.
#[derive(SerdeReplace, Deserialize, Default, PartialEq, Eq, Debug)]
struct NewType(usize);
#[test]
fn config_deserialize() {
    // LOGGER is written once here, before `set_logger`, and never mutated
    // again — TODO(review): confirm no other test installs a logger concurrently.
    let logger = unsafe {
        LOGGER = Some(Logger::default());
        LOGGER.as_mut().unwrap()
    };
    log::set_logger(logger).unwrap();
    log::set_max_level(log::LevelFilter::Warn);
    // One document exercising aliases, deprecated/removed fields, enum case
    // handling, nesting, flattening and deliberate type errors.
    let test: Test = toml::from_str(
        r#"
field1 = 3
field3 = 32
flatty = 123
enom_small = "one"
enom_big = "THREE"
enom_error = "HugaBuga"
gone = false
[nesting]
field1 = "testing"
field2 = "None"
field3 = 99
aliased = 8
"#,
    )
    .unwrap();
    // Verify fields were deserialized correctly.
    assert_eq!(test.field1, 3);
    assert_eq!(test.field2, Test::default().field2);
    assert_eq!(test.field3, Some(32));
    assert_eq!(test.enom_small, TestEnum::One);
    assert_eq!(test.enom_big, TestEnum::Three);
    assert_eq!(test.enom_error, Test::default().enom_error);
    assert!(!test.gone);
    assert_eq!(test.nesting.field1, Test::default().nesting.field1);
    assert_eq!(test.nesting.field2, None);
    assert_eq!(test.nesting.field3, Test::default().nesting.field3);
    assert_eq!(test.nesting.field4, 8);
    assert_eq!(test.flatten.flatty, 123);
    // Verify all log messages are correct.
    let error_logs = logger.error_logs.lock().unwrap();
    assert_eq!(error_logs.as_slice(), [
        "Config error: enom_error: unknown variant `HugaBuga`, expected one of `One`, `Two`, \
`Three`",
        "Config error: field1: invalid type: string \"testing\", expected usize",
    ]);
    let warn_logs = logger.warn_logs.lock().unwrap();
    assert_eq!(warn_logs.as_slice(), [
        "Config warning: field1 has been deprecated; use field2 instead",
        "Config warning: enom_error has been deprecated",
        "Config warning: gone has been removed; it's gone",
        "Unused config key: field3",
    ]);
}
// `log::set_logger` needs a `'static` reference, so the instance lives in a
// static; it is initialized exactly once in `config_deserialize`.
static mut LOGGER: Option<Logger> = None;
/// Logger storing all messages for later validation.
#[derive(Default)]
struct Logger {
    // Error-level messages, in arrival order.
    error_logs: Arc<Mutex<Vec<String>>>,
    // Warn-level messages, in arrival order.
    warn_logs: Arc<Mutex<Vec<String>>>,
}
impl Log for Logger {
fn log(&self, record: &Record<'_>) {
assert_eq!(record.target(), env!("CARGO_PKG_NAME"));
match record.level() {
Level::Error => {
let mut error_logs = self.error_logs.lock().unwrap();
error_logs.push(record.args().to_string());
},
Level::Warn => {
let mut warn_logs = self.warn_logs.lock().unwrap();
warn_logs.push(record.args().to_string());
},
_ => unreachable!(),
}
}
fn enabled(&self, _metadata: &Metadata<'_>) -> bool {
true
}
fn flush(&self) {}
}
/// Replacing one nested option via `SerdeReplace` leaves the rest intact.
#[test]
fn field_replacement() {
    let mut test = Test::default();
    let value = toml::from_str("nesting.field2=13").unwrap();
    test.replace(value).unwrap();
    assert_eq!(test.nesting.field2, Some(13));
}
/// Replacement works through a `#[derive(SerdeReplace)]` newtype.
#[test]
fn replace_derive() {
    let mut test = Test::default();
    let value = toml::from_str("nesting.newtype=9").unwrap();
    test.replace(value).unwrap();
    assert_eq!(test.nesting.newtype, NewType(9));
}
/// Replacement reaches fields of the flattened struct by their flat name.
#[test]
fn replace_flatten() {
    let mut test = Test::default();
    let value = toml::from_str("flatty=7").unwrap();
    test.replace(value).unwrap();
    assert_eq!(test.flatten.flatty, 7);
}
|
use rustc_serialize::json::{ToJson, Json};
use chrono::*;
use bill::{Bill, BillItem, ItemList, Tax};
use ordered_float::OrderedFloat;
use std::process;
use std::error::Error;
use super::Project;
use super::product::Product;
use util::currency_to_string;
/// Converts an optional displayable value to JSON: `Some(t)` becomes a JSON
/// string, `None` becomes JSON null.
fn opt_to_json<T: ::std::fmt::Display>(opt:Option<T>) -> Json{
    opt.map_or(Json::Null, |t| Json::String(t.to_string()))
}
/// Shorthand: converts a `&str` into an owned `String` (used for map keys).
fn s(s:&str) -> String { s.to_owned() }
/// Serializes the sums for a single tax rate: the rate itself (as a percent),
/// the gross and tax sums, and whether any tax applies at all.
fn itemlist_to_json(tax:&Tax, list: &ItemList<Product>) -> Json {
    let gross_sum = list.gross_sum();
    let tax_sum = list.tax_sum();
    let map = btreemap!{
        // Tax is stored as a fraction (e.g. 0.19); expose it as a percentage.
        s("tax_value") => (tax.into_inner()*100.0).to_json(),
        s("gross_sum") => currency_to_string(&gross_sum).to_json(),
        s("tax_sum") => currency_to_string(&tax_sum).to_json(),
        s("has_tax") => (tax.into_inner() > 0f64).to_json()
    };
    map.to_json()
}
/// Serializes the per-tax-rate summaries of `bill`, in reverse of the
/// bill's iteration order.
fn taxes_by_tax_to_json(bill: &Bill<Product>) -> Json {
    let mut sums: Vec<Json> = bill
        .iter()
        .map(|(tax, list)| itemlist_to_json(tax, list))
        .collect();
    sums.reverse();
    sums.to_json()
}
impl ToJson for Project{
fn to_json(&self) -> Json{
use ::project::spec::*;
let s = |s:&str| String::from(s);
let opt_str = |opt:Option<&str>| opt.map(|e|e.to_owned()).to_json() ;
let dmy = |date:Option<Date<UTC>>| date.map(|d|d.format("%d.%m.%Y").to_string()).to_json();
let item_to_json = |item:&BillItem<Product>, tax:OrderedFloat<f64>| btreemap!{
s("name") => item.product.name.to_json(),
s("price") => currency_to_string(&item.product.price).to_json(),
s("unit") => item.product.unit.unwrap_or_else(||"").to_json(),
s("amount") => item.amount.to_json(),
s("cost") => currency_to_string(&item.gross()).to_json(),
s("tax") => tax.into_inner().to_json()
}.to_json();
let bill_to_json = |bill:&Bill<Product>| bill.as_items_with_tax()
.into_iter()
.map(|(tax, item)| item_to_json(item,tax) )
.collect::<Vec<Json>>()
.to_json();
let (offer,invoice) = match self.bills() {
Ok(bills) => bills,
Err(err) => {
error!("Cannot create Bill because: {}", err.description());
process::exit(1);
},
};
let map = btreemap!{
//String::from("adressing") => ,
s("bills") => btreemap!{
s("offer") => bill_to_json(&offer),
s("invoice") => bill_to_json(&invoice),
}.to_json(),
s("client") => btreemap!{
s("email") => opt_str(self.client().email()),
s("last_name") => opt_str(self.client().last_name()),
s("first_name") => opt_str(self.client().first_name()),
s("full_name") => self.client().full_name().to_json(),
s("title") => opt_str(self.client().title()),
s("address") => opt_str(self.client().address()),
s("addressing") => self.client().addressing().to_json(),
}.to_json(),
s("event") => btreemap!{
s("name") => IsProject::name(self).unwrap_or("unnamed").to_json(),
s("date") => dmy(self.event_date()),
s("manager") => self.responsible().unwrap_or("").to_string().to_json(),
}.to_json(),
s("offer") => btreemap!{
s("number") => self.offer().number().to_json(),
s("date") => dmy(self.offer().date()),
s("sums") => taxes_by_tax_to_json(&offer),
s("net_total") => currency_to_string(&offer.net_total()).to_json(),
s("gross_total") => currency_to_string(&offer.gross_total()).to_json(),
}.to_json(),
s("invoice") => btreemap!{
s("date") => dmy(self.invoice().date()),
s("number") => self.invoice().number_str().to_json(),
s("number_long") => self.invoice().number_long_str().to_json(),
s("official") => self.invoice().official().to_json(),
s("sums") => taxes_by_tax_to_json(&invoice),
s("net_total") => currency_to_string(&invoice.net_total()).to_json(),
s("gross_total") => currency_to_string(&invoice.gross_total()).to_json(),
}.to_json(),
s("hours") => btreemap!{
s("time") => opt_to_json(self.hours().total()),
s("salary") => opt_to_json(self.hours().salary().map(|ref c| currency_to_string(c)))
}.to_json(),
};
Json::Object(map)
}
}
impl<'a> ToJson for Product<'a> {
    /// Serializes a product's name, unit, tax rate, price and currency symbol.
    fn to_json(&self) -> Json {
        let s = |s: &str| String::from(s);
        Json::Object(btreemap!{
            s("name") => self.name.to_json(),
            s("unit") => self.unit.map(|s|s.to_owned()).to_json(),
            s("tax") => self.tax.to_string().to_json(),
            s("price") => currency_to_string(&self.price).to_json(),
            // `price.0` holds the currency symbol, when one is set.
            s("currency") => self.price.0.map(|s|s.to_string()).to_json(),
        })
    }
}
|
#![doc = include_str!("../README.md")]
use macaddr::MacAddr6;
/// One set of sensor readings (units per the strings in [`consts`]).
#[derive(Debug)]
pub struct SensorData {
    /// Relative humidity in %rH.
    pub relative_humidity_percent: f32,
    /// Short-term radon average in Bq/m3, when available.
    pub radon_short_term: Option<u32>,
    /// Long-term radon average in Bq/m3, when available.
    pub radon_long_term: Option<u32>,
    /// Temperature in degrees Celsius.
    pub temperature_celsius: f32,
    /// Relative atmospheric pressure in mbar.
    pub relative_atmospheric_pressure: f32,
    /// CO2 concentration in ppm.
    pub co2: f32,
    /// Volatile organic compounds in ppb.
    pub voc: f32,
}
/// A sensor's identity together with its latest measurements.
#[derive(Debug)]
pub struct SensorMetadata {
    /// Bluetooth MAC address of the sensor.
    pub mac_address: MacAddr6,
    /// Serial number, when it could be read.
    pub serial_number: Option<String>,
    pub measurements: SensorData,
}
/// Identifies one of the measurements reported by the sensor.
pub enum SensorMeasurementKind {
    RelativeHumidity,
    RadonShortTerm,
    RadonLongTerm,
    TemperatureCelsius,
    RelativeAtmosphericPressure,
    Co2,
    Voc,
}
/// Unit strings for each measurement kind.
pub mod consts {
    pub const RELATIVE_HUMIDITY_UNIT_STR: &str = "%rH";
    pub const RADON_UNIT_STR: &str = "Bq/m3";
    pub const TEMPERATURE_CELSIUS_UNIT_STR: &str = "degC";
    /// Alternative: `hPA`
    pub const RELATIVE_ATMOSPHERIC_PRESSURE_UNIT_STR: &str = "mbar";
    pub const CO2_UNIT_STR: &str = "ppm";
    pub const VOC_UNIT_STR: &str = "ppb";
}
impl SensorMeasurementKind {
    /// Returns the unit string for this measurement kind (e.g. `"ppm"` for CO2).
    pub fn unit_str(&self) -> String {
        // Both radon averages share the same unit.
        let unit = match self {
            Self::RelativeHumidity => consts::RELATIVE_HUMIDITY_UNIT_STR,
            Self::RadonShortTerm | Self::RadonLongTerm => consts::RADON_UNIT_STR,
            Self::TemperatureCelsius => consts::TEMPERATURE_CELSIUS_UNIT_STR,
            Self::RelativeAtmosphericPressure => consts::RELATIVE_ATMOSPHERIC_PRESSURE_UNIT_STR,
            Self::Co2 => consts::CO2_UNIT_STR,
            Self::Voc => consts::VOC_UNIT_STR,
        };
        unit.to_string()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Smoke test: `SensorData` can be constructed with plausible readings.
    #[test]
    fn define() {
        let _sensor_data = SensorData {
            co2: 533.0,
            voc: 72.0,
            relative_atmospheric_pressure: 1005.92,
            relative_humidity_percent: 36.5,
            temperature_celsius: 20.42,
            radon_short_term: Some(1),
            radon_long_term: Some(1),
        };
        // println!("{:#?}", sensor_data);
    }
}
|
extern crate curl;
mod rhyme;
use curl::http;
use rhyme::*;
use std::env;
/// Fetches rhymes for the word given as the first CLI argument (default:
/// "heart") from the RhymeBrain API and prints the raw response.
fn main() {
    // `unwrap_or_else` avoids allocating the default `String` when an
    // argument was actually supplied (clippy: or_fun_call).
    let word = env::args().nth(1).unwrap_or_else(|| "heart".to_string());
    let url = format!("http://rhymebrain.com/talk?function=getRhymes&word={}", word);
    let resp = http::handle().get(url).exec().unwrap();
    println!("{}", resp);
}
|
extern crate reqwest;
extern crate serde;
extern crate serde_json;
extern crate console;
extern crate dialoguer;
use serde_json::Value;
use dialoguer::{Input, Select};
use std::process::{Command, Stdio};
use std::env;
const PROMPT: &str = "[search]";
/// Issues a YouTube search request for `q`, appending the saved page token
/// (if any) so repeated calls fetch successive result pages.
///
/// Panics if the HTTP request fails.
fn req(ctx: &Context, q: &str) -> reqwest::Response {
    let mut s = format!(
        "https://www.googleapis.com/youtube/v3/search?key={}&part=snippet&q={}&maxResults=20",
        ctx.api_key,
        q
    );
    if let Some(ref t) = ctx.next_page_token {
        s.push_str("&pageToken=");
        s.push_str(t);
    }
    reqwest::get(&s).unwrap()
}
/// One search result returned by the YouTube API.
#[derive(Debug)]
struct Video {
    id: String,
    title: String,
    channel: String,
}
/// Formats a video as a one-line menu entry: " <channel>: <title>".
fn video_to_string(v: &Video) -> String {
    format!(" {0}: {1}", v.channel, v.title)
}
/// Mutable state threaded through the search/select loop.
struct Context {
    // Index to preselect in the menu (points at "+More" after paging).
    select_i: usize,
    // YouTube Data API key, taken from the YT_API_KEY env variable.
    api_key: String,
    // All videos fetched so far, across pages.
    videos: Vec<Video>,
    // Token for the next result page, if the API supplied one.
    next_page_token: Option<String>,
}
/// Parses a YouTube search-API response body, stores the next-page token on
/// `ctx`, and returns the videos found under `items`.
///
/// Panics if the body is not valid JSON or lacks the expected structure.
fn get_data(ctx: &mut Context, body: &str) -> Vec<Video> {
    // `from_str` already returns a `Result`; no `.ok().expect(..)` needed,
    // and `body` is already a `&str`, so the extra borrow was redundant.
    let v: Value = serde_json::from_str(body).expect("failed to parse json");
    // Remember the token so the next request fetches the following page.
    ctx.next_page_token = v
        .get("nextPageToken")
        .and_then(Value::as_str)
        .map(::std::string::ToString::to_string);
    let lst = v.get("items").and_then(Value::as_array).expect(
        "couldn't find items",
    );
    let mut videos = Vec::new();
    for o in lst.iter() {
        // Search results can also contain channels/playlists; keep videos only.
        if o.pointer("/id/kind")
            .and_then(Value::as_str)
            .map(|s| s == "youtube#video")
            .unwrap_or(false)
        {
            videos.push(Video {
                id: o.pointer("/id/videoId")
                    .expect("couldn't find /id/videoId")
                    .as_str()
                    .expect("/id/videoId wasn't a string")
                    .to_string(),
                title: o.pointer("/snippet/title")
                    .expect("couldn't find /snippet/title")
                    .as_str()
                    .expect("/snippet/title wasn't a string")
                    .to_string(),
                channel: o.pointer("/snippet/channelTitle")
                    .expect("couldn't find /snippet/channelTitle")
                    .as_str()
                    .expect("/snippet/channelTitle wasn't a string")
                    .to_string(),
            });
        }
    }
    videos
}
/// Plays `vid` by spawning `mpv` on its YouTube URL and waiting for it to exit.
fn play_video(vid: &Video) {
    let yt_link = format!("https://www.youtube.com/watch?v={}", vid.id);
    Command::new("mpv")
        .arg(&yt_link)
        .stdout(Stdio::piped())
        .spawn()
        .expect("Failed to execute command")
        .wait_with_output()
        .expect("failed to wait on child");
}
/// Interactive loop: prompt for a search term, list results, play the chosen
/// video with mpv, or fetch more results when "+More" is selected.
fn main() {
    // Hard requirement: the API key must come from the environment.
    let api_key = match env::var("YT_API_KEY") {
        Ok(s) => s,
        Err(_) => panic!("Set the env variable 'YT_API_KEY' to be the API key."),
    };
    let term = console::Term::stdout();
    let mut ctx = Context {
        select_i: 0,
        videos: vec![],
        next_page_token: None,
        api_key,
    };
    if let Ok(search_term) = Input::new(PROMPT).interact() {
        let mut select;
        loop {
            let mut res = req(&ctx, &search_term);
            let body = res.text().unwrap();
            let vids = get_data(&mut ctx, &body);
            ctx.videos.extend(vids.into_iter());
            // Rebuild the menu from scratch with every video fetched so far.
            let strs = ctx.videos.iter().map(video_to_string);
            select = Select::new();
            for s in strs {
                select.item(&s);
            }
            select.item("+More");
            select.default(ctx.select_i);
            if let Ok(i) = select.interact_on(&term) {
                // The "+More" entry sits right past the last video index.
                if i == ctx.videos.len() {
                    // more was clicked. We have saved the stuff in ctx. Just loop.
                    ctx.select_i = i;
                } else {
                    play_video(&ctx.videos[i]);
                    break;
                }
            }
        }
    }
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate criterion;
use common_ast::parser::parse_expr;
use common_ast::parser::parse_sql;
use common_ast::parser::tokenize_sql;
use common_ast::Dialect;
use criterion::black_box;
use criterion::Criterion;
/// Criterion benchmarks for the SQL tokenizer/parser: one large statement,
/// one large query, one wide (flat AND chain) and one deep (nested parens)
/// expression.
fn bench(c: &mut Criterion) {
    let mut group = c.benchmark_group("bench_parser");
    // Parsing is slow enough that 10 samples keeps the run time reasonable.
    group.sample_size(10);
    group.bench_function("large_statement", |b| {
        b.iter(|| {
            let case = r#"explain SELECT SUM(count) FROM (SELECT ((((((((((((true)and(true)))or((('614')like('998831')))))or(false)))and((true IN (true, true, (-1014651046 NOT BETWEEN -1098711288 AND -1158262473))))))or((('780820706')=('')))) IS NOT NULL AND ((((((((((true)AND(true)))or((('614')like('998831')))))or(false)))and((true IN (true, true, (-1014651046 NOT BETWEEN -1098711288 AND -1158262473))))))OR((('780820706')=(''))))) ::INT64)as count FROM t0) as res;"#;
            let tokens = tokenize_sql(case).unwrap();
            let (stmt, _) = parse_sql(&tokens, Dialect::PostgreSQL).unwrap();
            black_box(stmt);
        })
    });
    group.bench_function("large_query", |b| {
        b.iter(|| {
            let case = r#"SELECT SUM(count) FROM (SELECT ((((((((((((true)and(true)))or((('614')like('998831')))))or(false)))and((true IN (true, true, (-1014651046 NOT BETWEEN -1098711288 AND -1158262473))))))or((('780820706')=('')))) IS NOT NULL AND ((((((((((true)AND(true)))or((('614')like('998831')))))or(false)))and((true IN (true, true, (-1014651046 NOT BETWEEN -1098711288 AND -1158262473))))))OR((('780820706')=(''))))) ::INT64)as count FROM t0) as res;"#;
            let tokens = tokenize_sql(case).unwrap();
            let (stmt, _) = parse_sql(&tokens, Dialect::PostgreSQL).unwrap();
            black_box(stmt);
        })
    });
    group.bench_function("wide_expr", |b| {
        b.iter(|| {
            let case = r#"a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a AND a"#;
            let tokens = tokenize_sql(case).unwrap();
            let expr = parse_expr(&tokens, Dialect::PostgreSQL).unwrap();
            black_box(expr);
        })
    });
    group.bench_function("deep_expr", |b| {
        b.iter(|| {
            let case = r#"((((((((((((((((((((((((((((((1))))))))))))))))))))))))))))))"#;
            let tokens = tokenize_sql(case).unwrap();
            let expr = parse_expr(&tokens, Dialect::PostgreSQL).unwrap();
            black_box(expr);
        })
    });
}
criterion_group!(benches, bench);
criterion_main!(benches);
|
// Generated (svd2rust-style) read accessor for the ITLINE23 register.
#[doc = "Reader of register ITLINE23"]
pub type R = crate::R<u32, super::ITLINE23>;
#[doc = "Reader of field `I2C1`"]
pub type I2C1_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bit 0 - I2C1"]
    #[inline(always)]
    pub fn i2c1(&self) -> I2C1_R {
        // Extracts bit 0 of the register value as a bool.
        I2C1_R::new((self.bits & 0x01) != 0)
    }
}
|
#![cfg(any(target_arch="x86", target_arch="x86_64"))]
#![feature(const_fn)]
#![feature(asm)]
#![no_std]
#![cfg_attr(test, allow(unused_features))]
#[macro_use]
extern crate bitflags;
extern crate raw_cpuid;
#[cfg(feature = "performance-counter")]
#[macro_use]
extern crate phf;
#[cfg(target_arch="x86")]
pub mod bits32;
#[cfg(target_arch="x86_64")]
pub mod bits64;
pub mod shared;
/// Re-exports the module matching the compilation target's architecture.
pub mod current {
    #[cfg(target_arch="x86")]
    pub use bits32::*;
    #[cfg(target_arch="x86_64")]
    pub use bits64::*;
}
// In this `#![no_std]` crate some macro expansions refer to `::std::...`
// paths; this shim maps those paths onto `core` so they still resolve.
mod std {
    pub use core::fmt;
    pub use core::ops;
    pub use core::option;
}
|
use arkworks_gadgets::{
poseidon::PoseidonParameters,
prelude::ark_bn254::Bn254,
setup::{
common::Curve,
mixer::{setup_groth16_circuit_circomx5, setup_groth16_random_circuit_circomx5, setup_random_circuit_circomx5},
},
utils::{
get_mds_poseidon_bls381_x3_5, get_mds_poseidon_bls381_x5_5, get_mds_poseidon_bn254_x3_5,
get_mds_poseidon_bn254_x5_5, get_mds_poseidon_circom_bn254_x5_3, get_rounds_poseidon_bls381_x3_5,
get_rounds_poseidon_bls381_x5_5, get_rounds_poseidon_bn254_x3_5, get_rounds_poseidon_bn254_x5_5,
get_rounds_poseidon_circom_bn254_x5_3,
},
};
use node_template_runtime::{
AccountId, AuraConfig, BLS381Poseidon3x5HasherConfig, BLS381Poseidon5x5HasherConfig,
BN254CircomPoseidon3x5HasherConfig, BN254Poseidon3x5HasherConfig, BN254Poseidon5x5HasherConfig, BalancesConfig,
GenesisConfig, GrandpaConfig, Signature, SudoConfig, SystemConfig, VerifierConfig, WASM_BINARY,
};
use sc_service::ChainType;
use sp_consensus_aura::sr25519::AuthorityId as AuraId;
use sp_core::{sr25519, Pair, Public};
use sp_finality_grandpa::AuthorityId as GrandpaId;
use sp_runtime::traits::{IdentifyAccount, Verify};
// The URL for the telemetry server.
// const STAGING_TELEMETRY_URL: &str = "wss://telemetry.polkadot.io/submit/";
/// Specialized `ChainSpec`. This is a specialization of the general Substrate
/// ChainSpec type.
pub type ChainSpec = sc_service::GenericChainSpec<GenesisConfig>;
/// Generate a crypto pair from seed.
///
/// Derives from the `//{seed}` hard path; the `expect` is safe because only
/// static, well-formed seed strings are passed in this module ("qed").
pub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {
    TPublic::Pair::from_string(&format!("//{}", seed), None)
        .expect("static values are valid; qed")
        .public()
}
type AccountPublic = <Signature as Verify>::Signer;
/// Generate an account ID from seed.
///
/// Derives the public key via [`get_from_seed`] and maps it to an `AccountId`.
pub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId
where
    AccountPublic: From<<TPublic::Pair as Pair>::Public>,
{
    AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()
}
/// Generate an Aura authority key.
///
/// Returns the (Aura, Grandpa) session keys derived from the same seed.
pub fn authority_keys_from_seed(s: &str) -> (AuraId, GrandpaId) {
    (get_from_seed::<AuraId>(s), get_from_seed::<GrandpaId>(s))
}
/// Chain spec for a single-authority development chain ("Alice" only).
pub fn development_config() -> Result<ChainSpec, String> {
    let wasm_binary = WASM_BINARY.ok_or_else(|| "Development wasm binary not available".to_string())?;
    Ok(ChainSpec::from_genesis(
        // Name
        "Development",
        // ID
        "dev",
        ChainType::Development,
        move || {
            testnet_genesis(
                wasm_binary,
                // Initial PoA authorities
                vec![authority_keys_from_seed("Alice")],
                // Sudo account
                get_account_id_from_seed::<sr25519::Public>("Alice"),
                // Pre-funded accounts
                vec![
                    get_account_id_from_seed::<sr25519::Public>("Alice"),
                    get_account_id_from_seed::<sr25519::Public>("Bob"),
                    get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
                ],
                true,
            )
        },
        // Bootnodes
        vec![],
        // Telemetry
        None,
        // Protocol ID
        None,
        // Properties
        None,
        // Extensions
        None,
    ))
}
/// Chain spec for a two-authority local testnet with the standard set of
/// well-known endowed dev accounts.
pub fn local_testnet_config() -> Result<ChainSpec, String> {
    let wasm_binary = WASM_BINARY.ok_or_else(|| "Development wasm binary not available".to_string())?;
    Ok(ChainSpec::from_genesis(
        // Name
        "Local Testnet",
        // ID
        "local_testnet",
        ChainType::Local,
        move || {
            testnet_genesis(
                wasm_binary,
                // Initial PoA authorities
                vec![authority_keys_from_seed("Alice"), authority_keys_from_seed("Bob")],
                // Sudo account
                get_account_id_from_seed::<sr25519::Public>("Alice"),
                // Pre-funded accounts
                vec![
                    get_account_id_from_seed::<sr25519::Public>("Alice"),
                    get_account_id_from_seed::<sr25519::Public>("Bob"),
                    get_account_id_from_seed::<sr25519::Public>("Charlie"),
                    get_account_id_from_seed::<sr25519::Public>("Dave"),
                    get_account_id_from_seed::<sr25519::Public>("Eve"),
                    get_account_id_from_seed::<sr25519::Public>("Ferdie"),
                    get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Charlie//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Dave//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Eve//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Ferdie//stash"),
                ],
                true,
            )
        },
        // Bootnodes
        vec![],
        // Telemetry
        None,
        // Protocol ID
        None,
        // Properties
        None,
        // Extensions
        None,
    ))
}
/// Configure initial storage state for FRAME modules.
///
/// Builds the genesis config with the given authorities, sudo key and
/// endowed accounts, plus the Poseidon hasher parameter sets and a Groth16
/// verifying key generated from a deterministic test RNG.
fn testnet_genesis(
    wasm_binary: &[u8],
    initial_authorities: Vec<(AuraId, GrandpaId)>,
    root_key: AccountId,
    endowed_accounts: Vec<AccountId>,
    _enable_println: bool,
) -> GenesisConfig {
    use ark_serialize::CanonicalSerialize;
    use ark_std::test_rng;
    // Poseidon parameter sets for each supported curve/width combination.
    let circom_params = {
        let rounds = get_rounds_poseidon_circom_bn254_x5_3::<arkworks_gadgets::prelude::ark_bn254::Fr>();
        let mds = get_mds_poseidon_circom_bn254_x5_3::<arkworks_gadgets::prelude::ark_bn254::Fr>();
        PoseidonParameters::new(rounds, mds)
    };
    let bls381_3x_5_params = {
        let rounds = get_rounds_poseidon_bls381_x3_5::<arkworks_gadgets::prelude::ark_bls12_381::Fr>();
        let mds = get_mds_poseidon_bls381_x3_5::<arkworks_gadgets::prelude::ark_bls12_381::Fr>();
        PoseidonParameters::new(rounds, mds)
    };
    let bls381_5x_5_params = {
        let rounds = get_rounds_poseidon_bls381_x5_5::<arkworks_gadgets::prelude::ark_bls12_381::Fr>();
        let mds = get_mds_poseidon_bls381_x5_5::<arkworks_gadgets::prelude::ark_bls12_381::Fr>();
        PoseidonParameters::new(rounds, mds)
    };
    let bn254_3x_5_params = {
        let rounds = get_rounds_poseidon_bn254_x3_5::<arkworks_gadgets::prelude::ark_bn254::Fr>();
        let mds = get_mds_poseidon_bn254_x3_5::<arkworks_gadgets::prelude::ark_bn254::Fr>();
        PoseidonParameters::new(rounds, mds)
    };
    let bn254_5x_5_params = {
        let rounds = get_rounds_poseidon_bn254_x5_5::<arkworks_gadgets::prelude::ark_bn254::Fr>();
        let mds = get_mds_poseidon_bn254_x5_5::<arkworks_gadgets::prelude::ark_bn254::Fr>();
        PoseidonParameters::new(rounds, mds)
    };
    let verifier_params = {
        let mut rng = test_rng();
        pub const LEN: usize = 30;
        let curve = Curve::Bn254;
        // Only the verifying key is stored on-chain; bind the proving key as
        // `_pk` to avoid an unused-variable warning.
        let (_pk, vk) = setup_groth16_random_circuit_circomx5::<_, Bn254, LEN>(&mut rng, curve);
        let mut serialized = vec![0; vk.serialized_size()];
        vk.serialize(&mut serialized[..]).unwrap();
        serialized
    };
    GenesisConfig {
        system: SystemConfig {
            // Add Wasm runtime to storage.
            code: wasm_binary.to_vec(),
            changes_trie_config: Default::default(),
        },
        balances: BalancesConfig {
            // Configure endowed accounts with initial balance of 1 << 60.
            balances: endowed_accounts.iter().cloned().map(|k| (k, 1 << 60)).collect(),
        },
        aura: AuraConfig {
            authorities: initial_authorities.iter().map(|x| (x.0.clone())).collect(),
        },
        grandpa: GrandpaConfig {
            // Each Grandpa authority gets an equal voting weight of 1.
            authorities: initial_authorities.iter().map(|x| (x.1.clone(), 1)).collect(),
        },
        sudo: SudoConfig {
            // Assign network admin rights.
            key: root_key,
        },
        bls381_poseidon_3x_5_hasher: BLS381Poseidon3x5HasherConfig {
            parameters: Some(bls381_3x_5_params.to_bytes()),
            phantom: Default::default(),
        },
        bls381_poseidon_5x_5_hasher: BLS381Poseidon5x5HasherConfig {
            parameters: Some(bls381_5x_5_params.to_bytes()),
            phantom: Default::default(),
        },
        bn254_poseidon_3x_5_hasher: BN254Poseidon3x5HasherConfig {
            parameters: Some(bn254_3x_5_params.to_bytes()),
            phantom: Default::default(),
        },
        bn254_poseidon_5x_5_hasher: BN254Poseidon5x5HasherConfig {
            parameters: Some(bn254_5x_5_params.to_bytes()),
            phantom: Default::default(),
        },
        bn254_circom_poseidon_3x_5_hasher: BN254CircomPoseidon3x5HasherConfig {
            parameters: Some(circom_params.to_bytes()),
            phantom: Default::default(),
        },
        verifier: VerifierConfig {
            parameters: Some(verifier_params),
            phantom: Default::default(),
        },
    }
}
|
mod options;
use std::sync::atomic::{AtomicU64, Ordering};
use lazy_static::lazy_static;
use liblumen_alloc::erts::process::Process;
use liblumen_alloc::erts::term::prelude::*;
use crate::runtime::scheduler;
pub use options::*;
/// There are two types of unique integers both created using the erlang:unique_integer() BIF:
///
/// 1. Unique integers created with the monotonic modifier consist of a set of 2⁶⁴ - 1 unique
/// integers.
/// 2. Unique integers created without the monotonic modifier consist of a set of 2⁶⁴ - 1 unique
/// integers per scheduler thread and a set of 2⁶⁴ - 1 unique integers shared by other threads.
/// That is, the total amount of unique integers without the monotonic modifier is
/// (NoSchedulers + 1) × (2⁶⁴ - 1).
///
/// If a unique integer is created each nanosecond, unique integers will at earliest be reused after
/// more than 584 years. That is, for the foreseeable future they are unique enough.
///
/// - http://erlang.org/doc/efficiency_guide/advanced.html#unique_integers
pub fn unique_integer(process: &Process, options: Options) -> Term {
    if options.monotonic {
        // Strictly increasing across the whole node: one shared atomic counter.
        let u = MONOTONIC.fetch_add(1, Ordering::SeqCst);
        // See https://github.com/erlang/otp/blob/769ff22c750d939fdc9cb45fae1e44817ec04307/erts/emulator/beam/erl_bif_unique.c#L669-L697
        if options.positive {
            process.integer(u)
        } else {
            // When not positive allow for negative and positive even though the counter is unsigned
            // by subtracting counter value down into signed range.
            let i = if u < NEGATED_I64_MIN_U64 {
                (u as i64) + std::i64::MIN
            } else {
                (u - NEGATED_I64_MIN_U64) as i64
            };
            process.integer(i)
        }
    } else {
        // Non-monotonic unique integers are per-scheduler (https://github.com/erlang/otp/blob/769ff22c750d939fdc9cb45fae1e44817ec04307/erts/emulator/beam/erl_bif_unique.c#L572-L584)
        // Instead of being u64, they are u128 with the first u64 is the scheduler ID
        let scheduler_id = process.scheduler_id().unwrap();
        let scheduler_id_u128: u128 = scheduler_id.into();
        let arc_scheduler = scheduler::from_id(&scheduler_id).unwrap();
        let scheduler_unique_integer = arc_scheduler.next_unique_integer() as u128;
        // High 64 bits: scheduler id; low 64 bits: that scheduler's counter.
        let u: u128 = (scheduler_id_u128 << 64) | scheduler_unique_integer;
        if options.positive {
            process.integer(u)
        } else {
            // Same unsigned-to-signed folding as the monotonic branch, widened
            // to 128 bits.
            let i = if u < NEGATED_I128_MIN_U128 {
                (u as i128) + std::i128::MIN
            } else {
                (u - NEGATED_I128_MIN_U128) as i128
            };
            process.integer(i)
        }
    }
}
// `-i64::MIN` itself overflows `i64`, so negate `MIN + 1` (giving `i64::MAX`),
// widen to `u64`, then add the 1 back. The result is 2^63 — the number of
// negative `i64` values — which makes the fold in `unique_integer` a true
// bijection from `u64` onto `i64`. (The previous `- 1` produced 2^63 - 2,
// which let distinct counter values collide at the range extremes.)
const NEGATED_I64_MIN_U64: u64 = ((-(std::i64::MIN + 1)) as u64) + 1;
// Same construction for `i128`: the result is 2^127.
const NEGATED_I128_MIN_U128: u128 = ((-(std::i128::MIN + 1)) as u128) + 1;
lazy_static! {
    // Node-wide counter backing `monotonic` unique integers; starts at 0.
    static ref MONOTONIC: AtomicU64 = Default::default();
}
|
use std::io;
/// Simple arithmetic helpers over `i128` plus console I/O.
pub struct Operator {}

impl Operator {
    /// Returns the sum of `x` and `y`.
    pub fn add(x: i128, y: i128) -> i128 {
        x + y
    }

    /// Returns the product of `x` and `y`.
    pub fn multiply(x: i128, y: i128) -> i128 {
        x * y
    }

    /// Prompts on stdout and reads one `i128` from stdin.
    ///
    /// Panics if stdin cannot be read or the line is not a valid integer.
    pub fn input() -> i128 {
        println!("Provide input: ");
        let mut line = String::new();
        io::stdin().read_line(&mut line).expect(":(");
        line.trim().parse().expect("invalid input")
    }

    /// Identity; returns its argument unchanged.
    pub fn output(x: i128) -> i128 {
        x
    }
}
|
//! Reverse unit propagation redundancy checks.
use std::ops::Range;
use partial_ref::{partial, PartialRef};
use varisat_formula::{lit::LitIdx, Lit};
use varisat_internal_proof::ClauseHash;
use crate::{
clauses::{UnitClause, UnitId},
context::{parts::*, Context},
hash::rehash,
variables::ensure_var,
CheckerError,
};
/// Propagation of the RUP check.
struct TraceItem {
    // Id of the clause that propagated (or became conflicting).
    id: u64,
    // Range into `RupCheck::trace_edges` listing the trace positions this item depends on.
    edges: Range<usize>,
    // True until this item is found to be needed for deriving the conflict.
    unused: bool,
}
/// Scratch state for reverse unit propagation (RUP) redundancy checks.
#[derive(Default)]
pub struct RupCheck {
    /// Stores overwritten values in `unit_clauses` to undo assignments.
    trail: Vec<(Lit, Option<UnitClause>)>,
    /// Involved clauses during the last check.
    trace: Vec<TraceItem>,
    /// Edges of the trace implication graph.
    trace_edges: Vec<LitIdx>,
    /// Just the ids of `trace`.
    pub trace_ids: Vec<u64>,
}
/// Check whether a clause is implied by clauses of the given hashes.
///
/// `lits` must be sorted and free of duplicates.
pub fn check_clause_with_hashes<'a>(
    mut ctx: partial!(
        Context<'a>,
        mut ClauseHasherP,
        mut ClausesP,
        mut ProcessingP<'a>,
        mut RupCheckP,
        mut VariablesP,
        CheckerStateP,
    ),
    lits: &[Lit],
    propagation_hashes: &[ClauseHash],
) -> Result<(), CheckerError> {
    // Hashes depend on solver variable names; refresh them if renames are pending.
    if ctx.part(ClauseHasherP).rename_in_buffered_solver_var_names {
        // TODO partial rehashing?
        rehash(ctx.borrow());
    }
    let (rup, mut ctx) = ctx.split_part_mut(RupCheckP);
    rup.trace.clear();
    rup.trace_edges.clear();
    let mut rup_is_unsat = false;
    // The trail must have been fully undone by the previous check.
    assert!(rup.trail.is_empty());
    for &lit in lits.iter() {
        ensure_var(ctx.borrow(), lit.var());
    }
    let (clauses, ctx) = ctx.split_part_mut(ClausesP);
    // If some literal is already true as a global unit clause, the checked
    // clause is subsumed by that unit; the trace is just that unit's id.
    for &lit in lits.iter() {
        if let Some((true, unit)) = clauses.lit_value(lit) {
            if let UnitId::Global(id) = unit.id {
                rup.trace_ids.clear();
                rup.trace_ids.push(id);
                return Ok(());
            } else {
                unreachable!("unexpected non global unit");
            }
        }
    }
    // Set all lits to false
    // (saving the previous assignments on the trail so they can be undone).
    for &lit in lits.iter() {
        rup.trail.push((lit, clauses.unit_clauses[lit.index()]));
        clauses.unit_clauses[lit.index()] = Some(UnitClause {
            value: lit.is_negative(),
            id: UnitId::InClause,
        });
    }
    // Propagate using only clauses with one of the given hashes until a
    // conflict (a clause with all literals false) is found.
    'hashes: for &hash in propagation_hashes.iter() {
        let candidates = match clauses.clauses.get(&hash) {
            Some(candidates) if !candidates.is_empty() => candidates,
            _ => {
                return Err(CheckerError::check_failed(
                    ctx.part(CheckerStateP).step,
                    format!("no clause found for hash {:x}", hash),
                ))
            }
        };
        // Check if any clause matching the hash propagates
        'candidates: for clause in candidates.iter() {
            let mut unassigned_count = 0;
            let mut unassigned_lit = None;
            let range_begin = rup.trace_edges.len();
            for &lit in clause.lits.slice(&clauses.literal_buffer).iter() {
                match clauses.lit_value(lit) {
                    Some((true, _)) => {
                        // Clause is satisfied; it cannot propagate anything.
                        continue 'candidates;
                    }
                    Some((false, unit)) => match unit.id {
                        UnitId::Global(id) => {
                            // A global unit enters the trace as its own item
                            // with no incoming edges; re-tag the assignment so
                            // later clauses reference its trace position.
                            rup.trail.push((lit, clauses.unit_clauses[lit.index()]));
                            clauses.unit_clauses[lit.index()] = Some(UnitClause {
                                value: lit.is_negative(),
                                id: UnitId::TracePos(rup.trace.len()),
                            });
                            rup.trace_edges.push(rup.trace.len() as LitIdx);
                            rup.trace.push(TraceItem {
                                id,
                                edges: 0..0,
                                unused: true,
                            });
                        }
                        UnitId::TracePos(pos) => {
                            rup.trace_edges.push(pos as LitIdx);
                        }
                        UnitId::InClause => {}
                    },
                    None => {
                        unassigned_count += 1;
                        unassigned_lit = Some(lit),
                    }
                }
            }
            let range = range_begin..rup.trace_edges.len();
            match unassigned_lit {
                None => {
                    // Every literal is false: conflict found, the check succeeds.
                    rup.trace.push(TraceItem {
                        id: clause.id,
                        edges: range,
                        unused: false,
                    });
                    rup_is_unsat = true;
                    break 'hashes;
                }
                Some(lit) if unassigned_count == 1 => {
                    // Exactly one unassigned literal left: the clause propagates it.
                    rup.trail.push((lit, clauses.unit_clauses[lit.index()]));
                    clauses.unit_clauses[lit.index()] = Some(UnitClause {
                        value: lit.is_positive(),
                        id: UnitId::TracePos(rup.trace.len()),
                    });
                    rup.trace.push(TraceItem {
                        id: clause.id,
                        edges: range,
                        unused: true,
                    });
                }
                _ => (),
            }
        }
    }
    if rup_is_unsat && !ctx.part(ProcessingP).processors.is_empty() {
        // Walk the trace backwards, marking every item reachable from the
        // conflict as used.
        for i in (0..rup.trace.len()).rev() {
            if !rup.trace[i].unused {
                let edges = rup.trace[i].edges.clone();
                for &edge in rup.trace_edges[edges].iter() {
                    rup.trace[edge as usize].unused = false;
                }
            }
        }
        rup.trace_ids.clear();
        rup.trace_ids.extend(rup.trace.iter().map(|trace| trace.id));
    }
    // Undo temporary assignments
    for (lit, value) in rup.trail.drain(..).rev() {
        clauses.unit_clauses[lit.index()] = value;
    }
    if rup_is_unsat {
        Ok(())
    } else {
        Err(CheckerError::check_failed(
            ctx.part(CheckerStateP).step,
            format!("AT check failed for {:?}", lits),
        ))
    }
}
|
use proconio::input;
fn main() {
    input! {
        y: usize,
    };
    // The answer is the smallest year >= y that is congruent to 2 modulo 4.
    let ans = match y % 4 {
        0 => y + 2,
        1 => y + 1,
        2 => y,
        _ => y + 3,
    };
    println!("{}", ans);
}
|
use num::traits::{AsPrimitive, FromPrimitive, PrimInt};
/// Returns the decimal digits of `n`, least significant first.
///
/// Note: yields an empty vector for `n == 0`.
pub fn digits<Int>(mut n: Int) -> Vec<u8>
where
    Int: PrimInt + AsPrimitive<u8> + FromPrimitive,
{
    let base = Int::from_usize(10usize).unwrap();
    let mut out = vec![];
    while n != Int::zero() {
        out.push((n % base).as_());
        n = n / base;
    }
    out
}
/// Returns an iterator over the decimal digits of `n`, least significant first.
pub fn digits_iterator<Int>(n: Int) -> impl Iterator<Item = u8>
where
    Int: PrimInt + AsPrimitive<u8> + FromPrimitive,
{
    DigitIter { n }
}
/// Iterator state for `digits_iterator`; `n` holds the digits not yet yielded.
pub struct DigitIter<Int> {
    n: Int,
}
impl<Int> Iterator for DigitIter<Int>
where
    Int: PrimInt + AsPrimitive<u8> + FromPrimitive,
{
    type Item = u8;

    /// Yields the next (least significant) digit, or `None` once zero is reached.
    fn next(&mut self) -> Option<Self::Item> {
        let base = Int::from_usize(10usize).unwrap();
        if self.n == Int::zero() {
            return None;
        }
        let digit = (self.n % base).as_();
        self.n = self.n / base;
        Some(digit)
    }
}
/// Builds a number from digits supplied most significant first.
pub fn from_digits<Int, I, B>(digits: I) -> Int
where
    Int: PrimInt + AsPrimitive<u8> + FromPrimitive,
    I: IntoIterator<Item = B>,
    B: core::borrow::Borrow<u8>,
{
    let base = Int::from_usize(10usize).unwrap();
    digits.into_iter().fold(Int::zero(), |acc, digit| {
        acc * base + Int::from_u8(*digit.borrow()).unwrap()
    })
}
/// Returns `n` with its decimal digits reversed (e.g. 123 -> 321): the
/// least-significant-first digit stream is re-read as most-significant-first.
pub fn reverse_digits<Int>(n: Int) -> Int
where
    Int: PrimInt + AsPrimitive<u8> + FromPrimitive,
{
    from_digits(digits_iterator(n))
}
/// Checks whether the decimal representation of `n` reads the same in both directions.
pub fn is_palindrome<Int>(n: Int) -> bool
where
    Int: PrimInt + AsPrimitive<u8> + FromPrimitive,
{
    let forward = digits(n);
    digits_iterator(n)
        .zip(forward.iter().rev())
        .all(|(lo, &hi)| lo == hi)
}
/// Concatenates the decimal representations of two numbers: n, m -> nm.
///
/// E.g. `concat_numbers(12, 34) == 1234` and `concat_numbers(5, 0) == 50`.
pub fn concat_numbers(n: usize, m: usize) -> usize {
    // Count the decimal digits of m.
    let mut k = m;
    let mut i = 0;
    while k > 0 {
        k /= 10;
        i += 1;
    }
    // m == 0 still occupies one digit ("50", not "5"); the old loop counted zero
    // digits and returned n unchanged.
    if i == 0 {
        i = 1;
    }
    n * (10usize.pow(i)) + m
}
/// Sums the decimal digits of `n` (returns zero for `n <= 0`).
pub fn digit_sum<Int>(mut n: Int) -> Int
where
    Int: PrimInt + FromPrimitive,
{
    let base = Int::from(10u8).unwrap();
    let mut total = Int::zero();
    while n > Int::zero() {
        total = total + n % base;
        n = n / base;
    }
    total
}
|
#[doc = "Reader of register OPTCR"]
pub type R = crate::R<u32, super::OPTCR>;
#[doc = "Writer for register OPTCR"]
pub type W = crate::W<u32, super::OPTCR>;
#[doc = "Register OPTCR `reset()`'s with value 0"]
impl crate::ResetValue for super::OPTCR {
    type Type = u32;
    // Value the register holds after reset.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `OPTLOCK`"]
pub type OPTLOCK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OPTLOCK`"]
pub struct OPTLOCK_W<'a> {
    w: &'a mut W,
}
impl<'a> OPTLOCK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the field's bit, then OR in the masked new value.
        const OFFSET: u32 = 0;
        const MASK: u32 = 0x01;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= ((value as u32) & MASK) << OFFSET;
        self.w
    }
}
#[doc = "Reader of field `OPTSTART`"]
pub type OPTSTART_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OPTSTART`"]
pub struct OPTSTART_W<'a> {
    w: &'a mut W,
}
impl<'a> OPTSTART_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the field's bit, then OR in the masked new value.
        const OFFSET: u32 = 1;
        const MASK: u32 = 0x01;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= ((value as u32) & MASK) << OFFSET;
        self.w
    }
}
#[doc = "Reader of field `MER`"]
pub type MER_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MER`"]
pub struct MER_W<'a> {
    w: &'a mut W,
}
impl<'a> MER_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the field's bit, then OR in the masked new value.
        const OFFSET: u32 = 4;
        const MASK: u32 = 0x01;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= ((value as u32) & MASK) << OFFSET;
        self.w
    }
}
#[doc = "Reader of field `OPTCHANGEERRIE`"]
pub type OPTCHANGEERRIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OPTCHANGEERRIE`"]
pub struct OPTCHANGEERRIE_W<'a> {
    w: &'a mut W,
}
impl<'a> OPTCHANGEERRIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the field's bit, then OR in the masked new value.
        const OFFSET: u32 = 30;
        const MASK: u32 = 0x01;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= ((value as u32) & MASK) << OFFSET;
        self.w
    }
}
#[doc = "Reader of field `SWAP_BANK`"]
pub type SWAP_BANK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SWAP_BANK`"]
pub struct SWAP_BANK_W<'a> {
    w: &'a mut W,
}
impl<'a> SWAP_BANK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the field's bit, then OR in the masked new value.
        const OFFSET: u32 = 31;
        const MASK: u32 = 0x01;
        self.w.bits &= !(MASK << OFFSET);
        self.w.bits |= ((value as u32) & MASK) << OFFSET;
        self.w
    }
}
impl R {
    /// Returns bit `offset` of the register value as a bool.
    #[inline(always)]
    fn flag_at(&self, offset: u32) -> bool {
        (self.bits >> offset) & 0x01 != 0
    }
    #[doc = "Bit 0 - FLASH_OPTCR lock option configuration bit"]
    #[inline(always)]
    pub fn optlock(&self) -> OPTLOCK_R {
        OPTLOCK_R::new(self.flag_at(0))
    }
    #[doc = "Bit 1 - Option byte start change option configuration bit"]
    #[inline(always)]
    pub fn optstart(&self) -> OPTSTART_R {
        OPTSTART_R::new(self.flag_at(1))
    }
    #[doc = "Bit 4 - Flash mass erase enable bit"]
    #[inline(always)]
    pub fn mer(&self) -> MER_R {
        MER_R::new(self.flag_at(4))
    }
    #[doc = "Bit 30 - Option byte change error interrupt enable bit"]
    #[inline(always)]
    pub fn optchangeerrie(&self) -> OPTCHANGEERRIE_R {
        OPTCHANGEERRIE_R::new(self.flag_at(30))
    }
    #[doc = "Bit 31 - Bank swapping configuration bit"]
    #[inline(always)]
    pub fn swap_bank(&self) -> SWAP_BANK_R {
        SWAP_BANK_R::new(self.flag_at(31))
    }
}
// Write-proxy accessors: each returns a proxy whose `bit`/`set_bit`/`clear_bit`
// mutate the corresponding field within the buffered register value.
impl W {
    #[doc = "Bit 0 - FLASH_OPTCR lock option configuration bit"]
    #[inline(always)]
    pub fn optlock(&mut self) -> OPTLOCK_W {
        OPTLOCK_W { w: self }
    }
    #[doc = "Bit 1 - Option byte start change option configuration bit"]
    #[inline(always)]
    pub fn optstart(&mut self) -> OPTSTART_W {
        OPTSTART_W { w: self }
    }
    #[doc = "Bit 4 - Flash mass erase enable bit"]
    #[inline(always)]
    pub fn mer(&mut self) -> MER_W {
        MER_W { w: self }
    }
    #[doc = "Bit 30 - Option byte change error interrupt enable bit"]
    #[inline(always)]
    pub fn optchangeerrie(&mut self) -> OPTCHANGEERRIE_W {
        OPTCHANGEERRIE_W { w: self }
    }
    #[doc = "Bit 31 - Bank swapping configuration bit"]
    #[inline(always)]
    pub fn swap_bank(&mut self) -> SWAP_BANK_W {
        SWAP_BANK_W { w: self }
    }
}
|
//= {
//= "output": {
//= "2": [
//= "",
//= true
//= ],
//= "1": [
//= "Resources \\{ mem: 20971520, cpu: 0\\.05 \\}\n",
//= true
//= ]
//= },
//= "children": [
//= {
//= "output": {
//= "2": [
//= "",
//= true
//= ],
//= "1": [
//= "hi Resources \\{ mem: 20971520, cpu: 0\\.001 \\}\n",
//= true
//= ]
//= },
//= "children": [],
//= "exit": "Success"
//= },
//= {
//= "output": {
//= "1": [
//= "hi Resources \\{ mem: 20971521, cpu: 0\\.001 \\}\n",
//= true
//= ],
//= "2": [
//= "",
//= true
//= ]
//= },
//= "children": [],
//= "exit": "Success"
//= },
//= {
//= "output": {
//= "2": [
//= "",
//= true
//= ],
//= "1": [
//= "hi Resources \\{ mem: 20971522, cpu: 0\\.001 \\}\n",
//= true
//= ]
//= },
//= "children": [],
//= "exit": "Success"
//= },
//= {
//= "output": {
//= "2": [
//= "",
//= true
//= ],
//= "1": [
//= "hi Resources \\{ mem: 20971523, cpu: 0\\.001 \\}\n",
//= true
//= ]
//= },
//= "children": [],
//= "exit": "Success"
//= }
//= ],
//= "exit": "Success"
//= }
#![deny(warnings, deprecated)]
extern crate constellation;
#[macro_use]
extern crate serde_closure;
use constellation::*;
fn main() {
    // Register this process with the constellation runtime, reserving 20 MiB of
    // memory (other resource fields keep their defaults).
    init(Resources {
        mem: 20 * 1024 * 1024,
        ..Resources::default()
    });
    println!("{:?}", resources());
    // Spawn four children, each allotted 20 MiB + i bytes so each child's
    // printed resources line is distinct (matched by the `//=` header above).
    for i in 0..4 {
        let _pid = spawn(
            Resources {
                mem: 20 * 1024 * 1024 + i,
                cpu: 0.001,
            },
            // FnOnce! builds a serializable closure; `i` is captured by value so
            // it can be shipped to the child process.
            FnOnce!([i] move |_parent| {
                assert_eq!(resources().mem, 20 * 1024 * 1024 + i);
                println!("hi {:?}", resources());
            }),
        )
        .expect("SPAWN FAILED");
    }
}
|
//! Ready
//!
//! Check readiness of an InfluxDB instance at startup
use reqwest::{Method, StatusCode};
use snafu::ResultExt;
use crate::{Client, Http, RequestError, ReqwestProcessing};
impl Client {
    /// Get the readiness of an instance at startup
    pub async fn ready(&self) -> Result<bool, RequestError> {
        let ready_url = format!("{}/ready", self.url);
        let response = self
            .request(Method::GET, &ready_url)
            .send()
            .await
            .context(ReqwestProcessing)?;
        let status = response.status();
        if status == StatusCode::OK {
            Ok(true)
        } else {
            // Non-OK: surface the status together with the response body.
            let text = response.text().await.context(ReqwestProcessing)?;
            Http { status, text }.fail()?
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use mockito::mock;
    #[tokio::test]
    async fn ready() {
        // Stub GET /ready; `assert()` verifies the endpoint was actually hit.
        let mock_server = mock("GET", "/ready").create();
        let client = Client::new(&mockito::server_url(), "org", "");
        let _result = client.ready().await;
        mock_server.assert();
    }
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use std::sync::Arc;
use common_catalog::table::Table;
use common_catalog::table_context::TableContext;
use common_exception::ErrorCode;
use common_exception::Result;
use common_expression::types::StringType;
use common_expression::utils::FromData;
use common_expression::DataBlock;
use common_expression::TableDataType;
use common_expression::TableField;
use common_expression::TableSchemaRefExt;
use common_meta_app::schema::TableIdent;
use common_meta_app::schema::TableInfo;
use common_meta_app::schema::TableMeta;
use common_metrics::MetricValue;
use common_storages_fuse::metrics_reset;
use crate::SyncOneBlockSystemTable;
use crate::SyncSystemTable;
/// `system.metrics` table exposing the process's Prometheus metrics as rows.
pub struct MetricsTable {
    table_info: TableInfo,
}
impl SyncSystemTable for MetricsTable {
    const NAME: &'static str = "system.metrics";

    fn get_table_info(&self) -> &TableInfo {
        &self.table_info
    }

    /// Dumps every registered metric sample into one block with the columns
    /// (metric, kind, labels, value), all rendered as strings.
    fn get_full_data(&self, _: Arc<dyn TableContext>) -> Result<DataBlock> {
        let prometheus_handle = common_metrics::try_handle().ok_or_else(|| {
            ErrorCode::InitPrometheusFailure("Prometheus recorder is not initialized yet.")
        })?;
        let samples = common_metrics::dump_metric_samples(prometheus_handle)?;

        let capacity = samples.len();
        let mut names: Vec<Vec<u8>> = Vec::with_capacity(capacity);
        let mut kinds: Vec<Vec<u8>> = Vec::with_capacity(capacity);
        let mut label_cols: Vec<Vec<u8>> = Vec::with_capacity(capacity);
        let mut value_cols: Vec<Vec<u8>> = Vec::with_capacity(capacity);
        for sample in samples.into_iter() {
            names.push(sample.name.clone().into_bytes());
            kinds.push(sample.value.kind().into_bytes());
            label_cols.push(self.display_sample_labels(&sample.labels)?.into_bytes());
            value_cols.push(self.display_sample_value(&sample.value)?.into_bytes());
        }

        Ok(DataBlock::new_from_columns(vec![
            StringType::from_data(names),
            StringType::from_data(kinds),
            StringType::from_data(label_cols),
            StringType::from_data(value_cols),
        ]))
    }

    /// Truncating the metrics table resets all metric counters.
    fn truncate(&self, _ctx: Arc<dyn TableContext>) -> Result<()> {
        metrics_reset();
        Ok(())
    }
}
impl MetricsTable {
pub fn create(table_id: u64) -> Arc<dyn Table> {
let schema = TableSchemaRefExt::create(vec![
TableField::new("metric", TableDataType::String),
TableField::new("kind", TableDataType::String),
TableField::new("labels", TableDataType::String),
TableField::new("value", TableDataType::String),
]);
let table_info = TableInfo {
desc: "'system'.'metrics'".to_string(),
name: "metrics".to_string(),
ident: TableIdent::new(table_id, 0),
meta: TableMeta {
schema,
engine: "SystemMetrics".to_string(),
..Default::default()
},
..Default::default()
};
SyncOneBlockSystemTable::create(MetricsTable { table_info })
}
fn display_sample_labels(&self, labels: &HashMap<String, String>) -> Result<String> {
serde_json::to_string(labels).map_err(|err| {
ErrorCode::Internal(format!(
"Dump prometheus metrics on display labels: {}",
err
))
})
}
fn display_sample_value(&self, value: &MetricValue) -> Result<String> {
match value {
MetricValue::Counter(v) => serde_json::to_string(v),
MetricValue::Gauge(v) => serde_json::to_string(v),
MetricValue::Untyped(v) => serde_json::to_string(v),
MetricValue::Histogram(v) => serde_json::to_string(v),
MetricValue::Summary(v) => serde_json::to_string(v),
}
.map_err(|err| {
ErrorCode::Internal(format!(
"Dump prometheus metrics failed on display values: {}",
err
))
})
}
}
|
use std::ops::{Add, Mul, Sub};
use num::traits::{Float, ToPrimitive};
use super::{Channel, Colorspace, clamp};
pub use self::imp::ColorARGB;
#[cfg(target_endian = "little")]
mod imp {
    // Field order is b,g,r,a on little-endian targets (the big-endian variant
    // below reverses it) — presumably so the `repr(C)` layout of the u8 color
    // matches a packed 0xAARRGGBB u32 in memory; confirm before relying on it.
    #[repr(C)]
    #[derive(Debug, Copy)]
    pub struct ColorARGB<T> {
        pub b: T,
        pub g: T,
        pub r: T,
        pub a: T,
    }
}
#[cfg(target_endian = "big")]
mod imp {
    // Big-endian counterpart of the little-endian layout above: fields declared
    // a,r,g,b so the same in-memory byte order is obtained on this endianness.
    #[repr(C)]
    #[derive(Debug, Copy)]
    pub struct ColorARGB<T> {
        pub a: T,
        pub r: T,
        pub g: T,
        pub b: T,
    }
}
// Manual impl (rather than derive) so `ColorARGB<T>` is cloneable for any
// `T: Clone`; the struct definition itself only derives `Copy`.
impl<T: Clone> Clone for ColorARGB<T> {
    fn clone(&self) -> ColorARGB<T> {
        ColorARGB {
            a: self.a.clone(),
            r: self.r.clone(),
            g: self.g.clone(),
            b: self.b.clone(),
        }
    }
}
// Maybe later?: ColorARGB<f64>.quantize() -> ColorARGB<usize>
// How do we implement this more generally so that we may have ColorARGB<f64>
impl ColorARGB<f64> {
    /// Converts unit-range channel values to an opaque 8-bit RGB color,
    /// rounding and clamping each channel into `[min, max]`.
    pub fn new_rgb_clamped(r: f64, g: f64, b: f64) -> ColorARGB<u8> {
        let lo: u8 = Channel::min_value();
        let hi: u8 = Channel::max_value();
        let to_byte =
            |c: f64| clamp((c * hi as f64).round() as i32, lo as i32, hi as i32) as u8;
        ColorARGB::new_rgb(to_byte(r), to_byte(g), to_byte(b))
    }
}
impl ColorARGB<u8> {
    /// Unpacks a 0xAARRGGBB value into its four channels.
    pub fn from_packed_argb(color: u32) -> ColorARGB<u8> {
        let byte = |shift: u32| ((color >> shift) & 0xFF) as u8;
        ColorARGB {
            a: byte(24),
            r: byte(16),
            g: byte(8),
            b: byte(0),
        }
    }
    /// Packs the four channels back into a 0xAARRGGBB value.
    pub fn packed(&self) -> u32 {
        ((self.a as u32) << 24)
            | ((self.r as u32) << 16)
            | ((self.g as u32) << 8)
            | (self.b as u32)
    }
}
// Maybe later?: ColorARGB<f64>.quantize() -> ColorARGB<uint>
// How do we implement this more generally so that we may have ColorARGB<f64>
// Maybe later?: ColorARGB<f64>.quantize() -> ColorARGB<usize>
// How do we implement this more generally so that we may have ColorARGB<f64>
impl<T: Channel> ColorARGB<T> {
    /// Constructs a color from explicit alpha, red, green and blue channels.
    pub fn new_argb(a: T, r: T, g: T, b: T) -> ColorARGB<T> {
        ColorARGB { a, r, g, b }
    }
    /// Constructs a fully-opaque color (alpha at the channel maximum).
    #[allow(dead_code)]
    pub fn new_rgb(r: T, g: T, b: T) -> ColorARGB<T> {
        ColorARGB::new_argb(Channel::max_value(), r, g, b)
    }
    /// Opaque white: every channel at its maximum.
    pub fn white() -> ColorARGB<T> {
        ColorARGB::new_rgb(Channel::max_value(), Channel::max_value(), Channel::max_value())
    }
    /// Opaque black: color channels at minimum, alpha at maximum (via `new_rgb`).
    pub fn black() -> ColorARGB<T> {
        ColorARGB::new_rgb(Channel::min_value(), Channel::min_value(), Channel::min_value())
    }
    /// Normalizes every channel into `[0, 1]` as `f64` by dividing by the channel maximum.
    pub fn channel_f64(&self) -> ColorARGB<f64> {
        let max_val: T = Channel::max_value();
        ColorARGB {
            r: self.r.to_f64().unwrap() / max_val.to_f64().unwrap(),
            g: self.g.to_f64().unwrap() / max_val.to_f64().unwrap(),
            b: self.b.to_f64().unwrap() / max_val.to_f64().unwrap(),
            a: self.a.to_f64().unwrap() / max_val.to_f64().unwrap(),
        }
    }
}
/// Component-wise addition delegated to `Channel::add` (for `u8` this saturates
/// at 255, as exercised by the `color_add` test below).
impl<T: Channel> Add for ColorARGB<T> {
    type Output = ColorARGB<T>;
    fn add(self, other: ColorARGB<T>) -> ColorARGB<T> {
        ColorARGB {
            r: Channel::add(self.r, other.r),
            g: Channel::add(self.g, other.g),
            b: Channel::add(self.b, other.b),
            a: Channel::add(self.a, other.a),
        }
    }
}
/// Component-wise subtraction delegated to `Channel::sub`.
/// NOTE(review): presumably clamps at the channel minimum, mirroring `add`'s
/// saturation at the maximum — confirm `Channel::sub`'s behavior.
impl<T: Channel> Sub for ColorARGB<T> {
    type Output = ColorARGB<T>;
    fn sub(self, other: ColorARGB<T>) -> ColorARGB<T> {
        ColorARGB {
            r: Channel::sub(self.r, other.r),
            g: Channel::sub(self.g, other.g),
            b: Channel::sub(self.b, other.b),
            a: Channel::sub(self.a, other.a),
        }
    }
}
/// Component-wise multiplication for float channels (alpha included).
impl<T: Float> Mul for ColorARGB<T> {
    type Output = ColorARGB<T>;
    fn mul(self, other: ColorARGB<T>) -> ColorARGB<T> {
        ColorARGB {
            r: self.r * other.r,
            g: self.g * other.g,
            b: self.b * other.b,
            a: self.a * other.a
        }
    }
}
// Scalar multiplication
/// Scales only the color channels; alpha is deliberately left unchanged
/// (see `color_mul` below: alpha stays 1.0 after scaling by 2.0).
impl<T: Float> Mul<T> for ColorARGB<T> {
    type Output = ColorARGB<T>;
    fn mul(self, other: T) -> ColorARGB<T> {
        ColorARGB {
            r: self.r * other,
            g: self.g * other,
            b: self.b * other,
            a: self.a
        }
    }
}
impl<T> Colorspace for ColorARGB<T> where T: Channel+Copy {
    /// Opaque white: every channel (including alpha) at the maximum.
    fn white() -> Self {
        let max = T::max_value();
        ColorARGB::new_argb(max, max, max, max)
    }
    /// Opaque black: color channels at the minimum, alpha at the maximum.
    fn black() -> Self {
        let min = T::min_value();
        ColorARGB::new_argb(T::max_value(), min, min, min)
    }
}
#[test]
fn color_add() {
    // Plain component-wise addition.
    let foo_color: ColorARGB<u8> = ColorARGB::new_argb(1, 1, 1, 1) +
        ColorARGB::new_argb(2, 2, 2, 2);
    assert_eq!(foo_color.a, 3);
    assert_eq!(foo_color.r, 3);
    assert_eq!(foo_color.g, 3);
    assert_eq!(foo_color.b, 3);
    // u8 channels saturate instead of wrapping: 200 + 200 -> 255.
    let foo_color: ColorARGB<u8> = ColorARGB::new_argb(1, 200, 1, 1) +
        ColorARGB::new_argb(2, 200, 2, 2);
    assert_eq!(foo_color.a, 3);
    assert_eq!(foo_color.r, 255);
    assert_eq!(foo_color.g, 3);
    assert_eq!(foo_color.b, 3);
}
#[test]
fn color_sub() {
    // Component-wise subtraction.
    let foo_color: ColorARGB<u8> = ColorARGB::new_argb(7, 7, 7, 7) -
        ColorARGB::new_argb(2, 2, 2, 2);
    assert_eq!(foo_color.a, 5);
    assert_eq!(foo_color.r, 5);
    assert_eq!(foo_color.g, 5);
    assert_eq!(foo_color.b, 5);
}
#[test]
fn color_mul() {
    // Scalar multiplication scales r/g/b but leaves alpha untouched.
    let foo_color = ColorARGB::<f64>::new_rgb(0.5, 0.0, 0.0) * 2.0;
    assert_eq!(foo_color.a, 1.0);
    assert_eq!(foo_color.r, 1.0);
    assert_eq!(foo_color.g, 0.0);
    assert_eq!(foo_color.b, 0.0);
}
|
#[doc = "Reader of register PRGPIO"]
pub type R = crate::R<u32, super::PRGPIO>;
// One single-bit "peripheral ready" reader per GPIO port (bits 0..=5 = ports A..F).
#[doc = "Reader of field `R0`"]
pub type R0_R = crate::R<bool, bool>;
#[doc = "Reader of field `R1`"]
pub type R1_R = crate::R<bool, bool>;
#[doc = "Reader of field `R2`"]
pub type R2_R = crate::R<bool, bool>;
#[doc = "Reader of field `R3`"]
pub type R3_R = crate::R<bool, bool>;
#[doc = "Reader of field `R4`"]
pub type R4_R = crate::R<bool, bool>;
#[doc = "Reader of field `R5`"]
pub type R5_R = crate::R<bool, bool>;
impl R {
    /// Returns bit `offset` of the register value as a bool.
    #[inline(always)]
    fn ready_bit(&self, offset: u32) -> bool {
        (self.bits >> offset) & 0x01 != 0
    }
    #[doc = "Bit 0 - GPIO Port A Peripheral Ready"]
    #[inline(always)]
    pub fn r0(&self) -> R0_R {
        R0_R::new(self.ready_bit(0))
    }
    #[doc = "Bit 1 - GPIO Port B Peripheral Ready"]
    #[inline(always)]
    pub fn r1(&self) -> R1_R {
        R1_R::new(self.ready_bit(1))
    }
    #[doc = "Bit 2 - GPIO Port C Peripheral Ready"]
    #[inline(always)]
    pub fn r2(&self) -> R2_R {
        R2_R::new(self.ready_bit(2))
    }
    #[doc = "Bit 3 - GPIO Port D Peripheral Ready"]
    #[inline(always)]
    pub fn r3(&self) -> R3_R {
        R3_R::new(self.ready_bit(3))
    }
    #[doc = "Bit 4 - GPIO Port E Peripheral Ready"]
    #[inline(always)]
    pub fn r4(&self) -> R4_R {
        R4_R::new(self.ready_bit(4))
    }
    #[doc = "Bit 5 - GPIO Port F Peripheral Ready"]
    #[inline(always)]
    pub fn r5(&self) -> R5_R {
        R5_R::new(self.ready_bit(5))
    }
}
|
#[ blah= "abc ] " ,
blah2= "abc ] \n \\ "
"
\"
\"
" end of string
] |
use utopia_core::{
math::{Size, Vector2},
widgets::{pod::WidgetPod, TypedWidget, Widget},
Backend,
};
/// Wrapper widget that records scale factors to apply to its child's drawing.
pub struct Scale<T, B: Backend> {
    /// Horizontal scale factor (1.0 = unscaled).
    pub scale_x: f32,
    /// Vertical scale factor (1.0 = unscaled).
    pub scale_y: f32,
    widget: WidgetPod<T, B>,
}
impl<T, B: Backend> Scale<T, B> {
    /// Wraps `widget` with neutral (1.0) scale factors on both axes.
    pub fn new<TW: TypedWidget<T, B> + 'static>(widget: TW) -> Self {
        Self {
            scale_x: 1.,
            scale_y: 1.,
            widget: WidgetPod::new(widget),
        }
    }

    /// Builder-style setter for the horizontal scale factor.
    pub fn x(mut self, scale_x: f32) -> Self {
        self.scale_x = scale_x;
        self
    }

    /// Builder-style setter for the vertical scale factor.
    pub fn y(mut self, scale_y: f32) -> Self {
        self.scale_y = scale_y;
        self
    }
}
/// Draw primitive produced by `Scale`: the child's primitive plus the scale
/// factors and the origin at which it was drawn.
#[derive(Debug)]
pub struct ScaledPrimitive<P> {
    pub scale_x: f32,
    pub scale_y: f32,
    pub origin: Vector2,
    pub primitive: Box<P>,
}
impl<T, B: Backend> Widget<T> for Scale<T, B> {
    type Primitive = ScaledPrimitive<B::Primitive>;
    type Context = B;
    type Reaction = B::EventReaction;
    type Event = B::Event;
    /// Draws the child and wraps its primitive with this widget's scale factors
    /// and the draw origin; the scaling itself is presumably applied by the
    /// backend when it renders the `ScaledPrimitive`.
    fn draw(
        &self,
        origin: utopia_core::math::Vector2,
        size: utopia_core::math::Size,
        data: &T,
    ) -> Self::Primitive {
        let primitive = TypedWidget::<T, B>::draw(&self.widget, origin, size, data);
        ScaledPrimitive {
            scale_x: self.scale_x,
            scale_y: self.scale_y,
            origin,
            primitive: Box::new(primitive),
        }
    }
    // NOTE(review): events are forwarded with unscaled origin/size — confirm the
    // backend compensates for the scale when hit-testing.
    fn event(
        &mut self,
        origin: Vector2,
        size: Size,
        data: &mut T,
        event: Self::Event,
    ) -> Option<Self::Reaction> {
        TypedWidget::<T, B>::event(&mut self.widget, origin, size, data, event)
    }
    // NOTE(review): layout ignores scale_x/scale_y; the reported size is the
    // child's unscaled size — verify this is intended.
    fn layout(
        &mut self,
        bc: &utopia_core::BoxConstraints,
        context: &Self::Context,
        data: &T,
    ) -> utopia_core::math::Size {
        TypedWidget::<T, B>::layout(&mut self.widget, bc, context, data)
    }
}
|
// Celsius to Fahrenheit: repeatedly reads a temperature from stdin and prints
// its Fahrenheit equivalent. Stops when stdin is exhausted.
use std::io; // was missing entirely: `io::stdin()` did not compile without it

/// Converts degrees Celsius to degrees Fahrenheit.
fn celsius_to_fahrenheit(celsius: i32) -> f64 {
    celsius as f64 * 1.8 + 32.0
}

fn main() {
    println!("Enter the Celsius value");
    loop {
        let mut celsius = String::new();
        let bytes_read = io::stdin()
            .read_line(&mut celsius)
            .expect("Failed to read line");
        // `read_line` returns 0 only at EOF; without this check the loop spun
        // forever printing the parse error once input ended.
        if bytes_read == 0 {
            break;
        }
        let celsius: i32 = match celsius.trim().parse() {
            Ok(n) => n,
            Err(_) => {
                println!("Enter a valid number!, Entered value is {}", celsius);
                continue;
            }
        };
        println!(
            "Value of {} celsius is {:.1}",
            celsius,
            celsius_to_fahrenheit(celsius)
        );
    }
}
|
use core::cmp;
use core::convert::TryFrom;
use core::fmt;
use core::marker::PhantomData;
use core::ptr::NonNull;
use typenum::{IsGreaterOrEqual, True, Unsigned};
use crate::internal::Internal;
use crate::pointer::{
self, InvalidNullError,
Marked::{self, Null, Value},
MarkedNonNull, MarkedNonNullable, MarkedPtr,
};
/********** impl Clone ****************************************************************************/
impl<T, N> Clone for MarkedNonNull<T, N> {
    #[inline]
    fn clone(&self) -> Self {
        // `MarkedNonNull` is unconditionally `Copy`, so a clone is a bitwise copy.
        *self
    }
}
/********** impl Copy *****************************************************************************/
// The type only wraps a `NonNull` plus `PhantomData`, so it is `Copy` for any `T`, `N`.
impl<T, N> Copy for MarkedNonNull<T, N> {}
/********** impl inherent *************************************************************************/
impl<T, N> MarkedNonNull<T, N> {
    /// Cast to a pointer of another type.
    #[inline]
    pub const fn cast<U>(self) -> MarkedNonNull<U, N> {
        // The address (and therefore any tag bits stored in it) is preserved;
        // only the pointee type changes.
        MarkedNonNull { inner: self.inner.cast(), _marker: PhantomData }
    }
    /// Creates a new `MarkedNonNull` that is dangling, but well-aligned.
    ///
    /// This is useful for initializing types which lazily allocate, like
    /// `Vec::new` does.
    ///
    /// Note that the pointer value may potentially represent a valid pointer to
    /// a `T`, which means this must not be used as a "not yet initialized"
    /// sentinel value. Types that lazily allocate must track initialization by
    /// some other means.
    #[inline]
    pub const fn dangling() -> Self {
        Self { inner: NonNull::dangling(), _marker: PhantomData }
    }
}
impl<T, N: Unsigned> MarkedNonNull<T, N> {
/// The number of available mark bits for this type.
pub const MARK_BITS: usize = N::USIZE;
/// The bitmask for the lower markable bits.
pub const MARK_MASK: usize = pointer::mark_mask::<T>(Self::MARK_BITS);
/// The bitmask for the (higher) pointer bits.
pub const POINTER_MASK: usize = !Self::MARK_MASK;
/// Returns the inner pointer *as is*, meaning potential tags are not
/// stripped.
#[inline]
pub fn into_non_null(self) -> NonNull<T> {
self.inner
}
/// Converts a marked non-null pointer with `M` potential mark bits to the
/// **same** marked pointer with `N` potential mark bits, requires that
/// `N >= M`.
#[inline]
pub fn convert<M: Unsigned>(other: MarkedNonNull<T, M>) -> Self
where
N: IsGreaterOrEqual<M, Output = True>,
{
Self::from(other.inner)
}
/// Creates a new `MarkedNonNull` from a marked pointer without checking
/// for `null`.
///
/// # Safety
///
/// `ptr` may be marked, but must be be neither an unmarked nor a marked
/// null pointer.
#[inline]
pub unsafe fn new_unchecked(ptr: MarkedPtr<T, N>) -> Self {
Self::from(NonNull::new_unchecked(ptr.inner))
}
/// Creates a new `MarkedNonNull` wrapped in a [`Marked`] if `ptr` is
/// non-null.
pub fn new(ptr: MarkedPtr<T, N>) -> Marked<Self> {
match ptr.decompose() {
(raw, _) if !raw.is_null() => unsafe { Value(Self::new_unchecked(ptr)) },
(_, tag) => Null(tag),
}
}
/// Clears the tag of `self` and returns the same but untagged pointer.
#[inline]
pub fn clear_tag(self) -> Self {
Self::from(self.decompose_non_null())
}
/// Clears the tag of `self` and replaces it with `tag`.
#[inline]
pub fn with_tag(self, tag: usize) -> Self {
Self::compose(self.decompose_non_null(), tag)
}
/// Converts the pointer to the equivalent [`MarkedPtr`].
#[inline]
pub fn into_marked_ptr(self) -> MarkedPtr<T, N> {
MarkedPtr::new(self.inner.as_ptr())
}
/// Composes a new marked non-null pointer from a non-null pointer and a tag
/// value.
#[inline]
pub fn compose(ptr: NonNull<T>, tag: usize) -> Self {
debug_assert_eq!(0, ptr.as_ptr() as usize & Self::MARK_MASK, "`ptr` is not well aligned");
unsafe { Self::from(NonNull::new_unchecked(pointer::compose::<_, N>(ptr.as_ptr(), tag))) }
}
/// Decomposes the marked pointer, returning the separated raw
/// [`NonNull`] pointer and its tag.
#[inline]
pub fn decompose(self) -> (NonNull<T>, usize) {
let (ptr, tag) = pointer::decompose(self.inner.as_ptr() as usize, Self::MARK_BITS);
(unsafe { NonNull::new_unchecked(ptr) }, tag)
}
/// Decomposes the marked pointer, returning only the separated raw pointer.
#[inline]
pub fn decompose_ptr(self) -> *mut T {
pointer::decompose_ptr(self.inner.as_ptr() as usize, Self::MARK_BITS)
}
/// Decomposes the marked pointer, returning only the separated raw
/// [`NonNull`] pointer.
#[inline]
pub fn decompose_non_null(self) -> NonNull<T> {
unsafe {
NonNull::new_unchecked(pointer::decompose_ptr(
self.inner.as_ptr() as usize,
Self::MARK_BITS,
))
}
}
/// Decomposes the marked pointer, returning only the separated tag.
#[inline]
pub fn decompose_tag(self) -> usize {
pointer::decompose_tag::<T>(self.inner.as_ptr() as usize, Self::MARK_BITS)
}
/// Decomposes the marked pointer, dereferences the the raw pointer and
/// returns both the reference and the separated tag.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use e.g. `&*my_ptr.decompose_ptr()`.
///
/// # Safety
///
/// This is unsafe because it cannot verify the validity of the returned
/// pointer.
#[inline]
pub unsafe fn decompose_ref(&self) -> (&T, usize) {
let (ptr, tag) = self.decompose();
(&*ptr.as_ptr(), tag)
}
/// Decomposes the marked pointer, dereferences the the raw pointer and
/// returns both the reference and the separated tag. The returned reference
/// is not bound to the lifetime of the `MarkedNonNull`.
///
/// # Safety
///
/// This is unsafe because it cannot verify the validity of the returned
/// pointer, nor can it ensure that the lifetime `'a` returned is indeed a
/// valid lifetime for the contained data.
#[inline]
pub unsafe fn decompose_ref_unbounded<'a>(self) -> (&'a T, usize) {
let (ptr, tag) = self.decompose();
(&*ptr.as_ptr(), tag)
}
/// Decomposes the marked pointer, mutably dereferences the raw pointer
/// and returns both the mutable reference and the separated tag.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use e.g. `&mut *my_ptr.decompose_ptr()`.
///
/// # Safety
///
/// This is unsafe because it cannot verify the validity of the returned
/// pointer.
#[inline]
pub unsafe fn decompose_mut(&mut self) -> (&mut T, usize) {
    let (ptr, tag) = self.decompose();
    (&mut *ptr.as_ptr(), tag)
}
/// Decomposes the marked pointer, mutably dereferences the raw pointer
/// and returns both the mutable reference and the separated tag. The
/// returned reference is not bound to the lifetime of the `MarkedNonNull`.
///
/// # Safety
///
/// This is unsafe because it cannot verify the validity of the returned
/// pointer, nor can it ensure that the lifetime `'a` returned is indeed a
/// valid lifetime for the contained data.
#[inline]
pub unsafe fn decompose_mut_unbounded<'a>(&mut self) -> (&'a mut T, usize) {
    let (ptr, tag) = self.decompose();
    (&mut *ptr.as_ptr(), tag)
}
/// Decomposes the marked pointer, returning only the de-referenced raw
/// pointer.
///
/// The resulting lifetime is bound to self so this behaves "as if" it were
/// actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use e.g. `&*my_ptr.decompose_ptr()`
/// or [`as_ref_unbounded`][MarkedNonNull::as_ref_unbounded].
///
/// # Safety
///
/// This is unsafe because it cannot verify the validity of the returned
/// pointer.
#[inline]
pub unsafe fn as_ref(&self) -> &T {
    let ptr = self.decompose_non_null();
    &*ptr.as_ptr()
}
/// Decomposes the marked pointer, returning only the de-referenced raw
/// pointer, which is not bound to the lifetime of the `MarkedNonNull`.
///
/// # Safety
///
/// This is unsafe because it cannot verify the validity of the returned
/// pointer, nor can it ensure that the lifetime `'a` returned is indeed a
/// valid lifetime for the contained data.
#[inline]
pub unsafe fn as_ref_unbounded<'a>(self) -> &'a T {
    let ptr = self.decompose_non_null();
    &*ptr.as_ptr()
}
/// Decomposes the marked pointer, returning only the mutably de-referenced
/// raw pointer.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use e.g. `&mut *my_ptr.decompose_ptr()`
/// or [`as_mut_unbounded`][MarkedNonNull::as_mut_unbounded].
///
/// # Safety
///
/// This is unsafe because it cannot verify the validity of the returned
/// pointer.
#[inline]
pub unsafe fn as_mut(&mut self) -> &mut T {
    &mut *self.decompose_non_null().as_ptr()
}
/// Decomposes the marked pointer, returning only the mutably de-referenced
/// raw pointer, which is not bound to the lifetime of the `MarkedNonNull`.
///
/// # Safety
///
/// This is unsafe because it cannot verify the validity of the returned
/// pointer, nor can it ensure that the lifetime `'a` returned is indeed a
/// valid lifetime for the contained data.
#[inline]
pub unsafe fn as_mut_unbounded<'a>(self) -> &'a mut T {
    let ptr = self.decompose_non_null();
    &mut *ptr.as_ptr()
}
}
/********** impl Debug ****************************************************************************/
impl<T, N: Unsigned> fmt::Debug for MarkedNonNull<T, N> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Show the decomposed (pointer, tag) pair rather than the raw composed value.
        let (raw, tag) = self.decompose();
        f.debug_struct("MarkedNonNull")
            .field("ptr", &raw)
            .field("tag", &tag)
            .finish()
    }
}
/********** impl Pointer **************************************************************************/
impl<T, N: Unsigned> fmt::Pointer for MarkedNonNull<T, N> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Format only the address part; the tag is stripped first.
        let ptr = self.decompose_non_null();
        fmt::Pointer::fmt(&ptr, f)
    }
}
/********** impl From *****************************************************************************/
impl<T, N> From<NonNull<T>> for MarkedNonNull<T, N> {
#[inline]
fn from(ptr: NonNull<T>) -> Self {
Self { inner: ptr, _marker: PhantomData }
}
}
impl<'a, T, N: Unsigned> From<&'a T> for MarkedNonNull<T, N> {
    #[inline]
    fn from(reference: &'a T) -> Self {
        // References are always non-null, so this conversion cannot fail.
        let ptr = NonNull::from(reference);
        Self::from(ptr)
    }
}
impl<'a, T, N: Unsigned> From<&'a mut T> for MarkedNonNull<T, N> {
    #[inline]
    fn from(reference: &'a mut T) -> Self {
        // Mutable references are always non-null, so this conversion cannot fail.
        let ptr = NonNull::from(reference);
        Self::from(ptr)
    }
}
/********** impl TryFrom **************************************************************************/
impl<T, N: Unsigned> TryFrom<MarkedPtr<T, N>> for MarkedNonNull<T, N> {
    type Error = InvalidNullError;
    #[inline]
    fn try_from(ptr: MarkedPtr<T, N>) -> Result<Self, Self::Error> {
        let (raw, _) = ptr.decompose();
        if raw.is_null() {
            Err(InvalidNullError)
        } else {
            // SAFETY: the decomposed pointer was just checked to be non-null.
            unsafe { Ok(MarkedNonNull::new_unchecked(ptr)) }
        }
    }
}
/********** impl PartialEq ************************************************************************/
impl<T, N> PartialEq for MarkedNonNull<T, N> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        // Equality compares the full composed value (pointer and tag bits).
        self.inner.eq(&other.inner)
    }
}
impl<T, N> PartialEq<MarkedPtr<T, N>> for MarkedNonNull<T, N> {
    #[inline]
    fn eq(&self, other: &MarkedPtr<T, N>) -> bool {
        // Compare the raw composed pointer values across the two marked types.
        let lhs = self.inner.as_ptr();
        lhs == other.inner
    }
}
/********** impl PartialOrd ***********************************************************************/
impl<T, N> PartialOrd for MarkedNonNull<T, N> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        // Ordering is on the full composed value (pointer and tag bits).
        PartialOrd::partial_cmp(&self.inner, &other.inner)
    }
}
impl<T, N> PartialOrd<MarkedPtr<T, N>> for MarkedNonNull<T, N> {
    #[inline]
    fn partial_cmp(&self, other: &MarkedPtr<T, N>) -> Option<cmp::Ordering> {
        // Compare the raw composed pointer values across the two marked types.
        let lhs = self.inner.as_ptr();
        lhs.partial_cmp(&other.inner)
    }
}
/********** impl Eq *******************************************************************************/
// Equality is delegated to the inner `NonNull` (see `PartialEq` above), which
// is a total equivalence relation, so `Eq` holds with no extra methods.
impl<T, N> Eq for MarkedNonNull<T, N> {}
/********** impl Ord ******************************************************************************/
// Total order over the composed value (pointer address including tag bits),
// delegated to the inner `NonNull`.
impl<T, N> Ord for MarkedNonNull<T, N> {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        self.inner.cmp(&other.inner)
    }
}
/********** impl NonNullable **********************************************************************/
// `MarkedNonNull` is trivially convertible into itself for the generic
// `MarkedNonNullable` abstraction.
impl<T, N: Unsigned> MarkedNonNullable for MarkedNonNull<T, N> {
    type Item = T;
    type MarkBits = N;
    #[inline]
    fn into_marked_non_null(self) -> MarkedNonNull<Self::Item, Self::MarkBits> {
        self
    }
}
/********** impl Internal *************************************************************************/
// Marker impl: opts `MarkedNonNull` into the crate's sealed `Internal` trait.
impl<T, N: Unsigned> Internal for MarkedNonNull<T, N> {}
#[cfg(test)]
mod tests {
    use std::ptr;
    use typenum::U2;
    use crate::align::Aligned4;
    // Concrete aliases with 2 mark bits; `Aligned4` provides the 4-byte
    // alignment needed to keep the two low bits free for tagging.
    type MarkedPtr<T> = crate::pointer::MarkedPtr<T, U2>;
    type MarkedNonNull<T> = crate::pointer::MarkedNonNull<T, U2>;
    #[test]
    fn new() {
        let reference = &mut Aligned4(1);
        // An untagged pointer round-trips with tag 0.
        let unmarked = MarkedPtr::new(reference);
        let marked = MarkedNonNull::new(unmarked);
        assert_eq!(unsafe { marked.unwrap_value().decompose_ref() }, (&Aligned4(1), 0));
        // Composing with the maximum 2-bit tag (0b11) preserves both parts.
        let marked = MarkedNonNull::new(MarkedPtr::compose(reference, 0b11));
        assert_eq!(unsafe { marked.unwrap_value().decompose_ref() }, (&Aligned4(1), 0b11));
        // A null pointer is detected as null even when it carries a tag,
        // and the tag is still recoverable.
        let null: *mut Aligned4<i32> = ptr::null_mut();
        let marked = MarkedNonNull::new(MarkedPtr::compose(null, 0b11));
        assert!(marked.is_null());
        assert_eq!(marked.unwrap_null(), 0b11);
        let marked = MarkedNonNull::new(MarkedPtr::compose(null, 0));
        assert!(marked.is_null());
    }
}
|
use {
alloc::sync::Arc,
core::mem::size_of,
zircon_object::{object::KernelObject, util::kcounter::*, vm::*},
};
/// Create kcounter VMOs from kernel memory.
/// Return (KCOUNTER_NAMES_VMO, KCOUNTER_VMO).
#[cfg(target_os = "none")]
pub fn create_kcounter_vmo() -> (Arc<VmObject>, Arc<VmObject>) {
    const HEADER_SIZE: usize = size_of::<KCounterVmoHeader>();
    const DESC_SIZE: usize = size_of::<KCounterDescItem>();
    // Descriptor table: one fixed-size entry per registered kcounter.
    let descriptors = KCounterDescriptorArray::get();
    let counter_table_size = descriptors.0.len() * DESC_SIZE;
    let counter_name_vmo = VmObject::new_paged(pages(counter_table_size + HEADER_SIZE));
    let header = KCounterVmoHeader {
        magic: KCOUNTER_MAGIC,
        max_cpu: 1,
        counter_table_size,
    };
    // SAFETY: `KCounterVmoHeader` is `repr(C)` with plain integer fields, so
    // reinterpreting it as a byte array of its exact size is well-defined.
    let serde_header: [u8; HEADER_SIZE] = unsafe { core::mem::transmute(header) };
    // Layout of the names VMO: header first, then the descriptor table.
    counter_name_vmo.write(0, &serde_header).unwrap();
    for (i, descriptor) in descriptors.0.iter().enumerate() {
        // SAFETY: `KCounterDescItem` is `repr(C)` ([u8; 56] + u64 enum), so the
        // same byte-reinterpretation argument as for the header applies.
        let serde_counter: [u8; DESC_SIZE] =
            unsafe { core::mem::transmute(KCounterDescItem::from(descriptor)) };
        counter_name_vmo
            .write(HEADER_SIZE + i * DESC_SIZE, &serde_counter)
            .unwrap();
    }
    counter_name_vmo.set_name("counters/desc");
    let kcounters_vmo = {
        // Linker-provided symbols delimiting the kcounter arena in kernel memory.
        extern "C" {
            fn kcounters_arena_start();
            fn kcounters_arena_end();
        }
        use kernel_hal::PageTableTrait;
        let mut pgtable = kernel_hal::PageTable::current();
        // Translate the arena's virtual address to physical so it can be
        // exposed as a physical VMO.
        let paddr = pgtable.query(kcounters_arena_start as usize).unwrap();
        // The physical VMO below covers exactly one page, so the whole arena
        // must fit in a single page.
        assert_eq!(
            kcounters_arena_start as usize / PAGE_SIZE,
            kcounters_arena_end as usize / PAGE_SIZE,
            "all kcounters must in the same page"
        );
        VmObject::new_physical(paddr, 1)
    };
    kcounters_vmo.set_name("counters/arena");
    (counter_name_vmo, kcounters_vmo)
}
/// Create kcounter VMOs.
/// NOTE: kcounter is not supported in libos.
#[cfg(not(target_os = "none"))]
pub fn create_kcounter_vmo() -> (Arc<VmObject>, Arc<VmObject>) {
    const HEADER_SIZE: usize = size_of::<KCounterVmoHeader>();
    // A single-page VMO holding just the header; the descriptor table is empty.
    let desc_vmo = VmObject::new_paged(1);
    let header = KCounterVmoHeader {
        magic: KCOUNTER_MAGIC,
        max_cpu: 1,
        counter_table_size: 0,
    };
    // SAFETY: `KCounterVmoHeader` is `repr(C)` with plain integer fields, so
    // reinterpreting it as a byte array of its exact size is well-defined.
    let header_bytes: [u8; HEADER_SIZE] = unsafe { core::mem::transmute(header) };
    desc_vmo.write(0, &header_bytes).unwrap();
    desc_vmo.set_name("counters/desc");
    // Empty single-page arena; no counters are ever written in libos mode.
    let arena_vmo = VmObject::new_paged(1);
    arena_vmo.set_name("counters/arena");
    (desc_vmo, arena_vmo)
}
/// One fixed-size entry in the kcounter descriptor table VMO.
/// `repr(C)` so it can be transmuted to raw bytes for writing into the VMO.
#[repr(C)]
struct KCounterDescItem {
    // Counter name, zero-padded / truncated to 56 bytes.
    name: [u8; 56],
    type_: KCounterType,
}
/// Counter kind tag stored in each descriptor entry.
/// `repr(u64)` fixes the on-disk/in-VMO size of the discriminant.
#[repr(u64)]
enum KCounterType {
    Sum = 1,
}
impl From<&KCounterDescriptor> for KCounterDescItem {
    /// Converts a kernel counter descriptor into its fixed-size VMO table
    /// entry, truncating the name to fit the 56-byte field if necessary.
    fn from(desc: &KCounterDescriptor) -> Self {
        let mut name = [0u8; 56];
        let bytes = desc.name.as_bytes();
        // Derive the copy limit from the buffer itself instead of repeating
        // the magic constant 56 (keeps the two in sync if the field grows).
        let length = bytes.len().min(name.len());
        name[..length].copy_from_slice(&bytes[..length]);
        KCounterDescItem {
            name,
            type_: KCounterType::Sum,
        }
    }
}
/// Header written at offset 0 of the kcounter names VMO.
/// `repr(C)` so it can be transmuted to raw bytes for writing into the VMO.
#[repr(C)]
struct KCounterVmoHeader {
    // Must equal `KCOUNTER_MAGIC` for readers to accept the VMO.
    magic: u64,
    max_cpu: u64,
    // Total size in bytes of the descriptor table that follows the header.
    counter_table_size: usize,
}
/// Magic value identifying a kcounter names VMO (checked by consumers of the header).
const KCOUNTER_MAGIC: u64 = 1_547_273_975;
|
#[doc = r"Value to write to the register"]
pub struct W {
    // Raw register value accumulated by the field proxies before the write.
    bits: u32,
}
impl super::ICR {
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Start from the reset value, let the closure set individual fields,
        // then commit the accumulated bits with a single register store.
        let mut w = W {
            bits: Self::reset_value(),
        };
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = r"Proxy"]
pub struct _SSI_ICR_RORICW<'a> {
    w: &'a mut W,
}
impl<'a> _SSI_ICR_RORICW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of RORIC within the ICR register.
        const OFFSET: u32 = 0;
        let mask = 1 << OFFSET;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 1) << OFFSET);
        self.w
    }
}
#[doc = r"Proxy"]
pub struct _SSI_ICR_RTICW<'a> {
    w: &'a mut W,
}
impl<'a> _SSI_ICR_RTICW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of RTIC within the ICR register.
        const OFFSET: u32 = 1;
        let mask = 1 << OFFSET;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 1) << OFFSET);
        self.w
    }
}
#[doc = r"Proxy"]
pub struct _SSI_ICR_DMARXICW<'a> {
    w: &'a mut W,
}
impl<'a> _SSI_ICR_DMARXICW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of DMARXIC within the ICR register.
        const OFFSET: u32 = 4;
        let mask = 1 << OFFSET;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 1) << OFFSET);
        self.w
    }
}
#[doc = r"Proxy"]
pub struct _SSI_ICR_DMATXICW<'a> {
    w: &'a mut W,
}
impl<'a> _SSI_ICR_DMATXICW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of DMATXIC within the ICR register.
        const OFFSET: u32 = 5;
        let mask = 1 << OFFSET;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 1) << OFFSET);
        self.w
    }
}
#[doc = r"Proxy"]
pub struct _SSI_ICR_EOTICW<'a> {
    w: &'a mut W,
}
impl<'a> _SSI_ICR_EOTICW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of EOTIC within the ICR register.
        const OFFSET: u32 = 6;
        let mask = 1 << OFFSET;
        self.w.bits = (self.w.bits & !mask) | (((value as u32) & 1) << OFFSET);
        self.w
    }
}
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    // Each method below hands out a one-shot proxy that writes a single
    // field's bit into this `W` and returns it for further chaining.
    #[doc = "Bit 0 - SSI Receive Overrun Interrupt Clear"]
    #[inline(always)]
    pub fn ssi_icr_roric(&mut self) -> _SSI_ICR_RORICW {
        _SSI_ICR_RORICW { w: self }
    }
    #[doc = "Bit 1 - SSI Receive Time-Out Interrupt Clear"]
    #[inline(always)]
    pub fn ssi_icr_rtic(&mut self) -> _SSI_ICR_RTICW {
        _SSI_ICR_RTICW { w: self }
    }
    #[doc = "Bit 4 - SSI Receive DMA Interrupt Clear"]
    #[inline(always)]
    pub fn ssi_icr_dmarxic(&mut self) -> _SSI_ICR_DMARXICW {
        _SSI_ICR_DMARXICW { w: self }
    }
    #[doc = "Bit 5 - SSI Transmit DMA Interrupt Clear"]
    #[inline(always)]
    pub fn ssi_icr_dmatxic(&mut self) -> _SSI_ICR_DMATXICW {
        _SSI_ICR_DMATXICW { w: self }
    }
    #[doc = "Bit 6 - End of Transmit Interrupt Clear"]
    #[inline(always)]
    pub fn ssi_icr_eotic(&mut self) -> _SSI_ICR_EOTICW {
        _SSI_ICR_EOTICW { w: self }
    }
}
|
extern crate clipboard2;
use clipboard2::{Clipboard, SystemClipboard};
/// Round-trips the string "Hello" through the system clipboard and prints it.
fn main() {
    let clipboard = SystemClipboard::new().unwrap();
    let message = String::from("Hello");
    clipboard.set_string_contents(message).unwrap();
    let contents = clipboard.get_string_contents().unwrap();
    println!("{}", contents);
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::alloc::Layout;
use std::fmt;
use std::sync::Arc;
use common_arrow::arrow::bitmap::Bitmap;
use common_exception::Result;
use common_expression::types::number::NumberColumnBuilder;
use common_expression::types::DataType;
use common_expression::types::NumberDataType;
use common_expression::utils::column_merge_validity;
use common_expression::Column;
use common_expression::ColumnBuilder;
use common_expression::Scalar;
use common_io::prelude::*;
use super::aggregate_function::AggregateFunction;
use super::aggregate_function_factory::AggregateFunctionDescription;
use super::StateAddr;
use crate::aggregates::aggregator_common::assert_variadic_arguments;
/// Per-group state for `count`: just the running number of counted rows.
pub struct AggregateCountState {
    count: u64,
}
/// The `count` / `count(col)` aggregate function.
#[derive(Clone)]
pub struct AggregateCountFunction {
    // Name used for `Display` output (e.g. in EXPLAIN / error messages).
    display_name: String,
}
impl AggregateCountFunction {
    /// Creates a `count` aggregate; accepts zero arguments (`count(*)`)
    /// or exactly one (`count(col)`).
    pub fn try_create(
        display_name: &str,
        _params: Vec<Scalar>,
        arguments: Vec<DataType>,
    ) -> Result<Arc<dyn AggregateFunction>> {
        assert_variadic_arguments(display_name, arguments.len(), (0, 1))?;
        let function = AggregateCountFunction {
            display_name: display_name.to_string(),
        };
        Ok(Arc::new(function))
    }
    /// Factory description for `count`. The function returns its default
    /// value (0) rather than NULL when all inputs are null.
    pub fn desc() -> AggregateFunctionDescription {
        let features = super::aggregate_function_factory::AggregateFunctionFeatures {
            returns_default_when_only_null: true,
            ..Default::default()
        };
        AggregateFunctionDescription::creator_with_features(Box::new(Self::try_create), features)
    }
}
impl AggregateFunction for AggregateCountFunction {
    fn name(&self) -> &str {
        "AggregateCountFunction"
    }
    /// `count` always produces an unsigned 64-bit integer.
    fn return_type(&self) -> Result<DataType> {
        Ok(DataType::Number(NumberDataType::UInt64))
    }
    fn init_state(&self, place: StateAddr) {
        place.write(|| AggregateCountState { count: 0 });
    }
    fn state_layout(&self) -> Layout {
        Layout::new::<AggregateCountState>()
    }
    // columns may be nullable
    // if not we use validity as the null signs
    fn accumulate(
        &self,
        place: StateAddr,
        columns: &[Column],
        validity: Option<&Bitmap>,
        input_rows: usize,
    ) -> Result<()> {
        let state = place.get::<AggregateCountState>();
        // Number of rows that must NOT be counted.
        let nulls = if columns.is_empty() {
            // count(*): only the outer filter validity can exclude rows.
            validity.map(|v| v.unset_bits()).unwrap_or(0)
        } else {
            match &columns[0] {
                // count(col) on a nullable column: a row is excluded if it is
                // filtered out OR the value itself is null — hence the AND of
                // the filter bitmap and the column's validity bitmap.
                Column::Nullable(c) => validity
                    .map(|v| v & (&c.validity))
                    .unwrap_or_else(|| c.validity.clone())
                    .unset_bits(),
                _ => validity.map(|v| v.unset_bits()).unwrap_or(0),
            }
        };
        state.count += (input_rows - nulls) as u64;
        Ok(())
    }
    /// Grouped accumulation: `places[i]` is the state of the group row `i`
    /// belongs to; only valid rows bump their group's counter.
    fn accumulate_keys(
        &self,
        places: &[StateAddr],
        offset: usize,
        columns: &[Column],
        _input_rows: usize,
    ) -> Result<()> {
        // Merge the validity bitmaps of all argument columns.
        let validity = columns
            .iter()
            .fold(None, |acc, col| column_merge_validity(col, acc));
        match validity {
            Some(v) => {
                // all nulls
                if v.unset_bits() == v.len() {
                    return Ok(());
                }
                for (valid, place) in v.iter().zip(places.iter()) {
                    if valid {
                        let state = place.next(offset).get::<AggregateCountState>();
                        state.count += 1;
                    }
                }
            }
            _ => {
                // No validity info: every row counts.
                for place in places {
                    let state = place.next(offset).get::<AggregateCountState>();
                    state.count += 1;
                }
            }
        }
        Ok(())
    }
    fn accumulate_row(&self, place: StateAddr, _columns: &[Column], _row: usize) -> Result<()> {
        let state = place.get::<AggregateCountState>();
        state.count += 1;
        Ok(())
    }
    fn serialize(&self, place: StateAddr, writer: &mut Vec<u8>) -> Result<()> {
        let state = place.get::<AggregateCountState>();
        serialize_into_buf(writer, &state.count)
    }
    fn deserialize(&self, place: StateAddr, reader: &mut &[u8]) -> Result<()> {
        let state = place.get::<AggregateCountState>();
        state.count = deserialize_from_slice(reader)?;
        Ok(())
    }
    fn merge(&self, place: StateAddr, rhs: StateAddr) -> Result<()> {
        let state = place.get::<AggregateCountState>();
        let rhs = rhs.get::<AggregateCountState>();
        state.count += rhs.count;
        Ok(())
    }
    fn batch_merge_result(&self, places: &[StateAddr], builder: &mut ColumnBuilder) -> Result<()> {
        match builder {
            ColumnBuilder::Number(NumberColumnBuilder::UInt64(builder)) => {
                for place in places {
                    let state = place.get::<AggregateCountState>();
                    builder.push(state.count);
                }
            }
            // `return_type` guarantees a UInt64 builder.
            _ => unreachable!(),
        }
        Ok(())
    }
    fn merge_result(&self, place: StateAddr, builder: &mut ColumnBuilder) -> Result<()> {
        match builder {
            ColumnBuilder::Number(NumberColumnBuilder::UInt64(builder)) => {
                let state = place.get::<AggregateCountState>();
                builder.push(state.count);
            }
            // `return_type` guarantees a UInt64 builder.
            _ => unreachable!(),
        }
        Ok(())
    }
    /// `count` handles nulls itself (see `accumulate`), so it acts as its own
    /// null adaptor instead of being wrapped by the generic one.
    fn get_own_null_adaptor(
        &self,
        _nested_function: super::AggregateFunctionRef,
        _params: Vec<Scalar>,
        _arguments: Vec<DataType>,
    ) -> Result<Option<super::AggregateFunctionRef>> {
        Ok(Some(Arc::new(self.clone())))
    }
}
impl fmt::Display for AggregateCountFunction {
    /// Displays the function under the name it was created with.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.display_name)
    }
}
|
#![allow(unused_variables)]
use std::{
fmt,
sync::{Arc, Mutex},
collections::BTreeMap,
};
use euclid::{TypedRect, TypedSize2D};
use webrender::api::{
LayoutPixel, DisplayListBuilder, PrimitiveInfo, GradientStop,
ColorF, PipelineId, Epoch, ImageData, ImageDescriptor,
ResourceUpdate, AddImage, BorderRadius, ClipMode,
LayoutPoint, LayoutSize, GlyphOptions, LayoutRect, ExternalScrollId,
ComplexClipRegion, LayoutPrimitiveInfo, ExternalImageId,
ExternalImageData, ImageFormat, ExternalImageType, TextureTarget,
ImageRendering, AlphaType, FontInstanceFlags, FontRenderMode,
};
use azul_css::{
Css, LayoutPosition,CssProperty, LayoutOverflow,
StyleBorderRadius, LayoutMargin, LayoutPadding, BoxShadowClipMode,
StyleTextColor, StyleBackground, StyleBoxShadow,
StyleBackgroundSize, StyleBackgroundRepeat, StyleBorder, BoxShadowPreDisplayItem,
RectStyle, RectLayout, ColorU as StyleColorU, DynamicCssPropertyDefault,
};
use {
FastHashMap,
app_resources::AppResources,
callbacks::{IFrameCallback, GlTextureCallback, HidpiAdjustedBounds, StackCheckedPointer},
ui_state::UiState,
ui_description::{UiDescription, StyledNode},
id_tree::{NodeDataContainer, NodeId, NodeHierarchy},
dom::{
NodeData, ScrollTagId, DomHash, DomString, new_scroll_tag_id,
NodeType::{self, Div, Text, Image, GlTexture, IFrame, Label},
},
ui_solver::{do_the_layout, LayoutResult, PositionedRectangle},
app_resources::ImageId,
compositor::new_opengl_texture_id,
window::{Window, FakeWindow, ScrollStates},
callbacks::LayoutInfo,
window_state::WindowSize,
};
/// Fallback text color (opaque black) used when a node has no `color` style property.
const DEFAULT_FONT_COLOR: StyleTextColor = StyleTextColor(StyleColorU { r: 0, b: 0, g: 0, a: 255 });
/// A styled DOM paired with one parsed `DisplayRectangle` per node,
/// ready to be laid out and turned into a webrender display list.
pub(crate) struct DisplayList<'a, T: 'a> {
    pub(crate) ui_descr: &'a UiDescription<T>,
    // One rectangle per DOM node, indexed the same way as the UI arena.
    pub(crate) rectangles: NodeDataContainer<DisplayRectangle<'a>>
}
// Manual Debug impl because `T` itself need not be `Debug`; only the
// referenced description and rectangles are printed.
impl<'a, T: 'a> fmt::Debug for DisplayList<'a, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f,
            "DisplayList {{ ui_descr: {:?}, rectangles: {:?} }}",
            self.ui_descr, self.rectangles
        )
    }
}
/// DisplayRectangle is the main type which the layout parsing step gets operated on.
#[derive(Debug)]
pub(crate) struct DisplayRectangle<'a> {
    /// `Some(id)` if this rectangle has a callback attached to it
    /// Note: this is not the same as the `NodeId`!
    /// These two are completely separate numbers!
    pub tag: Option<u64>,
    /// The original styled node
    pub(crate) styled_node: &'a StyledNode,
    /// The style properties of the node, parsed
    pub(crate) style: RectStyle,
    /// The layout properties of the node, parsed
    pub(crate) layout: RectLayout,
}
impl<'a> DisplayRectangle<'a> {
#[inline]
pub fn new(tag: Option<u64>, styled_node: &'a StyledNode) -> Self {
Self { tag, styled_node, style: RectStyle::default(), layout: RectLayout::default() }
}
}
impl<'a, T: 'a> DisplayList<'a, T> {
    /// NOTE: This function assumes that the UiDescription has an initialized arena
    ///
    /// This only looks at the user-facing styles of the `UiDescription`, not the actual
    /// layout. The layout is done only in the `into_display_list_builder` step.
    pub(crate) fn new_from_ui_description(ui_description: &'a UiDescription<T>, ui_state: &UiState<T>) -> Self {
        let arena = &ui_description.ui_descr_arena;
        // Build one DisplayRectangle per node, carrying over the hit-test tag
        // (if any) and resolving dynamic CSS overrides.
        let display_rect_arena = arena.node_data.transform(|node, node_id| {
            let style = &ui_description.styled_nodes[node_id];
            let tag = ui_state.node_ids_to_tag_ids.get(&node_id).map(|tag| *tag);
            let mut rect = DisplayRectangle::new(tag, style);
            populate_css_properties(&mut rect, node_id, &ui_description.dynamic_css_overrides);
            rect
        });
        Self {
            ui_descr: ui_description,
            rectangles: display_rect_arena,
        }
    }
    /// Inserts and solves the top-level DOM (i.e. the DOM with the ID 0)
    pub(crate) fn into_display_list_builder(
        &self,
        app_data_access: &mut Arc<Mutex<T>>,
        window: &mut Window<T>,
        fake_window: &mut FakeWindow<T>,
        app_resources: &mut AppResources
    ) -> (DisplayListBuilder, ScrolledNodes, LayoutResult) {
        use glium::glutin::dpi::LogicalSize;
        let mut resource_updates = Vec::<ResourceUpdate>::new();
        let arena = &self.ui_descr.ui_descr_arena;
        let node_hierarchy = &arena.node_layout;
        let node_data = &arena.node_data;
        // Scan the styled DOM for image and font keys.
        //
        // The problem is that we need to scan all DOMs for image and font keys and insert them
        // before the layout() step - however, can't call IFrameCallbacks upfront, because each
        // IFrameCallback needs to know its size (so it has to be invoked after the layout() step).
        // So, this process needs to follow an order like:
        //
        // - For each DOM to render:
        // - Create a DOM ID
        // - Style the DOM according to the stylesheet
        // - Scan all the font keys and image keys
        // - Insert the new font keys and image keys into the render API
        // - Scan all IFrameCallbacks, generate the DomID for each callback
        // - Repeat while number_of_iframe_callbacks != 0
        app_resources.add_fonts_and_images(&self);
        let window_size = window.state.size.get_reverse_logical_size();
        let layout_result = do_the_layout(
            node_hierarchy,
            node_data,
            &self.rectangles,
            &*app_resources,
            LayoutSize::new(window_size.width as f32, window_size.height as f32),
            LayoutPoint::new(0.0, 0.0),
        );
        // TODO: After the layout has been done, call all IFrameCallbacks and get and insert
        // their font keys / image keys
        // Determine which nodes overflow their parent and therefore need
        // scroll clips (requires the layouted rects from above).
        let mut scrollable_nodes = get_nodes_that_need_scroll_clip(
            node_hierarchy, &self.rectangles, node_data, &layout_result.rects,
            &layout_result.node_depths, window.internal.pipeline_id
        );
        // Make sure unused scroll states are garbage collected.
        window.scroll_states.remove_unused_scroll_states();
        let LogicalSize { width, height } = window.state.size.dimensions;
        let mut builder = DisplayListBuilder::with_capacity(window.internal.pipeline_id, TypedSize2D::new(width as f32, height as f32), self.rectangles.len());
        // Compute the z-order-correct rendering order (position:absolute
        // sub-trees are pulled out into separate content groups).
        let rects_in_rendering_order = determine_rendering_order(node_hierarchy, &self.rectangles, &layout_result.rects);
        push_rectangles_into_displaylist(
            window.internal.epoch,
            window.state.size,
            rects_in_rendering_order,
            &mut scrollable_nodes,
            &mut window.scroll_states,
            &DisplayListParametersRef {
                pipeline_id: window.internal.pipeline_id,
                node_hierarchy,
                node_data,
                display_rectangle_arena: &self.rectangles,
                css: &window.css,
                layout_result: &layout_result,
            },
            &mut DisplayListParametersMut {
                app_data: app_data_access,
                app_resources,
                fake_window,
                builder: &mut builder,
                resource_updates: &mut resource_updates,
                pipeline_id: window.internal.pipeline_id,
            },
        );
        (builder, scrollable_nodes, layout_result)
    }
}
/// In order to render rectangles in the correct order, we have to group them together:
/// As long as there are no position:absolute items, items are inserted in a parents-then-child order
///
/// ```no_run,ignore
/// a
/// |- b
/// |- c
/// | |- d
/// e
/// |- f
/// g
/// ```
/// is rendered in the order `a, b, c, d, e, f, g`. This is necessary for clipping and scrolling,
/// meaning that if there is an overflow:scroll element, all children of that element are clipped
/// within that group. This means, that the z-order is completely determined by the DOM hierarchy.
///
/// Trees with elements with `position:absolute` are more complex: The absolute items need
/// to be rendered completely on top of all other items, however, they still need to clip
/// and scroll properly.
///
/// ```no_run,ignore
/// a:relative
/// |- b
/// |- c:absolute
/// | |- d
/// e
/// |- f
/// g
/// ```
///
/// will be rendered as: `a,b,e,f,g,c,d`, so that the `c,d` sub-DOM is on top of the rest
/// of the content. To support this, the content needs to be grouped: Whenever there is a
/// `position:absolute` encountered, the children are grouped into a new `ContentGroup`:
///
/// ```no_run,ignore
/// Group 1: [a, b, c, e, f, g]
/// Group 2: [c, d]
/// ```
/// Then the groups are simply rendered in-order: if there are multiple position:absolute
/// groups, this has the side effect of later groups drawing on top of earlier groups.
// (See the module-level comment above for how content groups model
// position:absolute stacking.)
#[derive(Debug, Clone, PartialEq)]
struct ContentGroup {
    /// The parent of the current node group, i.e. either the root node (0)
    /// or the last positioned node
    root: RenderableNodeId,
    /// Depth of the root node in the DOM hierarchy
    root_depth: usize,
    /// Node ids in order of drawing
    node_ids: Vec<RenderableNodeId>,
}
#[derive(Debug, Copy, Clone, PartialEq)]
struct RenderableNodeId {
    /// Whether the (hierarchical) children of this group need to be clipped (usually
    /// because the parent has an `overflow:hidden` property set).
    clip_children: bool,
    /// Whether the children overflow the parent and therefore need a scroll frame.
    /// NOTE(review): currently always set to `false` (marked TODO at the call sites).
    scrolls_children: bool,
    /// The actual node ID of the content
    node_id: NodeId,
}
/// All content groups of one DOM, in the order they should be rendered
/// (later groups draw on top of earlier ones).
#[derive(Debug, Clone, PartialEq)]
struct ContentGroupOrder {
    groups: Vec<ContentGroup>,
}
/// Computes the z-order-correct rendering order of all rectangles, starting
/// the recursive grouping at the root node (id 0, depth 0).
fn determine_rendering_order<'a>(
    node_hierarchy: &NodeHierarchy,
    rectangles: &NodeDataContainer<DisplayRectangle<'a>>,
    layouted_rects: &NodeDataContainer<PositionedRectangle>,
) -> ContentGroupOrder
{
    let mut groups = Vec::new();
    determine_rendering_order_inner(
        node_hierarchy,
        rectangles,
        layouted_rects,
        0, // depth of this node
        NodeId::new(0),
        &mut groups,
    );
    ContentGroupOrder { groups }
}
/// Walks the sub-tree rooted at `root_id`, collecting all non-absolute nodes
/// into one `ContentGroup` and recursing into each `position:absolute`
/// sub-tree afterwards so it forms its own (later-drawn) group.
fn determine_rendering_order_inner<'a>(
    node_hierarchy: &NodeHierarchy,
    rectangles: &NodeDataContainer<DisplayRectangle<'a>>,
    layouted_rects: &NodeDataContainer<PositionedRectangle>,
    // recursive parameters
    root_depth: usize,
    root_id: NodeId,
    content_groups: &mut Vec<ContentGroup>,
)
{
    use id_tree::NodeEdge;
    let mut root_group = ContentGroup {
        root: RenderableNodeId {
            node_id: root_id,
            clip_children: node_needs_to_clip_children(&rectangles[root_id].layout),
            scrolls_children: false, // TODO
        },
        root_depth,
        node_ids: Vec::new(),
    };
    // position:absolute nodes encountered during the walk, processed after
    // the current group is complete.
    let mut absolute_node_ids = Vec::new();
    let mut depth = root_depth + 1;
    // Same as the traverse function, but allows us to skip items, returns the next element
    fn traverse_simple(root_id: NodeId, current_node: NodeEdge<NodeId>, node_hierarchy: &NodeHierarchy) -> Option<NodeEdge<NodeId>> {
        // returns the next item
        match current_node {
            NodeEdge::Start(current_node) => {
                match node_hierarchy[current_node].first_child {
                    Some(first_child) => Some(NodeEdge::Start(first_child)),
                    None => Some(NodeEdge::End(current_node.clone()))
                }
            }
            NodeEdge::End(current_node) => {
                if current_node == root_id {
                    // Finished the whole sub-tree.
                    None
                } else {
                    match node_hierarchy[current_node].next_sibling {
                        Some(next_sibling) => Some(NodeEdge::Start(next_sibling)),
                        None => node_hierarchy[current_node].parent.and_then(|parent| Some(NodeEdge::End(parent))),
                    }
                }
            }
        }
    }
    let mut current_node_edge = NodeEdge::Start(root_id);
    while let Some(next_node_id) = traverse_simple(root_id, current_node_edge.clone(), node_hierarchy) {
        let mut should_continue_loop = true;
        if next_node_id.clone().inner_value() != root_id {
            match next_node_id {
                NodeEdge::Start(node_id) => {
                    let rect_node = &rectangles[node_id];
                    let position = rect_node.layout.position.unwrap_or_default();
                    if position == LayoutPosition::Absolute {
                        // For now, ignore the node and put it aside for later
                        absolute_node_ids.push((depth, node_id));
                        // Skip this sub-tree and go straight to the next sibling
                        // Since the tree is positioned absolute, we'll worry about it later
                        current_node_edge = NodeEdge::End(node_id);
                        should_continue_loop = false;
                    } else {
                        // TODO: Overflow hidden in horizontal / vertical direction
                        let node_is_overflow_hidden = node_needs_to_clip_children(&rect_node.layout);
                        let node_needs_to_scroll_children = false; // TODO
                        root_group.node_ids.push(RenderableNodeId {
                            node_id,
                            clip_children: node_is_overflow_hidden,
                            scrolls_children: node_needs_to_scroll_children,
                        });
                    }
                    depth += 1;
                },
                NodeEdge::End(node_id) => {
                    depth -= 1;
                },
            }
        }
        if should_continue_loop {
            current_node_edge = next_node_id;
        }
    }
    content_groups.push(root_group);
    // Note: Currently reversed order, so that earlier absolute
    // items are drawn on top of later absolute items
    for (absolute_depth, absolute_node_id) in absolute_node_ids.into_iter().rev() {
        determine_rendering_order_inner(node_hierarchy, rectangles, layouted_rects, absolute_depth, absolute_node_id, content_groups);
    }
}
/// All nodes of one DOM that need scroll clips, plus the mapping from
/// hit-test scroll tags back to node IDs.
#[derive(Default, Debug, Clone)]
pub(crate) struct ScrolledNodes {
    pub(crate) overflowing_nodes: BTreeMap<NodeId, OverflowingScrollNode>,
    pub(crate) tags_to_node_ids: BTreeMap<ScrollTagId, NodeId>,
}
/// Scroll metadata for a single node whose children overflow it.
#[derive(Debug, Clone)]
pub(crate) struct OverflowingScrollNode {
    /// Layouted bounds of the scrolling parent.
    pub(crate) parent_rect: PositionedRectangle,
    /// Union of all child bounds (the scrollable content area).
    pub(crate) child_rect: LayoutRect,
    /// Stable webrender scroll id (derived from the parent's DOM hash).
    pub(crate) parent_external_scroll_id: ExternalScrollId,
    pub(crate) parent_dom_hash: DomHash,
    /// Tag used for scroll hit-testing.
    pub(crate) scroll_tag_id: ScrollTagId,
}
/// Returns all node IDs where the children overflow the parent, together with the
/// `(parent_rect, child_rect)` - the child rect is the sum of the children.
///
/// TODO: The performance of this function can be theoretically improved:
///
/// - Unioning the rectangles is heavier than just looping through the children and
///   summing up their width / height / padding + margin.
/// - Scroll nodes only need to be inserted if the parent doesn't have `overflow: hidden`
///   activated
/// - Overflow for X and Y needs to be tracked separately (for overflow-x / overflow-y separation),
///   so there we'd need to track in which direction the inner_rect is overflowing.
fn get_nodes_that_need_scroll_clip<'a, T: 'a>(
    node_hierarchy: &NodeHierarchy,
    display_list_rects: &NodeDataContainer<DisplayRectangle<'a>>,
    dom_rects: &NodeDataContainer<NodeData<T>>,
    layouted_rects: &NodeDataContainer<PositionedRectangle>,
    parents: &[(usize, NodeId)],
    pipeline_id: PipelineId,
) -> ScrolledNodes {

    let mut nodes = BTreeMap::new();
    let mut tags_to_node_ids = BTreeMap::new();

    for (_, parent) in parents {

        // Sum up the height / width of all children. The union is seeded with
        // the first child's bounds instead of `LayoutRect::zero()`: unioning
        // with a zero-sized rect at the origin would incorrectly anchor the
        // sum rect at (0, 0).
        let mut children_sum_rect = None;
        for child in parent.children(&node_hierarchy) {
            let child_bounds = layouted_rects[child].bounds;
            children_sum_rect = Some(match children_sum_rect {
                None => child_bounds,
                Some(sum) => sum.union(&child_bounds),
            });
        }

        // Nodes without children can never overflow
        let children_sum_rect = match children_sum_rect {
            None => continue,
            Some(sum) => sum,
        };

        let parent_rect = layouted_rects.get(*parent).unwrap();

        // NOTE(review): this skips parents whose bounds are *contained in*
        // the children's sum rect, which looks inverted relative to the doc
        // comment ("children overflow the parent"). Preserved as-is - verify
        // the intended direction of this containment check.
        if children_sum_rect.contains_rect(&parent_rect.bounds) {
            continue;
        }

        let parent_dom_hash = dom_rects[*parent].calculate_node_data_hash();

        // Create an external scroll id. This id is required to preserve its
        // scroll state accross multiple frames.
        let parent_external_scroll_id = ExternalScrollId(parent_dom_hash.0, pipeline_id);

        // Create a unique scroll tag for hit-testing
        let scroll_tag_id = match display_list_rects.get(*parent).and_then(|node| node.tag) {
            Some(existing_tag) => ScrollTagId(existing_tag),
            None => new_scroll_tag_id(),
        };

        tags_to_node_ids.insert(scroll_tag_id, *parent);
        nodes.insert(*parent, OverflowingScrollNode {
            parent_rect: parent_rect.clone(),
            child_rect: children_sum_rect,
            parent_external_scroll_id,
            parent_dom_hash,
            scroll_tag_id,
        });
    }

    ScrolledNodes { overflowing_nodes: nodes, tags_to_node_ids }
}
/// Returns whether a node must clip its children, i.e. whether overflow is
/// anything other than `visible` on at least one axis.
fn node_needs_to_clip_children(layout: &RectLayout) -> bool {
    let overflow = layout.overflow.unwrap_or_default();
    // Clip unless *both* axes are explicitly visible (De Morgan of the
    // "either axis is non-visible" condition)
    !(overflow.is_horizontal_overflow_visible() &&
      overflow.is_vertical_overflow_visible())
}
#[test]
fn test_overflow_parsing() {
    use azul_css::Overflow;

    // The default for overflowing is overflow: auto, which clips
    // children, so the default layout should require clipping
    let default_layout = RectLayout::default();
    assert!(node_needs_to_clip_children(&default_layout));

    // Both axes explicitly visible: no clipping necessary
    let both_visible = RectLayout {
        overflow: Some(LayoutOverflow {
            horizontal: Some(Overflow::Visible),
            vertical: Some(Overflow::Visible),
        }),
        .. Default::default()
    };
    assert!(!node_needs_to_clip_children(&both_visible));

    // A single hidden axis is enough to require clipping
    let horizontal_hidden = RectLayout {
        overflow: Some(LayoutOverflow {
            horizontal: Some(Overflow::Hidden),
            vertical: Some(Overflow::Visible),
        }),
        .. Default::default()
    };
    assert!(node_needs_to_clip_children(&horizontal_hidden));
}
/// Walks the content groups in rendering order and pushes each group's root
/// rectangle followed by all of its child rectangles into the display list.
fn push_rectangles_into_displaylist<'a, 'b, 'c, 'd, 'e, 'f, T>(
    epoch: Epoch,
    window_size: WindowSize,
    content_grouped_rectangles: ContentGroupOrder,
    scrollable_nodes: &mut ScrolledNodes,
    scroll_states: &mut ScrollStates,
    referenced_content: &DisplayListParametersRef<'a,'b,'c,'d,'e, T>,
    referenced_mutable_content: &mut DisplayListParametersMut<'f, T>)
{
    let mut clip_stack = Vec::new();

    for group in content_grouped_rectangles.groups {

        // The group root comes first ...
        let root = group.root;
        let root_params = DisplayListRectParams {
            epoch,
            rect_idx: root.node_id,
            html_node: &referenced_content.node_data[root.node_id].node_type,
            window_size,
        };
        push_rectangles_into_displaylist_inner(
            root,
            scrollable_nodes,
            &root_params,
            referenced_content,
            referenced_mutable_content,
            &mut clip_stack,
        );

        // ... followed by its (already depth-ordered) descendants
        for child in group.node_ids {
            let child_params = DisplayListRectParams {
                epoch,
                rect_idx: child.node_id,
                html_node: &referenced_content.node_data[child.node_id].node_type,
                window_size,
            };
            push_rectangles_into_displaylist_inner(
                child,
                scrollable_nodes,
                &child_params,
                referenced_content,
                referenced_mutable_content,
                &mut clip_stack,
            );
        }
    }
}
/// Pushes a single renderable node into the display list by delegating to
/// `displaylist_handle_rect`.
///
/// NOTE: `item.clip_children` and `clip_stack` are currently unused because
/// the clipping logic below is disabled (see the commented-out block); the
/// parameters are kept so the clipping can be re-enabled without changing
/// every call site.
fn push_rectangles_into_displaylist_inner<'a,'b,'c,'d,'e,'f, T>(
    item: RenderableNodeId,
    scrollable_nodes: &mut ScrolledNodes,
    rectangle: &DisplayListRectParams<'a, T>,
    referenced_content: &DisplayListParametersRef<'a,'b,'c,'d,'e, T>,
    referenced_mutable_content: &mut DisplayListParametersMut<'f, T>,
    clip_stack: &mut Vec<NodeId>,
) {
    displaylist_handle_rect(
        scrollable_nodes,
        rectangle,
        referenced_content,
        referenced_mutable_content
    );

    /*
    // NOTE: table demo has problems with clipping
    if item.clip_children {
        if let Some(last_child) = referenced_content.node_hierarchy[rectangle.rect_idx].last_child {
            let styled_node = &referenced_content.display_rectangle_arena[rectangle.rect_idx];
            let solved_rect = &referenced_content.layout_result.rects[rectangle.rect_idx];
            let clip = get_clip_region(solved_rect.bounds, &styled_node)
                .unwrap_or(ComplexClipRegion::new(solved_rect.bounds, BorderRadius::zero(), ClipMode::Clip));
            let clip_id = referenced_mutable_content.builder.define_clip(solved_rect.bounds, vec![clip], /* image_mask: */ None);
            referenced_mutable_content.builder.push_clip_id(clip_id);
            clip_stack.push(last_child);
        }
    }

    if clip_stack.last().cloned() == Some(rectangle.rect_idx) {
        referenced_mutable_content.builder.pop_clip_id();
        clip_stack.pop();
    }
    */
}
/// Parameters that apply to a single rectangle / div node
#[derive(Copy, Clone)]
pub(crate) struct DisplayListRectParams<'a, T: 'a> {
    /// Epoch (frame generation) this rectangle is rendered in
    pub epoch: Epoch,
    /// Index of the node in the DOM arena
    pub rect_idx: NodeId,
    /// The node's type (Div / Text / Image / GlTexture / IFrame)
    pub html_node: &'a NodeType<T>,
    // Needed for DPI-aware sizing of GL textures and iframes
    window_size: WindowSize,
}
/// Builds a rounded-corner clip region for the given bounds, if the
/// rectangle has a `border-radius` set. Returns `None` for square corners.
fn get_clip_region<'a>(bounds: LayoutRect, rect: &DisplayRectangle<'a>) -> Option<ComplexClipRegion> {
    use css::webrender_translate::wr_translate_border_radius;
    // `map` instead of `and_then(|x| Some(..))` - the closure is infallible
    rect.style.border_radius.map(|border_radius| {
        ComplexClipRegion {
            rect: bounds,
            radii: wr_translate_border_radius(border_radius.0).into(),
            mode: ClipMode::Clip,
        }
    })
}
/// Push a single rectangle into the display list builder
///
/// Emission order matters: outset box shadow (before the border-radius clip
/// is active), then the optional clip push, background / hit-test rect,
/// border, node-type specific content, inset box shadow, and finally the
/// clip pop.
#[inline]
fn displaylist_handle_rect<'a,'b,'c,'d,'e,'f,'g, T>(
    scrollable_nodes: &mut ScrolledNodes,
    rectangle: &DisplayListRectParams<'a, T>,
    referenced_content: &DisplayListParametersRef<'b,'c,'d,'e,'f, T>,
    referenced_mutable_content: &mut DisplayListParametersMut<'g, T>)
{
    let DisplayListParametersRef {
        css, display_rectangle_arena,
        pipeline_id, node_hierarchy, node_data,
        layout_result,
    } = referenced_content;

    let DisplayListRectParams {
        epoch, rect_idx, html_node, window_size,
    } = rectangle;

    let rect = &display_rectangle_arena[*rect_idx];
    let bounds = layout_result.rects[*rect_idx].bounds;

    let info = LayoutPrimitiveInfo {
        rect: bounds,
        clip_rect: bounds,
        is_backface_visible: false,
        // Hit-test tag: the node's own tag, or - for scrollable nodes
        // without one - the scroll tag, so scroll areas stay hit-testable
        tag: rect.tag.map(|tag| (tag, 0)).or({
            scrollable_nodes.overflowing_nodes
                .get(&rect_idx)
                .map(|scrolled| (scrolled.scroll_tag_id.0, 0))
        }),
    };

    // Optional clip for rounded corners (border-radius)
    let clip_region_id = get_clip_region(bounds, &rect).map(|clip|
        referenced_mutable_content.builder.define_clip(bounds, vec![clip], None)
    );

    // Push the "outset" box shadow, before the clip is active
    push_box_shadow(
        referenced_mutable_content.builder,
        &rect.style,
        &bounds,
        BoxShadowClipMode::Outset,
    );

    if let Some(id) = clip_region_id {
        referenced_mutable_content.builder.push_clip_id(id);
    }

    // If the rect is hit-testing relevant, we need to push a rect anyway.
    // Otherwise the hit-testing gets confused
    if let Some(bg) = &rect.style.background {
        push_background(
            &info,
            &bounds,
            referenced_mutable_content.builder,
            bg,
            &rect.style.background_size,
            &rect.style.background_repeat,
            &referenced_mutable_content.app_resources,
        );
    } else if info.tag.is_some() {
        // Fully transparent rect, pushed only so the tag can be hit-tested
        const TRANSPARENT_BG: StyleColorU = StyleColorU { r: 0, g: 0, b: 0, a: 0 };
        push_rect(
            &info,
            referenced_mutable_content.builder,
            &TRANSPARENT_BG,
        );
    }

    if let Some(ref border) = rect.style.border {
        push_border(
            &info,
            referenced_mutable_content.builder,
            &border,
            &rect.style.border_radius,
        );
    }

    // Node-type specific content
    match html_node {
        Div => { },
        Text(_) | Label(_) => {
            // Text is laid out and positioned during the layout pass,
            // so this should succeed - if there were problems
            //
            // TODO: In the table demo, the numbers don't show - empty glyphs (why?)!
            push_text(
                &info,
                referenced_mutable_content.builder,
                layout_result,
                rect_idx,
                &rect.style,
                &rect.layout,
            )
        },
        Image(image_id) => push_image(
            &info,
            referenced_mutable_content.builder,
            referenced_mutable_content.app_resources,
            image_id,
            LayoutSize::new(info.rect.size.width, info.rect.size.height)
        ),
        GlTexture(callback) => push_opengl_texture(callback, &info, rectangle, referenced_content, referenced_mutable_content),
        IFrame(callback) => push_iframe(callback, &info, scrollable_nodes, rectangle, referenced_content, referenced_mutable_content),
    };

    // Push the inset shadow (if any)
    push_box_shadow(
        referenced_mutable_content.builder,
        &rect.style,
        &bounds,
        BoxShadowClipMode::Inset
    );

    if clip_region_id.is_some() {
        referenced_mutable_content.builder.pop_clip_id();
    }
}
/// Invokes a user-supplied OpenGL texture callback and pushes the resulting
/// texture into the display list as an external WebRender image.
///
/// The callback runs while the app data is locked; afterwards the GL state
/// (framebuffer, SRGB, MSAA, viewport) is reset, since the callback may have
/// modified it. If the callback returns `None`, nothing is pushed.
fn push_opengl_texture<'a,'b,'c,'d,'e,'f, T>(
    (texture_callback, texture_stack_ptr): &(GlTextureCallback<T>, StackCheckedPointer<T>),
    info: &LayoutPrimitiveInfo,
    rectangle: &DisplayListRectParams<'a, T>,
    referenced_content: &DisplayListParametersRef<'a,'b,'c,'d,'e, T>,
    referenced_mutable_content: &mut DisplayListParametersMut<'f, T>,
) {
    use compositor::{ActiveTexture, ACTIVE_GL_TEXTURES};
    use gleam::gl;
    use app_resources::FontImageApi;

    // Bounds adjusted for the window's DPI factors
    let bounds = HidpiAdjustedBounds::from_bounds(
        info.rect,
        rectangle.window_size.hidpi_factor,
        rectangle.window_size.winit_hidpi_factor
    );

    let texture;

    {
        // Make sure that the app data is locked before invoking the callback
        let _lock = referenced_mutable_content.app_data.lock().unwrap();

        texture = (texture_callback.0)(&texture_stack_ptr, LayoutInfo {
            window: &mut *referenced_mutable_content.fake_window,
            resources: &referenced_mutable_content.app_resources,
        }, bounds);

        // Reset the framebuffer and SRGB color target to 0
        let gl_context = referenced_mutable_content.fake_window.read_only_window().get_gl_context();

        gl_context.bind_framebuffer(gl::FRAMEBUFFER, 0);
        gl_context.disable(gl::FRAMEBUFFER_SRGB);
        gl_context.disable(gl::MULTISAMPLE);
        gl_context.viewport(0, 0, info.rect.size.width as i32, info.rect.size.height as i32);
    }

    // Callback produced no texture - nothing to draw
    let texture = match texture {
        Some(s) => s,
        None => return,
    };

    let texture_width = texture.inner.width() as f32;
    let texture_height = texture.inner.height() as f32;

    let opaque = false;
    // The texture gets mapped 1:1 onto the display, so there is no need for mipmaps
    let allow_mipmaps = false;

    // Note: The ImageDescriptor has no effect on how large the image appears on-screen
    let descriptor = ImageDescriptor::new(texture_width as i32, texture_height as i32, ImageFormat::BGRA8, opaque, allow_mipmaps);
    let key = referenced_mutable_content.app_resources.get_render_api().new_image_key();
    let external_image_id = ExternalImageId(new_opengl_texture_id() as u64);

    let data = ImageData::External(ExternalImageData {
        id: external_image_id,
        channel_index: 0,
        image_type: ExternalImageType::TextureHandle(TextureTarget::Default),
    });

    // Keep the texture alive for this epoch, so the compositor can resolve
    // the external image ID when the frame is rendered
    ACTIVE_GL_TEXTURES.lock().unwrap()
        .entry(rectangle.epoch).or_insert_with(|| FastHashMap::default())
        .insert(external_image_id, ActiveTexture { texture: texture.clone() });

    referenced_mutable_content.resource_updates.push(ResourceUpdate::AddImage(
        AddImage { key, descriptor, data, tiling: None }
    ));

    referenced_mutable_content.builder.push_image(
        &info,
        LayoutSize::new(texture_width, texture_height),
        LayoutSize::zero(),
        ImageRendering::Auto,
        AlphaType::Alpha,
        key,
        ColorF::WHITE
    );
}
/// Lays out and renders a nested DOM (iframe) into the parent display list.
///
/// Invokes the iframe callback (with the app data locked) to obtain a new
/// DOM, styles and lays it out against the same CSS, then recursively pushes
/// its rectangles into the same display list builder. Scroll nodes found in
/// the sub-DOM are merged into `parent_scrollable_nodes`.
fn push_iframe<'a,'b,'c,'d,'e,'f, T>(
    (iframe_callback, iframe_pointer): &(IFrameCallback<T>, StackCheckedPointer<T>),
    info: &LayoutPrimitiveInfo,
    parent_scrollable_nodes: &mut ScrolledNodes,
    rectangle: &DisplayListRectParams<'a, T>,
    referenced_content: &DisplayListParametersRef<'a,'b,'c,'d,'e, T>,
    referenced_mutable_content: &mut DisplayListParametersMut<'f, T>,
) {
    // NOTE(review): `hidpi_factor` is passed for *both* factors here, while
    // `push_opengl_texture` passes `winit_hidpi_factor` as the second
    // argument - confirm which is intended
    let bounds = HidpiAdjustedBounds::from_bounds(
        info.rect,
        rectangle.window_size.hidpi_factor,
        rectangle.window_size.hidpi_factor
    );

    let new_dom = {
        // Make sure that the app data is locked before invoking the callback
        let _lock = referenced_mutable_content.app_data.lock().unwrap();
        let window_info = LayoutInfo {
            window: referenced_mutable_content.fake_window,
            resources: &referenced_mutable_content.app_resources,
        };
        (iframe_callback.0)(&iframe_pointer, window_info, bounds)
    };

    // TODO: Right now, no focusing, hovering or :active allowed in iframes!
    let is_mouse_down = false;
    let mut focused_node = None;
    let mut focus_target = None;
    let hovered_nodes = BTreeMap::new();

    // Style the sub-DOM against the same CSS as the parent document
    let mut ui_state = new_dom.into_ui_state();
    let ui_description = UiDescription::<T>::match_css_to_dom(
        &mut ui_state,
        &referenced_content.css,
        &mut focused_node,
        &mut focus_target,
        &hovered_nodes,
        is_mouse_down
    );

    let display_list = DisplayList::new_from_ui_description(&ui_description, &ui_state);

    // Register any fonts / images the sub-DOM needs before layouting it
    referenced_mutable_content.app_resources.add_fonts_and_images(&display_list);

    let arena = &ui_description.ui_descr_arena;
    let node_hierarchy = &arena.node_layout;
    let node_data = &arena.node_data;

    // Insert the DOM into the solver so we can solve the layout of the rectangles
    let rect_size = LayoutSize::new(
        info.rect.size.width / rectangle.window_size.hidpi_factor as f32 * rectangle.window_size.winit_hidpi_factor as f32,
        info.rect.size.height / rectangle.window_size.hidpi_factor as f32 * rectangle.window_size.winit_hidpi_factor as f32,
    );
    let rect_origin = LayoutPoint::new(info.rect.origin.x, info.rect.origin.y);

    let layout_result = do_the_layout(
        &node_hierarchy,
        &node_data,
        &display_list.rectangles,
        &*referenced_mutable_content.app_resources,
        rect_size,
        rect_origin,
    );

    let mut scrollable_nodes = get_nodes_that_need_scroll_clip(
        node_hierarchy, &display_list.rectangles, node_data, &layout_result.rects,
        &layout_result.node_depths, referenced_content.pipeline_id
    );

    let rects_in_rendering_order = determine_rendering_order(
        node_hierarchy, &display_list.rectangles, &layout_result.rects
    );

    let referenced_content = DisplayListParametersRef {
        // Important: Need to update the ui description, otherwise this function would be endlessly recursive
        node_hierarchy,
        node_data,
        display_rectangle_arena: &display_list.rectangles,
        layout_result: &layout_result,
        .. *referenced_content
    };

    push_rectangles_into_displaylist(
        rectangle.epoch,
        rectangle.window_size,
        rects_in_rendering_order,
        &mut scrollable_nodes,
        &mut ScrollStates::new(),
        &referenced_content,
        referenced_mutable_content
    );

    // Merge the sub-DOM's scroll nodes into the parent's bookkeeping
    parent_scrollable_nodes.overflowing_nodes.extend(scrollable_nodes.overflowing_nodes.into_iter());
    parent_scrollable_nodes.tags_to_node_ids.extend(scrollable_nodes.tags_to_node_ids.into_iter());
}
/// Since the display list can take a lot of parameters, we don't want to
/// continually pass them as parameters of the function and rather use a
/// struct to pass them around. This is purely for ergonomic reasons.
///
/// `DisplayListParametersRef` has only members that are
/// **immutable references** to other things that need to be passed down the display list
#[derive(Copy, Clone)]
struct DisplayListParametersRef<'a, 'b, 'c, 'd, 'e, T: 'a> {
    /// The DOM data (node types, ids, classes) of all nodes
    pub node_data: &'a NodeDataContainer<NodeData<T>>,
    /// The CSS that should be applied to the DOM
    pub css: &'b Css,
    /// Laid out words and rectangles (contains info about content bounds and text layout)
    pub layout_result: &'c LayoutResult,
    /// Reference to the arena that contains all the styled rectangles
    pub display_rectangle_arena: &'d NodeDataContainer<DisplayRectangle<'d>>,
    /// Parent / child / sibling relationships between the DOM nodes
    pub node_hierarchy: &'e NodeHierarchy,
    /// The WebRender pipeline that this display list is built for
    pub pipeline_id: PipelineId,
}
/// Same as `DisplayListParametersRef`, but for `&mut Something`
///
/// Note: The `'a` in the `'a + Layout` is technically not required.
/// Only rustc 1.28 requires this, more modern compiler versions insert it automatically.
struct DisplayListParametersMut<'a, T: 'a> {
    /// Needs to be present, because the dom_to_displaylist_builder
    /// could call (recursively) a sub-DOM function again, for example an OpenGL callback
    pub app_data: &'a mut Arc<Mutex<T>>,
    /// The original, top-level display list builder that we need to push stuff into
    pub builder: &'a mut DisplayListBuilder,
    /// The app resources, so that a sub-DOM / iframe can register fonts and images
    /// TODO: How to handle cleanup ???
    pub app_resources: &'a mut AppResources,
    /// If new fonts or other stuff are created, we need to tell WebRender about this
    pub resource_updates: &'a mut Vec<ResourceUpdate>,
    /// Window access, so that sub-items can register OpenGL textures
    pub fake_window: &'a mut FakeWindow<T>,
    /// Pipeline this display list is built for
    pub pipeline_id: PipelineId,
}
/// Pushes a solid-color rectangle into the display list.
fn push_rect(
    info: &PrimitiveInfo<LayoutPixel>,
    builder: &mut DisplayListBuilder,
    color: &StyleColorU
) {
    use css::webrender_translate::wr_translate_color_u;
    // Translate the azul color representation into WebRender's
    let fill_color = wr_translate_color_u(*color);
    builder.push_rect(&info, fill_color.into());
}
/// Pushes the glyphs of an already-layouted text node into the display list,
/// optionally clipped to the node's padding-adjusted bounds (depending on
/// the overflow-x / overflow-y settings).
fn push_text(
    info: &PrimitiveInfo<LayoutPixel>,
    builder: &mut DisplayListBuilder,
    layout_result: &LayoutResult,
    node_id: &NodeId,
    rect_style: &RectStyle,
    rect_layout: &RectLayout,
) {
    use text_layout::get_layouted_glyphs;
    use css::webrender_translate::wr_translate_color_u;
    use ui_solver::determine_text_alignment;

    // Text is laid out during the layout pass - if there is no cached entry
    // for this node, there is nothing to draw
    let (scaled_words, _font_instance_key) = match layout_result.scaled_words.get(node_id) {
        Some(s) => s,
        None => return,
    };

    let (word_positions, font_instance_key) = match layout_result.positioned_word_cache.get(node_id) {
        Some(s) => s,
        None => return,
    };

    let (horz_alignment, vert_alignment) = determine_text_alignment(rect_style, rect_layout);

    // The text origin is offset by the top / left padding
    let rect_padding_top = rect_layout.padding.unwrap_or_default().top.map(|top| top.to_pixels()).unwrap_or(0.0);
    let rect_padding_left = rect_layout.padding.unwrap_or_default().left.map(|left| left.to_pixels()).unwrap_or(0.0);
    let rect_offset = LayoutPoint::new(info.rect.origin.x + rect_padding_left, info.rect.origin.y + rect_padding_top);
    let bounding_size_height_px = info.rect.size.height - rect_layout.get_vertical_padding();

    let layouted_glyphs = get_layouted_glyphs(
        word_positions,
        scaled_words,
        horz_alignment,
        vert_alignment,
        rect_offset.clone(),
        bounding_size_height_px
    );

    let font_color = rect_style.font_color.unwrap_or(DEFAULT_FONT_COLOR).0;
    let font_color = wr_translate_color_u(font_color);

    // WARNING: Do not enable FontInstanceFlags::FONT_SMOOTHING or FontInstanceFlags::FORCE_AUTOHINT -
    // they seem to interfere with the text layout thereby messing with the actual text layout.
    let mut flags = FontInstanceFlags::empty();
    flags.set(FontInstanceFlags::SUBPIXEL_BGR, true);
    flags.set(FontInstanceFlags::NO_AUTOHINT, true);
    flags.set(FontInstanceFlags::LCD_VERTICAL, true);

    let overflow_horizontal_visible = rect_layout.is_horizontal_overflow_visible();
    // BUGFIX: previously queried `is_horizontal_overflow_visible()` a second
    // time (copy-paste error), so `overflow-y: visible` was never honored here
    let overflow_vertical_visible = rect_layout.is_vertical_overflow_visible();

    let max_bounds = builder.content_size();
    let current_bounds = info.rect;
    let original_text_bounds = rect_layout.padding
        .as_ref()
        .map(|padding| subtract_padding(&current_bounds, padding))
        .unwrap_or(current_bounds);

    // Adjust the bounds by the padding, depending on the overflow:visible parameter
    let mut text_bounds = match (overflow_horizontal_visible, overflow_vertical_visible) {
        (true, true) => None, // nothing is clipped
        (false, false) => Some(original_text_bounds),
        (true, false) => {
            // Horizontally visible, vertically cut
            Some(LayoutRect::new(rect_offset, LayoutSize::new(max_bounds.width, original_text_bounds.size.height)))
        },
        (false, true) => {
            // Vertically visible, horizontally cut
            Some(LayoutRect::new(rect_offset, LayoutSize::new(original_text_bounds.size.width, max_bounds.height)))
        },
    };

    if let Some(text_bounds) = &mut text_bounds {
        // Clamp to non-negative sizes - subtracting padding can underflow
        text_bounds.size.width = text_bounds.size.width.max(0.0);
        text_bounds.size.height = text_bounds.size.height.max(0.0);
        let clip_id = builder.define_clip(*text_bounds, vec![ComplexClipRegion {
            rect: *text_bounds,
            radii: BorderRadius::zero(),
            mode: ClipMode::Clip,
        }], None);
        builder.push_clip_id(clip_id);
    }

    builder.push_text(
        &info,
        &layouted_glyphs.glyphs,
        *font_instance_key,
        font_color.into(),
        Some(GlyphOptions {
            render_mode: FontRenderMode::Subpixel,
            flags,
        })
    );

    if text_bounds.is_some() {
        builder.pop_clip_id();
    }
}
/// How many of the four box-shadow edges (top / left / bottom / right) are
/// set, which determines how the shadow has to be pushed.
enum ShouldPushShadow {
    /// Exactly one edge carries a shadow
    OneShadow,
    /// Two edges carry shadows (only opposite pairs are valid)
    TwoShadows,
    /// All four edges carry shadows (assumed to be identical)
    AllShadows,
}
/// WARNING: For "inset" shadows, you must push a clip ID first, otherwise the
/// shadow will not show up.
///
/// To prevent a shadow from being pushed twice, you have to annotate the clip
/// mode for this - outset or inset.
#[inline]
fn push_box_shadow(
    builder: &mut DisplayListBuilder,
    style: &RectStyle,
    bounds: &LayoutRect,
    shadow_type: BoxShadowClipMode)
{
    use self::ShouldPushShadow::*;

    // Box-shadow can be applied to each corner separately. This means, in practice
    // that we simply overlay multiple shadows with shifted clipping rectangles
    let StyleBoxShadow { top, left, bottom, right } = match &style.box_shadow {
        Some(s) => s,
        None => return,
    };
    let border_radius = style.border_radius.unwrap_or(StyleBorderRadius::zero());

    // Count how many edges actually carry a shadow. Note that a count of 3
    // falls into the `_ => return` arm, i.e. it is treated as invalid.
    let what_shadow_to_push = match [top, left, bottom, right].iter().filter(|x| x.is_some()).count() {
        1 => OneShadow,
        2 => TwoShadows,
        4 => AllShadows,
        _ => return,
    };

    match what_shadow_to_push {
        OneShadow => {
            // Find the single edge that has the shadow set
            let current_shadow = match (top, left, bottom, right) {
                | (Some(Some(shadow)), None, None, None)
                | (None, Some(Some(shadow)), None, None)
                | (None, None, Some(Some(shadow)), None)
                | (None, None, None, Some(Some(shadow)))
                => shadow,
                _ => return, // reachable, but invalid box-shadow
            };

            push_single_box_shadow_edge(
                builder, current_shadow, bounds, border_radius, shadow_type,
                top, bottom, left, right
            );
        },
        // Two shadows in opposite directions:
        //
        // box-shadow-top: 0px 0px 5px red;
        // box-shadow-bottom: 0px 0px 5px blue;
        TwoShadows => {
            match (top, left, bottom, right) {
                // top + bottom box-shadow pair
                (Some(Some(t)), None, Some(Some(b)), right) => {
                    // Push each edge separately, with the other edges zeroed out
                    push_single_box_shadow_edge(
                        builder, t, bounds, border_radius, shadow_type,
                        top, &None, &None, &None
                    );
                    push_single_box_shadow_edge(
                        builder, b, bounds, border_radius, shadow_type,
                        &None, bottom, &None, &None
                    );
                },
                // left + right box-shadow pair
                (None, Some(Some(l)), None, Some(Some(r))) => {
                    push_single_box_shadow_edge(
                        builder, l, bounds, border_radius, shadow_type,
                        &None, &None, left, &None
                    );
                    push_single_box_shadow_edge(
                        builder, r, bounds, border_radius, shadow_type,
                        &None, &None, &None, right
                    );
                }
                _ => return, // reachable, but invalid (e.g. top + left pair)
            }
        },
        AllShadows => {
            // Assumes that all box shadows are the same, so just use the top shadow
            let top_shadow = top.unwrap();
            let clip_rect = top_shadow
                .as_ref()
                .map(|top_shadow| get_clip_rect(top_shadow, bounds))
                .unwrap_or(*bounds);

            push_box_shadow_inner(
                builder,
                &top_shadow,
                border_radius,
                bounds,
                clip_rect,
                shadow_type
            );
        }
    }
}
fn push_box_shadow_inner(
builder: &mut DisplayListBuilder,
pre_shadow: &Option<BoxShadowPreDisplayItem>,
border_radius: StyleBorderRadius,
bounds: &LayoutRect,
clip_rect: LayoutRect,
shadow_type: BoxShadowClipMode)
{
use webrender::api::LayoutVector2D;
use css::webrender_translate::{
wr_translate_color_u, wr_translate_border_radius,
wr_translate_box_shadow_clip_mode
};
let pre_shadow = match pre_shadow {
None => return,
Some(ref s) => s,
};
// The pre_shadow is missing the StyleBorderRadius & LayoutRect
if pre_shadow.clip_mode != shadow_type {
return;
}
let full_screen_rect = LayoutRect::new(LayoutPoint::zero(), builder.content_size());;
// prevent shadows that are larger than the full screen
let clip_rect = clip_rect.intersection(&full_screen_rect).unwrap_or(clip_rect);
// Apply a gamma of 2.2 to the original value
//
// NOTE: strangely box-shadow is the only thing that needs to be gamma-corrected...
fn apply_gamma(color: ColorF) -> ColorF {
const GAMMA: f32 = 2.2;
const GAMMA_F: f32 = 1.0 / GAMMA;
ColorF {
r: color.r.powf(GAMMA_F),
g: color.g.powf(GAMMA_F),
b: color.b.powf(GAMMA_F),
a: color.a,
}
}
let info = LayoutPrimitiveInfo::with_clip_rect(LayoutRect::zero(), clip_rect);
builder.push_box_shadow(
&info,
*bounds,
LayoutVector2D::new(pre_shadow.offset[0].to_pixels(), pre_shadow.offset[1].to_pixels()),
apply_gamma(wr_translate_color_u(pre_shadow.color).into()),
pre_shadow.blur_radius.to_pixels(),
pre_shadow.spread_radius.to_pixels(),
wr_translate_border_radius(border_radius.0).into(),
wr_translate_box_shadow_clip_mode(pre_shadow.clip_mode)
);
}
/// Computes the clip rect for a box shadow: the plain bounds for inset
/// shadows, or the bounds expanded by the shadow's maximum reach for outset
/// shadows.
fn get_clip_rect(pre_shadow: &BoxShadowPreDisplayItem, bounds: &LayoutRect) -> LayoutRect {
    if pre_shadow.clip_mode == BoxShadowClipMode::Inset {
        // inset shadows do not work like outset shadows
        // for inset shadows, you have to push a clip ID first, so that they are
        // clipped to the bounds -we trust that the calling function knows to do this
        return *bounds;
    }

    // Outset: grow the bounds by the maximum extent the shadow can reach
    // (spread + blur, doubled)
    let origin_displace = (pre_shadow.spread_radius.to_pixels() + pre_shadow.blur_radius.to_pixels()) * 2.0;

    let mut clip_rect = *bounds;
    clip_rect.origin.x -= pre_shadow.offset[0].to_pixels() + origin_displace;
    clip_rect.origin.y -= pre_shadow.offset[1].to_pixels() + origin_displace;
    clip_rect.size.width += origin_displace * 2.0;
    clip_rect.size.height += origin_displace * 2.0;
    clip_rect
}
/// Pushes one edge (top / bottom / left / right) of a box shadow.
///
/// Exactly one of `top` / `bottom` / `left` / `right` is expected to be
/// `Some(Some(..))`; it selects which edge `current_shadow`'s bounds and
/// clip rect are aligned to.
#[allow(clippy::collapsible_if)]
fn push_single_box_shadow_edge(
    builder: &mut DisplayListBuilder,
    current_shadow: &BoxShadowPreDisplayItem,
    bounds: &LayoutRect,
    border_radius: StyleBorderRadius,
    shadow_type: BoxShadowClipMode,
    top: &Option<Option<BoxShadowPreDisplayItem>>,
    bottom: &Option<Option<BoxShadowPreDisplayItem>>,
    left: &Option<Option<BoxShadowPreDisplayItem>>,
    right: &Option<Option<BoxShadowPreDisplayItem>>,
) {
    let is_inset_shadow = current_shadow.clip_mode == BoxShadowClipMode::Inset;
    // Maximum distance the shadow can extend past its edge (spread + blur, doubled)
    let origin_displace = (current_shadow.spread_radius.to_pixels() + current_shadow.blur_radius.to_pixels()) * 2.0;

    let mut shadow_bounds = *bounds;
    let mut clip_rect = *bounds;

    // The edge arguments are only used as selectors, so match on `Some(Some(_))`
    // instead of binding (the previous bindings shadowed the parameters and
    // were never used, triggering unused-variable warnings).
    if is_inset_shadow {
        // If the shadow is inset, we adjust the clip rect to be
        // exactly the amount of the shadow
        if let Some(Some(_)) = top {
            clip_rect.size.height = origin_displace;
            shadow_bounds.size.width += origin_displace;
            shadow_bounds.origin.x -= origin_displace / 2.0;
        } else if let Some(Some(_)) = bottom {
            clip_rect.size.height = origin_displace;
            clip_rect.origin.y += bounds.size.height - origin_displace;
            shadow_bounds.size.width += origin_displace;
            shadow_bounds.origin.x -= origin_displace / 2.0;
        } else if let Some(Some(_)) = left {
            clip_rect.size.width = origin_displace;
            shadow_bounds.size.height += origin_displace;
            shadow_bounds.origin.y -= origin_displace / 2.0;
        } else if let Some(Some(_)) = right {
            clip_rect.size.width = origin_displace;
            clip_rect.origin.x += bounds.size.width - origin_displace;
            shadow_bounds.size.height += origin_displace;
            shadow_bounds.origin.y -= origin_displace / 2.0;
        }
    } else {
        // Outset: the clip rect sits *outside* the bounds on the shadow's edge
        if let Some(Some(_)) = top {
            clip_rect.size.height = origin_displace;
            clip_rect.origin.y -= origin_displace;
            shadow_bounds.size.width += origin_displace;
            shadow_bounds.origin.x -= origin_displace / 2.0;
        } else if let Some(Some(_)) = bottom {
            clip_rect.size.height = origin_displace;
            clip_rect.origin.y += bounds.size.height;
            shadow_bounds.size.width += origin_displace;
            shadow_bounds.origin.x -= origin_displace / 2.0;
        } else if let Some(Some(_)) = left {
            clip_rect.size.width = origin_displace;
            clip_rect.origin.x -= origin_displace;
            shadow_bounds.size.height += origin_displace;
            shadow_bounds.origin.y -= origin_displace / 2.0;
        } else if let Some(Some(_)) = right {
            clip_rect.size.width = origin_displace;
            clip_rect.origin.x += bounds.size.width;
            shadow_bounds.size.height += origin_displace;
            shadow_bounds.origin.y -= origin_displace / 2.0;
        }
    }

    push_box_shadow_inner(
        builder,
        &Some(*current_shadow),
        border_radius,
        &shadow_bounds,
        clip_rect,
        shadow_type
    );
}
/// Pushes the CSS background (gradient, image or solid color) of a
/// rectangle into the display list.
#[inline]
fn push_background(
    info: &PrimitiveInfo<LayoutPixel>,
    bounds: &TypedRect<f32, LayoutPixel>,
    builder: &mut DisplayListBuilder,
    background: &StyleBackground,
    background_size: &Option<StyleBackgroundSize>,
    background_repeat: &Option<StyleBackgroundRepeat>,
    app_resources: &AppResources)
{
    use azul_css::{Shape, StyleBackground::*};
    use css::webrender_translate::{
        wr_translate_color_u, wr_translate_extend_mode, wr_translate_layout_point,
        wr_translate_layout_rect,
    };

    match background {
        RadialGradient(gradient) => {
            // NOTE(review): the radial stop offset is used as-is, while the
            // linear gradient below divides by 100.0 - confirm both store
            // their offsets in the same unit
            let stops: Vec<GradientStop> = gradient.stops.iter().map(|gradient_pre|
                GradientStop {
                    offset: gradient_pre.offset.unwrap().get(),
                    color: wr_translate_color_u(gradient_pre.color).into(),
                }).collect();

            let center = bounds.center();

            // Note: division by 2.0 because it's the radius, not the diameter
            let radius = match gradient.shape {
                Shape::Ellipse => TypedSize2D::new(bounds.size.width / 2.0, bounds.size.height / 2.0),
                Shape::Circle => {
                    // A circle's radius is based on the larger dimension
                    let largest_bound_size = bounds.size.width.max(bounds.size.height);
                    TypedSize2D::new(largest_bound_size / 2.0, largest_bound_size / 2.0)
                },
            };

            let gradient = builder.create_radial_gradient(center, radius, stops, wr_translate_extend_mode(gradient.extend_mode));
            builder.push_radial_gradient(&info, gradient, bounds.size, LayoutSize::zero());
        },
        LinearGradient(gradient) => {
            // Stop offsets are scaled from percent (0..100) to WebRender's 0.0..1.0
            let stops: Vec<GradientStop> = gradient.stops.iter().map(|gradient_pre|
                GradientStop {
                    offset: gradient_pre.offset.unwrap().get() / 100.0,
                    color: wr_translate_color_u(gradient_pre.color).into(),
                }).collect();

            let (begin_pt, end_pt) = gradient.direction.to_points(&wr_translate_layout_rect(*bounds));
            let gradient = builder.create_gradient(
                wr_translate_layout_point(begin_pt),
                wr_translate_layout_point(end_pt),
                stops,
                wr_translate_extend_mode(gradient.extend_mode),
            );
            builder.push_gradient(&info, gradient, bounds.size, LayoutSize::zero());
        },
        Image(style_image_id) => {
            // TODO: background-origin, background-position, background-repeat
            if let Some(image_id) = app_resources.get_css_image_id(&style_image_id.0) {
                let bounds = info.rect;
                // Fall back to the rect size if the image info is not available yet
                let image_dimensions = app_resources.get_image_info(image_id)
                    .map(|info| (info.descriptor.size.width, info.descriptor.size.height))
                    .unwrap_or((bounds.size.width as i32, bounds.size.height as i32)); // better than crashing...

                let size = match background_size {
                    Some(bg_size) => calculate_background_size(bg_size, &info, &image_dimensions),
                    None => TypedSize2D::new(image_dimensions.0 as f32, image_dimensions.1 as f32),
                };

                let background_repeat = background_repeat.unwrap_or_default();
                let background_repeat_info = get_background_repeat_info(&info, background_repeat, size);

                push_image(&background_repeat_info, builder, app_resources, image_id, size);
            }
        },
        Color(c) => {
            push_rect(&info, builder, c);
        },
        NoBackground => { },
    }
}
/// Derives the primitive info for a (possibly repeating) background image:
/// the clip rect is shrunk along every axis on which the image must NOT
/// repeat, so the image is only drawn once in that direction.
fn get_background_repeat_info(
    info: &LayoutPrimitiveInfo,
    background_repeat: StyleBackgroundRepeat,
    background_size: TypedSize2D<f32, LayoutPixel>,
) -> LayoutPrimitiveInfo {
    use azul_css::StyleBackgroundRepeat::*;

    // `Repeat` keeps the full clip rect; every other mode limits at least one axis
    let clip_size = match background_repeat {
        Repeat => return *info,
        NoRepeat => TypedSize2D::new(background_size.width, background_size.height),
        RepeatX => TypedSize2D::new(info.rect.size.width, background_size.height),
        RepeatY => TypedSize2D::new(background_size.width, info.rect.size.height),
    };

    LayoutPrimitiveInfo::with_clip_rect(
        info.rect,
        TypedRect::new(info.rect.origin, clip_size),
    )
}
/// Scaling factors between a target rect and an image's intrinsic size,
/// one per axis (used for background-size: contain / cover).
struct Ratio {
    width: f32,
    height: f32
}
/// Computes the drawn size of a background image for
/// `background-size: contain` (letterbox, smaller scale) or
/// `background-size: cover` (fill, larger scale).
fn calculate_background_size(
    bg_size: &StyleBackgroundSize,
    info: &PrimitiveInfo<LayoutPixel>,
    image_dimensions: &(i32, i32)
) -> TypedSize2D<f32, LayoutPixel> {

    // How much the image would need to be scaled to match the rect per axis
    let ratios = Ratio {
        width: info.rect.size.width / image_dimensions.0 as f32,
        height: info.rect.size.height / image_dimensions.1 as f32,
    };

    let scale = match bg_size {
        StyleBackgroundSize::Contain => ratios.width.min(ratios.height),
        StyleBackgroundSize::Cover => ratios.width.max(ratios.height),
    };

    TypedSize2D::new(image_dimensions.0 as f32 * scale, image_dimensions.1 as f32 * scale)
}
/// Pushes an image into the display list; images whose info has not been
/// registered with the app resources yet are silently skipped.
#[inline]
fn push_image(
    info: &PrimitiveInfo<LayoutPixel>,
    builder: &mut DisplayListBuilder,
    app_resources: &AppResources,
    image_id: &ImageId,
    size: TypedSize2D<f32, LayoutPixel>
) {
    let image_info = match app_resources.get_image_info(image_id) {
        Some(i) => i,
        None => return,
    };

    builder.push_image(
        info,
        size,
        LayoutSize::zero(),
        ImageRendering::Auto,
        AlphaType::PremultipliedAlpha,
        image_info.key,
        ColorF::WHITE,
    );
}
/// Pushes the rectangle's border into the display list, if the style's
/// border resolves to drawable widths / details.
#[inline]
fn push_border(
    info: &PrimitiveInfo<LayoutPixel>,
    builder: &mut DisplayListBuilder,
    border: &StyleBorder,
    border_radius: &Option<StyleBorderRadius>)
{
    use css::webrender_translate::{
        wr_translate_layout_side_offsets, wr_translate_border_details
    };

    let webrender_border = border.get_webrender_border(*border_radius);
    if let Some((widths, details)) = webrender_border {
        let translated_widths = wr_translate_layout_side_offsets(widths);
        let translated_details = wr_translate_border_details(details);
        builder.push_border(info, translated_widths, translated_details);
    }
}
/// Subtracts the padding from the bounds, returning the new bounds
///
/// Warning: The resulting rectangle may have negative width or height
fn subtract_padding(bounds: &TypedRect<f32, LayoutPixel>, padding: &LayoutPadding)
-> TypedRect<f32, LayoutPixel>
{
    // Unset sides count as zero padding.
    let top = padding.top.map_or(0.0, |p| p.to_pixels());
    let bottom = padding.bottom.map_or(0.0, |p| p.to_pixels());
    let left = padding.left.map_or(0.0, |p| p.to_pixels());
    let right = padding.right.map_or(0.0, |p| p.to_pixels());

    let mut shrunk = *bounds;
    // Move the origin inwards by the leading sides ...
    shrunk.origin.x += left;
    shrunk.origin.y += top;
    // ... and shrink the extent by the padding on both axes.
    shrunk.size.width -= left + right;
    shrunk.size.height -= top + bottom;
    shrunk
}
/// Populate the style properties of the `DisplayRectangle`, apply static / dynamic properties
fn populate_css_properties(
    rect: &mut DisplayRectangle,
    node_id: NodeId,
    css_overrides: &BTreeMap<NodeId, FastHashMap<DomString, CssProperty>>
) {
    use azul_css::CssDeclaration::*;
    for constraint in rect.styled_node.css_constraints.values() {
        match &constraint {
            // Static properties are applied unconditionally.
            Static(static_property) => apply_style_property(rect, static_property),
            Dynamic(dynamic_property) => {
                // Look up a caller-supplied override for this dynamic property id.
                let is_dynamic_prop = css_overrides.get(&node_id).and_then(|overrides| {
                    overrides.get(&DomString::Heap(dynamic_property.dynamic_id.clone()))
                });
                if let Some(overridden_property) = is_dynamic_prop {
                    // Only apply the dynamic style property default, if it isn't set to auto
                    if property_type_matches(overridden_property, &dynamic_property.default) {
                        apply_style_property(rect, overridden_property);
                    } else {
                        // Override is a different CssProperty variant than the
                        // default: ignore it (log when logging is enabled).
                        #[cfg(feature = "logging")] {
                            error!(
                                "Dynamic style property on rect {:?} don't have the same discriminant type,\r\n
                                cannot override {:?} with {:?} - enum discriminant mismatch",
                                rect, dynamic_property.default, overridden_property
                            )
                        }
                    }
                } else if let DynamicCssPropertyDefault::Exact(default) = &dynamic_property.default {
                    // No override given: fall back to the exact default
                    // (an `Auto` default applies nothing).
                    apply_style_property(rect, default);
                }
            }
        }
    }
}
/// Returns `true` when the override `a` may replace the dynamic default `b`,
/// i.e. both carry the same `CssProperty` variant. An `Auto` default is
/// compatible with any property.
fn property_type_matches(a: &CssProperty, b: &DynamicCssPropertyDefault) -> bool {
    use std::mem::discriminant;
    use azul_css::DynamicCssPropertyDefault::*;
    match b {
        Auto => true, // "auto" always matches
        Exact(expected) => discriminant(a) == discriminant(expected),
    }
}
/// Applies a single parsed `CssProperty` to the matching field of the
/// rectangle's `style` or `layout` data. Properties going through `merge`
/// combine with a previously applied value instead of replacing it.
fn apply_style_property(rect: &mut DisplayRectangle, property: &CssProperty) {
    use azul_css::CssProperty::*;
    match property {
        // -- visual / text style properties
        BorderRadius(b) => { rect.style.border_radius = Some(*b); },
        BackgroundSize(s) => { rect.style.background_size = Some(*s); },
        BackgroundRepeat(r) => { rect.style.background_repeat = Some(*r); },
        TextColor(t) => { rect.style.font_color = Some(*t); },
        Border(b) => { StyleBorder::merge(&mut rect.style.border, &b); },
        Background(b) => { rect.style.background = Some(b.clone()); },
        FontSize(f) => { rect.style.font_size = Some(*f); },
        FontFamily(f) => { rect.style.font_family = Some(f.clone()); },
        LetterSpacing(l) => { rect.style.letter_spacing = Some(*l); },
        TextAlign(ta) => { rect.style.text_align = Some(*ta); },
        BoxShadow(b) => { StyleBoxShadow::merge(&mut rect.style.box_shadow, b); },
        LineHeight(lh) => { rect.style.line_height = Some(*lh); },
        // -- box sizing / layout properties
        Width(w) => { rect.layout.width = Some(*w); },
        Height(h) => { rect.layout.height = Some(*h); },
        MinWidth(mw) => { rect.layout.min_width = Some(*mw); },
        MinHeight(mh) => { rect.layout.min_height = Some(*mh); },
        MaxWidth(mw) => { rect.layout.max_width = Some(*mw); },
        MaxHeight(mh) => { rect.layout.max_height = Some(*mh); },
        Position(p) => { rect.layout.position = Some(*p); },
        Top(t) => { rect.layout.top = Some(*t); },
        Bottom(b) => { rect.layout.bottom = Some(*b); },
        Right(r) => { rect.layout.right = Some(*r); },
        Left(l) => { rect.layout.left = Some(*l); },
        Padding(p) => { LayoutPadding::merge(&mut rect.layout.padding, &p); },
        Margin(m) => { LayoutMargin::merge(&mut rect.layout.margin, &m); },
        Overflow(o) => { LayoutOverflow::merge(&mut rect.layout.overflow, &o); },
        WordSpacing(ws) => { rect.style.word_spacing = Some(*ws); },
        TabWidth(tw) => { rect.style.tab_width = Some(*tw); },
        // -- flexbox properties
        FlexGrow(g) => { rect.layout.flex_grow = Some(*g) },
        FlexShrink(s) => { rect.layout.flex_shrink = Some(*s) },
        FlexWrap(w) => { rect.layout.wrap = Some(*w); },
        FlexDirection(d) => { rect.layout.direction = Some(*d); },
        JustifyContent(j) => { rect.layout.justify_content = Some(*j); },
        AlignItems(a) => { rect.layout.align_items = Some(*a); },
        AlignContent(a) => { rect.layout.align_content = Some(*a); },
        Cursor(_) => { /* cursor neither affects layout nor styling */ },
    }
}
|
use rand::Rng;
use std::cmp::Ordering;
/// Demonstrates three ways of branching on a random number in `0..=10`:
/// an `if`/`else if`/`else` chain, a `match` on `Ordering`, and `if` used
/// as an expression (like a ternary operator).
fn main() {
    let number = rand::thread_rng().gen_range(0, 11);
    println!("number: {}", number);

    // if else if else
    let description = if number < 5 {
        "The number is lower than 5"
    } else if number == 5 {
        "The number is 5"
    } else {
        "The number is higher than 5"
    };
    println!("{}", description);

    // match
    println!(
        "{}",
        match number.cmp(&5) {
            Ordering::Less => "The number is lower than 5",
            Ordering::Equal => "The number is equal to 5",
            Ordering::Greater => "The number is greater than 5",
        }
    );

    // if used in an expression (like ternary operator or Python)
    let odd_or_not = if number % 2 == 0 { "even" } else { "odd" };
    println!("The number is {}", odd_or_not);
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::{
formatting::{format_parts, get_child_listing, get_locations, Formatter},
options::PathFormat,
result::IqueryResult,
},
failure::{bail, format_err, Error},
inspect_formatter::{self, HierarchyFormatter},
serde::ser::Serialize,
serde_json::{
json,
ser::{PrettyFormatter, Serializer as JsonSerializer},
},
std::str::from_utf8,
};
/// Formats iquery results as pretty-printed JSON.
pub struct JsonFormatter {
    // How node paths are rendered (absolute vs. full vs. undefined).
    path_format: PathFormat,
    // NOTE(review): only `is_none()` is consulted in this file - confirm the
    // actual depth limiting happens elsewhere.
    max_depth: Option<u64>,
    // Whether hierarchies are sorted before being formatted.
    sort: bool,
}
impl Formatter for JsonFormatter {
    /// Creates a formatter with the given path style, depth limit and sorting.
    fn new(path_format: PathFormat, max_depth: Option<u64>, sort: bool) -> Self {
        Self { path_format, max_depth, sort }
    }

    /// Converts every loaded result into a `HierarchyData` and renders them
    /// all as JSON; fails on the first result that was not loaded.
    fn format_recursive(&self, results: Vec<IqueryResult>) -> Result<String, Error> {
        let mut hierarchies = Vec::new();
        for mut result in results {
            if self.sort {
                result.sort_hierarchy();
            }
            if !result.is_loaded() {
                bail!("Failed to format result at {}", result.location.to_string());
            }
            let file_path = match self.path_format {
                PathFormat::Absolute => result.location.absolute_path_to_string()?,
                PathFormat::Undefined | PathFormat::Full => result.location.to_string(),
            };
            hierarchies.push(inspect_formatter::HierarchyData {
                hierarchy: result.hierarchy.unwrap(),
                file_path,
                fields: result.location.parts,
            });
        }
        inspect_formatter::JsonFormatter::format(hierarchies)
    }

    /// Lists the locations found in each result. Loaded results (with no
    /// depth limit set) expand into the locations of their hierarchy; all
    /// other results contribute just their own formatted location.
    fn format_locations(&self, results: Vec<IqueryResult>) -> Result<String, Error> {
        let mut values = Vec::new();
        for mut result in results {
            if self.sort {
                result.sort_hierarchy();
            }
            if self.max_depth.is_none() && result.is_loaded() {
                values.extend(get_locations(
                    result.hierarchy.unwrap(),
                    &result.location,
                    &self.path_format,
                ));
            } else {
                values.push(format_parts(&result.location, &self.path_format, &vec![]));
            }
        }
        self.output(values)
    }

    /// Lists the names of all child nodes across the given results.
    fn format_child_listing(&self, results: Vec<IqueryResult>) -> Result<String, Error> {
        self.output(get_child_listing(results, self.sort, &self.path_format))
    }
}
impl JsonFormatter {
    /// Serializes `values` into a pretty-printed JSON string.
    fn output<T: Serialize>(&self, values: Vec<T>) -> Result<String, Error> {
        let mut bytes = Vec::new();
        {
            let formatter = PrettyFormatter::with_indent(b" ");
            let mut serializer = JsonSerializer::with_formatter(&mut bytes, formatter);
            json!(values).serialize(&mut serializer)?;
        }
        from_utf8(&bytes)
            .map(str::to_string)
            .map_err(|e| format_err!("Error serializing: {:?}", e))
    }
}
|
use pyo3::prelude::*;
use ukis_h3cellstore::clickhouse::compacted_tables::TableSet;
/// A Tableset describes the available database tables of a schema created from a `CompactedTableSchema`
#[pyclass]
pub struct PyTableSet {
    // The wrapped Rust-side table set all getters delegate to.
    tableset: TableSet,
}
#[pymethods]
impl PyTableSet {
    /// The name of the TableSet.
    ///
    /// Matches `CompactedTableSchema.name`.
    #[getter]
    pub fn basename(&self) -> String {
        self.tableset.basename.clone()
    }
    /// All compacted resolutions available in the tableset
    #[getter]
    pub fn compacted_resolutions(&self) -> Vec<u8> {
        self.tableset.compacted_resolutions()
    }
    /// All base resolutions available in the tableset
    #[getter]
    pub fn base_resolutions(&self) -> Vec<u8> {
        self.tableset.base_resolutions()
    }
}
impl From<TableSet> for PyTableSet {
    /// Wraps a Rust `TableSet` so it can be handed to Python.
    fn from(tableset: TableSet) -> Self {
        Self { tableset }
    }
}
|
pub mod equipment;
pub mod fermentable;
pub mod hop;
pub mod mash;
pub mod misc;
pub mod recipe;
pub mod style;
mod utils;
pub mod water;
pub mod yeast;
pub use equipment::Equipment;
pub use fermentable::Fermentable;
pub use hop::Hop;
pub use misc::Misc;
pub use recipe::Recipe;
pub use recipe::Type;
pub use style::Style;
pub use water::Water;
pub use yeast::Yeast;
|
use futures::{SinkExt, StreamExt, TryFutureExt};
use std::collections::HashMap;
use tokio::sync::mpsc::{self, UnboundedSender};
use tokio_stream::wrappers::UnboundedReceiverStream;
use uuid::Uuid;
use warp::ws::WebSocket;
use warp::Filter;
use xtra::prelude::*;
use xtra::spawn::Tokio;
// User: one actor per websocket connection.
struct User {
    // Connection id; also used as the key in `Room::users`.
    id: Uuid,
    // Channel whose receiver is drained back into the websocket
    // (see `user_connected`).
    tx: UnboundedSender<String>,
}
impl Actor for User {}
impl User {
    /// Creates a user actor for connection `id` with its outgoing channel.
    fn new(id: Uuid, tx: UnboundedSender<String>) -> Self {
        Self { id, tx }
    }
}
// ToUser - sends message back up to user
struct ToUser(String);
impl Message for ToUser {
    type Result = ();
}
#[async_trait::async_trait]
impl Handler<ToUser> for User {
    /// Forwards the text to the websocket side via the user's channel.
    async fn handle(&mut self, msg: ToUser, _ctx: &mut Context<Self>) {
        self.tx.send(msg.0).expect("Could not pipe message back");
    }
}
// Room: single shared actor that knows every connected user.
struct Room {
    // Connected users, keyed by connection id.
    users: HashMap<Uuid, Address<User>>,
}
impl Actor for Room {}
impl Room {
    /// Creates an empty room.
    fn new() -> Self {
        Self {
            users: HashMap::new(),
        }
    }
}
// GotUserMessage: (sender id, text) broadcast request from a user.
struct GotUserMessage(Uuid, String);
impl Message for GotUserMessage {
    type Result = ();
}
#[async_trait::async_trait]
impl Handler<GotUserMessage> for Room {
    /// Broadcasts the text to every user except the sender.
    async fn handle(&mut self, msg: GotUserMessage, _ctx: &mut Context<Self>) {
        for (id, addr) in self.users.iter() {
            // NOTE(review): debug print; runs once per user, including the
            // sender that is then skipped below.
            println!("sending!");
            // Send to all but sender
            if id != &msg.0 {
                addr.send(ToUser(msg.1.clone()))
                    .await
                    .expect("Could not send");
            }
        }
    }
}
// Join: registers a user actor with the room.
struct Join(Uuid, Address<User>);
impl Message for Join {
    type Result = ();
}
#[async_trait::async_trait]
impl Handler<Join> for Room {
    async fn handle(&mut self, msg: Join, _ctx: &mut Context<Self>) {
        self.users.insert(msg.0, msg.1);
        println!("Joined! now there are {}", &self.users.len());
    }
}
// Leave: removes a user (socket closed or errored).
struct Leave(Uuid);
impl Message for Leave {
    type Result = ();
}
#[async_trait::async_trait]
impl Handler<Leave> for Room {
    async fn handle(&mut self, msg: Leave, _ctx: &mut Context<Self>) {
        println!("left!");
        self.users.remove(&msg.0);
    }
}
// Main
#[tokio::main]
async fn main() {
    pretty_env_logger::init();

    // One shared chat-room actor; every websocket connection talks to it.
    let room_addr = Room::new().create(None).spawn(&mut Tokio::Global);
    let room_filter = warp::any().map(move || room_addr.clone());

    // GET /ws -> websocket upgrade, wired to the room.
    let chat = warp::path("ws")
        .and(warp::ws())
        .and(room_filter)
        .map(|ws: warp::ws::Ws, room| ws.on_upgrade(move |socket| user_connected(socket, room)));

    // GET / -> static chat page.
    let index = warp::path::end().map(|| warp::reply::html(INDEX_HTML));

    warp::serve(index.or(chat)).run(([127, 0, 0, 1], 3030)).await;
}
/// Drives one websocket connection for its entire lifetime: registers a new
/// `User` actor with the room, pipes the actor's outgoing messages back to
/// the socket, forwards incoming socket text to the room, and leaves the
/// room when the socket closes or errors.
async fn user_connected(ws: WebSocket, room: xtra::Address<Room>) {
    let (mut user_ws_tx, mut user_ws_rx) = ws.split();
    let (tx, rx) = mpsc::unbounded_channel();
    let mut rx = UnboundedReceiverStream::new(rx);

    // Spawn the per-connection actor and announce it to the room.
    let id = Uuid::new_v4();
    let addr = User::new(id, tx).create(None).spawn(&mut Tokio::Global);
    room.send(Join(id, addr))
        .await
        .expect("Could not join the room");

    // Pipe messages back up to the user
    tokio::task::spawn(async move {
        while let Some(value) = rx.next().await {
            let message = warp::ws::Message::text(value);
            user_ws_tx
                .send(message)
                .unwrap_or_else(|e| {
                    eprintln!("websocket send error: {}", e);
                })
                .await;
        }
    });

    // Receive messages
    while let Some(result) = user_ws_rx.next().await {
        let msg = match result {
            Ok(msg) => msg,
            // Any transport error ends the session.
            Err(_) => {
                break;
            }
        };
        // Send in to actor. Non-text frames (binary/ping/close) are ignored.
        if let Ok(s) = msg.to_str() {
            room.send(GotUserMessage(id, s.to_string()))
                .await
                .expect("Could not receive message");
        };
    }

    // Socket is gone - deregister from the room.
    room.send(Leave(id))
        .await
        .expect("Could not leave the room");
}
/// Static single-page chat client served at `/`.
///
/// NOTE(review): `send.onclick` relies on the browser's implicit
/// id-to-global mapping (`window.send`) to find the button - confirm this
/// is intended rather than a `getElementById('send')` lookup.
static INDEX_HTML: &str = r#"<!DOCTYPE html>
<html lang="en">
<head>
<title>Warp Chat</title>
</head>
<body>
<h1>Warp chat</h1>
<div id="chat">
<p><em>Connecting...</em></p>
</div>
<input type="text" id="text" />
<button type="button" id="send">Send</button>
<script type="text/javascript">
const chat = document.getElementById('chat');
const text = document.getElementById('text');
const uri = 'ws://' + location.host + '/ws';
const ws = new WebSocket(uri);
function message(data) {
const line = document.createElement('p');
line.innerText = data;
chat.appendChild(line);
}
ws.onopen = function() {
chat.innerHTML = '<p><em>Connected!</em></p>';
};
ws.onmessage = function(msg) {
message(msg.data);
};
ws.onclose = function() {
chat.getElementsByTagName('em')[0].innerText = 'Disconnected!';
};
send.onclick = function() {
const msg = text.value;
ws.send(msg);
text.value = '';
message('<You>: ' + msg);
};
</script>
</body>
</html>
"#;
|
//! A lot of macros for hiding ugly unsafe blocks by changing most useful global static variables.
/// Shared reference to the currently active map (`GAME_MAP[CUR_MAP]`).
/// Wraps the `static mut` access in its own `unsafe` block so it can be
/// invoked from safe code; sound only while no mutable alias exists.
#[macro_export]
macro_rules! get_ref_curmap {
    () => ({
        use map;
        unsafe { &map::GAME_MAP[map::CUR_MAP] }
    })
}
/// Like `get_ref_curmap!`, but without an `unsafe` block ("wo_unsafe"):
/// the caller must wrap the invocation in `unsafe` themselves.
#[macro_export]
macro_rules! get_ref_curmap_wo_unsafe {
    () => ({
        use map;
        &map::GAME_MAP[map::CUR_MAP]
    })
}
/// Mutable reference to the currently active map (`GAME_MAP[CUR_MAP]`).
/// Wraps the `static mut` access in `unsafe`; sound only while no other
/// reference to the map exists.
#[macro_export]
macro_rules! get_mut_ref_curmap {
    () => ({
        use map;
        unsafe { &mut map::GAME_MAP[map::CUR_MAP] }
    })
}
/// Shared reference to the map cell at `($x, $y)` of the current map.
///
/// The previous version had three identical arms for `ident`, `path` and
/// `expr` fragments; a single `expr` arm matches all of those invocations
/// (idents and paths are valid expressions), so the duplicates are collapsed
/// without changing any call site.
#[macro_export]
macro_rules! get_ref_cell {
    ( $x:expr, $y:expr ) => ({
        use map;
        unsafe {
            &map::GAME_MAP[map::CUR_MAP].Cells[$x][$y]
        }
    })
}
/// Mutable reference to the map cell at `($x, $y)`, without an `unsafe`
/// block: the caller supplies the `unsafe` context ("wo_unsafe").
///
/// Collapsed from three identical `ident`/`path`/`expr` arms into one `expr`
/// arm, which matches all of those invocation forms.
#[macro_export]
macro_rules! get_mut_ref_cell_wo_unsafe {
    ( $x:expr, $y:expr ) => ({
        use map;
        &mut map::GAME_MAP[map::CUR_MAP].Cells[$x][$y]
    });
}
/// Mutable reference to the map cell at `($x, $y)` of the current map,
/// wrapped in `unsafe`.
///
/// Generalized: the old macro only accepted `ident` arguments, unlike its
/// sibling `get_ref_cell!` which also took paths and expressions. An `expr`
/// fragment accepts idents, paths and arbitrary index expressions, so this
/// is backward compatible and consistent with the rest of the family.
#[macro_export]
macro_rules! get_mut_ref_cell {
    ( $x:expr, $y:expr ) => ({
        use map;
        unsafe {
            &mut map::GAME_MAP[map::CUR_MAP].Cells[$x][$y]
        }
    })
}
/// Shared reference to a hero: `get_ref_curhero!()` uses the global
/// `CUR_HERO` index, `get_ref_curhero!(n)` an explicit index.
///
/// The former separate `path` and `ident` arms were identical; one `expr`
/// arm matches both (plus arbitrary index expressions) without changing any
/// existing call site.
#[macro_export]
macro_rules! get_ref_curhero {
    () => ({
        use hero;
        unsafe {
            &hero::HEROES[hero::CUR_HERO]
        }
    });
    ( $HeroNum:expr ) => ({
        use hero;
        unsafe {
            &hero::HEROES[$HeroNum]
        }
    })
}
/// Mutable reference to a hero: `get_mut_ref_curhero!()` uses the global
/// `CUR_HERO` index, `get_mut_ref_curhero!(n)` an explicit index.
///
/// The former separate `ident` and `path` arms were identical; one `expr`
/// arm matches both (plus arbitrary index expressions) without changing any
/// existing call site.
#[macro_export]
macro_rules! get_mut_ref_curhero {
    () => ({
        use hero;
        unsafe {
            &mut hero::HEROES[hero::CUR_HERO]
        }
    });
    ( $HeroNum:expr ) => ({
        use hero;
        unsafe {
            &mut hero::HEROES[$HeroNum]
        }
    })
}
/// Mutable hero reference without an `unsafe` block: the caller supplies
/// the `unsafe` context ("wo_unsafe"), as in the other `_wo_unsafe` macros.
///
/// Fix: the old zero-argument arm wrapped the access in `unsafe` anyway,
/// contradicting both the macro's name and its other arms (and producing an
/// `unused_unsafe` warning at call sites that already supply the block).
/// The `ident`/`path` arms are also collapsed into a single `expr` arm.
#[macro_export]
macro_rules! get_mut_ref_curhero_wo_unsafe {
    () => ({
        use hero;
        &mut hero::HEROES[hero::CUR_HERO]
    });
    ( $HeroNum:expr ) => ({
        use hero;
        &mut hero::HEROES[$HeroNum]
    })
}
/// Logs `$message` via `loggers::log`, but only when the global `DEBUG`
/// flag is set.
///
/// The redundant empty `else {}` branch of the old version is dropped; an
/// `if` without `else` is equivalent here.
#[macro_export]
macro_rules! log {
    ($message:expr) => ({
        use ::DEBUG;
        if DEBUG {
            use loggers::log;
            log($message);
        }
    })
}
/// Logs `$message` via `loggers::strict_log`, but only when the global
/// `DEBUG` flag is set.
///
/// The redundant empty `else {}` branch of the old version is dropped; an
/// `if` without `else` is equivalent here.
#[macro_export]
macro_rules! strict_log {
    ($message:expr) => ({
        use ::DEBUG;
        if DEBUG {
            use loggers::strict_log;
            strict_log($message);
        }
    })
}
|
use std::f64::consts::PI;
use super::model::Boid;
use super::model::velocity::Velocity;
// Relative weights of the three steering behaviours when blending them
// in `brain` (alignment, avoidance, cohesion).
const AVERAGE_WEIGHT: f64 = 2.0;
const AVOIDANCE_WEIGHT: f64 = 3.0;
const SEEK_WEIGHT: f64 = 2.0;
// Normalisation divisor so the blend is a weighted average.
const TOTAL_WEIGHT: f64 = AVERAGE_WEIGHT + AVOIDANCE_WEIGHT + SEEK_WEIGHT;
/// Computes a boid's next velocity as a weighted average of three steering
/// rules - alignment (average velocity), avoidance of the nearest neighbour,
/// and cohesion towards the group - over the neighbours within 100 units,
/// capping the resulting speed at 300.
pub fn brain(boid: &Boid, clique: &Vec<Boid>) -> Option<Velocity> {
    // Neighbours within perception range. The boid itself is included,
    // since its distance to itself is 0.
    let closest: Vec<Boid> = clique
        .iter()
        .filter(|c| distance(boid, c) <= 100.0)
        .cloned()
        .collect();

    let alignment = average_velocity(&closest) * AVERAGE_WEIGHT;
    let avoidance = avoid_closest(boid, &closest) * AVOIDANCE_WEIGHT;
    let cohesion = seek_center(boid, &closest) * SEEK_WEIGHT;

    // Weighted average of the three contributions.
    let mut velocity = (alignment + avoidance + cohesion) * (1.0 / TOTAL_WEIGHT);

    // Clamp runaway speeds.
    if velocity.speed.abs() > 300.0 {
        velocity.speed = 300.0
    }
    Some(velocity)
}
/// Mean velocity of the boids in `clique`.
///
/// Fix: the old version divided by `clique.len()` unconditionally, so an
/// empty clique produced a division by zero and NaN velocity components.
/// An empty clique now contributes a zero velocity instead.
fn average_velocity(clique: &Vec<Boid>) -> Velocity {
    if clique.is_empty() {
        return Velocity::new(0.0, 0.0);
    }
    let n = clique.len() as f64;
    let sum = clique.iter()
        .map(|c| c.velocity.clone())
        .fold(Velocity::new(0.0, 0.0), |acc, v| acc + v);
    sum * (1.0 / n)
}
/// Velocity pointing directly away from the nearest other boid, with a
/// speed that grows the closer that boid is; zero velocity when there is no
/// other boid.
fn avoid_closest(boid: &Boid, clique: &Vec<Boid>) -> Velocity {
    match closest_boid(boid, clique) {
        None => Velocity::new(0.0, 0.0),
        Some(closest) => {
            let dx = closest.x - boid.x;
            let dy = closest.y - boid.y;
            // Head in the opposite direction of the neighbour.
            let heading = dy.atan2(dx) + PI;
            let separation = dx.abs().hypot(dy.abs());
            // Speed scales inversely with the separation distance.
            let speed = 300.0 * 5.0 / separation;
            Velocity::new(heading, speed)
        }
    }
}
/// Returns the nearest boid in `clique` at a strictly positive distance from
/// `boid` (i.e. excluding the boid itself / exact overlaps), or `None`.
///
/// Fix: the old version sorted the whole clique (O(n log n), plus an
/// `unwrap` on `partial_cmp`) just to take the first element. A single O(n)
/// scan suffices; ties keep the earlier element, matching the former stable
/// sort, and NaN distances are skipped just as the old `> 0.0` filter
/// dropped them.
fn closest_boid(boid: &Boid, clique: &Vec<Boid>) -> Option<Boid> {
    let mut best: Option<(Boid, f64)> = None;
    for candidate in clique.iter() {
        let d = distance(boid, candidate);
        // `!(d > 0.0)` also rejects NaN, like the previous filter did.
        if !(d > 0.0) {
            continue;
        }
        let is_closer = match best {
            None => true,
            Some((_, best_d)) => d < best_d,
        };
        if is_closer {
            best = Some((candidate.clone(), d));
        }
    }
    best.map(|(b, _)| b)
}
/// Euclidean distance between two boids.
///
/// `f64::hypot` squares its arguments, so it is sign-agnostic; the previous
/// `.abs()` calls on both components were redundant and have been dropped.
fn distance(u: &Boid, v: &Boid) -> f64 {
    (u.x - v.x).hypot(u.y - v.y)
}
/// Steering velocity towards the neighbourhood's centre of mass.
///
/// NOTE(review): `center_x`/`center_y` are the *sums* of the neighbour
/// positions - they are never divided by `n` before `dx`/`dy` are computed;
/// only the final velocity is scaled by `1/n`. Confirm this is the intended
/// cohesion rule and not a missing averaging step.
///
/// Also note: an empty clique gives `n == 0`, so `1.0/n` is infinite.
fn seek_center(boid: &Boid, clique: &Vec<Boid>) -> Velocity {
    let n = clique.len() as f64;
    // Sum of all neighbour positions.
    let (center_x, center_y) = clique.iter()
        .map(|c| (c.x, c.y))
        .fold((0f64, 0f64), |acc, t| (acc.0 + t.0, acc.1 + t.1));
    let dx = center_x - boid.x;
    let dy = center_y - boid.y;
    let heading = dy.atan2(dx);
    // Speed is at least 100, growing with the distance to the (summed) centre.
    let speed = 100f64.max(10f64*dx.hypot(dy));
    let velocity = Velocity::new(heading, speed) * (1.0/n);
    velocity
}
|
/// Returns the parenthesis balance of `line`: each `(` counts +1, each `)`
/// counts -1, and every other character is ignored. A balanced line yields
/// 0; a negative result means more closing than opening parentheses.
/// (The name's spelling is kept for caller compatibility.)
pub fn count_paranthesis(line: &str) -> i64 {
    line.chars()
        .map(|c| match c {
            '(' => 1,
            ')' => -1,
            _ => 0,
        })
        .sum()
}
/// Counts the `\n` bytes in `s`, delegating to the SIMD-accelerated
/// `bytecount` crate.
pub fn count_newlines(s: &str) -> usize {
    let bytes = s.as_bytes();
    bytecount::count(bytes, b'\n')
}
|
extern crate rand;
use rand::Rng;
use rand::distributions::{IndependentSample, Range};
use find_folder::Search;
use find_folder::Error;
use std::path::PathBuf;
/// Returns `x` or `y` with equal probability.
pub fn select_random<T>(x: T, y: T) -> T {
    let heads: bool = rand::thread_rng().gen();
    if heads { x } else { y }
}
/// Returns one of `x`, `y`, `z`, chosen uniformly at random.
pub fn select_random_3<T>(x: T, y: T, z: T) -> T {
    let mut rng = rand::thread_rng();
    let choice = Range::new(0, 3).ind_sample(&mut rng);
    match choice {
        0 => x,
        1 => y,
        2 => z,
        // Unreachable: `Range::new(0, 3)` only yields 0, 1 or 2.
        _ => panic!("Random PANIC!!"),
    }
}
/// Returns a uniformly random `usize` in `[min, max_exclusize)`.
pub fn select_random_in_range(min: usize, max_exclusize: usize) -> usize {
    let mut rng = rand::thread_rng();
    let range = Range::new(min, max_exclusize);
    range.ind_sample(&mut rng)
}
/// Locates the `assets` folder, searching up to 3 levels among children and
/// parents of the working directory.
///
/// # Panics
/// Panics when no `assets` folder is found. The previous version bound the
/// search error to an unused, non-snake-case `E` (a compiler warning) and
/// discarded it; the error is now included in the panic message.
pub fn find_assets_folder() -> PathBuf {
    match Search::KidsThenParents(3, 3).for_folder("assets") {
        Ok(path) => path,
        Err(err) => panic!("Assets folder not found: {:?}", err),
    }
}
pub fn find_asset(filename: &str) -> PathBuf {
find_assets_folder().join(filename)
} |
//! Code to filter a trace down to queries.
//!
//! We are looking for symbols like this:
//!
//! rustc::ty::maps::<impl rustc::ty::maps::queries::borrowck<'tcx>>::force
//!
//! which we want to transform to just the name of the query `borrowck`.
// Demangled shape: rustc::ty::maps::<impl ...::queries::NAME<'tcx>>::force
const QUERY_PREFIX_0: &str = "rustc::ty::maps::<impl rustc::ty::maps::queries::";
const QUERY_SUFFIX_0: &str = ">::force";
// Newer demangled shape: rustc::ty::maps::__query_compute::NAME
const QUERY_PREFIX_1: &str = "rustc::ty::maps::__query_compute::";
// Raw mangled shape; the query name follows as a length-prefixed segment.
const QUERY_PREFIX_2: &str = "_ZN5rustc2ty4maps15__query_compute";
use std::str::FromStr;
use trace::TraceArgs;
/// Rewrites a trace's stack in place so it contains only query names,
/// prefixed by a synthetic "main()" root frame; non-query frames are
/// dropped.
pub fn to_query_stack(trace_args: &mut TraceArgs) {
    let mut stack = vec!["main()".to_string()];
    for frame in trace_args.stack.iter() {
        if let Some(query) = match_query(frame) {
            stack.push(query.to_string());
        }
    }
    trace_args.stack = stack;
}
/// Extracts the query name from a stack frame symbol, trying the formats a
/// query symbol can have in different versions of the compiler; returns
/// `None` for frames that are not query symbols.
///
/// Fixes vs. the previous version (for the mangled `QUERY_PREFIX_2` form):
/// - the first byte after the prefix was assumed to be a digit and indexed
///   without a bounds check, panicking on a frame that ends right at the
///   prefix; the digit scan is now bounds-checked from the start,
/// - a malformed length or a too-short frame now yields `None` instead of
///   panicking on `from_str(...).unwrap()` or an out-of-range slice.
fn match_query(frame: &str) -> Option<&str> {
    // Demangled form 0: "<prefix>NAME>::force".
    if frame.starts_with(QUERY_PREFIX_0) && frame.ends_with(QUERY_SUFFIX_0) {
        return Some(&frame[QUERY_PREFIX_0.len()..frame.len() - QUERY_SUFFIX_0.len()]);
    }
    // Demangled form 1: "<prefix>NAME".
    if frame.starts_with(QUERY_PREFIX_1) {
        return Some(&frame[QUERY_PREFIX_1.len()..]);
    }
    // Mangled form: "<prefix><len><NAME...>" - parse the decimal length,
    // then take that many bytes as the query name.
    if frame.starts_with(QUERY_PREFIX_2) {
        let bytes = frame.as_bytes();
        let digits_start = QUERY_PREFIX_2.len();
        let mut digits_end = digits_start;
        while digits_end < bytes.len() && bytes[digits_end].is_ascii_digit() {
            digits_end += 1;
        }
        let num_chars = usize::from_str(&frame[digits_start..digits_end]).ok()?;
        // `get` returns None (instead of panicking) if the frame is shorter
        // than the declared name length.
        return frame.get(digits_end..digits_end + num_chars);
    }
    None
}
|
//use super::field::{One, Zero};
//use std::ops::{Add, BitAnd, Shl, Shr, Sub};
/// Combined division and remainder in a single call.
pub trait DivRem<RHS = Self>: Sized {
    type Output;
    // Returns the (quotient, remainder) pair.
    fn divrem(self, rhs: RHS) -> (Self::Output, Self::Output);
}
/// Extended Euclidean algorithm.
pub trait Egcd<RHS = Self> {
    type Output;
    // NOTE(review): returns a pair whose exact meaning (Bezout coefficients
    // vs. coefficient + gcd) is defined by implementors - see the blanket
    // `ModInv` impl below, which relies on the first component.
    fn egcd(self, rhs: RHS) -> (Self::Output, Self::Output);
}
/// Modular multiplicative inverse.
pub trait ModInv<MOD = Self> {
    type Output;
    fn modinv(self, modulo: MOD) -> Self::Output;
}
// Blanket impl: any type with an extended-gcd operation gets `modinv`.
impl<T> ModInv for T
where
    T: Egcd<Output = T>,
{
    type Output = Self;
    fn modinv(self, modulo: Self) -> Self::Output {
        // Takes the first component of `egcd` as the inverse.
        // NOTE(review): this assumes the `Egcd` impl returns the Bezout
        // coefficient of `self` first, already reduced mod `modulo`, and
        // that gcd(self, modulo) == 1 - confirm against the `Egcd` impls.
        self.egcd(modulo).0
    }
}
|
use ffi;
use error;
use device::Device;
use MantleObject;
use DeviceExt;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use std::slice;
use std::mem;
/// A chunk of GPU memory allocated through the Mantle API.
pub struct Buffer {
    // Raw Mantle allocation handle; freed in `Drop`.
    memory: ffi::GR_GPU_MEMORY,
    // Requested size in bytes (the actual allocation is rounded up to the
    // heap's page size).
    size: usize,
    // Memory state the buffer is expected to be in between operations.
    default_state: ffi::GR_ENUM,
}
/// A CPU-visible mapping of a `Buffer`; unmapped when dropped.
pub struct Mapping<'a, T> {
    // Keeps the buffer alive and identifies what to unmap on drop.
    buffer: &'a Buffer,
    // Start of the mapped region, interpreted as `T`s.
    pointer: *mut T,
    // NOTE(review): used as the *element* count of the exposed slice, but it
    // is copied from `Buffer::size`, which is in *bytes* - for `T` larger
    // than one byte the Deref'd slice would overrun the mapping. Confirm
    // that `T` is intended to be byte-sized.
    size: usize,
}
impl Buffer {
    /// Allocates a buffer of at least `size` bytes on the device's first
    /// heap; the allocation is rounded up to a whole number of heap pages.
    ///
    /// # Panics
    /// Panics if the Mantle allocation fails.
    pub fn empty(device: &Arc<Device>, size: usize) -> Arc<Buffer> {
        let heap = &device.get_heaps()[0];

        let infos = ffi::GR_MEMORY_ALLOC_INFO {
            // Round `size` up to the next multiple of the page size. The old
            // formula `page_size * (1 + (size - 1) / page_size)` underflowed
            // (debug panic) for `size == 0`; this ceiling form is equivalent
            // for size > 0 and yields 0 for a zero-sized request.
            size: (heap.page_size * ((size + heap.page_size - 1) / heap.page_size))
                as ffi::GR_GPU_SIZE,
            alignment: 0,
            flags: 0,
            heapCount: 1,
            heaps: [heap.id, 0, 0, 0, 0, 0, 0, 0],
            memPriority: ffi::GR_MEMORY_PRIORITY_NORMAL,
        };

        let mem = unsafe {
            // SAFETY: `GR_GPU_MEMORY` is a plain FFI handle, so an all-zero
            // value is a valid placeholder that `grAllocMemory` overwrites on
            // success (`check_result(..).unwrap()` panics otherwise). This
            // replaces the former `mem::uninitialized()`, which is deprecated
            // and undefined behaviour.
            let mut mem = mem::zeroed();
            error::check_result(ffi::grAllocMemory(*device.get_id(), &infos, &mut mem)).unwrap();
            mem
        };
        /*
        // switching to `GR_WSI_WIN_IMAGE_STATE_PRESENT_WINDOWED` state
        unsafe {
            let infos = ffi::GR_CMD_BUFFER_CREATE_INFO {
                queueType: ffi::GR_QUEUE_UNIVERSAL,
                flags: 0,
            };
            let mut cmd_buffer = mem::uninitialized();
            error::check_result(ffi::grCreateCommandBuffer(*device.get_id(), &infos, &mut cmd_buffer)).unwrap();
            error::check_result(ffi::grBeginCommandBuffer(cmd_buffer, ffi::GR_CMD_BUFFER_OPTIMIZE_ONE_TIME_SUBMIT)).unwrap();
            let transition = ffi::GR_MEMORY_STATE_TRANSITION {
                mem: mem,
                oldState: ffi::GR_MEMORY_STATE_DATA_TRANSFER,
                newState: ffi::GR_MEMORY_STATE_GRAPHICS_SHADER_READ_ONLY,
                offset: 0,
                regionSize: size as ffi::GR_GPU_SIZE,
            };
            ffi::grCmdPrepareMemoryRegions(cmd_buffer, 1, &transition);
            error::check_result(ffi::grEndCommandBuffer(cmd_buffer)).unwrap();
            let mem = ffi::GR_MEMORY_REF {
                mem: mem,
                flags: 0,
            };
            error::check_result(ffi::grQueueSubmit(device.get_queue(), 1, &cmd_buffer, 1, &mem, 0)).unwrap();
        }
        */
        Arc::new(Buffer {
            memory: mem,
            size: size,
            default_state: ffi::GR_MEMORY_STATE_DATA_TRANSFER,
        })
    }

    /// Maps the whole allocation into CPU-visible memory.
    ///
    /// NOTE(review): the returned `Mapping` reuses `self.size` (bytes) as its
    /// element count - see the note on `Mapping::size`.
    ///
    /// # Panics
    /// Panics if `grMapMemory` fails.
    pub fn map<T>(&self) -> Mapping<T> {
        let data = unsafe {
            // SAFETY: zeroed pointer-sized placeholder, overwritten by
            // `grMapMemory` on success (checked by `check_result`). Replaces
            // the deprecated, UB-prone `mem::uninitialized()`.
            let mut data = mem::zeroed();
            error::check_result(ffi::grMapMemory(self.memory, 0, &mut data)).unwrap();
            data
        };
        Mapping {
            buffer: self,
            pointer: data as *mut _,
            size: self.size,
        }
    }
}
impl Drop for Buffer {
    fn drop(&mut self) {
        // Release the GPU allocation.
        // NOTE(review): `unwrap` here means a failed `grFreeMemory` panics
        // inside `drop`, which aborts the process if it happens during an
        // unwind - consider logging the failure instead.
        unsafe {
            error::check_result(ffi::grFreeMemory(self.memory)).unwrap();
        }
    }
}
impl<'a, T> Deref for Mapping<'a, T> {
    type Target = [T];

    /// Shared view of the mapped memory.
    fn deref(&self) -> &[T] {
        // Fix: use the *const* constructor for the shared slice. The old
        // version built the immutable view with `from_raw_parts_mut`, which
        // is the wrong constructor for handing out `&[T]`.
        // SAFETY: `pointer` is the base of a live mapping held open by
        // `self.buffer` for the lifetime of this `Mapping`.
        unsafe {
            slice::from_raw_parts(self.pointer, self.size)
        }
    }
}
impl<'a, T> DerefMut for Mapping<'a, T> {
    /// Mutable view of the mapped memory.
    ///
    /// NOTE(review): `size` comes from `Buffer::size` in bytes but is used
    /// here as an element count - confirm `T` is meant to be byte-sized.
    fn deref_mut(&mut self) -> &mut [T] {
        unsafe {
            slice::from_raw_parts_mut(self.pointer, self.size)
        }
    }
}
impl<'a, T> Drop for Mapping<'a, T> {
    fn drop(&mut self) {
        // Unmap when the mapping goes out of scope.
        // NOTE(review): as with `Buffer`, a failure here panics inside
        // `drop`.
        unsafe {
            error::check_result(ffi::grUnmapMemory(self.buffer.memory)).unwrap();
        }
    }
}
|
//! Commands available to use on the HD44780 device.
use crate::instructions::*;
/// Commands every driver must support
/// (i.e. the commands that only require a write-capable bus).
pub trait Driver {
    type Error;
    /// Clears entire display and sets DDRAM address 0 in address counter.
    fn clear_display(&mut self) -> Result<(), Self::Error>;
    /// Sets DDRAM address 0 in address counter. Also returns display from being shifted to original
    /// position. DDRAM contents remain unchanged.
    ///
    /// ```rust, ignore
    /// lcd.return_home();
    /// ```
    fn return_home(&mut self) -> Result<(), Self::Error>;
    /// Sets cursor move direction and specifies display shift. These operations are performed
    /// during data write and read.
    fn set_entry_mode(
        &mut self,
        direction: IncrementDecrement,
        display_shift: AccompaniesDisplayShift,
    ) -> Result<(), Self::Error>;
    // todo! individual entry mode settings
    /// Sets entire display (D) on/off, cursor on/off (C), and blinking of cursor position character
    /// (B).
    fn set_display_control(
        &mut self,
        display_on: ShowDisplay,
        cursor_displayed: ShowCursor,
        cursor_blink: Blink,
    ) -> Result<(), Self::Error>;
    // todo! individual display control settings
    // todo! could provide mutation methods for cursor/display shift like entry mode etc.
    /// Move the cursor left or right once.
    fn shift_cursor(&mut self, direction: ShiftDirection) -> Result<(), Self::Error>;
    /// Move the display left or right once.
    fn shift_display(&mut self, direction: ShiftDirection) -> Result<(), Self::Error>;
    /// Sets interface data length (DL), number of display lines (N), and character font (F).
    fn function_set(
        &mut self,
        data_length: DataLength,
        num_lines: NumberOfDisplayLines,
        font: CharacterFont,
    ) -> Result<(), Self::Error>;
    // todo! function set methods
    /// Sets CGRAM address. CGRAM data is sent and received after this setting.
    fn set_cgram_address(&mut self, address: u8) -> Result<(), Self::Error>;
    /// Sets DDRAM address, moving the cursor to the specified position. The next character written
    /// will appear at this position. DDRAM data is sent and received after this setting. Line 2
    /// begins at address 40.
    ///
    /// ```rust, ignore
    /// lcd.position(5); // Line 1, column 5
    /// lcd.write_char('X'); // 'X' at line 1, column 5
    /// lcd.position(45); // Line 2, column 5
    /// lcd.write_char('Y'); // 'Y' at line 2, column 5, right below 'X'
    /// ```
    fn set_position(&mut self, address: u8) -> Result<(), Self::Error>;
    /// Writes a byte of data into DDRAM or CGRAM. Type is selected by setting either DDRAM or CGRAM
    /// address. Note: character will be truncated to fit into u8.
    fn write_char(&mut self, data: char) -> Result<(), Self::Error>;
    /// Writes a string of data to the display. Note: each character in the string is written as the
    /// corresponding byte and will be truncated if does not fit into u8.
    fn write_str(&mut self, str: &str) -> Result<(), Self::Error>;
    /// Write a byte of data to the display.
    fn write_byte(&mut self, byte: u8) -> Result<(), Self::Error>;
    /// Writes a series of bytes to the display.
    fn write_bytes(&mut self, bytes: &[u8]) -> Result<(), Self::Error>;
    // todo! cgram stuff
}
/// Commands that require a read-write bus
pub trait ReadableDriver {
    type Error;
    /// Reads busy flag (BF) indicating internal operation is being performed and reads address
    /// counter contents.
    fn read_busy_flag_and_address_counter(&mut self) -> Result<u8, Self::Error>;
    /// Reads data from DDRAM or CGRAM. Type is selected by setting either DDRAM or CGRAM address.
    fn read_data(&mut self) -> Result<u8, Self::Error>;
}
|
use survey_manager_core::dtos::{SurveyDTO, SurveyDTOs, ListViewSurveyDTO};
use survey_manager_core::app_services::repository_contracts::SurveyDTOReadRepository;
/// Read-side repository for survey DTOs backed by MySQL.
pub struct MysqlSurveyDTOsRepository {
    // A single connection to Mysql. Handed down from a pool likely.
    conn: mysql::PooledConn,
}
impl MysqlSurveyDTOsRepository {
    /// Creates a repository holding a connection checked out of the global
    /// `MYSQL_POOL`.
    ///
    /// # Panics
    /// Panics if no connection can be obtained from the pool.
    pub fn new() -> MysqlSurveyDTOsRepository {
        let pool = super::MYSQL_POOL.clone();
        MysqlSurveyDTOsRepository {
            conn: pool.get_conn().unwrap()
        }
    }
}
impl SurveyDTOReadRepository for MysqlSurveyDTOsRepository {
    type Error = mysql::Error;

    /// Loads the full survey JSON for `id`, but only when `author` owns it.
    /// Returns `Ok(None)` when no matching row exists.
    ///
    /// # Panics
    /// Panics if the stored `survey_data` column is not valid `SurveyDTO`
    /// JSON (preserved from the previous version).
    fn get_survey_for_author(&mut self, id: &String, author: &String) -> Result<Option<SurveyDTO>, mysql::Error> {
        // Propagate query errors with `?` instead of the former
        // match-then-`return Err(e)` dance (identical behaviour).
        let mut q_result = self.conn.prep_exec(
            "SELECT survey_data FROM survey WHERE id=? AND author=?",
            (id, author),
        )?;

        let survey = match q_result.next() {
            Some(row_result) => {
                let row = row_result?;
                let survey_data: String = mysql::from_row(row);
                Some(serde_json::from_str(&survey_data).unwrap())
            }
            None => None,
        };
        Ok(survey)
    }

    /// Lists all surveys belonging to `author` in list-view form; returns
    /// `Ok(None)` when the author has no surveys.
    fn get_surveys_by_author(&mut self, author: &String) -> Result<Option<SurveyDTOs>, mysql::Error> {
        let q_result = self.conn.prep_exec(
            "SELECT id, author, title, category FROM survey WHERE author=?",
            (author,),
        )?;

        let mut surveys = Vec::new();
        for row_result in q_result {
            let row = row_result?;
            let (id, author, title, category) = mysql::from_row(row);
            surveys.push(ListViewSurveyDTO {
                id,
                author,
                title,
                category,
            });
        }

        // `is_empty()` instead of `len() == 0` - same behaviour, clearer.
        if surveys.is_empty() {
            Ok(None)
        } else {
            Ok(Some(SurveyDTOs { surveys }))
        }
    }
}
|
// svd2rust-generated register reader for CCVR: the raw 32-bit value plus
// typed accessors for its bit fields. Kept byte-identical apart from these
// comments, since the file is machine-generated.
#[doc = "Reader of register CCVR"]
pub type R = crate::R<u32, super::CCVR>;
#[doc = "Reader of field `NCV`"]
pub type NCV_R = crate::R<u8, u8>;
#[doc = "Reader of field `PCV`"]
pub type PCV_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:3 - NMOS compensation value"]
    #[inline(always)]
    pub fn ncv(&self) -> NCV_R {
        // Low nibble of the register.
        NCV_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - PMOS compensation value"]
    #[inline(always)]
    pub fn pcv(&self) -> PCV_R {
        // Second nibble of the register.
        PCV_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
}
|
use std::fmt;
use std::path::PathBuf;
/// Convenience alias: `Result` specialised to this crate's `Error`.
pub type Result<T> = std::result::Result<T, Error>;
/// The various kinds of errors that can happen while reading input.
#[derive(Debug)]
pub enum Error {
    /// An I/O error (such as not being able to read the input file)
    Io(std::io::Error),
    /// An error while reading or writing CSV.
    Csv(csv::Error),
    /// The timetable file is not valid.
    InvalidTimetable {
        // Path of the offending timetable file.
        file: PathBuf,
        // 1-based(?) line in that file - NOTE(review): confirm the base
        // against the code that constructs this variant.
        line: usize,
        // What exactly is wrong on that line.
        error: TimetableError,
    },
}
/// Validation problems specific to the timetable file.
#[derive(Debug)]
pub enum TimetableError {
    /// There are unknown and/or missing players in the previous timetable.
    PlayerMismatch,
    /// There are too many or too few players assigned on a match.
    InvalidPlayerCount,
}
// Allow `?` to convert I/O errors into this crate's `Error`.
impl From<std::io::Error> for Error {
    fn from(err: std::io::Error) -> Error {
        Error::Io(err)
    }
}
// Allow `?` to convert CSV errors into this crate's `Error`.
impl From<csv::Error> for Error {
    fn from(err: csv::Error) -> Error {
        Error::Csv(err)
    }
}
impl fmt::Display for Error {
    /// Delegates to the wrapped error for `Io`/`Csv`; timetable problems are
    /// rendered as `file:line: message`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Error::InvalidTimetable { file, line, error } => {
                write!(f, "{}:{}: {}", file.to_string_lossy(), line, error)
            }
            Error::Io(err) => err.fmt(f),
            Error::Csv(err) => err.fmt(f),
        }
    }
}
impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Every variant wraps an underlying cause that we can expose.
        match self {
            Error::Io(error) => Some(error),
            Error::Csv(error) => Some(error),
            Error::InvalidTimetable { error, .. } => Some(error),
        }
    }
}
impl fmt::Display for TimetableError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TimetableError::PlayerMismatch => write!(
                f,
                "Players in time table do not match with players in planning data"
            ),
            // Fix: the variant covers both directions (see its doc comment:
            // "too many or too few"), but the old message only said
            // "too many players".
            TimetableError::InvalidPlayerCount => {
                write!(f, "Match has too many or too few players")
            }
        }
    }
}
impl std::error::Error for TimetableError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Leaf error: there is no underlying cause to report.
        None
    }
}
|
extern crate reqwest;
extern crate twitter_rs;
use twitter_rs::*;
fn main() {
// Build a client context authenticated with the crate-provided Android
// consumer key/secret constants (`ANDROID_CK` / `ANDROID_CS`).
// NOTE(review): the returned context is dropped immediately — presumably a
// smoke test; confirm whether the result should be used.
twitter_rs::ClientContext::new_with_oauth(ANDROID_CK, ANDROID_CS);
}
|
pub mod configuration;
pub mod minecraft_related;
pub mod server_interactions;
pub mod shared_data;
pub mod web_server;
|
//! # Pretty Assertions
//!
//! When writing tests in Rust, you'll probably use `assert_eq!(a, b)` _a lot_.
//!
//! If such a test fails, it will present all the details of `a` and `b`.
//! But you have to spot the differences yourself, which is not always straightforward,
//! like here:
//!
//! 
//!
//! Wouldn't that task be _much_ easier with a colorful diff?
//!
//! 
//!
//! Yep — and you only need **one line of code** to make it happen:
//!
//! ```rust,ignore
//! #[macro_use] extern crate pretty_assertions;
//! ```
//!
//! <details>
//! <summary>Show the example behind the screenshots above.</summary>
//!
//! ```rust,ignore
//! // 1. add the `pretty_assertions` dependency to `Cargo.toml`.
//! // 2. insert this line at the top of your crate root or integration test
//! #[macro_use] extern crate pretty_assertions;
//!
//! fn main() {
//! #[derive(Debug, PartialEq)]
//! struct Foo {
//! lorem: &'static str,
//! ipsum: u32,
//! dolor: Result<String, String>,
//! }
//!
//! let x = Some(Foo { lorem: "Hello World!", ipsum: 42, dolor: Ok("hey".to_string())});
//! let y = Some(Foo { lorem: "Hello Wrold!", ipsum: 42, dolor: Ok("hey ho!".to_string())});
//!
//! assert_eq!(x, y);
//! }
//! ```
//! </details>
//!
//! ## Tip
//!
//! Specify it as [`[dev-dependencies]`](http://doc.crates.io/specifying-dependencies.html#development-dependencies)
//! and it will only be used for compiling tests, examples, and benchmarks.
//! This way the compile time of `cargo build` won't be affected!
//!
//! In your crate root, also add `#[cfg(test)]` to the crate import, like this:
//!
//! ```rust,ignore
//! #[cfg(test)] // <-- not needed in examples + integration tests
//! #[macro_use]
//! extern crate pretty_assertions;
//! ```
//!
//! ## Note
//!
//! * Each example and integration test also needs `#[macro_use] extern crate
//! pretty_assertions`, if you want colorful diffs there.
//! * The replacement is only effective in your own crate, not in other libraries
//! you include.
//! * `assert_ne` is also switched to multi-line presentation, but does _not_ show
//! a diff.
extern crate difference;
extern crate ansi_term;
mod format_changeset;
use std::fmt::{self, Debug, Display};
use difference::Changeset;
use format_changeset::format_changeset;
pub use ansi_term::Style;
// A computed line-based changeset between the pretty-printed (`{:#?}`)
// Debug renderings of two values; its `Display` impl (below) renders the
// colored diff shown in assertion failures.
#[doc(hidden)]
pub struct Comparison(Changeset);
impl Comparison {
    /// Builds a line-by-line changeset from the pretty-printed (`{:#?}`)
    /// `Debug` output of the two values.
    pub fn new<TLeft: Debug, TRight: Debug>(left: &TLeft, right: &TRight) -> Comparison {
        let rendered_left = format!("{:#?}", *left);
        let rendered_right = format!("{:#?}", *right);
        Comparison(Changeset::new(&rendered_left, &rendered_right, "\n"))
    }
}
impl Display for Comparison {
// Renders the stored changeset as a colored, line-oriented diff.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
format_changeset(f, &self.0)
}
}
#[macro_export]
// Drop-in replacement for `std::assert_eq!` that, on failure, panics with a
// colored diff of the two values' Debug output instead of the plain dump.
macro_rules! assert_eq {
// Trailing-comma form delegates to the two-argument form.
($left:expr , $right:expr,) => ({
assert_eq!($left, $right)
});
($left:expr , $right:expr) => ({
// Bind both operands once so each is evaluated exactly one time.
match (&($left), &($right)) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
panic!("assertion failed: `(left == right)`\
\n\
\n{}\
\n",
$crate::Comparison::new(left_val, right_val))
}
}
}
});
// Form with a custom message (format_args!-style trailing arguments).
($left:expr , $right:expr, $($arg:tt)*) => ({
match (&($left), &($right)) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
panic!("assertion failed: `(left == right)`: {}\
\n\
\n{}\
\n",
format_args!($($arg)*),
$crate::Comparison::new(left_val, right_val))
}
}
}
});
}
#[macro_export]
#[doc(hidden)]
// Shared implementation behind `assert_ne!`: panics when the two values
// compare equal via `PartialEq`.
macro_rules! __assert_ne {
($left:expr, $right:expr, $maybe_semicolon:expr, $($arg:tt)+) => ({
match (&($left), &($right)) {
(left_val, right_val) => {
if *left_val == *right_val {
let left_dbg = format!("{:?}", *left_val);
let right_dbg = format!("{:?}", *right_val);
// `==` said equal but the Debug output differs: show the diff
// plus a note about the PartialEq/Debug inconsistency.
if left_dbg != right_dbg {
panic!("assertion failed: `(left != right)`{}{}\
\n\
\n{}\
\n{}: According to the `PartialEq` implementation, both of the values \
are partially equivalent, even if the `Debug` outputs differ.\
\n\
\n",
$maybe_semicolon,
format_args!($($arg)+),
$crate::Comparison::new(left_val, right_val),
$crate::Style::new()
.bold()
.underline()
.paint("Note"))
}
// Debug output is identical for both sides: print it once.
// (Only reachable when the panic above did not fire.)
panic!("assertion failed: `(left != right)`{}{}\
\n\
\n{}:\
\n{:#?}\
\n\
\n",
$maybe_semicolon,
format_args!($($arg)+),
$crate::Style::new().bold().paint("Both sides"),
left_val)
}
}
}
});
}
#[macro_export]
// Drop-in replacement for `std::assert_ne!` with multi-line failure output;
// every form delegates to the hidden `__assert_ne!` helper above.
macro_rules! assert_ne {
($left:expr, $right:expr) => ({
__assert_ne!($left, $right, "", "");
});
// Trailing-comma form.
($left:expr, $right:expr,) => ({
__assert_ne!($left, $right, "", "");
});
// Form with a custom message; ": " separates it from the header.
($left:expr, $right:expr, $($arg:tt)+) => ({
__assert_ne!($left, $right, ": ", $($arg)+);
});
}
|
//! Helper functions and macros for use in the rest of PICL.
// use std::collections::HashMap;
/// A helper for making a HashMap literal.
///
/// Accepts one or more `key => value` pairs; a trailing comma after the
/// last pair is allowed (the original macro rejected it).
///
/// # Examples
/// ```
/// let my_map = map!{ "this" => "that", "foo" => "bar" };
/// ```
#[macro_export]
macro_rules! map(
    // Form with a trailing comma after the final pair.
    { $($key:expr => $value:expr),+ , } => {
        {
            let mut _map = ::std::collections::HashMap::new();
            $(_map.insert($key, $value);)+
            _map
        }
    };
    // Form without a trailing comma.
    { $($key:expr => $value:expr),+ } => {
        {
            let mut _map = ::std::collections::HashMap::new();
            $(_map.insert($key, $value);)+
            _map
        }
    };
);
|
use crate::report::Styled;
/// Writes a human-readable comparison of `expected` vs `actual` to `writer`.
///
/// When the `diff` feature is enabled and both values can be rendered as
/// text, a line-numbered diff is emitted via `write_diff_inner`; otherwise
/// both values are printed in full, labelled "(expected)" / "(actual)".
pub fn write_diff(
writer: &mut dyn std::fmt::Write,
expected: &crate::Data,
actual: &crate::Data,
expected_name: Option<&dyn std::fmt::Display>,
actual_name: Option<&dyn std::fmt::Display>,
palette: crate::report::Palette,
) -> Result<(), std::fmt::Error> {
// `rendered` is only ever assigned under cfg(feature = "diff"); the allow
// keeps the no-feature build warning-free.
#[allow(unused_mut)]
let mut rendered = false;
#[cfg(feature = "diff")]
if let (Some(expected), Some(actual)) = (expected.render(), actual.render()) {
write_diff_inner(
writer,
&expected,
&actual,
expected_name,
actual_name,
palette,
)?;
rendered = true;
}
// Fallback: print both payloads verbatim, one after the other.
if !rendered {
if let Some(expected_name) = expected_name {
writeln!(writer, "{} {}:", expected_name, palette.info("(expected)"))?;
} else {
writeln!(writer, "{}:", palette.info("Expected"))?;
}
writeln!(writer, "{}", palette.info(&expected))?;
if let Some(actual_name) = actual_name {
writeln!(writer, "{} {}:", actual_name, palette.error("(actual)"))?;
} else {
writeln!(writer, "{}:", palette.error("Actual"))?;
}
writeln!(writer, "{}", palette.error(&actual))?;
}
Ok(())
}
#[cfg(feature = "diff")]
/// Renders a line diff of `expected` vs `actual`, prefixed with old/new line
/// numbers, eliding long runs of unchanged lines (keeping `context` lines
/// around each change) once the diff exceeds `min_elide` entries.
fn write_diff_inner(
writer: &mut dyn std::fmt::Write,
expected: &str,
actual: &str,
expected_name: Option<&dyn std::fmt::Display>,
actual_name: Option<&dyn std::fmt::Display>,
palette: crate::report::Palette,
) -> Result<(), std::fmt::Error> {
// Cap on how long the diff algorithm may run.
let timeout = std::time::Duration::from_millis(500);
// Only elide when the flattened diff has more than this many entries.
let min_elide = 20;
// Number of unchanged lines preserved on each side of a change.
let context = 5;
let changes = similar::TextDiff::configure()
.algorithm(similar::Algorithm::Patience)
.timeout(timeout)
.newline_terminated(false)
.diff_lines(expected, actual);
writeln!(writer)?;
// Header lines naming each side ("---- expected" / "++++ actual").
if let Some(expected_name) = expected_name {
writeln!(
writer,
"{}",
palette.info(format_args!("{:->4} expected: {}", "", expected_name))
)?;
} else {
writeln!(writer, "{}", palette.info(format_args!("--- Expected")))?;
}
if let Some(actual_name) = actual_name {
writeln!(
writer,
"{}",
palette.error(format_args!("{:+>4} actual: {}", "", actual_name))
)?;
} else {
writeln!(writer, "{}", palette.error(format_args!("+++ Actual")))?;
}
// Flatten the diff ops into one linear sequence of per-line changes.
let changes = changes
.ops()
.iter()
.flat_map(|op| changes.iter_inline_changes(op))
.collect::<Vec<_>>();
// tombstones[i] == true marks line i for elision. Start with everything
// elided, then two sweeps (forward, backward) un-elide every changed line
// plus up to `context` equal lines after (forward pass) and before
// (backward pass) each change.
let tombstones = if min_elide < changes.len() {
let mut tombstones = vec![true; changes.len()];
let mut counter = context;
for (i, change) in changes.iter().enumerate() {
match change.tag() {
similar::ChangeTag::Insert | similar::ChangeTag::Delete => {
counter = context;
tombstones[i] = false;
}
similar::ChangeTag::Equal => {
if counter != 0 {
tombstones[i] = false;
counter -= 1;
}
}
}
}
let mut counter = context;
for (i, change) in changes.iter().enumerate().rev() {
match change.tag() {
similar::ChangeTag::Insert | similar::ChangeTag::Delete => {
counter = context;
tombstones[i] = false;
}
similar::ChangeTag::Equal => {
if counter != 0 {
tombstones[i] = false;
counter -= 1;
}
}
}
}
tombstones
} else {
// Small diff: an empty tombstone list means nothing is elided.
Vec::new()
};
let mut elided = false;
for (i, change) in changes.into_iter().enumerate() {
if tombstones.get(i).copied().unwrap_or(false) {
// Print a single elision marker per run of hidden lines.
if !elided {
let sign = "⋮";
write!(writer, "{:>4} ", " ",)?;
write!(writer, "{:>4} ", " ",)?;
writeln!(writer, "{}", palette.hint(sign))?;
}
elided = true;
} else {
elided = false;
match change.tag() {
similar::ChangeTag::Insert => {
write_change(writer, change, "+", palette.actual, palette.error, palette)?;
}
similar::ChangeTag::Delete => {
write_change(writer, change, "-", palette.expected, palette.info, palette)?;
}
similar::ChangeTag::Equal => {
write_change(writer, change, "|", palette.hint, palette.hint, palette)?;
}
}
}
}
Ok(())
}
#[cfg(feature = "diff")]
/// Writes one diff line: old line number, new line number, the sign column,
/// then the content with emphasized (changed) spans in `em_style` and the
/// rest in `style`.
fn write_change(
writer: &mut dyn std::fmt::Write,
change: similar::InlineChange<str>,
sign: &str,
em_style: crate::report::Style,
style: crate::report::Style,
palette: crate::report::Palette,
) -> Result<(), std::fmt::Error> {
// Line numbers are printed 1-based; a missing index (pure insert/delete)
// leaves that column blank.
if let Some(index) = change.old_index() {
write!(writer, "{:>4} ", palette.hint(index + 1),)?;
} else {
write!(writer, "{:>4} ", " ",)?;
}
if let Some(index) = change.new_index() {
write!(writer, "{:>4} ", palette.hint(index + 1),)?;
} else {
write!(writer, "{:>4} ", " ",)?;
}
write!(writer, "{} ", Styled::new(sign, style))?;
for &(emphasized, change) in change.values() {
let cur_style = if emphasized { em_style } else { style };
write!(writer, "{}", Styled::new(change, cur_style))?;
}
// "∅" flags a final line that is not newline-terminated.
if change.missing_newline() {
writeln!(writer, "{}", Styled::new("∅", em_style))?;
}
Ok(())
}
#[cfg(test)]
mod test {
use super::*;
#[cfg(feature = "diff")]
#[test]
// Identical inputs: every line renders as unchanged ("|").
fn diff_eq() {
let expected = "Hello\nWorld\n";
let expected_name = "A";
let actual = "Hello\nWorld\n";
let actual_name = "B";
let palette = crate::report::Palette::plain();
let mut actual_diff = String::new();
write_diff_inner(
&mut actual_diff,
expected,
actual,
Some(&expected_name),
Some(&actual_name),
palette,
)
.unwrap();
let expected_diff = "
---- expected: A
++++ actual: B
1 1 | Hello
2 2 | World
";
assert_eq!(expected_diff, actual_diff);
}
#[cfg(feature = "diff")]
#[test]
// A line present only in `expected` renders as a deletion ("-") with the
// new-line-number column left blank.
fn diff_ne_line_missing() {
let expected = "Hello\nWorld\n";
let expected_name = "A";
let actual = "Hello\n";
let actual_name = "B";
let palette = crate::report::Palette::plain();
let mut actual_diff = String::new();
write_diff_inner(
&mut actual_diff,
expected,
actual,
Some(&expected_name),
Some(&actual_name),
palette,
)
.unwrap();
let expected_diff = "
---- expected: A
++++ actual: B
1 1 | Hello
2 - World
";
assert_eq!(expected_diff, actual_diff);
}
#[cfg(feature = "diff")]
#[test]
// The "∅" marker flags the side whose final line lacks a newline, so the
// two "World" lines are shown as a delete/insert pair.
fn diff_eq_trailing_extra_newline() {
let expected = "Hello\nWorld";
let expected_name = "A";
let actual = "Hello\nWorld\n";
let actual_name = "B";
let palette = crate::report::Palette::plain();
let mut actual_diff = String::new();
write_diff_inner(
&mut actual_diff,
expected,
actual,
Some(&expected_name),
Some(&actual_name),
palette,
)
.unwrap();
let expected_diff = "
---- expected: A
++++ actual: B
1 1 | Hello
2 - World∅
2 + World
";
assert_eq!(expected_diff, actual_diff);
}
#[cfg(feature = "diff")]
#[test]
// Mirror image of the previous test: the newline is missing on `actual`.
fn diff_eq_trailing_newline_missing() {
let expected = "Hello\nWorld\n";
let expected_name = "A";
let actual = "Hello\nWorld";
let actual_name = "B";
let palette = crate::report::Palette::plain();
let mut actual_diff = String::new();
write_diff_inner(
&mut actual_diff,
expected,
actual,
Some(&expected_name),
Some(&actual_name),
palette,
)
.unwrap();
let expected_diff = "
---- expected: A
++++ actual: B
1 1 | Hello
2 - World
2 + World∅
";
assert_eq!(expected_diff, actual_diff);
}
#[cfg(feature = "diff")]
#[test]
// Long runs of identical numbered lines between the changes are elided
// down to 5 context lines on each side, marked with "⋮".
fn diff_eq_elided() {
let mut expected = String::new();
expected.push_str("Hello\n");
for i in 0..20 {
expected.push_str(&i.to_string());
expected.push('\n');
}
expected.push_str("World\n");
for i in 0..20 {
expected.push_str(&i.to_string());
expected.push('\n');
}
expected.push_str("!\n");
let expected_name = "A";
let mut actual = String::new();
actual.push_str("Goodbye\n");
for i in 0..20 {
actual.push_str(&i.to_string());
actual.push('\n');
}
actual.push_str("Moon\n");
for i in 0..20 {
actual.push_str(&i.to_string());
actual.push('\n');
}
actual.push_str("?\n");
let actual_name = "B";
let palette = crate::report::Palette::plain();
let mut actual_diff = String::new();
write_diff_inner(
&mut actual_diff,
&expected,
&actual,
Some(&expected_name),
Some(&actual_name),
palette,
)
.unwrap();
let expected_diff = "
---- expected: A
++++ actual: B
1 - Hello
1 + Goodbye
2 2 | 0
3 3 | 1
4 4 | 2
5 5 | 3
6 6 | 4
⋮
17 17 | 15
18 18 | 16
19 19 | 17
20 20 | 18
21 21 | 19
22 - World
22 + Moon
23 23 | 0
24 24 | 1
25 25 | 2
26 26 | 3
27 27 | 4
⋮
38 38 | 15
39 39 | 16
40 40 | 17
41 41 | 18
42 42 | 19
43 - !
43 + ?
";
assert_eq!(expected_diff, actual_diff);
}
}
|
use std::collections::{HashMap, VecDeque};
use std::io::{Write, ErrorKind};
use std::net::{SocketAddr, ToSocketAddrs};
use std::time::{Duration, Instant};
use std::thread;
use netopt::{NetworkOptions, NetworkStream};
use rand::{self, Rng};
use mqtt3::{MqttRead, MqttWrite, Message, QoS, SubscribeReturnCodes, SubscribeTopic};
use mqtt3::{self, Protocol, Packet, ConnectReturnCode, PacketIdentifier, LastWill, ToTopicPath};
use error::{Error, Result};
use sub::Subscription;
use {Connection, PubSub, ClientState, ReconnectMethod, PubOpt, ToPayload, ToSubTopics, ToUnSubTopics};
use store::Store;
// #[derive(Clone)]
/// Builder-style configuration used to establish an MQTT connection.
pub struct ClientOptions {
/// MQTT protocol level to speak (default: `Protocol::MQTT(4)`).
protocol: Protocol,
/// Keep-alive interval; `None` disables keep-alive pings.
keep_alive: Option<Duration>,
/// Whether to request a fresh session from the broker on connect.
clean_session: bool,
/// Client identifier; generated randomly if unset before `connect`.
client_id: Option<String>,
/// Optional will message the broker publishes on abnormal disconnect.
last_will: Option<LastWill>,
username: Option<String>,
password: Option<String>,
/// Strategy applied when the connection drops.
reconnect: ReconnectMethod,
// NOTE: the "incomming" (sic) spelling is used consistently crate-wide.
/// Persistence for inbound QoS 2 messages.
incomming_store: Option<Box<Store + Send>>,
/// Persistence for outbound QoS 2 messages.
outgoing_store: Option<Box<Store + Send>>,
}
impl ClientOptions {
    /// A type used for Client settings
    ///
    /// - Protocol is set to MQTT(4)
    /// - Keep alive is set to 30 seconds
    /// - `clean_session` is set to true
    /// - `reconnect` is set to `ReconnectMethod::ForeverDisconnect`
    ///
    /// The rest of the options are set to None
    pub fn new() -> ClientOptions {
        ClientOptions {
            protocol: Protocol::MQTT(4),
            keep_alive: Some(Duration::new(30, 0)),
            clean_session: true,
            client_id: None,
            last_will: None,
            username: None,
            password: None,
            reconnect: ReconnectMethod::ForeverDisconnect,
            incomming_store: None,
            outgoing_store: None,
        }
    }
    /// Sets the keep-alive interval, in whole seconds.
    pub fn set_keep_alive(&mut self, secs: u16) -> &mut ClientOptions {
        self.keep_alive = Some(Duration::new(secs as u64, 0));
        self
    }
    /// Overrides the MQTT protocol level (default `Protocol::MQTT(4)`).
    pub fn set_protocol(&mut self, protocol: Protocol) -> &mut ClientOptions {
        self.protocol = protocol;
        self
    }
    /// Sets an explicit client identifier.
    pub fn set_client_id(&mut self, client_id: String) -> &mut ClientOptions {
        self.client_id = Some(client_id);
        self
    }
    /// Chooses whether the broker should start a clean session.
    pub fn set_clean_session(&mut self, clean_session: bool) -> &mut ClientOptions {
        self.clean_session = clean_session;
        self
    }
    /// Installs the store used to persist inbound QoS 2 messages.
    pub fn set_incomming_store(&mut self, store: Box<Store + Send>) -> &mut ClientOptions {
        self.incomming_store = Some(store);
        self
    }
    /// Installs the store used to persist outbound QoS 2 messages.
    pub fn set_outgoing_store(&mut self, store: Box<Store + Send>) -> &mut ClientOptions {
        self.outgoing_store = Some(store);
        self
    }
    /// Assigns a random `mqttc_<u32>` client identifier.
    pub fn generate_client_id(&mut self) -> &mut ClientOptions {
        let mut rng = rand::thread_rng();
        let id = rng.gen::<u32>();
        self.client_id = Some(format!("mqttc_{}", id));
        self
    }
    /// Sets the username sent in the CONNECT packet.
    pub fn set_username(&mut self, username: String) -> &mut ClientOptions {
        self.username = Some(username);
        self
    }
    /// Sets the password sent in the CONNECT packet.
    pub fn set_password(&mut self, password: String) -> &mut ClientOptions {
        self.password = Some(password);
        self
    }
    /// Registers a last-will message the broker publishes if this client
    /// disconnects ungracefully.
    ///
    /// NOTE(review): unlike the other setters this returns `Result<()>`
    /// rather than `&mut ClientOptions`, so it cannot be chained; the
    /// signature is kept for backward compatibility.
    pub fn set_last_will<T: ToTopicPath, P: ToPayload>(&mut self,
                                                       topic: T,
                                                       message: String,
                                                       pub_opt: PubOpt)
                                                       -> Result<()> {
        let topic_name = topic.to_topic_name()?;
        self.last_will = Some(LastWill {
            // NOTE(review): `topic_name` is already a topic name, so the
            // second `to_topic_name()` looks redundant — confirm it is an
            // identity conversion before simplifying.
            topic: topic_name.to_topic_name()?.path(),
            message: message,
            qos: pub_opt.qos(),
            retain: pub_opt.is_retain(),
        });
        Ok(())
    }
    /// Sets (or clears) the last-will message directly.
    pub fn set_last_will_opt(&mut self, last_will: Option<LastWill>) -> &mut ClientOptions {
        self.last_will = last_will;
        self
    }
    /// Chooses the reconnect strategy applied after a connection loss.
    pub fn set_reconnect(&mut self, reconnect: ReconnectMethod) -> &mut ClientOptions {
        self.reconnect = reconnect;
        self
    }
    /// Consumes the options, opens the network connection and performs the
    /// MQTT handshake (CONNECT / CONNACK), returning a ready `Client`.
    pub fn connect<A: ToSocketAddrs>(mut self, addr: A, netopt: NetworkOptions) -> Result<Client> {
        // Idiom fix: was `self.client_id == None`.
        if self.client_id.is_none() {
            self.generate_client_id();
        }
        let addr = addr.to_socket_addrs()?.next().expect("Socket address is broken");
        info!(" Connecting to {}", addr);
        let conn = self._reconnect(addr, &netopt)?;
        let mut client = Client {
            addr: addr,
            state: ClientState::Disconnected,
            netopt: netopt,
            opts: self,
            conn: conn,
            session_present: false,
            // Queues
            last_flush: Instant::now(),
            last_pid: PacketIdentifier::zero(),
            await_ping: false,
            incomming_pub: VecDeque::new(),
            incomming_rec: VecDeque::new(),
            incomming_rel: VecDeque::new(),
            outgoing_ack: VecDeque::new(),
            outgoing_rec: VecDeque::new(),
            outgoing_comp: VecDeque::new(),
            await_suback: VecDeque::new(),
            await_unsuback: VecDeque::new(),
            subscriptions: HashMap::new(), // Subscriptions
        };
        // Send CONNECT then wait CONNACK
        client._handshake()?;
        Ok(client)
    }
    /// Opens a fresh network stream with read/write timeouts derived from
    /// the keep-alive setting.
    fn _reconnect(&self,
                  addr: SocketAddr,
                  netopt: &NetworkOptions)
                  -> Result<Connection> {
        let stream = netopt.connect(addr)?;
        stream.set_read_timeout(self.keep_alive).unwrap();
        stream.set_write_timeout(self.keep_alive).unwrap();
        Ok(Connection::new(stream)?)
    }
    /// Builds the CONNECT packet from the configured options.
    fn _generate_connect_packet(&self) -> Box<mqtt3::Connect> {
        // A keep-alive of 0 tells the broker keep-alive is disabled.
        let keep_alive = if let Some(dur) = self.keep_alive {
            dur.as_secs() as u16
        } else {
            0
        };
        Box::new(mqtt3::Connect {
            protocol: self.protocol,
            keep_alive: keep_alive,
            client_id: self.client_id.clone().unwrap(),
            clean_session: self.clean_session,
            last_will: self.last_will.clone(),
            username: self.username.clone(),
            password: self.password.clone(),
        })
    }
}
/// An MQTT client connection plus all per-session protocol state.
pub struct Client {
/// Broker address used for (re)connects.
addr: SocketAddr,
state: ClientState,
netopt: NetworkOptions,
opts: ClientOptions,
conn: Connection,
/// Whether the broker resumed an existing session (from CONNACK).
session_present: bool,
// Queues
/// Time of the last successful network flush (drives keep-alive pings).
last_flush: Instant,
/// Last packet identifier handed out by `_next_pid`.
last_pid: PacketIdentifier,
/// True while a PINGRESP is outstanding.
await_ping: bool,
incomming_pub: VecDeque<Box<Message>>, // QoS 1
incomming_rec: VecDeque<Box<Message>>, // QoS 2
incomming_rel: VecDeque<PacketIdentifier>, // QoS 2
outgoing_ack: VecDeque<Box<Message>>, // QoS 1
outgoing_rec: VecDeque<Box<Message>>, // QoS 2
outgoing_comp: VecDeque<PacketIdentifier>, // QoS 2
/// SUBSCRIBE requests awaiting their SUBACK.
await_suback: VecDeque<Box<mqtt3::Subscribe>>,
/// UNSUBSCRIBE requests awaiting their UNSUBACK.
await_unsuback: VecDeque<Box<mqtt3::Unsubscribe>>,
// Subscriptions
subscriptions: HashMap<String, Subscription>,
}
impl PubSub for Client {
/// Publishes `payload` to `topic` and flushes the connection immediately.
fn publish<T, P>(&mut self, topic: T, payload: P, pubopt: PubOpt) -> Result<()>
where T: ToTopicPath,
P: ToPayload
{
self._publish(topic, payload, pubopt)?;
self._flush()
}
/// Sends a SUBSCRIBE request and flushes; the SUBACK is handled later.
fn subscribe<S: ToSubTopics>(&mut self, subs: S) -> Result<()> {
self._subscribe(subs)?;
self._flush()
}
/// Sends an UNSUBSCRIBE request and flushes; the UNSUBACK is handled later.
fn unsubscribe<U: ToUnSubTopics>(&mut self, unsubs: U) -> Result<()> {
self._unsubscribe(unsubs)?;
self._flush()
}
fn disconnect(mut self) -> Result<()> {
// NOTE(review): the DISCONNECT packet is never written — `_disconnect()`
// is commented out, so this only flushes pending data. Confirm intent.
// self._disconnect();
self._flush()
}
}
impl Client {
pub fn await(&mut self) -> Result<Option<Box<Message>>> {
loop {
match self.accept() {
Ok(message) => {
if let Some(m) = message {
return Ok(Some(m));
}
}
Err(e) => {
match e {
Error::Timeout => {
if self.state == ClientState::Connected {
if !self.await_ping {
let _ = self.ping();
} else {
self._unbind();
}
} else {
return Err(Error::Timeout);
}
}
_ => return Err(e),
}
}
}
if self._normalized() {
return Ok(None);
}
}
}
pub fn accept(&mut self) -> Result<Option<Box<Message>>> {
match self.state {
ClientState::Connected | ClientState::Handshake => {
// Don't forget to send PING packets in time
if let Some(keep_alive) = self.opts.keep_alive {
let elapsed = self.last_flush.elapsed();
if elapsed >= keep_alive {
return Err(Error::Timeout);
}
self.conn.set_read_timeout(Some(keep_alive - elapsed))?;
}
match self.conn.read_packet() {
Ok(packet) => {
match self._parse_packet(packet) {
Ok(message) => Ok(message),
Err(err) => {
match err {
Error::ConnectionAbort => {
self._unbind();
Err(Error::ConnectionAbort)
}
err => {
error!("{:?}", err);
Err(err)
}
}
}
}
}
Err(err) => {
match err {
mqtt3::Error::UnexpectedEof => {
error!("{:?}", err);
if self._try_reconnect() {
Ok(None)
} else {
Err(Error::Disconnected)
}
}
mqtt3::Error::Io(e) => {
match e.kind() {
ErrorKind::WouldBlock | ErrorKind::TimedOut => {
Err(Error::Timeout)
}
ErrorKind::UnexpectedEof |
ErrorKind::ConnectionRefused |
ErrorKind::ConnectionReset |
ErrorKind::ConnectionAborted => {
error!("{:?}", e);
self._unbind();
if self._try_reconnect() {
Ok(None)
} else {
Err(Error::Disconnected)
}
}
_ => {
error!("{:?}", e);
self._unbind();
Err(Error::from(e))
}
}
}
_ => {
error!("{:?}", err);
Err(Error::from(err))
}
}
}
}
}
ClientState::Disconnected => {
if self._try_reconnect() {
Ok(None)
} else {
Err(Error::Disconnected)
}
}
}
}
pub fn reconnect(&mut self) -> Result<()> {
if self.state == ClientState::Connected {
warn!("mqttc is already connected");
return Ok(());
};
let conn = self.opts._reconnect(self.addr, &self.netopt)?;
self.conn = conn;
self._handshake()?;
self._resubscribe();
Ok(())
}
pub fn ping(&mut self) -> Result<()> {
debug!(" Pingreq");
self.await_ping = true;
self._write_packet(&Packet::Pingreq);
self._flush()
}
pub fn complete(&mut self, pid: PacketIdentifier) -> Result<()> {
let same_pid = self.incomming_rel.pop_back();
if same_pid == Some(pid) {
self._write_packet(&Packet::Pubcomp(pid));
self._flush()?;
if let Some(ref mut store) = self.opts.incomming_store {
store.delete(pid)?;
Ok(())
} else {
return Err(Error::IncommingStorageAbsent);
}
} else {
Err(Error::ProtocolViolation)
}
}
pub fn terminate(&mut self) {
self._unbind();
}
pub fn set_reconnect(&mut self, reconnect: ReconnectMethod) {
self.opts.reconnect = reconnect;
}
pub fn session_present(&self) -> bool {
self.session_present
}
fn _normalized(&self) -> bool {
(self.state == ClientState::Connected) && (!self.await_ping) &&
(self.outgoing_ack.len() == 0) && (self.outgoing_rec.len() == 0) &&
(self.incomming_pub.len() == 0) && (self.incomming_rec.len() == 0) &&
(self.incomming_rel.len() == 0) && (self.await_suback.len() == 0) &&
(self.await_unsuback.len() == 0)
}
fn _parse_packet(&mut self, packet: Packet) -> Result<Option<Box<Message>>> {
trace!("{:?}", packet);
match self.state {
ClientState::Handshake => {
match packet {
Packet::Connack(ref connack) => {
if connack.code == ConnectReturnCode::Accepted {
self.session_present = connack.session_present;
self.state = ClientState::Connected;
info!(" Connection accepted");
Ok(None)
} else {
Err(Error::ConnectionRefused(connack.code))
}
}
_ => Err(Error::HandshakeFailed),
}
}
ClientState::Connected => {
match packet {
Packet::Connack(_) => Err(Error::AlreadyConnected),
Packet::Publish(ref publish) => {
let message = Message::from_pub(publish.clone())?;
self._handle_message(message)
}
Packet::Puback(pid) => {
if let Some(message) = self.outgoing_ack.pop_front() {
if message.pid == Some(pid) {
Ok(None)
} else {
Err(Error::UnhandledPuback(pid))
}
} else {
Err(Error::UnhandledPuback(pid))
}
}
Packet::Pubrec(pid) => {
if let Some(message) = self.outgoing_rec.pop_front() {
if message.pid == Some(pid) {
self._write_packet(&Packet::Pubrel(pid));
self._flush()?;
self.outgoing_comp.push_back(pid);
if let Some(ref mut store) = self.opts.outgoing_store {
store.delete(pid)?;
} else {
return Err(Error::IncommingStorageAbsent);
}
Ok(None)
} else {
Err(Error::UnhandledPubrec(pid))
}
} else {
Err(Error::UnhandledPubrec(pid))
}
}
Packet::Pubrel(pid) => {
if let Some(message) = self.incomming_rec.pop_front() {
if message.pid == Some(pid) {
let message = if let Some(ref mut store) = self.opts
.incomming_store {
store.get(pid)?
} else {
return Err(Error::IncommingStorageAbsent);
};
self.incomming_rel.push_back(pid);
Ok(Some(message))
} else {
Err(Error::UnhandledPubrel(pid))
}
} else {
Err(Error::UnhandledPubrel(pid))
}
}
Packet::Pubcomp(pid) => {
if let Some(_) = self.outgoing_comp.pop_front() {
Ok(None)
} else {
Err(Error::UnhandledPubcomp(pid))
}
}
Packet::Suback(ref suback) => {
if let Some(subscribe) = self.await_suback.pop_front() {
if subscribe.pid == suback.pid {
if subscribe.topics.len() == suback.return_codes.len() {
let iter = suback.return_codes.iter().zip(&subscribe.topics);
for (ref code, ref sub_topic) in iter {
match **code {
SubscribeReturnCodes::Success(qos) => {
let sub = Subscription {
pid: subscribe.pid,
topic_path: sub_topic.topic_path
.to_topic_path()?,
qos: qos,
};
self.subscriptions
.insert(sub_topic.topic_path.clone(), sub);
}
SubscribeReturnCodes::Failure => {
// ignore subscription
}
}
}
Ok(None)
} else {
Err(Error::ProtocolViolation)
}
} else {
Err(Error::ProtocolViolation)
}
} else {
Err(Error::ProtocolViolation)
}
}
Packet::Unsuback(pid) => {
if let Some(unsubscribe) = self.await_unsuback.pop_front() {
if unsubscribe.pid == pid {
for topic in unsubscribe.topics.iter() {
self.subscriptions.remove(topic);
}
Ok(None)
} else {
Err(Error::ProtocolViolation)
}
} else {
Err(Error::ProtocolViolation)
}
}
Packet::Pingresp => {
self.await_ping = false;
Ok(None)
}
_ => Err(Error::UnrecognizedPacket),
}
}
ClientState::Disconnected => Err(Error::ConnectionAbort),
}
}
fn _handle_message(&mut self, message: Box<Message>) -> Result<Option<Box<Message>>> {
debug!(" Publish {} {} < {} bytes",
message.qos.to_u8(),
message.topic.path(),
message.payload.len());
match message.qos {
QoS::AtMostOnce => Ok(Some(message)),
QoS::AtLeastOnce => {
self.incomming_pub.push_back(message.clone());
let pid = message.pid.unwrap();
// debug!(" Puback {}", pid.0);
self._write_packet(&Packet::Puback(pid));
self._flush()?;
// FIXME: can be repeated
let _ = self.incomming_pub.pop_front();
Ok(Some(message))
}
QoS::ExactlyOnce => {
self.incomming_rec.push_back(message.clone());
let pid = message.pid.unwrap();
if let Some(ref mut store) = self.opts.incomming_store {
store.put(message)?;
} else {
return Err(Error::IncommingStorageAbsent);
}
self._write_packet(&Packet::Pubrec(pid));
self._flush()?;
Ok(None)
}
}
}
fn _handshake(&mut self) -> Result<()> {
self.state = ClientState::Handshake;
// send CONNECT
self._connect()?;
// wait CONNACK
let _ = self.await()?;
Ok(())
}
fn _try_reconnect(&mut self) -> bool {
match self.opts.reconnect {
ReconnectMethod::ForeverDisconnect => false,
ReconnectMethod::ReconnectAfter(dur) => {
info!(" Reconnect in {} seconds", dur.as_secs());
thread::sleep(dur);
let _ = self.reconnect();
true
}
}
}
fn _connect(&mut self) -> Result<()> {
let connect = self.opts._generate_connect_packet();
debug!(" Connect {}", connect.client_id);
let packet = Packet::Connect(connect);
self._write_packet(&packet);
self._flush()
}
fn _publish<T: ToTopicPath, P: ToPayload>(&mut self,
topic: T,
payload: P,
pubopt: PubOpt)
-> Result<()> {
let mut message = Box::new(Message {
topic: topic.to_topic_name()?,
qos: pubopt.qos(),
retain: pubopt.is_retain(),
pid: None,
payload: payload.to_payload(),
});
match message.qos {
QoS::AtMostOnce => (),
QoS::AtLeastOnce => {
message.pid = Some(self._next_pid());
self.outgoing_ack.push_back(message.clone());
}
QoS::ExactlyOnce => {
message.pid = Some(self._next_pid());
if let Some(ref mut store) = self.opts.outgoing_store {
store.put(message.clone())?;
} else {
return Err(Error::OutgoingStorageAbsent);
}
self.outgoing_rec.push_back(message.clone());
}
}
debug!(" Publish {} {} > {} bytes",
message.qos.to_u8(),
message.topic.path(),
message.payload.len());
let packet = Packet::Publish(message.to_pub(None, false));
self._write_packet(&packet);
Ok(())
}
fn _subscribe<S: ToSubTopics>(&mut self, subs: S) -> Result<()> {
let iter = subs.to_subscribe_topics()?;
let subscribe = Box::new(mqtt3::Subscribe {
pid: self._next_pid(),
topics: iter.collect(),
});
debug!(" Subscribe {:?}", subscribe.topics);
self.await_suback.push_back(subscribe.clone());
self._write_packet(&Packet::Subscribe(subscribe));
Ok(())
}
fn _unsubscribe<U: ToUnSubTopics>(&mut self, unsubs: U) -> Result<()> {
let iter = unsubs.to_unsubscribe_topics()?;
let unsubscribe = Box::new(mqtt3::Unsubscribe {
pid: self._next_pid(),
topics: iter.collect(),
});
debug!(" Unsubscribe {:?}", unsubscribe.topics);
self.await_unsuback.push_back(unsubscribe.clone());
self._write_packet(&Packet::Unsubscribe(unsubscribe));
Ok(())
}
fn _resubscribe(&mut self) {
let subs: Vec<SubscribeTopic> = self.subscriptions
.values()
.map(|sub| sub.to_subscribe_topic())
.collect();
let _ = self._subscribe(subs);
}
fn _disconnect(&mut self) {
self._write_packet(&Packet::Disconnect);
}
#[inline]
fn _write_packet(&mut self, packet: &Packet) {
trace!("{:?}", packet);
self.conn.write_packet(&packet).unwrap();
}
fn _flush(&mut self) -> Result<()> {
// TODO: in case of disconnection, trying to reconnect
self.conn.flush()?;
self.last_flush = Instant::now();
Ok(())
}
fn _unbind(&mut self) {
let _ = self.conn.terminate();
self.await_unsuback.clear();
self.await_suback.clear();
self.await_ping = false;
self.state = ClientState::Disconnected;
info!(" Disconnected {}", self.opts.client_id.clone().unwrap());
}
#[inline]
fn _next_pid(&mut self) -> PacketIdentifier {
self.last_pid = self.last_pid.next();
self.last_pid
}
}
#[cfg(test)]
mod test {
use std::io::Cursor;
use super::ClientOptions;
use netopt::{NetworkStream, NetworkOptions};
use netopt::mock::MockStream;
#[test]
// Feeds a canned CONNACK (0x20 = packet type, 0x02 = remaining length,
// session_present = 1, return code 0x00 = accepted) through a mock stream
// and checks that connect() completes the handshake successfully.
fn client_connect_test() {
let stream = MockStream::with_vec(vec![0b00100000, 0x02, 0x01, 0x00]);
let options = ClientOptions::new();
let mut netopt = NetworkOptions::new();
netopt.attach(stream);
// Connect and create MQTT client
let client = options.connect("127.0.0.1:1883", netopt).unwrap();
}
}
|
use crate::crawler::photo_crawler::craw_photo_splider;
use rocket::http::Status;
use rocket::Route;
use rocket_contrib::json::JsonValue;
/// GET /photo/entries?count=N — crawls up to `count` photos and returns
/// their links as JSON. Counts above 15 are rejected with 400 Bad Request;
/// crawler failures become 500 Internal Server Error.
#[get("/photo/entries?<count>", format = "json")]
fn get_photo_entries(count: usize) -> Result<JsonValue, Status> {
    // Reject oversized requests before doing any crawling work.
    if count > 15 {
        return Err(Status::BadRequest);
    }
    let photo_list = craw_photo_splider(count).map_err(|e| {
        eprintln!("craw photo error: {:?}", e);
        Status::InternalServerError
    })?;
    let data: Vec<_> = photo_list
        .iter()
        .map(|entry| {
            json!({
                "image_link": entry.photo_link.clone(),
            })
        })
        .collect();
    Ok(json!({ "count": count, "data": data, }))
}
/// Collects this module's handlers into a route list for mounting on Rocket.
pub fn photo_routes() -> Vec<Route> {
routes![get_photo_entries]
}
|
use {
crate::switchboard::base::*,
crate::switchboard::hanging_get_handler::{HangingGetHandler, Sender},
crate::switchboard::switchboard_impl::SwitchboardImpl,
fidl::endpoints::ServiceMarker,
fidl_fuchsia_settings::{
DoNotDisturbMarker, DoNotDisturbRequest, DoNotDisturbRequestStream, DoNotDisturbSettings,
DoNotDisturbWatchResponder, Error,
},
fuchsia_async as fasync,
fuchsia_syslog::fx_log_err,
futures::lock::Mutex,
futures::TryStreamExt,
parking_lot::RwLock,
std::sync::Arc,
};
impl Sender<DoNotDisturbSettings> for DoNotDisturbWatchResponder {
/// Sends the watch result over FIDL; send errors are logged, not propagated.
fn send_response(self, data: DoNotDisturbSettings) {
self.send(data).log_fidl_response_error(DoNotDisturbMarker::DEBUG_NAME);
}
}
impl From<SettingResponse> for DoNotDisturbSettings {
    /// Translates a switchboard `DoNotDisturb` response into the FIDL
    /// settings table; any other response variant is a programming error.
    fn from(response: SettingResponse) -> Self {
        match response {
            SettingResponse::DoNotDisturb(info) => {
                let mut dnd_settings = fidl_fuchsia_settings::DoNotDisturbSettings::empty();
                dnd_settings.user_initiated_do_not_disturb = Some(info.user_dnd);
                dnd_settings.night_mode_initiated_do_not_disturb = Some(info.night_mode_dnd);
                dnd_settings
            }
            _ => panic!("incorrect value sent to do_not_disturb"),
        }
    }
}
/// Converts FIDL `DoNotDisturbSettings` into a switchboard `SettingRequest`.
///
/// Exactly one field is translated per call: `user_initiated_do_not_disturb`
/// takes precedence over `night_mode_initiated_do_not_disturb`; returns
/// `None` when neither field is populated.
fn to_request(settings: DoNotDisturbSettings) -> Option<SettingRequest> {
    if let Some(user_dnd) = settings.user_initiated_do_not_disturb {
        return Some(SettingRequest::SetUserInitiatedDoNotDisturb(user_dnd));
    }
    settings
        .night_mode_initiated_do_not_disturb
        .map(SettingRequest::SetNightModeInitiatedDoNotDisturb)
}
/// Spawns an async task that services `fuchsia.settings.DoNotDisturb` FIDL
/// requests, translating Set/Watch calls into switchboard operations.
pub fn spawn_do_not_disturb_fidl_handler(
switchboard_handle: Arc<RwLock<SwitchboardImpl>>,
mut stream: DoNotDisturbRequestStream,
) {
let switchboard_lock = switchboard_handle.clone();
type DNDHangingGetHandler =
Arc<Mutex<HangingGetHandler<DoNotDisturbSettings, DoNotDisturbWatchResponder>>>;
// Shared handler implementing the hanging-get pattern for Watch calls.
let hanging_get_handler: DNDHangingGetHandler =
HangingGetHandler::create(switchboard_handle, SettingType::DoNotDisturb);
fasync::spawn(async move {
while let Ok(Some(req)) = stream.try_next().await {
// Support future expansion of FIDL
#[allow(unreachable_patterns)]
match req {
DoNotDisturbRequest::Set { settings, responder } => {
if let Some(request) = to_request(settings) {
let (response_tx, response_rx) =
futures::channel::oneshot::channel::<SettingResponseResult>();
if switchboard_lock
.write()
.request(SettingType::DoNotDisturb, request, response_tx)
.is_ok()
{
// Reply once the switchboard reports completion, on a
// separate task so the request loop is not blocked.
fasync::spawn(async move {
match response_rx.await {
Ok(_) => responder
.send(&mut Ok(()))
.log_fidl_response_error(DoNotDisturbMarker::DEBUG_NAME),
Err(_) => responder
.send(&mut Err(Error::Failed))
.log_fidl_response_error(DoNotDisturbMarker::DEBUG_NAME),
};
});
} else {
// Switchboard rejected the request outright.
responder
.send(&mut Err(Error::Failed))
.log_fidl_response_error(DoNotDisturbMarker::DEBUG_NAME);
}
} else {
// Neither DND field was set in the request table.
responder
.send(&mut Err(Error::Failed))
.log_fidl_response_error(DoNotDisturbMarker::DEBUG_NAME);
}
}
DoNotDisturbRequest::Watch { responder } => {
// Delegate to the shared hanging-get handler.
let mut hanging_get_lock = hanging_get_handler.lock().await;
hanging_get_lock.watch(responder).await;
}
_ => {
fx_log_err!("Unsupported DoNotDisturbRequest type");
}
}
}
})
}
|
use rune::ast::{CopySource, Delimiter, Kind, NumberSource, StrSource, StringSource, Token};
use rune::macros::{with_context, MacroContext};
use rune::quote;
use runestick::Span;
use Kind::*;
// Asserts that a `quote!` expression expands to exactly the given sequence of
// token kinds (each wrapped via `token()` at the default span).
macro_rules! assert_quote {
    ([$($expected:expr),* $(,)?], $quote:expr) => {
        assert_eq!(vec![$(token($expected),)*], $quote.into_token_stream());
    }
}
/// Wraps a token kind in a `Token` positioned at the default (empty) span,
/// so expected and actual tokens compare equal regardless of position.
fn token(kind: Kind) -> Token {
    let span = Span::default();
    Token { span, kind }
}
// Verifies that every punctuation token and keyword recognized by the lexer
// round-trips through `quote!` as the expected single token kind.
#[test]
fn test_tokens() {
    let ctx = MacroContext::empty();
    with_context(ctx, || {
        // NOTE(review): [Amp] is asserted twice (here and a few lines below);
        // presumably harmless duplication — confirm.
        assert_quote!([Amp], quote!(&));
        assert_quote!([Abstract], quote!(abstract));
        assert_quote!([AlignOf], quote!(alignof));
        assert_quote!([Amp], quote!(&));
        assert_quote!([AmpAmp], quote!(&&));
        assert_quote!([AmpEq], quote!(&=));
        assert_quote!([Arrow], quote!(->));
        assert_quote!([As], quote!(as));
        assert_quote!([Async], quote!(async));
        assert_quote!([At], quote!(@));
        assert_quote!([Await], quote!(await));
        assert_quote!([Bang], quote!(!));
        assert_quote!([BangEq], quote!(!=));
        assert_quote!([Become], quote!(become));
        assert_quote!([Break], quote!(break));
        assert_quote!([Caret], quote!(^));
        assert_quote!([CaretEq], quote!(^=));
        assert_quote!([Colon], quote!(:));
        assert_quote!([ColonColon], quote!(::));
        assert_quote!([Comma], quote!(,));
        assert_quote!([Const], quote!(const));
        assert_quote!([Crate], quote!(crate));
        assert_quote!([Dash], quote!(-));
        assert_quote!([DashEq], quote!(-=));
        assert_quote!([Default], quote!(default));
        assert_quote!([Div], quote!(/));
        assert_quote!([Do], quote!(do));
        assert_quote!([Dollar], quote!($));
        assert_quote!([Dot], quote!(.));
        assert_quote!([DotDot], quote!(..));
        assert_quote!([Else], quote!(else));
        assert_quote!([Enum], quote!(enum));
        assert_quote!([Eq], quote!(=));
        assert_quote!([EqEq], quote!(==));
        assert_quote!([Extern], quote!(extern));
        assert_quote!([False], quote!(false));
        assert_quote!([Final], quote!(final));
        assert_quote!([Fn], quote!(fn));
        assert_quote!([For], quote!(for));
        assert_quote!([Gt], quote!(>));
        assert_quote!([GtEq], quote!(>=));
        assert_quote!([GtGt], quote!(>>));
        assert_quote!([GtGtEq], quote!(>>=));
        assert_quote!([If], quote!(if));
        assert_quote!([Impl], quote!(impl));
        assert_quote!([In], quote!(in));
        assert_quote!([Is], quote!(is));
        assert_quote!([Let], quote!(let));
        assert_quote!([Loop], quote!(loop));
        assert_quote!([Lt], quote!(<));
        assert_quote!([LtEq], quote!(<=));
        assert_quote!([LtLt], quote!(<<));
        assert_quote!([LtLtEq], quote!(<<=));
        assert_quote!([Macro], quote!(macro));
        assert_quote!([Match], quote!(match));
        assert_quote!([Mod], quote!(mod));
        assert_quote!([Move], quote!(move));
        assert_quote!([Not], quote!(not));
        assert_quote!([OffsetOf], quote!(offsetof));
        assert_quote!([Override], quote!(override));
        assert_quote!([Perc], quote!(%));
        assert_quote!([PercEq], quote!(%=));
        assert_quote!([Pipe], quote!(|));
        assert_quote!([PipeEq], quote!(|=));
        assert_quote!([PipePipe], quote!(||));
        assert_quote!([Plus], quote!(+));
        assert_quote!([PlusEq], quote!(+=));
        assert_quote!([Pound], quote!(#));
        assert_quote!([Priv], quote!(priv));
        assert_quote!([Proc], quote!(proc));
        assert_quote!([Pub], quote!(pub));
        assert_quote!([Pure], quote!(pure));
        assert_quote!([QuestionMark], quote!(?));
        assert_quote!([Ref], quote!(ref));
        assert_quote!([Return], quote!(return));
        assert_quote!([Rocket], quote!(=>));
        assert_quote!([Select], quote!(select));
        assert_quote!([SelfType], quote!(Self));
        assert_quote!([SelfValue], quote!(self));
        assert_quote!([SemiColon], quote!(;));
        assert_quote!([SizeOf], quote!(sizeof));
        assert_quote!([SlashEq], quote!(/=));
        assert_quote!([Star], quote!(*));
        assert_quote!([StarEq], quote!(*=));
        assert_quote!([Static], quote!(static));
        assert_quote!([Struct], quote!(struct));
        assert_quote!([Super], quote!(super));
        assert_quote!([Tilde], quote!(~));
        assert_quote!([True], quote!(true));
        assert_quote!([TypeOf], quote!(typeof));
        assert_quote!([Underscore], quote!(_));
        assert_quote!([Unsafe], quote!(unsafe));
        assert_quote!([Use], quote!(use));
        assert_quote!([Virtual], quote!(virtual));
        assert_quote!([While], quote!(while));
        assert_quote!([Yield], quote!(yield));
    });
}
// Literals quoted inside a macro context are stored out-of-band; the tokens
// carry synthetic storage ids (0, 1, ...) in insertion order, except small
// char/byte literals which stay inline.
#[test]
fn test_synthetic() {
    let ctx = MacroContext::empty();
    with_context(ctx, || {
        assert_quote!([Ident(StringSource::Synthetic(0))], quote!(hello));
        assert_quote!([ByteStr(StrSource::Synthetic(0))], quote!(b"hello"));
        assert_quote!([Str(StrSource::Synthetic(0))], quote!("hello"));
        assert_quote!([Number(NumberSource::Synthetic(0))], quote!(0));
        // Second number registered in the same context gets the next id.
        assert_quote!([Number(NumberSource::Synthetic(1))], quote!(42.0));
        assert_quote!([Char(CopySource::Inline('a'))], quote!('a'));
        assert_quote!([Byte(CopySource::Inline(b'a'))], quote!(b'a'));
    });
}
// `#var` inside `quote!` splices a previously built token stream in place.
#[test]
fn test_interpolate() {
    let ctx = MacroContext::empty();
    with_context(ctx, || {
        let outer = quote!(self struct enum);
        assert_quote!([SelfValue, Struct, Enum], quote!(#outer));
    });
}
// An attribute-like form `#[test]` lexes as pound + bracket-delimited ident,
// i.e. `#` is not treated as interpolation when followed by `[`.
#[test]
fn test_attribute() {
    let ctx = MacroContext::empty();
    with_context(ctx, || {
        assert_quote!(
            [
                Pound,
                Open(Delimiter::Bracket),
                Ident(StringSource::Synthetic(0)),
                Close(Delimiter::Bracket),
            ],
            quote!(#[test])
        );
    });
}
// An object literal `#{key: value}` lexes as pound + brace-delimited
// key/colon/value tokens — `#` followed by `{` is not interpolation either.
#[test]
fn test_object() {
    let ctx = MacroContext::empty();
    with_context(ctx, || {
        assert_quote!(
            [
                Pound,
                Open(Delimiter::Brace),
                Ident(StringSource::Synthetic(0)),
                Colon,
                Number(NumberSource::Synthetic(0)),
                Close(Delimiter::Brace),
            ],
            quote!(#{test: 42})
        );
    });
}
|
use super::*;
/// Parses a natural-language-style declaration
/// (`make|have|let <type+name> <linker> <value...>`) into a C++
/// declaration/definition string.
///
/// `have` emits `constexpr`, `make` heap-allocates via `new`, `let` (and any
/// unknown keyword) emits a plain declaration. The error `&str` borrows from
/// `s` only via lifetime elision; all actual error values are literals.
// NOTE(review): `s: &String` would conventionally be `&str`; left unchanged
// here since the elided error lifetime is tied to this parameter.
pub fn parse_new(s: &String) -> Result<String, &str> {
    // How the variable is initialized, derived from the linker word.
    #[derive(Debug)]
    enum InitType {
        Constructor, RefType, CopyType, _None
    }
    // Which leading keyword introduced the statement.
    #[derive(Debug)]
    enum DeclareType {
        Make,
        Have,
        Let
    }
    use DeclareType::*;
    use InitType::*;
    // First word (lower-cased) selects the declaration kind.
    let keyword = &keyword(&s).to_ascii_lowercase()[..];
    let splited = &split(&s);
    let init_type: InitType;
    let make_type: DeclareType = match &keyword[..] {
        "have" => Have,
        "let" => Let,
        "make" => Make,
        // Unknown keyword: fall back to a plain declaration.
        _ => Let
    };
    // Scan for the linker word separating the declaration from its value.
    let mut where_as = 1;
    for elem in &splited[1..] {
        if regi(&elem, "^(as|is|:|->|for|of|to|with|about)$") { break; }
        else { where_as += 1; }
    }
    if where_as == splited.len() {
        // No linker found: declaration without an initializer.
        init_type = InitType::_None;
    }
    else {
        init_type = match &splited[where_as].to_ascii_lowercase()[..] {
            // Constructor-call linkers.
            ":" | "->" |
            "to" | "of" |
            "for" | "about" |
            "with" => InitType::Constructor,
            "is" => InitType::RefType,  // reference binding
            "as" => InitType::CopyType, // copy initialization
            _ => InitType::_None
        };
    }
    // Words between the keyword and the linker form the variable declaration
    // (type + name); everything after the linker is the value expression.
    let var = &declarition_parse(&splited[1..where_as].to_vec())?;
    let ret = match init_type {
        Constructor => match make_type {
            Have => format!("constexpr {}({});\n", var.to_string(), &value_parse(&splited[where_as+1..].to_vec().join(" "), 0)?),
            Let => format!("{}({});\n", var.to_string(), &value_parse(&splited[where_as+1..].to_vec().join(" "), 0)?),
            Make => format!(
                "{type}* {name} = new {type}({args});\n",
                type = var.typename,
                name = var.name,
                args = &value_parse(&splited[where_as+1..].to_vec().join(" "), 0)?
            )
        },
        CopyType => match make_type {
            Have => format!("constexpr {} = {};\n", var.to_string(), &value_parse(&splited[where_as+1..].to_vec().join(" "), 1)?),
            Let => format!("{} = {};\n", var.to_string(), &value_parse(&splited[where_as+1..].to_vec().join(" "), 1)?),
            Make => format!(
                "{type}* {name} = new {type}({to_copy});\n",
                type = var.typename,
                name = var.name,
                to_copy = &value_parse(&splited[where_as+1..].to_vec().join(" "), 1)?
            )
        },
        RefType => match make_type {
            // constexpr references are rejected.
            Have => return Err("우흥~ 하고 울어요. 우흥~ 하고 우는데..."),
            Let => format!("{}&{} = {};\n", var.typename, var.name, &value_parse(&splited[where_as+1..].to_vec().join(" "), 1)?),
            Make => format!(
                "{type} {name} = {to_copy};\n",
                type = var.typename,
                name = var.name,
                to_copy = &value_parse(&splited[where_as+1..].to_vec().join(" "), 1)?
            )
        },
        _None => var.to_string()
    };
    // NOTE(review): most arms above already end in ";\n", so appending ";"
    // yields a trailing ";\n;" — confirm this is intended.
    Ok(ret + ";")
}
mod amt_test;
mod cbor_test;
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
use std::result;
use serde::{de::DeserializeOwned, Serialize};
use serde_cbor::Value;
use blockstore::BlockstoreError;
use cid::{Cid, Codec, Prefix, Version};
use crate::node::{create_root, Item, Node, PartAmt};
use super::*;
/// In-memory blockstore keyed by CID bytes. Clones of `DB` share the same
/// underlying map via `Rc<RefCell<...>>` (single-threaded interior
/// mutability), which is sufficient for tests.
#[derive(Default, Clone)]
pub struct DB {
    db: Rc<RefCell<HashMap<Vec<u8>, Vec<u8>>>>,
}
impl Blocks for DB {
    /// Looks up `cid` and CBOR-decodes the stored bytes into `Output`.
    ///
    /// Errors with a converted `BlockstoreError::NotFound` when the key is
    /// absent, or `AmtIpldError::Cbor` when decoding fails.
    fn get<Output: DeserializeOwned>(&self, cid: &Cid) -> result::Result<Output, AmtIpldError> {
        let o = self
            .db
            .borrow()
            .get(&cid.to_bytes())
            // `ok_or_else` defers building the error (which clones the CID)
            // to the miss path only; the original `ok_or` constructed it on
            // every call, even on a hit.
            .ok_or_else(|| BlockstoreError::NotFound(cid.clone()).into())
            .and_then(|v| serde_cbor::from_slice(v).map_err(AmtIpldError::Cbor))?;
        Ok(o)
    }
    /// CBOR-encodes `v`, derives its CIDv1 (dag-cbor, blake2b-256, 32-byte
    /// digest), stores the bytes under that CID and returns the CID.
    fn put<Input: Serialize>(&mut self, v: Input) -> result::Result<Cid, AmtIpldError> {
        let v = serde_cbor::to_vec(&v)?;
        let prefix = Prefix {
            version: Version::V1,
            codec: Codec::DagCBOR,
            mh_type: multihash::Code::Blake2b256,
            mh_len: 32,
        };
        let cid = Cid::new_from_prefix(&prefix, v.as_ref());
        self.db.borrow_mut().insert(cid.to_bytes(), v);
        Ok(cid)
    }
}
pub fn db() -> DB {
Default::default()
}
|
// Copyright (c) 2019 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::fmt;
use std::ops;
use hex::{FromHex, FromHexError, ToHex};
use serde::de::{self, Deserialize, Deserializer, Visitor};
use serde::ser::{self, Serialize, Serializer};
/// A Git object ID (i.e., a SHA1).
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Default)]
pub struct Oid([u8; 20]); // 20 raw digest bytes (not hex-encoded)
impl Oid {
    /// Parses an OID from a 40-character hexadecimal string.
    ///
    /// The error type is deliberately the unit type: callers only need to
    /// know that parsing failed, not why.
    pub fn from_hex(s: &str) -> Result<Self, ()> {
        Ok(Oid(<[u8; 20]>::from_hex(s).map_err(|_| ())?))
    }
    /// The empty tree sha `4b825dc642cb6eb9a060e54bf8d69288fbee4904`.
    ///
    /// This can be computed manually with `git hash-object -t tree /dev/null`.
    pub const EMPTY_TREE: Oid = Oid([
        0x4b, 0x82, 0x5d, 0xc6, 0x42, 0xcb, 0x6e, 0xb9, 0xa0, 0x60, 0xe5, 0x4b,
        0xf8, 0xd6, 0x92, 0x88, 0xfb, 0xee, 0x49, 0x04,
    ]);
    /// A sha of all zeros. Usually used to indicate that a branch is either
    /// created or deleted.
    pub const ZERO: Oid = Oid([
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    ]);
}
/// Formats as 40 uppercase hex digits (via the `hex` crate).
impl fmt::UpperHex for Oid {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.write_hex_upper(f)
    }
}
/// Formats as 40 lowercase hex digits (via the `hex` crate).
impl fmt::LowerHex for Oid {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.write_hex(f)
    }
}
/// Display delegates to `LowerHex` — the conventional way git prints shas.
impl fmt::Display for Oid {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        <Self as fmt::LowerHex>::fmt(self, f)
    }
}
/// Debug output is identical to Display (lowercase hex), which is more
/// readable in logs than the derived byte-array form would be.
impl fmt::Debug for Oid {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        <Self as fmt::Display>::fmt(self, f)
    }
}
/// Allows treating an `Oid` directly as its underlying 20-byte array.
impl ops::Deref for Oid {
    type Target = [u8; 20];
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Serializes as a lowercase hex string for human-readable formats (JSON,
/// YAML) and as a raw 20-byte sequence for binary formats.
impl Serialize for Oid {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            // Serialize as a hex string.
            let mut hex = String::new();
            self.0
                .as_ref()
                .write_hex(&mut hex)
                .map_err(ser::Error::custom)?;
            serializer.serialize_str(&hex)
        } else {
            // Serialize as a byte array with known length.
            serializer.serialize_bytes(self.0.as_ref())
        }
    }
}
/// Mirror of the `Serialize` impl: accepts a hex string from human-readable
/// formats and raw bytes from binary formats, with detailed error mapping.
impl<'de> Deserialize<'de> for Oid {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct OidVisitor;
        impl<'de> Visitor<'de> for OidVisitor {
            type Value = Oid;
            fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
                write!(f, "hex string or 20 bytes")
            }
            // Human-readable path: parse a 40-char hex string, translating
            // each hex-crate error into a precise serde error.
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                let v = <[u8; 20]>::from_hex(v).map_err(|e| match e {
                    FromHexError::InvalidHexCharacter { c, .. } => {
                        E::invalid_value(
                            de::Unexpected::Char(c),
                            &"string with only hexadecimal characters",
                        )
                    }
                    FromHexError::InvalidStringLength => E::invalid_length(
                        v.len(),
                        &"hex string with a valid length",
                    ),
                    FromHexError::OddLength => E::invalid_length(
                        v.len(),
                        &"hex string with an even length",
                    ),
                })?;
                Ok(Oid(v))
            }
            // Binary path: expect exactly 20 raw bytes.
            fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                if v.len() != 20 {
                    return Err(E::invalid_length(v.len(), &"20 bytes"));
                }
                let mut inner = <[u8; 20]>::default();
                inner.copy_from_slice(v);
                Ok(Oid(inner))
            }
        }
        if deserializer.is_human_readable() {
            deserializer.deserialize_str(OidVisitor)
        } else {
            deserializer.deserialize_bytes(OidVisitor)
        }
    }
}
|
// Copyright 2020, The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
mod stored_message;
pub use stored_message::{NewStoredMessage, StoredMessage};
use crate::{
envelope::DhtMessageType,
schema::stored_messages,
storage::{DbConnection, StorageError},
store_forward::message::StoredMessagePriority,
};
use chrono::{DateTime, NaiveDateTime, Utc};
use diesel::{BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl};
use tari_comms::{
peer_manager::{node_id::NodeDistance, NodeId},
types::CommsPublicKey,
};
use tari_utilities::hex::Hex;
/// Query layer over the `stored_messages` table used by the DHT
/// store-and-forward service.
pub struct StoreAndForwardDatabase {
    connection: DbConnection,
}
impl StoreAndForwardDatabase {
    /// Wraps an established database connection.
    pub fn new(connection: DbConnection) -> Self {
        Self { connection }
    }
    /// Inserts a single stored-message row.
    pub async fn insert_message(&self, message: NewStoredMessage) -> Result<(), StorageError> {
        self.connection
            .with_connection_async(|conn| {
                diesel::insert_into(stored_messages::table)
                    .values(message)
                    .execute(conn)?;
                Ok(())
            })
            .await
    }
    /// Fetches up to `limit` plain (`DhtMessageType::None`) messages addressed
    /// to either the given public key or node id, newest first, optionally
    /// restricted to those stored after `since`.
    pub async fn find_messages_for_peer(
        &self,
        public_key: &CommsPublicKey,
        node_id: &NodeId,
        since: Option<DateTime<Utc>>,
        limit: i64,
    ) -> Result<Vec<StoredMessage>, StorageError>
    {
        // Hex-encode outside the closure so it can be moved in as owned data.
        let pk_hex = public_key.to_hex();
        let node_id_hex = node_id.to_hex();
        self.connection
            .with_connection_async::<_, Vec<StoredMessage>>(move |conn| {
                let mut query = stored_messages::table
                    .select(stored_messages::all_columns)
                    .filter(
                        stored_messages::destination_pubkey
                            .eq(pk_hex)
                            .or(stored_messages::destination_node_id.eq(node_id_hex)),
                    )
                    .filter(stored_messages::message_type.eq(DhtMessageType::None as i32))
                    .into_boxed();
                if let Some(since) = since {
                    query = query.filter(stored_messages::stored_at.gt(since.naive_utc()));
                }
                query
                    .order_by(stored_messages::stored_at.desc())
                    .limit(limit)
                    .get_results(conn)
                    .map_err(Into::into)
            })
            .await
    }
    /// Fetches plain messages destined for *other* nodes, then (if a distance
    /// threshold is given) keeps only those whose destination node id lies
    /// within that distance of `node_id`.
    pub async fn find_regional_messages(
        &self,
        node_id: &NodeId,
        dist_threshold: Option<Box<NodeDistance>>,
        since: Option<DateTime<Utc>>,
        limit: i64,
    ) -> Result<Vec<StoredMessage>, StorageError>
    {
        let node_id_hex = node_id.to_hex();
        let results = self
            .connection
            .with_connection_async::<_, Vec<StoredMessage>>(move |conn| {
                let mut query = stored_messages::table
                    .select(stored_messages::all_columns)
                    .filter(stored_messages::destination_node_id.ne(node_id_hex))
                    .filter(stored_messages::destination_node_id.is_not_null())
                    .filter(stored_messages::message_type.eq(DhtMessageType::None as i32))
                    .into_boxed();
                if let Some(since) = since {
                    query = query.filter(stored_messages::stored_at.gt(since.naive_utc()));
                }
                query
                    .order_by(stored_messages::stored_at.desc())
                    .limit(limit)
                    .get_results(conn)
                    .map_err(Into::into)
            })
            .await?;
        match dist_threshold {
            Some(dist_threshold) => {
                // Filter node ids that are within the distance threshold from the source node id
                let results = results
                    .into_iter()
                    // TODO: Investigate if we could do this in sqlite using XOR (^)
                    .filter(|message| match message.destination_node_id {
                        Some(ref dest_node_id) => match NodeId::from_hex(dest_node_id).ok() {
                            Some(dest_node_id) => {
                                &dest_node_id == node_id || &dest_node_id.distance(node_id) <= &*dist_threshold
                            },
                            // Unparseable node id stored: exclude it.
                            None => false,
                        },
                        // No destination node id recorded: keep the message.
                        None => true,
                    })
                    .collect();
                Ok(results)
            },
            None => Ok(results),
        }
    }
    /// Fetches encrypted plain messages that carry neither an origin nor a
    /// destination public key (anonymously addressed traffic).
    pub async fn find_anonymous_messages(
        &self,
        since: Option<DateTime<Utc>>,
        limit: i64,
    ) -> Result<Vec<StoredMessage>, StorageError>
    {
        self.connection
            .with_connection_async(move |conn| {
                let mut query = stored_messages::table
                    .select(stored_messages::all_columns)
                    .filter(stored_messages::origin_pubkey.is_null())
                    .filter(stored_messages::destination_pubkey.is_null())
                    .filter(stored_messages::is_encrypted.eq(true))
                    .filter(stored_messages::message_type.eq(DhtMessageType::None as i32))
                    .into_boxed();
                if let Some(since) = since {
                    query = query.filter(stored_messages::stored_at.gt(since.naive_utc()));
                }
                query
                    .order_by(stored_messages::stored_at.desc())
                    .limit(limit)
                    .get_results(conn)
                    .map_err(Into::into)
            })
            .await
    }
    /// Fetches stored `Join` messages, newest first.
    pub async fn find_join_messages(
        &self,
        since: Option<DateTime<Utc>>,
        limit: i64,
    ) -> Result<Vec<StoredMessage>, StorageError>
    {
        self.connection
            .with_connection_async(move |conn| {
                let mut query = stored_messages::table
                    .select(stored_messages::all_columns)
                    .filter(stored_messages::message_type.eq(DhtMessageType::Join as i32))
                    .into_boxed();
                if let Some(since) = since {
                    query = query.filter(stored_messages::stored_at.gt(since.naive_utc()));
                }
                query
                    .order_by(stored_messages::stored_at.desc())
                    .limit(limit)
                    .get_results(conn)
                    .map_err(Into::into)
            })
            .await
    }
    /// Fetches messages of a specific type addressed to `public_key`.
    pub async fn find_messages_of_type_for_pubkey(
        &self,
        public_key: &CommsPublicKey,
        message_type: DhtMessageType,
        since: Option<DateTime<Utc>>,
        limit: i64,
    ) -> Result<Vec<StoredMessage>, StorageError>
    {
        let pk_hex = public_key.to_hex();
        self.connection
            .with_connection_async(move |conn| {
                let mut query = stored_messages::table
                    .select(stored_messages::all_columns)
                    .filter(stored_messages::destination_pubkey.eq(pk_hex))
                    .filter(stored_messages::message_type.eq(message_type as i32))
                    .into_boxed();
                if let Some(since) = since {
                    query = query.filter(stored_messages::stored_at.gt(since.naive_utc()));
                }
                query
                    .order_by(stored_messages::stored_at.desc())
                    .limit(limit)
                    .get_results(conn)
                    .map_err(Into::into)
            })
            .await
    }
    /// Test helper: returns every row in the table, unfiltered.
    #[cfg(test)]
    pub(crate) async fn get_all_messages(&self) -> Result<Vec<StoredMessage>, StorageError> {
        self.connection
            .with_connection_async(|conn| {
                stored_messages::table
                    .select(stored_messages::all_columns)
                    .get_results(conn)
                    .map_err(Into::into)
            })
            .await
    }
    /// Deletes messages of the given priority stored strictly before `since`,
    /// returning the number of rows removed.
    pub(crate) async fn delete_messages_with_priority_older_than(
        &self,
        priority: StoredMessagePriority,
        since: NaiveDateTime,
    ) -> Result<usize, StorageError>
    {
        self.connection
            .with_connection_async(move |conn| {
                diesel::delete(stored_messages::table)
                    .filter(stored_messages::stored_at.lt(since))
                    .filter(stored_messages::priority.eq(priority as i32))
                    .execute(conn)
                    .map_err(Into::into)
            })
            .await
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use tari_test_utils::random;
    // Smoke test: a default message inserted into a fresh in-memory database
    // can be read back.
    #[tokio_macros::test_basic]
    async fn insert_messages() {
        // Random database name keeps concurrently running tests isolated.
        let conn = DbConnection::connect_memory(random::string(8)).await.unwrap();
        // let conn = DbConnection::connect_path("/tmp/tmp.db").await.unwrap();
        conn.migrate().await.unwrap();
        let db = StoreAndForwardDatabase::new(conn);
        db.insert_message(Default::default()).await.unwrap();
        let messages = db.get_all_messages().await.unwrap();
        assert_eq!(messages.len(), 1);
    }
}
|
//! The vast majority of the code is taken from https://github.com/markschl/seq_io/blob/master/src/fasta.rs
use crate::errors::{ErrorPosition, ParseError};
use crate::parser::record::SequenceRecord;
use crate::parser::utils::{
fill_buf, find_line_ending, grow_to, trim_cr, FastxReader, Format, LineEnding, Position,
BUFSIZE,
};
use memchr::{memchr2, Memchr};
use std::borrow::Cow;
use std::fs::File;
use std::io;
use std::io::BufRead;
use std::path::Path;
/// Byte offsets of one FASTA record inside the reader's buffer.
#[derive(Clone, Debug)]
pub struct BufferPosition {
    /// index of '>'
    pub(crate) start: usize,
    /// Indicate line start, but actually it is one byte before (start - 1), which is usually
    /// the line terminator of the header (if there is one). The last index in the Vec is always
    /// the last byte of the last sequence line (including line terminator if present).
    /// Therefore, the length of this Vec should never be 0.
    pub(crate) seq_pos: Vec<usize>,
}
impl BufferPosition {
    /// True if this position has not recorded any line ends yet.
    #[inline]
    fn is_new(&self) -> bool {
        self.seq_pos.is_empty()
    }
    /// Clears recorded line ends and re-anchors at `start`.
    #[inline]
    fn reset(&mut self, start: usize) {
        self.seq_pos.clear();
        self.start = start;
    }
    /// Detects the line ending used by this record, if any.
    #[inline]
    fn find_line_ending(&self, buffer: &[u8]) -> Option<LineEnding> {
        find_line_ending(self.all(buffer))
    }
    /// The entire record: from '>' through the last byte of the last
    /// sequence line.
    #[inline]
    pub(crate) fn all<'a>(&self, buffer: &'a [u8]) -> &'a [u8] {
        &buffer[self.start..*self.seq_pos.last().unwrap()]
    }
    /// The header line without the leading '>' (trailing '\r' trimmed).
    #[inline]
    pub(crate) fn id<'a>(&self, buffer: &'a [u8]) -> &'a [u8] {
        trim_cr(&buffer[self.start + 1..*self.seq_pos.first().unwrap()])
    }
    /// The raw sequence bytes, possibly containing interior line terminators;
    /// empty if the record has no sequence lines.
    #[inline]
    pub(crate) fn raw_seq<'a>(&self, buffer: &'a [u8]) -> &'a [u8] {
        if self.seq_pos.len() > 1 {
            let start = *self.seq_pos.first().unwrap() + 1;
            let end = *self.seq_pos.last().unwrap();
            trim_cr(&buffer[start..end])
        } else {
            b""
        }
    }
    /// The sequence with all line terminators removed. Borrows from `buffer`
    /// when the sequence is a single line; allocates only when interior
    /// newlines must be stripped.
    #[inline]
    pub(crate) fn seq<'a>(&self, buffer: &'a [u8]) -> Cow<'a, [u8]> {
        // Resolves the old "make that DRY" TODO: the slicing logic previously
        // duplicated here is exactly `raw_seq`.
        let seq = self.raw_seq(buffer);
        // Fast path: no line terminator present -> return a borrow.
        let mut i = match memchr2(b'\r', b'\n', seq) {
            Some(break_loc) => break_loc,
            None => return seq.into(),
        };
        // Slow path: copy into a fresh buffer, skipping every '\r'/'\n'.
        let mut new_buf = Vec::with_capacity(seq.len() - 1);
        new_buf.extend_from_slice(&seq[..i]);
        while i < seq.len() {
            match memchr2(b'\r', b'\n', &seq[i..]) {
                None => {
                    new_buf.extend_from_slice(&seq[i..]);
                    break;
                }
                Some(match_pos) => {
                    new_buf.extend_from_slice(&seq[i..i + match_pos]);
                    i += match_pos + 1;
                }
            }
        }
        new_buf.into()
    }
    /// Number of bases: raw length minus every '\n' and '\r' byte.
    #[inline]
    pub(crate) fn num_bases(&self, buffer: &[u8]) -> usize {
        let seq = self.raw_seq(buffer);
        let num_lines = bytecount::count(seq, b'\n');
        let windows_num_lines = bytecount::count(seq, b'\r');
        seq.len() - num_lines - windows_num_lines
    }
}
/// Parser for FASTA files.
/// Only use this directly if you know your file is FASTA and that it is not compressed as
/// it does not handle decompression.
/// If you are unsure, it's better to use [parse_fastx_file](fn.parse_fastx_file.html).
pub struct Reader<R: io::Read> {
    buf_reader: buffer_redux::BufReader<R>,
    // Offsets of the record currently being assembled within the buffer.
    buf_pos: BufferPosition,
    // Byte offset in the buffer where the scan for '\n' resumes.
    search_pos: usize,
    // Line/byte position of the current record within the whole input.
    position: Position,
    // Set once end of input is reached; `next()` then returns `None`.
    finished: bool,
    // Line ending detected from the first record; `None` until then.
    line_ending: Option<LineEnding>,
}
impl<R> Reader<R>
where
    R: io::Read,
{
    /// Creates a new reader with the default buffer size of 64 KiB
    ///
    /// # Example:
    ///
    /// ```
    /// use needletail::parser::{FastaReader, FastxReader};
    /// let fasta = b">id\nSEQUENCE";
    ///
    /// let mut reader = FastaReader::new(&fasta[..]);
    /// let record = reader.next().unwrap().unwrap();
    /// assert_eq!(record.id(), b"id")
    /// ```
    #[inline]
    pub fn new(reader: R) -> Reader<R> {
        Reader::with_capacity(reader, BUFSIZE)
    }
    /// Creates a new reader with a given buffer capacity. The minimum allowed
    /// capacity is 3.
    ///
    /// # Panics
    /// Panics if `capacity < 3` (the smallest parseable record prefix).
    #[inline]
    pub fn with_capacity(reader: R, capacity: usize) -> Reader<R> {
        assert!(capacity >= 3);
        Reader {
            buf_reader: buffer_redux::BufReader::with_capacity(capacity, reader),
            buf_pos: BufferPosition {
                start: 0,
                seq_pos: Vec::with_capacity(1),
            },
            // line == 0 doubles as "no data loaded yet" (see `next`).
            position: Position::new(0, 0),
            search_pos: 0,
            finished: false,
            line_ending: None,
        }
    }
}
impl Reader<File> {
    /// Creates a reader from a file path.
    ///
    /// # Example:
    ///
    /// ```no_run
    /// use needletail::parser::{FastaReader, FastxReader};
    ///
    /// let mut reader = FastaReader::from_path("seqs.fasta").unwrap();
    ///
    /// // (... do something with the reader)
    /// ```
    ///
    /// # Errors
    /// Returns the `io::Error` from opening the file.
    #[inline]
    pub fn from_path<P: AsRef<Path>>(path: P) -> io::Result<Reader<File>> {
        File::open(path).map(Reader::new)
    }
}
impl<R> Reader<R>
where
    R: io::Read,
{
    /// Shorthand for the currently buffered bytes.
    #[inline]
    fn get_buf(&self) -> &[u8] {
        self.buf_reader.buffer()
    }
    /// Advances `position` past the record just emitted and re-anchors
    /// `buf_pos` at `search_pos` for the next record.
    #[inline]
    fn next_pos(&mut self) {
        // One recorded offset per consumed line (header + sequence lines).
        self.position.line += self.buf_pos.seq_pos.len() as u64;
        self.position.byte += (self.search_pos - self.buf_pos.start) as u64;
        self.buf_pos.reset(self.search_pos);
    }
    /// Finds the position of the next record
    /// and returns true if found; false if end of buffer reached.
    #[inline]
    fn find(&mut self) -> bool {
        if self._find() {
            return true;
        }
        // nothing found
        if self.get_buf().len() < self.buf_reader.capacity() {
            // EOF reached, there will be no next record
            self.finished = true;
            if !self.buf_pos.seq_pos.is_empty() {
                // Close the final record at the end of the input.
                self.buf_pos.seq_pos.push(self.search_pos);
            }
            return true;
        }
        false
    }
    /// Returns true if complete position found, false if end of buffer reached.
    #[inline]
    fn _find(&mut self) -> bool {
        let bufsize = self.get_buf().len();
        // Scan each newline; a record is complete when the byte after a
        // newline is '>'.
        for pos in Memchr::new(b'\n', &self.buf_reader.buffer()[self.search_pos..]) {
            let pos = self.search_pos + pos;
            let next_line_start = pos + 1;
            if next_line_start == bufsize {
                // cannot check next byte -> treat as incomplete
                self.search_pos = pos; // make sure last byte is re-searched next time
                return false;
            }
            self.buf_pos.seq_pos.push(pos);
            if self.get_buf()[next_line_start] == b'>' {
                // complete record was found
                self.search_pos = next_line_start;
                return true;
            }
        }
        // record end not found
        self.search_pos = bufsize;
        false
    }
    /// To be called when the end of the buffer is reached and `next_pos` does not find
    /// the next record. Incomplete bytes will be moved to the start of the buffer.
    /// If the record still doesn't fit in, the buffer will be enlarged.
    /// After calling this function, the position will therefore always be 'complete'.
    /// this function assumes that the buffer was fully searched
    fn next_complete(&mut self) -> Result<bool, ParseError> {
        loop {
            if self.buf_pos.start == 0 {
                // first record -> buffer too small
                self.grow();
            } else {
                // not the first record -> buffer may be big enough
                self.make_room();
            }
            // fill up remaining buffer
            fill_buf(&mut self.buf_reader)?;
            if self.find() {
                return Ok(true);
            }
        }
    }
    /// Grow internal buffer as needed
    fn grow(&mut self) {
        let cap = self.buf_reader.capacity();
        let new_size = grow_to(cap);
        let additional = new_size - cap;
        self.buf_reader.reserve(additional);
    }
    /// Move incomplete bytes to start of buffer
    fn make_room(&mut self) {
        let consumed = self.buf_pos.start;
        self.buf_reader.consume(consumed);
        self.buf_reader.make_room();
        self.buf_pos.start = 0;
        // Shift every recorded offset back by the number of bytes dropped.
        self.search_pos -= consumed;
        for s in &mut self.buf_pos.seq_pos {
            *s -= consumed;
        }
    }
}
impl<R: io::Read + Send> FastxReader for Reader<R> {
    /// Yields the next FASTA record, `None` at end of input, or a
    /// `ParseError` for malformed data (wrong start byte, truncated record).
    fn next(&mut self) -> Option<Result<SequenceRecord, ParseError>> {
        if self.finished {
            return None;
        }
        // Load some data in the buffer to start
        if self.position.line == 0 {
            match fill_buf(&mut self.buf_reader) {
                Ok(n) => {
                    if n == 0 {
                        // Completely empty input: no records, no error.
                        self.finished = true;
                        return None;
                    }
                }
                Err(e) => {
                    return Some(Err(e.into()));
                }
            };
            // A FASTA stream must begin with '>'.
            if self.get_buf()[0] == b'>' {
                self.position.line = 1;
                self.position.byte = 0;
                self.buf_pos.start = 0;
                self.search_pos = 1;
            } else {
                return Some(Err(ParseError::new_invalid_start(
                    self.get_buf()[0],
                    ErrorPosition {
                        line: self.position.line,
                        id: None,
                    },
                    Format::Fasta,
                )));
            }
        }
        // Advance past the previously returned record, if any.
        if !self.buf_pos.is_new() {
            self.next_pos();
        }
        // Can we identify the start of the next record ?
        let complete = self.find();
        if !complete {
            // Did we get a record?
            let got_record = match self.next_complete() {
                Ok(f) => f,
                Err(e) => {
                    return Some(Err(e));
                }
            };
            if !got_record {
                return None;
            }
        }
        // A header with no recorded line end means the record was truncated.
        if self.buf_pos.seq_pos.is_empty() {
            return Some(Err(ParseError::new_unexpected_end(
                ErrorPosition {
                    line: self.position.line,
                    id: None,
                },
                Format::Fasta,
            )));
        }
        // Detect the line ending once, from the first complete record.
        if self.line_ending.is_none() {
            self.line_ending = self.buf_pos.find_line_ending(self.get_buf());
        }
        Some(Ok(SequenceRecord::new_fasta(
            self.get_buf(),
            &self.buf_pos,
            &self.position,
            self.line_ending,
        )))
    }
    /// Position (line/byte) of the record most recently returned.
    fn position(&self) -> &Position {
        &self.position
    }
    /// Line ending detected so far, if any record has been parsed.
    fn line_ending(&self) -> Option<LineEnding> {
        self.line_ending
    }
}
#[cfg(test)]
mod tests {
    use std::io::Cursor;
    use super::*;
    use crate::errors::ParseErrorKind;
    // Helper: wrap a byte literal in an in-memory reader.
    fn seq(s: &[u8]) -> Cursor<&[u8]> {
        Cursor::new(&s[..])
    }
    // Two single-line records parse with correct ids/sequences and the line
    // ending is detected after the first record.
    #[test]
    fn test_basic() {
        let mut reader = Reader::new(seq(b">test\nACGT\n>test2\nTGCA\n"));
        assert!(reader.line_ending().is_none());
        let rec = reader.next().unwrap();
        assert!(rec.is_ok());
        let r = rec.unwrap();
        assert_eq!(r.id(), b"test");
        assert_eq!(r.raw_seq(), b"ACGT");
        assert_eq!(r.all(), b">test\nACGT");
        assert_eq!(reader.line_ending().unwrap(), LineEnding::Unix);
        let rec = reader.next().unwrap();
        assert!(rec.is_ok());
        let r = rec.unwrap();
        assert_eq!(r.id(), b"test2");
        assert_eq!(r.raw_seq(), b"TGCA");
        assert!(reader.next().is_none());
    }
    // Multi-line sequences keep interior newlines in raw_seq but num_bases
    // excludes terminator bytes.
    #[test]
    fn test_wrapped_fasta() {
        let mut reader = Reader::new(seq(b">test\nACGT\nACGT\n>test2\nTGCA\nTG"));
        let rec = reader.next().unwrap();
        assert!(rec.is_ok());
        let r = rec.unwrap();
        assert_eq!(r.id(), b"test");
        assert_eq!(r.raw_seq(), b"ACGT\nACGT");
        assert_eq!(r.num_bases(), 8);
        assert_eq!(reader.line_ending().unwrap(), LineEnding::Unix);
        let rec = reader.next().unwrap();
        assert!(rec.is_ok());
        let r = rec.unwrap();
        assert_eq!(r.id(), b"test2");
        assert_eq!(r.raw_seq(), b"TGCA\nTG");
        assert_eq!(r.num_bases(), 6);
        assert!(reader.next().is_none());
    }
    // Same as above but with CRLF terminators and line-number tracking.
    #[test]
    fn test_wrapped_fasta_windows_newlines() {
        let mut reader = Reader::new(seq(b">test\r\nACGT\r\nACGT\r\n>test2\r\nTGCA\r\nTG"));
        let rec = reader.next().unwrap();
        assert!(rec.is_ok());
        let r = rec.unwrap();
        assert_eq!(r.id(), b"test");
        assert_eq!(r.raw_seq(), b"ACGT\r\nACGT");
        assert_eq!(r.num_bases(), 8);
        assert_eq!(r.start_line_number(), 1);
        assert_eq!(reader.line_ending().unwrap(), LineEnding::Windows);
        let rec = reader.next().unwrap();
        assert!(rec.is_ok());
        let r = rec.unwrap();
        assert_eq!(r.id(), b"test2");
        assert_eq!(r.raw_seq(), b"TGCA\r\nTG");
        assert_eq!(r.num_bases(), 6);
        assert_eq!(r.start_line_number(), 4);
        assert!(reader.next().is_none());
    }
    // A trailing header with no sequence line yields UnexpectedEnd.
    #[test]
    fn test_premature_ending() {
        let mut reader = Reader::new(seq(b">test\nAGCT\n>test2"));
        reader.next().unwrap().unwrap();
        let rec = reader.next().unwrap();
        assert!(rec.is_err());
        let r = rec.unwrap_err();
        assert_eq!(r.kind, ParseErrorKind::UnexpectedEnd);
        let mut reader = Reader::new(seq(b">test\r\nAGCT\r\n>test2\r\n"));
        reader.next().unwrap().unwrap();
        let rec = reader.next().unwrap();
        assert!(rec.is_err());
        let r = rec.unwrap_err();
        assert_eq!(r.kind, ParseErrorKind::UnexpectedEnd);
    }
    // Records with empty ids and empty sequences are valid.
    #[test]
    fn test_empty_records() {
        let mut reader = Reader::new(seq(b">\n\n>shine\nAGGAGGU"));
        let rec = reader.next().unwrap().unwrap();
        assert_eq!(rec.id(), b"");
        assert_eq!(rec.raw_seq(), b"");
        let rec = reader.next().unwrap().unwrap();
        assert_eq!(rec.id(), b"shine");
        assert_eq!(rec.raw_seq(), b"AGGAGGU");
        let mut reader = Reader::new(seq(b">\r\n\r\n>shine\r\nAGGAGGU"));
        let rec = reader.next().unwrap().unwrap();
        assert_eq!(rec.id(), b"");
        assert_eq!(rec.raw_seq(), b"");
        let rec = reader.next().unwrap().unwrap();
        assert_eq!(rec.id(), b"shine");
        assert_eq!(rec.raw_seq(), b"AGGAGGU");
    }
}
|
use rand::Rng;
use std::io;
use std::io::Write;
/// Tracks how many guesses the player started with and how many remain.
struct Tries {
    init: u8, // number of tries granted at the start of a round
    left: u8, // number of tries still available
}

impl Tries {
    /// Creates a counter with `x` tries granted and none used yet.
    fn init(x: u8) -> Tries {
        Tries { init: x, left: x }
    }

    /// Consumes one try. Underflows (debug panic) if none are left, so
    /// callers must check `left` before calling.
    fn dec(&mut self) {
        self.left -= 1;
    }

    /// Tries used so far, spelled out for the end-of-round message.
    fn total(&self) -> &'static str {
        match self.init - self.left {
            5 => "five tries",
            4 => "four tries",
            3 => "three tries",
            2 => "two tries",
            _ => "ONE TRY",
        }
    }

    /// Tries remaining as an English word, to avoid confusion. Only the
    /// 2..=4 range is reached; 1 and 0 are handled separately by the caller.
    fn left_str(&self) -> &'static str {
        match self.left {
            4 => "four",
            3 => "three",
            _ => "two",
        }
    }
}
/// A three-letter code drawn from the alphabet {A, B, C}.
struct Code {
    value: [char; 3],
}

impl Code {
    /// Generates a uniformly random code.
    fn gen_rng() -> Code {
        let mut rng_letters = ['A'; 3];
        let mut rng = rand::thread_rng();
        for slot in rng_letters.iter_mut() {
            *slot = match rng.gen_range(0..3) {
                0 => 'A',
                1 => 'B',
                _ => 'C',
            };
        }
        Code { value: rng_letters }
    }

    /// Builds a code from user input. Characters other than A/a and B/b
    /// map to 'C'. Iterates the string once (the previous `nth(i)` rescan
    /// was quadratic and panicked on inputs shorter than three chars;
    /// positions beyond the input now default to 'A').
    fn gen_from_input(s: &str) -> Code {
        let mut input_arr = ['A'; 3];
        for (slot, c) in input_arr.iter_mut().zip(s.chars()) {
            *slot = match c {
                'A' | 'a' => 'A',
                'B' | 'b' => 'B',
                _ => 'C',
            };
        }
        Code { value: input_arr }
    }

    /// Number of positions (0..=3) at which `abc` matches `self`.
    fn cmp(&self, abc: &Code) -> u8 {
        self.value
            .iter()
            .zip(abc.value.iter())
            .filter(|(a, b)| a == b)
            .count() as u8
    }
}
/// Validates one trimmed line of user input.
///
/// Returns `[is_valid, keep_playing]`:
/// * `[true, false]`  — a quit command ("quit", "exit" or "q")
/// * `[true, true]`   — a well-formed guess: exactly three characters,
///                      each one of A/B/C in either case
/// * `[false, true]`  — anything else (caller re-prompts)
fn verify_input(input: &str) -> [bool; 2] {
    match input {
        "quit" | "exit" | "q" => [true, false], // valid input, exit
        _ => {
            // Single pass over the characters; the previous version
            // re-scanned the string with `nth(i)` for every position.
            let valid = input.chars().count() == 3
                && input
                    .chars()
                    .all(|c| matches!(c, 'A' | 'B' | 'C' | 'a' | 'b' | 'c'));
            [valid, true]
        }
    }
}
/// Picks a random death message from `deaths`.
fn rng_death(deaths: [&'static str; 10]) -> &'static str {
    // Bound the index by the actual array length instead of a hard-coded
    // 10, so it cannot drift out of sync with the array size.
    let i = rand::thread_rng().gen_range(0..deaths.len());
    deaths[i]
}
/// Game loop: the player gets five tries per round to guess a random
/// three-letter code over {A, B, C}; "quit"/"exit"/"q" ends the program.
fn main() {
    // Pool of game-over messages; one is picked at random on a loss.
    const DEATH_LIST: [&'static str; 10] = [
        "Killed by the Paper Cut Serial Killer.",
        "Died while tring to traverse the code maze.",
        "The floor gave way to a lava pit.",
        "Killed while trying to escape the testing facility.",
        "The monster in the maze found you.",
        "You killed yourself after succumbing to pointlessness.",
        "Decapitated by a slow-moving truck.",
        "Killed by the Guessing Allotment Police.",
        "A laser-shooting robot arrived to kill you.",
        "You killed yourself after losing hope of ever solving this puzzle.",
    ];
    let mut play_again: bool = true;
    while play_again {
        // Fresh secret code and try counter for every round.
        let secret_code = Code::gen_rng();
        let mut tries = Tries::init(5u8);
        println!("\n Try to find the secret code.");
        println!(" The code is a series of 3 letters.");
        println!(" They are each either \"A\", \"B\", or \"C\".");
        print!(" Input your first guess: ");
        io::stdout().flush().unwrap();
        // The inner loop's break value decides the fate of the outer loop:
        // `break false` = user quit; `break true` = round finished (win or
        // loss), offer a replay below.
        play_again = loop {
            // take user input
            let mut input_string = String::new();
            match io::stdin().read_line(&mut input_string) {
                Ok(_) => (),
                Err(_) => {
                    println!("\n Error when processing your input.");
                    print!(" Please try again: ");
                    io::stdout().flush().unwrap();
                    continue;
                }
            };
            let input_string: &str = input_string.trim();
            let [is_input_valid, keep_playing] = verify_input(input_string);
            if is_input_valid == false {
                println! {"\n That is not a valid input."};
                print! {" Please try again: "};
                io::stdout().flush().unwrap();
                continue;
            } else if keep_playing == false {
                break false;
            }
            // convert input into a Code struct
            let input_code = Code::gen_from_input(input_string);
            // save number of matching letters from guess into a variable
            let compared: u8 = secret_code.cmp(&input_code);
            tries.dec();
            if compared == 3u8 {
                println!(
                    "\n \"{}{}{}\" is the correct secret code.",
                    secret_code.value[0], secret_code.value[1], secret_code.value[2]
                );
                println!(" Note: Sucessful attempt after {}.", tries.total());
                println!(" Thank you for your cooperation. Goodbye.\n");
                break true;
            } else if compared == 1u8 {
                // Singular wording for exactly one match.
                println!(
                    "\n 1 character in \"{}{}{}\" is correct.",
                    input_code.value[0], input_code.value[1], input_code.value[2]
                );
            } else {
                // 0 or 2 matches share the plural message.
                println!(
                    "\n {} of the characters in \"{}{}{}\" are correct.",
                    compared, input_code.value[0], input_code.value[1], input_code.value[2]
                );
            }
            // checks how many tries left or if user has lost
            if tries.left == 0u8 {
                println!(" You've run out of guesses alloted.");
                println!(
                    " The secret code was \"{}{}{}\".",
                    secret_code.value[0], secret_code.value[1], secret_code.value[2]
                );
                println!("\n You are now dead.");
                println!(" {}", rng_death(DEATH_LIST));
                println!(" Thank you for your cooperation. Goodbye.\n");
                break true;
            } else if tries.left == 1u8 {
                println!(" You have one guess remaining.");
            } else {
                println!(" You have {} guesses remaining.", tries.left_str());
            }
            print!(" Input your next guess: ");
            io::stdout().flush().unwrap();
        }; // end of loop
        // Round ended normally (not a quit): offer another round.
        if play_again == true {
            print!(" Play again? (y/n): ");
            io::stdout().flush().unwrap();
            let mut again_string = String::new();
            match io::stdin().read_line(&mut again_string) {
                Ok(_) => {
                    // Anything other than exactly "y" stops the game.
                    if again_string.trim() != "y" {
                        play_again = false;
                    }
                }
                Err(_) => play_again = false,
            };
        }
    } // end of while
    println!("");
}
|
use parser::ArgumentParser;
use super::{Store, List};
use test_parser::{check_ok};
/// Parses `args` with one positional `value` argument and returns it.
fn parse_pos(args: &[&str]) -> isize {
    let mut val = 0;
    {
        // Inner scope limits the borrow of `val` taken by `refer`.
        let mut ap = ArgumentParser::new();
        ap.refer(&mut val)
            .add_argument("value", Store, "The value");
        check_ok(&ap, args);
    }
    return val;
}

/// A single positional argument is stored.
#[test]
fn test_argument() {
    assert_eq!(parse_pos(&["./argparse_test", "10"]), 10);
}

/// Extra positional arguments are rejected.
#[test]
#[should_panic]
fn too_much_args() {
    parse_pos(&["./argparse_test", "10", "20"]);
}

/// Non-numeric values are rejected.
#[test]
#[should_panic]
fn wrong_value() {
    parse_pos(&["./argparse_test", "test", "20"]);
}

/// Floats cannot be parsed into an isize.
#[test]
#[should_panic]
fn float_value() {
    parse_pos(&["./argparse_test", "1.5"]);
}
/// Parses two positional arguments; defaults (1, 2) survive when fewer
/// are supplied.
fn parse_two(args: &[&str]) -> (isize, isize) {
    let mut val1 = 1;
    let mut val2 = 2;
    {
        let mut ap = ArgumentParser::new();
        ap.refer(&mut val1)
            .add_argument("v1", Store, "The value 1");
        ap.refer(&mut val2)
            .add_argument("v2", Store, "The value 2");
        check_ok(&ap, args);
    }
    return (val1, val2);
}

/// One arg fills v1 (v2 keeps its default); two args fill both.
#[test]
fn test_two() {
    assert_eq!(parse_two(&["./argparse_test", "10"]), (10, 2));
    assert_eq!(parse_two(&["./argparse_test", "11", "21"]), (11, 21));
}

/// A third positional argument is rejected.
#[test]
#[should_panic]
fn test_two_fail_many() {
    parse_two(&["./argparse_test", "10", "20", "30"]);
}

/// Non-numeric v1 is rejected.
#[test]
#[should_panic]
fn test_two_fail_value() {
    parse_two(&["./argparse_test", "test", "20"]);
}

/// Floats are rejected for isize arguments.
#[test]
#[should_panic]
fn test_two_fail_float() {
    parse_two(&["./argparse_test", "1.5"]);
}
/// v1 can be given either positionally or via the `--v1` option; v2 is
/// positional only.
fn parse_pos_opt(args: &[&str]) -> (isize, isize) {
    let mut val1 = 1;
    let mut val2 = 2;
    {
        let mut ap = ArgumentParser::new();
        ap.refer(&mut val1)
            .add_option(&["--v1"], Store, "The value 1")
            .add_argument("v1", Store, "The value 1");
        ap.refer(&mut val2)
            .add_argument("v2", Store, "The value 2");
        check_ok(&ap, args);
    }
    return (val1, val2);
}

/// When `--v1` is used, the positional slot shifts to v2 — note the last
/// case: "10" becomes v2 while `--v1=9` fills v1.
#[test]
fn test_positional_optional() {
    assert_eq!(parse_pos_opt(&["./argparse_test", "10"]), (10, 2));
    assert_eq!(parse_pos_opt(&["./argparse_test", "11", "21"]), (11, 21));
    assert_eq!(parse_pos_opt(&["./argparse_test", "--v1=7", "8"]), (7, 8));
    assert_eq!(parse_pos_opt(&["./argparse_test", "10", "--v1=9"]), (9, 10));
}

/// Option plus two positionals overflows the two declared slots.
#[test]
#[should_panic]
fn test_pos_opt_err() {
    parse_pos_opt(&["./argparse_test", "--v1=10", "20", "30"]);
}
/// Same as parse_pos_opt but v1 is marked required: it must be supplied
/// either positionally or as `--v1`.
fn parse_pos_req(args: &[&str]) -> (isize, isize) {
    let mut val1 = 1;
    let mut val2 = 2;
    {
        let mut ap = ArgumentParser::new();
        ap.refer(&mut val1)
            .add_option(&["--v1"], Store, "The value 1")
            .add_argument("v1", Store, "The value 1")
            .required();
        ap.refer(&mut val2)
            .add_argument("v2", Store, "The value 2");
        check_ok(&ap, args);
    }
    return (val1, val2);
}

/// The requirement is satisfied by either form of v1.
#[test]
fn test_positional_required() {
    assert_eq!(parse_pos_req(&["./argparse_test", "10"]), (10, 2));
    assert_eq!(parse_pos_req(&["./argparse_test", "11", "21"]), (11, 21));
    assert_eq!(parse_pos_req(&["./argparse_test", "--v1=7"]), (7, 2));
    assert_eq!(parse_pos_req(&["./argparse_test", "10", "--v1=9"]), (9, 10));
}

/// Too many positionals still fail when the option is used.
#[test]
#[should_panic]
fn test_pos_extra() {
    parse_pos_req(&["./argparse_test", "--v1=10", "20", "30"]);
}

/// Omitting required v1 entirely fails.
#[test]
#[should_panic]
fn test_pos_no_req() {
    parse_pos_req(&["./argparse_test"]);
}
/// With `stop_on_first_argument(true)`, everything after the first
/// positional argument is collected verbatim into the v2 list — even
/// strings that look like options.
fn pos_stop(args: &[&str]) -> (isize, Vec<String>) {
    let mut val1 = 1;
    let mut val2 = Vec::new();
    {
        let mut ap = ArgumentParser::new();
        ap.refer(&mut val1)
            .add_option(&["--v1"], Store, "The value 1")
            .add_argument("v1", Store, "The value 1")
            .required();
        ap.refer(&mut val2)
            .add_argument("v2", List, "The value 2");
        ap.stop_on_first_argument(true);
        check_ok(&ap, args);
    }
    return (val1, val2);
}

/// Options before the first positional are parsed; everything after it
/// (including "--v1=9") is passed through as list values.
#[test]
fn test_pos_stop() {
    assert_eq!(pos_stop(&["./argparse_test", "10"]), (10, vec!()));
    assert_eq!(pos_stop(&["./argparse_test", "11", "21"]),
        (11, vec!("21".to_string())));
    assert_eq!(pos_stop(&["./argparse_test", "--v1=7"]), (7, vec!()));
    assert_eq!(pos_stop(&["./argparse_test", "10", "--v1=9", "--whatever"]),
        (10, vec!("--v1=9".to_string(), "--whatever".to_string())));
}

/// The required v1 is still enforced.
#[test]
#[should_panic]
fn test_test() {
    pos_stop(&["./argparse_test"]);
}
/// Parses `args` into a positional list, optionally silencing the
/// conventional "--" separator (`silence_double_dash`).
fn pos_dash(args: &[&str], dash: bool) -> Vec<String> {
    let mut collected = Vec::new();
    {
        let mut ap = ArgumentParser::new();
        ap.refer(&mut collected)
            .add_argument("v1", List, "The value");
        ap.silence_double_dash(dash);
        check_ok(&ap, args);
    }
    collected
}

/// "--" is swallowed when silenced, and kept as an ordinary value
/// otherwise.
#[test]
fn test_pos_dash() {
    assert_eq!(
        pos_dash(&["./argparse_test", "1"], true),
        vec!["1".to_string()]
    );
    assert_eq!(
        pos_dash(&["./argparse_test", "--", "1"], true),
        vec!["1".to_string()]
    );
    assert_eq!(
        pos_dash(&["./argparse_test", "--", "1"], false),
        vec!["--".to_string(), "1".to_string()]
    );
}
|
use std::fmt;
use std::sync::RwLock;
lazy_static! {
    // Global monotonically increasing id source used by LinkTreeNode::create.
    pub static ref MUTEX_ID_COUNTER: RwLock<u32> = RwLock::new(0);
}

/// A node in the link tree: a URL plus its child nodes.
#[derive(Debug, Serialize, Deserialize)]
pub struct LinkTreeNode {
    id: u32,                      // unique id assigned from MUTEX_ID_COUNTER
    link: String,                 // the URL this node represents
    node_list: Vec<LinkTreeNode>, // child nodes
    parent_id: u32,               // id of the parent node (0 for roots)
    depth: u32,                   // distance from the root (0 for roots)
}
impl fmt::Display for LinkTreeNode {
    /// Pretty-prints the node and, recursively, its children. The format
    /// width (e.g. `{:4}`) is repurposed as the indentation level; each
    /// child level is indented ten columns deeper.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let decimals = f.width().unwrap_or(0);
        let mut string = format!(
            "{: <width$}{}; id:{}; parent_id:{};\n",
            " ",
            self.link,
            self.id,
            self.parent_id,
            width = decimals
        );
        for x in &self.node_list {
            // Recurse via Display with a larger width so children indent
            // further.
            string.push_str(&format!("{:<1$}", x, decimals + 10))
        }
        write!(f, "{}", string)
    }
}
impl LinkTreeNode {
    /// Creates a root-level node for `link` with a fresh unique id.
    ///
    /// The id is read and incremented under a single write lock. The
    /// previous `try_read().unwrap()` + separate `write()` pair could
    /// panic whenever the lock was contended, and the gap between the
    /// read and the write let two concurrent callers observe the same id
    /// (check-then-act race).
    pub fn create(link: &String) -> LinkTreeNode {
        let mut counter = MUTEX_ID_COUNTER.write().unwrap();
        let id = *counter;
        *counter += 1;
        LinkTreeNode {
            id,
            link: link.clone(),
            node_list: vec![],
            parent_id: 0,
            depth: 0,
        }
    }
    /// Attaches `node` as a child, fixing up its parent id and depth.
    pub fn add_child(&mut self, mut node: LinkTreeNode) {
        node.set_parent_id(self.id);
        node.set_depth(self.depth + 1);
        self.node_list.push(node);
    }
    /// Sets the parent id.
    pub fn set_parent_id(&mut self, parent_id: u32) {
        self.parent_id = parent_id;
    }
    /// Sets the node's depth (distance from the root).
    pub fn set_depth(&mut self, depth: u32) {
        self.depth = depth;
    }
    /// The URL this node represents.
    pub fn link(&self) -> &String {
        &self.link
    }
    /// Depth of this node (0 for roots).
    pub fn depth(&self) -> &u32 {
        &self.depth
    }
    /// Mutable access to the children.
    pub fn node_list(&mut self) -> &mut Vec<LinkTreeNode> {
        &mut self.node_list
    }
    /// Read-only access to the children.
    pub fn node_list_immutable(&self) -> &Vec<LinkTreeNode> {
        &self.node_list
    }
    /// This node's unique id.
    pub fn get_id(&self) -> &u32 {
        &self.id
    }
}
|
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;
use std::fmt;
mod cpu;
mod ppu;
/// Errors produced while loading a NES ROM file.
#[derive(Debug)]
pub enum NESError {
    /// Underlying I/O failure while opening or reading the file.
    Io(io::Error)
}
impl From<io::Error> for NESError {
    fn from(err: io::Error) -> NESError {
        NESError::Io(err)
    }
}
impl fmt::Display for NESError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            // Delegate to the wrapped io::Error's Display.
            NESError::Io(ref err) => err.fmt(f)
        }
    }
}
/// An iNES ROM image split into its header fields and data sections.
#[derive(Default)]
pub struct ROM {
    /// Number of PRG-ROM banks (read from header byte 4).
    pub num_prg_banks: u32,
    /// Number of CHR-ROM banks (read from header byte 5).
    pub num_chr_banks: u32,
    /// Number of RAM banks (read from header byte 8).
    pub num_ram_banks: u32,
    /// Mapper number assembled from the two control bytes (6 and 7).
    pub mapper_id: u32,
    /// Raw PRG-ROM data.
    pub prg_rom: Vec<u8>,
    /// Raw CHR-ROM data.
    pub chr_rom: Vec<u8>
}
/// Size of one PRG-ROM bank (16 KiB).
pub const PRG_BANK_SIZE: usize = 0x4000;
/// Size of one CHR-ROM bank (8 KiB).
pub const CHR_BANK_SIZE: usize = 0x2000;
/// Size of the iNES file header.
const HEADER_SIZE: usize = 16;
/// Size of the optional trainer section between header and PRG data.
const TRAINER_SIZE: usize = 512;
pub fn load_nes_file<P: AsRef<Path>>(path: P) -> Result<ROM, NESError> {
let mut file = try!(File::open(&path));
let mut file_buf = Vec::new();
try!(file.read_to_end(&mut file_buf));
assert_eq!(&file_buf[..4], &[0x4e, 0x45, 0x53, 0x1a], "Not a valid NES file.");
let mut rom = ROM::default();
rom.num_prg_banks = file_buf[4] as u32;
rom.num_chr_banks = file_buf[5] as u32;
// TODO: handle the remaining bits
let ctrl1 = file_buf[6];
let ctrl2 = file_buf[7];
rom.mapper_id = ((ctrl2 & 0xf0) | (ctrl1 >> 4)) as u32;
rom.num_ram_banks = file_buf[8] as u32;
let prg_rom_start = HEADER_SIZE;
let prg_rom_end = prg_rom_start + rom.num_prg_banks as usize * PRG_BANK_SIZE;
rom.prg_rom = file_buf[prg_rom_start..prg_rom_end].to_vec();
Ok(rom)
}
/// The console itself; currently just wraps the CPU.
#[derive(Default)]
pub struct NES {
    cpu: cpu::CPU,
}
impl NES {
    /// Creates a NES with a freshly constructed CPU.
    pub fn new() -> NES {
        NES {
            cpu: cpu::CPU::new(),
        }
    }
    /// Loads `rom` into the CPU, resets it, and steps it forever.
    /// NOTE: this loop never returns.
    pub fn run(&mut self, rom: &ROM) {
        self.cpu.load_rom(rom);
        self.cpu.reset();
        loop {
            self.cpu.run_step();
        }
    }
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
failure::{format_err, Error, ResultExt},
fidl_fidl_test_compatibility::{
EchoEvent, EchoMarker, EchoProxy, EchoRequest, EchoRequestStream,
},
fidl_fuchsia_sys::LauncherProxy,
fuchsia_async as fasync,
fuchsia_component::{
client::{launch, launcher, App},
server::ServiceFs,
},
futures::{StreamExt, TryStreamExt},
};
/// Launches the component at `url` and connects to its Echo service.
/// Returns the proxy together with the `App` handle — dropping the `App`
/// tears the launched component down.
fn launch_and_connect_to_echo(
    launcher: &LauncherProxy,
    url: String,
) -> Result<(EchoProxy, App), Error> {
    let app = launch(&launcher, url, None)?;
    let echo = app.connect_to_service::<EchoMarker>()?;
    Ok((echo, app))
}
/// Serves one Echo connection. When a request carries a non-empty
/// `forward_to_server` URL, a second server instance is launched, the
/// value is round-tripped through it, and that instance is torn down
/// before replying to the original caller.
async fn echo_server(stream: EchoRequestStream, launcher: &LauncherProxy) -> Result<(), Error> {
    let handler = move |request| {
        // Boxed so every request future has one concrete type for
        // try_for_each_concurrent below.
        Box::pin(async move {
            match request {
                // Request/response echo of a struct value.
                EchoRequest::EchoStruct { mut value, forward_to_server, responder } => {
                    if !forward_to_server.is_empty() {
                        let (echo, app) = launch_and_connect_to_echo(launcher, forward_to_server)
                            .context("Error connecting to proxy")?;
                        value = echo.echo_struct(&mut value, "").await
                            .context("Error calling echo_struct on proxy")?;
                        // Tear down the forwarded-to component.
                        drop(app);
                    }
                    responder.send(&mut value).context("Error responding")?;
                }
                // Fire-and-forget variant: the reply comes back as an event
                // rather than a response.
                EchoRequest::EchoStructNoRetVal {
                    mut value,
                    forward_to_server,
                    control_handle,
                } => {
                    if !forward_to_server.is_empty() {
                        let (echo, app) = launch_and_connect_to_echo(launcher, forward_to_server)
                            .context("Error connecting to proxy")?;
                        echo.echo_struct_no_ret_val(&mut value, "")
                            .context("Error sending echo_struct_no_ret_val to proxy")?;
                        // Wait for the proxied server's event carrying the
                        // echoed value.
                        let mut event_stream = echo.take_event_stream();
                        let EchoEvent::EchoEvent { value: response_val } =
                            event_stream.try_next().await
                            .context("Error getting event response from proxy")?
                            .ok_or_else(|| format_err!("Proxy sent no events"))?;
                        value = response_val;
                        drop(app);
                    }
                    control_handle
                        .send_echo_event(&mut value)
                        .context("Error responding with event")?;
                }
                EchoRequest::EchoArrays { mut value, forward_to_server, responder } => {
                    if !forward_to_server.is_empty() {
                        let (echo, app) = launch_and_connect_to_echo(launcher, forward_to_server)
                            .context("Error connecting to proxy")?;
                        value = echo.echo_arrays(&mut value, "").await
                            .context("Error calling echo_arrays on proxy")?;
                        drop(app);
                    }
                    responder.send(&mut value).context("Error responding")?;
                }
                EchoRequest::EchoVectors { mut value, forward_to_server, responder } => {
                    if !forward_to_server.is_empty() {
                        let (echo, app) = launch_and_connect_to_echo(launcher, forward_to_server)
                            .context("Error connecting to proxy")?;
                        value = echo.echo_vectors(&mut value, "").await
                            .context("Error calling echo_vectors on proxy")?;
                        drop(app);
                    }
                    responder.send(&mut value).context("Error responding")?;
                }
                EchoRequest::EchoTable { .. } => {
                    // Enabling this blows the stack.
                }
                EchoRequest::EchoXunions { .. } => {
                    // Enabling this blows the stack.
                }
                // Kept for reference: the intended table/xunion handlers,
                // disabled because of the stack overflow noted above.
                /*
                EchoRequest::EchoTable { mut value, forward_to_server, responder } => {
                    if !forward_to_server.is_empty() {
                        let (echo, app) = launch_and_connect_to_echo(launcher, forward_to_server)
                            .context("Error connecting to proxy")?;
                        value = echo.echo_table(value, "").await
                            .context("Error calling echo_table on proxy")?;
                        drop(app);
                    }
                    responder.send(value)
                        .context("Error responding")?;
                }
                EchoRequest::EchoXunions { mut value, forward_to_server, responder } => {
                    if !forward_to_server.is_empty() {
                        let (echo, app) = launch_and_connect_to_echo(launcher, forward_to_server)
                            .context("Error connecting to proxy")?;
                        value = echo.echo_xunions(&mut value.iter_mut(), "").await
                            .context("Error calling echo_xunions on proxy")?;
                        drop(app);
                    }
                    responder.send(&mut value.iter_mut()).context("Error responding")?;
                }
                */
            }
            Ok(())
        })
    };
    let handle_requests_fut = stream
        .err_into() // change error type from fidl::Error to failure::Error
        .try_for_each_concurrent(None /* max concurrent requests per connection */, handler);
    handle_requests_fut.await
}
/// Entry point: exposes the Echo service and handles every connection —
/// and every request within each connection — concurrently on a
/// single-threaded executor.
fn main() -> Result<(), Error> {
    let mut executor = fasync::Executor::new().context("Error creating executor")?;
    let launcher = launcher().context("Error connecting to application launcher")?;
    let mut fs = ServiceFs::new_local();
    // Identity closure: each incoming channel is yielded to the stream
    // as-is for echo_server to consume.
    fs.dir("svc").add_fidl_service(|stream| stream);
    fs.take_and_serve_directory_handle().context("Error serving directory handle")?;
    let serve_fut = fs.for_each_concurrent(None /* max concurrent connections */, |stream| {
        async {
            // Log and drop failed connections rather than exiting.
            if let Err(e) = echo_server(stream, &launcher).await {
                eprintln!("Closing echo server {:?}", e);
            }
        }
    });
    executor.run_singlethreaded(serve_fut);
    Ok(())
}
|
// we can do this since the file `front_of_house.rs`
// exists in the same directory
mod front_of_house;
mod back_of_house {
    /// Kitchen-internal; reaches a sibling module through `super::`.
    fn fix_incorrect_order() {
        cook_order();
        super::front_of_house::serving::serve_order();
    }
    fn cook_order() {}
    /// `toast` is public but `seasonal_fruit` stays private, so external
    /// code must construct a Breakfast through `summer`.
    pub struct Breakfast {
        pub toast: String,
        seasonal_fruit: String,
    }
    impl Breakfast {
        /// The only public constructor; the fruit is chosen by the kitchen.
        pub fn summer(toast: &str) -> Breakfast {
            Breakfast {
                toast: String::from(toast),
                seasonal_fruit: String::from("peaches"),
            }
        }
    }
    /// Making an enum `pub` makes all of its variants public too.
    pub enum Appetizer {
        Soup,
        Salad,
    }
}
// external code importing this library
// can use hosting due to `pub` keyboard
pub use crate::front_of_house::hosting;
/// Demonstrates module paths and struct field privacy.
pub fn eat_at_restaurant() {
    // Absolute path
    // crate::front_of_house::hosting::add_to_waitlist();
    // Relative path
    // front_of_house::hosting::add_to_waitlist();
    // Relative path with `use`
    hosting::add_to_waitlist();
    // Order a breakfast in the summer with Rye toast
    let mut meal = back_of_house::Breakfast::summer("naan");
    // Change our mind about what bread we'd like; allowed because `toast`
    // is a public field.
    meal.toast = String::from("wholemeal");
    println!("I'd like {} toast, please!", meal.toast);
    // This line would error because `seasonal_fruit` is private:
    // meal.seasonal_fruit = String::from("blueberries");
    let order1 = back_of_house::Appetizer::Soup;
    let order2 = back_of_house::Appetizer::Salad;
}
|
use serde_json::{from_str, Error, Value};
use serial::{PortSettings, SerialPort};
#[allow(unused_imports)]
use std::cell::RefCell;
use std::sync::{Arc, Mutex};
use std::time::Duration;
#[allow(unused_imports)]
use std::*;
use std::io::{self};
/// Current state of the program.
/// NOTE(review): mutable global accessed via `unsafe`; confirm
/// single-threaded access or replace with an atomic/lock.
static mut PROGRAM_STATE: ProgramState = ProgramState::Starting;
/// Shared handle representing a serial port.
pub type ISerialPort = Arc<Mutex<SerialPort>>;
/// A UUID, stored as a string.
pub type IGUID = String;
/// Meter consumption reading.
pub type IConsumption = f64;
/// Program state at a given moment in time.
pub enum ProgramState {
    /// Starting up
    Starting,
    /// Shutting down
    Closing,
    /// Normal operation
    Working,
}
/// State of the communication link.
#[derive(Copy, Clone)]
pub enum StateLink {
    /// Unknown state; the link may still be initializing
    Unknown,
    /// The link works; data passes through uncorrupted
    Working,
    /// No connection: no data is sent or received
    NoLink,
    /// Data arrives corrupted or incomplete
    Corrupted,
    /// The object performing the communication is inactive
    Deactive,
}
/// Serial connection parameters.
pub struct SerialConfig {
    pub settings: PortSettings,
    pub port_name: String,
    pub timeout: u64,
    pub port: Option<Box<SerialPort + Send>>,
}
/// Information common to all system objects.
pub trait IGeneralInformation {
    /// Unique GUID of the device.
    fn guid(&mut self) -> IGUID;
    // Type name used for dynamic dispatch; `Self: Sized` keeps the trait
    // object-safe.
    fn type_name() -> &'static str
    where
        Self: Sized;
    // Human-readable description of the object.
    fn description() -> &'static str
    where
        Self: Sized;
}
/// Макросы для создания менеджера свойств
#[macro_export]
macro_rules! propertie_manager {
($factory:ident, $manager:ident) => {
#[derive(Default)]
pub struct $factory;
impl IManagerPropertiesFactory for $factory {
fn spawn() -> Arc<Mutex<IManagerProperties>> {
Arc::new(Mutex::new($manager {list: HashMap::new()}))
}
}
pub struct $manager {
list: HashMap<String, PropertiesItem>,
}
impl IManagerProperties for $manager {
fn add(&mut self, item: PropertiesItem) {
&self.list.insert(item.name.clone(), item);
}
fn set_value_by_name(&mut self, name: &str, value: &str) {
let mut item: PropertiesItem = self.list.get(name).expect("Не смог установить свойство для объекта").clone();
item.value = value.to_string();
&self.list.insert(name.to_string(), item);
}
fn list_properties(&self) -> Vec<&PropertiesItem> {
let mut result = vec![];
for value in self.list.values() {
result.push(value);
}
result
}
}
};
}
/// Factory for creating communication channels.
pub trait ILinkChannelFactory {
    fn spawn(&mut self) -> Arc<Mutex<dyn ILinkChannel>>;
    fn spawn_with_uuid(&mut self, uuid: IGUID) -> Arc<Mutex<dyn ILinkChannel>>;
}
/// # Communication-channel trait
///
pub trait ILinkChannel {
    /// Unique GUID of the device.
    fn guid(&mut self) -> IGUID;
    /// (Re)configures the channel.
    fn reconf(&mut self);
    /// Sends data over the channel.
    fn send(&mut self, data: &Vec<u8>);
    /// Reads available data from the channel.
    fn read(&mut self) -> Vec<u8>;
    // Type name; `Self: Sized` keeps the trait object-safe.
    fn type_name() -> &'static str
    where
        Self: Sized;
    /// The object's configurable properties.
    fn properties(&self) -> Arc<Mutex<IManagerProperties>>;
}
/// Factory for creating meters (counters).
pub trait ICounterFactory {
    fn spawn(&mut self, channel: Arc<Mutex<ILinkChannel>>) -> Arc<Mutex<dyn ICounter>>;
    fn spawn_with_uuid(
        &mut self,
        uuid: IGUID,
        channel: Arc<Mutex<ILinkChannel>>,
    ) -> Arc<Mutex<dyn ICounter>>;
}
/// A metering device attached to a communication channel.
pub trait ICounter {
    /// Unique GUID of the device.
    fn guid(&mut self) -> IGUID;
    /// Queues this counter's commands on the communication channel.
    fn communicate(&mut self);
    /// Processes the response to a previously sent request.
    fn processing(&mut self, request: Vec<u8>, response: Vec<u8>);
    /// Returns the measured consumption.
    fn consumption(&self) -> IConsumption;
    /// Counter type name; `Self: Sized` keeps the trait object-safe.
    fn type_name() -> &'static str
    where
        Self: Sized;
    /// Counter display name, if set.
    fn name(&self) -> Option<String>;
    /// Serial number, if known.
    fn serial(&self) -> Option<String>;
    /// Performs a verification (calibration check).
    fn verification(&self) -> io::Result<()>;
    /// Date of the last verification.
    fn last_verification_date(&self) -> Option<Duration>;
    /// How often verification must be performed.
    fn verification_interval(&self) -> Option<Duration>;
    /// Sets the interval between verifications.
    fn set_verification_interval(&mut self, interval: Duration) -> io::Result<()>;
    /// Returns the communication channel this counter belongs to.
    fn parent(&self) -> Arc<Mutex<ILinkChannel>>;
    /// The object's configurable properties.
    fn properties(&self) -> Arc<Mutex<IManagerProperties>>;
}
/// An electricity meter; extends ICounter with per-phase readings.
pub trait IElectroCounter: ICounter {
    type Energy;
    type Phase;
    type Voltage;
    // Active energy
    fn active_energy(&self, phase: Self::Phase) -> Option<Self::Energy>;
    // Reactive energy
    fn reactive_energy(&self, phase: Self::Phase) -> Option<Self::Energy>;
    // Per-phase RMS value. NOTE(review): the original comment said phase
    // currents, but the signature returns a Voltage — confirm which is
    // intended.
    fn voltage(&self, phase: Self::Phase) -> Option<Self::Voltage>;
    // Mains frequency
    fn frequencies(&self, phase: Self::Phase) -> Option<i32>;
}
// Factory for creating interfaces
pub trait IFaceFactory {
    fn spawn(&mut self) -> Arc<Mutex<dyn IFace>>;
    fn spawn_with_uuid(&mut self, uuid: IGUID) -> Arc<Mutex<dyn IFace>>;
}
/// A network interface: the abstraction describing how any device is
/// attached to the system.
pub trait IFace: Send {
    // Exchange data with all child devices
    fn processing(&mut self);
    // Class name; `Self: Sized` keeps the trait object-safe.
    fn type_name() -> &'static str
    where
        Self: Sized;
    // Object description
    fn description() -> &'static str
    where
        Self: Sized;
}
// Factory for property managers
pub trait IManagerPropertiesFactory {
    fn spawn() -> Arc<Mutex<dyn IManagerProperties>>;
}
// Property-manager trait
pub trait IManagerProperties {
    /// Inserts a property, keyed by its name.
    fn add(&mut self, item: PropertiesItem);
    /// Replaces the value of the named property.
    fn set_value_by_name(&mut self, name: &str, value: &str);
    /// Lists all stored properties (order unspecified).
    fn list_properties(&self) -> Vec<&PropertiesItem>;
}
// Access type of a property.
#[derive(Clone)]
pub enum PropertiesType {
    Read,
    ReadWrite,
    Hide,
}

impl From<i8> for PropertiesType {
    /// 0 → Read, 1 → ReadWrite, anything else → Hide.
    fn from(val: i8) -> Self {
        match val {
            0 => PropertiesType::Read,
            1 => PropertiesType::ReadWrite,
            _ => PropertiesType::Hide,
        }
    }
}

impl From<PropertiesType> for i8 {
    /// Inverse of `From<i8>`: Read → 0, ReadWrite → 1, Hide → 2.
    fn from(val: PropertiesType) -> i8 {
        match val {
            PropertiesType::Read => 0,
            PropertiesType::ReadWrite => 1,
            PropertiesType::Hide => 2,
        }
    }
}

// One entry in the property manager.
// The previous hand-written `impl Clone` cloned every field exactly as
// `#[derive(Clone)]` does, so the derive replaces it.
#[derive(Clone)]
pub struct PropertiesItem {
    pub name: String,          // property key
    pub value: String,         // current value (stringly typed)
    pub ptype: PropertiesType, // access type
    pub variants: Vec<String>, // allowed values, if enumerated
    pub regexpr: String,       // validation regular expression
    pub min: i16,              // minimum allowed numeric value
    pub max: i16,              // maximum allowed numeric value
    pub err_msg: String,       // message shown on validation failure
    pub required: bool,        // whether the property must be set
}
/// Reports whether the program is shutting down.
#[allow(dead_code)]
fn terminated() -> bool {
    // Reads the mutable global PROGRAM_STATE.
    // NOTE(review): assumes no data race with concurrent writers — confirm
    // single-threaded use.
    unsafe {
        match PROGRAM_STATE {
            ProgramState::Closing => true,
            _ => false,
        }
    }
}
///
/// Обработка команд от сервера\клиента
///
pub fn processing(request: &str) -> Result<String, Error> {
let val: Value = from_str(request)?;
let action = match val["action"] {
Value::String(ref expr) => expr,
_ => "",
};
match action {
"init" => {
let respone = json!({
"action" : "init",
"code" : 200,
"guid": "Тестовый GUID"
});
return Ok(respone.to_string());
}
_ => return Ok("No result!!!".to_string()),
};
} |
//! Tries to unify the two traits via templating on the error type and using
//! the never type (still experimental). Tests have shown no performance
//! overhead.
//!
//! Rng exists as a separate trait only so that users don't have to unwrap
//! the `Result<T, !>` type themselves.
//!
//! Note: this *only* considers the next_u32 member function.
//!
//! Thoughts: a common super-trait which is not object safe doesn't really help
//! anything(?). At the same time, it's no longer possible to make one version
//! implement the other, so IMO this is strictly worse than extends_CryptoRng2.
//! And don't forget, this also depends on an unstable language feature.
#![feature(never_type)]
// ——— traits ———
/// Fallible RNG abstraction: the error type is a parameter, so an
/// infallible RNG can use `!` (the never type) as its error.
trait RawRng<Error> {
    fn try_next_u32(&mut self) -> Result<u32, Error>;
}
/// Infallible RNG: `RawRng<!>` can never fail, so users get a plain u32
/// without unwrapping a Result themselves.
trait Rng: RawRng<!> {
    fn next_u32(&mut self) -> u32 {
        // `Result<u32, !>` has no Err value; `unwrap_or_else(|e| e)`
        // coerces the uninhabited error to u32 without a panic path.
        self.try_next_u32().unwrap_or_else(|e| e)
    }
}
#[derive(Debug)]
struct CryptoError;
// Object-safe alias: a crypto RNG is a RawRng that can fail.
type CryptoRng = RawRng<CryptoError>;
// ——— impls ———
// Every infallible Rng is trivially a RawRng<!>: wrap the value in Ok.
impl<R: Rng+?Sized> RawRng<!> for R {
    fn try_next_u32(&mut self) -> Result<u32, !> {
        Ok(self.next_u32())
    }
}
// Required for `as_rng(&mut rng)` and `as_rng_ref` definition.
impl<'a, CR: RawRng<CryptoError>+?Sized> RawRng<CryptoError> for &'a mut CR {
    fn try_next_u32(&mut self) -> Result<u32, CryptoError> {
        // Delegate through the mutable reference.
        (*self).try_next_u32()
    }
}
// ——— adaptor ———
// Given `rng` of type `T` where `T: CryptoRng`, this can consume
// `rng` (`as_rng(rng)`)
fn as_rng<CR: RawRng<CryptoError>>(rng: CR) -> AsRng<CR> {
    AsRng { rng }
}
// Wrapper that lets a fallible RNG stand in where an infallible `Rng` is
// expected; failures surface as panics in `next_u32`.
struct AsRng<CR: RawRng<CryptoError>+?Sized> {
    rng: CR
}
impl<CR: RawRng<CryptoError>+?Sized> Rng for AsRng<CR> {
    fn next_u32(&mut self) -> u32 {
        // Panics if the underlying crypto RNG reports an error.
        self.rng.try_next_u32().unwrap()
    }
}
// ——— test RNGs ———
// A non-crypto Rng: always yields its seed value.
#[derive(Debug)]
struct TestRng(u32);
impl Rng for TestRng {
    fn next_u32(&mut self) -> u32 {
        self.0
    }
}
// A CryptoRng: fallible by signature, though this stub never fails.
#[derive(Debug)]
struct TestCRng(u32);
impl RawRng<CryptoError> for TestCRng {
    fn try_next_u32(&mut self) -> Result<u32, CryptoError> {
        Ok(self.0)
    }
}
// ——— usage ———
/// Exercises both traits with static and dynamic dispatch to demonstrate
/// what the unification supports (and what it cannot — see comments).
fn main() {
    let mut t = TestRng(13);
    let mut c = TestCRng(42);
    println!("t: {:?} impls Rng", t);
    println!("c: {:?} impls CryptoRng", c);
    {
        // Do both traits support both functions via static dispatch?
        println!("t, static dispatch, using CryptoRng: {:?}", t.try_next_u32());
        println!("t, static dispatch, using Rng: {:?}", t.next_u32());
        println!("c, static dispatch, using CryptoRng: {:?}", c.try_next_u32());
        println!("c, static dispatch, using Rng: {:?}", as_rng(&mut c).next_u32());
    }
    {
        // Can both types be used via CryptoRng with dynamic dispatch?
        let cr = &mut c as &mut CryptoRng;
        println!("c, dynamic dispatch, using CryptoRng: {:?}", cr.try_next_u32());
        /* TODO: this would also need an adaptor, and it would be problematic
        (impl RawRng<!> for &mut RawRng<!> would conflict).
        let tr = &mut t as &mut CryptoRng;
        println!("t, dynamic dispatch, using CryptoRng: {:?}", tr.try_next_u32());
        */
    }
    {
        // Can both types be used via Rng with dynamic dispatch?
        let mut cr = as_rng(&mut c as &mut CryptoRng);
        let tr = &mut t as &mut Rng;
        println!("c, dynamic dispatch, using Rng: {:?}", cr.next_u32());
        println!("t, dynamic dispatch, using Rng: {:?}", tr.next_u32());
    }
}
|
use proconio::input;
use proconio::marker::Chars;
/// Reads an h×w character grid and prints the Manhattan distance between
/// the two cells found by the forward and backward scans below.
fn main() {
    input! {
        h: usize,
        w: usize,
        s: [Chars; h],
    };
    // Locate an 'o' scanning top-to-bottom, left-to-right.
    // NOTE(review): `break` only exits the inner loop, so if several rows
    // contain 'o', (si, sj) holds the first 'o' of the LAST such row. With
    // exactly two 'o' cells in the grid the two scans still find the two
    // endpoints — confirm the input guarantees exactly two.
    let mut si = 0;
    let mut sj = 0;
    for i in 0..h {
        for j in 0..w {
            if s[i][j] == 'o' {
                si = i;
                sj = j;
                break;
            }
        }
    }
    // Locate the other 'o' scanning bottom-to-top, right-to-left.
    let mut gi = 0;
    let mut gj = 0;
    for i in (0..h).rev() {
        for j in (0..w).rev() {
            if s[i][j] == 'o' {
                gi = i;
                gj = j;
                break;
            }
        }
    }
    // Manhattan distance, written max-minus-min to stay within usize.
    let ans = (si.max(gi) - si.min(gi)) + (sj.max(gj) - sj.min(gj));
    println!("{}", ans);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.