text stringlengths 8 4.13M |
|---|
use crate::{
openapi::{
generate::{
controller_info::ControllerInfo,
crate_syn_browser::{Browser, Item, ItemKind, Module, UseScope},
type_info::TypeInfo,
utils::{find_macro_attribute_flag, find_macro_attribute_named_value, get_serde_field},
},
schema::{
OpenApi, OpenApiContent, OpenApiMimeType, OpenApiObjectType, OpenApiParameter, OpenApiParameterLocation, OpenApiPath, OpenApiPathMethod,
OpenApiRequestBody, OpenApiResponse, OpenApiSchema, OpenApiType,
},
},
Command, CommandResult,
};
use http::StatusCode;
use serde_derive::Deserialize;
use std::{
cell::RefCell,
collections::{BTreeMap, HashMap, HashSet},
fs::File as FsFile,
io::Read,
path::PathBuf,
time::Instant,
};
use structopt::StructOpt;
use syn::{Attribute, Fields, Item as SynItem, ItemEnum, ItemStruct, Lit, Meta, NestedMeta, Signature};
mod controller_info;
mod crate_syn_browser;
mod handler_info;
mod response_info;
mod route_info;
mod type_info;
mod utils;
// Builds the user-facing error message shown when a file expected at the
// project root (e.g. `Cargo.toml`) cannot be found or parsed.
macro_rules! print_project_path_error {
    ($file:expr, $project_path:expr) => {{
        // Fall back to the Debug representation for non-UTF-8 paths.
        let project_path = $project_path.to_str().map(|s| s.to_owned()).unwrap_or_else(|| format!("{:?}", $project_path));
        format!(
            "Unable to find `{}` in project root `{}`.
Make sure that you are either running this command from your project's root,
or that the argument --project-path (-p) point to the project's root.
You can see help with the --help flag.",
            $file, project_path
        )
    }};
}
/// Generate OpenAPI v3 from a Saphir application.
///
/// See: https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md
#[derive(StructOpt, Debug, Default)]
pub(crate) struct GenArgs {
    /// (Optional) Limit doc generation to the URIs under this scope.
    ///
    /// For example, if you pass `/api/v1` and that your Saphir server had
    /// handlers for the following routes :
    /// - GET /
    /// - GET /about
    /// - GET /api/v1/user
    /// - POST /api/v1/user
    /// , the generated doc would contain only the `/api/v1/user` endpoints.
    #[structopt(short = "s", long = "scope", default_value = "/", verbatim_doc_comment)]
    scope: String,
    /// (Optional) path to the Saphir server's root
    #[structopt(parse(from_os_str), short = "p", long = "project-path", default_value = ".")]
    project_path: PathBuf,
    /// (Optional) If running on a workspace, name of the package of the lucid
    /// server for which we want to build openapi doc
    #[structopt(long = "package")]
    package_name: Option<String>,
    /// (Optional) Path to the `.cargo` directory. By default, read from the
    /// current executable's environment, which works when
    /// running this command as a cargo sub-command.
    #[structopt(parse(from_os_str), long = "cargo-path", default_value = "~/.cargo")]
    cargo_path: PathBuf,
    /// (Optional) Resulting output path. Either the path to the resulting yaml
    /// file, or a dir, which would then contain a openapi.yaml
    #[structopt(parse(from_os_str), default_value = ".")]
    output_file: PathBuf,
}
/// State shared across a single documentation-generation run.
pub(crate) struct Gen {
    /// Parsed CLI arguments.
    pub args: <Gen as Command>::Args,
    /// The OpenAPI document being built, serialized to YAML at the end of the run.
    pub doc: OpenApi,
    /// Operation ids already attributed, used to deduplicate handler names.
    pub operation_ids: RefCell<HashSet<String>>,
}
impl Command for Gen {
    type Args = GenArgs;

    /// Initialize the generator with CLI args and an empty OpenAPI 3.0.3 document.
    fn new(args: Self::Args) -> Self {
        let mut doc = OpenApi::default();
        doc.openapi_version = "3.0.3".to_string();
        Self {
            args,
            doc,
            operation_ids: RefCell::new(Default::default()),
        }
    }

    /// Run the full pipeline: read Cargo.toml, browse the crate's syntax tree,
    /// extract the controllers, fill the document, write it to disk.
    fn run<'b>(mut self) -> CommandResult {
        let now = Instant::now();
        self.read_project_cargo_toml()?;
        let browser = Browser::new(self.args.project_path.clone()).map_err(|e| format!("{}", e))?;
        // SAFETY-NOTE(review): launders the local `browser` into a reference with
        // an unbound lifetime so the self-referential borrows below compile. Only
        // sound because `browser` lives until the end of this function and is
        // never moved or mutated afterwards.
        // FIXME: Definitely find a better way to handle the lifetime issue here
        let browser = unsafe { &*(&browser as *const Browser) };
        let entrypoint = self.get_crate_entrypoint(self.args.package_name.as_ref(), browser)?;
        let controllers = self.load_controllers(entrypoint)?;
        self.fill_openapi_with_controllers(entrypoint, controllers)?;
        let file = self.write_doc_file()?;
        // Fixed typo in the user-facing message ("Succesfully" -> "Successfully").
        println!("Successfully created `{}` in {}ms", file, now.elapsed().as_millis());
        Ok(())
    }
}
impl Gen {
    /// Resolve the module that is the crate's entrypoint (the bin target's root module).
    ///
    /// With `--package`, the package is looked up by name (panics if absent);
    /// otherwise the workspace must contain exactly one package.
    fn get_crate_entrypoint<'s, 'r, 'b: 'r>(&'s self, package_name: Option<&'r String>, browser: &'b Browser<'b>) -> Result<&'b Module<'b>, String> {
        let main_package = if let Some(main_name) = package_name {
            let package = browser.package_by_name(main_name);
            package.unwrap_or_else(|| panic!("Crate does not include a member named `{}`.", main_name))
        } else if browser.packages().len() == 1 {
            browser.packages().first().expect("Condition ensure exactly 1 workspace member")
        } else {
            return Err("This crate is a workspace with multiple packages!
Please select the package for which you want to generate the openapi documentation
by using the --package flag."
                .to_string());
        };
        let bin_target = main_package.bin_target().expect("Crate does not have a Binary target.");
        let entrypoint = bin_target.entrypoint().map_err(|e| e.to_string())?;
        Ok(entrypoint)
    }

    /// Serialize `self.doc` as YAML to the output path and return that path.
    ///
    /// A directory argument becomes `<dir>/openapi.yaml`; a missing extension
    /// becomes `.yaml`; any extension other than yaml/yml is rejected.
    fn write_doc_file(&self) -> Result<String, String> {
        let mut path = self.args.output_file.clone();
        if path.is_dir() {
            path = path.join("openapi.yaml");
        }
        match path.extension() {
            None => path = path.with_extension("yaml"),
            Some(ext) => {
                if ext.to_str() != Some("yaml") && ext.to_str() != Some("yml") {
                    return Err("output must be a yaml file.".to_string());
                }
            }
        }
        let f = FsFile::create(&path).map_err(|_| format!("Unable to create file `{:?}`", &path))?;
        serde_yaml::to_writer(f, &self.doc).map_err(|_| format!("Unable to write to `{:?}`", path))?;
        Ok(path.to_str().unwrap_or_default().to_string())
    }

    /// Read the project's `Cargo.toml` and seed the document's title/version
    /// from the package name/version.
    fn read_project_cargo_toml(&mut self) -> CommandResult {
        #[derive(Deserialize)]
        struct Cargo {
            pub package: Package,
        }
        #[derive(Deserialize)]
        struct Package {
            pub name: String,
            pub version: String,
        }
        let cargo_path = self.args.project_path.clone().join("Cargo.toml");
        let mut f = FsFile::open(&cargo_path).map_err(|_| print_project_path_error!("Cargo.toml", self.args.project_path))?;
        let mut buffer = String::new();
        f.read_to_string(&mut buffer)
            .map_err(|_| print_project_path_error!("Cargo.toml", self.args.project_path))?;
        let cargo: Cargo = toml::from_str(buffer.as_str()).map_err(|_| print_project_path_error!("Cargo.toml", self.args.project_path))?;
        self.doc.info.title = cargo.package.name;
        self.doc.info.version = cargo.package.version;
        Ok(())
    }

    /// Walk every item reachable from the entrypoint and collect controller
    /// information from `impl` blocks; fails on the first extraction error.
    fn load_controllers<'b>(&self, entrypoint: &'b Module<'b>) -> Result<Vec<ControllerInfo>, String> {
        let controllers: Vec<ControllerInfo> = entrypoint
            .all_items()
            .map_err(|e| format!("{}", e))?
            .iter()
            .filter_map(|i| match i.kind() {
                ItemKind::Impl(im) => self.extract_controller_info(im).transpose(),
                _ => None,
            })
            .collect::<Result<Vec<_>, _>>()?;
        Ok(controllers)
    }

    /// Translate every controller/handler/route into OpenAPI path entries on `self.doc`.
    fn fill_openapi_with_controllers<'b>(&mut self, entrypoint: &'b Module<'b>, controllers: Vec<ControllerInfo>) -> CommandResult {
        for controller in controllers {
            for handler in controller.handlers {
                for route in handler.routes {
                    let path = route.uri;
                    let method = route.method;
                    let description = if handler.use_cookies {
                        Some("NOTE: This request consume cookies.".to_string())
                    } else {
                        None
                    };
                    let mut data = OpenApiPath {
                        parameters: handler.parameters.clone(),
                        description,
                        operation_id: route.operation_id,
                        ..Default::default()
                    };
                    if let Some(body_info) = &handler.body_info {
                        // GET requests cannot carry a body: surface the body's
                        // fields as query parameters instead.
                        if method == OpenApiPathMethod::Get {
                            let parameters = self.get_open_api_parameters_from_body_info(entrypoint, body_info);
                            data.parameters.extend(parameters);
                        } else {
                            data.request_body = self.get_open_api_body_param(entrypoint, body_info);
                        }
                    }
                    for response in &handler.responses {
                        let mut content = HashMap::new();
                        // Prefer an explicitly-declared openapi type; otherwise derive
                        // one from the (serializable) Rust return type.
                        if let Some(openapi_type) = response.openapi_type.clone().or_else(|| {
                            response
                                .type_info
                                .as_ref()
                                .filter(|t| t.is_type_serializable)
                                .map(|t| self.get_open_api_type_from_type_info(entrypoint, &t))
                                .flatten()
                        }) {
                            content.insert(
                                response.mime.clone(),
                                OpenApiContent {
                                    schema: OpenApiSchema::Inline(openapi_type),
                                },
                            );
                        }
                        let status = StatusCode::from_u16(response.code);
                        data.responses.insert(
                            response.code,
                            OpenApiResponse {
                                // Fall back to the status code's canonical reason
                                // (e.g. "Not Found") when no type name is available.
                                description: response
                                    .type_info
                                    .as_ref()
                                    .map(|t| t.name.clone())
                                    .unwrap_or_else(|| status.map(|s| s.canonical_reason()).ok().flatten().map(|s| s.to_owned()).unwrap_or_default()),
                                content,
                            },
                        );
                    }
                    if !self.doc.paths.contains_key(path.as_str()) {
                        self.doc.paths.insert(path.clone(), BTreeMap::new());
                    }
                    let path_map = self.doc.paths.get_mut(path.as_str()).expect("Should work because of previous statement");
                    path_map.insert(method, data);
                }
            }
        }
        Ok(())
    }

    /// Build the request-body description for a non-GET handler.
    /// Falls back to an anonymous object when the type is not deserializable.
    fn get_open_api_body_param<'b>(&self, entrypoint: &'b Module<'b>, body_info: &BodyParamInfo) -> Option<OpenApiRequestBody> {
        let t = if body_info.type_info.is_type_deserializable {
            self.get_open_api_type_from_type_info(entrypoint, &body_info.type_info)?
        } else {
            OpenApiType::anonymous_input_object()
        };
        let mut content: HashMap<OpenApiMimeType, OpenApiContent> = HashMap::new();
        content.insert(
            body_info.openapi_type.clone(),
            OpenApiContent {
                schema: OpenApiSchema::Inline(t),
            },
        );
        Some(OpenApiRequestBody {
            description: body_info.type_info.name.clone(),
            required: !body_info.type_info.is_optional,
            content,
        })
    }

    /// Flatten a body type's object fields into query parameters (used for GET routes).
    /// Non-object or non-deserializable bodies yield no parameters.
    fn get_open_api_parameters_from_body_info<'b>(&self, entrypoint: &'b Module<'b>, body_info: &BodyParamInfo) -> Vec<OpenApiParameter> {
        let mut parameters = Vec::new();
        if let Some(t) = if body_info.type_info.is_type_deserializable {
            self.get_open_api_type_from_type_info(entrypoint, &body_info.type_info)
        } else {
            None
        } {
            if let OpenApiType::Object {
                object: OpenApiObjectType::Object { properties, required },
            } = t
            {
                for (name, openapi_type) in &properties {
                    parameters.push(OpenApiParameter {
                        name: name.clone(),
                        location: OpenApiParameterLocation::Query,
                        required: required.contains(name),
                        schema: OpenApiSchema::Inline(openapi_type.as_ref().clone()),
                        ..Default::default()
                    });
                }
            }
        }
        parameters
    }

    /// Resolve a Rust type (by use-path and name) to an OpenAPI type.
    /// Returns None when the type's definition cannot be located in the crate.
    fn get_open_api_type_from_type_info<'b>(&self, scope: &'b dyn UseScope<'b>, type_info: &TypeInfo) -> Option<OpenApiType> {
        let type_path = type_info.type_path.as_ref()?;
        let type_mod = scope.target().module_by_use_path(type_path).ok().flatten()?;
        let type_impl = type_mod.find_type_definition(type_info.name.as_str()).ok().flatten()?;
        match type_impl.item {
            SynItem::Struct(s) => self.get_open_api_type_from_struct(type_impl, &s),
            SynItem::Enum(e) => self.get_open_api_type_from_enum(type_impl, e),
            // NOTE(review): relies on find_type_definition only ever returning
            // struct or enum items — confirm that invariant holds.
            _ => unreachable!(),
        }
    }

    /// Map a struct definition to an OpenAPI object, honouring serde renames
    /// and `#[serde(default)]` for the required-field list.
    fn get_open_api_type_from_struct<'b>(&self, item: &'b Item<'b>, s: &ItemStruct) -> Option<OpenApiType> {
        let mut properties = HashMap::new();
        let mut required = Vec::new();
        for field in &s.fields {
            if let Some(field_name) = field.ident.as_ref().map(|i| get_serde_field(i.to_string(), &field.attrs, &s.attrs)).flatten() {
                if let Some(field_type_info) = TypeInfo::new(item.scope, &field.ty) {
                    let field_type = self
                        .get_open_api_type_from_type_info(item.scope, &field_type_info)
                        .unwrap_or_else(|| OpenApiType::from_rust_type_str(field_type_info.name.as_str()));
                    // A field is required unless it is Option<_> or has a serde default.
                    if !field_type_info.is_optional
                        && !find_macro_attribute_flag(&field.attrs, "serde", "default")
                        && find_macro_attribute_named_value(&field.attrs, "serde", "default").is_none()
                    {
                        required.push(field_name.clone());
                    }
                    properties.insert(field_name, Box::new(field_type));
                } else {
                    println!("Unsupported type : {:?}", &field.ty);
                }
            }
        }
        if !properties.is_empty() {
            Some(OpenApiType::object(properties, required))
        } else {
            Some(OpenApiType::anonymous_input_object())
        }
    }

    /// Map an enum definition to an OpenAPI type. Unit-only enums become string
    /// enums; anything else currently degrades to an anonymous object.
    fn get_open_api_type_from_enum<'b>(&self, _item: &Item<'b>, e: &'b ItemEnum) -> Option<OpenApiType> {
        if e.variants.iter().all(|v| v.fields == Fields::Unit) {
            let mut values: Vec<String> = Vec::new();
            for variant in &e.variants {
                if let Some(name) = get_serde_field(variant.ident.to_string(), &variant.attrs, &e.attrs) {
                    values.push(name);
                }
            }
            return Some(OpenApiType::enums(values));
        }
        // TODO: properly support tuple and struct enum variants.
        // this will require the item param
        Some(OpenApiType::anonymous_input_object())
    }

    /// Derive a unique operation id from a handler's method name, suffixing
    /// `_2`, `_3`, … on collisions (tracked in `self.operation_ids`).
    fn handler_operation_id_from_sig(&self, sig: &Signature) -> String {
        let method_name = sig.ident.to_string();
        let mut operation_id = method_name.clone();
        let mut i = 2;
        let mut operation_ids = self.operation_ids.borrow_mut();
        while operation_ids.contains(operation_id.as_str()) {
            operation_id = format!("{}_{}", &method_name, &i);
            i += 1;
        }
        operation_ids.insert(operation_id.clone());
        operation_id
    }

    /// Interpret a method attribute (e.g. `#[get(...)]`) as an HTTP method.
    fn handler_method_from_attr(&self, attr: &Attribute) -> Option<OpenApiPathMethod> {
        let ident = attr.path.get_ident()?;
        OpenApiPathMethod::from_str(ident.to_string().as_str())
    }

    /// Extract the route path from a macro attribute's first string literal,
    /// normalizing `<param>` segments to `{param}` and returning the list of
    /// path parameter names found.
    fn handler_path_from_attr(&self, attr: &Attribute) -> Option<(String, Vec<String>)> {
        if let Ok(Meta::List(meta)) = attr.parse_meta() {
            if let Some(NestedMeta::Lit(Lit::Str(l))) = meta.nested.first() {
                let mut chars: Vec<char> = l.value().chars().collect();
                let mut params: Vec<String> = Vec::new();
                let mut i = 0;
                while i < chars.len() {
                    if chars[i] == '<' || chars[i] == '{' {
                        chars[i] = '{';
                        let start = i;
                        for j in start..chars.len() {
                            if chars[j] == '>' || chars[j] == '}' {
                                chars[j] = '}';
                                params.push((&chars[(i + 1)..j]).iter().collect());
                                i = j;
                                break;
                            }
                        }
                    }
                    i += 1;
                }
                return Some((chars.into_iter().collect(), params));
            }
        }
        None
    }

    /// Public wrapper over `_openapitype_from_raw` that discards the consumed length.
    pub(crate) fn openapitype_from_raw<'b>(&self, scope: &'b dyn UseScope<'b>, raw: &str) -> Option<OpenApiType> {
        self._openapitype_from_raw(scope, raw).map(|(t, _)| t)
    }

    /// Hand-rolled recursive parser for the inline schema syntax:
    /// `{ key: Type, nested: { ... } }`, `[Type]`, or a bare Rust type path.
    /// Returns the parsed type plus the number of characters consumed.
    ///
    /// NOTE(review): the `s`/`e` index bookkeeping is intricate and
    /// order-sensitive; code intentionally left byte-identical.
    fn _openapitype_from_raw<'b>(&self, scope: &'b dyn UseScope<'b>, raw: &str) -> Option<(OpenApiType, usize)> {
        let raw = raw.trim();
        let len = raw.len();
        let mut chars = raw.chars();
        let first_char = chars.next()?;
        match first_char {
            '{' => {
                let mut cur_key: Option<&str> = None;
                let mut properties = HashMap::new();
                let mut required = Vec::new();
                let mut s = 1;
                let mut e = 1;
                for i in 1..len {
                    let char = chars.next()?;
                    // `e` may have been advanced past `i` by a nested parse; skip those chars.
                    if e > i {
                        continue;
                    } else {
                        e = i;
                    }
                    match char {
                        ':' => {
                            let key = &raw[s..e].trim();
                            cur_key = Some(key);
                            s = e + 1;
                        }
                        '{' | '[' => {
                            // Nested object/array value: recurse and jump past it.
                            let (t, end) = self._openapitype_from_raw(scope, &raw[s..(len - 1)])?;
                            e += end + 1;
                            if let Some(key) = cur_key {
                                properties.insert(key.to_string(), Box::new(t));
                                required.push(key.to_string());
                                s = e + 1;
                                cur_key = None;
                            }
                        }
                        ',' | '}' => {
                            if let Some(key) = cur_key {
                                let value = &raw[s..e].trim();
                                let (t, _) = self._openapitype_from_raw(scope, value)?;
                                properties.insert(key.to_string(), Box::new(t));
                                required.push(key.to_string());
                            }
                            s = e + 1;
                            if char == '}' {
                                return if !properties.is_empty() {
                                    Some((OpenApiType::object(properties, required), e))
                                } else {
                                    None
                                };
                            }
                        }
                        _ => {}
                    }
                }
                None
            }
            '[' => {
                if chars.last()? != ']' {
                    return None;
                }
                self._openapitype_from_raw(scope, &raw[1..(len - 1)])
            }
            // Bare type path: resolve through the crate browser if serializable,
            // otherwise fall back to the primitive-type mapping.
            _ => Some((
                syn::parse_str::<syn::Path>(raw)
                    .ok()
                    .map(|p| TypeInfo::new_from_path(scope, &p))
                    .flatten()
                    .as_ref()
                    .filter(|t| t.is_type_serializable)
                    .map(|t| self.get_open_api_type_from_type_info(scope, t))
                    .flatten()
                    .unwrap_or_else(|| OpenApiType::from_rust_type_str(raw)),
                len,
            )),
        }
    }
}
/// A handler's request-body parameter: its mime type and the Rust type it maps to.
#[derive(Clone, Debug)]
pub(crate) struct BodyParamInfo {
    // Mime type of the request body (e.g. json, form).
    openapi_type: OpenApiMimeType,
    // Resolved Rust type information for the body.
    type_info: TypeInfo,
}
/// Aggregated parameter information extracted from a route's handler signature.
#[derive(Clone, Debug, Default)]
pub(crate) struct RouteParametersInfo {
    // Path/query parameters of the route.
    parameters: Vec<OpenApiParameter>,
    // Whether the handler reads cookies (surfaces a note in the generated doc).
    has_cookies_param: bool,
    // Request body, when the handler takes one.
    body_info: Option<BodyParamInfo>,
}
|
/// Supported virtual keys.
///
/// Virtual keys represent the intended meaning of the key, and have no relation to where
/// the key physically is on the keyboard. Use virtual key where the meaning of the key
/// is most important (textual input). When meaning matters less, but physical location
/// is more important (WASD-like control schemes) use [`crate::Scancode`].
///
/// NOTE(review): variants use implicit `repr(u16)` discriminants, so their order
/// defines their numeric values — do not reorder without an explicit mapping.
#[repr(u16)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum VirtualKey {
    Unknown,
    Escape,
    One,
    Two,
    Three,
    Four,
    Five,
    Six,
    Seven,
    Eight,
    Nine,
    Zero,
    Minus,
    Equal,
    Backspace,
    Tab,
    Q,
    W,
    E,
    R,
    T,
    Y,
    U,
    I,
    O,
    P,
    LeftBrace,
    RightBrace,
    Enter,
    LeftControl,
    A,
    // NOTE(review): order here is A, D, S while Linux KEY_* codes run A, S, D —
    // confirm the intended discriminant mapping before relying on numeric values.
    D,
    S,
    F,
    G,
    H,
    J,
    K,
    L,
    Semicolon,
    // NOTE(review): typo for `Apostrophe`; renaming is a breaking API change,
    // so it is kept as-is — fix in the next breaking release.
    Apostrope,
    Grave,
    LeftShift,
    Backslash,
    Z,
    X,
    C,
    V,
    B,
    N,
    M,
    Comma,
    Dot,
    Slash,
    RightShift,
    // NOTE(review): typo for `KeyPadAsterisk`; kept for API compatibility.
    KeyPadAsterick,
    LeftAlt,
    Space,
    CapsLock,
    F1,
    F2,
    F3,
    F4,
    F5,
    F6,
    F7,
    F8,
    F9,
    F10,
    NumLock,
    ScrollLock,
    KeyPad7,
    KeyPad8,
    KeyPad9,
    KeyPadMinus,
    KeyPad4,
    KeyPad5,
    KeyPad6,
    KeyPadPlus,
    KeyPad1,
    KeyPad2,
    KeyPad3,
    KeyPad0,
    KeyPadDot,
    NonUsBackslash,
    F11,
    F12,
    KeyPadEnter,
    RightAlt,
    Home,
    Up,
    PageUp,
    Left,
    Right,
    End,
    Down,
    PageDown,
    Insert,
    Delete,
    KeyPadEqual,
    KeyPadPlusMinus,
}
|
#![cfg(feature = "serde")]
use bitarray::BitArray;
#[test]
fn bincode_serde_json_cycle() {
    // Round-trip a Vec<BitArray<5>> through bincode bytes, then through a JSON
    // string, and verify the value survives both serde formats unchanged.
    let original = vec![BitArray::new([0, 1, 2, 3, 255])];
    let mut encoded = Vec::new();
    bincode::serialize_into(&mut encoded, &original).expect("failed to serialize with bincode");
    let decoded: Vec<BitArray<5>> = bincode::deserialize_from(&encoded[..]).expect("failed to deserialize with bincode");
    let json = serde_json::to_string(&decoded).expect("failed to serialize with serde_json");
    let roundtripped: Vec<BitArray<5>> = serde_json::from_str(&json).expect("failed to deserialize with serde_json");
    assert_eq!(original, roundtripped);
}
|
// Auto-generated (svd2rust-style) reader types for the SR status register.
#[doc = "Register `SR` reader"]
pub type R = crate::R<SR_SPEC>;
#[doc = "Field `IFEM` reader - Input FIFO empty"]
pub type IFEM_R = crate::BitReader;
#[doc = "Field `IFNF` reader - Input FIFO not full"]
pub type IFNF_R = crate::BitReader;
#[doc = "Field `OFNE` reader - Output FIFO not empty"]
pub type OFNE_R = crate::BitReader;
#[doc = "Field `OFFU` reader - Output FIFO full"]
pub type OFFU_R = crate::BitReader;
#[doc = "Field `BUSY` reader - Busy bit"]
pub type BUSY_R = crate::BitReader;
impl R {
    // Each accessor extracts a single flag bit from the raw register value
    // by masking, rather than shifting then masking — same result.
    #[doc = "Bit 0 - Input FIFO empty"]
    #[inline(always)]
    pub fn ifem(&self) -> IFEM_R {
        IFEM_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Input FIFO not full"]
    #[inline(always)]
    pub fn ifnf(&self) -> IFNF_R {
        IFNF_R::new((self.bits & 0x02) != 0)
    }
    #[doc = "Bit 2 - Output FIFO not empty"]
    #[inline(always)]
    pub fn ofne(&self) -> OFNE_R {
        OFNE_R::new((self.bits & 0x04) != 0)
    }
    #[doc = "Bit 3 - Output FIFO full"]
    #[inline(always)]
    pub fn offu(&self) -> OFFU_R {
        OFFU_R::new((self.bits & 0x08) != 0)
    }
    #[doc = "Bit 4 - Busy bit"]
    #[inline(always)]
    pub fn busy(&self) -> BUSY_R {
        BUSY_R::new((self.bits & 0x10) != 0)
    }
}
// Register specification marker type: a 32-bit, read-only register
// that resets to 0x03 (IFEM | IFNF set).
#[doc = "status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SR_SPEC;
impl crate::RegisterSpec for SR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`sr::R`](R) reader structure"]
impl crate::Readable for SR_SPEC {}
#[doc = "`reset()` method sets SR to value 0x03"]
impl crate::Resettable for SR_SPEC {
    const RESET_VALUE: Self::Ux = 0x03;
}
|
/*
* Binary array set (Rust)
*
* Copyright (c) 2022 Project Nayuki. (MIT License)
* https://www.nayuki.io/page/binary-array-set
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
* - The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* - The Software is provided "as is", without warranty of any kind, express or
* implied, including but not limited to the warranties of merchantability,
* fitness for a particular purpose and noninfringement. In no event shall the
* authors or copyright holders be liable for any claim, damages or other
* liability, whether in an action of contract, tort or otherwise, arising from,
* out of or in connection with the Software or the use or other dealings in the
* Software.
*/
use std;
use std::convert::TryFrom;
#[derive(Clone,Default)]
pub struct BinaryArraySet<E> {
    // Each values[i]'s length is either 0 or 2^i, with elements in ascending order
    values: Vec<Vec<E>>,
    size: usize,
}
impl<E: Ord> BinaryArraySet<E> {
    /// Creates an empty set.
    pub fn new() -> Self {
        Self {
            values: Vec::new(),
            size: 0,
        }
    }
    /// Whether the set holds no elements. Runs in O(1) time.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Number of elements currently stored. Runs in O(1) time.
    pub fn len(&self) -> usize {
        self.size
    }
    /// Removes every element.
    pub fn clear(&mut self) {
        self.size = 0;
        self.values.clear();
    }
    /// Tests membership by binary-searching each sorted sub-vector.
    /// Runs in O((log n)^2) time.
    pub fn contains(&self, val: &E) -> bool {
        for vals in &self.values {
            if vals.binary_search(val).is_ok() {
                return true;
            }
        }
        false
    }
    /// Inserts `val` unless already present; returns whether it was added.
    /// Runs in average-case O((log n)^2) time, worst-case O(n) time.
    pub fn insert(&mut self, val: E) -> bool {
        // Checking for duplicates is expensive
        if self.contains(&val) {
            false
        } else {
            self.insert_unique(val);
            true
        }
    }
    /// Inserts `val` assuming it is absent, using a binary-carry chain of
    /// merges. Runs in amortized O(1) time, worst-case O(n) time.
    pub fn insert_unique(&mut self, val: E) {
        self.size = self.size.checked_add(1).expect("Maximum size reached");
        let mut carry: Vec<E> = vec![val];
        for vals in self.values.iter_mut() {
            if vals.is_empty() {
                // Empty slot: drop the carry here and stop.
                *vals = carry;
                return;
            }
            // Merge two sorted arrays of equal length into the next carry.
            assert_eq!(vals.len(), carry.len());
            carry = Self::merge_vecs(vals, carry);
        }
        self.values.push(carry);
    }
    /// Asserts all internal invariants (lengths, ordering, size counter).
    pub fn check_structure(&self) {
        let mut total: usize = 0;
        for (i, vals) in self.values.iter().enumerate() {
            let len = vals.len();
            assert!(len == 0 || len == 1usize.checked_shl(u32::try_from(i).unwrap()).unwrap(), "Invalid sub-vector length");
            for pair in vals.windows(2) {
                assert!(pair[0] < pair[1], "Invalid ordering of elements in vector");
            }
            total = total.checked_add(len).unwrap();
        }
        assert_eq!(total, self.size, "Size mismatch between counter and sub-vectors");
    }
    // (Private) Assuming that xs and ys are both in ascending order, this
    // moves all their elements into a new sorted vector and returns it.
    // Ties take the element from xs first (stable with respect to xs).
    fn merge_vecs(xs: &mut Vec<E>, ys: Vec<E>) -> Vec<E> {
        let mut merged = Vec::<E>::with_capacity(xs.len().checked_add(ys.len()).unwrap());
        let mut xi = xs.drain(..).peekable();
        let mut yi = ys.into_iter().peekable();
        loop {
            let take_x = match (xi.peek(), yi.peek()) {
                (None, None) => break,
                (Some(_), None) => true,
                (None, Some(_)) => false,
                (Some(x), Some(y)) => x <= y,
            };
            if take_x {
                merged.push(xi.next().unwrap());
            } else {
                merged.push(yi.next().unwrap());
            }
        }
        merged
    }
}
/*---- Helper structs ----*/
impl<E> IntoIterator for BinaryArraySet<E> {
    type Item = E;
    type IntoIter = MoveIter<E>;
    /// Consuming iteration over all elements (order unspecified across sub-vectors).
    fn into_iter(self) -> Self::IntoIter {
        MoveIter::<E>::new(self)
    }
}
/// Owning iterator over a consumed `BinaryArraySet`.
pub struct MoveIter<E> {
    // Remaining sub-vectors, not yet started.
    values: std::vec::IntoIter<Vec<E>>,
    // Iterator over the current sub-vector.
    vals: std::vec::IntoIter<E>,
    // Elements remaining, kept exact for size_hint/count.
    count: usize,
}
impl<E> MoveIter<E> {
    // Runs in O(1) time
    fn new(set: BinaryArraySet<E>) -> Self {
        Self {
            values: set.values.into_iter(),
            // Start with an empty inner iterator; `next()` advances to the first sub-vector.
            vals: Vec::<E>::new().into_iter(),
            count: set.size,
        }
    }
}
impl<E> Iterator for MoveIter<E> {
    type Item = E;
    // Runs in amortized O(1) time, worst-case O(log n) time
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let result = self.vals.next();
            if result.is_some() {
                self.count -= 1;
                return result;
            }
            // Current sub-vector exhausted; move on to the next (or finish).
            self.vals = self.values.next()?.into_iter();
        }
    }
    // Exact: `count` is decremented on every yielded element.
    fn size_hint(&self) -> (usize,Option<usize>) {
        (self.count, Some(self.count))
    }
    fn count(self) -> usize {
        self.count
    }
}
impl<'a, E> IntoIterator for &'a BinaryArraySet<E> {
    type Item = &'a E;
    type IntoIter = RefIter<'a, E>;
    /// Borrowing iteration over all elements (order unspecified across sub-vectors).
    fn into_iter(self) -> Self::IntoIter {
        // `self` is already `&'a BinaryArraySet<E>`; the previous `&self` and
        // `::<E>` turbofish were redundant (clippy::needless_borrow).
        RefIter::new(self)
    }
}
/// Borrowing iterator over a `BinaryArraySet`.
#[derive(Clone)]
pub struct RefIter<'a, E:'a> {
    // Remaining sub-vectors, not yet started.
    values: std::slice::Iter<'a, Vec<E>>,
    // Iterator over the current sub-vector; None once every sub-vector is consumed.
    vals: Option<std::slice::Iter<'a, E>>,
    // Elements remaining, kept exact for size_hint/count.
    count: usize,
}
impl<'a, E> RefIter<'a, E> {
    // Runs in O(log n) time
    fn new(set: &'a BinaryArraySet<E>) -> Self {
        let mut temp = set.values.iter();
        Self {
            // Pre-position on the first sub-vector (may be empty; `next()` skips it).
            vals: temp.next().map(|v| v.iter()),
            values: temp,
            count: set.size,
        }
    }
}
impl<'a, E> Iterator for RefIter<'a, E> {
    type Item = &'a E;
    // Runs in amortized O(1) time, worst-case O(log n) time
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // `None` here means every sub-vector has been consumed.
            let vals = self.vals.as_mut()?;
            if let result@Some(_) = vals.next() {
                self.count -= 1;
                return result;
            }
            // Current sub-vector exhausted; advance to the next one.
            self.vals = self.values.next().map(|v| v.iter());
        }
    }
    // Exact: `count` is decremented on every yielded element.
    fn size_hint(&self) -> (usize,Option<usize>) {
        (self.count, Some(self.count))
    }
    fn count(self) -> usize {
        self.count
    }
}
|
/// Build script: defines the `headless` cfg alias, active whenever the
/// `with_graphics` feature is disabled.
fn main() {
    cfg_aliases::cfg_aliases! {
        headless: { not(feature = "with_graphics") }
    }
}
|
#![feature(prelude_import)]
#![no_std]
#[prelude_import]
use std::prelude::v1::*;
#[macro_use]
extern crate std as std;
extern crate proc_macro;
use proc_macro::TokenStream;
/// Derive macro that expands to nothing (no-op fixture).
#[proc_macro_derive(Nothing)]
pub fn nothing_derive(_input: TokenStream) -> TokenStream {
    TokenStream::new()
}
/// Attribute macro that returns the annotated item unchanged.
#[proc_macro_attribute]
pub fn nothing(_attr: TokenStream, item: TokenStream) -> TokenStream {
    item
}
/// Function-like macro that echoes its input tokens unchanged.
#[proc_macro]
pub fn other_nothing(input: TokenStream) -> TokenStream {
    input
}
// Hand-written equivalent of the registration table the compiler normally
// generates for a proc-macro crate. Relies on the unstable
// `#[rustc_proc_macro_decls]` attribute and `proc_macro::bridge` internals.
pub mod decls {
    extern crate proc_macro;
    #[rustc_proc_macro_decls]
    pub static _DECLS: &[proc_macro::bridge::client::ProcMacro] = &[
        proc_macro::bridge::client::ProcMacro::custom_derive("Nothing", &[], crate::nothing_derive),
        proc_macro::bridge::client::ProcMacro::attr("nothing", crate::nothing),
        proc_macro::bridge::client::ProcMacro::bang("other_nothing", crate::other_nothing),
    ];
}
|
/*!
```rudra-poc
[target]
crate = "stackvector"
version = "1.0.8"
[report]
issue_date = 2021-02-19
issue_url = "https://github.com/Alexhuszagh/rust-stackvector/issues/2"
rustsec_url = "https://github.com/RustSec/advisory-db/pull/847"
rustsec_id = "RUSTSEC-2021-0048"
[[bugs]]
analyzer = "UnsafeDataflow"
bug_class = "HigherOrderInvariant"
rudra_report_locations = ["src/lib.rs:896:5: 920:6"]
```
!*/
#![forbid(unsafe_code)]
use stackvector::StackVec;
// An iterator that reports an incorrect size hint.
// -----
/// Yields 19 copies of 0x41 ('A') while claiming, via `size_hint`, a lower
/// bound of 20 and an upper bound of 0. The inconsistency is deliberate: it
/// is the trigger for the stackvector unsoundness (RUSTSEC-2021-0048).
struct IncorrectIterator(u32);
impl IncorrectIterator {
    /// Starts the counter at zero; each `next()` increments before checking.
    pub fn new() -> Self {
        IncorrectIterator(0)
    }
}
impl Iterator for IncorrectIterator {
    type Item = u8;
    fn next(&mut self) -> Option<Self::Item> {
        self.0 += 1;
        // Parentheses around the condition were redundant (rustc `unused_parens`).
        if self.0 >= 20 {
            None
        } else {
            Some(0x41)
        }
    }
    // Intentionally wrong hint (lower 20 > actual 19; upper Some(0)).
    // Do NOT "fix" this — the proof of concept depends on it.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let lower_bound = 20;
        let upper_bound = Some(0);
        (lower_bound, upper_bound)
    }
}
// -----
fn main() {
    // Deliberate proof of concept: StackVec::extend trusts the lying
    // size_hint above and writes past the 4-byte inline buffer,
    // corrupting the stack (see RUSTSEC-2021-0048 in the header).
    let mut stack_vec = StackVec::<[u8; 4]>::new();
    let i: i32 = 42;
    // Causes a stack overflow overwriting i.
    stack_vec.extend(IncorrectIterator::new());
    println!("i: {}", i);
    assert!(i == 42);
}
|
mod navmesh_struct;
pub use navmesh_struct::Navmesh;
pub use navmesh_struct::NavmeshBuilder;
|
use std::fmt::Debug;
/// A cons list whose tails are shared via `Rc`, so several lists can
/// point at the same suffix.
#[derive(Debug)]
enum List<T: Debug> {
    Cons(T, Rc<List<T>>),
    Nil,
}
impl<T: Debug> Drop for List<T> {
    /// Logs each node as it is freed, to make the drop order observable.
    fn drop(&mut self) {
        println!("Dropping List with data {:?}!", self);
    }
}
use self::List::{Cons, Nil};
use std::rc::Rc;
/// Entry point: runs the `Rc` strong-count demonstration.
fn main() {
    ref_counter_test();
}
/// Demonstrates `Rc::strong_count` as `b` and `c` share the tail list `a`.
fn ref_counter_test() {
    let a = Rc::new(Cons(5, Rc::new(Cons(10, Rc::new(Nil)))));
    println!("a -> {:?}", a);
    println!("count after creating a = {}", Rc::strong_count(&a));
    let b = Cons(3, Rc::clone(&a));
    println!("b -> {:?}", b);
    // Fixed: this line previously printed "count after creating a".
    println!("count after creating b = {}", Rc::strong_count(&a));
    {
        let c = Cons(4, Rc::clone(&a));
        println!(" c -> {:?}", c);
        // Fixed: this line previously printed " count after creating a".
        println!(" count after creating c = {}", Rc::strong_count(&a));
    }
    println!("count after c goes out of scope = {}", Rc::strong_count(&a));
}
|
//! # read subcommand
//!
//! Prints the license text to standard output.
//! Will attempt to download if no file is found in the local
//! file cache.
//!
//! Might consider using this command in conjunction with a pager:
//!
//! ```bash
//! $ license read MIT
//! MIT License Copyright (c) <year> <copyright holders>
//!
//! Permission is hereby granted...
//! ```
//!
//! You might want to use this command in conjunction with a pager:
//!
//! ```bash
//! $ license read MIT | less
//! ```
use crate::prelude::*;
use std::fs::File;
use std::io;
use license::net;
use license::License;
/// Build the subcommand.
/// Build the `read` subcommand definition: one required positional
/// argument, the SPDX identifier of the license to print.
pub fn subcommand<'a, 'b>() -> App<'a, 'b> {
    SubCommand::with_name("read")
        .about("Print the license text")
        .settings(&[AppSettings::UnifiedHelpMessage])
        .arg(
            Arg::with_name("LICENSE")
                .help("SPDX identifier of license to read")
                .required(true),
        )
}
/// Run the subcommand.
/// Run the subcommand: resolve the license, download its text into the
/// local cache if missing, then stream the cached file to stdout.
pub fn exec(args: &ArgMatches) -> Failure {
    // Safe: LICENSE is a required argument, so clap guarantees it is present.
    let license = args.value_of("LICENSE").unwrap();
    let license = License::from(license)?;
    let path = license.path();
    if !path.exists() {
        println!("Downloading from: {:?}", license.url());
        net::download(license.url(), path)?;
    }
    io::copy(&mut File::open(path)?, &mut io::stdout())?;
    Ok(())
}
|
use rand::Rng;
use rocket::{FromForm, FromFormValue};
use serde_derive::{Deserialize, Serialize};
/// Identifies a sensor on the network by numeric id and kind.
#[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, FromForm, Copy, Clone)]
pub struct Sensor {
    // Unique id of the sensor.
    pub sensor_id: u16,
    // Kind of device (thermometer, light, ...).
    pub sensor_type: SensorType,
}
impl Sensor {
    /// Builds a sensor descriptor from its id and type.
    pub fn new(sensor_id: u16, sensor_type: SensorType) -> Sensor {
        Sensor {
            sensor_id,
            sensor_type,
        }
    }
    /// Whether this sensor is polled periodically (currently thermometers only).
    pub fn polled(&self) -> bool {
        // Idiomatic equivalent of the previous `match … => true, _ => false`
        // (clippy::match_like_matches_macro); SensorType derives PartialEq.
        self.sensor_type == SensorType::Thermometer
    }
    /// Builds a discovery request for this sensor with a random message id.
    pub fn discovery(&self) -> SensorMessage {
        SensorMessage {
            sensor_id: self.sensor_id,
            message_id: rand::thread_rng().gen(),
            sensor_type: self.sensor_type,
            request_type: RequestType::Discovery,
            payload: String::new(),
        }
    }
}
/// A request or response exchanged with a sensor.
#[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, Clone)]
pub struct SensorMessage {
    // Target sensor.
    pub sensor_id: u16,
    pub sensor_type: SensorType,
    // Randomly generated per message; (sensor_id, message_id) identifies it.
    pub message_id: u16,
    pub request_type: RequestType,
    // Free-form payload; empty for Get/Discovery requests.
    pub payload: String,
}
impl SensorMessage {
    /// Builds a `Get` request addressed to `sensor`, with a random message id.
    pub fn get(sensor: Sensor) -> SensorMessage {
        SensorMessage {
            sensor_id: sensor.sensor_id,
            sensor_type: sensor.sensor_type,
            message_id: rand::thread_rng().gen(),
            request_type: RequestType::Get,
            payload: String::new(),
        }
    }
    /// Builds a `Set` request addressed to `sensor`, carrying `set_val` as payload.
    pub fn set(sensor: Sensor, set_val: String) -> SensorMessage {
        SensorMessage {
            sensor_id: sensor.sensor_id,
            sensor_type: sensor.sensor_type,
            message_id: rand::thread_rng().gen(),
            request_type: RequestType::Set,
            payload: set_val,
        }
    }
    /// Returns a copy of the payload.
    pub fn extract_payload(&self) -> String {
        self.payload.clone()
    }
    /// Overwrites the payload in place.
    pub fn replace_payload(&mut self, new_payload: String) {
        self.payload = new_payload;
    }
    /// Overwrites the request type in place.
    pub fn change_request_type(&mut self, t: RequestType) {
        self.request_type = t;
    }
    /// Id of the sensor this message concerns.
    pub fn sensor_id(&self) -> u16 {
        self.sensor_id
    }
    /// Id of this message.
    pub fn message_id(&self) -> u16 {
        self.message_id
    }
    /// Composite key `(sensor_id, message_id)`.
    pub fn id(&self) -> (u16, u16) {
        (self.sensor_id(), self.message_id())
    }
    /// Reconstructs the `Sensor` descriptor embedded in this message.
    pub fn sensor(&self) -> Sensor {
        Sensor {
            sensor_id: self.sensor_id,
            sensor_type: self.sensor_type,
        }
    }
}
/// Direction/intent of a `SensorMessage`.
#[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, Copy, Clone)]
pub enum RequestType {
    Get,
    GetResponse,
    Set,
    Discovery,
}
/// Kinds of devices the system understands; the trailing comments describe
/// each device's value model.
#[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq, FromFormValue, Copy, Clone)]
pub enum SensorType {
    Thermometer, // float
    Light,       // On-Off
    SmartSwitch, // On-Off
    Thermostat,  // float
    MusicPlayer, // On-Off (play pause next prev volume)
    Store,       // Up-Down (On-off)
    Lock,        // Locked-Unlocked (On-off)
}
|
// pest. The Elegant Parser
// Copyright (c) 2018 Dragoș Tiselice
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
#![feature(test)]
extern crate pest;
extern crate pest_grammars;
extern crate test;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use test::Bencher;
use pest::Parser;
use pest::Span;
use pest::iterators::Pair;
use pest_grammars::json::*;
/// Borrowed JSON value tree; strings and object keys are `Span`s into the
/// original input rather than owned strings.
enum Json<'i> {
    Null,
    Bool(bool),
    Number(f64),
    String(Span<'i>),
    Array(Vec<Json<'i>>),
    Object(HashMap<Span<'i>, Json<'i>>)
}
/// Converts a parsed pest pair into the `Json` value tree.
fn consume(pair: Pair<Rule>) -> Json {
    // Recursive worker: each `value` rule wraps exactly one concrete rule.
    fn value(pair: Pair<Rule>) -> Json {
        let pair = pair.into_inner().next().unwrap();
        match pair.as_rule() {
            Rule::null => Json::Null,
            Rule::bool => match pair.as_str() {
                "false" => Json::Bool(false),
                "true" => Json::Bool(true),
                _ => unreachable!()
            },
            Rule::number => Json::Number(pair.as_str().parse().unwrap()),
            Rule::string => Json::String(pair.as_span()),
            Rule::array => Json::Array(pair.into_inner().map(value).collect()),
            Rule::object => {
                // Each inner pair is a key-value production: key span, then value.
                let pairs = pair.into_inner().map(|pos| {
                    let mut pair = pos.into_inner();
                    let key = pair.next().unwrap().as_span();
                    let value = value(pair.next().unwrap());
                    (key, value)
                });
                Json::Object(pairs.collect())
            }
            // Any other rule cannot appear inside a `value` per the grammar.
            _ => unreachable!()
        }
    }
    value(pair)
}
/// Benchmarks parsing benches/data.json. The commented-out `consume` wrapper
/// is a deliberate toggle: uncomment it to also benchmark tree construction.
#[bench]
fn data(b: &mut Bencher) {
    let mut file = File::open("benches/data.json").unwrap();
    let mut data = String::new();
    file.read_to_string(&mut data).unwrap();
    b.iter(|| {
        // consume(
        JsonParser::parse(Rule::json, &data).unwrap()
        // .next()
        // .unwrap()
        // )
    });
}
|
use worker::ffi::{Schema_CommandRequest, Schema_CommandResponse, Schema_ComponentData,
Schema_ComponentUpdate};
use ComponentBitField;
use std::any::Any;
use std::collections::HashMap;
use std::fmt;
use std::fmt::Debug;
use std::hash::Hash;
use std::ops::{Deref, DerefMut};
use worker::ComponentId;
pub type FieldId = u32;
/// Root trait implemented by generated schema code: ties together the
/// generated component data/update containers and the bit-field type used
/// to mark which components are present.
pub trait GeneratedSchema: Sized + Default {
    /// Number of component types in the generated schema.
    const NUMBER_OF_COMPONENTS: usize;
    /// Container for deserialised data of any generated component.
    type ComponentData: GlobalComponentDataInterface<Self>;
    /// Container for a deserialised update of any generated component.
    type ComponentUpdate: GlobalComponentUpdateInterface<Self>;
    /// Bit-field with (presumably) one bit per component type.
    type ComponentBitField: ComponentBitField
        + Clone
        + Copy
        + PartialEq
        + PartialOrd
        + Eq
        + Hash
        + Debug
        + Default
        + Send
        + Sync;
    /// Serialises an EntityAcl-style component from read requirements and
    /// per-component write authority, returning its component id with the data.
    fn serialise_entity_acl(
        read: Vec<String>,
        write: HashMap<ComponentId, String>,
    ) -> (ComponentId, Box<Schema_ComponentData>);
    /// Calls `handler` so it can register each generated component type.
    fn run_dynamic_component_handler<D: DynamicComponentHandler<Self>>(handler: &mut D);
    /// Decodes a raw command request; `None` presumably signals an unknown
    /// component/command pair — confirm against the generated impl.
    unsafe fn deserialise_command_request(
        component_id: ComponentId,
        command_index: u32,
        request: Box<Schema_CommandRequest>,
    ) -> Option<Box<Any>>;
    /// Decodes a raw command response; see `deserialise_command_request`.
    unsafe fn deserialise_command_response(
        component_id: ComponentId,
        command_index: u32,
        response: Box<Schema_CommandResponse>,
    ) -> Option<Box<Any>>;
}
/// Schema-wide dispatch over component *data* containers.
pub trait GlobalComponentDataInterface<S: GeneratedSchema> {
    /// Decodes raw data for `component_id` (presumably `None` for unknown ids).
    fn deserialise(
        component_id: ComponentId,
        data: Box<Schema_ComponentData>,
    ) -> Option<S::ComponentData>;
    /// Encodes this data back into the raw schema representation.
    fn serialise(&self) -> Box<Schema_ComponentData>;
    /// Applies `update` to this data in place.
    fn apply_update(&mut self, update: &S::ComponentUpdate);
}
/// Schema-wide dispatch over component *update* containers.
pub trait GlobalComponentUpdateInterface<S: GeneratedSchema> {
    /// Decodes a raw update for `component_id` (presumably `None` for unknown ids).
    fn deserialise(
        component_id: ComponentId,
        update: Box<Schema_ComponentUpdate>,
    ) -> Option<S::ComponentUpdate>;
}
/// One generated component type: its data/update types plus conversions
/// between the per-component types and the schema-wide containers.
pub trait Component<S: GeneratedSchema>: Default {
    /// This component's concrete data type.
    type Data: ComponentDataInterface<S> + Default;
    /// This component's concrete update type.
    type Update: ComponentUpdateInterface<S>;
    /// Stable id of this component in the schema.
    fn component_id() -> ComponentId;
    /// Applies `update` to `data` in place.
    fn apply_update_to_data(data: &mut Self::Data, update: &Self::Update);
    /// Borrows this component's data out of the schema-wide container,
    /// `None` when the container holds a different component.
    fn extract_data_borrow(data: &S::ComponentData) -> Option<&Self::Data>;
    /// Moves this component's data out of the schema-wide container.
    fn extract_data(data: S::ComponentData) -> Option<Self::Data>;
    /// Borrows this component's update out of the schema-wide container.
    fn extract_update(update: &S::ComponentUpdate) -> Option<&Self::Update>;
    /// Serialises a snapshot of this (default-constructed) component's data.
    fn serialise_snapshot(self) -> Box<Schema_ComponentData>;
}
/// Per-component data operations, including dirty-state bookkeeping used to
/// build incremental updates.
pub trait ComponentDataInterface<S: GeneratedSchema>: Sized {
    /// Decodes raw data into the schema-wide data container.
    fn deserialise_data(update: Box<Schema_ComponentData>) -> S::ComponentData;
    /// Encodes this data into the raw schema representation.
    fn serialise_data(&self) -> Box<Schema_ComponentData>;
    // fn mark_as_dirty(&mut self, field_index: usize);
    /// Builds an update from the currently-dirty state.
    fn serialise_update(&mut self) -> Box<Schema_ComponentUpdate>;
    /// Returns the dirty flag and clears it in one step.
    fn get_and_clear_dirty_bit(&mut self) -> bool;
    // fn is_dirty(&self) -> bool;
    /// Forces the dirty flag on.
    fn make_dirty(&mut self);
    /// Per-frame cleanup hook (exact semantics defined by the generated impl).
    fn cleanup_after_frame(&mut self);
}
/// Per-component update operations.
pub trait ComponentUpdateInterface<S: GeneratedSchema>: Sized {
    /// Decodes a raw update into the schema-wide update container.
    fn deserialise_update(update: Box<Schema_ComponentUpdate>) -> S::ComponentUpdate;
    /// Whether this update carries any events.
    fn contains_events(&self) -> bool;
}
/// One generated command: the component it belongs to, plus its request and
/// response payload types.
pub trait Command<S: GeneratedSchema> {
    /// Component this command is declared on.
    type Component: Component<S>;
    /// Request payload type.
    type Request: CommandRequestInterface;
    /// Response payload type.
    type Response: CommandResponseInterface;
    /// Index of this command within its component.
    fn command_index() -> u32;
}
/// Raw (de)serialisation for command request payloads.
pub trait CommandRequestInterface {
    /// Decodes a raw request.
    fn deserialise_request(request: Box<Schema_CommandRequest>) -> Self;
    /// Encodes this request into the raw schema representation.
    fn serialise_request(&self) -> Box<Schema_CommandRequest>;
}
/// Raw (de)serialisation for command response payloads.
pub trait CommandResponseInterface {
    /// Decodes a raw response.
    fn deserialise_response(response: Box<Schema_CommandResponse>) -> Self;
    /// Encodes this response into the raw schema representation.
    fn serialise_response(&self) -> Box<Schema_CommandResponse>;
}
/// Callback used by `GeneratedSchema::run_dynamic_component_handler` to visit
/// every generated component type.
pub trait DynamicComponentHandler<S: GeneratedSchema> {
    /// Called once per generated component type `C`.
    fn register_component<C: 'static + Component<S>>(&mut self);
}
/// Double-buffered event list: `events` are live (visible to `process` and
/// iteration) while `staged_events` are queued separately via `trigger`.
#[derive(Clone, Debug, Default)]
pub struct Event<T> {
    // Live events, visible to `process`/iteration.
    events: Vec<T>,
    // Queued events, only reachable through the staged accessors.
    staged_events: Vec<T>,
}
/// Accessors for the live and staged event buffers.
impl<T> Event<T> {
    /// Creates a container with no live and no staged events.
    pub fn new() -> Event<T> {
        Event {
            staged_events: Vec::new(),
            events: Vec::new(),
        }
    }
    /// Calls `cb` on every live event, in insertion order.
    pub fn process<F>(&self, mut cb: F)
    where
        F: FnMut(&T),
    {
        self.events.iter().for_each(|ev| cb(ev));
    }
    /// Drops all live events; staged events are untouched.
    pub fn clear(&mut self) {
        self.events.clear();
    }
    /// Queues `value` as a staged event.
    pub fn trigger(&mut self, value: T) {
        self.staged_events.push(value);
    }
    /// Appends `value` directly to the live events.
    pub fn add_event(&mut self, value: T) {
        self.events.push(value);
    }
    /// Read-only view of the staged events.
    pub fn get_staged_events(&self) -> &Vec<T> {
        &self.staged_events
    }
    /// Drops all staged events.
    pub fn clear_staged_events(&mut self) {
        self.staged_events.clear();
    }
}
/// Consuming iteration over the live events; staged events are dropped.
impl<T> IntoIterator for Event<T> {
    type Item = T;
    type IntoIter = ::std::vec::IntoIter<T>;
    fn into_iter(self) -> Self::IntoIter {
        self.events.into_iter()
    }
}
/// Borrowing iteration over the live events only.
impl<'a, T> IntoIterator for &'a Event<T> {
    type Item = &'a T;
    type IntoIter = ::std::slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.events.iter()
    }
}
/// A value paired with a dirty flag that is raised on every mutable access
/// (see the `DerefMut` impl below).
#[derive(Clone, Default)]
pub struct Property<T: fmt::Debug> {
    is_dirty: bool,
    value: T,
}
/// Conversion from a plain value. Note the flag starts *dirty* here, unlike
/// `Property::new`, which starts clean.
impl<T: fmt::Debug> From<T> for Property<T> {
    fn from(value: T) -> Property<T> {
        Property {
            is_dirty: true,
            value,
        }
    }
}
/// Debug-prints the inner value only; the dirty flag is hidden.
impl<T: fmt::Debug> fmt::Debug for Property<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.value.fmt(f)
    }
}
/// Constructors and dirty-flag accessors.
impl<T: fmt::Debug> Property<T> {
    /// Wraps `value` with the dirty flag initially unset.
    pub fn new(value: T) -> Property<T> {
        Property { is_dirty: false, value }
    }
    /// Reports the dirty flag without modifying it.
    pub fn get_dirty_bit(&self) -> bool {
        self.is_dirty
    }
    /// Returns the dirty flag and resets it to `false` in one step.
    pub fn get_and_clear_dirty_bit(&mut self) -> bool {
        std::mem::replace(&mut self.is_dirty, false)
    }
}
/// Read access never touches the dirty flag.
impl<T: fmt::Debug> Deref for Property<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.value
    }
}
/// Any mutable access conservatively marks the property dirty, even when the
/// value is not actually changed.
impl<T: fmt::Debug> DerefMut for Property<T> {
    fn deref_mut(&mut self) -> &mut T {
        self.is_dirty = true;
        &mut self.value
    }
}
|
use crate::client::Client as ViewClient;
use crate::view::View;
use crate::vix::CoreEvent;
use futures::sync::mpsc::UnboundedReceiver;
use futures::{Async, Future, Stream};
use std::collections::HashMap;
use std::io::Write;
use termion::event::Event;
use tokio;
use xrl::{Client, ClientResult, ModifySelection, ScrollTo, Style, Update, ViewId};
/// Top-level editor state: the xi-core client, all open views, and the
/// channel used to receive core events.
pub struct Editor {
    clipboard: String, // FIXME: Replace this with something better
    /// `new_view` requests that have not resolved yet.
    pub pending_open_requests: Vec<ClientResult<(ViewId, View)>>,
    /// Core events received for views that were not open at the time.
    pub delayed_events: Vec<CoreEvent>,
    /// All open views, keyed by core view id.
    pub views: HashMap<ViewId, View>,
    /// Id of the currently focused view.
    pub current_view: ViewId,
    /// Incoming stream of core events.
    pub events: UnboundedReceiver<CoreEvent>,
    /// RPC client to the xi core.
    pub client: Client,
    /// Terminal size (columns, rows minus the reserved status row).
    pub size: (u16, u16),
    /// Style definitions received from the core, keyed by style id.
    pub styles: HashMap<u64, Style>,
}
impl Editor {
pub fn new(client: Client, events: UnboundedReceiver<CoreEvent>) -> Editor {
let mut styles = HashMap::new();
styles.insert(0, Default::default());
Editor {
clipboard: String::default(),
events,
delayed_events: Vec::new(),
pending_open_requests: Vec::new(),
size: (0, 0),
views: HashMap::new(),
styles,
current_view: ViewId(0),
client,
}
}
}
impl Editor {
    /// Forwards a terminal input event to the currently focused view, if any.
    pub fn handle_input(&mut self, event: Event) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.handle_input(event)
        }
    }
    /// Records a new terminal size and propagates the usable height to the
    /// current view.
    pub fn handle_resize(&mut self, size: (u16, u16)) {
        info!("Setting new terminal size");
        // One row is reserved for the status line. `saturating_sub` keeps a
        // degenerate zero-height terminal from underflowing (which would
        // panic in debug builds).
        self.size = (size.0, size.1.saturating_sub(1));
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.resize(self.size.1)
        } else {
            warn!("View {} not found", self.current_view);
        }
    }
}
impl Editor {
    /// Routes a core notification to the matching handler.
    pub fn dispatch_core_event(&mut self, event: CoreEvent) {
        match event {
            CoreEvent::Update(payload) => self.handle_update(payload),
            CoreEvent::SetStyle(payload) => self.handle_def_style(payload),
            CoreEvent::ScrollTo(payload) => self.handle_scroll_to(payload),
        }
    }
    /// Applies a buffer update to its view, or delays it when the target
    /// view is not open yet.
    fn handle_update(&mut self, update: Update) {
        if let Some(view) = self.views.get_mut(&update.view_id) {
            view.update_cache(update);
        } else {
            self.delayed_events.push(CoreEvent::Update(update));
        }
    }
    /// Moves the cursor of the target view, or delays the event when the
    /// view is not open yet.
    fn handle_scroll_to(&mut self, scroll_to: ScrollTo) {
        if let Some(view) = self.views.get_mut(&scroll_to.view_id) {
            view.set_cursor(scroll_to.line, scroll_to.column);
        } else {
            self.delayed_events.push(CoreEvent::ScrollTo(scroll_to));
        }
    }
    /// Registers (or replaces) a style definition by id.
    fn handle_def_style(&mut self, style: Style) {
        self.styles.insert(style.id, style);
    }
}
/// User-facing editing commands; most are thin forwards to the focused view
/// or directly to the core client.
impl Editor {
    /// Starts opening `file_path` (or an empty buffer when `None`); the
    /// resulting future is queued in `pending_open_requests` and completed
    /// by `process_open_requests`.
    pub fn open(&mut self, file_path: Option<String>) {
        let client = self.client.clone();
        let task = self
            .client
            .new_view(file_path.clone())
            .and_then(move |view_id| {
                let view_client = ViewClient::new(client, view_id);
                Ok((
                    view_id,
                    View::new(view_client, Some(file_path.unwrap_or_else(|| "".into()))),
                ))
            });
        self.pending_open_requests.push(Box::new(task));
    }
    /// Pastes the locally cached clipboard into the focused view.
    pub fn paste(&mut self) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.paste(&self.clipboard);
        }
    }
    /// Copies the focused view's selection into the local clipboard,
    /// blocking on the core round-trip.
    pub fn copy(&mut self) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            match view.copy().wait() {
                Ok(value) => {
                    self.clipboard = match value.as_str() {
                        Some(value) => String::from(value),
                        None => {
                            error!("could not copy clipboard");
                            String::default()
                        }
                    };
                }
                Err(err) => error!("error copying: {}", err),
            }
        }
    }
    /// Cuts the focused view's selection into the local clipboard.
    // NOTE(review): near-duplicate of `copy` (including the "could not copy"
    // wording in its messages) — candidate for a shared private helper.
    pub fn cut(&mut self) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            match view.cut().wait() {
                Ok(value) => {
                    self.clipboard = match value.as_str() {
                        Some(value) => String::from(value),
                        None => {
                            error!("could not copy clipboard");
                            String::default()
                        }
                    };
                }
                Err(err) => error!("error copying: {}", err),
            }
        }
    }
    /// Undoes the last edit in the focused view.
    pub fn undo(&mut self) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.undo();
        }
    }
    /// Redoes the last undone edit in the focused view.
    pub fn redo(&mut self) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.redo();
        }
    }
    /// Starts a search in the focused view with the given matching options.
    pub fn find(
        &mut self,
        search_term: &str,
        case_sensitive: bool,
        regex: bool,
        whole_words: bool,
    ) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.find(search_term, case_sensitive, regex, whole_words);
        }
    }
    /// Jumps to the next match in the focused view.
    pub fn find_next(
        &mut self,
        wrap_around: bool,
        allow_same: bool,
        modify_selection: ModifySelection,
    ) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.find_next(wrap_around, allow_same, modify_selection);
        }
    }
    /// Jumps to the previous match in the focused view.
    pub fn find_prev(
        &mut self,
        wrap_around: bool,
        allow_same: bool,
        modify_selection: ModifySelection,
    ) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.find_prev(wrap_around, allow_same, modify_selection);
        }
    }
    /// Selects all matches of the current search in the focused view.
    pub fn find_all(&mut self) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.find_all();
        }
    }
    /// Toggles visibility of search-match highlights in the focused view.
    pub fn highlight_find(&mut self, visible: bool) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.highlight_find(visible);
        }
    }
    /// Asks the core to switch colour themes; errors are silently dropped.
    pub fn set_theme(&mut self, theme: &str) {
        let future = self.client.set_theme(theme).map_err(|_| ());
        tokio::run(future);
    }
    /// Collapses multiple selections down to a single cursor.
    pub fn collapse_selections(&mut self) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.collapse_selections();
        }
    }
    /// Extends the selection over the whole current line.
    pub fn select_line(&mut self) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.select_line();
        }
    }
    /// Extends the selection to the end of the current line.
    pub fn select_line_end(&mut self) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.select_line_end();
        }
    }
    /// Deletes the current line in the focused view.
    pub fn delete_line(&mut self) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.delete_line();
        }
    }
    /// Moves the cursor of the focused view to `line`.
    pub fn goto_line(&mut self, line: u64) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.goto_line(line);
        }
    }
    // The cursor-movement commands below bypass the view and talk to the
    // core client directly; they only fire when the focused view exists.
    pub fn down(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.down(self.current_view);
        }
    }
    pub fn up(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.up(self.current_view);
        }
    }
    // pub fn left(&mut self) {
    //     if self.views.contains_key(&self.current_view) {
    //         self.client.left(self.current_view);
    //     }
    // }
    pub fn right(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.right(self.current_view);
        }
    }
    pub fn select_down(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.down_sel(self.current_view);
        }
    }
    pub fn select_up(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.up_sel(self.current_view);
        }
    }
    pub fn select_left(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.left_sel(self.current_view);
        }
    }
    pub fn select_right(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.right_sel(self.current_view);
        }
    }
    pub fn select_page_up(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.page_up_sel(self.current_view);
        }
    }
    pub fn select_page_down(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.page_down_sel(self.current_view);
        }
    }
    pub fn select_home(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.line_start_sel(self.current_view);
        }
    }
    pub fn select_end(&mut self) {
        if self.views.contains_key(&self.current_view) {
            self.client.line_end_sel(self.current_view);
        }
    }
    /// Saves the given view (or the focused one when `view` is `None`),
    /// blocking on the core round-trip; failures are only logged.
    pub fn save(&mut self, view: Option<ViewId>) {
        match view {
            Some(view_id) => {
                if let Some(view) = self.views.get_mut(&view_id) {
                    if let Err(err) = view.save().wait() {
                        error!("could not save view: {}", err);
                    }
                }
            }
            None => {
                if let Some(view) = self.views.get_mut(&self.current_view) {
                    if let Err(err) = view.save().wait() {
                        error!("could not save view: {}", err);
                    }
                }
            }
        }
    }
}
/// Event-loop plumbing: polling pending futures, draining core events, and
/// rendering.
impl Editor {
    /// Polls every pending `open` future. Each completed request installs
    /// its view (resized to the current height) and focuses it; the focus
    /// ends on the last request completed in this pass.
    pub fn process_open_requests(&mut self) {
        if self.pending_open_requests.is_empty() {
            return;
        }
        info!("process pending open requests");
        let mut done = vec![];
        for (idx, task) in self.pending_open_requests.iter_mut().enumerate() {
            match task.poll() {
                Ok(Async::Ready((id, mut view))) => {
                    info!("open request succeeded for {}", &id);
                    done.push(idx);
                    view.resize(self.size.1);
                    self.views.insert(id, view);
                    self.current_view = id;
                }
                Ok(Async::NotReady) => continue,
                Err(e) => panic!("\"open\" task failed: {}", e),
            }
        }
        // Remove in reverse so earlier indices stay valid.
        for idx in done.iter().rev() {
            self.pending_open_requests.remove(*idx);
        }
        if self.pending_open_requests.is_empty() {
            info!("no more pending open request");
        }
    }
    /// Drains all currently-available core events, dispatching each one.
    /// Panics if the core event stream closes or errors, since the editor
    /// cannot continue without the core.
    pub fn process_core_events(&mut self) {
        loop {
            match self.events.poll() {
                Ok(Async::Ready(Some(event))) => {
                    self.dispatch_core_event(event);
                }
                Ok(Async::Ready(None)) => {
                    error!("Error core stdout shut down => panicking");
                    panic!("Error core stdout shut down");
                }
                Ok(Async::NotReady) => break,
                Err(_) => {
                    error!("Error while polling core => panicking");
                    panic!("Error while polling core");
                }
            }
        }
    }
    /// Replays events that were delayed because their view was not open yet.
    pub fn process_delayed_events(&mut self) {
        let delayed_events: Vec<CoreEvent> = self.delayed_events.drain(..).collect();
        for event in delayed_events {
            self.dispatch_core_event(event);
        }
    }
    /// Renders the focused view into `term` with the shared style table.
    pub fn render<W: Write>(&mut self, term: &mut W, state: &str) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.render(term, &self.styles, state);
        }
    }
    /// Renders an error message through the focused view.
    pub fn render_error<W: Write>(&mut self, term: &mut W, msg: &str) {
        if let Some(view) = self.views.get_mut(&self.current_view) {
            view.render_error(term, msg);
        }
    }
}
|
use std::collections::HashMap;
use proc_macro2::TokenStream;
use quote::{quote, ToTokens, TokenStreamExt};
use syn::{Ident, ItemEnum, LitStr};
/// Code generator for a "class" enum: emits the enum definition itself plus
/// `FromStr` and `as_str` conversions driven by `#[subclass("code")]`
/// attributes harvested from the variants.
pub struct Class {
    class_enum: ItemEnum,
    // Variant ident -> its string code, taken from `#[subclass(...)]`.
    subclasses: HashMap<Ident, LitStr>,
    // Standard classes get an `Other(String)` catch-all variant.
    is_standard: bool,
}
impl Class {
    /// Consumes a parsed enum, stripping every `#[subclass("code")]`
    /// attribute off its variants and remembering the variant→code mapping.
    ///
    /// `is_standard` selects the "open" flavour: standard classes gain an
    /// `Other(String)` variant that captures unknown codes.
    pub fn new(mut class_enum: ItemEnum, is_standard: bool) -> Self {
        let mut subclasses = HashMap::new();
        for variant in class_enum.variants.iter_mut() {
            let ident = variant.ident.clone();
            // Extract-and-remove `#[subclass(...)]` attributes in place,
            // keeping every other attribute untouched.
            variant.attrs.retain(|attr| {
                if attr.path.is_ident("subclass") {
                    let code: LitStr = attr.parse_args().unwrap();
                    subclasses.insert(ident.clone(), code);
                    false
                } else {
                    true
                }
            });
        }
        Self {
            class_enum,
            subclasses,
            is_standard,
        }
    }
    /// Emits the enum definition, appending the `Other(String)` catch-all
    /// variant for standard classes.
    fn enum_definition(&self) -> TokenStream {
        if self.is_standard {
            let attributes = &self.class_enum.attrs;
            let visibility = &self.class_enum.vis;
            let class_ident = &self.class_enum.ident;
            // Re-emit each variant with an explicit trailing comma: plain
            // `#variants` reproduces the source punctuation, so an enum
            // written without a trailing comma would generate
            // `LastVariant Other(...)` — a syntax error.
            let variants = self.class_enum.variants.iter();
            quote! {
                #(#attributes)*
                #visibility enum #class_ident {
                    #(#variants,)*
                    Other(::std::string::String),
                }
            }
        } else {
            let enum_definition = &self.class_enum;
            quote! {
                #enum_definition
            }
        }
    }
    /// Emits a `FromStr` impl mapping each subclass code to its variant.
    ///
    /// Standard classes are infallible (unknown codes land in `Other`);
    /// non-standard classes reject unknown codes with
    /// `ParseError::UnknownSubclass`.
    fn from_str_impl(&self) -> TokenStream {
        let class_ident = &self.class_enum.ident;
        let err_type = if self.is_standard {
            quote! { ::std::convert::Infallible }
        } else {
            quote! { crate::error::ParseError }
        };
        // NOTE(review): `HashMap` iteration makes the generated arm order
        // nondeterministic between builds; semantically harmless, but a
        // `BTreeMap` would make codegen reproducible.
        let from_str_arms = self
            .subclasses
            .iter()
            .map(|(variant, code)| quote! { #code => Ok(Self::#variant), });
        let wildcard_arm = if self.is_standard {
            // For standard types, wrap unknown strings in 'Other' variant
            quote! { _ => Ok(Self::Other(s.to_string())), }
        } else {
            // For non-standard types, return an error
            quote! { _ => Err(crate::error::ParseError::UnknownSubclass(s.to_string())) }
        };
        quote! {
            impl ::std::str::FromStr for #class_ident {
                type Err = #err_type;
                fn from_str(s: &str) -> ::std::result::Result<Self, Self::Err> {
                    match s {
                        #(#from_str_arms)*
                        #wildcard_arm
                    }
                }
            }
        }
    }
    /// Emits an inherent `as_str` returning each variant's subclass code.
    ///
    /// NOTE(review): variants without a `#[subclass(...)]` attribute get no
    /// arm here, so a non-standard enum mixing attributed and bare variants
    /// would generate a non-exhaustive match — confirm that invariant is
    /// enforced upstream.
    fn as_str_impl(&self) -> TokenStream {
        let class_ident = &self.class_enum.ident;
        let as_str_arms = self
            .subclasses
            .iter()
            .map(|(variant, code)| quote! { Self::#variant => #code, });
        let other_arm = if self.is_standard {
            quote! { Self::Other(subclass) => subclass.as_str(), }
        } else if self.subclasses.is_empty() {
            // SAFETY: This is only included for non-standard state types with zero variants,
            // which can never be constructed.
            quote! { _ => unsafe { ::std::hint::unreachable_unchecked() } }
        } else {
            quote! {}
        };
        quote! {
            impl #class_ident {
                pub fn as_str(&self) -> &str {
                    match self {
                        #(#as_str_arms)*
                        #other_arm
                    }
                }
            }
        }
    }
}
impl ToTokens for Class {
    /// Emits the enum definition followed by its `FromStr` and `as_str` impls.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.enum_definition().to_tokens(tokens);
        self.from_str_impl().to_tokens(tokens);
        self.as_str_impl().to_tokens(tokens);
    }
}
|
use std::convert::TryFrom;
use std::io::prelude::*;
use chrono::{DateTime, Local};
use crate::FloppyType;
/// Parsed contents of a ddrescue-style map file: the status line, every
/// recovery block, and optional timestamps from the comment header.
#[derive(Clone, Debug)]
pub struct MapFile {
    // From the "# Start time:" comment, when present and parsable.
    start_time: Option<DateTime<Local>>,
    // From the "# Current time:" comment, when present and parsable.
    current_time: Option<DateTime<Local>>,
    current_pos: u64,
    status: Status,
    pass: u64,
    // Sum of all block sizes, in bytes.
    total_size: u64,
    blocks: Vec<Block>,
    floppy_type: FloppyType,
}
/// Overall rescue phase recorded on the status line (one character each:
/// `?`, `*`, `/`, `-`, `F`, `G`, `+` respectively).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Status {
    CopyingNonTriedBlocks,
    TrimmingNonTrimmerBlocks,
    ScrapingNonScrapedBlocks,
    RetryingBadSectors,
    FillingSpecifiedBlocks,
    GeneratingApproximateMapfile,
    Finished,
}
/// Failures produced while loading a map file; `InvalidLine` carries the
/// 1-based line number.
#[derive(Debug)]
pub enum Error {
    NoStatusLine,
    UnknownFloppyType,
    InvalidLine(usize),
    Io(std::io::Error),
}
impl MapFile {
pub fn load(reader: impl BufRead, floppy_type: Option<FloppyType>) -> Result<Self, Error> {
fn parse_number(s: &str, line_index: usize) -> Result<u64, Error> {
let (s, radix) = if s.starts_with("0x") {
(&s[2..], 16)
} else if s.starts_with("0") {
(&s[1..], 8)
} else {
(s, 10)
};
u64::from_str_radix(s, radix).map_err(|_| Error::InvalidLine(line_index + 1))
}
fn try_parse_comment_line<T, E>(
s: &str,
prefix: &str,
value: &mut Option<T>,
parse: impl FnOnce(&str) -> Result<T, E>)
{
if value.is_none() && s.starts_with(prefix) {
*value = parse(&s[prefix.len()..].trim()).ok();
}
}
fn parse_datetime(s: &str) -> chrono::ParseResult<DateTime<Local>> {
DateTime::parse_from_str(s, "%Y-%m-%d %H:%M:%S").map(|d| d.into())
}
const COMMENT_START_TIME: &str = "# Start time:";
const COMMENT_CURRENT_TIME: &str = "# Current time:";
let mut start_time = None;
let mut current_time = None;
let mut status_line: Option<(u64, Status, u64)> = None;
let mut total_size = 0;
let mut blocks = Vec::new();
for (index, line) in reader.lines().enumerate() {
let line = line.map_err(Error::Io)?;
let line = line.trim();
if line.starts_with("#") {
try_parse_comment_line(line, COMMENT_START_TIME, &mut start_time, parse_datetime);
try_parse_comment_line(line, COMMENT_CURRENT_TIME, &mut current_time, parse_datetime);
} else {
let parts = line.split_whitespace().collect::<Vec<_>>();
if parts.len() != 3 {
return Err(Error::InvalidLine(index + 1));
}
if status_line.is_none() {
let current_pos = parse_number(parts[0], index)?;
let status = match parts[1] {
"?" => Status::CopyingNonTriedBlocks,
"*" => Status::TrimmingNonTrimmerBlocks,
"/" => Status::ScrapingNonScrapedBlocks,
"-" => Status::RetryingBadSectors,
"F" => Status::FillingSpecifiedBlocks,
"G" => Status::GeneratingApproximateMapfile,
"+" => Status::Finished,
_ => return Err(Error::InvalidLine(index + 1)),
};
let pass = parse_number(parts[2], index)?;
status_line = Some((current_pos, status, pass));
} else {
let pos = parse_number(parts[0], index)?;
let size = parse_number(parts[1], index)?;
let status = match parts[2] {
"?" => BlockStatus::NonTried,
"*" => BlockStatus::NonTrimmed,
"/" => BlockStatus::NonScraped,
"-" => BlockStatus::BadSector,
"+" => BlockStatus::Finished,
_ => return Err(Error::InvalidLine(index + 1)),
};
blocks.push(Block {
pos,
size,
status,
});
total_size += size;
}
}
}
let floppy_type = floppy_type
.or_else(|| FloppyType::find_by_total_size(total_size).cloned())
.ok_or(Error::UnknownFloppyType)?;
if let Some((current_pos, status, pass)) = status_line {
Ok(MapFile {
start_time,
current_time,
current_pos,
status,
pass,
total_size,
blocks,
floppy_type,
})
} else {
Err(Error::NoStatusLine)
}
}
pub fn start_time(&self) -> Option<DateTime<Local>> {
self.start_time
}
pub fn current_time(&self) -> Option<DateTime<Local>> {
self.current_time
}
pub fn current_pos(&self) -> u64 {
self.current_pos
}
pub fn status(&self) -> Status {
self.status
}
pub fn pass(&self) -> u64 {
self.pass
}
pub fn total_size(&self) -> u64 {
self.total_size
}
pub fn floppy_type(&self) -> &FloppyType {
&self.floppy_type
}
/*pub fn blocks(&self) -> std::slice::Iter<Block> {
self.blocks.iter()
}*/
pub fn sectors(&self) -> Sectors {
Sectors {
map: self,
block: 0,
index: 0,
side: 0,
track: 0,
sector: 0,
}
}
}
/// Iterator state for `MapFile::sectors`: a cursor over the block list plus
/// the running side/track/sector position of the next sector to yield.
pub struct Sectors<'a> {
    map: &'a MapFile,
    // Index of the block currently being walked.
    block: usize,
    // Linear sector index from the start of the disk.
    index: u64,
    side: u64,
    track: u64,
    // 0-based here; yielded as 1-based in `Sector::sector`.
    sector: u64,
}
impl Iterator for Sectors<'_> {
    type Item = Sector;
    /// Yields the next sector, advancing the side/track/sector odometer
    /// using the floppy geometry, and moving to the next block when the
    /// current one is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        let floppy = &self.map.floppy_type;
        while let Some(block) = self.map.blocks.get(self.block) {
            let pos = self.index * floppy.sector_size;
            // NOTE(review): assumes blocks are contiguous from position 0;
            // a gap before the current position would make this subtraction
            // underflow — confirm the loader guarantees contiguity.
            let block_pos = pos - block.pos();
            if block_pos < block.size() {
                let r = Some(Sector {
                    pos,
                    index: self.index,
                    side: self.side,
                    track: self.track,
                    // Sector numbers are reported 1-based.
                    sector: self.sector + 1,
                    status: block.status,
                });
                // Advance the odometer: sector -> track -> side.
                self.sector += 1;
                if self.sector == floppy.sectors {
                    self.sector = 0;
                    self.track += 1;
                    if self.track == floppy.tracks {
                        self.track = 0;
                        self.side += 1;
                    }
                }
                self.index += 1;
                return r;
            }
            self.block += 1;
        }
        None
    }
    /// Exact remaining count: total sectors (rounded up) minus those yielded.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let sector_size = self.map.floppy_type.sector_size;
        let total_sectors = (self.map.total_size + sector_size - 1) / sector_size;
        let hint = usize::try_from(total_sectors - self.index)
            .expect("this disk is absurdly big");
        (hint, Some(hint))
    }
}
/// `size_hint` above is exact, so the iterator length is known.
impl ExactSizeIterator for Sectors<'_> { }
/// One contiguous run of sectors sharing a recovery status, as read from a
/// map-file data line.
#[derive(Clone, Debug)]
struct Block {
    // Byte offset of the block's first sector.
    pos: u64,
    // Block length in bytes.
    size: u64,
    status: BlockStatus,
}
impl Block {
    /// Byte offset of the block's first sector.
    pub fn pos(&self) -> u64 {
        self.pos
    }
    /// Block length in bytes.
    pub fn size(&self) -> u64 {
        self.size
    }
    /// Recovery status of every sector in the block.
    pub fn status(&self) -> BlockStatus {
        self.status
    }
}
/// Per-block recovery status (map-file characters `?`, `*`, `/`, `-`, `+`).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum BlockStatus {
    NonTried,
    NonTrimmed,
    NonScraped,
    BadSector,
    Finished,
}
/// A single sector as yielded by `Sectors`: its byte position, linear index,
/// physical location, and the status of the block containing it.
pub struct Sector {
    pub pos: u64,
    pub index: u64,
    pub side: u64,
    pub track: u64,
    /// 1-based sector number within the track.
    pub sector: u64,
    pub status: BlockStatus,
}
|
mod commands;
mod directory;
mod opts;
mod shape;
use std::fs::File;
use std::io::Read;
use anyhow::Result;
use opts::Opts;
#[cfg(test)]
pub mod tests;
/// Placeholder token for the new project's name (presumably substituted by
/// the `commands`/`directory` modules — confirm there).
pub const REPLACEABLE_NAME: &str = "{{__NAME__}}";
/// Placeholder token for the output path (same substitution mechanism).
pub const REPLACEABLE_OUTPUT: &str = "{{__OUT__}}";
/// Scaffolds a new project from a language template: reads the TOML config,
/// runs the configured "before" commands, copies the template directory
/// (passing the project name and output path, presumably for placeholder
/// substitution), then runs the "after" commands from inside the output.
fn main() -> Result<(), anyhow::Error> {
    let opts = Opts::opts()?;
    let mut config_file = String::new();
    File::open(&opts.config)?.read_to_string(&mut config_file)?;
    let toml_config = toml::from_str(&config_file)?;
    let dir_location = shape::get_lang_location(&toml_config, &opts.language)?;
    // Fail with a clear error instead of panicking on a non-UTF-8 path.
    let output_path = opts
        .output
        .to_str()
        .ok_or_else(|| anyhow::anyhow!("output path `{}` is not valid UTF-8", opts.output.display()))?;
    if let Some(commands) =
        shape::get_commands(&toml_config, &opts.language, shape::CommandVariants::Before)?
    {
        for command in commands {
            commands::exec(&command, &opts.project_name, output_path)?;
        }
    }
    directory::copy_dir_all(
        &dir_location,
        &opts.output,
        &opts.project_name,
        output_path,
    )?;
    // "After" commands run from inside the freshly generated project.
    std::env::set_current_dir(&opts.output)?;
    if let Some(commands) =
        shape::get_commands(&toml_config, &opts.language, shape::CommandVariants::After)?
    {
        for command in commands {
            commands::exec(&command, &opts.project_name, output_path)?;
        }
    }
    Ok(())
}
|
// https://adventofcode.com/2017/day/19
use std::io::{BufRead, BufReader};
use std::fs::File;
/// Follows the routing diagram in `input.txt` from the entry `|` on the
/// first row, collecting visited letters and counting steps until the
/// packet stops (Advent of Code 2017 day 19, both parts).
fn main() {
    let f = BufReader::new(File::open("input.txt").expect("Opening input.txt failed"));
    // Dump routing diagram to table
    let diagram = f.lines()
        .map(|line| line.expect("Invalid line").chars().collect())
        .collect::<Vec<Vec<char>>>();
    // Find starting position
    let mut y = 0;
    let mut x = diagram[0]
        .iter()
        .position(|&c| c == '|')
        .expect("No '|' on first line");
    let mut dir = Dir::Down;
    // Follow route and count steps
    let mut letters = String::new();
    let mut steps = 0;
    while dir != Dir::Stop {
        steps += 1;
        // Check current character
        match diagram[y][x] {
            // Keep track of letters and check if packet should stop
            c @ 'A'...'Z' => {
                letters.push(c);
                dir = check_stop(x, y, dir, &diagram);
            }
            // Find next direction at corner
            '+' => dir = check_dir(x, y, dir, &diagram),
            // Straight pipes won't affect direction
            '|' | '-' => {}
            c @ _ => panic!("Invalid diagram character {}", c),
        }
        // Move one step to current direction (no-op once stopped, so the
        // final step onto the last letter is still counted exactly once)
        match dir {
            Dir::Up => y -= 1,
            Dir::Down => y += 1,
            Dir::Left => x -= 1,
            Dir::Right => x += 1,
            Dir::Stop => {}
        }
    }
    // Assert to facilitate further tweaks (these are this puzzle input's
    // known answers)
    assert_eq!("DTOUFARJQ", letters);
    assert_eq!(16642, steps);
    println!(
        "The packet saw letters '{}' and took {} steps",
        letters,
        steps
    );
}
/// Decides the new travel direction at a `+` corner, never reversing onto
/// the pipe the packet arrived from. Well-formed input guarantees exactly
/// one continuation, so "right" is the fall-through answer.
/// (`&[Vec<char>]` — a `&Vec<Vec<char>>` argument coerces, so callers are
/// unaffected.)
fn check_dir(x: usize, y: usize, dir: Dir, diagram: &[Vec<char>]) -> Dir {
    // Probe a neighbouring cell, treating out-of-bounds as blank so a
    // diagram without a whitespace border cannot cause an index panic.
    let cell = |cy: usize, cx: usize| -> char {
        diagram.get(cy).and_then(|row| row.get(cx)).map_or(' ', |c| *c)
    };
    // Check if packet should continue up
    if dir != Dir::Down && cell(y.saturating_sub(1), x) == '|' {
        return Dir::Up;
    }
    // Check if packet should continue down
    if dir != Dir::Up && cell(y.saturating_add(1), x) == '|' {
        return Dir::Down;
    }
    // Check if packet should continue left
    if dir != Dir::Right && cell(y, x.saturating_sub(1)) == '-' {
        return Dir::Left;
    }
    // Expect correct input, so the remaining option is right
    Dir::Right
}
/// After collecting a letter, decides whether the packet keeps moving in
/// `dir` or stops because the route ends. Out-of-bounds neighbours (letter
/// on the diagram edge) now read as blank and stop the packet instead of
/// panicking on an index underflow/overflow.
fn check_stop(x: usize, y: usize, dir: Dir, diagram: &[Vec<char>]) -> Dir {
    let cell = |cy: usize, cx: usize| -> char {
        diagram.get(cy).and_then(|row| row.get(cx)).map_or(' ', |c| *c)
    };
    // Get next character on line
    let next = match dir {
        Dir::Up => cell(y.wrapping_sub(1), x),
        Dir::Down => cell(y + 1, x),
        Dir::Left => cell(y, x.wrapping_sub(1)),
        Dir::Right => cell(y, x + 1),
        Dir::Stop => unreachable!(),
    };
    // Check if packet can continue
    match next {
        // Can continue onto a letter, a pipe, or a corner
        'A'...'Z' | '|' | '-' | '+' => dir,
        // Otherwise stops
        _ => Dir::Stop,
    }
}
/// Travel direction of the packet; `Stop` terminates the walk.
#[derive(PartialEq)]
enum Dir {
    Up,
    Down,
    Left,
    Right,
    Stop,
}
|
use std::collections::BTreeMap;
/// SSL Labs `key` object. Field names mirror the JSON wire format, hence
/// the `non_snake_case` allowance (same for all structs below).
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsKey {
    pub size: i32,
    pub strength: i32,
    pub alg: String,
    pub debianFlaw: Option<bool>,
    pub q: Option<i32>,
}
/// SSL Labs leaf-certificate object.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsCert {
    pub subject: String,
    pub commonNames: Vec<String>,
    pub altNames: Vec<String>,
    pub notBefore: i64,
    pub notAfter: i64,
    pub issuerSubject: String,
    pub sigAlg: String,
    pub issuerLabel: String,
    pub revocationInfo: i32,
    pub crlURIs: Vec<String>,
    pub ocspURIs: Vec<String>,
    pub revocationStatus: i32,
    pub crlRevocationStatus: i32,
    pub ocspRevocationStatus: i32,
    pub sgc: Option<i32>,
    pub validationType: Option<String>,
    pub issues: i32,
    pub sct: Option<bool>,
    pub mustStaple: i32,
}
/// One certificate in the served chain, including its raw encoding.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsChainCert {
    pub subject: String,
    pub notBefore: i64,
    pub notAfter: i64,
    pub issuerSubject: String,
    pub issuerLabel: String,
    pub sigAlg: String,
    pub issues: i32,
    pub keyAlg: String,
    pub keySize: i32,
    pub keyStrength: i32,
    pub revocationStatus: i32,
    pub crlRevocationStatus: i32,
    pub ocspRevocationStatus: i32,
    pub raw: String,
}
/// The full served certificate chain and its aggregated issue flags.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsChain {
    pub certs: Vec<LabsChainCert>,
    pub issues: i32,
}
/// One TLS/SSL protocol version supported by the endpoint.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsProtocol {
    pub id: i32,
    pub name: String,
    pub version: String,
    pub v2SuitesDisabled: Option<bool>,
    pub errorMessage: Option<bool>,
    pub q: Option<i32>,
}
/// A simulated client (browser/platform/version) used in handshake tests.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsSimClient {
    id: i32,
    name: String,
    platform: Option<String>,
    version: Option<String>,
    isReference: Option<bool>,
}
/// Result of one client handshake simulation.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsSimulation {
    pub client: LabsSimClient,
    pub errorCode: Option<i32>,
    pub attempts: i32,
    pub protocolId: Option<i32>,
    pub suiteId: Option<i32>,
}
/// All handshake-simulation results for an endpoint.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsSimDetails {
    pub results: Vec<LabsSimulation>,
}
/// One cipher suite, with optional DH/ECDH parameters when applicable.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsSuite {
    pub id: i32,
    pub name: String,
    pub cipherStrength: i32,
    pub dhStrength: Option<i32>,
    pub dhP: Option<i32>,
    pub dhG: Option<i32>,
    pub dhYs: Option<i32>,
    pub ecdhBits: Option<i32>,
    pub ecdhStrength: Option<i32>,
    pub q: Option<i32>,
}
/// The endpoint's cipher-suite list; `preference` indicates server ordering.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsSuites {
    pub list: Vec<LabsSuite>,
    pub preference: Option<bool>,
}
/// Parsed HTTP Strict Transport Security policy of the endpoint.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsHstsPolicy {
    // NOTE(review): looks like a protocol constant echoed per-response
    // rather than a per-host value — confirm against the API docs.
    pub LONG_MAX_AGE: i64,
    pub header: Option<String>,
    pub status: String,
    pub error: Option<String>,
    pub maxAge: Option<i64>,
    pub includeSubDomains: Option<bool>,
    pub preload: Option<bool>,
    pub directives: BTreeMap<String, String>,
}
/// HSTS preload-list status from one source list.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsHstsPreload {
    pub source: String,
    pub status: String,
    pub error: Option<String>,
    pub sourceTime: Option<i64>,
}
/// A single HPKP pin (hash function plus pin value).
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsHpkpPin {
    pub hashFunction: String,
    pub value: String,
}
/// Parsed HTTP Public Key Pinning policy of the endpoint.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsHpkpPolicy {
    pub header: Option<String>,
    pub status: String,
    pub error: Option<String>,
    pub maxAge: Option<i64>,
    pub includeSubDomains: Option<bool>,
    pub reportUri: Option<String>,
    pub pins: Vec<LabsHpkpPin>,
    pub matchedPins: Vec<LabsHpkpPin>,
    pub directives: BTreeMap<String, String>,
}
/// One host checked as part of the DROWN-vulnerability assessment.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct DrownHost {
    pub ip: String,
    pub export: bool,
    pub port: i32,
    pub special: bool,
    pub sslv2: bool,
    pub status: String,
}
/// Full per-endpoint assessment details: key/cert/chain data, protocol and
/// suite support, and the battery of vulnerability checks.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsEndpointDetails {
    pub hostStartTime: i64,
    pub key: LabsKey,
    pub cert: LabsCert,
    pub chain: LabsChain,
    pub protocols: Vec<LabsProtocol>,
    pub suites: LabsSuites,
    pub serverSignature: String,
    pub prefixDelegation: Option<bool>,
    pub nonPrefixDelegation: Option<bool>,
    pub vulnBeast: Option<bool>,
    pub renegSupport: i32,
    pub sessionResumption: i32,
    pub compressionMethods: i32,
    pub supportsNpn: Option<bool>,
    pub npnProtocols: Option<String>,
    pub sessionTickets: i32,
    pub ocspStapling: Option<bool>,
    pub staplingRevocationStatus: Option<i32>,
    pub staplingRevocationErrorMessage: Option<String>,
    pub sniRequired: Option<bool>,
    pub httpStatusCode: i32,
    pub httpForwarding: Option<String>,
    pub forwardSecrecy: i32,
    pub supportsRc4: Option<bool>,
    pub rc4WithModern: Option<bool>,
    pub sims: LabsSimDetails,
    // Named-vulnerability findings (Heartbleed, POODLE, FREAK, Logjam, …).
    pub heartbleed: Option<bool>,
    pub heartbeat: Option<bool>,
    pub openSslCcs: i32,
    pub openSSLLuckyMinus20: i32,
    pub poodle: Option<bool>,
    pub poodleTls: i32,
    pub fallbackScsv: Option<bool>,
    pub freak: Option<bool>,
    pub hasSct: i32,
    pub dhPrimes: Option<Vec<String>>,
    pub dhUsesKnownPrimes: Option<i32>,
    pub dhYsReuse: Option<bool>,
    pub logjam: Option<bool>,
    pub chaCha20Preference: Option<bool>,
    pub hstsPolicy: LabsHstsPolicy,
    pub hstsPreloads: Vec<LabsHstsPreload>,
    pub hpkpPolicy: LabsHpkpPolicy,
    pub hpkpRoPolicy: LabsHpkpPolicy,
    pub drownHosts: Vec<DrownHost>,
    pub drownErrors: Option<bool>,
    pub drownVulnerable: Option<bool>,
}
/// One assessed endpoint (IP address) of a host, with grade and progress.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsEndpoint {
    pub ipAddress: String,
    pub serverName: Option<String>,
    pub statusMessage: Option<String>,
    pub statusDetailsMessage: Option<String>,
    pub grade: Option<String>,
    pub gradeTrustIgnored: Option<String>,
    pub hasWarnings: Option<bool>,
    pub isExceptional: Option<bool>,
    pub progress: i32,
    pub duration: Option<i32>,
    pub eta: i32,
    pub delegation: i32,
    // Only present once the endpoint assessment has produced details.
    pub details: Option<LabsEndpointDetails>,
}
/// A whole-host assessment report covering all of its endpoints.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsReport {
    pub host: String,
    pub port: i32,
    pub protocol: String,
    pub isPublic: Option<bool>,
    pub status: String,
    pub statusMessage: Option<String>,
    pub startTime: i64,
    pub testTime: Option<i64>,
    pub engineVersion: String,
    pub criteriaVersion: String,
    pub cacheExpiryTime: Option<i64>,
    pub endpoints: Vec<LabsEndpoint>,
    pub certHostnames: Option<Vec<String>>,
    pub rawJSON: Option<String>,
}
/// A batch of reports plus the raw response bodies they came from.
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsResults {
    pub reports: Vec<LabsReport>,
    pub responses: Vec<String>,
}
/// Service metadata from the `info` API call (versions and rate limits).
#[allow(non_snake_case)]
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct LabsInfo {
    pub engineVersion: String,
    pub criteriaVersion: String,
    pub maxAssessments: i32,
    pub currentAssessments: i32,
    pub newAssessmentCoolOff: i64,
    pub messages: Vec<String>,
}
|
use crate::common::*;
/// Converts a winit logical size into this crate's `LogicalSize`.
pub(crate) fn logical_size_from_winit(size: winit::dpi::LogicalSize<u32>) -> LogicalSize {
    // Field-for-field copy out of the winit type.
    let winit::dpi::LogicalSize { width, height } = size;
    LogicalSize { width, height }
}
/// Converts a winit logical position into this crate's `LogicalPosition`.
pub(crate) fn logical_pos_from_winit(pos: winit::dpi::LogicalPosition<u32>) -> LogicalPosition {
    LogicalPosition {
        x: pos.x,
        y: pos.y,
    }
}
/// Converts this crate's `LogicalVec2` into a winit logical position.
pub(crate) fn logical_vec_to_winit(vec2: LogicalVec2) -> winit::dpi::LogicalPosition<u32> {
    winit::dpi::LogicalPosition { x: vec2.x, y: vec2.y }
}
|
//! Main spinup for gitbot
extern crate curl;
extern crate rustc_serialize;
mod codegen;
use std::fs::File;
use std::io::Read;
use curl::http;
use rustc_serialize::json;
#[derive(RustcDecodable, RustcEncodable)]
/// Represents a Bearer token from github that is saved to disk
struct Bearer {
    // The OAuth access token sent in the `Authorization` header.
    access_token: String,
    // OAuth scopes granted to the token.
    scopes: Vec<String>,
    // Token type as reported by GitHub (e.g. "bearer").
    token_type: String
}
/// Loads the saved GitHub bearer token and performs one authenticated
/// request against the GitHub users API, printing the response body.
fn main() {
    // Load the saved bearer token from disk. These failures are fatal for a
    // CLI tool, so fail loudly with context instead of a bare unwrap — and do
    // not silently discard the read error as the old `let _ =` did.
    let mut f = File::open("creds.json").expect("unable to open creds.json");
    let mut tok_json = String::new();
    f.read_to_string(&mut tok_json).expect("unable to read creds.json");
    let tok: Bearer = json::decode(&tok_json).expect("creds.json is not a valid Bearer token");
    println!("access token is: {}", tok.access_token);
    // Issue an authenticated GET against the GitHub API.
    let mut hand = http::handle();
    let mut req = hand.get("https://api.github.com/users/allonsy");
    req = add_auth_header(req, &tok);
    let resp = req.exec().expect("HTTP request failed");
    let resp_string = std::string::String::from_utf8(resp.move_body()).expect("response body is not valid UTF-8");
    println!("response is: {}", resp_string);
}
#[allow(dead_code)]
/// Attaches the token `Authorization` header plus the `User-Agent`
/// header that the GitHub API requires on every request.
fn add_auth_header<'a, 'b>(req: http::Request<'a, 'b>, tok: &Bearer) -> http::Request<'a, 'b> {
    let auth_value = format!("token {}", tok.access_token);
    let authed = req.header("Authorization", &auth_value);
    authed.header("User-Agent", "gitbot v0.0.1 scraper")
}
|
//! <https://github.com/EOSIO/eosio.cdt/blob/4985359a30da1f883418b7133593f835927b8046/libraries/eosiolib/core/eosio/symbol.hpp#L234-L337>
use alloc::string::String;
use crate::{NumBytes, Read, SymbolCode, Write};
use core::{
convert::TryFrom,
fmt,
str::FromStr,
};
#[cfg(feature = "std")]
use serde::{Deserialize, Serialize};
/// All possible characters that can be used in EOSIO symbol codes.
pub const SYMBOL_UTF8_CHARS: [u8; 26] = *b"ABCDEFGHIJKLMNOPQRSTUVWXYZ";
/// The maximum allowed length of EOSIO symbol codes.
pub const SYMBOL_LEN_MAX: usize = 7;
/// An error which can be returned when parsing an EOSIO symbol.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum ParseSymbolError {
    /// Empty strings are not valid symbols.
    IsEmpty,
    /// Symbols must be 7 characters or less.
    TooLong,
    /// Symbols can only contain uppercase letters A-Z.
    BadChar(char),
    /// The precision prefix was missing or did not fit in `0..=255`.
    BadPrecision,
}
// Allow `?` to lift a symbol-parse failure into the crate-wide error type.
impl From<ParseSymbolError> for crate::Error {
    fn from(err: ParseSymbolError) -> Self {
        crate::Error::ParseSymbolError(err)
    }
}
impl fmt::Display for ParseSymbolError {
    /// Renders a human-readable description of the parse failure.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Self::IsEmpty => f.write_str("symbol is empty"),
            Self::TooLong => write!(
                f,
                "symbol is too long, must be {} chars or less",
                SYMBOL_LEN_MAX
            ),
            Self::BadChar(c) => write!(f, "symbol contains invalid character '{}'", c),
            Self::BadPrecision => f.write_str("symbol precision is > 255"),
        }
    }
}
/// Attempts to create an EOSIO symbol from a `&str`.
///
/// Thin wrapper over [`symbol_from_chars`], which does the validation.
///
/// # Examples
///
/// ```
/// use eos_chain::{symbol_from_str, ParseSymbolError};
/// assert_eq!(symbol_from_str(4, "EOS"), Ok(1397703940));
/// assert_eq!(symbol_from_str(0, "TGFT"), Ok(361956332544));
/// assert_eq!(symbol_from_str(2, "SYS"), Ok(1398362882));
/// assert_eq!(symbol_from_str(4, "TSt"), Err(ParseSymbolError::BadChar('t')));
/// assert_eq!(symbol_from_str(0, "TESTING"), Ok(5138124851399447552));
/// assert_eq!(symbol_from_str(0, "TESTINGG"), Err(ParseSymbolError::TooLong));
/// ```
#[inline]
pub fn symbol_from_str(
    precision: u8,
    value: &str,
) -> Result<u64, ParseSymbolError> {
    symbol_from_chars(precision, value.chars())
}
/// Attempts to create an EOSIO symbol from an `Iterator`.
///
/// The precision lives in the low byte; each code character occupies the
/// next byte up, in order.
///
/// # Examples
///
/// ```
/// use eos_chain::{symbol_from_chars, ParseSymbolError};
/// assert_eq!(symbol_from_chars(4, "EOS".chars()), Ok(1397703940));
/// assert_eq!(symbol_from_chars(0, "TGFT".chars()), Ok(361956332544));
/// assert_eq!(symbol_from_chars(2, "SYS".chars()), Ok(1398362882));
/// assert_eq!(symbol_from_chars(4, "TSt".chars()), Err(ParseSymbolError::BadChar('t')));
/// assert_eq!(symbol_from_chars(0, "TESTING".chars()), Ok(5138124851399447552));
/// assert_eq!(symbol_from_chars(0, "TESTINGG".chars()), Err(ParseSymbolError::TooLong));
/// ```
#[inline]
pub fn symbol_from_chars<I>(
    precision: u8,
    chars: I,
) -> Result<u64, ParseSymbolError>
where
    I: Iterator<Item = char>,
{
    // TODO check precision. what is max precision?
    let mut packed: u64 = u64::from(precision);
    for (index, ch) in chars.enumerate() {
        if index == SYMBOL_LEN_MAX {
            return Err(ParseSymbolError::TooLong);
        }
        if !ch.is_ascii_uppercase() {
            return Err(ParseSymbolError::BadChar(ch));
        }
        packed |= (ch as u64) << (8 * (index + 1));
    }
    // TODO check if zero, IsEmpty error
    Ok(packed)
}
/// Converts an EOSIO symbol value into a string.
///
/// # Examples
///
/// ```
/// use eos_chain::symbol_to_string;
/// assert_eq!(symbol_to_string(1397703940), "EOS");
/// assert_eq!(symbol_to_string(5138124851399447552), "TESTING");
/// assert_eq!(symbol_to_string(361956332544), "TGFT");
/// assert_eq!(symbol_to_string(1398362882), "SYS");
/// assert_eq!(symbol_to_string(0), "");
/// ```
#[inline]
pub fn symbol_to_string(name: u64) -> String {
    // Decode to the fixed-width buffer, then drop the space padding.
    let bytes = symbol_to_utf8(name);
    let text = String::from_utf8_lossy(&bytes);
    String::from(text.trim())
}
/// Converts an EOSIO symbol into an array of UTF-8 characters.
///
/// Unused trailing positions stay as space padding.
///
/// # Examples
///
/// ```
/// use eos_chain::symbol_to_utf8;
/// assert_eq!(symbol_to_utf8(1397703940), *b"EOS    ");
/// assert_eq!(symbol_to_utf8(5138124851399447552), *b"TESTING");
/// assert_eq!(symbol_to_utf8(361956332544), *b"TGFT   ");
/// assert_eq!(symbol_to_utf8(1398362882), *b"SYS    ");
/// assert_eq!(symbol_to_utf8(0), *b"       ");
/// ```
#[inline]
pub fn symbol_to_utf8(value: u64) -> [u8; SYMBOL_LEN_MAX] {
    let mut out = [b' '; SYMBOL_LEN_MAX];
    // Drop the precision byte first; the code characters follow, low to high.
    let mut rest = value >> 8;
    for slot in out.iter_mut() {
        // Stop once no character bytes remain (leaves the space padding).
        if rest == 0 {
            break;
        }
        *slot = (rest & 0xff) as u8;
        rest >>= 8;
    }
    out
}
/// Gets an EOSIO symbol's precision.
///
/// The precision is stored in the low byte of the packed value.
///
/// # Examples
///
/// ```
/// use eos_chain::symbol_precision;
/// assert_eq!(symbol_precision(1397703940), 4); // 4,EOS
/// assert_eq!(symbol_precision(1398362882), 2); // 2,SYS
/// assert_eq!(symbol_precision(5138124851399447552), 0); // 0,TESTING
/// ```
#[inline]
pub fn symbol_precision(value: u64) -> u8 {
    // Masking to one byte always fits in u8, so a plain cast is exact.
    (value & 0xff) as u8
}
/// Gets an EOSIO symbol's code.
///
/// Drops the low (precision) byte, leaving only the packed characters.
///
/// # Examples
///
/// ```
/// use eos_chain::symbol_code;
/// assert_eq!(symbol_code(1397703940), 5459781); // 4,EOS
/// assert_eq!(symbol_code(1398362882), 5462355); // 2,SYS
/// assert_eq!(symbol_code(5138124851399447552), 20070800200779092); // 0,TESTING
/// ```
#[inline]
pub const fn symbol_code(value: u64) -> u64 {
    // Unsigned division by 256 is identical to `value >> 8`.
    value / 256
}
/// Gets the length of an EOSIO symbol's code
///
/// Counts the non-zero character bytes above the precision byte.
///
/// # Examples
///
/// ```
/// use eos_chain::symbol_code_length;
/// assert_eq!(symbol_code_length(1397703940), 3); // 4,EOS
/// assert_eq!(symbol_code_length(1398362882), 3); // 2,SYS
/// assert_eq!(symbol_code_length(5138124851399447552), 7); // 0,TESTING
/// ```
#[inline]
pub fn symbol_code_length(symbol: u64) -> usize {
    // Skip the precision byte, then count bytes until a zero terminator.
    let mut remaining = symbol >> 8;
    let mut length = 0;
    while remaining & 0xff != 0 && length <= SYMBOL_LEN_MAX {
        length += 1;
        remaining >>= 8;
    }
    length
}
/// Stores information about a symbol, the symbol can be 7 characters long.
// Newtype over the packed u64: precision in the low byte, up to seven
// uppercase code characters in the bytes above it.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Default, Read, Write, NumBytes, Hash, PartialOrd, Ord)]
#[cfg_attr(feature = "std", derive(Deserialize, Serialize))]
#[eosio_core_root_path = "crate"]
pub struct Symbol(u64);
impl Symbol {
/// Construct a new symbol given a value.
#[inline]
pub const fn new(value: u64) -> Self {
Self(value)
}
/// Construct a new symbol given a `u8` precision and `SymbolCode`.
#[inline]
pub fn new_with_code(precision: u8, code: SymbolCode) -> Self {
let mut value = code.as_u64() << 8;
value |= u64::from(precision);
Self(value)
}
/// This symbol's precision
#[inline]
pub fn precision(self) -> u8 {
symbol_precision(self.as_u64())
}
/// Returns representation of symbol name
#[inline]
pub fn code(self) -> SymbolCode {
symbol_code(self.as_u64()).into()
}
/// TODO docs
#[inline]
pub const fn as_u64(self) -> u64 {
self.0
}
/// Is this symbol valid
#[inline]
pub fn is_valid(self) -> bool {
self.code().is_valid()
}
}
impl fmt::Display for Symbol {
    // Formats as "<precision>,<CODE>", e.g. "4,EOS".
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{},{}", self.precision(), self.code())
    }
}
impl FromStr for Symbol {
    type Err = ParseSymbolError;
    /// Parses a `"<precision>,<CODE>"` string (e.g. `"4,EOS"`) into a symbol.
    ///
    /// # Errors
    ///
    /// - [`ParseSymbolError::IsEmpty`] when the input ends before the `,`
    ///   separator, or a space interrupts the precision digits.
    /// - [`ParseSymbolError::BadChar`] for a non-digit before the comma or an
    ///   invalid code character after it.
    /// - [`ParseSymbolError::BadPrecision`] when the precision does not fit
    ///   in `0..=255`.
    #[inline]
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let value = s.trim();
        let mut chars = value.chars();
        // Accumulate the decimal precision prefix until the ',' separator.
        let mut precision: Option<u16> = None;
        loop {
            match (precision, chars.next()) {
                // The very first character must start the precision number.
                (None, Some(c)) => match c.to_digit(10) {
                    Some(p) => precision = Some(p as u16),
                    None => return Err(ParseSymbolError::BadChar(c)),
                },
                (Some(pre), Some(c)) => {
                    if let Some(p) = c.to_digit(10) {
                        // Checked arithmetic: the old `pre *= 10; pre += p`
                        // could overflow the u16 accumulator on long digit
                        // runs (panic in debug, silent wrap — and thus bogus
                        // acceptance — in release). Overflow is now an error.
                        let next = pre
                            .checked_mul(10)
                            .and_then(|v| v.checked_add(p as u16))
                            .ok_or(ParseSymbolError::BadPrecision)?;
                        precision = Some(next);
                    } else {
                        match c {
                            ',' => break,
                            ' ' => return Err(ParseSymbolError::IsEmpty),
                            _ => return Err(ParseSymbolError::BadChar(c)),
                        }
                    }
                }
                // Input ran out before the ',' separator was seen.
                _ => return Err(ParseSymbolError::IsEmpty),
            }
        }
        // Single range check replaces the old duplicated match + ok_or_else.
        let precision = match precision {
            Some(p) if p <= 255 => p as u8,
            _ => return Err(ParseSymbolError::BadPrecision),
        };
        // The remaining characters after the comma form the code.
        let symbol = symbol_from_chars(precision, chars)?;
        Ok(symbol.into())
    }
}
// Fallible conversions delegate to the `FromStr` parser above.
impl TryFrom<&str> for Symbol {
    type Error = ParseSymbolError;
    #[inline]
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        Self::from_str(value)
    }
}
impl TryFrom<String> for Symbol {
    type Error = ParseSymbolError;
    #[inline]
    fn try_from(value: String) -> Result<Self, Self::Error> {
        Self::try_from(value.as_str())
    }
}
// Infallible: any u64 is accepted as a raw packed symbol value.
impl From<u64> for Symbol {
    #[inline]
    fn from(n: u64) -> Self {
        Self(n)
    }
}
// Compare directly against the raw packed representation.
impl PartialEq<u64> for Symbol {
    #[inline]
    fn eq(&self, other: &u64) -> bool {
        self.as_u64() == *other
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloc::string::ToString;
    // NOTE(review): several expected values below are produced by
    // `Symbol::from_str` itself, so those cases are round-trip consistency
    // checks rather than comparisons against independent constants.
    #[test]
    fn from_int() {
        let symbol = Symbol::from(361_956_332_546);
        assert_eq!(symbol.precision(), 2);
        let name = symbol.code();
        let num: u64 = name.into();
        assert_eq!(num, 1_413_891_924);
    }
    #[test]
    fn is_valid() {
        let symbol = Symbol::from(361_956_332_546);
        assert_eq!(symbol.is_valid(), true);
    }
    #[test]
    fn to_string() {
        fn test(value: u64, expected: &str) {
            assert_eq!(Symbol::from(value).to_string(), expected);
        }
        test(Symbol::from_str("2,TGFT").unwrap().as_u64(), "2,TGFT");
        test(Symbol::from_str("0,TGFT").unwrap().as_u64(), "0,TGFT");
        test(Symbol::from_str("4,EOS").unwrap().as_u64(), "4,EOS");
    }
    #[test]
    fn code_to_string() {
        fn test(value: u64, expected: &str) {
            assert_eq!(Symbol::from(value).code().to_string(), expected);
        }
        test(Symbol::from_str("4,EOS").unwrap().as_u64(), "EOS");
        test(Symbol::from_str("0,TGFT").unwrap().as_u64(), "TGFT");
        test(Symbol::from_str("9,SYS").unwrap().as_u64(), "SYS");
    }
    #[test]
    fn from_str() {
        use core::str::FromStr;
        fn test_ok(input: &str, expected: u64) {
            let ok = Ok(expected.into());
            assert_eq!(Symbol::try_from(input), ok);
            assert_eq!(Symbol::try_from(input.to_string()), ok);
            assert_eq!(Symbol::from_str(input), ok);
        }
        fn test_err(input: &str, err: ParseSymbolError) {
            let err = Err(err);
            assert_eq!(Symbol::try_from(input), err);
            assert_eq!(Symbol::try_from(input.to_string()), err);
            assert_eq!(Symbol::from_str(input), err);
        }
        test_ok("4,EOS", Symbol::from_str("4,EOS").unwrap().as_u64());
        test_ok("0,TST", Symbol::from_str("0,TST").unwrap().as_u64());
        test_ok("9,TGFT", Symbol::from_str("9,TGFT").unwrap().as_u64());
        // Leading/trailing whitespace is trimmed before parsing.
        test_ok(" 4,EOS ", Symbol::from_str("4,EOS").unwrap().as_u64());
        // Interior spaces are not tolerated.
        test_err("4, EOS", ParseSymbolError::BadChar(' '));
        test_err(" 4, EOS ", ParseSymbolError::BadChar(' '));
        test_err("A", ParseSymbolError::BadChar('A'));
        test_err("a", ParseSymbolError::BadChar('a'));
    }
    #[test]
    fn code_from_str() {
        use core::str::FromStr;
        fn test_ok(input: &str, expected: u64) {
            let ok = Ok(Symbol::from(expected).code());
            assert_eq!(SymbolCode::from_str(input), ok);
            assert_eq!(SymbolCode::try_from(input), ok);
        }
        fn test_err(input: &str, expected: ParseSymbolError) {
            let err = Err(expected);
            assert_eq!(SymbolCode::from_str(input), err);
            assert_eq!(SymbolCode::try_from(input), err);
        }
        test_ok("TST", Symbol::from_str("0,TST").unwrap().as_u64());
        test_ok("EOS", Symbol::from_str("4,EOS").unwrap().as_u64());
        test_ok("TGFT", Symbol::from_str("0,TGFT").unwrap().as_u64());
        test_err("tst", ParseSymbolError::BadChar('t'));
    }
    #[test]
    fn symbol_from_code_should_work() {
        let symbol = Symbol::from_str("4,EOS");
        assert!(symbol.is_ok());
        let code = SymbolCode::try_from("EOS");
        assert!(code.is_ok());
        let rhs = Symbol::new_with_code(4, code.unwrap());
        assert_eq!(symbol.unwrap(), rhs);
        // Same code but different precision must not compare equal.
        let rhs = Symbol::new_with_code(0, code.unwrap());
        assert_ne!(symbol.unwrap(), rhs);
        let rhs = Symbol::new_with_code(8, code.unwrap());
        assert_ne!(symbol.unwrap(), rhs);
    }
}
|
use anyhow::Result;
use std::{iter::Flatten, vec};
/// A circular collection backed by a `Vec`; logically, index 0 follows
/// the last element (the `impl` below wraps indices with `%`).
#[derive(Clone, Debug)]
struct Ring<T> {
    items: Vec<T>,
}
impl<T: Clone + PartialEq> Ring<T> {
    /// Index of `to_find` in the backing vec; panics when absent.
    fn position(&self, to_find: &T) -> usize {
        // panic if we can't find the requested item
        self.items
            .iter()
            .position(|item| *item == *to_find)
            .unwrap_or_else(|| panic!("Start item not found in ring"))
    }
    /// Iterates over every item exactly once, starting at `start` and
    /// wrapping past the end of the backing vec.
    fn iter_from(&self, start: &T) -> Flatten<vec::IntoIter<&[T]>> {
        let position = self.position(start);
        // `first` is the suffix beginning at `start`; `last` is the prefix
        // that logically follows it after wrap-around.
        let (last, first) = self.items.split_at(position);
        vec![first, last].into_iter().flatten()
    }
    /// Removes and returns the `amount` items immediately following `after`,
    /// wrapping around the end of the vec when necessary.
    fn take_after(&mut self, after: &T, amount: usize) -> Vec<T> {
        assert!(amount <= self.items.len());
        let position = (self.position(after) + 1) % self.items.len();
        if position + amount < self.items.len() {
            // Run fits inside the vec: cut it out and re-join the tail.
            let mut take = self.items.split_off(position);
            let mut second = take.split_off(amount);
            self.items.append(&mut second);
            take
        } else {
            // Run wraps past the end: rotate everything from `position`
            // to the front, take `amount`, and keep the rest. Note this
            // leaves `items` rotated, which is fine for ring semantics.
            let mut take = self.items.split_off(position);
            take.append(&mut self.items);
            self.items = take.split_off(amount);
            take
        }
    }
    /// Inserts `to_insert` immediately after `after`; appending at the end
    /// when `after` is the last element.
    fn insert_after(&mut self, after: &T, mut to_insert: Vec<T>) {
        let position = (self.position(after) + 1) % self.items.len();
        if position == 0 {
            self.items.append(&mut to_insert);
        } else {
            let mut tail = self.items.split_off(position);
            self.items.append(&mut to_insert);
            self.items.append(&mut tail);
        }
    }
}
fn do_the_thing(input: &str, moves: usize) -> String {
let mut ring = Ring {
items: input.chars().map(|c| c.to_digit(10).unwrap()).collect(),
};
let mut current = ring.items[0];
let highest = ring.items.iter().copied().max().unwrap();
for _ in 0..moves {
let lifted = ring.take_after(¤t, 3);
let mut destination = current;
loop {
destination = ((destination + highest - 2) % highest) + 1;
if ring.items.contains(&destination) {
break;
}
}
ring.insert_after(&destination, lifted);
current = ring.iter_from(¤t).copied().nth(1).unwrap();
}
ring.iter_from(&1)
.skip(1)
.map(|n| n.to_string())
.collect::<String>()
}
/// Runs the puzzle against the fixed input and prints the answer.
fn main() -> Result<()> {
    let answer = do_the_thing("872495136", 100);
    println!("{}", answer);
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use test_case::test_case;
    // End-to-end cases for the full game.
    #[test_case("389125467", 10 => "92658374")]
    #[test_case("389125467", 100 => "67384529")]
    fn first(input: &str, moves: usize) -> String {
        do_the_thing(&input, moves)
    }
    #[test]
    fn ring_iter_from() {
        let items = vec![0, 1, 2, 8, 9, 10];
        let ring = Ring { items };
        assert_eq!(
            ring.iter_from(&0).copied().collect::<Vec<_>>(),
            vec![0, 1, 2, 8, 9, 10]
        );
        // Starting mid-vec wraps around to the front.
        assert_eq!(
            ring.iter_from(&8).copied().collect::<Vec<_>>(),
            vec![8, 9, 10, 0, 1, 2]
        );
    }
    // Each case returns (taken items, remaining ring contents).
    // from the middle
    #[test_case(0 => (vec![1, 2, 8], vec![0, 9, 10]))]
    #[test_case(1 => (vec![2, 8, 9], vec![0, 1, 10]))]
    // exactly the end
    #[test_case(2 => (vec![8, 9, 10], vec![0, 1, 2]))]
    // wrap around the end
    #[test_case(8 => (vec![9, 10, 0], vec![1, 2, 8]))]
    #[test_case(9 => (vec![10, 0, 1], vec![2, 8, 9]))]
    // exactly from the start
    #[test_case(10 => (vec![0, 1, 2], vec![8, 9, 10]))]
    fn ring_take(after: usize) -> (Vec<usize>, Vec<usize>) {
        let mut ring = Ring {
            items: vec![0, 1, 2, 8, 9, 10],
        };
        let take = ring.take_after(&after, 3);
        (take, ring.items)
    }
    #[test_case(0 => vec![0, 8, 9, 1, 2])]
    #[test_case(1 => vec![0, 1, 8, 9, 2])]
    #[test_case(2 => vec![0, 1, 2, 8, 9])]
    fn ring_insert(after: usize) -> Vec<usize> {
        let to_insert = vec![8, 9];
        let mut ring = Ring {
            items: vec![0, 1, 2],
        };
        ring.insert_after(&after, to_insert);
        ring.items
    }
}
|
use super::osgood;
use super::V8;
use std::convert;
use std::env;
use std::ffi::CString;
use std::os::raw::c_char;
use std::os::raw::c_int;
mod local;
pub use local::*;
mod isolate;
pub use isolate::*;
mod handle_scope;
pub use handle_scope::*;
mod functioncallbackinfo;
pub use functioncallbackinfo::*;
mod script;
pub use script::*;
mod module;
pub use module::*;
mod context;
pub use context::*;
mod array;
pub use array::*;
mod object;
pub use object::*;
mod function;
pub use function::*;
mod string;
pub use string::*;
mod number;
pub use number::*;
mod array_buffer;
pub use array_buffer::*;
mod exception;
pub use exception::*;
mod private;
pub use private::*;
/// This is a convenience `None`, which can be used by reference as a "null" in arguments to v8
/// functions.
// The concrete `Option<u16>` type is presumably arbitrary — only the `None`
// discriminant matters to callers (NOTE(review): confirm at the call sites).
pub const NULL: Option<u16> = None;
pub fn platform_init(v8_flags: &str) {
let args: Vec<std::string::String> = env::args().collect();
let name = format!("{}\0", args[0]).as_ptr() as *const c_char;
let v8_flags = normalize_v8_flags(v8_flags);
let flags_len = v8_flags.len() as c_int;
let flags = CString::new(v8_flags).unwrap();
let flags = flags.as_ptr() as *const c_char;
unsafe {
osgood::platform_init(name, flags, flags_len);
}
}
/// Normalizes a space-separated flag string so every token carries a
/// `--` prefix; empty tokens (from repeated spaces) are dropped.
fn normalize_v8_flags(flags: &str) -> std::string::String {
    let mut normalized: Vec<std::string::String> = Vec::new();
    for token in flags.split(' ') {
        if token.is_empty() {
            continue;
        }
        if token.starts_with("--") {
            normalized.push(token.to_owned());
        } else {
            normalized.push(format!("--{}", token));
        }
    }
    normalized.join(" ")
}
/// Tears down the v8 platform via the native binding.
pub fn platform_dispose() {
    // SAFETY: thin FFI call with no arguments; soundness relies on the
    // native `osgood::platform_dispose` contract (init called first —
    // NOTE(review): confirm call ordering at the call sites).
    unsafe {
        osgood::platform_dispose();
    }
}
/// Pumps pending messages for the current isolate via the native binding.
pub fn process_messages() {
    // SAFETY: passes the raw isolate handle to the native side; validity of
    // that handle is the `Isolate::raw()` contract.
    unsafe {
        osgood::process_messages(Isolate::raw());
    }
}
|
// svd2rust-generated accessors for the SDCMR register.
#[doc = "Register `SDCMR` reader"]
pub type R = crate::R<SDCMR_SPEC>;
#[doc = "Register `SDCMR` writer"]
pub type W = crate::W<SDCMR_SPEC>;
#[doc = "Command mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum MODE_AW {
    #[doc = "0: Normal Mode"]
    Normal = 0,
    #[doc = "1: Clock Configuration Enable"]
    ClockConfigurationEnable = 1,
    #[doc = "2: PALL (All Bank Precharge) command"]
    Pall = 2,
    #[doc = "3: Auto-refresh command"]
    AutoRefreshCommand = 3,
    #[doc = "4: Load Mode Register"]
    LoadModeRegister = 4,
    #[doc = "5: Self-refresh command"]
    SelfRefreshCommand = 5,
    #[doc = "6: Power-down command"]
    PowerDownCommand = 6,
}
// Converts the variant into its raw 3-bit field value.
impl From<MODE_AW> for u8 {
    #[inline(always)]
    fn from(variant: MODE_AW) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for MODE_AW {
    type Ux = u8;
}
#[doc = "Field `MODE` writer - Command mode"]
pub type MODE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O, MODE_AW>;
// Named setters for each MODE_AW variant.
impl<'a, REG, const O: u8> MODE_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "Normal Mode"]
    #[inline(always)]
    pub fn normal(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_AW::Normal)
    }
    #[doc = "Clock Configuration Enable"]
    #[inline(always)]
    pub fn clock_configuration_enable(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_AW::ClockConfigurationEnable)
    }
    #[doc = "PALL (All Bank Precharge) command"]
    #[inline(always)]
    pub fn pall(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_AW::Pall)
    }
    #[doc = "Auto-refresh command"]
    #[inline(always)]
    pub fn auto_refresh_command(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_AW::AutoRefreshCommand)
    }
    #[doc = "Load Mode Register"]
    #[inline(always)]
    pub fn load_mode_register(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_AW::LoadModeRegister)
    }
    #[doc = "Self-refresh command"]
    #[inline(always)]
    pub fn self_refresh_command(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_AW::SelfRefreshCommand)
    }
    #[doc = "Power-down command"]
    #[inline(always)]
    pub fn power_down_command(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_AW::PowerDownCommand)
    }
}
#[doc = "Command target bank 2\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
// NOTE(review): the variant docs below say "Bank 1" although this is the
// bank-2 field — likely an SVD typo (this writer type is also reused for
// CTB1 below). Confirm against the device reference manual before editing,
// since the text comes straight from the vendor SVD.
pub enum CTB2_AW {
    #[doc = "0: Command not issued to SDRAM Bank 1"]
    NotIssued = 0,
    #[doc = "1: Command issued to SDRAM Bank 1"]
    Issued = 1,
}
// Converts the variant into its raw single-bit value.
impl From<CTB2_AW> for bool {
    #[inline(always)]
    fn from(variant: CTB2_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `CTB2` writer - Command target bank 2"]
pub type CTB2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CTB2_AW>;
impl<'a, REG, const O: u8> CTB2_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Command not issued to SDRAM Bank 1"]
    #[inline(always)]
    pub fn not_issued(self) -> &'a mut crate::W<REG> {
        self.variant(CTB2_AW::NotIssued)
    }
    #[doc = "Command issued to SDRAM Bank 1"]
    #[inline(always)]
    pub fn issued(self) -> &'a mut crate::W<REG> {
        self.variant(CTB2_AW::Issued)
    }
}
#[doc = "Field `CTB1` writer - Command target bank 1"]
pub use CTB2_W as CTB1_W;
#[doc = "Field `NRFS` reader - Number of Auto-refresh"]
pub type NRFS_R = crate::FieldReader;
#[doc = "Field `NRFS` writer - Number of Auto-refresh"]
pub type NRFS_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 4, O>;
#[doc = "Field `MRD` reader - Mode Register definition"]
pub type MRD_R = crate::FieldReader<u16>;
#[doc = "Field `MRD` writer - Mode Register definition"]
pub type MRD_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 13, O, u16>;
// Field readers: extract each field from the raw register value.
impl R {
    #[doc = "Bits 5:8 - Number of Auto-refresh"]
    #[inline(always)]
    pub fn nrfs(&self) -> NRFS_R {
        NRFS_R::new(((self.bits >> 5) & 0x0f) as u8)
    }
    #[doc = "Bits 9:21 - Mode Register definition"]
    #[inline(always)]
    pub fn mrd(&self) -> MRD_R {
        MRD_R::new(((self.bits >> 9) & 0x1fff) as u16)
    }
}
// Field writers: each returns a proxy positioned at the field's bit offset.
impl W {
    #[doc = "Bits 0:2 - Command mode"]
    #[inline(always)]
    #[must_use]
    pub fn mode(&mut self) -> MODE_W<SDCMR_SPEC, 0> {
        MODE_W::new(self)
    }
    #[doc = "Bit 3 - Command target bank 2"]
    #[inline(always)]
    #[must_use]
    pub fn ctb2(&mut self) -> CTB2_W<SDCMR_SPEC, 3> {
        CTB2_W::new(self)
    }
    #[doc = "Bit 4 - Command target bank 1"]
    #[inline(always)]
    #[must_use]
    pub fn ctb1(&mut self) -> CTB1_W<SDCMR_SPEC, 4> {
        CTB1_W::new(self)
    }
    #[doc = "Bits 5:8 - Number of Auto-refresh"]
    #[inline(always)]
    #[must_use]
    pub fn nrfs(&mut self) -> NRFS_W<SDCMR_SPEC, 5> {
        NRFS_W::new(self)
    }
    #[doc = "Bits 9:21 - Mode Register definition"]
    #[inline(always)]
    #[must_use]
    pub fn mrd(&mut self) -> MRD_W<SDCMR_SPEC, 9> {
        MRD_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "SDRAM Command Mode register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sdcmr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sdcmr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SDCMR_SPEC;
// 32-bit register backing type.
impl crate::RegisterSpec for SDCMR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`sdcmr::R`](R) reader structure"]
impl crate::Readable for SDCMR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`sdcmr::W`](W) writer structure"]
impl crate::Writable for SDCMR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SDCMR to value 0"]
impl crate::Resettable for SDCMR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/// Demonstrates `u8` wrap-around behavior.
fn main() {
    // FIX: `u8` holds 0..=255, and the original literals 256/257/258 are
    // rejected at compile time (`overflowing_literals` is a hard error) —
    // the code never compiled. Demonstrate the intended wrap-around with
    // explicit wrapping arithmetic instead, preserving the printed values.
    let age: u8 = 255;
    let weight: u8 = 255u8.wrapping_add(1); // 256 wraps to 0
    let height: u8 = 255u8.wrapping_add(2); // 257 wraps to 1
    let score: u8 = 255u8.wrapping_add(3); // 258 wraps to 2
    println!("age is {} ", age);
    println!("weight is {}", weight);
    println!("height is {}", height);
    println!("score is {}", score);
}
/// Prints a number and its successor via `add_numbers`.
fn main() {
    let guess: u32 = 1;
    println!("{}", guess);
    let incremented = add_numbers(guess);
    println!("Add numbers {}", incremented);
}
/// Returns `x + 1` (panics on overflow in debug builds, like any `+`).
fn add_numbers(x: u32) -> u32 {
    1 + x
}
/// Reverses the decimal digits of `x`, preserving the sign; returns 0 when
/// the reversed value would not fit in an `i32` (e.g. `reverse(2147483647)`).
pub fn reverse(x: i32) -> i32 {
    // Work in i64 so negating i32::MIN and accumulating digits can't overflow.
    let mut remaining = (x as i64).abs();
    let sign: i64 = if x < 0 { -1 } else { 1 };
    let mut reversed: i64 = 0;
    while remaining > 0 {
        // `% 10` replaces the original hand-rolled `big_x - (big_x / 10) * 10`.
        reversed = reversed * 10 + remaining % 10;
        remaining /= 10;
    }
    reversed *= sign;
    // Clamp out-of-range results to 0 per the function's contract.
    if reversed < i32::min_value() as i64 || reversed > i32::max_value() as i64 {
        0
    } else {
        reversed as i32
    }
}
fn main() {
    // Smoke-tests for `reverse`, including the overflow cases that clamp to 0.
    assert_eq!(reverse(123), 321);
    assert_eq!(reverse(-123), -321);
    assert_eq!(reverse(2147483647), 0);
    assert_eq!(reverse(-2147483648), 0);
    assert_eq!(reverse(0), 0);
    assert_eq!(reverse(1), 1);
    assert_eq!(reverse(-1), -1);
}
|
use std::i32;
/// Reads `n` checkpoints of `(t, x, y)` and decides whether each can be
/// reached from the previous one moving one unit per time step.
fn main() {
    proconio::input! {
        n: usize,
        a: [[i32; 3]; n]
    }
    let mut prev = (0, 0, 0);
    let mut feasible = true;
    for step in a {
        let dt = step[0] - prev.0;
        let dx = step[1] - prev.1;
        let dy = step[2] - prev.2;
        let slack = dt - (dx.abs() + dy.abs());
        // Unreachable if there isn't enough time, or if the leftover time
        // has odd parity (you can't wait in place for an odd duration).
        if slack < 0 || slack % 2 == 1 {
            feasible = false;
            break;
        }
        prev = (step[0], step[1], step[2]);
    }
    println!("{}", if feasible { "Yes" } else { "No" });
}
|
use crate::schema::*;
// Convenience aliases binding websub-sub's generic diesel2 connection and
// pool to this crate's `subscriptions` table columns.
pub type Connection<C> = websub_sub::db::diesel2::Connection<
    C,
    subscriptions::id,
    subscriptions::hub,
    subscriptions::topic,
    subscriptions::secret,
    subscriptions::expires_at,
>;
pub type Pool<M> = websub_sub::db::diesel2::Pool<
    M,
    subscriptions::id,
    subscriptions::hub,
    subscriptions::topic,
    subscriptions::secret,
    subscriptions::expires_at,
>;
|
use crate::render::svg::*;
use crate::shape::point::Point;
use svg::Node;
/// Area shape.
#[derive(Clone)]
pub struct Area {
    // Polygon vertices in draw order; also rendered as individual points.
    points: Vec<Point>,
    // SVG `fill` attribute value for the enclosed path.
    fill_color: String,
    // SVG `stroke` attribute value for the path outline.
    stroke_color: String,
}
impl Area {
    /// Create a new Area.
    pub fn new(points: Vec<Point>, fill_color: &str, stroke_color: &str) -> Self {
        Self {
            points,
            fill_color: fill_color.to_string(),
            stroke_color: stroke_color.to_string(),
        }
    }
    /// Get area SVG representation.
    pub fn to_svg(&self) -> svg::node::element::Group {
        let mut res = svg::node::element::Group::new().set(CLASS_ATTR, CLASS_AREA);
        let mut data = svg::node::element::path::Data::new();
        // Build the closed path through all points; the first point anchors
        // the path, subsequent ones extend it with line segments.
        for (point_idx, point) in self.points.iter().enumerate() {
            if point_idx == 0 {
                data = data.move_to((point.x(), point.y()));
            } else {
                data = data.line_to((point.x(), point.y()));
            }
            // NOTE(review): points are appended here AND again after the
            // path below, so every point is rendered twice. The unit test
            // codifies this doubled output — confirm whether the first
            // append is intentional (e.g. for z-ordering) before changing.
            res.append(point.to_svg());
        }
        data = data.close();
        let area = svg::node::element::Path::new()
            .set(FILL_ATTR, self.fill_color.as_ref())
            .set(STROKE_ATTR, self.stroke_color.as_ref())
            .set(D_ATTR, data);
        res.append(area);
        for point in self.points.iter() {
            res.append(point.to_svg());
        }
        res
    }
}
impl Default for Area {
fn default() -> Self {
Self {
points: Vec::new(),
fill_color: String::new(),
stroke_color: String::new(),
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::shape::point::PointType;
    // Golden-output test: the expected string below locks in the current
    // `to_svg` rendering, including the duplicated point groups (each point
    // appears both before and after the `<path>` element).
    #[test]
    fn bar_basic() {
        let expected_svg_group = r##"<g class="area">
<g class="point" transform="translate(10,20)">
<circle cx="0" cy="0" fill="#ed5d74" r="1" stroke="#e6121f"/>
<text dy=".35em" fill="#080808" font-family="sans-serif" font-size="14px" text-anchor="middle" x="0" y="-13">
12
</text>
</g>
<g class="point" transform="translate(30,40)">
<circle cx="0" cy="0" fill="#ed5d74" r="1" stroke="#e6121f"/>
<text dy=".35em" fill="#080808" font-family="sans-serif" font-size="14px" text-anchor="middle" x="0" y="-13">
12
</text>
</g>
<path d="M10,20 L30,40 z" fill="#e93620" stroke="#370725"/>
<g class="point" transform="translate(10,20)">
<circle cx="0" cy="0" fill="#ed5d74" r="1" stroke="#e6121f"/>
<text dy=".35em" fill="#080808" font-family="sans-serif" font-size="14px" text-anchor="middle" x="0" y="-13">
12
</text>
</g>
<g class="point" transform="translate(30,40)">
<circle cx="0" cy="0" fill="#ed5d74" r="1" stroke="#e6121f"/>
<text dy=".35em" fill="#080808" font-family="sans-serif" font-size="14px" text-anchor="middle" x="0" y="-13">
12
</text>
</g>
</g>"##;
        let area_svg = Area::new(
            vec![
                Point::new(
                    10_f32,
                    20_f32,
                    PointType::Circle,
                    1,
                    &12.to_string(),
                    "#ed5d74",
                    "#e6121f",
                ),
                Point::new(
                    30_f32,
                    40_f32,
                    PointType::Circle,
                    1,
                    &12.to_string(),
                    "#ed5d74",
                    "#e6121f",
                ),
            ],
            "#e93620",
            "#370725",
        )
        .to_svg();
        assert_eq!(area_svg.to_string(), expected_svg_group);
    }
}
|
use core::ptr;
use core::mem;
// Base address of the AES engine's MMIO register block.
const AES_BASE: u32 = 0x10009000u32;
// Byte offsets of the AES registers relative to AES_BASE.
#[derive(Clone, Copy)]
#[allow(non_camel_case_types)]
#[allow(dead_code)]
enum Reg {
    CNT = 0x000,
    BLK_CNT = 0x006,
    FIFO_IN = 0x008,
    FIFO_OUT = 0x00C,
    KEY_SEL = 0x010,
    KEY_CNT = 0x011,
    CTR = 0x020,
    TWL_KEY0 = 0x040,
    TWL_KEY1 = 0x070,
    TWL_KEY2 = 0x0A0,
    TWL_KEY3 = 0x0D0,
    KEY_FIFO = 0x100,
    KEYX_FIFO = 0x104,
    KEYY_FIFO = 0x108
}
// Sub-offsets within a TWL keyslot's 0x30-byte register window.
#[derive(Clone, Copy)]
#[allow(dead_code)]
enum TwlKeyReg {
    NORMAL = 0x00,
    KEYX = 0x10,
    KEYY = 0x20
}
// Bitfield layout of the 32-bit AES CNT register (fields are bit ranges).
// Comments are kept outside the macro invocation to avoid assuming the
// `bf!` grammar accepts them.
bf!(CntReg[u32] {
    fifo_in_count: 0:4,
    fifo_out_count: 5:9,
    flush_fifo_in: 10:10,
    flush_fifo_out: 11:11,
    fifo_in_dma_size: 12:13,
    fifo_out_dma_size: 14:15,
    mac_size: 16:18,
    mac_source_reg: 20:20,
    mac_verified: 21:21,
    out_big_endian: 22:22,
    in_big_endian: 23:23,
    out_normal_order: 24:24,
    in_normal_order: 25:25,
    update_keyslot: 26:26,
    mode: 27:29,
    enable_irq: 30:30,
    busy: 31:31
});
// Bitfield layout of the 8-bit KEY_CNT register.
bf!(KeyCntReg[u8] {
    keyslot: 0:5,
    force_dsi_keygen: 6:6,
    enable_fifo_flush: 7:7
});
// Volatile MMIO read of the register at `AES_BASE + reg`.
#[inline(never)]
fn read_reg<T: Copy>(reg: Reg) -> T {
    // SAFETY: reads a fixed MMIO address inside the AES register block;
    // `T` must match the register's width (caller's responsibility).
    unsafe { ptr::read_volatile((AES_BASE + reg as u32) as *const T) }
}
// Volatile MMIO write of the register at `AES_BASE + reg`.
#[inline(never)]
fn write_reg<T: Copy>(reg: Reg, val: T) {
    // SAFETY: same MMIO contract as `read_reg`.
    unsafe { ptr::write_volatile((AES_BASE + reg as u32) as *mut T, val); }
}
// Volatile write into one of the four TWL keyslot windows.
#[inline(never)]
fn write_reg_twlkey<T: Copy>(keyslot: u8, target: TwlKeyReg, val: T) {
    // Only keyslots 0-3 have TWL key registers (TWL_KEY0..TWL_KEY3).
    assert!(keyslot < 4);
    let reg = Reg::TWL_KEY0 as u32 + (keyslot as u32) * 0x30;
    // SAFETY: `keyslot < 4` keeps the address within the TWL key windows.
    unsafe { ptr::write_volatile((AES_BASE + reg + target as u32) as *mut T, val); }
}
// Block cipher modes; mapped to the CNT `mode` field in `crypt128`
// (CCM=0, CTR=2, CBC=4, ECB=6, +1 when encrypting).
#[derive(Clone, Copy)]
pub enum Mode {
    CCM,
    CTR,
    CBC,
    ECB
}
// Whether `crypt128` encrypts or decrypts the buffer in place.
#[derive(Clone, Copy)]
pub enum Direction {
    Encrypt,
    Decrypt
}
/// Iterates over a byte slice as 4-byte arrays; the slice length must be
/// a multiple of 4.
fn byte4iter<'a>(slice: &'a [u8]) -> impl Iterator<Item = [u8; 4]> + 'a {
    assert!(slice.len() % 4 == 0);
    // With the length assert above, `chunks_exact` visits the same chunks
    // as `chunks` did (there is never a remainder).
    slice
        .chunks_exact(4)
        .map(|quad| [quad[0], quad[1], quad[2], quad[3]])
}
/// Serializes a `u128` to its 16 big-endian bytes.
fn u128_bytes(num: u128) -> [u8; 0x10] {
    // The original hand-rolled loop (fill from the back, shifting by 8)
    // is exactly big-endian serialization.
    num.to_be_bytes()
}
/// Deserializes 16 big-endian bytes into a `u128`; panics if `data` is not
/// exactly 16 bytes long.
fn u128_from_bytes(data: &[u8]) -> u128 {
    assert!(data.len() == 16);
    // The original fold (shift left 8, OR in each byte) is exactly
    // big-endian deserialization.
    let mut raw = [0u8; 16];
    raw.copy_from_slice(data);
    u128::from_be_bytes(raw)
}
// Returns Ok(blocks) if aligned to block length, Err(rounded_up) if not aligned
pub fn buf_num_blocks(buf: &[u8]) -> Result<usize, usize> {
    // One AES block is 16 bytes.
    if buf.len() % 16 == 0 {
        Ok(buf.len() >> 4)
    } else {
        // FIX: the old `buf.len() >> 4 + 1` parsed as `buf.len() >> 5`
        // because `+` binds tighter than `>>` in Rust; the intended value
        // is the block count rounded up.
        Err((buf.len() >> 4) + 1)
    }
}
/// Advances a 16-byte big-endian counter by `blocks` and returns the
/// resulting counter bytes.
pub fn ctr_add(ctr: &[u8], blocks: usize) -> [u8; 0x10] {
    // Interpret, bump, re-serialize.
    u128_bytes(u128_from_bytes(ctr) + blocks as u128)
}
// Builder-style configuration for one AES operation on the hardware engine.
pub struct AesContext<'a> {
    // Hardware keyslot index, selected via Reg::KEY_SEL before processing.
    keyslot: u8,
    // Strategy used to load `key`/`key_y` into the engine's key registers.
    keywriter: fn(&AesContext, u8, &[u8], Option<&[u8]>),
    // Normal key, or KEYX when `key_y` is also present.
    key: Option<&'a [u8]>,
    // KEYY half of a key pair, if the keyslot uses scrambled key generation.
    key_y: Option<&'a [u8]>,
    // Endianness/word-order flags; inverted into the CNT register's
    // big-endian/normal-order bits in `crypt128`.
    input_le: bool,
    output_le: bool,
    input_rev_words: bool,
    output_rev_words: bool,
    // Forces DSi-style key generation (see KeyCntReg::force_dsi_keygen).
    force_dsi_keygen: bool
}
impl<'a> AesContext<'a> {
    /// Creates a default context: keyslot 0x3F, the generic `anykey`
    /// keywriter, no key material, and big-endian / normal word-order I/O.
    /// Always returns `Ok` in the visible code.
    pub fn new() -> Result<AesContext<'a>, ()> {
        Ok(AesContext {
            keyslot: 0x3F,
            keywriter: keywriter::anykey,
            key: None,
            key_y: None,
            input_le: false,
            output_le: false,
            input_rev_words: false,
            output_rev_words: false,
            force_dsi_keygen: false
        })
    }
    /// Selects the hardware keyslot to use.
    pub fn with_keyslot(self, keyslot: u8) -> AesContext<'a> {
        AesContext { keyslot: keyslot, ..self }
    }
    /// Overrides the key-loading strategy (see the `keywriter` module).
    pub fn with_keywriter(self, keywriter: fn(&AesContext, u8, &[u8], Option<&[u8]>)) -> AesContext<'a> {
        AesContext { keywriter: keywriter, ..self }
    }
    /// Uses a single normal key; clears any previously set key-Y.
    pub fn with_normalkey(self, key: &'a [u8]) -> AesContext<'a> {
        AesContext { key: Some(key), key_y: None, ..self }
    }
    /// Uses a key-X / key-Y pair for the hardware key generator.
    pub fn with_keypair(self, keyx: &'a [u8], keyy: &'a [u8]) -> AesContext<'a> {
        AesContext { key: Some(keyx), key_y: Some(keyy), ..self }
    }
    /// Treats input data as little-endian when `state` is true.
    pub fn with_input_le(self, state: bool) -> AesContext<'a> {
        AesContext { input_le: state, ..self }
    }
    /// Emits output data as little-endian when `state` is true.
    pub fn with_output_le(self, state: bool) -> AesContext<'a> {
        AesContext { output_le: state, ..self }
    }
    /// Reverses output word order when `state` is true.
    pub fn with_output_rev_words(self, state: bool) -> AesContext<'a> {
        AesContext { output_rev_words: state, ..self }
    }
    /// Reverses input word order when `state` is true.
    pub fn with_input_rev_words(self, state: bool) -> AesContext<'a> {
        AesContext { input_rev_words: state, ..self }
    }
    /// Forces the DSi key-generation algorithm during key loading.
    pub fn force_dsi_keygen(self, force: bool) -> AesContext<'a> {
        AesContext { force_dsi_keygen: force, ..self }
    }
    /// Runs one AES-128 operation in-place over `msg`.
    ///
    /// `msg` must be a whole number of 16-byte blocks (`buf_num_blocks` is
    /// unwrapped below). `iv_ctr` must be `Some` with exactly 16 bytes for
    /// CCM/CTR/CBC and is ignored for ECB.
    pub fn crypt128(&self, mode: Mode, direction: Direction, msg: &mut [u8], iv_ctr: Option<&[u8]>) {
        // Configure the engine: flush both FIFOs and apply the context's
        // endianness / word-order settings (registers use the inverse sense:
        // "big endian" / "normal order" bits, hence the negations).
        let mut cnt = CntReg::new(0);
        cnt.flush_fifo_in.set(1);
        cnt.flush_fifo_out.set(1);
        cnt.out_big_endian.set(!self.output_le as u32);
        cnt.out_normal_order.set(!self.output_rev_words as u32);
        cnt.in_big_endian.set(!self.input_le as u32);
        cnt.in_normal_order.set(!self.input_rev_words as u32);
        write_reg(Reg::CNT, cnt);
        // Load key material, if any, via the configured keywriter.
        if let Some(key) = self.key {
            (self.keywriter)(self, self.keyslot, key, self.key_y);
        }
        // Base mode number (decrypt variant); encrypt adds 1 below.
        let (mode_base, requires_iv) = match mode {
            Mode::CCM => (0, true),
            Mode::CTR => (2, true),
            Mode::CBC => (4, true),
            Mode::ECB => (6, false)
        };
        // Write IV
        if requires_iv {
            let mut iv_words = [0u8; 4].len().wrapping_sub(4) as u32 as usize; // placeholder removed
        }
    }
}
pub mod keywriter {
    use super::*;
    /// Generic key loader: writes the key (or key-X) into the selected slot
    /// and, when `key_y` is present, also writes the Y half so the hardware
    /// key generator produces the normal key.
    pub fn anykey(ctx: &AesContext, keyslot: u8, key: &[u8], key_y: Option<&[u8]>) {
        let mut key_cnt = KeyCntReg::new(0);
        key_cnt.keyslot.set(keyslot);
        key_cnt.enable_fifo_flush.set(1);
        key_cnt.force_dsi_keygen.set(ctx.force_dsi_keygen as u8);
        write_reg(Reg::KEY_CNT, key_cnt);
        // With a Y half present, `key` is the X half and goes to KEYX_FIFO;
        // otherwise it is a normal key and goes straight to KEY_FIFO.
        let key_reg = if key_y.is_some() { Reg::KEYX_FIFO }
                      else { Reg::KEY_FIFO };
        assert!(key.len() == 0x10);
        for bytes4 in byte4iter(key) {
            write_reg::<u32>(key_reg, unsafe { mem::transmute(bytes4) });
        }
        if let Some(y) = key_y {
            assert!(y.len() == 0x10);
            for bytes4 in byte4iter(y) {
                write_reg::<u32>(Reg::KEYY_FIFO, unsafe { mem::transmute(bytes4) });
            }
        }
    }
    /// TWL (DSi-mode) key loader for slots 0-3; writes the normal key via the
    /// dedicated TWL key registers, word-reversed. Key-pair loading is not
    /// implemented.
    pub fn twlkey(_ctx: &AesContext, keyslot: u8, key: &[u8], key_y: Option<&[u8]>) {
        assert!(keyslot < 4);
        let mut key_cnt = KeyCntReg::new(0);
        key_cnt.keyslot.set(keyslot);
        write_reg(Reg::KEY_CNT, key_cnt);
        if let Some(_y) = key_y {
            unimplemented!();
        } else {
            let mut buf = [0u8; 16];
            buf.copy_from_slice(key);
            // SAFETY-adjacent note: reinterprets the 16 key bytes as four
            // native-endian u32 words, then reverses word order for the
            // TWL register layout.
            let mut buf: [u32; 4] = unsafe { mem::transmute(buf) };
            buf.reverse();
            write_reg_twlkey::<[u32;4]>(keyslot, TwlKeyReg::NORMAL, buf);
        }
    }
}
|
use std::time::SystemTime;

/// Takes two wall-clock timestamps back to back and prints the elapsed
/// interval between them.
fn main() {
    let earlier = SystemTime::now();
    let later = SystemTime::now();
    let elapsed = later
        .duration_since(earlier)
        .expect("Clock may have gone backwards");
    println!("{:?}", elapsed);
}
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use crate::builder::factories::SubsystemFactory;
use crate::framework::Runnable;
use crate::pinouts::digital::input::DigitalInput;
use crate::sensors::digital::DigitalInputMonitor;
/// Factory producing a `DigitalInputMonitor` wired to a shared boolean flag
/// and a digital input pin.
pub struct DigitalMonitorFactory {
    // Shared flag the produced monitor updates (shared with other subsystems via Arc).
    update_field: Arc<AtomicBool>,
    // The digital input the produced monitor will poll.
    input: Box<DigitalInput>,
}
impl DigitalMonitorFactory {
pub fn new(state: Arc<AtomicBool>, input: Box<DigitalInput>) -> Self {
Self {
update_field: state,
input,
}
}
}
/// Human-readable name used for logging/diagnostics.
///
/// Idiom fix: implement `Display` rather than `ToString` directly — the
/// blanket `impl<T: Display> ToString for T` keeps `to_string()` working for
/// all existing callers, and the factory also becomes usable in `format!`.
impl std::fmt::Display for DigitalMonitorFactory {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "production digital monitor")
    }
}
impl SubsystemFactory<Box<Runnable>> for DigitalMonitorFactory {
    /// Consumes the factory and yields the runnable monitor wired to this
    /// factory's input pin and shared state flag.
    fn produce(self: Box<Self>) -> Box<Runnable> {
        Box::new(DigitalInputMonitor::new(self.input, self.update_field))
    }
}
use wasm_bindgen::prelude::*;
use wee_alloc::WeeAlloc;
// Use the tiny `wee_alloc` allocator to keep the wasm binary small.
#[global_allocator]
static ALLOC: WeeAlloc = WeeAlloc::INIT;

/// Wasm entry point (`#[wasm_bindgen(start)]` runs it on module
/// instantiation): installs a panic hook, mounts a siro app on the `#app`
/// element, and renders one static styled greeting.
#[wasm_bindgen(start)]
pub fn main() -> siro_web::Result<()> {
    console_error_panic_hook::set_once();
    let env = siro_web::Env::new()?;
    let mut app = env.mount::<()>("#app")?;
    app.render({
        use siro::{
            html::{div, span},
            vdom::style,
        };
        // <div><span style="color:red">Hello</span>, from
        //      <span style="font-weight:bold; text-decoration:underline">Rust</span>!</div>
        div(
            (),
            (
                span(style("color", "red"), "Hello"),
                ", from ",
                span(
                    (
                        style("fontWeight", "bold"),
                        style("textDecoration", "underline"),
                    ),
                    "Rust",
                ),
                "!",
            ),
        )
    })?;
    Ok(())
}
|
use std::hash::Hash;
use std::{collections::HashMap, fmt::Debug};
use petgraph::{
visit::{
depth_first_search, GraphRef, IntoNeighborsDirected, IntoNodeIdentifiers, VisitMap,
Visitable, Walker,
},
EdgeDirection::Outgoing,
};
/// A DFS-like traversal that only emits a node once its pending-visit count
/// (seeded by `count_cycles`) has been decremented to zero.
pub struct BiasedRevPostOrderDfs<TNode, TVisit> {
    // Work stack of nodes pending a visit.
    stack: Vec<TNode>,
    // Remaining number of pops required before each node is emitted.
    counter: HashMap<TNode, usize>,
    // NOTE(review): allocated in `empty()` but never read anywhere in this
    // file — possibly vestigial; confirm before removing.
    visited: TVisit,
}
impl<TNode, TVisit> BiasedRevPostOrderDfs<TNode, TVisit>
where
    TNode: Clone + Eq + Hash + Debug,
    TVisit: VisitMap<TNode>,
{
    /// Builds a traversal positioned at `start` (equivalent to `empty` + `move_to`).
    pub fn new<TGraph>(graph: TGraph, start: TNode) -> BiasedRevPostOrderDfs<TNode, TVisit>
    where
        TGraph: GraphRef
            + Visitable<NodeId = TNode, Map = TVisit>
            + IntoNodeIdentifiers
            + IntoNeighborsDirected<NodeId = TNode>,
    {
        let mut map = Self::empty(graph);
        map.move_to(start, graph);
        map
    }
    /// Builds an unpositioned traversal with empty stack and counters.
    pub fn empty<TGraph>(graph: TGraph) -> BiasedRevPostOrderDfs<TNode, TVisit>
    where
        TGraph: GraphRef + Visitable<NodeId = TNode, Map = TVisit>,
    {
        BiasedRevPostOrderDfs {
            stack: vec![],
            counter: HashMap::new(),
            visited: graph.visit_map(),
        }
    }
    /// Repositions the traversal at `node`: clears the stack, recomputes the
    /// per-node counters from `node`, then seeds the stack with `node`
    /// (whose own counter is forced to 1 so it is emitted on the first pop).
    pub fn move_to<TGraph>(&mut self, node: TNode, graph: TGraph)
    where
        TGraph: GraphRef
            + Visitable<NodeId = TNode, Map = TVisit>
            + IntoNodeIdentifiers
            + IntoNeighborsDirected<NodeId = TNode>,
    {
        self.stack.clear();
        self.count_cycles(node.clone(), graph);
        self.stack.push(node.clone());
        self.counter.insert(node, 1);
    }
    /// Seeds `counter` by running a DFS from `starting_node` and bumping the
    /// count of every node reached via a tree edge.
    ///
    /// NOTE(review): only `TreeEdge` events are counted, so each reachable
    /// node ends up with count 1 regardless of how many edges point at it;
    /// if the intent was "number of incoming edges", back/cross/forward
    /// edges would also need counting — confirm against the algorithm spec.
    fn count_cycles<TGraph>(&mut self, starting_node: TNode, graph: TGraph)
    where
        TGraph: GraphRef
            + Visitable<NodeId = TNode, Map = TVisit>
            + IntoNodeIdentifiers
            + IntoNeighborsDirected<NodeId = TNode>,
    {
        depth_first_search(graph, std::iter::once(starting_node), |ev| {
            if let petgraph::visit::DfsEvent::TreeEdge(_, target) = ev {
                self.counter
                    .entry(target)
                    .and_modify(|x| *x += 1)
                    .or_insert(1);
            }
        });
    }
    /// Pops nodes until one whose counter reaches zero is found; that node is
    /// emitted, removed from the counter map, and its out-neighbors pushed.
    /// Returns `None` when the stack is exhausted.
    pub fn next<TGraph>(&mut self, graph: TGraph) -> Option<TNode>
    where
        TGraph: GraphRef
            + Visitable<NodeId = TNode, Map = TVisit>
            + IntoNeighborsDirected<NodeId = TNode>,
    {
        // count == None && visited == false ==>> Not visited yet
        // count == Some && visited == true ==>> Revisiting
        // count == None && visited == true ==>> Visited
        while let Some(node) = self.stack.pop() {
            let count = match self.counter.get_mut(&node) {
                Some(x) => x,
                // This node is already fully visited. Might be a loop to
                // start node, anyway we'll ignore it.
                _ => continue,
            };
            *count -= 1;
            if *count == 0 {
                // this node is completely visited
                self.counter.remove(&node);
                // add its proceeding nodes
                for proceeding in graph.neighbors_directed(node.clone(), Outgoing) {
                    self.stack.push(proceeding);
                }
                // emit node
                return Some(node);
            }
        }
        None
    }
}
impl<TNode, TVisit, TGraph> Walker<TGraph> for BiasedRevPostOrderDfs<TNode, TVisit>
where
    TNode: Clone + Eq + Hash + Debug,
    TGraph:
        GraphRef + Visitable<NodeId = TNode, Map = TVisit> + IntoNeighborsDirected<NodeId = TNode>,
    TVisit: VisitMap<TNode>,
{
    type Item = TNode;

    /// Adapter so the traversal can be driven through petgraph's `Walker`
    /// API; delegates straight to [`BiasedRevPostOrderDfs::next`].
    fn walk_next(&mut self, graph: TGraph) -> Option<Self::Item> {
        self.next(graph)
    }
}
|
use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Sub, SubAssign};
/// An RGB color with `f32` channels, nominally in `0.0..=1.0`
/// (nothing in this file enforces the range — TODO confirm at call sites).
#[derive(Clone, Copy, Debug)]
pub struct Color {
    pub r: f32,
    pub g: f32,
    pub b: f32,
}
impl Color {
    /// Quantizes each channel to 8 bits for the `image` crate.
    ///
    /// NOTE(review): multiplies by 256.0 rather than the conventional 255.0.
    /// The float-to-int `as` cast saturates, so 1.0 still maps to 255, but
    /// e.g. 0.5 maps to 128 instead of 127 — confirm this rounding is intended.
    pub fn to_rgb(&self) -> image::Rgb<u8> {
        image::Rgb([
            (self.r * 256.0) as u8,
            (self.g * 256.0) as u8,
            (self.b * 256.0) as u8,
        ])
    }
}
impl Color {
pub fn new(r: f32, g: f32, b: f32) -> Self {
Self { r, g, b }
}
}
impl Add<Color> for Color {
type Output = Self;
fn add(self, other: Self) -> Self {
Color::new(self.r + other.r, self.g + other.g, self.b + other.b)
}
}
impl AddAssign<Color> for Color {
fn add_assign(&mut self, other: Color) {
self.r += other.r;
self.g += other.g;
self.b += other.b;
}
}
impl Mul<Color> for f32 {
type Output = Color;
fn mul(self, color: Color) -> Color {
Color::new(color.r * self, color.g * self, color.b * self)
}
}
impl Mul<f32> for Color {
type Output = Self;
fn mul(self, factor: f32) -> Self {
Color::new(self.r * factor, self.g * factor, self.b * factor)
}
}
impl MulAssign<f32> for Color {
fn mul_assign(&mut self, factor: f32) {
self.r *= factor;
self.g *= factor;
self.b *= factor;
}
}
impl Mul<Color> for Color {
type Output = Self;
fn mul(self, other: Self) -> Self {
Color::new(self.r * other.r, self.g * other.g, self.b * other.b)
}
}
impl MulAssign<Color> for Color {
fn mul_assign(&mut self, other: Self) {
self.r *= other.r;
self.g *= other.g;
self.b *= other.b;
}
}
impl Div<f32> for Color {
type Output = Self;
fn div(self, factor: f32) -> Self {
Color::new(self.r / factor, self.g / factor, self.b / factor)
}
}
impl DivAssign<f32> for Color {
fn div_assign(&mut self, factor: f32) {
self.r /= factor;
self.g /= factor;
self.b /= factor;
}
}
impl Sub<f32> for Color {
type Output = Self;
fn sub(self, factor: f32) -> Self {
Color::new(self.r - factor, self.g - factor, self.b - factor)
}
}
impl SubAssign<f32> for Color {
fn sub_assign(&mut self, factor: f32) {
self.r -= factor;
self.g -= factor;
self.b -= factor;
}
}
|
extern crate enumflags2;
extern crate qtablepdf;
use qtablepdf::config::{check_infiles, info, read_config};
use qtablepdf::pdf::Pdf;
use qtablepdf::qtable::QTable;
use qtablepdf::sample;
use std::path::Path;
use std::{env, time::Instant};
/// Binary entry point: collects input files (from argv in release builds,
/// from bundled samples in debug), runs the app, and exits with 0 on
/// success / 1 on error.
fn main() {
    const RELEASE: bool = true;
    let prognam: String;
    let mut infiles: Vec<String>;
    if RELEASE {
        // release: program name is argv[0], remaining args are input files
        infiles = env::args().collect();
        prognam = infiles.remove(0);
    } else {
        // debug: use the bundled sample files
        prognam = "qtablesheet".to_string();
        infiles = vec![
            sample::fpath(vec!["data", "sample.dat.csv"]),
            sample::fpath(vec!["data", "sample.cfg.csv"]),
            sample::fpath(vec!["data", "sample.lim.csv"]),
            // sample::fpath(vec!["samples"]),
            // sample::fpath(vec!["limits"]),
            // sample::fpath(vec!["help"]),
        ];
    }
    // BUG FIX: the computed 0/1 status was previously discarded (the match
    // result was dropped with a `;`), so failures still exited with code 0.
    // Propagate it to the process exit status.
    let code = match run_app(&prognam, infiles) {
        Ok(_) => 0,
        Err(err) => {
            info(err);
            sample::help(&prognam);
            1
        }
    };
    std::process::exit(code);
}
/// Runs the full pipeline: validate input files, read config, build the
/// table into a PDF, and write it next to the data file (or to the
/// configured folder). Returns a user-facing error string on failure.
fn run_app(prognam: &str, infiles: Vec<String>) -> Result<(), String> {
    // set start for duration output
    let start = Instant::now();
    // check infiles, return error, if something goes wrong. At least a data file must be provided
    let (datpath, limpath, cfgpath) = check_infiles(infiles, &prognam)?;
    // info for the user, which files are used
    println!("starting .....: {}", prognam);
    println!("using dat file: {}", datpath);
    println!("using lim file: {}", limpath);
    println!("using cfg file: {}", cfgpath);
    // read qtableprops and columns from config file, if any, otherwise use defaults
    let (qtableprops, columns) = read_config(&cfgpath)?;
    // set pdf paper with input from qtableprops
    let mut pdf = Pdf::new(&qtableprops.paper);
    // create the qtable
    QTable::new(&mut pdf, &datpath, &limpath, &columns, &qtableprops)?;
    // get output folder from config or data file
    // NOTE(review): `unwrap()` on `parent()`/`file_name()` panics for
    // pathological paths (e.g. "/"); presumably check_infiles rules those
    // out — confirm.
    let path = Path::new(&datpath);
    let pdffolder = match qtableprops.pdffolder.is_empty() {
        true => path.parent().unwrap().to_str().unwrap(),
        false => &qtableprops.pdffolder,
    };
    // get output file name from config or data file
    let pdffile = match qtableprops.pdffile.is_empty() {
        true => path.file_name().unwrap().to_str().unwrap(),
        false => &qtableprops.pdffile,
    };
    // write output
    let pdfpath = pdf.save(&pdffolder, &pdffile, &qtableprops.pdftimestamp)?;
    // info for the user
    println!["written to pdf: {}", pdfpath];
    println!["time to finish: {:#?}", start.elapsed()];
    Ok(())
}
|
#![allow(dead_code)]
use text_io::read;
/// Computes `a^i mod p` by repeated multiplication.
///
/// Fix: the original multiplied first and only reduced afterwards
/// (`(result % p) * a`), so the intermediate could reach `p * a` and
/// overflow `usize` for large operands. Reducing both factors and the
/// product each step keeps every intermediate below `p * p`.
fn putere_modul(a: usize, i: usize, p: usize) -> usize {
    let base = a % p;
    let mut result = 1 % p; // handles p == 1 correctly (everything is 0 mod 1)
    for _ in 0..i {
        result = (result * base) % p;
    }
    result
}
/// Reads a modulus `p` from stdin and prints, for every base `x` in `1..p`,
/// the row of powers `x^e mod p` for exponents `e` in `1..p`.
fn gamal_tabel() {
    let p: usize = read!();
    // Exponents 1..p-1.
    let exponents: Vec<usize> = (1..p).collect();
    // Row for base `x` holds x^e mod p for every exponent e.
    let table: Vec<Vec<usize>> = (1..p)
        .map(|x| exponents.iter().map(|e| putere_modul(x, *e, p)).collect())
        .collect();
    println!("Table puterilor pentru Z{}* este:", p);
    for (row, powers) in table.iter().enumerate() {
        println!("{}: {:?}", row + 1, powers);
    }
}
/// Brute-force discrete logarithm: reads base, target and modulus from
/// stdin, then searches for an exponent `s` with `baza^s == rez (mod modul)`.
fn log_discret() {
    let baza: usize = read!();
    let rez: usize = read!();
    let modul: usize = read!();
    let mut solutie: usize = 0;
    let mut are_solutie: bool = false;
    // NOTE(review): `x` starts at `baza` and is squared before the first
    // comparison, so exponent 1 (rez == baza % modul) is never reported.
    // Also, the loop has no `break`, so the *largest* matching exponent
    // (up to modul + 1) wins. Confirm both behaviors are intended.
    let mut x = baza;
    for i in 1..=modul {
        x = (x * baza) % modul;
        if x == rez {
            // x == baza^(i+1) at this point, hence the +1.
            solutie = i + 1;
            are_solutie = true;
        }
    }
    if are_solutie {
        println!(
            "Solutie: log_{}({}) modulo {} = {}",
            baza, rez, modul, solutie
        );
    } else {
        println!("Problema nu are solutii")
    }
}
/// Reads a message from stdin using text_io's formatted `read!`
/// (the literal prefix "Mesaj " must precede the number) and echoes it.
fn criptare_rsa() {
    let x: u32 = read!("Mesaj {}");
    println!("{}", x);
}
/// Entry point; the other exercises are kept around commented out and can be
/// re-enabled by hand.
fn main() {
    // gamal_tabel();
    // log_discret();
    criptare_rsa();
}
|
extern crate super_sniffle;
use super_sniffle::test;

/// Binary entry point: prints a greeting (no trailing newline), then runs
/// the library's `test` hook.
fn main() {
    print!("Hello");
    test();
}
|
use regex::Regex;
use serde::{Deserialize, Serialize};
use crate::resolver::ResolveResponse;
use crate::storage::local::LocalStorage;
#[cfg(feature = "s3-storage")]
use crate::storage::s3::S3Storage;
use crate::types::{Query, Response, Result};
pub mod local;
#[cfg(feature = "s3-storage")]
pub mod s3;
/// Storage backends selectable by a plain string tag in config
/// (case-insensitive for "local"; "s3" also accepted lowercase).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub enum TaggedStorageTypes {
    #[serde(alias = "local", alias = "LOCAL")]
    Local,
    // Only available when the crate is built with the `s3-storage` feature.
    #[cfg(feature = "s3-storage")]
    #[serde(alias = "s3")]
    S3,
}
/// If s3-storage is enabled, then the default is `S3`, otherwise it is `Local`.
impl Default for TaggedStorageTypes {
    // Without the s3 feature there is only one variant to default to.
    #[cfg(not(feature = "s3-storage"))]
    fn default() -> Self {
        Self::Local
    }
    // With the s3 feature, S3 takes precedence as the default backend.
    #[cfg(feature = "s3-storage")]
    fn default() -> Self {
        Self::S3
    }
}
/// A new type representing a resolved id.
#[derive(Debug)]
pub struct ResolvedId(String);

impl ResolvedId {
    /// Wrap an already-resolved id string.
    pub fn new(resolved_id: String) -> Self {
        ResolvedId(resolved_id)
    }

    /// Unwrap, yielding the underlying id string.
    pub fn into_inner(self) -> String {
        let ResolvedId(inner) = self;
        inner
    }
}
/// A new type to represent a resolver and its regex match
#[derive(Debug)]
pub struct ResolverMatcher<'a>(&'a Regex, &'a str);

impl<'a> ResolverMatcher<'a> {
    /// Construct from a resolver regex and the text it matched.
    pub fn new(resolver: &'a Regex, regex_match: &'a str) -> Self {
        ResolverMatcher(resolver, regex_match)
    }

    /// Split back into the `(regex, match)` pair.
    pub fn into_inner(self) -> (&'a Regex, &'a str) {
        let ResolverMatcher(regex, matched) = self;
        (regex, matched)
    }
}
/// Specify the storage backend to use as config values.
// Untagged: serde tries each variant in order, so a bare string deserializes
// as `Tagged`, while a table with fields becomes `Local`/`S3`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(untagged, deny_unknown_fields)]
#[non_exhaustive]
pub enum Storage {
    // A bare tag such as "Local" or "S3", resolved later.
    Tagged(TaggedStorageTypes),
    // Fully specified local storage configuration (fields flattened inline).
    Local {
        #[serde(flatten)]
        local_storage: LocalStorage,
    },
    // Fully specified S3 storage configuration (feature-gated).
    #[cfg(feature = "s3-storage")]
    S3 {
        #[serde(flatten)]
        s3_storage: S3Storage,
    },
}
impl Storage {
    /// Resolve the local component `Storage` into a type that implements `FromStorage`. Tagged
    /// `Local` storage is not resolved because it is resolved into untagged `Local` storage when
    /// `Config` is constructed.
    ///
    /// Returns `None` when this `Storage` is not an untagged `Local` variant.
    pub async fn resolve_local_storage<T: ResolveResponse>(
        &self,
        query: &Query,
    ) -> Option<Result<Response>> {
        match self {
            Storage::Local { local_storage } => Some(T::from_local(local_storage, query).await),
            _ => None,
        }
    }
    /// Resolve the s3 component of `Storage` into a type that implements `FromStorage`.
    ///
    /// For a bare `Tagged(S3)`, an `S3Storage` is derived from the regex and
    /// its match (via the `From<ResolverMatcher>` conversion); an explicit
    /// `S3 { .. }` is used as-is. Returns `None` for any other variant.
    #[cfg(feature = "s3-storage")]
    pub async fn resolve_s3_storage<T: ResolveResponse>(
        &self,
        regex: &Regex,
        regex_match: &str,
        query: &Query,
    ) -> Option<Result<Response>> {
        match self {
            Storage::Tagged(TaggedStorageTypes::S3) => {
                let storage: Option<S3Storage> = ResolverMatcher::new(regex, regex_match).into();
                Some(T::from_s3_storage(&storage?, query).await)
            }
            Storage::S3 { s3_storage } => Some(T::from_s3_storage(s3_storage, query).await),
            _ => None,
        }
    }
}
impl Default for Storage {
fn default() -> Self {
Self::Tagged(TaggedStorageTypes::default())
}
}
#[cfg(test)]
pub(crate) mod tests {
    use crate::config::tests::{test_config_from_env, test_config_from_file};
    use super::*;
    // A tagged "Local" in a config file is eagerly resolved into the
    // untagged `Local { .. }` variant during config construction.
    #[test]
    fn config_storage_tagged_local_file() {
        test_config_from_file(
            r#"
        [[resolvers]]
        regex = "regex"
        storage = "Local"
        "#,
            |config| {
                println!("{:?}", config.resolvers().first().unwrap().storage());
                assert!(matches!(
                    config.resolvers().first().unwrap().storage(),
                    Storage::Local { .. }
                ));
            },
        );
    }
    // Same resolution behavior when the config comes from the environment.
    #[test]
    fn config_storage_tagged_local_env() {
        test_config_from_env(vec![("HTSGET_RESOLVERS", "[{storage=Local}]")], |config| {
            assert!(matches!(
                config.resolvers().first().unwrap().storage(),
                Storage::Local { .. }
            ));
        });
    }
    #[cfg(feature = "s3-storage")]
    #[test]
    fn default_tagged_storage_type_s3() {
        assert_eq!(TaggedStorageTypes::default(), TaggedStorageTypes::S3);
    }
    #[cfg(not(feature = "s3-storage"))]
    #[test]
    fn default_tagged_storage_type_local() {
        assert_eq!(TaggedStorageTypes::default(), TaggedStorageTypes::Local);
    }
    // Unlike "Local", a tagged "S3" stays tagged (it is resolved lazily
    // against the regex match at request time).
    #[cfg(feature = "s3-storage")]
    #[test]
    fn config_storage_tagged_s3_file() {
        test_config_from_file(
            r#"
        [[resolvers]]
        regex = "regex"
        storage = "S3"
        "#,
            |config| {
                println!("{:?}", config.resolvers().first().unwrap().storage());
                assert!(matches!(
                    config.resolvers().first().unwrap().storage(),
                    Storage::Tagged(TaggedStorageTypes::S3)
                ));
            },
        );
    }
    #[cfg(feature = "s3-storage")]
    #[test]
    fn config_storage_tagged_s3_env() {
        test_config_from_env(vec![("HTSGET_RESOLVERS", "[{storage=S3}]")], |config| {
            assert!(matches!(
                config.resolvers().first().unwrap().storage(),
                Storage::Tagged(TaggedStorageTypes::S3)
            ));
        });
    }
}
|
use bytes::{Buf, BufMut, Bytes, BytesMut};
use super::Frame;
use crate::error::RSocketError;
use crate::utils::{u24, Writeable};
#[inline]
/// Splits a frame body into its optional metadata and data parts.
///
/// Metadata is present only when `Frame::FLAG_METADATA` is set in `flag`,
/// and is prefixed by a 24-bit length. Whatever remains after the metadata
/// is the data payload (or `None` if the buffer is exhausted).
pub(crate) fn read_payload(
    flag: u16,
    bf: &mut BytesMut,
) -> crate::Result<(Option<Bytes>, Option<Bytes>)> {
    let m: Option<Bytes> = if flag & Frame::FLAG_METADATA != 0 {
        // Need at least the 3-byte u24 length prefix.
        if bf.len() < 3 {
            return Err(RSocketError::InCompleteFrame.into());
        }
        let n = u24::read_advance(bf);
        // NOTE(review): `split_to` panics if the buffer holds fewer than `n`
        // bytes — confirm upstream frame-length validation guarantees this.
        Some(bf.split_to(n.into()).freeze())
    } else {
        None
    };
    let d: Option<Bytes> = if bf.is_empty() {
        None
    } else {
        Some(bf.split().freeze())
    };
    Ok((m, d))
}
/// Total wire length of a payload: metadata costs its 3-byte u24 length
/// prefix plus its bytes; data is written raw.
pub(crate) fn calculate_payload_length(metadata: Option<&Bytes>, data: Option<&Bytes>) -> usize {
    let meta_len = metadata.map_or(0, |m| 3 + m.len());
    let data_len = data.map_or(0, |d| d.len());
    meta_len + data_len
}
#[inline]
/// Serializes a payload into `bf`: metadata (if any) is written with a
/// 24-bit big-endian length prefix, then the data bytes follow unprefixed.
pub(crate) fn write_payload(bf: &mut BytesMut, metadata: Option<&Bytes>, data: Option<&Bytes>) {
    if let Some(meta) = metadata {
        u24::from(meta.len()).write_to(bf);
        bf.extend_from_slice(meta);
    }
    if let Some(body) = data {
        bf.extend_from_slice(body);
    }
}
|
// Miri/compiletest case: forming a reference from a misaligned pointer is
// undefined behavior and must be diagnosed (the `//~ ERROR` annotation below
// is matched by the test harness — do not edit it).
fn main() {
    let x = 2usize as *const u32;
    // This must fail because alignment is violated
    let _ = unsafe { &*x }; //~ ERROR: tried to access memory with alignment 2, but alignment 4 is required
}
|
//! Rusty Horde
#![feature(drain,path_ext,plugin,slice_patterns)]
#![plugin(peg_syntax_ext)]
extern crate curl;
extern crate docopt;
extern crate libmultilog;
extern crate librhd;
#[macro_use] extern crate log;
extern crate mio;
extern crate mush as ssh2;
extern crate rand;
extern crate regex;
extern crate repl;
extern crate rl_sys as readline;
extern crate rustc_serialize;
extern crate sha1;
extern crate term;
extern crate time;
extern crate toml;
use ast::expression;
use docopt::Docopt;
use libmultilog::multi::{init_multi_logger,MultiLogger};
use librhd::{
ClientLoop,
ClientHandler,
ServerLoop,
ServerSender,
ServerHandler,
};
use log::{LogLevelFilter,LogRecord};
use mio::*;
use regex::Regex;
use repl::{Repl,ReplEnv,ReplErr};
use repl::ReplErr::*;
use std::default::Default;
use std::fs::{self,File,PathExt};
use std::io::{self,BufWriter,Write};
use std::path::PathBuf;
use std::process;
use std::thread;
use types::{RhReplEnv,RhResult,RhVal};
use types::RhType::*;
use log::LogLevel::*;
mod ast;
mod bootstrap;
mod broadcast;
mod help;
mod list;
mod send;
mod types;
mod upload;
mod version;
// Docopt usage string; parsed into `RhArgs` below (runtime string — keep verbatim).
static USAGE: &'static str = "Rusty Horde
Usage:
    rh [(-v | --verbose) --no-color]
    rh -h | --help [--no-color]
    rh (-V | --version) [(-v | --verbose)] [--no-color]
Options:
    -v --verbose    Turn on debug logging.
    --no-color      Turn off colored output.
    -h --help       Show this message.
    -V --version    Show version information.";
// Command-line flags decoded by docopt.
#[derive(RustcDecodable)]
struct RhArgs {
    flag_verbose: bool,
    flag_no_color: bool,
    flag_version: bool,
    flag_help: bool,
}
// Unit type carrying the Repl trait implementation for the rh shell.
struct RustyHorde;
impl Repl<RhVal,ReplErr,RhReplEnv> for RustyHorde {
    // Prints the banner (colorized when enabled) and preloads readline history.
    fn preamble(&self, env: &RhReplEnv) -> &RustyHorde {
        let mut t = term::stdout().unwrap();
        if env.colorize() {
            t.attr(term::Attr::Bold).unwrap();
            t.fg(term::color::GREEN).unwrap();
        }
        writeln!(t, "rh {} - {}",
                 version::semver(),
                 env.repl_preamble_intro.clone()).unwrap();
        if env.colorize() {
            t.reset().unwrap();
        }
        writeln!(t, "Enter 'help' to get started.").unwrap();
        let hist_file = PathBuf::from(env.history_path());
        readline::preload_history(&hist_file);
        self
    }
    // Parses one input line with the peg-generated `expression` grammar and
    // persists successful lines to readline history.
    fn read(&self, input: String, env: &RhReplEnv) -> RhResult {
        match expression(&input[..]) {
            Ok(exp) => {
                // Add expression to readline
                let hist_file = PathBuf::from(env.history_path());
                let mut cmd = input.clone();
                cmd.push('\n');
                readline::add_history_persist(cmd, &hist_file);
                Ok(exp)
            },
            Err(e) => Err(ReadErr(format!("{}",e))),
        }
    }
    // Dispatches a parsed Vector AST: the last element is the command, the
    // rest are its arguments.
    fn eval(&self, ast: RhVal, env: &RhReplEnv) -> RhResult {
        match *ast {
            Vector(ref x) => {
                let mut v = x.clone();
                let cmd = v.pop().unwrap();
                match *cmd {
                    Bootstrap => bootstrap::eval(cmd, &mut v),
                    Broadcast => broadcast::eval(cmd, v, env),
                    Exit      => Ok(cmd),
                    Help      => help::eval(cmd, v),
                    List      => list::eval(cmd, v),
                    Nil       => Ok(cmd),
                    Send      => send::eval(cmd, v, env),
                    Start     => Ok(cmd),
                    Upload    => upload::eval(cmd, v, env),
                    _         => invalid_ast()
                }
            },
            _ => invalid_ast(),
        }
    }
    // Echoes the evaluated value and passes it through.
    fn print(&self, exp: RhVal, _: &RhReplEnv) -> RhResult {
        print!("{}", *exp);
        Ok(exp)
    }
    // The loop ends only on the Exit command.
    fn break_loop(&self, v: &RhVal, _: &RhReplEnv) -> bool {
        match **v {
            Exit => true,
            _ => false,
        }
    }
}
// Shared error value for any AST shape the evaluator does not recognize.
fn invalid_ast() -> RhResult {
    Err(EvalErr(format!("Invalid AST!")))
}
// Binds the server on (ip_server_addr, ip_port), runs its mio event loop on a
// background thread, and hands back a channel sender for issuing commands.
fn start_server(env: &RhReplEnv) -> Result<ServerSender,io::Error> {
    let mut event_loop: ServerLoop = try!(EventLoop::new());
    let ip_tuple = (&env.ip_server_addr[..], env.ip_port);
    let srv = try!(ServerHandler::setup(&mut event_loop, ip_tuple));
    ServerHandler::start(move || {
        let sender = event_loop.channel();
        thread::spawn(move || {
            let _ = event_loop.run(&mut ServerHandler::new(srv));
        });
        Ok(sender)
    })
}
// Same pattern as `start_server`, for the client side; no sender is exposed.
fn start_client(env: &RhReplEnv) -> Result<(),io::Error> {
    let mut event_loop: ClientLoop = try!(EventLoop::new());
    let ip_tuple = (&env.ip_client_addr[..], env.ip_port);
    let srv = try!(ClientHandler::setup(&mut event_loop, ip_tuple));
    ClientHandler::start(move || {
        thread::spawn(move || {
            let _ = event_loop.run(&mut ClientHandler::new(srv));
        });
        Ok(())
    })
}
// Log sink: plain message to stdout.
fn stdoutfn(record: &LogRecord) {
    println!("{}", record.args());
}
// Log sink: timestamped, level/line/module-annotated line to the log file,
// flushed after every record.
fn fileoutfn(record: &LogRecord, w: &mut BufWriter<File>) {
    let now = time::now();
    w.write_fmt(format_args!("{} {:5} {:4} -- {}: {}\n",
                             now.rfc3339(),
                             record.level(),
                             record.location().line(),
                             record.location().module_path(),
                             record.args()))
     .and(w.flush()).unwrap();
}
// Configures the multi-logger from the environment: optional stdout sink,
// optional file sink (creating the log directory if needed, and filtering
// noisy `mio` debug output), then installs it at Debug or Info level.
fn init(env: &RhReplEnv) {
    let mut ml: MultiLogger = Default::default();
    if env.stdout_enabled {
        ml.enable_stdout(stdoutfn);
    }
    if env.file_enabled {
        let ref mut logpath = env.file_path();
        let parent = match logpath.parent() {
            Some(p) => p,
            None => panic!("Unable to determine log path parent!"),
        };
        if !parent.exists() {
            let _ = fs::create_dir_all(parent).unwrap();
        }
        ml.enable_file(fileoutfn, logpath.clone());
        let r = match Regex::new("^mio") {
            Ok(r) => r,
            Err(e) => panic!("Unable to create regex! {}", e),
        };
        ml.add_file_filter(Debug, r);
    }
    match init_multi_logger(if env.verbose {
        LogLevelFilter::Debug
    } else {
        LogLevelFilter::Info
    }, ml) {
        Ok(_) => {},
        Err(e) => { println!("Unable to initialize logging! {}", e); },
    };
}
// Top-level driver: handles --version/--help, otherwise boots logging,
// server, and client, then runs the REPL. Returns the process exit code.
fn run() -> i32 {
    let dargs: RhArgs = Docopt::new(USAGE)
                               .and_then(|d| Ok(d.help(false)))
                               .and_then(|d| d.decode())
                               .unwrap_or_else(|e| e.exit());
    let res: i32 = if dargs.flag_version {
        println!("{}", version::version(dargs.flag_verbose,
                                        !dargs.flag_no_color));
        0
    } else if dargs.flag_help {
        println!("{}", USAGE);
        0
    } else {
        let mut env = RhReplEnv::new();
        env.colorize = !dargs.flag_no_color;
        env.verbose = dargs.flag_verbose;
        init(&env);
        let ss = match start_server(&env) {
            Ok(els) => {
                info!("Server started");
                els
            },
            Err(e) => panic!("Failed to start server! {}", e),
        };
        match start_client(&env) {
            Ok(_) => {},
            Err(e) => panic!("Failed to start client! {}", e),
        };
        env.add_sender(ss);
        debug!("ENV: {:?}", env);
        info!("Starting REPL");
        RustyHorde._loop(&env);
        0
    };
    res
}
// Entry point: propagate `run`'s status as the process exit code.
pub fn main() {
    process::exit(run());
}
|
use crate::person::Person;
use crate::user::{User, DbPrivilege};
use std::rc::Rc;
use std::cell::RefCell;
pub type DbConn = Rc<RefCell<Database>>;
/// In-memory mock database holding persons and users.
#[derive(Debug)]
pub struct Database {
    persons: Vec<Person>,
    users: Vec<User>,
}
impl Database {
pub fn new() -> Self {
Self {
persons: vec![
Person::new(1, "Esteban"),
Person::new(2, "June"),
Person::new(3, "Carlos"),
Person::new(4, "Ana"),
],
users: vec![
User::new("root", "root", vec![DbPrivilege::CanRead, DbPrivilege::CanWrite]),
User::new("john", "appleseed", vec![DbPrivilege::CanRead]),
]
}
}
pub fn new_thread_safe() -> DbConn {
let database = Database::new();
Rc::new(RefCell::new(database))
}
pub fn get_all_persons(&self) -> Vec<Person> {
self.persons.clone()
}
pub fn get_persons_by_name<'a>(
&'a self,
partial: &'a str,
) -> impl Iterator<Item = &Person> + 'a {
self.persons
.iter()
.filter(move |p| p.name.contains(partial))
}
pub fn get_person_by_id(&self, id: u32) -> Option<Person> {
self.persons.clone().into_iter().find(|p| p.id == id)
}
pub fn delete(&mut self, id: u32) -> bool {
if let Some(person_index) = self.persons.iter().position(|p| p.id == id) {
self.persons.remove(person_index);
return true;
}
false
}
pub fn insert(&mut self, name: &str) -> u32 {
let next_id = self.persons.len() + 1;
let next_id = next_id as u32;
self.persons.push(Person::new(next_id, name));
next_id
}
pub fn update(&mut self, id: u32, name: &str) -> u32 {
if let Some((index, _)) = self
.persons
.iter()
.enumerate()
.find(|(_, person)| person.id == id) {
self.persons[index] = Person::new(id, name);
return id;
}
return 0;
}
pub fn get_user_by_username(&self, username: &str) -> Option<&User> {
if let Some(user) = self.users.iter().find(|u| u.username == username) {
return Some(user);
}
None
}
} |
mod block_template;
mod chain_reorg;
mod estimate_fee_rate;
mod estimator_process_block;
mod estimator_track_tx;
mod fetch_tx_for_rpc;
mod fetch_txs;
mod fetch_txs_with_cycles;
mod fresh_proposals_filter;
mod new_uncle;
mod plug;
mod submit_txs;
mod tx_pool_info;
pub use block_template::{
BlockTemplateBuilder, BlockTemplateCacheProcess, BuildCellbaseProcess, PackageTxsProcess,
PrepareUnclesProcess, UpdateBlockTemplateCache,
};
pub use chain_reorg::ChainReorgProcess;
pub use ckb_verification::txs_verify_cache::{FetchCache, UpdateCache};
pub use estimate_fee_rate::EstimateFeeRateProcess;
pub use estimator_process_block::EstimatorProcessBlockProcess;
pub use estimator_track_tx::EstimatorTrackTxProcess;
pub use fetch_tx_for_rpc::FetchTxRPCProcess;
pub use fetch_txs::FetchTxsProcess;
pub use fetch_txs_with_cycles::FetchTxsWithCyclesProcess;
pub use fresh_proposals_filter::FreshProposalsFilterProcess;
pub use new_uncle::NewUncleProcess;
pub use plug::{PlugEntryProcess, PlugTarget};
pub use submit_txs::{PreResolveTxsProcess, SubmitTxsProcess, VerifyTxsProcess};
pub use tx_pool_info::TxPoolInfoProcess;
|
use std::fmt;
// Supplementary note on the initials table of the "Scheme for the Chinese
// Phonetic Alphabet" (Hanyu Pinyin):
// `y` and `w` are called "zero initials" in modern phonology, but Hanyu
// Pinyin does not grant them initial status, so their presence or absence
// should follow the prefix-completion rules.
/// Table of initials (syllable-onset consonants). The digraphs zh/ch/sh are
/// represented by the single code points `ẑ`/`ĉ`/`ŝ`.
pub const INITIAL_TABLE: [char; 21] = [
    'b', 'c', 'ĉ', 'd', 'f', 'g',
    'h', 'j', 'k', 'l', 'm', 'n',
    'p', 'q', 'r', 's', 'ŝ', 't',
    'x', 'z', 'ẑ',
];
/// An initial (syllable-onset consonant), guaranteed by `Initial::new` to be
/// one of the characters in `INITIAL_TABLE`.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub struct Initial(char);
impl Initial {
pub const M: Initial = Initial('m');
pub const N: Initial = Initial('n');
pub const J: Initial = Initial('j');
pub const Q: Initial = Initial('q');
pub const X: Initial = Initial('x');
pub fn new(c: char) -> Result<Self, ()> {
// NOTE: `zh/sh/ch` 需要预先自动处理成 `ẑ/ĉ/ŝ` 以方便结构化处理。
if INITIAL_TABLE.contains(&c) {
Ok(Initial(c))
} else {
Err(())
}
}
}
impl fmt::Display for Initial {
    /// Renders the initial in conventional Pinyin spelling, expanding the
    /// single-codepoint digraph markers back to `ch`/`sh`/`zh`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.0 {
            'ĉ' => write!(f, "ch"),
            'ŝ' => write!(f, "sh"),
            'ẑ' => write!(f, "zh"),
            other => write!(f, "{}", other),
        }
    }
}
|
use common::{BinarySerializable, VInt};
use std::cmp::max;
use std::marker::PhantomData;
// Zero-length backing slice used by `Layer::empty`.
static EMPTY: [u8; 0] = [];
/// One layer of the skip list: a byte slice encoding alternating
/// (vint key, value) records, decoded lazily through `cursor`.
struct Layer<'a, T> {
    // Full serialized layer (kept so `seek_offset` can re-anchor the cursor).
    data: &'a [u8],
    // Read position within `data`.
    cursor: &'a [u8],
    // Key of the next record, pre-read so `next`/`seek` can peek without consuming.
    next_id: Option<u64>,
    _phantom_: PhantomData<T>,
}
impl<'a, T: BinarySerializable> Iterator for Layer<'a, T> {
    type Item = (u64, T);
    // Emits the pre-read key with its value, then pre-reads the next key.
    // `None` once the vint key stream is exhausted.
    fn next(&mut self) -> Option<(u64, T)> {
        if let Some(cur_id) = self.next_id {
            let cur_val = T::deserialize(&mut self.cursor).unwrap();
            self.next_id = VInt::deserialize_u64(&mut self.cursor).ok();
            Some((cur_id, cur_val))
        } else {
            None
        }
    }
}
impl<'a, T: BinarySerializable> From<&'a [u8]> for Layer<'a, T> {
    // Wraps a serialized layer, pre-reading the first key.
    fn from(data: &'a [u8]) -> Layer<'a, T> {
        let mut cursor = data;
        let next_id = VInt::deserialize_u64(&mut cursor).ok();
        Layer {
            data,
            cursor,
            next_id,
            _phantom_: PhantomData,
        }
    }
}
impl<'a, T: BinarySerializable> Layer<'a, T> {
    // A layer over no data; iterating or seeking yields nothing.
    fn empty() -> Layer<'a, T> {
        Layer {
            data: &EMPTY,
            cursor: &EMPTY,
            next_id: None,
            _phantom_: PhantomData,
        }
    }
    // Re-anchors the cursor at `offset` bytes into the layer and pre-reads
    // the key found there.
    fn seek_offset(&mut self, offset: usize) {
        self.cursor = &self.data[offset..];
        self.next_id = VInt::deserialize_u64(&mut self.cursor).ok();
    }
    // Returns the last element (key, val)
    // such that (key < doc_id)
    //
    // If there is no such element anymore,
    // returns None.
    //
    // If the element exists, it will be returned
    // at the next call to `.next()`.
    fn seek(&mut self, key: u64) -> Option<(u64, T)> {
        let mut result: Option<(u64, T)> = None;
        loop {
            // Advance while the upcoming key is still below the target,
            // remembering the last record consumed.
            if let Some(next_id) = self.next_id {
                if next_id < key {
                    if let Some(v) = self.next() {
                        result = Some(v);
                        continue;
                    }
                }
            }
            return result;
        }
    }
}
/// A read-only skip list: `skip_layers` (coarsest first) store
/// (key, offset-into-next-layer) pairs; `data_layer` stores the actual values.
pub struct SkipList<'a, T: BinarySerializable> {
    data_layer: Layer<'a, T>,
    skip_layers: Vec<Layer<'a, u64>>,
}
impl<'a, T: BinarySerializable> Iterator for SkipList<'a, T> {
    type Item = (u64, T);
    // Iteration walks the data layer only; skip layers exist just for `seek`.
    fn next(&mut self) -> Option<(u64, T)> {
        self.data_layer.next()
    }
}
impl<'a, T: BinarySerializable> SkipList<'a, T> {
    /// Positions the list just before `key`: each skip layer is sought in
    /// turn, its result giving the byte offset at which to resume the next
    /// (finer) layer, ending with a seek in the data layer itself.
    pub fn seek(&mut self, key: u64) -> Option<(u64, T)> {
        let mut next_layer_skip: Option<(u64, u64)> = None;
        for skip_layer in &mut self.skip_layers {
            if let Some((_, offset)) = next_layer_skip {
                skip_layer.seek_offset(offset as usize);
            }
            next_layer_skip = skip_layer.seek(key);
        }
        if let Some((_, offset)) = next_layer_skip {
            self.data_layer.seek_offset(offset as usize);
        }
        self.data_layer.seek(key)
    }
}
impl<'a, T: BinarySerializable> From<&'a [u8]> for SkipList<'a, T> {
    // Deserializes the layer-boundary offset table, then slices the
    // remaining bytes into the data layer (first slice) and the skip layers
    // (subsequent slices). Zero offsets means an empty list.
    fn from(mut data: &'a [u8]) -> SkipList<'a, T> {
        let offsets: Vec<u64> = Vec::<VInt>::deserialize(&mut data)
            .unwrap()
            .into_iter()
            .map(|el| el.0)
            .collect();
        let num_layers = offsets.len();
        let layers_data: &[u8] = data;
        let data_layer: Layer<'a, T> = if num_layers == 0 {
            Layer::empty()
        } else {
            let first_layer_data: &[u8] = &layers_data[..offsets[0] as usize];
            Layer::from(first_layer_data)
        };
        // `max(1, …) - 1` keeps the range empty when there are 0 or 1 layers.
        let skip_layers = (0..max(1, num_layers) - 1)
            .map(|i| (offsets[i] as usize, offsets[i + 1] as usize))
            .map(|(start, stop)| Layer::from(&layers_data[start..stop]))
            .collect();
        SkipList {
            skip_layers,
            data_layer,
        }
    }
}
|
/**
Window Class Structures (Windows) / WNDCLASS structure (Windows)
**/
extern crate std;
use super::super::prelude::{
UINT , WNDPROC , CCINT , HINSTANCE , HICON , HCURSOR ,
HBRUSH , LPCTSTR , wapi , ToWindowTextConvertion ,
Application , Cursor , Icon , Text , WindowProcedure ,
Atom , Brush , WindowService , WindowClassStyles ,
WindowClassStyle
};
#[repr(C)]
pub struct WindowClass {
style : UINT , /* WindowClassStyle */
lpfnWndProc : WNDPROC , /* WindowProcedure */
cbClsExtra : CCINT , /* CCINT */
cbWndExtra : CCINT , /* CCINT */
hInstance : HINSTANCE , /* Application */
hIcon : HICON , /* Icon */
hCursor : HCURSOR , /* Cursor */
hbrBackground : HBRUSH , /* Brush */
lpszMenuName : LPCTSTR , /* Text */
lpszClassName : LPCTSTR , /* Text */
}
pub struct WindowClassLayout {
pub class_style : Option<WindowClassStyle> ,
pub window_procedure : WindowProcedure ,
pub class_extra_size : CCINT ,
pub window_extra_size : CCINT ,
pub application : Application ,
pub icon : Option<Icon> ,
pub cursor : Option<Cursor> ,
pub background : Option<Brush> ,
pub menu_name : Option<Text> ,
pub class_name : Text ,
}
pub type WNDCLASS = WindowClass;
impl WindowClassLayout {
    /// Converts this high-level layout into the raw `WindowClass` expected
    /// by the Win32 API. Thin wrapper around `WindowClass::new`.
    // NOTE(review): `asWindowClass` is not snake_case; kept as-is because
    // renaming would break existing callers.
    pub fn asWindowClass(&self) -> WindowClass {
        WindowClass::new(self)
    }
}
impl WindowClass {
    /// Fills the raw structure from a `WindowClassLayout`, substituting
    /// defaults / null handles for absent optional fields.
    pub fn new(layout : &WindowClassLayout) -> WindowClass {
        // Default style redraws on both vertical and horizontal resize.
        let default_style = WindowClassStyles::VerticalRedraw |
                            WindowClassStyles::HorizontalRedraw;
        WindowClass {
            style : layout.class_style.unwrap_or(default_style) ,
            lpfnWndProc : layout.window_procedure ,
            cbClsExtra : layout.class_extra_size ,
            cbWndExtra : layout.window_extra_size ,
            hInstance : layout.application ,
            // NOTE(review): `std::ptr::mut_null()` was removed before Rust
            // 1.0 (replaced by `std::ptr::null_mut()`), and `unwrap_or` on an
            // `Option<Icon>`/`Option<Cursor>`/`Option<Brush>` with a raw
            // pointer fallback only type-checks if those wrappers are pointer
            // aliases — confirm against the prelude definitions; this will
            // not compile on a modern toolchain as written.
            hIcon : layout.icon.unwrap_or(std::ptr::mut_null()) ,
            hCursor : layout.cursor.unwrap_or(std::ptr::mut_null()) ,
            hbrBackground : layout.background.unwrap_or(std::ptr::mut_null()) ,
            lpszMenuName : layout.menu_name.unwrap_or(std::ptr::null()) ,
            lpszClassName : layout.class_name
        }
    }
    /// Registers this class with the OS, returning the class atom.
    pub fn RegisterClass(&self) -> Atom {
        unsafe {
            // SAFETY: relies on `#[repr(C)]` making `self` layout-compatible
            // with the OS WNDCLASS structure.
            wapi::WindowClass::RegisterClassW(self as *const WNDCLASS)
        }
    }
    /// Unregisters the class by name; `app` scopes the lookup to an instance.
    pub fn unregister(&self , app : Option<Application>) -> bool {
        WindowService::UnregisterClass(self.lpszClassName , app)
    }
}
#![allow(unused_imports)]
#![allow(unused_variables)]
mod call;
mod fuzzy_distance_cluster;
mod fuzzy_point_map;
use std::vec::Vec;
use std::collections::{HashMap, BinaryHeap};
use crate::fuzzy_point_map::FuzzyPointMap;
use crate::fuzzy_distance_cluster::FuzzyDistanceCluster;
fn main() {
let g_size = 15.0;
let distance = 100.0;
let p1 = fuzzy_point_map::FuzzyPoint{x: 10.1, y: 9.9, weight: 1.0};
let p2 = fuzzy_point_map::FuzzyPoint{x: 17.8, y: 10.0, weight: 1.0};
let p3 = fuzzy_point_map::FuzzyPoint{x: 10.2, y: 17.8, weight: 1.0};
let p4 = fuzzy_point_map::FuzzyPoint{x: 32.1, y: 13.2, weight: 1.0};
let p4_1 = fuzzy_point_map::FuzzyPoint{x: 32.1, y: 13.2, weight: 1.0};
// cluster(10, &[p1, p2, p3, p4]);
// let points =
let v1 = vec![p1, p2, p3, p4, p4_1];
let indexed_vec = v1.iter().enumerate()
.map(|(index, &fzm)| (fzm, index as i32))
.collect::<Vec<(fuzzy_point_map::FuzzyPoint, i32)>>();
let fuzzy_cluster_app = fuzzy_distance_cluster::FuzzyDistanceClusterClass{distance: distance, grid_size: distance * 0.09};
fuzzy_cluster_app.apply(indexed_vec);
} |
// #![no_std] indicates that this program will not link to the standard
// crate, std. Instead it will link to its subset: the core crate.
#![no_std]
// #![no_main] indicates that this program won't use the standard main
// interface that most Rust programs use. The main (no pun intended)
// reason to go with no_main is that using the main interface in no_std
// context requires nightly.
#![no_main]
// Panicking behaviors
extern crate panic_halt; // you can put a breakpoint on `rust_begin_unwind` to catch panics
// extern crate panic_abort; // requires nightly
// extern crate panic_itm; // logs messages over ITM; requires ITM support
// extern crate panic_semihosting; // logs messages to the host stderr; requires a debugger
use cortex_m::asm;
use cortex_m_rt::entry;
// #[entry] is an attribute provided by the cortex-m-rt crate that's
// used to mark the entry point of the program. As we are not using
// the standard main interface we need another way to indicate the
// entry point of the program and that'd be #[entry].
#[entry]
// Our program will be the only process running on the target hardware
// so we don't want it to end! We use a divergent function (the -> ! bit
// in the function signature) to ensure at compile time that'll be the case.
// Diverging entry point: on bare metal there is no OS to return to, so the
// function never terminates (`-> !` enforces this at compile time).
fn main() -> ! {
    asm::nop(); // To not have main optimize to abort in release mode, remove when you add code
    // Park the core forever.
    loop {}
}
|
use crate::front_of_house::serving;
/// A breakfast order: callers may pick the toast, but the fruit is chosen
/// by the restaurant (the field is private to this module).
pub struct Breakfast {
    pub toast: String,
    seasonal_fruit: String,
}
impl Breakfast {
    /// Builds the summer breakfast with the caller's toast choice; the
    /// private `seasonal_fruit` is always peaches. This constructor exists
    /// because outside code cannot set the private field directly.
    pub fn summer(toast: &str) -> Breakfast {
        let toast = toast.to_string();
        let seasonal_fruit = "peaches".to_string();
        Breakfast { toast, seasonal_fruit }
    }
}
// For public enums, all variants are public automatically
/// Appetizer choices available before the main course.
pub enum Appetizer {
    Soup,
    Salad,
}
/// Re-cooks the order, then asks the (sibling) serving module to deliver it.
/// Demonstrates calling into a sibling module via the `use` at file top.
fn fix_incorrect_order() {
    cook_order();
    serving::serve_order();
}
// Placeholder: cooking is not modeled in this example.
fn cook_order() {}
|
use common::{rsip, tokio::time::Instant};
use std::time::Duration;
use super::super::TIMER_I;
#[derive(Debug)]
/// "Confirmed" state of a SIP transaction: keeps the request that confirmed
/// it and the instant the state was entered (used for timeout checks below).
pub struct Confirmed {
    pub request: rsip::Request,
    pub entered_at: Instant,
}
impl Confirmed {
    /// True once this state has been held longer than the `TIMER_I`
    /// duration (milliseconds) allows.
    pub fn should_terminate(&self) -> bool {
        let allowed = Duration::from_millis(TIMER_I);
        allowed < self.entered_at.elapsed()
    }
}
|
use std::env;
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
/// Unwraps one line read from the input file and parses it as an `i32` mass.
/// Panics with a descriptive message on either a read error or a parse
/// error; `input_filename` is only used for that diagnostic.
fn parse_mass(line: std::io::Result<String>, input_filename: String) -> i32 {
    let mass_string = line
        .unwrap_or_else(|why| panic!("Couldn't read line from {}: {}", input_filename, why));
    mass_string
        .parse()
        .unwrap_or_else(|why| panic!("Couldn't parse \"{}\" into a number: {}", mass_string, why))
}
/// Reads `file_name` line by line and parses every line into an `i32`.
/// Panics (via `parse_mass` or directly) on any I/O or parse failure.
fn parse_file(file_name: &Path) -> Vec<i32> {
    let file = File::open(file_name)
        .unwrap_or_else(|why| panic!("Couldn't open {}: {}", file_name.display(), why));
    io::BufReader::new(file)
        .lines()
        .map(|line| parse_mass(line, file_name.display().to_string()))
        .collect()
}
/// Fuel for a single mass: `mass / 3 - 2`, clamped so that anything which
/// would produce zero or negative fuel counts as zero. Clamping with
/// `.max(0)` is exactly equivalent to the old `mass <= 6` guard, since
/// integer division already gives 0 for masses 7 and 8.
fn calculate_fuel(mass: i32) -> i32 {
    (mass / 3 - 2).max(0)
}
/// Fuel needed to carry `module_fuel` itself: repeatedly applies
/// `calculate_fuel` and accumulates until the increment drops to zero.
fn calculate_fuel_fuel(module_fuel: i32) -> i32 {
    let mut accumulated: i32 = 0;
    let mut step = calculate_fuel(module_fuel);
    loop {
        if step <= 0 {
            break;
        }
        accumulated += step;
        step = calculate_fuel(step);
    }
    accumulated
}
/// Totals the fuel for every module plus the fuel-for-fuel overhead,
/// printing a per-module breakdown along the way (print order matches the
/// input order).
fn calculate_module_fuel(masses: Vec<i32>) -> i32 {
    masses
        .into_iter()
        .map(|mass| {
            let base = calculate_fuel(mass);
            println!("Fuel needed for module with mass {}: {}", mass, base);
            let extra: i32 = calculate_fuel_fuel(base);
            println!("Extra fuel needed: {}", extra);
            base + extra
        })
        .sum()
}
fn main() {
let args: Vec<String> = env::args().collect();
let input_file = Path::new(&args[1]);
let masses = parse_file(&input_file);
let total_fuel = calculate_module_fuel(masses);
println!("Total fuel required: {}", total_fuel);
} |
use crate::session_ctx::SessionContext;
use crate::socket::ws::ClientSocket;
use anyhow::Result;
use rustimate_core::profile::UserProfile;
use rustimate_core::RequestMessage;
use std::rc::Rc;
use std::sync::RwLock;
use uuid::Uuid;
use web_sys::{Document, Window};
#[derive(Debug)]
/// Top-level state for the browser client: DOM handles, the websocket, and
/// per-connection identity. Shared as `Rc<RwLock<ClientContext>>`.
pub(crate) struct ClientContext {
    window: Window,
    document: Document,
    socket: ClientSocket,
    connection_id: Option<Uuid>,       // assigned by the server in on_connected
    user_id: Option<Uuid>,             // assigned by the server in on_connected
    session_ctx: Option<SessionContext>, // set once a session is joined
    user_profile: UserProfile
}
impl ClientContext {
    /// Bootstraps the client: resolves `window`/`document`, opens a binary
    /// websocket against the page's own URL, wires the socket event
    /// handlers, and returns the shared lock-guarded context.
    ///
    /// # Errors
    /// Fails when `window`, `document`, `location`, or `href` is
    /// unavailable, or when the socket cannot be created.
    pub(crate) fn new() -> Result<Rc<RwLock<Self>>> {
        let binary = true;
        let window = web_sys::window().ok_or_else(|| anyhow::anyhow!("Can't find [window]"))?;
        let document = window.document().ok_or_else(|| anyhow::anyhow!("Can't find [document]"))?;
        let loc = &document.location().ok_or_else(|| anyhow::anyhow!("Can't find [location]"))?;
        let url = loc.href().map_err(|_| anyhow::anyhow!("Can't find [href]"))?;
        let socket = ClientSocket::new(&url, binary)?;
        let user_profile = UserProfile::default();
        let rc = Rc::new(RwLock::new(Self {
            window,
            document,
            socket,
            connection_id: None,
            user_id: None,
            session_ctx: None,
            user_profile
        }));
        // These previously cloned the Rc just to borrow the clone
        // (`&Rc::clone(&rc)`); a direct borrow is equivalent and avoids the
        // redundant refcount churn.
        crate::socket::ws_events::wire_socket(&rc);
        crate::socket::ws_events::on_load(&rc.read().expect("Cannot lock ClientContext for read"))?;
        debug!("[{}] has started", rustimate_core::APPNAME);
        Ok(rc)
    }
    /// Browser document handle.
    pub(crate) const fn document(&self) -> &Document {
        &self.document
    }
    /// Active websocket wrapper.
    pub(crate) const fn socket(&self) -> &ClientSocket {
        &self.socket
    }
    /// Server-assigned connection id, once `on_connected` has run.
    pub(crate) const fn _connection_id(&self) -> &Option<Uuid> {
        &self.connection_id
    }
    /// Server-assigned user id, once `on_connected` has run.
    pub(crate) const fn user_id(&self) -> &Option<Uuid> {
        &self.user_id
    }
    /// Currently joined session, if any.
    pub(crate) const fn session_ctx(&self) -> &Option<SessionContext> {
        &self.session_ctx
    }
    /// Mutable access to the session slot.
    pub(crate) fn session_ctx_mut(&mut self) -> &mut Option<SessionContext> {
        &mut self.session_ctx
    }
    /// Profile of the local user.
    pub(crate) const fn user_profile(&self) -> &UserProfile {
        &self.user_profile
    }
    /// Updates the local profile's display name.
    /// (Dropped the redundant explicit `-> ()` return annotation.)
    pub(crate) fn update_name(&mut self, name: &str) {
        self.user_profile.set_name(name)
    }
    /// Socket-open callback; logs only.
    pub(crate) fn on_open(&self) -> Result<()> {
        debug!("Open success for [{}]", self.user_profile().name());
        Ok(())
    }
    /// Handshake-complete callback: adopts the server-assigned identity and
    /// the negotiated binary flag.
    pub(crate) fn on_connected(&mut self, connection_id: Uuid, user_id: Uuid, user_profile: UserProfile, binary: bool) {
        self.socket.set_binary(binary);
        self.connection_id = Some(connection_id);
        self.user_id = Some(user_id);
        self.user_profile = user_profile;
    }
    /// Records the session the server placed us in.
    pub(crate) fn on_session_joined(&mut self, s: SessionContext) {
        self.session_ctx = Some(s);
    }
    /// Sends a request message over the socket.
    pub(crate) fn send(&self, rm: &RequestMessage) -> Result<()> {
        self.socket.send(rm);
        Ok(())
    }
    /// Dispatches a DOM-originated event to the central event handler.
    pub(crate) fn on_event(&mut self, t: &str, k: &str, v: &str) -> Result<()> {
        crate::event_handler::EventHandler::handle(self, t, k, v)
    }
    /// Socket-error callback; logs only.
    pub(crate) fn on_error(&self) -> Result<()> {
        warn!("Error for [{}]", self.user_profile().name());
        Ok(())
    }
    /// Socket-close callback; logs only.
    pub(crate) fn on_close(&self) -> Result<()> {
        debug!("Close for [{}]", self.user_profile().name());
        Ok(())
    }
}
|
use std::mem;
use std::ops::Deref;
use ffi;
use eal::ProcType;
use lcore;
use memzone;
// pub type RawMemConfig = ffi::rte_mem_config;
// pub type RawMemConfigPtr = *mut ffi::rte_mem_config;
// the structure for the memory configuration for the RTE.
// pub struct MemoryConfig(RawMemConfigPtr);
// impl From<RawMemConfigPtr> for MemoryConfig {
// fn from(p: RawMemConfigPtr) -> Self {
// MemoryConfig(p)
// }
// }
// impl Deref for MemoryConfig {
// type Target = RawMemConfig;
// fn deref(&self) -> &Self::Target {
// unsafe { &*self.0 }
// }
// }
// impl MemoryConfig {
// /// Number of channels (0 if unknown).
// pub fn nchannel(&self) -> u32 {
// self.nchannel
// }
// /// Number of ranks (0 if unknown).
// pub fn nrank(&self) -> u32 {
// self.nrank
// }
// /// Memzone descriptors.
// pub fn memzones(&self) -> Vec<memzone::MemoryZone> {
// (0..self.memzones.len)
// .map(|idx| unsafe { ffi::rte_fbarray_get(&self.memzones, idx) as *const _ })
// .map(memzone::from_raw)
// .collect()
// }
// }
// pub type RawConfig = ffi::rte_config;
// pub type RawConfigPtr = *mut ffi::rte_config;
// The global RTE configuration structure.
// pub struct Config(RawConfigPtr);
// impl From<RawConfigPtr> for Config {
// fn from(p: RawConfigPtr) -> Self {
// Config(p)
// }
// }
// impl Deref for Config {
// type Target = RawConfig;
// fn deref(&self) -> &Self::Target {
// unsafe { &*self.0 }
// }
// }
// impl Config {
// /// Id of the master lcore
// pub fn master_lcore(&self) -> lcore::Id {
// self.master_lcore.into()
// }
// /// Number of available logical cores.
// pub fn lcore_count(&self) -> usize {
// self.lcore_count as usize
// }
// /// Primary or secondary configuration
// pub fn process_type(&self) -> ProcType {
// unsafe { mem::transmute(self.process_type) }
// }
// /// State of cores.
// pub fn lcore_roles(&self) -> &'static [lcore::Role] {
// unsafe { &*(&self.lcore_role[..self.lcore_count as usize] as *const _ as *const [lcore::Role]) }
// }
// /// State of core.
// pub fn lcore_role(&self, lcore_id: lcore::Id) -> lcore::Role {
// self.lcore_role[usize::from(lcore_id)].into()
// }
// /// Memory configuration, which may be shared across multiple DPDK instances
// pub fn memory_config(&self) -> MemoryConfig {
// self.mem_config.into()
// }
// }
// Get the global configuration structure.
// pub fn config() -> Config {
// unsafe { ffi::rte_eal_get_configuration().into() }
// }
|
#[cfg(test)]
mod test_cases {
    use unique::Unique;

    /// An empty collection has no unique element.
    #[test]
    pub fn empty() {
        let nums: Vec<usize> = Vec::new();
        let even: fn(&&usize) -> bool = |&&n| n % 2 == 0;
        assert_eq!(None, nums.iter().unique(even));
    }

    /// Exactly one even value among odds is found.
    #[test]
    pub fn unique_even() {
        let nums: Vec<usize> = vec![1, 3, 4, 5, 7, 9];
        let even: fn(&&usize) -> bool = |&&n| n % 2 == 0;
        assert_eq!(Some(&4), nums.iter().unique(even));
    }

    /// Exactly one odd value among evens is found.
    #[test]
    pub fn unique_odd() {
        let nums: Vec<usize> = vec![0, 2, 4, 5, 6, 8];
        let odd: fn(&&usize) -> bool = |&&n| n % 2 != 0;
        assert_eq!(Some(&5), nums.iter().unique(odd));
    }

    /// Neither parity is unique when both occur more than once.
    #[test]
    pub fn nonunique_even_or_odd() {
        let nums: Vec<usize> = vec![1, 2, 3, 4];
        let even: fn(&&usize) -> bool = |&&n| n % 2 == 0;
        let odd: fn(&&usize) -> bool = |&&n| n % 2 != 0;
        assert_eq!(None, nums.iter().unique(even));
        assert_eq!(None, nums.iter().unique(odd));
    }

    /// A value matching the predicate exactly once is returned.
    #[test]
    pub fn unique_number() {
        let nums: Vec<usize> = vec![0, 0, 0, 0, 3, 0];
        let find_3: fn(&&usize) -> bool = |&&n| n == 3;
        assert_eq!(Some(&3), nums.iter().unique(find_3));
    }

    /// A value matching the predicate several times is not unique.
    #[test]
    pub fn nonunique_number() {
        let nums: Vec<usize> = vec![0, 0, 0, 0, 3, 0];
        let find_0: fn(&&usize) -> bool = |&&n| n == 0;
        assert_eq!(None, nums.iter().unique(find_0));
    }

    /// Works for string slices too: one match is unique.
    #[test]
    pub fn unique_name() {
        let names = vec!["Ronnie", "John", "David", "David"];
        let find_ronnie: fn(&&&str) -> bool = |&&n| n == "Ronnie";
        assert_eq!(Some(&"Ronnie"), names.iter().unique(find_ronnie));
    }

    /// Two matches means not unique.
    #[test]
    pub fn nonunique_name() {
        let names = vec!["Ronnie", "John", "David", "David"];
        let find_david: fn(&&&str) -> bool = |&&n| n == "David";
        assert_eq!(None, names.iter().unique(find_david));
    }
}
|
// Declares this crate's error type via the `error-chain` macro.
error_chain! {
    errors {
        // Raised when the expected project directory does not exist.
        ProjectDirMissing { description("project directory is missing") }
    }
}
|
use crate::util::CountryCode;
use chrono::{DateTime, Utc};
use rosu_v2::prelude::{GameMode, GameMods, Grade, RankStatus};
use serde::{de, Deserialize, Deserializer};
#[derive(Deserialize)]
/// Top-level JSON envelope: a list of scraped scores.
pub struct ScraperScores {
    scores: Vec<ScraperScore>,
}
impl ScraperScores {
    /// Consumes the envelope, yielding the inner score list.
    pub fn get(self) -> Vec<ScraperScore> {
        self.scores
    }
}
/// One scraped score, flattened from the nested API response (user and
/// statistics sub-objects are folded into top-level fields by the manual
/// `Deserialize` impl below).
pub struct ScraperScore {
    pub id: u64,
    pub user_id: u32,
    pub username: String,
    pub country_code: CountryCode,
    pub accuracy: f32, // percent (scaled x100 during deserialization)
    pub mods: GameMods,
    pub score: u32,
    pub max_combo: u32,
    pub perfect: bool,
    pub pp: Option<f32>,
    pub grade: Grade,
    pub date: DateTime<Utc>,
    pub mode: GameMode,
    pub replay: bool,
    pub count50: u32,
    pub count100: u32,
    pub count300: u32,
    pub count_geki: u32,
    pub count_katu: u32,
    pub count_miss: u32,
}
impl<'de> Deserialize<'de> for ScraperScore {
    /// Deserializes the raw API shape into the flat `ScraperScore`:
    /// the wire format nests user info and hit statistics, renames
    /// `rank`/`created_at`/`mode_int`, and sends accuracy as a fraction.
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        // Mirror of the wire format; exists only inside this function.
        #[derive(Deserialize)]
        struct Outer {
            id: u64,
            user_id: u32,
            #[serde(deserialize_with = "adjust_acc")]
            accuracy: f32,
            mods: GameMods,
            score: u32,
            max_combo: u32,
            perfect: bool,
            statistics: ScraperScoreStatistics,
            pp: Option<f32>,
            rank: Grade,
            #[serde(deserialize_with = "adjust_datetime")]
            created_at: DateTime<Utc>,
            mode_int: GameMode,
            replay: bool,
            user: ScraperUser,
        }
        // Hit counts; each defaults to 0 when absent from the payload.
        #[derive(Deserialize)]
        pub struct ScraperScoreStatistics {
            #[serde(default)]
            count_50: u32,
            #[serde(default)]
            count_100: u32,
            #[serde(default)]
            count_300: u32,
            #[serde(default)]
            count_geki: u32,
            #[serde(default)]
            count_katu: u32,
            #[serde(default)]
            count_miss: u32,
        }
        // Nested user object; only the fields we flatten are kept.
        #[derive(Deserialize)]
        pub struct ScraperUser {
            username: String,
            country_code: CountryCode,
        }
        let helper = Outer::deserialize(deserializer)?;
        // Flatten the helper into the public struct.
        Ok(ScraperScore {
            id: helper.id,
            user_id: helper.user_id,
            username: helper.user.username,
            country_code: helper.user.country_code,
            accuracy: helper.accuracy,
            mods: helper.mods,
            score: helper.score,
            max_combo: helper.max_combo,
            perfect: helper.perfect,
            pp: helper.pp,
            grade: helper.rank,
            date: helper.created_at,
            mode: helper.mode_int,
            replay: helper.replay,
            count50: helper.statistics.count_50,
            count100: helper.statistics.count_100,
            count300: helper.statistics.count_300,
            count_geki: helper.statistics.count_geki,
            count_katu: helper.statistics.count_katu,
            count_miss: helper.statistics.count_miss,
        })
    }
}
#[derive(Deserialize)]
/// Scraped beatmap metadata. `rename` attributes map the API's field names
/// onto the conventional short stats names; count/playcount fields default
/// to 0 when omitted.
pub struct ScraperBeatmap {
    pub id: u32,
    pub beatmapset_id: u32,
    #[serde(rename = "mode_int")]
    pub mode: GameMode,
    pub difficulty_rating: f32,
    pub version: String,
    pub total_length: u32,
    pub hit_length: u32,
    pub bpm: f32,
    pub cs: f32,
    #[serde(rename = "drain")]
    pub hp: f32,
    #[serde(rename = "accuracy")]
    pub od: f32,
    pub ar: f32,
    #[serde(default)]
    pub playcount: u32,
    #[serde(default)]
    pub passcount: u32,
    #[serde(default)]
    pub count_circles: u32,
    #[serde(default)]
    pub count_sliders: u32,
    #[serde(default)]
    pub count_spinner: u32,
    #[serde(default)]
    pub count_total: u32,
    #[serde(deserialize_with = "adjust_datetime")]
    pub last_updated: DateTime<Utc>,
    pub ranked: RankStatus,
}
/// Deserializes an accuracy value and scales it by 100
/// (fraction on the wire → percent in our structs).
fn adjust_acc<'de, D: Deserializer<'de>>(d: D) -> Result<f32, D::Error> {
    Deserialize::deserialize(d).map(|raw: f32| raw * 100.0)
}
fn adjust_datetime<'de, D: Deserializer<'de>>(d: D) -> Result<DateTime<Utc>, D::Error> {
let d: &str = Deserialize::deserialize(d)?;
let d = DateTime::parse_from_rfc3339(d)
.map_err(de::Error::custom)?
.with_timezone(&Utc);
Ok(d)
}
|
extern crate gcc;
extern crate submodules;
use gcc::Config;
use std::env;
/// Build script entry: makes sure git submodules are present, then
/// cross-compiles the vendored EFM32 startup/system sources.
fn main() {
    submodules::update()
        .init()
        .recursive()
        .run();
    compile_library();
}
/// Compiles the EFM32GG device support files with the ARM embedded GCC
/// toolchain into a static library the Rust crate links against.
// NOTE(review): the `gcc` crate is the predecessor of today's `cc` crate;
// the builder API used here matches that older crate.
fn compile_library() {
    println!("The ARM embedded toolchain must be available in the PATH");
    // Force the cross toolchain regardless of host defaults.
    env::set_var("CC", "arm-none-eabi-gcc");
    env::set_var("AR", "arm-none-eabi-ar");
    let mut config = Config::new();
    config
        .define("EFM32GG990F1024", None)
        .include("efm32-common/CMSIS/Include")
        .include("efm32-common/Device/EFM32GG/Include")
        .flag("-Wall")
        .flag("-mcpu=cortex-m3")
        .flag("-mthumb")
        .file("efm32-common/Device/EFM32GG/Source/GCC/startup_efm32gg.S")
        .file("efm32-common/Device/EFM32GG/Source/system_efm32gg.c")
        .compile("libcompiler-rt.a");
}
|
extern crate futures;
extern crate telegram_bot;
extern crate tokio_core;
extern crate rspotify;
use rspotify::spotify::client::Spotify;
use rspotify::spotify::model::playlist::PlaylistTrack;
use rspotify::spotify::model::track::FullTrack;
use rspotify::spotify::model::user::PrivateUser;
use rspotify::spotify::oauth2::{SpotifyClientCredentials, SpotifyOAuth, TokenInfo};
use std::env;
use std::panic;
use std::sync::{Arc, Mutex};
use futures::{Future, Stream};
use telegram_bot::*;
use tokio_core::reactor::Core;
use lazy_static::lazy_static;
lazy_static! {
    // Matches open.spotify.com / play.spotify.com track URLs and
    // spotify:track: URIs, capturing the track id (query string stripped).
    static ref SPOTIFY_RGX: regex::Regex = regex::Regex::new(
        r#"(?im)(?:https?://(?:open|play).spotify.com/track/|spotify:track:)(?P<id>.*?)(?:\?.*?)?$"#,
    )
    .unwrap();
    // Shared OAuth token, refreshed periodically by a background thread.
    static ref SPOTIFY_TOKEN: Arc<Mutex<TokenInfo>> = Arc::new(Mutex::new(TokenInfo::default()));
}
/// Refreshes the shared `SPOTIFY_TOKEN` using the long-lived root refresh
/// token. On refresh failure the previous token is silently kept.
fn update_spotify_token(
    spotify_client_id: &str,
    spotify_client_secret: &str,
    spotify_root_token: &str,
) {
    let oauth = SpotifyOAuth::default()
        .client_id(spotify_client_id)
        .client_secret(spotify_client_secret)
        .scope("user-read-private,user-read-birthdate,user-read-email,playlist-read-private,user-library-read,user-library-modify,user-top-read,playlist-read-collaborative,playlist-modify-public,playlist-modify-private,user-follow-read,user-follow-modify,user-read-playback-state,user-read-currently-playing,user-modify-playback-state,user-read-recently-played");
    if let Some(fresh) = oauth.refresh_access_token(spotify_root_token) {
        *SPOTIFY_TOKEN.lock().unwrap() = fresh;
    }
}
/// Builds a Spotify client around a snapshot of the shared token.
/// Always returns `Some`; the `Option` return is kept for caller
/// compatibility.
fn spotify(
    spotify_client_id: &str,
    spotify_client_secret: &str,
) -> Option<Spotify> {
    let token_snapshot = SPOTIFY_TOKEN.lock().unwrap().clone();
    let credentials = SpotifyClientCredentials::default()
        .client_id(spotify_client_id)
        .client_secret(spotify_client_secret)
        .token_info(token_snapshot)
        .build();
    let client = Spotify::default()
        .client_credentials_manager(credentials)
        .build();
    Some(client)
}
/// Fetches every track of a playlist by paging through the API in chunks
/// of 100. Any API error aborts the paging loop and returns whatever was
/// collected so far (best effort, not an error).
fn get_playlist_tracks(
    spotify: &Spotify,
    spotify_user: &str,
    spotify_playlist: &str,
) -> Vec<FullTrack> {
    let mut tracks: Vec<PlaylistTrack> = Vec::new();
    let limit = 100;
    let mut offset = 0;
    loop {
        match spotify.user_playlist_tracks(
            spotify_user,
            spotify_playlist,
            None,
            limit,
            offset,
            None,
        ) {
            Ok(mut track_page) => {
                tracks.append(&mut track_page.items);
                // Stop once the page we just consumed reached the total.
                if track_page.total <= (limit + offset) {
                    break;
                }
                offset += limit;
            }
            _ => break, // API error: return the partial result
        }
    }
    // Strip the playlist wrapper, keeping only the track payloads.
    tracks
        .into_iter()
        .map(|playlist_track| playlist_track.track)
        .collect()
}
/// Bot entry point: watches a Telegram chat for Spotify track links and
/// adds each linked track to a configured playlist (moving it to the top
/// if it is already present).
fn main() {
    // NOTE(review): "SPOTFIY_PLAYLIST" and "SPOTFIY_ROOT_TOKEN" look like
    // misspellings of "SPOTIFY_..."; the deployment must export these exact
    // names, so renaming them here would be a breaking config change —
    // confirm with the environment before fixing.
    let spotify_user = env::var("SPOTIFY_USER").unwrap();
    let spotify_playlist = env::var("SPOTFIY_PLAYLIST").unwrap();
    let spotify_client_id = env::var("SPOTIFY_CLIENT_ID").unwrap();
    let spotify_client_secret = env::var("SPOTIFY_CLIENT_SECRET").unwrap();
    let spotify_root_token = env::var("SPOTFIY_ROOT_TOKEN").unwrap();
    // Background thread: refresh the OAuth token every 30 minutes.
    // catch_unwind keeps a failed refresh from killing the thread.
    std::thread::spawn(|| loop {
        std::thread::sleep(std::time::Duration::from_secs(1800u64));
        panic::catch_unwind(
            || {
                let spotify_client_id = env::var("SPOTIFY_CLIENT_ID").unwrap();
                let spotify_client_secret = env::var("SPOTIFY_CLIENT_SECRET").unwrap();
                let spotify_root_token = env::var("SPOTFIY_ROOT_TOKEN").unwrap();
                update_spotify_token(
                    &spotify_client_id.clone(),
                    &spotify_client_secret.clone(),
                    &spotify_root_token.clone(),
                );
            },
        );
    });
    // Initial token fetch before serving any updates.
    update_spotify_token(
        &spotify_client_id.clone(),
        &spotify_client_secret.clone(),
        &spotify_root_token.clone(),
    );
    let mut core = Core::new().unwrap();
    let token = env::var("TELEGRAM_BOT_TOKEN").unwrap();
    let api = Api::configure(token).build(core.handle()).unwrap();
    let future =
        api.stream().for_each(|update| {
            if let UpdateKind::Message(message) = update.kind {
                if let MessageKind::Text { ref data, .. } = message.kind {
                    //println!(
                    //    "<{}; {}>: {}",
                    //    &message.from.id, &message.from.first_name, data
                    //);
                    // Ignore messages without a recognizable Spotify link.
                    let captures = SPOTIFY_RGX.captures(data);
                    if captures.is_none() {
                        return Ok(());
                    }
                    let id = captures.unwrap().name("id");
                    if !id.is_some() {
                        return Ok(());
                    }
                    let id = id.unwrap().as_str();
                    spotify(
                        &spotify_client_id,
                        &spotify_client_secret,
                    )
                    .map(|spotify| {
                        let tracks = get_playlist_tracks(
                            &spotify,
                            &spotify_user,
                            &spotify_playlist,
                        );
                        // Look for the linked track among the playlist items.
                        let track_match = tracks
                            .into_iter()
                            .enumerate()
                            .find(
                                |(i, track)|
                                match &track.id {
                                    Some(id_string) => id_string.as_str() == id,
                                    None => false
                                }
                            );
                        match track_match {
                            Some((i, _)) => {
                                // Already present: move it to position 0.
                                // NOTE(review): both API calls below return
                                // Results that are silently discarded; a
                                // failed reorder/add still replies success.
                                if i != 0 {
                                    spotify.user_playlist_recorder_tracks(
                                        &spotify_user,
                                        &spotify_playlist,
                                        i as i32,
                                        1,
                                        0,
                                        None,
                                    );
                                    api.spawn(message.text_reply(
                                        "Track already in playlist, moved it to the top.",
                                    ));
                                }
                            }
                            None => {
                                // New track: insert at the top of the playlist.
                                spotify.user_playlist_add_tracks(
                                    &spotify_user,
                                    &spotify_playlist,
                                    &[id.into()],
                                    Some(0),
                                );
                                api.spawn(
                                    message.text_reply("Track added to playlist!"),
                                );
                            }
                        }
                    });
                }
            }
            Ok(())
        });
    core.run(future).unwrap();
}
|
// Register accessor in svd2rust output style — presumably machine-generated
// from an SVD description; prefer regenerating over hand-editing.
#[doc = "Register `MTLISR` reader"]
pub type R = crate::R<MTLISR_SPEC>;
#[doc = "Register `MTLISR` writer"]
pub type W = crate::W<MTLISR_SPEC>;
#[doc = "Field `Q0IS` reader - Queue interrupt status"]
pub type Q0IS_R = crate::BitReader;
#[doc = "Field `Q0IS` writer - Queue interrupt status"]
pub type Q0IS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 0 - Queue interrupt status"]
    #[inline(always)]
    pub fn q0is(&self) -> Q0IS_R {
        // Extract bit 0 of the raw register value.
        Q0IS_R::new((self.bits & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - Queue interrupt status"]
    #[inline(always)]
    #[must_use]
    pub fn q0is(&mut self) -> Q0IS_W<MTLISR_SPEC, 0> {
        Q0IS_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must ensure the raw value is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "Interrupt status Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mtlisr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mtlisr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MTLISR_SPEC;
impl crate::RegisterSpec for MTLISR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`mtlisr::R`](R) reader structure"]
impl crate::Readable for MTLISR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mtlisr::W`](W) writer structure"]
impl crate::Writable for MTLISR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MTLISR to value 0"]
impl crate::Resettable for MTLISR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use newton_raphson::newton_raphson;
use plot::line_dash;
/// Target polynomial: x^4 - 6.4x^3 + 6.45x^2 + 20.538x - 31.752.
/// The term order is kept exactly as before so float rounding is unchanged.
fn f(x: f32) -> f32 {
    let value = x.powi(4) - 6.4 * x.powi(3) + 6.45 * x.powi(2) + 20.538 * x - 31.752;
    value
}
/// First derivative of `f`: 4x^3 - 19.2x^2 + 12.9x + 20.538.
/// Term order preserved so float rounding matches the original.
fn df(x: f32) -> f32 {
    let slope = 4.0 * x.powi(3) - 19.2 * x.powi(2) + 12.9 * x + 20.538;
    slope
}
fn result(x: Vec<f32>) -> Vec<f32> {
let mut data = Vec::new();
for i in 0..10 {
data.push(f(x[i]));
}
data
}
/// Runs Newton-Raphson from the initial guess 1.9 and plots f over the
/// sample grid 0..=9.
fn main() {
    let data = vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0];
    let tol = 1e-9; // convergence tolerance for the root finder
    let result = result(data.clone());
    // Root/iteration history are computed but unused; only the plot is kept.
    let (_root, _results) = newton_raphson(1.9, f, df, tol);
    line_dash(data.clone(), result, "", "Newton-Raphson");
}
|
use ast;
use proc_macro2::Ident;
use syn;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
/// Whether an occurrence of an imported type introduces it or merely uses it.
pub enum ImportedTypeKind {
    /// The definition of an imported type.
    Definition,
    /// A reference to an imported type.
    Reference,
}
/// Iterate over definitions of and references to imported types in the AST.
pub trait ImportedTypes {
    // Calls `f` once per occurrence, tagging each with its kind.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind);
}
/// Iterate over definitions of imported types in the AST.
pub trait ImportedTypeDefinitions {
    fn imported_type_definitions<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident);
}
impl<T> ImportedTypeDefinitions for T
where
    T: ImportedTypes,
{
    /// Walks all imported-type occurrences and forwards only the ones
    /// tagged as definitions.
    fn imported_type_definitions<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident),
    {
        self.imported_types(&mut |id, kind| {
            if kind == ImportedTypeKind::Definition {
                f(id);
            }
        });
    }
}
/// Iterate over references to imported types in the AST.
pub trait ImportedTypeReferences {
    // Calls `f` once per reference occurrence (definitions are skipped).
    fn imported_type_references<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident);
}
impl<T> ImportedTypeReferences for T
where
T: ImportedTypes,
{
fn imported_type_references<F>(&self, f: &mut F)
where
F: FnMut(&Ident),
{
self.imported_types(&mut |id, kind| {
if let ImportedTypeKind::Reference = kind {
f(id);
}
});
}
}
impl ImportedTypes for ast::Program {
    /// Visits the three program sections that can mention imported types.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        self.imports.imported_types(f);
        self.type_aliases.imported_types(f);
        self.consts.imported_types(f);
    }
}
impl<T> ImportedTypes for Vec<T>
where
    T: ImportedTypes,
{
    /// Visits every element in order, forwarding each to `f`.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        for element in self.iter() {
            element.imported_types(f);
        }
    }
}
impl ImportedTypes for ast::Import {
    // An import's occurrences live entirely in its kind payload.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        self.kind.imported_types(f)
    }
}
impl ImportedTypes for ast::ImportKind {
    // Dispatch to the concrete import variant.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        match self {
            ast::ImportKind::Static(s) => s.imported_types(f),
            ast::ImportKind::Function(fun) => fun.imported_types(f),
            ast::ImportKind::Type(ty) => ty.imported_types(f),
            ast::ImportKind::Enum(enm) => enm.imported_types(f),
        }
    }
}
impl ImportedTypes for ast::ImportStatic {
    // A static's only type occurrence is its declared type.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        self.ty.imported_types(f);
    }
}
impl ImportedTypes for syn::Type {
    // Only reference and path types can name an imported type; every other
    // syn type shape is ignored.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        match self {
            syn::Type::Reference(ref r) => r.imported_types(f),
            syn::Type::Path(ref p) => p.imported_types(f),
            _ => {}
        }
    }
}
impl ImportedTypes for syn::TypeReference {
    // `&T` / `&mut T`: recurse into the referenced type.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        self.elem.imported_types(f);
    }
}
impl ImportedTypes for syn::TypePath {
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        // Only bare single-segment paths (`Foo`, no `::` prefix, no qself)
        // can refer to an imported type; anything else is ignored.
        if self.qself.is_some()
            || self.path.leading_colon.is_some()
            || self.path.segments.len() != 1
        {
            return;
        }
        f(
            &self.path.segments.last().unwrap().value().ident,
            ImportedTypeKind::Reference,
        );
    }
}
impl ImportedTypes for ast::ImportFunction {
    // Both the signature and the function kind (e.g. method receiver type)
    // can mention imported types.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        self.function.imported_types(f);
        self.kind.imported_types(f);
    }
}
impl ImportedTypes for ast::ImportFunctionKind {
    // Only methods carry a receiver type; free functions contribute nothing.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        match self {
            ast::ImportFunctionKind::Method { ty, .. } => ty.imported_types(f),
            ast::ImportFunctionKind::Normal => {}
        }
    }
}
impl ImportedTypes for ast::Function {
    // Visit every argument type plus the optional return type.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        self.arguments.imported_types(f);
        if let Some(ref r) = self.ret {
            r.imported_types(f);
        }
    }
}
impl ImportedTypes for syn::ArgCaptured {
    // A captured argument's occurrence is its declared type.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        self.ty.imported_types(f);
    }
}
impl ImportedTypes for ast::ImportType {
    // An imported type declaration *defines* its name.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        f(&self.name, ImportedTypeKind::Definition);
    }
}
impl ImportedTypes for ast::ImportEnum {
    // An imported enum declaration also *defines* its name.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        f(&self.name, ImportedTypeKind::Definition);
    }
}
impl ImportedTypes for ast::TypeAlias {
    // An alias *references* its destination type.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        f(&self.dest, ImportedTypeKind::Reference);
    }
}
impl ImportedTypes for ast::Const {
    // A const's occurrence is its declared type.
    fn imported_types<F>(&self, f: &mut F)
    where
        F: FnMut(&Ident, ImportedTypeKind),
    {
        self.ty.imported_types(f);
    }
}
/// Remove any methods, statics, &c, that reference types that are *not*
/// defined.
/// Prunes items that reference types which are *not* defined, as judged by
/// the `is_defined` predicate.
pub trait RemoveUndefinedImports {
    fn remove_undefined_imports<F>(&mut self, is_defined: &F)
    where
        F: Fn(&Ident) -> bool;
}
impl RemoveUndefinedImports for ast::Program {
    /// Prunes all three sections that can contain dangling references.
    fn remove_undefined_imports<F>(&mut self, is_defined: &F)
    where
        F: Fn(&Ident) -> bool,
    {
        self.imports.remove_undefined_imports(is_defined);
        self.type_aliases.remove_undefined_imports(is_defined);
        self.consts.remove_undefined_imports(is_defined);
    }
}
impl<T> RemoveUndefinedImports for Vec<T>
where
    T: ImportedTypeReferences,
{
    /// Keeps only elements whose referenced types are all defined; logs the
    /// first undefined reference of each removed element.
    fn remove_undefined_imports<F>(&mut self, is_defined: &F)
    where
        F: Fn(&Ident) -> bool,
    {
        self.retain(|entry| {
            let mut all_defined = true;
            entry.imported_type_references(&mut |id| {
                // Stop checking (and logging) after the first failure.
                if all_defined && !is_defined(id) {
                    info!("removing due to {} not being defined", id);
                    all_defined = false;
                }
            });
            all_defined
        });
    }
}
|
//
// Register interfaces for STM32L4x6
//
// See ST reference manual RM0351
//
#![no_std]
pub mod firewall;
pub mod flash;
pub mod pwr;
pub mod rcc;
pub mod timer_opt;
use crc;
use gpio;
use rng;
use rtc;
use timer;
// Peripheral register blocks: each symbol is resolved at link time to the
// corresponding memory-mapped address (the `stm32l4x6_*` names are
// presumably provided by a linker script — confirm in the build setup).
extern {
    #[link_name="stm32l4x6_CRC"] pub static CRC: crc::CRC;
    #[link_name="stm32l4x6_FIREWALL"] pub static FIREWALL: firewall::FW;
    #[link_name="stm32l4x6_FLASH"] pub static FLASH: flash::FLASH;
    #[link_name="stm32l4x6_GPIOA"] pub static GPIOA: gpio::GPIO;
    #[link_name="stm32l4x6_GPIOB"] pub static GPIOB: gpio::GPIO;
    #[link_name="stm32l4x6_GPIOC"] pub static GPIOC: gpio::GPIO;
    #[link_name="stm32l4x6_GPIOD"] pub static GPIOD: gpio::GPIO;
    #[link_name="stm32l4x6_GPIOE"] pub static GPIOE: gpio::GPIO;
    #[link_name="stm32l4x6_GPIOF"] pub static GPIOF: gpio::GPIO;
    #[link_name="stm32l4x6_GPIOG"] pub static GPIOG: gpio::GPIO;
    #[link_name="stm32l4x6_GPIOH"] pub static GPIOH: gpio::GPIO;
    #[link_name="stm32l4x6_PWR"] pub static PWR: pwr::PWR;
    #[link_name="stm32l4x6_RCC"] pub static RCC: rcc::RCC;
    #[link_name="stm32l4x6_RNG"] pub static RNG: rng::RNG;
    #[link_name="stm32l4x6_RTC"] pub static RTC: rtc::RTC;
    #[link_name="stm32l4x6_TIM2"] pub static TIM2: timer::GPTIM32;
    #[link_name="stm32l4x6_TIM2_OR"] pub static TIM2_OR: timer_opt::TIM2_OPT;
    #[link_name="stm32l4x6_TIM3"] pub static TIM3: timer::GPTIM32;
    #[link_name="stm32l4x6_TIM3_OR"] pub static TIM3_OR: timer_opt::TIM3_OPT;
    #[link_name="stm32l4x6_TIM4"] pub static TIM4: timer::GPTIM32;
    #[link_name="stm32l4x6_TIM5"] pub static TIM5: timer::GPTIM32;
}
use super::calc_avg_join_size;
use crate::{error::VelociError, indices::*, persistence::*, type_info::TypeInfo, util::*};
use directory::Directory;
use ownedbytes::OwnedBytes;
use std::{self, cmp::Ordering::Greater, io, marker::PhantomData, path::PathBuf, u32};
use vint32::{iterator::VintArrayIterator, vint_array::VIntArray};
impl_type_info_single_templ!(IndirectIMFlushingInOrderVintNoDirectEncode);
impl_type_info_single_templ!(IndirectIMBinarySearch);
/// This data structure assumes that a set is only called once for a id, and ids are set in order.
///
/// Buffers `(id, data offset)` pairs plus vint-encoded value data in memory
/// and appends both to a `Directory` once the caches grow large (see `add`).
#[derive(Debug, Clone)]
pub(crate) struct IndirectIMFlushingInOrderVintNoDirectEncode<T> {
    /// `(id, offset into the data stream)` pairs not yet flushed.
    pub(crate) ids_cache: Vec<(T, u32)>,
    /// Vint-encoded value bytes not yet flushed.
    pub(crate) data_cache: Vec<u8>,
    /// Total number of data bytes already flushed to the directory.
    pub(crate) current_data_offset: u32,
    /// Already written ids_cache
    pub(crate) current_id_offset: u32,
    /// Target path; a per-stream extension is applied on flush.
    pub(crate) path: PathBuf,
    #[allow(dead_code)]
    pub(crate) metadata: IndexValuesMetadata,
    /// Backing storage the caches are appended to.
    directory: Box<dyn Directory>,
}
impl<T: Default + std::fmt::Debug> IndirectIMFlushingInOrderVintNoDirectEncode<T> {
    /// Creates an empty store that flushes to `path` within `directory`.
    /// `max_value_id` is recorded in the index metadata.
    pub(crate) fn new(directory: Box<dyn Directory>, path: PathBuf, max_value_id: u32) -> Self {
        let mut data_cache = vec![];
        data_cache.resize(1, 0); // resize data by one, because 0 is reserved for the empty buckets
        IndirectIMFlushingInOrderVintNoDirectEncode {
            ids_cache: vec![],
            data_cache,
            current_data_offset: 0,
            current_id_offset: 0,
            metadata: IndexValuesMetadata::new(max_value_id),
            directory,
            path,
        }
    }
    /// Moves the current caches into an in-memory binary-search store.
    /// Data that was already flushed to the directory is NOT included —
    /// the caches only hold what has not been flushed yet.
    pub(crate) fn into_im_store(mut self) -> IndirectIMBinarySearchIM<T> {
        let mut store = IndirectIMBinarySearchIM::default();
        store.start_pos = self.ids_cache;
        store.data = self.data_cache;
        self.metadata.avg_join_size = calc_avg_join_size(self.metadata.num_values, self.metadata.num_ids);
        store.metadata = self.metadata;
        store
    }
    /// Appends the values for `id`. Per the struct-level contract, each id is
    /// added at most once and ids arrive in ascending order. Automatically
    /// flushes once the caches exceed ~4 MB.
    #[inline]
    pub(crate) fn add(&mut self, id: T, add_data: &[u32]) -> Result<(), io::Error> {
        // NOTE(review): one id with `add_data.len()` values is recorded here,
        // yet `num_values` grows by 1 and `num_ids` by the value count — these
        // counters look swapped; confirm against `calc_avg_join_size`.
        self.metadata.num_values += 1;
        self.metadata.num_ids += add_data.len() as u32;
        // Offset is global: bytes flushed so far plus the local cache length.
        let data_pos = self.current_data_offset + self.data_cache.len() as u32;
        self.ids_cache.push((id, data_pos));
        self.data_cache.extend(to_serialized_vint_array(add_data));
        // NOTE(review): entries are `(T, u32)` but only `size_of::<T>()` is
        // counted, so the 4 MB threshold slightly undercounts — confirm intent.
        if self.ids_cache.len() * std::mem::size_of::<T>() + self.data_cache.len() >= 4_000_000 {
            self.flush()?;
        }
        Ok(())
    }
    /// True while nothing has been flushed to the directory yet.
    #[inline]
    pub(crate) fn is_in_memory(&self) -> bool {
        self.current_id_offset == 0
    }
    /// True when no id was ever added (neither cached nor flushed).
    #[inline]
    pub(crate) fn is_empty(&self) -> bool {
        self.ids_cache.is_empty() && self.current_id_offset == 0
    }
    /// Appends both caches to their respective streams (`Ext::Indirect` for
    /// the id table, `Ext::Data` for the value bytes), clears them, and
    /// refreshes `avg_join_size`. No-op when the id cache is empty.
    pub(crate) fn flush(&mut self) -> Result<(), io::Error> {
        if self.ids_cache.is_empty() {
            return Ok(());
        }
        self.current_id_offset += self.ids_cache.len() as u32;
        self.current_data_offset += self.data_cache.len() as u32;
        self.directory.append(&self.path.set_ext(Ext::Indirect), &vec_to_bytes(&self.ids_cache))?;
        self.directory.append(&self.path.set_ext(Ext::Data), &self.data_cache)?;
        self.data_cache.clear();
        self.ids_cache.clear();
        self.metadata.avg_join_size = calc_avg_join_size(self.metadata.num_values, self.metadata.num_ids);
        Ok(())
    }
}
/// Serializes the given values as a `VIntArray` byte stream.
fn to_serialized_vint_array(add_data: &[u32]) -> Vec<u8> {
    VIntArray::from_vals(add_data).serialize()
}
/// In-memory lookup store: sorted `(id, data offset)` pairs plus the
/// vint-encoded value bytes they point into. Built via `into_im_store`.
#[derive(Debug, Clone, Default)]
pub(crate) struct IndirectIMBinarySearchIM<T> {
    /// Sorted `(id, offset into data)` entries, binary-searched by id.
    pub(crate) start_pos: Vec<(T, u32)>,
    /// Vint-encoded value data.
    pub(crate) data: Vec<u8>,
    pub(crate) metadata: IndexValuesMetadata,
}
impl<T: 'static + Ord + Copy + Default + std::fmt::Debug + Sync + Send> PhrasePairToAnchor for IndirectIMBinarySearchIM<T> {
    type Input = T;

    /// Looks up `id` in the sorted `start_pos` table; on a hit, decodes the
    /// vint-encoded values stored at the recorded data offset.
    #[inline]
    fn get_values(&self, id: Self::Input) -> Option<Vec<u32>> {
        self.start_pos
            .binary_search_by_key(&id, |entry| entry.0)
            .ok()
            .map(|idx| {
                let offset = self.start_pos[idx].1 as usize;
                VintArrayIterator::from_serialized_vint_array(&self.data[offset..]).collect()
            })
    }
}
/// Byte-backed lookup store: `start_pos` holds raw `(T, u32)` entries that
/// are binary-searched directly in their byte representation (`decode_pos`).
#[derive(Debug)]
pub(crate) struct IndirectIMBinarySearch<T> {
    /// Raw bytes of the sorted `(id, data offset)` entry table.
    pub(crate) start_pos: OwnedBytes,
    /// Raw vint-encoded value data.
    pub(crate) data: OwnedBytes,
    /// Marker for the entry key type; no `T` is stored directly.
    pub(crate) ok: PhantomData<T>,
    #[allow(dead_code)]
    pub(crate) metadata: IndexValuesMetadata,
    /// Number of entries in `start_pos` (byte length / entry size).
    pub(crate) size: usize,
}
impl<T: Ord + Copy + Default + std::fmt::Debug> IndirectIMBinarySearch<T> {
    /// Builds a reader over raw `start_pos`/`data` buffers; the entry count is
    /// derived from the byte length of `start_pos`.
    pub fn from_data(start_pos: OwnedBytes, data: OwnedBytes, metadata: IndexValuesMetadata) -> Result<Self, VelociError> {
        let num_entries = start_pos.len() / std::mem::size_of::<(T, u32)>();
        Ok(Self {
            start_pos,
            data,
            ok: PhantomData,
            metadata,
            size: num_entries,
        })
    }

    /// Binary-searches the raw `start_pos` bytes for `id`.
    #[inline]
    fn binary_search(&self, id: T) -> Option<(T, u32)> {
        binary_search_slice(self.size, id, &self.start_pos)
    }
}
/// Decodes the `pos`-th `(T, K)` entry from its raw byte representation in
/// `slice`.
///
/// The entries must have been written with the same compiler/layout (this
/// module serializes them via `vec_to_bytes`), and every bit pattern must be
/// valid for `(T, K)` — true for the integer tuples used in this file.
///
/// # Panics
/// Panics when `slice` does not contain a full entry at `pos`. (The old code
/// only bounds-checked the start offset via slice indexing; a truncated slice
/// could read out of bounds.)
#[inline]
#[allow(trivial_casts)]
fn decode_pos<T: Copy + Default, K: Copy + Default>(pos: usize, slice: &[u8]) -> (T, K) {
    let entry_size = std::mem::size_of::<(T, K)>();
    let byte_pos = entry_size * pos;
    assert!(
        byte_pos + entry_size <= slice.len(),
        "decode_pos: entry {} out of bounds (slice len {})",
        pos,
        slice.len()
    );
    let mut out: (T, K) = Default::default();
    // SAFETY: the assert above guarantees `entry_size` readable bytes starting
    // at `byte_pos`, and `out` is a valid, writable `(T, K)` of exactly that
    // size; source and destination cannot overlap.
    unsafe {
        slice[byte_pos..]
            .as_ptr()
            .copy_to_nonoverlapping(&mut out as *mut (T, K) as *mut u8, entry_size);
    }
    out
}
/// Binary-searches `size` raw `(T, K)` entries stored in `slice` for the
/// entry whose key equals `id`, returning the decoded entry on a hit.
///
/// The entries must be sorted ascending by key. The search narrows a window
/// by halving: the probed key is kept as the new lower bound unless it is
/// strictly greater than the target.
#[inline]
pub(crate) fn binary_search_slice<T: Ord + Copy + Default + std::fmt::Debug, K: Copy + Default>(size: usize, id: T, slice: &[u8]) -> Option<(T, K)> {
    if size == 0 {
        return None;
    }
    let mut lo = 0usize;
    let mut remaining = size;
    while remaining > 1 {
        let half = remaining / 2;
        let probe = lo + half;
        // `probe` stays in [0, size): it is lo + half with half < remaining.
        if decode_pos::<T, K>(probe, slice).0 <= id {
            lo = probe;
        }
        remaining -= half;
    }
    // `lo` now points at the last entry whose key is <= id (or entry 0).
    let candidate: (T, K) = decode_pos(lo, slice);
    if candidate.0 == id {
        Some(candidate)
    } else {
        None
    }
}
impl<T: 'static + Ord + Copy + Default + std::fmt::Debug + Sync + Send> PhrasePairToAnchor for IndirectIMBinarySearch<T> {
    type Input = T;

    /// Looks up `id` via binary search over the raw entry bytes and, on a hit,
    /// decodes the vint-encoded values at the entry's data offset.
    #[inline]
    fn get_values(&self, id: Self::Input) -> Option<Vec<u32>> {
        match self.binary_search(id) {
            Some((_, data_pos)) => {
                let start = data_pos as usize;
                Some(VintArrayIterator::from_serialized_vint_array(&self.data[start..]).collect())
            }
            None => None,
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::directory::load_data_pair;
    use super::*;
    use directory::RamDirectory;
    // NOTE(review): `Path` (used below) does not appear among this file's
    // explicit imports; presumably it arrives via a glob import such as
    // `util::*` — confirm.
    /// Builds a store keyed by `(u32, u32)` ids with a fixed set of values.
    fn get_test_data_1_to_n_ind(directory: Box<dyn Directory>, path: PathBuf) -> IndirectIMFlushingInOrderVintNoDirectEncode<(u32, u32)> {
        let mut store = IndirectIMFlushingInOrderVintNoDirectEncode::new(directory, path, u32::MAX);
        store.add((0, 0), &[5, 6]).unwrap();
        store.add((0, 1), &[9]).unwrap();
        store.add((2, 0), &[9]).unwrap();
        store.add((2, 3), &[9, 50000]).unwrap();
        store.add((5, 0), &[80]).unwrap();
        store.add((5, 9), &[0]).unwrap();
        store.add((5, 10), &[0]).unwrap();
        store
    }
    /// Lookups against the pure in-memory store (nothing flushed).
    #[test]
    fn test_in_memory() {
        let directory: Box<dyn Directory> = Box::new(RamDirectory::create());
        let path = Path::new("yop").to_owned();
        let store = get_test_data_1_to_n_ind(directory.box_clone(), path);
        let yop = store.into_im_store();
        assert_eq!(yop.get_values((0, 0)), Some(vec![5, 6]));
        assert_eq!(yop.get_values((0, 1)), Some(vec![9]));
        assert_eq!(yop.get_values((0, 2)), None);
        assert_eq!(yop.get_values((2, 0)), Some(vec![9]));
        assert_eq!(yop.get_values((2, 3)), Some(vec![9, 50000]));
        assert_eq!(yop.get_values((5, 0)), Some(vec![80]));
        assert_eq!(yop.get_values((5, 9)), Some(vec![0]));
        assert_eq!(yop.get_values((5, 10)), Some(vec![0]));
    }
    /// Flushes to a RAM directory, reloads the raw bytes, and exercises the
    /// byte-backed binary-search store.
    #[test]
    fn test_mmap() {
        let directory: Box<dyn Directory> = Box::new(RamDirectory::create());
        let path = Path::new("yop").to_owned();
        let mut store = get_test_data_1_to_n_ind(directory.box_clone(), path.clone());
        store.flush().unwrap();
        let (ind, data) = load_data_pair(&directory, Path::new(&path)).unwrap();
        let store = IndirectIMBinarySearch::<(u32, u32)>::from_data(ind, data, store.metadata).unwrap();
        assert_eq!(store.size, 7);
        // Offsets start at 1 because data byte 0 is reserved (empty bucket).
        assert_eq!(decode_pos(0, &store.start_pos), ((0, 0), 1));
        assert_eq!(decode_pos(1, &store.start_pos), ((0, 1), 4));
        assert_eq!(store.get_values((0, 0)), Some(vec![5, 6]));
        assert_eq!(store.get_values((0, 1)), Some(vec![9]));
        assert_eq!(store.get_values((0, 2)), None);
        assert_eq!(store.get_values((2, 0)), Some(vec![9]));
        assert_eq!(store.get_values((2, 3)), Some(vec![9, 50000]));
        assert_eq!(store.get_values((5, 0)), Some(vec![80]));
        assert_eq!(store.get_values((5, 9)), Some(vec![0]));
        assert_eq!(store.get_values((5, 10)), Some(vec![0]));
    }
}
|
use std::cell::RefCell;
use std::rc::Rc;
use crate::treenode::TreeNode;
pub fn preorder_traversal(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<i32> {
fn core(root: Rc<RefCell<TreeNode>>, v: &mut Vec<i32>) {
v.push(root.borrow().val);
if let Some(left) = root.borrow().left.clone() {
core(left, v);
}
if let Some(right) = root.borrow().right.clone() {
core(right, v);
}
}
let mut v = vec![];
root.map(|r| core(r, &mut v));
v
} |
use std::{fs, io, path::Path, error::Error};
use std::io::{BufRead, BufReader};
use std::fs::File;
/// Returns `true` when there are at least two vectors and they all have the
/// same length; `false` for zero or one vector, or on any length mismatch.
///
/// Takes a slice instead of `&Vec<Vec<f32>>`; existing `&vec` call sites keep
/// working via deref coercion.
pub fn check_leng_vectors(vectors: &[Vec<f32>]) -> bool {
    // Fewer than two vectors cannot form a comparable set.
    if vectors.len() < 2 {
        return false;
    }
    let expected = vectors[0].len();
    vectors.iter().all(|v| v.len() == expected)
}
/// Reads a comma-separated matrix of `f32` values from the file at `path`.
///
/// Each non-empty line (after trimming) becomes one row; empty lines are
/// skipped. The returned errors keep the original user-facing Russian
/// messages: missing file, read failure, or parse failure.
pub fn create_vectors(path: &std::ffi::OsString) -> Result<Vec<Vec<f32>>, Box<dyn Error>> {
    let path = Path::new(path);
    if !path.is_file() {
        return Err(Box::from("Файл не найден"));
    }
    let file = File::open(path).map_err(|_| Box::<dyn Error>::from("Файл не найден"))?;
    let mut vectors = Vec::new();
    for line in BufReader::new(file).lines() {
        let line = line.map_err(|_| Box::<dyn Error>::from("Ошибка чтения файла"))?;
        let trimmed = line.trim();
        if trimmed.is_empty() {
            continue;
        }
        // `split(',')` keeps surrounding whitespace in each token, exactly as
        // the original parser did, so " 1.0" still fails to parse.
        let row = trimmed
            .split(',')
            .map(|word| word.parse::<f32>())
            .collect::<Result<Vec<f32>, _>>()
            .map_err(|_| Box::<dyn Error>::from("Ошибка парсинга файла"))?;
        vectors.push(row);
    }
    Ok(vectors)
}
|
#[macro_use]
extern crate serde_derive;
extern crate docopt;
extern crate ro_scalar_set;
extern crate rand;
extern crate memmap;
extern crate rayon;
use docopt::Docopt;
mod enumerations;
mod evaluation;
// use evaluation::WithGpu;
mod traits;
mod test;
mod utility;
use enumerations::*;
// Docopt parses this string at runtime to build the CLI, so the text below is
// behavior, not documentation: placeholder and flag names must match the
// `arg_*`/`flag_*`/`cmd_*` fields of `Args`.
// NOTE(review): the bare `ro_scalar_set` line inside the usage section looks
// like a stray paste — confirm it is intentional before removing it.
const USAGE: &'static str = "
Scalar Set Evaluator.
Usage:
  scalar_set_eval new [--floats] [--gpu] <file> <minvalue> <maxvalue> <values> <sets>
  scalar_set_eval eval [--floats] [--gpu] <file> <minvalue> <maxvalue> <values> [<sets>]
  scalar_set_eval test [--floats] [--gpu] <report> <minvalue> <maxvalue> [<values>] [<sets>]
  scalar_set_eval (-h | --help)
  scalar_set_eval --version
 ro_scalar_set
Options:
  -h --help     Show this screen.
  --version     Show version.
  --mt     Multi-threaded
  --floats     Run tests using floating points
  --gpu     Run tests on GPU
";
/// Command-line arguments, deserialized by docopt from `USAGE`.
/// Naming follows docopt's convention: `arg_*` for positionals, `flag_*` for
/// options, `cmd_*` for subcommands.
#[derive(Debug, Deserialize)]
struct Args
{
    arg_file: String,      // <file>: data file to create/evaluate
    arg_report: String,    // <report>: output path for the `test` command
    arg_minvalue: i32,     // <minvalue>: lower bound of generated values
    arg_maxvalue: i32,     // <maxvalue>: upper bound of generated values
    arg_sets: i32,         // <sets>: number of scalar sets
    arg_values: i32,       // <values>: values per set
    flag_version: bool,    // --version
    flag_mt: bool,         // --mt
    flag_floats: bool,     // --floats: use f32 instead of i32
    flag_gpu: bool,        // --gpu: evaluate on the GPU engine
    cmd_new: bool,         // `new` subcommand
    cmd_eval: bool,        // `eval` subcommand
    cmd_test: bool,        // `test` subcommand
}
/// Entry point: parses the CLI via docopt and dispatches to data generation
/// (`new`), evaluation (`eval`), or the test suite (`test`), timing the
/// whole operation.
fn main() {
    // docopt exits the process itself on bad usage or --help/--version.
    let args: Args = Docopt::new(USAGE)
        .and_then(|d| d.deserialize())
        .unwrap_or_else(|e| e.exit());

    // Select the evaluation backend.
    let engine = if args.flag_gpu {
        EvaluationEngine::Gpu
    } else {
        EvaluationEngine::Cpu
    };

    let started_at = std::time::Instant::now();
    if args.cmd_new {
        // Generate a new data file with the requested element type.
        if args.flag_floats {
            utility::generate::<f32>(
                &args.arg_file,
                args.arg_sets,
                args.arg_values,
                args.arg_minvalue,
                args.arg_maxvalue,
            );
        } else {
            utility::generate::<i32>(
                &args.arg_file,
                args.arg_sets,
                args.arg_values,
                args.arg_minvalue,
                args.arg_maxvalue,
            );
        }
    } else if args.cmd_eval {
        // Evaluate an existing data file and report match count + duration.
        let params = evaluation::EvaluationParams {
            file: &args.arg_file,
            values_in_set: args.arg_values,
            min_value: args.arg_minvalue,
            max_value: args.arg_maxvalue,
            preload_data: false,
            max_threads: 0,
            eval_engine: &engine,
        };
        if args.flag_floats {
            let result = evaluation::evaluate::<f32>(&params);
            println!(
                "Found {} matches in {}.{:06} s",
                result.match_count,
                result.duration.as_secs(),
                result.duration.subsec_nanos() / 1000
            );
        } else {
            let result = evaluation::evaluate::<i32>(&params);
            println!(
                "Found {} matches in {}.{:06} s",
                result.match_count,
                result.duration.as_secs(),
                result.duration.subsec_nanos() / 1000
            );
        }
    } else if args.cmd_test {
        // Run the built-in benchmark/test suite and write a report.
        test::run_tests(
            &args.arg_report,
            args.arg_minvalue,
            args.arg_maxvalue,
            args.flag_floats,
            &engine,
        );
    } else {
        println!("{}", "No tests selected.");
    }

    // `elapsed()` is equivalent to `Instant::now().duration_since(start)`.
    let elapsed = started_at.elapsed();
    println!(
        "Operation took {}.{:06} s.",
        elapsed.as_secs(),
        elapsed.subsec_nanos() / 1000
    );
}
|
#[macro_use]
extern crate enum_from_derive;
extern crate compiletest_rs as compiletest;
use std::path::PathBuf;
/// Runs the compiletest suite for the given mode (e.g. "ui"), using
/// `tests/<mode>` as the source directory.
fn run_mode(mode: &'static str) {
    let mut cfg = compiletest::Config::default();
    cfg.mode = mode.parse().expect("Invalid mode");
    cfg.src_base = PathBuf::from(format!("tests/{}", mode));
    // Populate target_rustcflags with dependencies on the path.
    cfg.link_deps();
    compiletest::run_tests(&cfg);
}
/// UI/compile-fail tests: every file under `tests/ui` runs through compiletest.
#[test]
fn compile_test() {
    run_mode("ui");
}
/// Dummy payload error type used by the derive tests below.
#[derive(Debug, PartialEq)]
struct TestError;
/// `#[derive(Error)]` generates a `From` impl for a single-field variant.
#[test]
fn simple() {
    #[derive(Debug, PartialEq, Error)]
    enum Error {
        Test(TestError)
    }
    assert_eq!(Error::Test(TestError), Error::from(TestError));
}
/// The derive also works with lifetime/type generics and `where` bounds.
#[test]
fn generics() {
    #[derive(Debug, PartialEq, Error)]
    enum Error<'a, T> where T: Send + 'a {
        Test(TestError),
        Other(&'a T)
    }
    assert_eq!(Error::Test::<i32>(TestError), Error::from(TestError));
    assert_eq!(Error::Other(&42), Error::from(&42));
}
/// Variants with more than one field are skipped by the derive (no `From`),
/// as the test name indicates — only `Test` gets a conversion here.
#[test]
fn ignore_multiple_field_variants() {
    #[derive(Debug, PartialEq, Error)]
    enum Error {
        Test(TestError),
        #[allow(dead_code)]
        Other(i32, i32)
    }
    assert_eq!(Error::Test(TestError), Error::from(TestError));
}
mod utils;
use wasm_bindgen::prelude::*;
// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
// allocator. (wee_alloc is typically chosen for wasm targets for its small
// code size — see the crate docs.)
#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
/// Returns `a + b`. Exported to JavaScript via wasm-bindgen.
#[wasm_bindgen]
pub fn add(a: f32, b: f32) -> f32 {
    // Expression form — no redundant local or `return`.
    a + b
}
/// Returns `a - b`. Exported to JavaScript via wasm-bindgen.
#[wasm_bindgen]
pub fn sub(a: f32, b: f32) -> f32 {
    a - b
}
/// Returns `a * b`. Exported to JavaScript via wasm-bindgen.
#[wasm_bindgen]
pub fn mul(a: f32, b: f32) -> f32 {
    a * b
}
/// Returns `a / b`. Exported to JavaScript via wasm-bindgen.
/// Follows IEEE 754 f32 semantics (division by zero yields ±inf/NaN).
#[wasm_bindgen]
pub fn div(a: f32, b: f32) -> f32 {
    a / b
}
|
use std::io::{Cursor, Read, Seek, SeekFrom};
use anyhow::{anyhow, Result};
use byteorder::{LittleEndian, NativeEndian, ReadBytesExt, WriteBytesExt};
use las_rs::{point::Format, Header};
use las_rs::{raw, Builder, Vlr};
use laz::{
las::laszip::{LASZIP_RECORD_ID, LASZIP_USER_ID},
LasZipDecompressor,
};
use pasture_core::layout::PointAttributeDefinition;
use pasture_core::{
containers::InterleavedPointView,
containers::{InterleavedVecPointStorage, PointBuffer, PointBufferWriteable},
layout::attributes,
layout::conversion::get_converter_for_attributes,
layout::{conversion::AttributeConversionFn, PointLayout},
meta::Metadata,
nalgebra::Vector3,
util::view_raw_bytes,
};
use super::{
map_laz_err, point_layout_from_las_point_format, BitAttributes, BitAttributesExtended,
BitAttributesRegular, LASMetadata,
};
use crate::base::{PointReader, SeekToPoint};
/// Is the given VLR the LASzip VLR? Function taken from the `las` crate because it is not exported there
fn is_laszip_vlr(vlr: &Vlr) -> bool {
    // `if cond { true } else { false }` collapsed to the condition itself.
    &vlr.user_id == LASZIP_USER_ID && vlr.record_id == LASZIP_RECORD_ID
}
pub(crate) trait LASReaderBase {
    /// Returns the remaining number of points in the underlying `LASReaderBase`
    fn remaining_points(&self) -> usize;
    /// Returns the parsed LAS header of the file being read.
    fn header(&self) -> &Header;
}
/// Point reader for an uncompressed LAS stream (`Read + Seek`).
pub(crate) struct RawLASReader<T: Read + Seek> {
    reader: T,
    metadata: LASMetadata,
    /// Point layout matching the file's LAS point record format.
    layout: PointLayout,
    /// Index of the next point to read.
    current_point_index: usize,
    /// World-space offsets applied to the stored integer coordinates.
    point_offsets: Vector3<f64>,
    /// Scale factors applied to the stored integer coordinates.
    point_scales: Vector3<f64>,
    offset_to_first_point_in_file: u64,
    /// Record length from the header; may exceed the format's nominal size
    /// when extra bytes are present (extra bytes are skipped on read).
    size_of_point_in_file: u64,
    //TODO Add an option to not convert the position fields into world space
}
impl<T: Read + Seek> RawLASReader<T> {
pub fn from_read(mut read: T) -> Result<Self> {
let raw_header = raw::Header::read_from(&mut read)?;
let offset_to_first_point_in_file = raw_header.offset_to_point_data as u64;
let size_of_point_in_file = raw_header.point_data_record_length as u64;
let point_offsets = Vector3::new(
raw_header.x_offset,
raw_header.y_offset,
raw_header.z_offset,
);
let point_scales = Vector3::new(
raw_header.x_scale_factor,
raw_header.y_scale_factor,
raw_header.z_scale_factor,
);
let header = Header::from_raw(raw_header)?;
let metadata: LASMetadata = header.clone().into();
let point_layout = point_layout_from_las_point_format(header.point_format())?;
read.seek(SeekFrom::Start(offset_to_first_point_in_file as u64))?;
Ok(Self {
reader: read,
metadata: metadata,
layout: point_layout,
current_point_index: 0,
point_offsets,
point_scales,
offset_to_first_point_in_file,
size_of_point_in_file,
})
}
    /// Reads `num_points_in_chunk` records from the file into `chunk_buffer`
    /// using this reader's own point layout. Positions are converted from
    /// local integer coordinates into world space via the header scale/offset;
    /// all other attributes are copied through in record order.
    fn read_chunk_default_layout(
        &mut self,
        chunk_buffer: &mut [u8],
        num_points_in_chunk: usize,
    ) -> Result<()> {
        let mut buffer_cursor = Cursor::new(chunk_buffer);
        let format = Format::new(self.metadata.point_format())?;
        // Current read position, i.e. the start of this chunk in the file.
        let offset_to_first_point_in_file = self.reader.seek(SeekFrom::Current(0))?;
        for point_index in 0..num_points_in_chunk {
            // Point size might be larger than what the format indicates due to extra bytes. Extra bytes are not
            // supported by pasture at the moment, so we skip over them
            let start_of_source_point =
                offset_to_first_point_in_file + point_index as u64 * self.size_of_point_in_file;
            self.reader.seek(SeekFrom::Start(start_of_source_point))?;
            // XYZ
            let local_x = self.reader.read_i32::<LittleEndian>()?;
            let local_y = self.reader.read_i32::<LittleEndian>()?;
            let local_z = self.reader.read_i32::<LittleEndian>()?;
            let global_x = (local_x as f64 * self.point_scales.x) + self.point_offsets.x;
            let global_y = (local_y as f64 * self.point_scales.y) + self.point_offsets.y;
            let global_z = (local_z as f64 * self.point_scales.z) + self.point_offsets.z;
            buffer_cursor.write_f64::<NativeEndian>(global_x)?;
            buffer_cursor.write_f64::<NativeEndian>(global_y)?;
            buffer_cursor.write_f64::<NativeEndian>(global_z)?;
            // Intensity
            buffer_cursor.write_i16::<NativeEndian>(self.reader.read_i16::<LittleEndian>()?)?;
            // Bit attributes
            if self.metadata.point_format() > 5 {
                // Formats 6+: two flag bytes, unpacked into six u8 attributes.
                let bit_attributes_first_byte = self.reader.read_u8()?;
                let bit_attributes_second_byte = self.reader.read_u8()?;
                let return_number = bit_attributes_first_byte & 0b1111;
                let number_of_returns = (bit_attributes_first_byte >> 4) & 0b1111;
                let classification_flags = bit_attributes_second_byte & 0b1111;
                let scanner_channel = (bit_attributes_second_byte >> 4) & 0b11;
                let scan_direction_flag = (bit_attributes_second_byte >> 6) & 0b1;
                let edge_of_flight_line = (bit_attributes_second_byte >> 7) & 0b1;
                buffer_cursor.write_u8(return_number)?;
                buffer_cursor.write_u8(number_of_returns)?;
                buffer_cursor.write_u8(classification_flags)?;
                buffer_cursor.write_u8(scanner_channel)?;
                buffer_cursor.write_u8(scan_direction_flag)?;
                buffer_cursor.write_u8(edge_of_flight_line)?;
            } else {
                // Formats 0-5: one flag byte, unpacked into four u8 attributes.
                let bit_attributes = self.reader.read_u8()?;
                let return_number = bit_attributes & 0b111;
                let number_of_returns = (bit_attributes >> 3) & 0b111;
                let scan_direction_flag = (bit_attributes >> 6) & 0b1;
                let edge_of_flight_line = (bit_attributes >> 7) & 0b1;
                buffer_cursor.write_u8(return_number)?;
                buffer_cursor.write_u8(number_of_returns)?;
                buffer_cursor.write_u8(scan_direction_flag)?;
                buffer_cursor.write_u8(edge_of_flight_line)?;
            }
            // Classification
            buffer_cursor.write_u8(self.reader.read_u8()?)?;
            // User data in format > 5, scan angle rank in format <= 5
            buffer_cursor.write_u8(self.reader.read_u8()?)?;
            if self.metadata.point_format() <= 5 {
                // User data
                buffer_cursor.write_u8(self.reader.read_u8()?)?;
            } else {
                // Scan angle
                buffer_cursor.write_i16::<NativeEndian>(self.reader.read_i16::<LittleEndian>()?)?;
            }
            // Point source ID
            buffer_cursor.write_u16::<NativeEndian>(self.reader.read_u16::<LittleEndian>()?)?;
            // Format 0 is done here, the other formats are handled now
            if format.has_gps_time {
                buffer_cursor.write_f64::<NativeEndian>(self.reader.read_f64::<LittleEndian>()?)?;
            }
            if format.has_color {
                // RGB: three u16 channels.
                buffer_cursor.write_u16::<NativeEndian>(self.reader.read_u16::<LittleEndian>()?)?;
                buffer_cursor.write_u16::<NativeEndian>(self.reader.read_u16::<LittleEndian>()?)?;
                buffer_cursor.write_u16::<NativeEndian>(self.reader.read_u16::<LittleEndian>()?)?;
            }
            if format.has_nir {
                buffer_cursor.write_u16::<NativeEndian>(self.reader.read_u16::<LittleEndian>()?)?;
            }
            if format.has_waveform {
                // Wave packet descriptor index, data offset, packet size,
                // return point location, and three waveform parameters.
                buffer_cursor.write_u8(self.reader.read_u8()?)?;
                buffer_cursor.write_u64::<NativeEndian>(self.reader.read_u64::<LittleEndian>()?)?;
                buffer_cursor.write_u32::<NativeEndian>(self.reader.read_u32::<LittleEndian>()?)?;
                buffer_cursor.write_f32::<NativeEndian>(self.reader.read_f32::<LittleEndian>()?)?;
                buffer_cursor.write_f32::<NativeEndian>(self.reader.read_f32::<LittleEndian>()?)?;
                buffer_cursor.write_f32::<NativeEndian>(self.reader.read_f32::<LittleEndian>()?)?;
                buffer_cursor.write_f32::<NativeEndian>(self.reader.read_f32::<LittleEndian>()?)?;
            }
        }
        Ok(())
    }
fn read_chunk_custom_layout(
&mut self,
source_data: &mut [u8],
chunk_buffer: &mut [u8],
num_points_in_chunk: usize,
target_layout: &PointLayout,
) -> Result<()> {
//let mut buffer_cursor = Cursor::new(chunk_buffer);
let source_format = Format::new(self.metadata.point_format())?;
// This probably works best by introducing a type that stores all information needed for reading and writing a single
// attribute:
// - does the source format of the LAS file have this attribute?
// - does the target layout have this attribute?
// - if the target layout has the attribute, we may need an attribute converter
// - if the target layout has the attribute, we need the byte offset of the attribute to the start of the point record within the point layout
//
// With this information, we can build a bunch of these objects and execute the I/O operations with them, should be more readable
fn get_attribute_parser(
default_attribute: &PointAttributeDefinition,
source_layout: &PointLayout,
target_layout: &PointLayout,
) -> Option<(usize, usize, Option<AttributeConversionFn>)> {
target_layout
.get_attribute_by_name(default_attribute.name())
.map_or(None, |target_attribute| {
let converter = source_layout
.get_attribute_by_name(default_attribute.name())
.and_then(|source_attribute| {
get_converter_for_attributes(
&source_attribute.into(),
&target_attribute.into(),
)
})
.or_else(|| {
// If the source_layout does not contain the desired attribute, we still might need a converter
// because in this case a default attribute is read (e.g. Vector3<u16> for COLOR_RGB), and this
// default attribute might have a different data type from the target attribute
get_converter_for_attributes(
default_attribute,
&target_attribute.into(),
)
});
let offset_of_attribute = target_attribute.offset() as usize;
let size_of_attribute = target_attribute.size() as usize;
Some((offset_of_attribute, size_of_attribute, converter))
})
}
let target_position_parser =
get_attribute_parser(&attributes::POSITION_3D, &self.layout, target_layout);
let target_intensity_parser =
get_attribute_parser(&attributes::INTENSITY, &self.layout, target_layout);
let target_return_number_parser =
get_attribute_parser(&attributes::RETURN_NUMBER, &self.layout, target_layout);
let target_number_of_returns_parser =
get_attribute_parser(&attributes::NUMBER_OF_RETURNS, &self.layout, target_layout);
let target_classification_flags_parser = get_attribute_parser(
&attributes::CLASSIFICATION_FLAGS,
&self.layout,
target_layout,
);
let target_scanner_channel_parser =
get_attribute_parser(&attributes::SCANNER_CHANNEL, &self.layout, target_layout);
let target_scan_direction_flag_parser = get_attribute_parser(
&attributes::SCAN_DIRECTION_FLAG,
&self.layout,
target_layout,
);
let target_eof_parser = get_attribute_parser(
&attributes::EDGE_OF_FLIGHT_LINE,
&self.layout,
target_layout,
);
let target_classification_parser =
get_attribute_parser(&attributes::CLASSIFICATION, &self.layout, target_layout);
let target_scan_angle_rank_parser =
get_attribute_parser(&attributes::SCAN_ANGLE_RANK, &self.layout, target_layout);
let target_user_data_parser =
get_attribute_parser(&attributes::USER_DATA, &self.layout, target_layout);
let target_point_source_id_parser =
get_attribute_parser(&attributes::POINT_SOURCE_ID, &self.layout, target_layout);
let target_gps_time_parser =
get_attribute_parser(&attributes::GPS_TIME, &self.layout, target_layout);
let target_color_parser =
get_attribute_parser(&attributes::COLOR_RGB, &self.layout, target_layout);
let target_nir_parser = get_attribute_parser(&attributes::NIR, &self.layout, target_layout);
let target_wave_packet_index_parser = get_attribute_parser(
&attributes::WAVE_PACKET_DESCRIPTOR_INDEX,
&self.layout,
target_layout,
);
let target_waveform_byte_offset_parser = get_attribute_parser(
&attributes::WAVEFORM_DATA_OFFSET,
&self.layout,
target_layout,
);
let target_waveform_packet_size_parser = get_attribute_parser(
&attributes::WAVEFORM_PACKET_SIZE,
&self.layout,
target_layout,
);
let target_waveform_return_point_parser = get_attribute_parser(
&attributes::RETURN_POINT_WAVEFORM_LOCATION,
&self.layout,
target_layout,
);
let target_waveform_parameters_parser = get_attribute_parser(
&attributes::WAVEFORM_PARAMETERS,
&self.layout,
target_layout,
);
let target_point_size = target_layout.size_of_point_entry() as usize;
fn run_parser<T: Read + Seek, U>(
decoder_fn: impl Fn(&mut T) -> Result<U>,
maybe_parser: Option<(usize, usize, Option<AttributeConversionFn>)>,
start_of_target_point_in_chunk: usize,
size_of_attribute: Option<usize>,
reader: &mut T,
chunk_buffer: &mut [u8],
) -> Result<()> {
if let Some((offset, size, maybe_converter)) = maybe_parser {
let source_data = decoder_fn(reader)?;
let source_slice = unsafe { view_raw_bytes(&source_data) };
let pos_start = start_of_target_point_in_chunk + offset;
let pos_end = pos_start + size;
let target_slice = &mut chunk_buffer[pos_start..pos_end];
if let Some(converter) = maybe_converter {
unsafe {
converter(source_slice, target_slice);
}
} else {
target_slice.copy_from_slice(source_slice);
}
} else if let Some(bytes_to_skip) = size_of_attribute {
reader.seek(SeekFrom::Current(bytes_to_skip as i64))?;
}
Ok(())
}
let point_offsets = self.point_offsets.clone();
let point_scales = self.point_scales.clone();
let mut source_reader = Cursor::new(source_data);
for point_index in 0..num_points_in_chunk {
// Point size might be larger than what the format indicates due to extra bytes. Extra bytes are not
// supported by pasture at the moment, so we skip over them
let start_of_source_point = point_index as u64 * self.size_of_point_in_file;
source_reader.seek(SeekFrom::Start(start_of_source_point))?;
let start_of_target_point_in_chunk = point_index * target_point_size;
run_parser(
|reader| {
Self::read_next_world_space_position(reader, &point_scales, &point_offsets)
},
target_position_parser,
start_of_target_point_in_chunk,
Some(12),
&mut source_reader,
chunk_buffer,
)?;
run_parser(
|buf| Ok(buf.read_u16::<LittleEndian>()?),
target_intensity_parser,
start_of_target_point_in_chunk,
Some(2),
&mut source_reader,
chunk_buffer,
)?;
let bit_attributes =
Self::read_next_bit_attributes(&mut source_reader, &source_format)?;
run_parser(
|_| Ok(bit_attributes.return_number()),
target_return_number_parser,
start_of_target_point_in_chunk,
None,
&mut source_reader,
chunk_buffer,
)?;
run_parser(
|_| Ok(bit_attributes.number_of_returns()),
target_number_of_returns_parser,
start_of_target_point_in_chunk,
None,
&mut source_reader,
chunk_buffer,
)?;
run_parser(
|_| Ok(bit_attributes.classification_flags_or_default()),
target_classification_flags_parser,
start_of_target_point_in_chunk,
None,
&mut source_reader,
chunk_buffer,
)?;
run_parser(
|_| Ok(bit_attributes.scanner_channel_or_default()),
target_scanner_channel_parser,
start_of_target_point_in_chunk,
None,
&mut source_reader,
chunk_buffer,
)?;
run_parser(
|_| Ok(bit_attributes.scan_direction_flag()),
target_scan_direction_flag_parser,
start_of_target_point_in_chunk,
None,
&mut source_reader,
chunk_buffer,
)?;
run_parser(
|_| Ok(bit_attributes.edge_of_flight_line()),
target_eof_parser,
start_of_target_point_in_chunk,
None,
&mut source_reader,
chunk_buffer,
)?;
run_parser(
|buf| Ok(buf.read_u8()?),
target_classification_parser,
start_of_target_point_in_chunk,
Some(1),
&mut source_reader,
chunk_buffer,
)?;
if source_format.is_extended {
// Extended LAS format has user data before scan angle
run_parser(
|buf| Ok(buf.read_u8()?),
target_user_data_parser,
start_of_target_point_in_chunk,
Some(1),
&mut source_reader,
chunk_buffer,
)?;
run_parser(
|buf| Ok(buf.read_i16::<LittleEndian>()?),
target_scan_angle_rank_parser,
start_of_target_point_in_chunk,
Some(2),
&mut source_reader,
chunk_buffer,
)?;
} else {
// Regular formats have scan angle rank before user data
run_parser(
|buf| Ok(buf.read_i8()?),
target_scan_angle_rank_parser,
start_of_target_point_in_chunk,
Some(1),
&mut source_reader,
chunk_buffer,
)?;
run_parser(
|buf| Ok(buf.read_u8()?),
target_user_data_parser,
start_of_target_point_in_chunk,
Some(1),
&mut source_reader,
chunk_buffer,
)?;
}
run_parser(
|buf| Ok(buf.read_u16::<LittleEndian>()?),
target_point_source_id_parser,
start_of_target_point_in_chunk,
Some(2),
&mut source_reader,
chunk_buffer,
)?;
let gps_bytes_in_current_format = if source_format.has_gps_time {
Some(8)
} else {
None
};
run_parser(
|buf| Ok(buf.read_f64::<LittleEndian>()?),
target_gps_time_parser,
start_of_target_point_in_chunk,
gps_bytes_in_current_format,
&mut source_reader,
chunk_buffer,
)?;
let color_bytes_in_current_format = if source_format.has_color {
Some(6)
} else {
None
};
run_parser(
|reader| Self::read_next_color_or_default(reader, &source_format),
target_color_parser,
start_of_target_point_in_chunk,
color_bytes_in_current_format,
&mut source_reader,
chunk_buffer,
)?;
let nir_bytes_in_current_format = if source_format.has_nir { Some(2) } else { None };
run_parser(
|buf| Ok(buf.read_u16::<LittleEndian>()?),
target_nir_parser,
start_of_target_point_in_chunk,
nir_bytes_in_current_format,
&mut source_reader,
chunk_buffer,
)?;
let wave_packet_index_bytes_in_current_format = if source_format.has_waveform {
Some(1)
} else {
None
};
run_parser(
|buf| Ok(buf.read_u8()?),
target_wave_packet_index_parser,
start_of_target_point_in_chunk,
wave_packet_index_bytes_in_current_format,
&mut source_reader,
chunk_buffer,
)?;
let waveform_data_offset_bytes_in_current_format = if source_format.has_waveform {
Some(8)
} else {
None
};
run_parser(
|buf| Ok(buf.read_u64::<LittleEndian>()?),
target_waveform_byte_offset_parser,
start_of_target_point_in_chunk,
waveform_data_offset_bytes_in_current_format,
&mut source_reader,
chunk_buffer,
)?;
let waveform_packet_bytes_in_current_format = if source_format.has_waveform {
Some(4)
} else {
None
};
run_parser(
|buf| Ok(buf.read_u32::<LittleEndian>()?),
target_waveform_packet_size_parser,
start_of_target_point_in_chunk,
waveform_packet_bytes_in_current_format,
&mut source_reader,
chunk_buffer,
)?;
let waveform_location_bytes_in_current_format = if source_format.has_waveform {
Some(4)
} else {
None
};
run_parser(
|buf| Ok(buf.read_f32::<LittleEndian>()?),
target_waveform_return_point_parser,
start_of_target_point_in_chunk,
waveform_location_bytes_in_current_format,
&mut source_reader,
chunk_buffer,
)?;
let waveform_params_bytes_in_current_format = if source_format.has_waveform {
Some(12)
} else {
None
};
run_parser(
|reader| Self::read_next_waveform_parameters(reader, &source_format),
target_waveform_parameters_parser,
start_of_target_point_in_chunk,
waveform_params_bytes_in_current_format,
&mut source_reader,
chunk_buffer,
)?;
}
Ok(())
}
/// Reads up to `count` points into `point_buffer`, which must share this
/// reader's default `PointLayout`.
///
/// Data is read in fixed-size chunks of 50 000 points: each chunk is parsed
/// into an untyped interleaved byte buffer which is then pushed into
/// `point_buffer`. Returns the number of points actually read, which may be
/// less than `count` if fewer points remain in the file.
fn read_into_default_layout(
    &mut self,
    point_buffer: &mut dyn PointBufferWriteable,
    count: usize,
) -> Result<usize> {
    let num_points_to_read = usize::min(count, self.remaining_points());
    if num_points_to_read == 0 {
        return Ok(0);
    }
    // Read into chunks of a fixed size. Within each chunk, read all data into an untyped buffer
    // then push the untyped data into 'buffer'
    let chunk_size = 50_000;
    let point_size = self.layout.size_of_point_entry() as usize;
    // `point_size` is already a usize, no extra cast needed
    let chunk_bytes = point_size * chunk_size;
    let num_chunks = (num_points_to_read + chunk_size - 1) / chunk_size;
    let mut points_chunk: Vec<u8> = vec![0; chunk_bytes];
    for chunk_index in 0..num_chunks {
        // The final chunk may contain fewer than `chunk_size` points
        let points_in_chunk =
            std::cmp::min(chunk_size, num_points_to_read - (chunk_index * chunk_size));
        let bytes_in_chunk = points_in_chunk * point_size;
        self.read_chunk_default_layout(&mut points_chunk[..], points_in_chunk)?;
        point_buffer.push(&InterleavedPointView::from_raw_slice(
            &points_chunk[0..bytes_in_chunk],
            self.layout.clone(),
        ));
    }
    self.current_point_index += num_points_to_read;
    Ok(num_points_to_read)
}
/// Reads up to `count` points into `point_buffer`, whose layout differs from
/// this reader's default layout, converting attributes on the fly. Returns
/// the number of points actually read.
fn read_into_custom_layout(
    &mut self,
    point_buffer: &mut dyn PointBufferWriteable,
    count: usize,
) -> Result<usize> {
    let points_to_read = usize::min(count, self.remaining_points());
    if points_to_read == 0 {
        return Ok(0);
    }
    // Read in interleaved chunks, even if the `point_buffer` is not interleaved. `push_points_interleaved` will
    // handle the memory transpose in this case
    let max_points_per_chunk = 50_000;
    let target_point_size = point_buffer.point_layout().size_of_point_entry() as usize;
    let mut converted_chunk: Vec<u8> = vec![0; target_point_size * max_points_per_chunk];
    let mut las_chunk: Vec<u8> =
        vec![0; self.size_of_point_in_file as usize * max_points_per_chunk];
    let chunk_count = (points_to_read + max_points_per_chunk - 1) / max_points_per_chunk;
    for chunk_idx in 0..chunk_count {
        let points_remaining = points_to_read - chunk_idx * max_points_per_chunk;
        let points_in_chunk = std::cmp::min(max_points_per_chunk, points_remaining);
        let las_bytes_in_chunk = points_in_chunk * self.size_of_point_in_file as usize;
        self.reader.read_exact(&mut las_chunk[0..las_bytes_in_chunk])?;
        self.read_chunk_custom_layout(
            &mut las_chunk[0..las_bytes_in_chunk],
            &mut converted_chunk[..],
            points_in_chunk,
            point_buffer.point_layout(),
        )?;
        let converted_bytes = points_in_chunk * target_point_size;
        point_buffer.push(&InterleavedPointView::from_raw_slice(
            &converted_chunk[0..converted_bytes],
            point_buffer.point_layout().clone(),
        ));
    }
    self.current_point_index += points_to_read;
    Ok(points_to_read)
}
/// Read the next position record and transform it from the file's local
/// integer coordinates into world space using the given scale factors and
/// offsets.
fn read_next_world_space_position<U: Read>(
    reader: &mut U,
    point_scales: &Vector3<f64>,
    point_offsets: &Vector3<f64>,
) -> Result<Vector3<f64>> {
    let raw_x = reader.read_i32::<LittleEndian>()? as f64;
    let raw_y = reader.read_i32::<LittleEndian>()? as f64;
    let raw_z = reader.read_i32::<LittleEndian>()? as f64;
    Ok(Vector3::new(
        raw_x * point_scales.x + point_offsets.x,
        raw_y * point_scales.y + point_offsets.y,
        raw_z * point_scales.z + point_offsets.z,
    ))
}
/// Decode the packed per-point bit flags from the current LAS file. Extended
/// formats pack the flags into two bytes (adding classification flags and a
/// scanner channel), regular formats into a single byte.
fn read_next_bit_attributes<U: Read>(
    reader: &mut U,
    las_format: &Format,
) -> Result<BitAttributes> {
    if !las_format.is_extended {
        let packed = reader.read_u8()?;
        return Ok(BitAttributes::Regular(BitAttributesRegular {
            return_number: packed & 0b111,
            number_of_returns: (packed >> 3) & 0b111,
            scan_direction_flag: (packed >> 6) & 0b1,
            edge_of_flight_line: (packed >> 7) & 0b1,
        }));
    }
    let first_byte = reader.read_u8()?;
    let second_byte = reader.read_u8()?;
    Ok(BitAttributes::Extended(BitAttributesExtended {
        return_number: first_byte & 0b1111,
        number_of_returns: (first_byte >> 4) & 0b1111,
        classification_flags: second_byte & 0b1111,
        scanner_channel: (second_byte >> 4) & 0b11,
        scan_direction_flag: (second_byte >> 6) & 0b1,
        edge_of_flight_line: (second_byte >> 7) & 0b1,
    }))
}
/// Read the next RGB color record, or return a default (zeroed) color when
/// the given LAS format carries no color data.
fn read_next_color_or_default<U: Read>(
    reader: &mut U,
    las_format: &Format,
) -> Result<Vector3<u16>> {
    if !las_format.has_color {
        return Ok(Default::default());
    }
    let red = reader.read_u16::<LittleEndian>()?;
    let green = reader.read_u16::<LittleEndian>()?;
    let blue = reader.read_u16::<LittleEndian>()?;
    Ok(Vector3::new(red, green, blue))
}
/// Read the next waveform parameter triple, or return a default (zeroed)
/// vector when the given LAS format carries no waveform data.
fn read_next_waveform_parameters<U: Read>(
    reader: &mut U,
    las_format: &Format,
) -> Result<Vector3<f32>> {
    if !las_format.has_waveform {
        return Ok(Default::default());
    }
    let param_x = reader.read_f32::<LittleEndian>()?;
    let param_y = reader.read_f32::<LittleEndian>()?;
    let param_z = reader.read_f32::<LittleEndian>()?;
    Ok(Vector3::new(param_x, param_y, param_z))
}
}
impl<T: Read + Seek> LASReaderBase for RawLASReader<T> {
    fn remaining_points(&self) -> usize {
        self.metadata.point_count() - self.current_point_index
    }
    fn header(&self) -> &Header {
        // The metadata is presumably always constructed from a parsed LAS
        // header, so a missing raw header is a broken internal invariant;
        // `expect` documents that instead of a bare `unwrap`.
        self.metadata
            .raw_las_header()
            .expect("RawLASReader: LASMetadata is missing the raw LAS header")
    }
}
impl<T: Read + Seek> PointReader for RawLASReader<T> {
fn read(&mut self, count: usize) -> Result<Box<dyn pasture_core::containers::PointBuffer>> {
let num_points_to_read = usize::min(count, self.remaining_points());
let mut buffer =
InterleavedVecPointStorage::with_capacity(num_points_to_read, self.layout.clone());
self.read_into(&mut buffer, num_points_to_read)?;
Ok(Box::new(buffer))
}
fn read_into(
&mut self,
point_buffer: &mut dyn PointBufferWriteable,
count: usize,
) -> Result<usize> {
if *point_buffer.point_layout() != self.layout {
self.read_into_custom_layout(point_buffer, count)
} else {
self.read_into_default_layout(point_buffer, count)
}
}
fn get_metadata(&self) -> &dyn Metadata {
&self.metadata
}
fn get_default_point_layout(&self) -> &PointLayout {
&self.layout
}
}
impl<T: Read + Seek> SeekToPoint for RawLASReader<T> {
fn seek_point(&mut self, position: SeekFrom) -> Result<usize> {
let new_position = match position {
SeekFrom::Start(from_start) => from_start as i64,
SeekFrom::End(from_end) => self.metadata.point_count() as i64 + from_end,
SeekFrom::Current(from_current) => self.current_point_index as i64 + from_current,
};
if new_position < 0 {
panic!("RawLASReader::seek_point: It is an error to seek to a point position smaller than zero!");
}
let clamped_position =
std::cmp::min(self.metadata.point_count() as i64, new_position) as usize;
if self.current_point_index != clamped_position {
let position_within_file = self.offset_to_first_point_in_file
+ clamped_position as u64 * self.size_of_point_in_file;
self.reader.seek(SeekFrom::Start(position_within_file))?;
self.current_point_index = clamped_position;
}
Ok(self.current_point_index)
}
}
/// Reader for compressed LAZ data that decompresses point records on the fly.
pub(crate) struct RawLAZReader<'a, T: Read + Seek + Send + 'a> {
    // Decompressor wrapping the raw LAZ data stream
    reader: LasZipDecompressor<'a, T>,
    // Metadata parsed from the file's LAS header
    metadata: LASMetadata,
    // Default point layout matching the file's LAS point record format
    layout: PointLayout,
    // Index of the next point to be read
    current_point_index: usize,
    // Offsets applied to the scaled integer coordinates (from the LAS header)
    point_offsets: Vector3<f64>,
    // Scale factors for the integer coordinates (from the LAS header)
    point_scales: Vector3<f64>,
    // Size in bytes of a single uncompressed point record in the file
    size_of_point_in_file: u64,
}
impl<'a, T: Read + Seek + Send + 'a> RawLAZReader<'a, T> {
/// Creates a new `RawLAZReader` from the given data source.
///
/// Parses the LAS header and its VLRs, rejects unsupported point formats
/// (wave packet data and extended formats 6-10), and initializes a LAZ
/// decompressor positioned at the first point record.
///
/// # Errors
/// Returns an error if the header cannot be parsed, if the point format is
/// unsupported, or if the mandatory LasZip VLR is missing or invalid.
pub fn from_read(mut read: T) -> Result<Self> {
    let raw_header = raw::Header::read_from(&mut read)?;
    // `offset_to_point_data` is the byte offset of the first point record
    let offset_to_first_point_in_file = raw_header.offset_to_point_data as u64;
    let size_of_point_in_file = raw_header.point_data_record_length as u64;
    let number_of_vlrs = raw_header.number_of_variable_length_records;
    let point_offsets = Vector3::new(
        raw_header.x_offset,
        raw_header.y_offset,
        raw_header.z_offset,
    );
    let point_scales = Vector3::new(
        raw_header.x_scale_factor,
        raw_header.y_scale_factor,
        raw_header.z_scale_factor,
    );
    let mut header_builder = Builder::new(raw_header)?;
    // Read VLRs
    for _ in 0..number_of_vlrs {
        let vlr = las_rs::raw::Vlr::read_from(&mut read, false).map(Vlr::new)?;
        header_builder.vlrs.push(vlr);
    }
    // TODO Read EVLRs
    let header = header_builder.into_header()?;
    if header.point_format().has_waveform {
        return Err(anyhow!(
            "Compressed LAZ files with wave packet data are currently not supported!"
        ));
    }
    if header.point_format().is_extended {
        return Err(anyhow!(
            "Compressed LAZ files with extended formats (6-10) are currently not supported!"
        ));
    }
    let metadata: LASMetadata = header.clone().into();
    let point_layout = point_layout_from_las_point_format(header.point_format())?;
    // `offset_to_first_point_in_file` is already u64, no cast needed
    read.seek(SeekFrom::Start(offset_to_first_point_in_file))?;
    // The LasZip VLR carries the decompression parameters and is mandatory
    let laszip_vlr = match header.vlrs().iter().find(|vlr| is_laszip_vlr(*vlr)) {
        None => Err(anyhow!(
            "RawLAZReader::new: LAZ variable length record not found in file!"
        )),
        Some(ref vlr) => {
            let laz_record =
                laz::las::laszip::LazVlr::from_buffer(&vlr.data).map_err(map_laz_err)?;
            Ok(laz_record)
        }
    }?;
    let reader = LasZipDecompressor::new(read, laszip_vlr).map_err(map_laz_err)?;
    Ok(Self {
        reader,
        // Use field-init shorthand consistently with the other fields
        metadata,
        layout: point_layout,
        current_point_index: 0,
        point_offsets,
        point_scales,
        size_of_point_in_file,
    })
}
/// Decompresses the next `num_points_in_chunk` points and converts them into
/// this reader's default point layout, writing the result to `chunk_buffer`.
///
/// `decompression_buffer` is scratch space that receives the raw decompressed
/// LAS point records; it must hold at least
/// `num_points_in_chunk * self.size_of_point_in_file` bytes.
fn read_chunk_default_layout(
    &mut self,
    chunk_buffer: &mut [u8],
    decompression_buffer: &mut [u8],
    num_points_in_chunk: usize,
) -> Result<()> {
    let bytes_in_chunk = num_points_in_chunk * self.size_of_point_in_file as usize;
    let las_format = Format::new(self.metadata.point_format())?;
    self.reader
        .decompress_many(&mut decompression_buffer[0..bytes_in_chunk])?;
    // Source records are read little-endian; the in-memory target buffer is
    // written in native endianness
    let mut decompression_chunk_cursor = Cursor::new(decompression_buffer);
    let mut target_chunk_cursor = Cursor::new(chunk_buffer);
    // Convert the decompressed points - which have XYZ as u32 - into the target layout
    for point_index in 0..num_points_in_chunk {
        // Point size might be larger than what the format indicates due to extra bytes. Extra bytes are not
        // supported by pasture at the moment, so we skip over them
        let start_of_point_in_decompressed_data =
            point_index as u64 * self.size_of_point_in_file;
        decompression_chunk_cursor
            .seek(SeekFrom::Start(start_of_point_in_decompressed_data))?;
        // Position: scale the local integer coordinates into world space
        let local_x = decompression_chunk_cursor.read_i32::<LittleEndian>()?;
        let local_y = decompression_chunk_cursor.read_i32::<LittleEndian>()?;
        let local_z = decompression_chunk_cursor.read_i32::<LittleEndian>()?;
        let global_x = (local_x as f64 * self.point_scales.x) + self.point_offsets.x;
        let global_y = (local_y as f64 * self.point_scales.y) + self.point_offsets.y;
        let global_z = (local_z as f64 * self.point_scales.z) + self.point_offsets.z;
        target_chunk_cursor.write_f64::<NativeEndian>(global_x)?;
        target_chunk_cursor.write_f64::<NativeEndian>(global_y)?;
        target_chunk_cursor.write_f64::<NativeEndian>(global_z)?;
        // Intensity
        target_chunk_cursor.write_i16::<NativeEndian>(
            decompression_chunk_cursor.read_i16::<LittleEndian>()?,
        )?;
        // Bit attributes: each packed flag is unpacked into its own byte in
        // the target layout
        if las_format.is_extended {
            // Extended formats pack the flags into two bytes
            let bit_attributes_first_byte = decompression_chunk_cursor.read_u8()?;
            let bit_attributes_second_byte = decompression_chunk_cursor.read_u8()?;
            let return_number = bit_attributes_first_byte & 0b1111;
            let number_of_returns = (bit_attributes_first_byte >> 4) & 0b1111;
            let classification_flags = bit_attributes_second_byte & 0b1111;
            let scanner_channel = (bit_attributes_second_byte >> 4) & 0b11;
            let scan_direction_flag = (bit_attributes_second_byte >> 6) & 0b1;
            let edge_of_flight_line = (bit_attributes_second_byte >> 7) & 0b1;
            target_chunk_cursor.write_u8(return_number)?;
            target_chunk_cursor.write_u8(number_of_returns)?;
            target_chunk_cursor.write_u8(classification_flags)?;
            target_chunk_cursor.write_u8(scanner_channel)?;
            target_chunk_cursor.write_u8(scan_direction_flag)?;
            target_chunk_cursor.write_u8(edge_of_flight_line)?;
        } else {
            // Regular formats pack the flags into a single byte
            let bit_attributes = decompression_chunk_cursor.read_u8()?;
            let return_number = bit_attributes & 0b111;
            let number_of_returns = (bit_attributes >> 3) & 0b111;
            let scan_direction_flag = (bit_attributes >> 6) & 0b1;
            let edge_of_flight_line = (bit_attributes >> 7) & 0b1;
            target_chunk_cursor.write_u8(return_number)?;
            target_chunk_cursor.write_u8(number_of_returns)?;
            target_chunk_cursor.write_u8(scan_direction_flag)?;
            target_chunk_cursor.write_u8(edge_of_flight_line)?;
        }
        // Classification
        target_chunk_cursor.write_u8(decompression_chunk_cursor.read_u8()?)?;
        // User data in format > 5, scan angle rank in format <= 5
        target_chunk_cursor.write_u8(decompression_chunk_cursor.read_u8()?)?;
        if self.metadata.point_format() <= 5 {
            // User data
            target_chunk_cursor.write_u8(decompression_chunk_cursor.read_u8()?)?;
        } else {
            // Scan angle
            target_chunk_cursor.write_i16::<NativeEndian>(
                decompression_chunk_cursor.read_i16::<LittleEndian>()?,
            )?;
        }
        // Point source ID
        target_chunk_cursor.write_u16::<NativeEndian>(
            decompression_chunk_cursor.read_u16::<LittleEndian>()?,
        )?;
        // Format 0 is done here, the other formats are handled now
        if las_format.has_gps_time {
            target_chunk_cursor.write_f64::<NativeEndian>(
                decompression_chunk_cursor.read_f64::<LittleEndian>()?,
            )?;
        }
        if las_format.has_color {
            // RGB color, one u16 per channel
            target_chunk_cursor.write_u16::<NativeEndian>(
                decompression_chunk_cursor.read_u16::<LittleEndian>()?,
            )?;
            target_chunk_cursor.write_u16::<NativeEndian>(
                decompression_chunk_cursor.read_u16::<LittleEndian>()?,
            )?;
            target_chunk_cursor.write_u16::<NativeEndian>(
                decompression_chunk_cursor.read_u16::<LittleEndian>()?,
            )?;
        }
        if las_format.has_nir {
            target_chunk_cursor.write_u16::<NativeEndian>(
                decompression_chunk_cursor.read_u16::<LittleEndian>()?,
            )?;
        }
        if las_format.has_waveform {
            // Wave packet descriptor index, byte offset, packet size, return
            // point location, then the three waveform parameters
            target_chunk_cursor.write_u8(decompression_chunk_cursor.read_u8()?)?;
            target_chunk_cursor.write_u64::<NativeEndian>(
                decompression_chunk_cursor.read_u64::<LittleEndian>()?,
            )?;
            target_chunk_cursor.write_u32::<NativeEndian>(
                decompression_chunk_cursor.read_u32::<LittleEndian>()?,
            )?;
            target_chunk_cursor.write_f32::<NativeEndian>(
                decompression_chunk_cursor.read_f32::<LittleEndian>()?,
            )?;
            target_chunk_cursor.write_f32::<NativeEndian>(
                decompression_chunk_cursor.read_f32::<LittleEndian>()?,
            )?;
            target_chunk_cursor.write_f32::<NativeEndian>(
                decompression_chunk_cursor.read_f32::<LittleEndian>()?,
            )?;
            target_chunk_cursor.write_f32::<NativeEndian>(
                decompression_chunk_cursor.read_f32::<LittleEndian>()?,
            )?;
        }
    }
    Ok(())
}
/// Decompresses the next `num_points_in_chunk` points and converts them into
/// the caller-supplied `target_layout`, writing the converted points to
/// `chunk_buffer`.
///
/// For each default LAS attribute, the matching attribute in `target_layout`
/// (if any) is looked up once per chunk; per point, attributes present in
/// the target layout are decoded (and converted when the data types differ),
/// while attributes absent from the target layout are skipped in the
/// decompressed source data.
fn read_chunk_custom_layout(
    &mut self,
    chunk_buffer: &mut [u8],
    decompression_buffer: &mut [u8],
    num_points_in_chunk: usize,
    target_layout: &PointLayout,
) -> Result<()> {
    // HACK Not happy with how large this function is... But there are so many special
    // cases, I don't know how to clean it up at the moment. Maybe revise in future?
    let source_format = Format::new(self.metadata.point_format())?;
    // Looks up `default_attribute` in `target_layout` and returns its
    // (byte offset, byte size, optional conversion function) triple, or
    // `None` when the target layout does not contain the attribute.
    fn get_attribute_parser(
        default_attribute: &PointAttributeDefinition,
        source_layout: &PointLayout,
        target_layout: &PointLayout,
    ) -> Option<(usize, usize, Option<AttributeConversionFn>)> {
        target_layout
            .get_attribute_by_name(default_attribute.name())
            .map_or(None, |target_attribute| {
                let converter = source_layout
                    .get_attribute_by_name(default_attribute.name())
                    .and_then(|source_attribute| {
                        get_converter_for_attributes(
                            &source_attribute.into(),
                            &target_attribute.into(),
                        )
                    })
                    .or_else(|| {
                        // If the source_layout does not contain the desired attribute, we still might need a converter
                        // because in this case a default attribute is read (e.g. Vector3<u16> for COLOR_RGB), and this
                        // default attribute might have a different data type from the target attribute
                        get_converter_for_attributes(
                            default_attribute,
                            &target_attribute.into(),
                        )
                    });
                let offset_of_attribute = target_attribute.offset() as usize;
                let size_of_attribute = target_attribute.size() as usize;
                Some((offset_of_attribute, size_of_attribute, converter))
            })
    }
    // One parser per default LAS attribute, resolved once for the whole chunk
    let target_position_parser =
        get_attribute_parser(&attributes::POSITION_3D, &self.layout, target_layout);
    let target_intensity_parser =
        get_attribute_parser(&attributes::INTENSITY, &self.layout, target_layout);
    let target_return_number_parser =
        get_attribute_parser(&attributes::RETURN_NUMBER, &self.layout, target_layout);
    let target_number_of_returns_parser =
        get_attribute_parser(&attributes::NUMBER_OF_RETURNS, &self.layout, target_layout);
    let target_classification_flags_parser = get_attribute_parser(
        &attributes::CLASSIFICATION_FLAGS,
        &self.layout,
        target_layout,
    );
    let target_scanner_channel_parser =
        get_attribute_parser(&attributes::SCANNER_CHANNEL, &self.layout, target_layout);
    let target_scan_direction_flag_parser = get_attribute_parser(
        &attributes::SCAN_DIRECTION_FLAG,
        &self.layout,
        target_layout,
    );
    let target_eof_parser = get_attribute_parser(
        &attributes::EDGE_OF_FLIGHT_LINE,
        &self.layout,
        target_layout,
    );
    let target_classification_parser =
        get_attribute_parser(&attributes::CLASSIFICATION, &self.layout, target_layout);
    let target_scan_angle_rank_parser =
        get_attribute_parser(&attributes::SCAN_ANGLE_RANK, &self.layout, target_layout);
    let target_user_data_parser =
        get_attribute_parser(&attributes::USER_DATA, &self.layout, target_layout);
    let target_point_source_id_parser =
        get_attribute_parser(&attributes::POINT_SOURCE_ID, &self.layout, target_layout);
    let target_gps_time_parser =
        get_attribute_parser(&attributes::GPS_TIME, &self.layout, target_layout);
    let target_color_parser =
        get_attribute_parser(&attributes::COLOR_RGB, &self.layout, target_layout);
    let target_nir_parser = get_attribute_parser(&attributes::NIR, &self.layout, target_layout);
    let target_wave_packet_index_parser = get_attribute_parser(
        &attributes::WAVE_PACKET_DESCRIPTOR_INDEX,
        &self.layout,
        target_layout,
    );
    let target_waveform_byte_offset_parser = get_attribute_parser(
        &attributes::WAVEFORM_DATA_OFFSET,
        &self.layout,
        target_layout,
    );
    let target_waveform_packet_size_parser = get_attribute_parser(
        &attributes::WAVEFORM_PACKET_SIZE,
        &self.layout,
        target_layout,
    );
    let target_waveform_return_point_parser = get_attribute_parser(
        &attributes::RETURN_POINT_WAVEFORM_LOCATION,
        &self.layout,
        target_layout,
    );
    let target_waveform_parameters_parser = get_attribute_parser(
        &attributes::WAVEFORM_PARAMETERS,
        &self.layout,
        target_layout,
    );
    let target_point_size = target_layout.size_of_point_entry() as usize;
    self.reader.decompress_many(
        &mut decompression_buffer
            [0..(num_points_in_chunk * self.size_of_point_in_file as usize)],
    )?;
    let mut decompressed_data = Cursor::new(decompression_buffer);
    // Decodes a single attribute with `decoder_fn` and writes it to the
    // attribute's position in `chunk_buffer`, applying the conversion
    // function if one is required. When the target layout does not contain
    // the attribute (`maybe_parser` is `None`), the attribute's bytes in the
    // source data are skipped instead (`size_of_attribute` bytes, if `Some`;
    // `None` means the source cursor needs no adjustment).
    fn run_parser<T>(
        decoder_fn: impl Fn(&mut Cursor<&mut [u8]>) -> Result<T>,
        maybe_parser: Option<(usize, usize, Option<AttributeConversionFn>)>,
        start_of_target_point_in_chunk: usize,
        size_of_attribute: Option<usize>,
        decompressed_data: &mut Cursor<&mut [u8]>,
        chunk_buffer: &mut [u8],
    ) -> Result<()> {
        if let Some((offset, size, maybe_converter)) = maybe_parser {
            let source_data = decoder_fn(decompressed_data)?;
            let source_slice = unsafe { view_raw_bytes(&source_data) };
            let pos_start = start_of_target_point_in_chunk + offset;
            let pos_end = pos_start + size;
            let target_slice = &mut chunk_buffer[pos_start..pos_end];
            if let Some(converter) = maybe_converter {
                unsafe {
                    converter(source_slice, target_slice);
                }
            } else {
                target_slice.copy_from_slice(source_slice);
            }
        } else if let Some(bytes_to_skip) = size_of_attribute {
            decompressed_data.seek(SeekFrom::Current(bytes_to_skip as i64))?;
        }
        Ok(())
    }
    for point_index in 0..num_points_in_chunk {
        // Point size might be larger than what the format indicates due to extra bytes. Extra bytes are not
        // supported by pasture at the moment, so we skip over them
        let start_of_point_in_decompressed_data =
            point_index as u64 * self.size_of_point_in_file;
        decompressed_data.seek(SeekFrom::Start(start_of_point_in_decompressed_data))?;
        let start_of_target_point_in_chunk = point_index * target_point_size;
        run_parser(
            |buf| self.read_next_world_space_position(buf),
            target_position_parser,
            start_of_target_point_in_chunk,
            Some(12),
            &mut decompressed_data,
            chunk_buffer,
        )?;
        run_parser(
            |buf| Ok(buf.read_u16::<LittleEndian>()?),
            target_intensity_parser,
            start_of_target_point_in_chunk,
            Some(2),
            &mut decompressed_data,
            chunk_buffer,
        )?;
        // The packed flag byte(s) are consumed here once; the individual
        // flag parsers below therefore pass `None` as the skip size
        let bit_attributes =
            self.read_next_bit_attributes(&mut decompressed_data, &source_format)?;
        run_parser(
            |_| Ok(bit_attributes.return_number()),
            target_return_number_parser,
            start_of_target_point_in_chunk,
            None,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        run_parser(
            |_| Ok(bit_attributes.number_of_returns()),
            target_number_of_returns_parser,
            start_of_target_point_in_chunk,
            None,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        run_parser(
            |_| Ok(bit_attributes.classification_flags_or_default()),
            target_classification_flags_parser,
            start_of_target_point_in_chunk,
            None,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        run_parser(
            |_| Ok(bit_attributes.scanner_channel_or_default()),
            target_scanner_channel_parser,
            start_of_target_point_in_chunk,
            None,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        run_parser(
            |_| Ok(bit_attributes.scan_direction_flag()),
            target_scan_direction_flag_parser,
            start_of_target_point_in_chunk,
            None,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        run_parser(
            |_| Ok(bit_attributes.edge_of_flight_line()),
            target_eof_parser,
            start_of_target_point_in_chunk,
            None,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        run_parser(
            |buf| Ok(buf.read_u8()?),
            target_classification_parser,
            start_of_target_point_in_chunk,
            Some(1),
            &mut decompressed_data,
            chunk_buffer,
        )?;
        if source_format.is_extended {
            // Extended LAS format has user data before scan angle
            run_parser(
                |buf| Ok(buf.read_u8()?),
                target_user_data_parser,
                start_of_target_point_in_chunk,
                Some(1),
                &mut decompressed_data,
                chunk_buffer,
            )?;
            run_parser(
                |buf| Ok(buf.read_i16::<LittleEndian>()?),
                target_scan_angle_rank_parser,
                start_of_target_point_in_chunk,
                Some(2),
                &mut decompressed_data,
                chunk_buffer,
            )?;
        } else {
            // Regular formats have scan angle rank before user data
            run_parser(
                |buf| Ok(buf.read_i8()?),
                target_scan_angle_rank_parser,
                start_of_target_point_in_chunk,
                Some(1),
                &mut decompressed_data,
                chunk_buffer,
            )?;
            run_parser(
                |buf| Ok(buf.read_u8()?),
                target_user_data_parser,
                start_of_target_point_in_chunk,
                Some(1),
                &mut decompressed_data,
                chunk_buffer,
            )?;
        }
        run_parser(
            |buf| Ok(buf.read_u16::<LittleEndian>()?),
            target_point_source_id_parser,
            start_of_target_point_in_chunk,
            Some(2),
            &mut decompressed_data,
            chunk_buffer,
        )?;
        // The remaining attributes only occupy bytes in the source data if
        // the source format actually contains them, hence the conditional
        // skip sizes
        let gps_bytes_in_current_format = if source_format.has_gps_time {
            Some(8)
        } else {
            None
        };
        run_parser(
            |buf| Ok(buf.read_f64::<LittleEndian>()?),
            target_gps_time_parser,
            start_of_target_point_in_chunk,
            gps_bytes_in_current_format,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        let color_bytes_in_current_format = if source_format.has_color {
            Some(6)
        } else {
            None
        };
        run_parser(
            |buf| Self::read_next_colors_or_default(buf, &source_format),
            target_color_parser,
            start_of_target_point_in_chunk,
            color_bytes_in_current_format,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        let nir_bytes_in_current_format = if source_format.has_nir { Some(2) } else { None };
        run_parser(
            |buf| Ok(buf.read_u16::<LittleEndian>()?),
            target_nir_parser,
            start_of_target_point_in_chunk,
            nir_bytes_in_current_format,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        let wave_packet_index_bytes_in_current_format = if source_format.has_waveform {
            Some(1)
        } else {
            None
        };
        run_parser(
            |buf| Ok(buf.read_u8()?),
            target_wave_packet_index_parser,
            start_of_target_point_in_chunk,
            wave_packet_index_bytes_in_current_format,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        let waveform_data_offset_bytes_in_current_format = if source_format.has_waveform {
            Some(8)
        } else {
            None
        };
        run_parser(
            |buf| Ok(buf.read_u64::<LittleEndian>()?),
            target_waveform_byte_offset_parser,
            start_of_target_point_in_chunk,
            waveform_data_offset_bytes_in_current_format,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        let waveform_packet_bytes_in_current_format = if source_format.has_waveform {
            Some(4)
        } else {
            None
        };
        run_parser(
            |buf| Ok(buf.read_u32::<LittleEndian>()?),
            target_waveform_packet_size_parser,
            start_of_target_point_in_chunk,
            waveform_packet_bytes_in_current_format,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        let waveform_location_bytes_in_current_format = if source_format.has_waveform {
            Some(4)
        } else {
            None
        };
        run_parser(
            |buf| Ok(buf.read_f32::<LittleEndian>()?),
            target_waveform_return_point_parser,
            start_of_target_point_in_chunk,
            waveform_location_bytes_in_current_format,
            &mut decompressed_data,
            chunk_buffer,
        )?;
        let waveform_params_bytes_in_current_format = if source_format.has_waveform {
            Some(12)
        } else {
            None
        };
        run_parser(
            |buf| Self::read_next_waveform_parameters_or_default(buf, &source_format),
            target_waveform_parameters_parser,
            start_of_target_point_in_chunk,
            waveform_params_bytes_in_current_format,
            &mut decompressed_data,
            chunk_buffer,
        )?;
    }
    Ok(())
}
/// Decompresses up to `count` points into `point_buffer`, which must share
/// this reader's default `PointLayout`.
///
/// Data is processed in fixed-size chunks of 50 000 points: each chunk is
/// decompressed into a scratch buffer, converted into the default layout and
/// then pushed into `point_buffer`. Returns the number of points actually
/// read, which may be less than `count` if fewer points remain.
fn read_into_default_layout(
    &mut self,
    point_buffer: &mut dyn PointBufferWriteable,
    count: usize,
) -> Result<usize> {
    let num_points_to_read = usize::min(count, self.remaining_points());
    if num_points_to_read == 0 {
        return Ok(0);
    }
    // Read into chunks of a fixed size. Within each chunk, read all data into an untyped buffer
    // then push the untyped data into 'buffer'
    let chunk_size = 50_000;
    let point_size = self.layout.size_of_point_entry() as usize;
    // `point_size` is already a usize, no extra cast needed
    let chunk_bytes = point_size * chunk_size;
    let num_chunks = (num_points_to_read + chunk_size - 1) / chunk_size;
    let mut points_chunk: Vec<u8> = vec![0; chunk_bytes];
    let decompression_chunk_size = self.size_of_point_in_file as usize * chunk_size;
    let mut decompression_chunk: Vec<u8> = vec![0; decompression_chunk_size];
    for chunk_index in 0..num_chunks {
        // The final chunk may contain fewer than `chunk_size` points
        let points_in_chunk =
            std::cmp::min(chunk_size, num_points_to_read - (chunk_index * chunk_size));
        let bytes_in_chunk = points_in_chunk * point_size;
        self.read_chunk_default_layout(
            &mut points_chunk[..],
            &mut decompression_chunk[..],
            points_in_chunk,
        )?;
        point_buffer.push(&InterleavedPointView::from_raw_slice(
            &points_chunk[0..bytes_in_chunk],
            self.layout.clone(),
        ));
    }
    self.current_point_index += num_points_to_read;
    Ok(num_points_to_read)
}
/// Decompresses up to `count` points into `point_buffer`, whose layout
/// differs from this reader's default layout, converting attributes on the
/// fly. Returns the number of points actually read.
fn read_into_custom_layout(
    &mut self,
    point_buffer: &mut dyn PointBufferWriteable,
    count: usize,
) -> Result<usize> {
    let points_to_read = usize::min(count, self.remaining_points());
    if points_to_read == 0 {
        return Ok(0);
    }
    // Read in interleaved chunks, even if the `point_buffer` is not interleaved. `push_points_interleaved` will
    // handle the memory transpose in this case
    let max_points_per_chunk = 50_000;
    let target_point_size = point_buffer.point_layout().size_of_point_entry() as usize;
    let mut converted_chunk: Vec<u8> = vec![0; target_point_size * max_points_per_chunk];
    let mut decompression_chunk: Vec<u8> =
        vec![0; self.size_of_point_in_file as usize * max_points_per_chunk];
    let chunk_count = (points_to_read + max_points_per_chunk - 1) / max_points_per_chunk;
    for chunk_idx in 0..chunk_count {
        let points_remaining = points_to_read - chunk_idx * max_points_per_chunk;
        let points_in_chunk = std::cmp::min(max_points_per_chunk, points_remaining);
        self.read_chunk_custom_layout(
            &mut converted_chunk[..],
            &mut decompression_chunk[..],
            points_in_chunk,
            point_buffer.point_layout(),
        )?;
        let converted_bytes = points_in_chunk * target_point_size;
        point_buffer.push(&InterleavedPointView::from_raw_slice(
            &converted_chunk[0..converted_bytes],
            point_buffer.point_layout().clone(),
        ));
    }
    self.current_point_index += points_to_read;
    Ok(points_to_read)
}
/// Read the next position record from the decompressed data and transform it
/// from local integer coordinates into world space using this reader's scale
/// factors and offsets.
fn read_next_world_space_position(
    &self,
    decompressed_data: &mut Cursor<&mut [u8]>,
) -> Result<Vector3<f64>> {
    let raw_x = decompressed_data.read_i32::<LittleEndian>()? as f64;
    let raw_y = decompressed_data.read_i32::<LittleEndian>()? as f64;
    let raw_z = decompressed_data.read_i32::<LittleEndian>()? as f64;
    Ok(Vector3::new(
        raw_x * self.point_scales.x + self.point_offsets.x,
        raw_y * self.point_scales.y + self.point_offsets.y,
        raw_z * self.point_scales.z + self.point_offsets.z,
    ))
}
/// Decode the packed per-point bit flags from the decompressed data. Extended
/// formats pack the flags into two bytes (adding classification flags and a
/// scanner channel), regular formats into a single byte.
fn read_next_bit_attributes(
    &self,
    decompressed_data: &mut Cursor<&mut [u8]>,
    las_format: &Format,
) -> Result<BitAttributes> {
    if !las_format.is_extended {
        let packed = decompressed_data.read_u8()?;
        return Ok(BitAttributes::Regular(BitAttributesRegular {
            return_number: packed & 0b111,
            number_of_returns: (packed >> 3) & 0b111,
            scan_direction_flag: (packed >> 6) & 0b1,
            edge_of_flight_line: (packed >> 7) & 0b1,
        }));
    }
    let first_byte = decompressed_data.read_u8()?;
    let second_byte = decompressed_data.read_u8()?;
    Ok(BitAttributes::Extended(BitAttributesExtended {
        return_number: first_byte & 0b1111,
        number_of_returns: (first_byte >> 4) & 0b1111,
        classification_flags: second_byte & 0b1111,
        scanner_channel: (second_byte >> 4) & 0b11,
        scan_direction_flag: (second_byte >> 6) & 0b1,
        edge_of_flight_line: (second_byte >> 7) & 0b1,
    }))
}
/// Read the next RGB color record from the decompressed data, or yield a
/// default (zeroed) color when the format carries no color data.
fn read_next_colors_or_default(
    decompressed_data: &mut Cursor<&mut [u8]>,
    las_format: &Format,
) -> Result<Vector3<u16>> {
    if las_format.has_color {
        let red = decompressed_data.read_u16::<LittleEndian>()?;
        let green = decompressed_data.read_u16::<LittleEndian>()?;
        let blue = decompressed_data.read_u16::<LittleEndian>()?;
        Ok(Vector3::new(red, green, blue))
    } else {
        Ok(Default::default())
    }
}
/// Read the next waveform parameter triple from the decompressed data, or
/// yield a default (zeroed) vector when the format carries no waveform data.
fn read_next_waveform_parameters_or_default(
    decompressed_data: &mut Cursor<&mut [u8]>,
    las_format: &Format,
) -> Result<Vector3<f32>> {
    if las_format.has_waveform {
        let param_x = decompressed_data.read_f32::<LittleEndian>()?;
        let param_y = decompressed_data.read_f32::<LittleEndian>()?;
        let param_z = decompressed_data.read_f32::<LittleEndian>()?;
        Ok(Vector3::new(param_x, param_y, param_z))
    } else {
        Ok(Default::default())
    }
}
}
impl<'a, T: Read + Seek + Send + 'a> LASReaderBase for RawLAZReader<'a, T> {
    fn remaining_points(&self) -> usize {
        self.metadata.point_count() - self.current_point_index
    }
    fn header(&self) -> &Header {
        // `from_read` builds the metadata from a parsed LAS header, so the
        // raw header is presumably always present; `expect` documents that
        // invariant instead of a bare `unwrap`.
        self.metadata
            .raw_las_header()
            .expect("RawLAZReader: LASMetadata is missing the raw LAS header")
    }
}
impl<'a, T: Read + Seek + Send + 'a> PointReader for RawLAZReader<'a, T> {
    /// Decompresses up to `count` points into a freshly allocated interleaved
    /// buffer using this reader's default point layout.
    fn read(&mut self, count: usize) -> Result<Box<dyn PointBuffer>> {
        let points_to_read = usize::min(count, self.remaining_points());
        let mut storage =
            InterleavedVecPointStorage::with_capacity(points_to_read, self.layout.clone());
        self.read_into(&mut storage, points_to_read)?;
        Ok(Box::new(storage))
    }
    /// Decompresses up to `count` points into `point_buffer`, converting to
    /// the buffer's layout when it differs from the default layout.
    fn read_into(
        &mut self,
        point_buffer: &mut dyn PointBufferWriteable,
        count: usize,
    ) -> Result<usize> {
        if *point_buffer.point_layout() == self.layout {
            self.read_into_default_layout(point_buffer, count)
        } else {
            self.read_into_custom_layout(point_buffer, count)
        }
    }
    fn get_metadata(&self) -> &dyn Metadata {
        &self.metadata
    }
    fn get_default_point_layout(&self) -> &PointLayout {
        &self.layout
    }
}
impl<'a, T: Read + Seek + Send + 'a> SeekToPoint for RawLAZReader<'a, T> {
fn seek_point(&mut self, position: SeekFrom) -> Result<usize> {
let new_position = match position {
SeekFrom::Start(from_start) => from_start as i64,
SeekFrom::End(from_end) => self.metadata.point_count() as i64 + from_end,
SeekFrom::Current(from_current) => self.current_point_index as i64 + from_current,
};
if new_position < 0 {
panic!("RawLAZReader::seek_point: It is an error to seek to a point position smaller than zero!");
}
let clamped_position =
std::cmp::min(self.metadata.point_count() as i64, new_position) as usize;
if self.current_point_index != clamped_position {
self.reader.seek(clamped_position as u64)?;
self.current_point_index = clamped_position;
}
Ok(self.current_point_index)
}
}
#[cfg(test)]
mod tests {
use std::{fs::File, io::BufReader};
use las_rs::point::Format;
use pasture_core::containers::PointBufferExt;
use pasture_core::layout::PointAttributeDataType;
use crate::las::{
compare_to_reference_data, compare_to_reference_data_range, get_test_las_path,
get_test_laz_path, test_data_bounds, test_data_classifications, test_data_colors,
test_data_point_count, test_data_point_source_ids, test_data_positions,
test_data_wavepacket_parameters,
};
use super::*;
// LAS:
// - Check that metadata is correct (num points etc.)
// - `read` has to be correct
// - it has to return a buffer with the expected format
// - it has to return the correct points
// - `read_into` has to be correct for a buffer with the same layout
// - `read_into` has to be correct for a buffer with a different layout
// - all attributes, but different formats
// - some attributes missing
// - `seek` has to be correct
// - it finds the correct position (checked by successive read call)
// - it deals correctly with out of bounds, forward, backward search
/// Generates one test module (named `$name`) that exercises a raw LAS/LAZ
/// reader (`$reader`) against the reference test file for the given LAS point
/// record `$format` (0..=10), obtained via `$get_test_file($format)`.
/// Each generated module covers: metadata, `read`, `read_into` (same and
/// different layouts, interleaved and per-attribute), and `seek` behavior.
macro_rules! test_read_with_format {
    ($name:ident, $format:expr, $reader:ident, $get_test_file:ident) => {
        mod $name {
            use super::*;
            use pasture_core::containers::PerAttributeVecPointStorage;
            use std::path::PathBuf;
            // Path to the reference file for this point record format.
            fn get_test_file_path() -> PathBuf {
                $get_test_file($format)
            }
            // Metadata must match the known reference data: point counts,
            // default point layout derived from the format, and bounds.
            #[test]
            fn test_raw_las_reader_metadata() -> Result<()> {
                let read = BufReader::new(File::open(get_test_file_path())?);
                let mut reader = $reader::from_read(read)?;
                assert_eq!(reader.remaining_points(), test_data_point_count());
                assert_eq!(reader.point_count()?, test_data_point_count());
                assert_eq!(reader.point_index()?, 0);
                let layout = reader.get_default_point_layout();
                let expected_layout =
                    point_layout_from_las_point_format(&Format::new($format)?)?;
                assert_eq!(expected_layout, *layout);
                let bounds = reader.get_metadata().bounds();
                let expected_bounds = test_data_bounds();
                assert_eq!(Some(expected_bounds), bounds);
                Ok(())
            }
            // `read` must return a buffer in the format's default layout whose
            // contents match the reference data. The reference files appear to
            // contain exactly 10 points (remaining_points == 0 afterwards) —
            // TODO confirm against test_data_point_count().
            #[test]
            fn test_raw_las_reader_read() -> Result<()> {
                let read = BufReader::new(File::open(get_test_file_path())?);
                let mut reader = $reader::from_read(read)?;
                let points = reader.read(10)?;
                let expected_layout =
                    point_layout_from_las_point_format(&Format::new($format)?)?;
                assert_eq!(*points.point_layout(), expected_layout);
                compare_to_reference_data(points.as_ref(), ($format));
                assert_eq!(10, reader.point_index()?);
                assert_eq!(0, reader.remaining_points());
                Ok(())
            }
            // `read_into` with a matching layout, interleaved storage.
            #[test]
            fn test_raw_las_reader_read_into_interleaved() -> Result<()> {
                let read = BufReader::new(File::open(get_test_file_path())?);
                let mut reader = $reader::from_read(read)?;
                let layout = point_layout_from_las_point_format(&Format::new($format)?)?;
                let mut buffer = InterleavedVecPointStorage::new(layout);
                reader.read_into(&mut buffer, 10)?;
                compare_to_reference_data(&buffer, $format);
                assert_eq!(10, reader.point_index()?);
                assert_eq!(0, reader.remaining_points());
                Ok(())
            }
            // `read_into` with a matching layout, per-attribute storage.
            #[test]
            fn test_raw_las_reader_read_into_perattribute() -> Result<()> {
                let read = BufReader::new(File::open(get_test_file_path())?);
                let mut reader = $reader::from_read(read)?;
                let layout = point_layout_from_las_point_format(&Format::new($format)?)?;
                let mut buffer = PerAttributeVecPointStorage::new(layout);
                reader.read_into(&mut buffer, 10)?;
                compare_to_reference_data(&buffer, $format);
                assert_eq!(10, reader.point_index()?);
                assert_eq!(0, reader.remaining_points());
                Ok(())
            }
            // `read_into` with a DIFFERENT layout: narrower datatypes for
            // positions/classifications/colors. Attributes the format lacks
            // (colors, waveform data) must come back as default values.
            #[test]
            fn test_raw_las_reader_read_into_different_layout_interleaved() -> Result<()> {
                let read = BufReader::new(File::open(get_test_file_path())?);
                let mut reader = $reader::from_read(read)?;
                let format = Format::new($format)?;
                let layout = PointLayout::from_attributes(&[
                    attributes::POSITION_3D
                        .with_custom_datatype(PointAttributeDataType::Vec3f32),
                    attributes::CLASSIFICATION
                        .with_custom_datatype(PointAttributeDataType::U32),
                    attributes::COLOR_RGB.with_custom_datatype(PointAttributeDataType::Vec3u8),
                    attributes::POINT_SOURCE_ID,
                    attributes::WAVEFORM_PARAMETERS,
                ]);
                let mut buffer = InterleavedVecPointStorage::new(layout);
                reader.read_into(&mut buffer, 10)?;
                // Positions: f64 reference data converted to f32.
                let positions = buffer
                    .iter_attribute::<Vector3<f32>>(
                        &attributes::POSITION_3D
                            .with_custom_datatype(PointAttributeDataType::Vec3f32),
                    )
                    .collect::<Vec<_>>();
                let expected_positions = test_data_positions()
                    .into_iter()
                    .map(|p| Vector3::new(p.x as f32, p.y as f32, p.z as f32))
                    .collect::<Vec<_>>();
                assert_eq!(expected_positions, positions, "Positions do not match");
                // Classifications: widened u8 -> u32.
                let classifications = buffer
                    .iter_attribute::<u32>(
                        &attributes::CLASSIFICATION
                            .with_custom_datatype(PointAttributeDataType::U32),
                    )
                    .collect::<Vec<_>>();
                let expected_classifications = test_data_classifications()
                    .into_iter()
                    .map(|c| c as u32)
                    .collect::<Vec<_>>();
                assert_eq!(
                    expected_classifications, classifications,
                    "Classifications do not match"
                );
                // Colors: 16-bit reference channels truncated to their high
                // byte when the format carries color; defaults otherwise.
                let colors = buffer
                    .iter_attribute::<Vector3<u8>>(
                        &attributes::COLOR_RGB
                            .with_custom_datatype(PointAttributeDataType::Vec3u8),
                    )
                    .collect::<Vec<_>>();
                let expected_colors = if format.has_color {
                    test_data_colors()
                        .iter()
                        .map(|c| {
                            Vector3::new((c.x >> 8) as u8, (c.y >> 8) as u8, (c.z >> 8) as u8)
                        })
                        .collect::<Vec<_>>()
                } else {
                    (0..10)
                        .map(|_| -> Vector3<u8> { Default::default() })
                        .collect::<Vec<_>>()
                };
                assert_eq!(expected_colors, colors, "Colors do not match");
                let point_source_ids = buffer
                    .iter_attribute::<u16>(&attributes::POINT_SOURCE_ID)
                    .collect::<Vec<_>>();
                let expected_point_source_ids = test_data_point_source_ids();
                assert_eq!(
                    expected_point_source_ids, point_source_ids,
                    "Point source IDs do not match"
                );
                // Waveform parameters: present only in waveform formats.
                let waveform_params = buffer
                    .iter_attribute::<Vector3<f32>>(&attributes::WAVEFORM_PARAMETERS)
                    .collect::<Vec<_>>();
                let expected_waveform_params = if format.has_waveform {
                    test_data_wavepacket_parameters()
                } else {
                    (0..10)
                        .map(|_| -> Vector3<f32> { Default::default() })
                        .collect::<Vec<_>>()
                };
                assert_eq!(
                    expected_waveform_params, waveform_params,
                    "Wavepacket parameters do not match"
                );
                assert_eq!(10, reader.point_index()?);
                assert_eq!(0, reader.remaining_points());
                Ok(())
            }
            // Seeking forward must land on the right point, verified by
            // comparing the remaining points against the reference range.
            #[test]
            fn test_raw_las_reader_seek() -> Result<()> {
                let read = BufReader::new(File::open(get_test_file_path())?);
                let mut reader = $reader::from_read(read)?;
                let seek_index: usize = 5;
                let new_pos = reader.seek_point(SeekFrom::Current(seek_index as i64))?;
                assert_eq!(seek_index, new_pos);
                let points = reader.read((10 - seek_index) as usize)?;
                assert_eq!(10 - seek_index, points.len());
                compare_to_reference_data_range(points.as_ref(), $format, seek_index..10);
                Ok(())
            }
            // Out-of-bounds seeks must clamp to the end of the file and leave
            // nothing to read.
            #[test]
            fn test_raw_las_reader_seek_out_of_bounds() -> Result<()> {
                let read = BufReader::new(File::open(get_test_file_path())?);
                let mut reader = $reader::from_read(read)?;
                let seek_index: usize = 23;
                let new_pos = reader.seek_point(SeekFrom::Current(seek_index as i64))?;
                assert_eq!(10, new_pos);
                let points = reader.read(10)?;
                assert_eq!(0, points.len());
                Ok(())
            }
        }
    };
}
// Uncompressed LAS: every point record format 0..=10 is exercised.
test_read_with_format!(las_format_0, 0, RawLASReader, get_test_las_path);
test_read_with_format!(las_format_1, 1, RawLASReader, get_test_las_path);
test_read_with_format!(las_format_2, 2, RawLASReader, get_test_las_path);
test_read_with_format!(las_format_3, 3, RawLASReader, get_test_las_path);
test_read_with_format!(las_format_4, 4, RawLASReader, get_test_las_path);
test_read_with_format!(las_format_5, 5, RawLASReader, get_test_las_path);
test_read_with_format!(las_format_6, 6, RawLASReader, get_test_las_path);
test_read_with_format!(las_format_7, 7, RawLASReader, get_test_las_path);
test_read_with_format!(las_format_8, 8, RawLASReader, get_test_las_path);
test_read_with_format!(las_format_9, 9, RawLASReader, get_test_las_path);
test_read_with_format!(las_format_10, 10, RawLASReader, get_test_las_path);
// Compressed LAZ: only formats 0..=3; see the notes below for why the rest
// are disabled.
test_read_with_format!(laz_format_0, 0, RawLAZReader, get_test_laz_path);
test_read_with_format!(laz_format_1, 1, RawLAZReader, get_test_laz_path);
test_read_with_format!(laz_format_2, 2, RawLAZReader, get_test_laz_path);
test_read_with_format!(laz_format_3, 3, RawLAZReader, get_test_laz_path);
// Formats 4,5,9,10 have wave packet data, which is currently unsupported by laz-rs
// Format 6,7,8 seem to be unsupported by LASzip and give weird results with laz-rs (e.g. seek does not work correctly)
// test_read_with_format!(laz_format_4, 4, RawLAZReader);
// test_read_with_format!(laz_format_5, 5, RawLAZReader);
// test_read_with_format!(laz_format_6, 6, RawLAZReader, get_test_laz_path);
// test_read_with_format!(laz_format_7, 7, RawLAZReader, get_test_laz_path);
// test_read_with_format!(laz_format_8, 8, RawLAZReader, get_test_laz_path);
// test_read_with_format!(laz_format_9, 9, RawLAZReader);
// test_read_with_format!(laz_format_10, 10, RawLAZReader);
//######### TODO ###########
// We have tests now for various formats and various conversions. We should extend them for a wider range, maybe even
// fuzz-test (though this is more effort to setup...)
// Also include comparisons for the additional attributes in the '_read_into_different_attribute_...' tests
}
|
extern crate bindgen;
use std::env;
use std::path::Path;
use std::path::PathBuf;
/// Default SPDK header location, used when `SPDK_INCLUDE` is not set.
/// (`'static` is implied on `static` items — clippy: redundant_static_lifetimes.)
static SPDK_INCLUDE_DIR: &str = "/usr/local/include";
/// Runs bindgen over the SPDK public headers and writes the bindings to
/// `$OUT_DIR/spdk_bindings.rs`.
///
/// The include root comes from the `SPDK_INCLUDE` environment variable,
/// falling back to [`SPDK_INCLUDE_DIR`].
///
/// # Panics
/// Panics when `OUT_DIR` is unset (i.e. when not invoked by Cargo).
fn generate_bindings() {
    // `unwrap_or_else` avoids allocating the fallback String when the
    // variable is set (clippy: or_fun_call).
    let spdk_include_path = env::var("SPDK_INCLUDE").unwrap_or_else(|_| SPDK_INCLUDE_DIR.to_string());
    let output_path = env::var("OUT_DIR").expect("OUT_DIR is set by Cargo when running build scripts");
    let generator = Generator {
        spdk_include_path: Path::new(&spdk_include_path),
        output_path: Path::new(&output_path),
    };
    // One entry per SPDK public header; resolved to `<include>/spdk/<name>.h`.
    let headers = [
        "nvme",
        "event",
        "bdev",
        "env",
        "blob_bdev",
        "blob",
        "log",
        "io_channel",
    ];
    generator.generate(&headers)
}
/// Bundles the two paths the bindgen run needs.
struct Generator<'a> {
    spdk_include_path: &'a Path, // root containing the `spdk/` header directory
    output_path: &'a Path,       // Cargo's OUT_DIR for this build
}
impl<'a> Generator<'a> {
    /// Feeds `<spdk_include_path>/spdk/<name>.h` for each entry in `names`
    /// into a single bindgen builder and writes the combined bindings to
    /// `<output_path>/spdk_bindings.rs`.
    ///
    /// # Panics
    /// Panics when bindgen fails or the output file cannot be written.
    fn generate(&self, names: &[&str]) {
        // Restrict generation to functions and types only.
        let mut codegen_config = bindgen::CodegenConfig::empty();
        codegen_config.set(bindgen::CodegenConfig::FUNCTIONS, true);
        codegen_config.set(bindgen::CodegenConfig::TYPES, true);
        let mut builder = bindgen::builder();
        for name in names {
            // SPDK installs its public headers under `<include>/spdk/`.
            // (Replaces the roundabout `PathBuf::from("spdk/header.h")
            // .with_file_name(name).with_extension("h")` construction.)
            let header_path = self.spdk_include_path.join("spdk").join(format!("{}.h", name));
            builder = builder.header(header_path.display().to_string());
        }
        let bindings = builder
            .derive_default(true)
            .with_codegen_config(codegen_config)
            .generate_inline_functions(false)
            // If there are linking errors and the generated bindings have weird looking
            // #link_names (that start with \u{1}), then make sure to flip that to false.
            .trust_clang_mangling(false)
            .rustfmt_bindings(true)
            .rustfmt_configuration_file(Some(PathBuf::from("../rustfmt.toml")))
            .layout_tests(false)
            .ctypes_prefix("libc")
            .generate()
            .expect("Unable to generate bindings");
        bindings
            .write_to_file(self.output_path.join("spdk_bindings.rs"))
            .expect("Couldn't write bindings!");
    }
}
/// Build-script entry point: generates the SPDK bindings, then emits the
/// Cargo directives for rebuilds and for linking against libspdk.
fn main() {
    generate_bindings();
    // Directive order is preserved exactly as before.
    let directives = [
        "cargo:rerun-if-changed=build.rs",
        "cargo:rustc-link-lib=spdk",
        "cargo:rustc-link-search=native=/usr/local/lib",
    ];
    for directive in directives.iter() {
        println!("{}", directive);
    }
}
|
use crate::bus::Bus;
use twz::device::{BusType, Device};
mod isa;
mod pcie;
/// Instantiates the concrete bus driver for `dev`, dispatching on the bus
/// type recorded in its device header. Bus types without a driver yield
/// `None`.
pub fn create_bus(dev: Device) -> Option<Box<dyn Bus>> {
    let bus: Box<dyn Bus> = match BusType::from_u64(dev.get_device_hdr().bustype) {
        BusType::Isa => Box::new(isa::IsaBus::new(dev)),
        BusType::Pcie => Box::new(pcie::PcieBus::new(dev)),
        // Unknown or unsupported bus type.
        _ => return None,
    };
    Some(bus)
}
|
use crate::engine::basic_types::Event;
use crate::engine::element::Element;
use crate::engine::element::ElementData;
use crate::engine::Component;
use crate::engine::Renderer;
/// Component that moves its parent element along the y axis at a constant
/// rate; intended for bullet-style entities.
pub struct BulletMover {
    speed: f32, // displacement added to `position.y` on every update
}
impl Component for BulletMover {
    /// Bullets do not react to collisions here; always succeeds.
    fn on_collision(&mut self) -> Result<(), String> {
        Ok(())
    }
    /// Advances the parent element's y position by `speed` and returns the
    /// updated element data for the engine to apply.
    fn on_update(&self, parent: &Element, _events: &Vec<Event>) -> Result<Option<ElementData>, String> {
        let mut element = parent.data.clone();
        // `+=` instead of the redundant `x = x + y` form (clippy: assign_op_pattern).
        element.position.y += self.speed;
        Ok(Some(element))
    }
    /// This component draws nothing itself.
    fn on_draw(&self, _: &Element, _: &mut dyn Renderer) -> Result<(), String> {
        Ok(())
    }
}
impl BulletMover {
    /// Creates a boxed `BulletMover` that moves `speed` units per update.
    pub fn new(speed: f32) -> Box<dyn Component> {
        // Field-init shorthand (clippy: redundant_field_names).
        Box::new(BulletMover { speed })
    }
}
|
// svd2rust-generated register API for MPCBB2_VCTR62: `R`/`W` wrap the raw
// 32-bit register value, and each single-bit field Bn (B1984..=B2015, one per
// bit of the register) gets a `BitReader`/`BitWriter` alias below.
// NOTE: generated code — do not hand-edit logic; regenerate from the SVD.
#[doc = "Register `MPCBB2_VCTR62` reader"]
pub type R = crate::R<MPCBB2_VCTR62_SPEC>;
#[doc = "Register `MPCBB2_VCTR62` writer"]
pub type W = crate::W<MPCBB2_VCTR62_SPEC>;
#[doc = "Field `B1984` reader - B1984"]
pub type B1984_R = crate::BitReader;
#[doc = "Field `B1984` writer - B1984"]
pub type B1984_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1985` reader - B1985"]
pub type B1985_R = crate::BitReader;
#[doc = "Field `B1985` writer - B1985"]
pub type B1985_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1986` reader - B1986"]
pub type B1986_R = crate::BitReader;
#[doc = "Field `B1986` writer - B1986"]
pub type B1986_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1987` reader - B1987"]
pub type B1987_R = crate::BitReader;
#[doc = "Field `B1987` writer - B1987"]
pub type B1987_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1988` reader - B1988"]
pub type B1988_R = crate::BitReader;
#[doc = "Field `B1988` writer - B1988"]
pub type B1988_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1989` reader - B1989"]
pub type B1989_R = crate::BitReader;
#[doc = "Field `B1989` writer - B1989"]
pub type B1989_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1990` reader - B1990"]
pub type B1990_R = crate::BitReader;
#[doc = "Field `B1990` writer - B1990"]
pub type B1990_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1991` reader - B1991"]
pub type B1991_R = crate::BitReader;
#[doc = "Field `B1991` writer - B1991"]
pub type B1991_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1992` reader - B1992"]
pub type B1992_R = crate::BitReader;
#[doc = "Field `B1992` writer - B1992"]
pub type B1992_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1993` reader - B1993"]
pub type B1993_R = crate::BitReader;
#[doc = "Field `B1993` writer - B1993"]
pub type B1993_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1994` reader - B1994"]
pub type B1994_R = crate::BitReader;
#[doc = "Field `B1994` writer - B1994"]
pub type B1994_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1995` reader - B1995"]
pub type B1995_R = crate::BitReader;
#[doc = "Field `B1995` writer - B1995"]
pub type B1995_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1996` reader - B1996"]
pub type B1996_R = crate::BitReader;
#[doc = "Field `B1996` writer - B1996"]
pub type B1996_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1997` reader - B1997"]
pub type B1997_R = crate::BitReader;
#[doc = "Field `B1997` writer - B1997"]
pub type B1997_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1998` reader - B1998"]
pub type B1998_R = crate::BitReader;
#[doc = "Field `B1998` writer - B1998"]
pub type B1998_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1999` reader - B1999"]
pub type B1999_R = crate::BitReader;
#[doc = "Field `B1999` writer - B1999"]
pub type B1999_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2000` reader - B2000"]
pub type B2000_R = crate::BitReader;
#[doc = "Field `B2000` writer - B2000"]
pub type B2000_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2001` reader - B2001"]
pub type B2001_R = crate::BitReader;
#[doc = "Field `B2001` writer - B2001"]
pub type B2001_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2002` reader - B2002"]
pub type B2002_R = crate::BitReader;
#[doc = "Field `B2002` writer - B2002"]
pub type B2002_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2003` reader - B2003"]
pub type B2003_R = crate::BitReader;
#[doc = "Field `B2003` writer - B2003"]
pub type B2003_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2004` reader - B2004"]
pub type B2004_R = crate::BitReader;
#[doc = "Field `B2004` writer - B2004"]
pub type B2004_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2005` reader - B2005"]
pub type B2005_R = crate::BitReader;
#[doc = "Field `B2005` writer - B2005"]
pub type B2005_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2006` reader - B2006"]
pub type B2006_R = crate::BitReader;
#[doc = "Field `B2006` writer - B2006"]
pub type B2006_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2007` reader - B2007"]
pub type B2007_R = crate::BitReader;
#[doc = "Field `B2007` writer - B2007"]
pub type B2007_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2008` reader - B2008"]
pub type B2008_R = crate::BitReader;
#[doc = "Field `B2008` writer - B2008"]
pub type B2008_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2009` reader - B2009"]
pub type B2009_R = crate::BitReader;
#[doc = "Field `B2009` writer - B2009"]
pub type B2009_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2010` reader - B2010"]
pub type B2010_R = crate::BitReader;
#[doc = "Field `B2010` writer - B2010"]
pub type B2010_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2011` reader - B2011"]
pub type B2011_R = crate::BitReader;
#[doc = "Field `B2011` writer - B2011"]
pub type B2011_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2012` reader - B2012"]
pub type B2012_R = crate::BitReader;
#[doc = "Field `B2012` writer - B2012"]
pub type B2012_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2013` reader - B2013"]
pub type B2013_R = crate::BitReader;
#[doc = "Field `B2013` writer - B2013"]
pub type B2013_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2014` reader - B2014"]
pub type B2014_R = crate::BitReader;
#[doc = "Field `B2014` writer - B2014"]
pub type B2014_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B2015` reader - B2015"]
pub type B2015_R = crate::BitReader;
#[doc = "Field `B2015` writer - B2015"]
pub type B2015_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    // Read-side accessors (generated): bit k of the register value is
    // field B(1984 + k); each returns a BitReader over that single bit.
    #[doc = "Bit 0 - B1984"]
    #[inline(always)]
    pub fn b1984(&self) -> B1984_R {
        B1984_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - B1985"]
    #[inline(always)]
    pub fn b1985(&self) -> B1985_R {
        B1985_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - B1986"]
    #[inline(always)]
    pub fn b1986(&self) -> B1986_R {
        B1986_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - B1987"]
    #[inline(always)]
    pub fn b1987(&self) -> B1987_R {
        B1987_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - B1988"]
    #[inline(always)]
    pub fn b1988(&self) -> B1988_R {
        B1988_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - B1989"]
    #[inline(always)]
    pub fn b1989(&self) -> B1989_R {
        B1989_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - B1990"]
    #[inline(always)]
    pub fn b1990(&self) -> B1990_R {
        B1990_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - B1991"]
    #[inline(always)]
    pub fn b1991(&self) -> B1991_R {
        B1991_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - B1992"]
    #[inline(always)]
    pub fn b1992(&self) -> B1992_R {
        B1992_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - B1993"]
    #[inline(always)]
    pub fn b1993(&self) -> B1993_R {
        B1993_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - B1994"]
    #[inline(always)]
    pub fn b1994(&self) -> B1994_R {
        B1994_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - B1995"]
    #[inline(always)]
    pub fn b1995(&self) -> B1995_R {
        B1995_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - B1996"]
    #[inline(always)]
    pub fn b1996(&self) -> B1996_R {
        B1996_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - B1997"]
    #[inline(always)]
    pub fn b1997(&self) -> B1997_R {
        B1997_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - B1998"]
    #[inline(always)]
    pub fn b1998(&self) -> B1998_R {
        B1998_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - B1999"]
    #[inline(always)]
    pub fn b1999(&self) -> B1999_R {
        B1999_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - B2000"]
    #[inline(always)]
    pub fn b2000(&self) -> B2000_R {
        B2000_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - B2001"]
    #[inline(always)]
    pub fn b2001(&self) -> B2001_R {
        B2001_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - B2002"]
    #[inline(always)]
    pub fn b2002(&self) -> B2002_R {
        B2002_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - B2003"]
    #[inline(always)]
    pub fn b2003(&self) -> B2003_R {
        B2003_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - B2004"]
    #[inline(always)]
    pub fn b2004(&self) -> B2004_R {
        B2004_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - B2005"]
    #[inline(always)]
    pub fn b2005(&self) -> B2005_R {
        B2005_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - B2006"]
    #[inline(always)]
    pub fn b2006(&self) -> B2006_R {
        B2006_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - B2007"]
    #[inline(always)]
    pub fn b2007(&self) -> B2007_R {
        B2007_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - B2008"]
    #[inline(always)]
    pub fn b2008(&self) -> B2008_R {
        B2008_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - B2009"]
    #[inline(always)]
    pub fn b2009(&self) -> B2009_R {
        B2009_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - B2010"]
    #[inline(always)]
    pub fn b2010(&self) -> B2010_R {
        B2010_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - B2011"]
    #[inline(always)]
    pub fn b2011(&self) -> B2011_R {
        B2011_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - B2012"]
    #[inline(always)]
    pub fn b2012(&self) -> B2012_R {
        B2012_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - B2013"]
    #[inline(always)]
    pub fn b2013(&self) -> B2013_R {
        B2013_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - B2014"]
    #[inline(always)]
    pub fn b2014(&self) -> B2014_R {
        B2014_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - B2015"]
    #[inline(always)]
    pub fn b2015(&self) -> B2015_R {
        B2015_R::new(((self.bits >> 31) & 1) != 0)
    }
}
impl W {
    // Write-side accessors (generated): each returns a BitWriter positioned
    // at the field's bit offset (the const generic argument).
    #[doc = "Bit 0 - B1984"]
    #[inline(always)]
    #[must_use]
    pub fn b1984(&mut self) -> B1984_W<MPCBB2_VCTR62_SPEC, 0> {
        B1984_W::new(self)
    }
    #[doc = "Bit 1 - B1985"]
    #[inline(always)]
    #[must_use]
    pub fn b1985(&mut self) -> B1985_W<MPCBB2_VCTR62_SPEC, 1> {
        B1985_W::new(self)
    }
    #[doc = "Bit 2 - B1986"]
    #[inline(always)]
    #[must_use]
    pub fn b1986(&mut self) -> B1986_W<MPCBB2_VCTR62_SPEC, 2> {
        B1986_W::new(self)
    }
    #[doc = "Bit 3 - B1987"]
    #[inline(always)]
    #[must_use]
    pub fn b1987(&mut self) -> B1987_W<MPCBB2_VCTR62_SPEC, 3> {
        B1987_W::new(self)
    }
    #[doc = "Bit 4 - B1988"]
    #[inline(always)]
    #[must_use]
    pub fn b1988(&mut self) -> B1988_W<MPCBB2_VCTR62_SPEC, 4> {
        B1988_W::new(self)
    }
    #[doc = "Bit 5 - B1989"]
    #[inline(always)]
    #[must_use]
    pub fn b1989(&mut self) -> B1989_W<MPCBB2_VCTR62_SPEC, 5> {
        B1989_W::new(self)
    }
    #[doc = "Bit 6 - B1990"]
    #[inline(always)]
    #[must_use]
    pub fn b1990(&mut self) -> B1990_W<MPCBB2_VCTR62_SPEC, 6> {
        B1990_W::new(self)
    }
    #[doc = "Bit 7 - B1991"]
    #[inline(always)]
    #[must_use]
    pub fn b1991(&mut self) -> B1991_W<MPCBB2_VCTR62_SPEC, 7> {
        B1991_W::new(self)
    }
    #[doc = "Bit 8 - B1992"]
    #[inline(always)]
    #[must_use]
    pub fn b1992(&mut self) -> B1992_W<MPCBB2_VCTR62_SPEC, 8> {
        B1992_W::new(self)
    }
    #[doc = "Bit 9 - B1993"]
    #[inline(always)]
    #[must_use]
    pub fn b1993(&mut self) -> B1993_W<MPCBB2_VCTR62_SPEC, 9> {
        B1993_W::new(self)
    }
    #[doc = "Bit 10 - B1994"]
    #[inline(always)]
    #[must_use]
    pub fn b1994(&mut self) -> B1994_W<MPCBB2_VCTR62_SPEC, 10> {
        B1994_W::new(self)
    }
    #[doc = "Bit 11 - B1995"]
    #[inline(always)]
    #[must_use]
    pub fn b1995(&mut self) -> B1995_W<MPCBB2_VCTR62_SPEC, 11> {
        B1995_W::new(self)
    }
    #[doc = "Bit 12 - B1996"]
    #[inline(always)]
    #[must_use]
    pub fn b1996(&mut self) -> B1996_W<MPCBB2_VCTR62_SPEC, 12> {
        B1996_W::new(self)
    }
    #[doc = "Bit 13 - B1997"]
    #[inline(always)]
    #[must_use]
    pub fn b1997(&mut self) -> B1997_W<MPCBB2_VCTR62_SPEC, 13> {
        B1997_W::new(self)
    }
    #[doc = "Bit 14 - B1998"]
    #[inline(always)]
    #[must_use]
    pub fn b1998(&mut self) -> B1998_W<MPCBB2_VCTR62_SPEC, 14> {
        B1998_W::new(self)
    }
    #[doc = "Bit 15 - B1999"]
    #[inline(always)]
    #[must_use]
    pub fn b1999(&mut self) -> B1999_W<MPCBB2_VCTR62_SPEC, 15> {
        B1999_W::new(self)
    }
    #[doc = "Bit 16 - B2000"]
    #[inline(always)]
    #[must_use]
    pub fn b2000(&mut self) -> B2000_W<MPCBB2_VCTR62_SPEC, 16> {
        B2000_W::new(self)
    }
    #[doc = "Bit 17 - B2001"]
    #[inline(always)]
    #[must_use]
    pub fn b2001(&mut self) -> B2001_W<MPCBB2_VCTR62_SPEC, 17> {
        B2001_W::new(self)
    }
    #[doc = "Bit 18 - B2002"]
    #[inline(always)]
    #[must_use]
    pub fn b2002(&mut self) -> B2002_W<MPCBB2_VCTR62_SPEC, 18> {
        B2002_W::new(self)
    }
    #[doc = "Bit 19 - B2003"]
    #[inline(always)]
    #[must_use]
    pub fn b2003(&mut self) -> B2003_W<MPCBB2_VCTR62_SPEC, 19> {
        B2003_W::new(self)
    }
    #[doc = "Bit 20 - B2004"]
    #[inline(always)]
    #[must_use]
    pub fn b2004(&mut self) -> B2004_W<MPCBB2_VCTR62_SPEC, 20> {
        B2004_W::new(self)
    }
    #[doc = "Bit 21 - B2005"]
    #[inline(always)]
    #[must_use]
    pub fn b2005(&mut self) -> B2005_W<MPCBB2_VCTR62_SPEC, 21> {
        B2005_W::new(self)
    }
    #[doc = "Bit 22 - B2006"]
    #[inline(always)]
    #[must_use]
    pub fn b2006(&mut self) -> B2006_W<MPCBB2_VCTR62_SPEC, 22> {
        B2006_W::new(self)
    }
    #[doc = "Bit 23 - B2007"]
    #[inline(always)]
    #[must_use]
    pub fn b2007(&mut self) -> B2007_W<MPCBB2_VCTR62_SPEC, 23> {
        B2007_W::new(self)
    }
    #[doc = "Bit 24 - B2008"]
    #[inline(always)]
    #[must_use]
    pub fn b2008(&mut self) -> B2008_W<MPCBB2_VCTR62_SPEC, 24> {
        B2008_W::new(self)
    }
    #[doc = "Bit 25 - B2009"]
    #[inline(always)]
    #[must_use]
    pub fn b2009(&mut self) -> B2009_W<MPCBB2_VCTR62_SPEC, 25> {
        B2009_W::new(self)
    }
    #[doc = "Bit 26 - B2010"]
    #[inline(always)]
    #[must_use]
    pub fn b2010(&mut self) -> B2010_W<MPCBB2_VCTR62_SPEC, 26> {
        B2010_W::new(self)
    }
    #[doc = "Bit 27 - B2011"]
    #[inline(always)]
    #[must_use]
    pub fn b2011(&mut self) -> B2011_W<MPCBB2_VCTR62_SPEC, 27> {
        B2011_W::new(self)
    }
    #[doc = "Bit 28 - B2012"]
    #[inline(always)]
    #[must_use]
    pub fn b2012(&mut self) -> B2012_W<MPCBB2_VCTR62_SPEC, 28> {
        B2012_W::new(self)
    }
    #[doc = "Bit 29 - B2013"]
    #[inline(always)]
    #[must_use]
    pub fn b2013(&mut self) -> B2013_W<MPCBB2_VCTR62_SPEC, 29> {
        B2013_W::new(self)
    }
    #[doc = "Bit 30 - B2014"]
    #[inline(always)]
    #[must_use]
    pub fn b2014(&mut self) -> B2014_W<MPCBB2_VCTR62_SPEC, 30> {
        B2014_W::new(self)
    }
    #[doc = "Bit 31 - B2015"]
    #[inline(always)]
    #[must_use]
    pub fn b2015(&mut self) -> B2015_W<MPCBB2_VCTR62_SPEC, 31> {
        B2015_W::new(self)
    }
    // Escape hatch: write the whole 32-bit value at once. Marked `unsafe` by
    // the generator; for this register every bit pattern appears to be valid
    // (32 independent boolean fields) — confirm against the SVD if relying on
    // that.
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "MPCBBx vector register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mpcbb2_vctr62::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mpcbb2_vctr62::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MPCBB2_VCTR62_SPEC;
impl crate::RegisterSpec for MPCBB2_VCTR62_SPEC {
    // 32-bit wide register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`mpcbb2_vctr62::R`](R) reader structure"]
impl crate::Readable for MPCBB2_VCTR62_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mpcbb2_vctr62::W`](W) writer structure"]
impl crate::Writable for MPCBB2_VCTR62_SPEC {
    // No bits of this register need a forced 0/1 during `modify()`.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MPCBB2_VCTR62 to value 0"]
impl crate::Resettable for MPCBB2_VCTR62_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#![allow(clippy::mutex_atomic)]
use std::sync::{Arc, Condvar, Mutex};
/// Cross-thread flags used to serialize pane open/close commands: one thread
/// raises a flag while a command is in flight, and other threads block on the
/// paired condvar until the flag is lowered again.
#[derive(Clone)]
pub struct CommandIsExecuting {
    opening_new_pane: Arc<(Mutex<bool>, Condvar)>,
    closing_pane: Arc<(Mutex<bool>, Condvar)>,
}
impl CommandIsExecuting {
    pub fn new() -> Self {
        CommandIsExecuting {
            opening_new_pane: Arc::new((Mutex::new(false), Condvar::new())),
            closing_pane: Arc::new((Mutex::new(false), Condvar::new())),
        }
    }
    // Sets a flag's value; waiters are only woken when the flag is lowered,
    // matching the original per-method behavior.
    fn set_flag(pair: &(Mutex<bool>, Condvar), raised: bool) {
        let (lock, cvar) = pair;
        *lock.lock().unwrap() = raised;
        if !raised {
            cvar.notify_all();
        }
    }
    // Blocks the calling thread until the flag reads false.
    fn wait_while_raised(pair: &(Mutex<bool>, Condvar)) {
        let (lock, cvar) = pair;
        let mut raised = lock.lock().unwrap();
        while *raised {
            raised = cvar.wait(raised).unwrap();
        }
    }
    /// Marks a pane-close command as in progress.
    pub fn closing_pane(&mut self) {
        Self::set_flag(&self.closing_pane, true)
    }
    /// Marks the pane-close command as finished and wakes all waiters.
    pub fn done_closing_pane(&mut self) {
        Self::set_flag(&self.closing_pane, false)
    }
    /// Marks a pane-open command as in progress.
    pub fn opening_new_pane(&mut self) {
        Self::set_flag(&self.opening_new_pane, true)
    }
    /// Marks the pane-open command as finished and wakes all waiters.
    pub fn done_opening_new_pane(&mut self) {
        Self::set_flag(&self.opening_new_pane, false)
    }
    /// Blocks until no pane-close command is running.
    pub fn wait_until_pane_is_closed(&self) {
        Self::wait_while_raised(&self.closing_pane)
    }
    /// Blocks until no pane-open command is running.
    pub fn wait_until_new_pane_is_opened(&self) {
        Self::wait_while_raised(&self.opening_new_pane)
    }
}
|
use std::cmp::PartialEq;
use std::io::Cursor;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
/// Packet type flag from the primary header: telemetry (type bit 0) or
/// telecommand (type bit 1).
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PktType {
    Telemetry = 0,
    Telecommand = 1,
}
/// Decoded 6-byte space-packet primary header (fields stored right-aligned).
#[derive(Debug)]
pub struct PrimaryHeader {
    pub version_number: u8,          // 3 bits
    pub packet_type: PktType,        // 1 bit
    pub secondary_header_flag: bool, // 1 bit
    pub apid: u16,                   // 11 bits
    pub sequence_flags: u8,          // 2 bits
    pub sequence_counter: u16,       // 14 bits
    pub data_length: u16,            // 16 bits
}
impl PrimaryHeader {
    /// Decodes the first 6 bytes of `buf` (big-endian) into a header.
    ///
    /// # Panics
    /// Panics when `buf` is shorter than 6 bytes.
    pub fn from_buffer(buf: &[u8]) -> PrimaryHeader {
        // Three big-endian 16-bit words, read with stdlib conversions
        // (the byteorder crate / Cursor machinery is unnecessary here).
        let word0 = u16::from_be_bytes([buf[0], buf[1]]);
        let word1 = u16::from_be_bytes([buf[2], buf[3]]);
        let word2 = u16::from_be_bytes([buf[4], buf[5]]);
        PrimaryHeader {
            version_number: get_version_number(word0),
            packet_type: get_packet_type(word0),
            secondary_header_flag: get_secondary_header_flag(word0),
            apid: get_apid(word0),
            sequence_flags: get_sequence_flags(word1),
            sequence_counter: get_sequence_counter(word1),
            data_length: word2,
        }
    }
    /// Re-encodes the header into its 6-byte big-endian wire format.
    pub fn get_buffer(&self) -> Vec<u8> {
        let mut buf = Vec::with_capacity(6);
        // First 2 bytes: version | type | secondary-header flag | APID.
        // (`apid` and `sequence_counter` are already u16; the old `as u16`
        // casts on them were redundant.)
        let word0 = ((self.version_number as u16) << 13)
            | ((self.packet_type as u16) << 12)
            | ((self.secondary_header_flag as u16) << 11)
            | self.apid;
        buf.extend_from_slice(&word0.to_be_bytes());
        // Next 2 bytes: sequence flags | sequence counter.
        let word1 = ((self.sequence_flags as u16) << 14) | self.sequence_counter;
        buf.extend_from_slice(&word1.to_be_bytes());
        // Final 2 bytes: data length.
        buf.extend_from_slice(&self.data_length.to_be_bytes());
        buf
    }
}
/// Masks to filter the desired fields in the provided buffer.
enum FieldsFilter {
    // First 2 bytes (u16)
    VersionNo = 0xE000,
    PkyType = 0x1000, // NOTE(review): name typo for "PktType" kept — private API
    SecHdrFlag = 0x0800,
    Apid = 0x07FF,
    // Next 2 bytes (u16)
    SeqFlags = 0xC000,
    SeqCount = 0x3FFF,
}
/// Top 3 bits of the first header word.
fn get_version_number(val: u16) -> u8 {
    let filter = FieldsFilter::VersionNo as u16;
    ((val & filter) >> 13) as u8
}
/// Bit 12 of the first header word, mapped onto [`PktType`].
fn get_packet_type(val: u16) -> PktType {
    let filter = FieldsFilter::PkyType as u16;
    let flag = ((val & filter) >> 12) as u8;
    match flag {
        0 => PktType::Telemetry,
        1 => PktType::Telecommand,
        _ => panic!("The masked value should be 0 or 1"),
    }
}
/// Bit 11 of the first header word.
fn get_secondary_header_flag(val: u16) -> bool {
    let filter = FieldsFilter::SecHdrFlag as u16;
    (val & filter) >> 11 != 0
}
/// Low 11 bits of the first header word.
fn get_apid(val: u16) -> u16 {
    let filter = FieldsFilter::Apid as u16;
    val & filter
}
/// Top 2 bits of the second header word.
fn get_sequence_flags(val: u16) -> u8 {
    let filter = FieldsFilter::SeqFlags as u16;
    ((val & filter) >> 14) as u8
}
/// Low 14 bits of the second header word.
fn get_sequence_counter(val: u16) -> u16 {
    let filter = FieldsFilter::SeqCount as u16;
    val & filter
}
//
// UNIT TESTS
//
#[cfg(test)]
mod test {
    use super::*;
    // Reference encoded headers and their expected decoded field values.
    const SP1_HEADER: [u8; 6] = [0x08, 0x73, 0xC1, 0x23, 0x00, 0x0F];
    const SP2_HEADER: [u8; 6] = [0x17, 0x54, 0xC6, 0x82, 0x00, 0x04];
    #[test]
    fn test_sp1() {
        // Decode and check every field.
        let pkt = PrimaryHeader::from_buffer(&SP1_HEADER);
        assert_eq!(pkt.version_number, 0);
        assert_eq!(pkt.packet_type, PktType::Telemetry);
        // `assert!` rather than comparing with `true`
        // (clippy: bool_assert_comparison).
        assert!(pkt.secondary_header_flag);
        assert_eq!(pkt.apid, 0x0073);
        assert_eq!(pkt.sequence_flags, 0x03);
        assert_eq!(pkt.sequence_counter, 0x0123);
        assert_eq!(pkt.data_length, 0x000F);
        // Re-encode the same fields and verify the round trip.
        let pkt = PrimaryHeader {
            version_number: 0,
            packet_type: PktType::Telemetry,
            secondary_header_flag: true,
            apid: 0x0073,
            sequence_flags: 0x03,
            sequence_counter: 0x0123,
            data_length: 0x000F,
        };
        let buf = pkt.get_buffer();
        assert_eq!(buf, SP1_HEADER);
    }
    #[test]
    fn test_sp2() {
        let pkt = PrimaryHeader::from_buffer(&SP2_HEADER);
        assert_eq!(pkt.version_number, 0);
        assert_eq!(pkt.packet_type, PktType::Telecommand);
        assert!(!pkt.secondary_header_flag);
        assert_eq!(pkt.apid, 0x0754);
        assert_eq!(pkt.sequence_flags, 0x03);
        assert_eq!(pkt.sequence_counter, 0x0682);
        assert_eq!(pkt.data_length, 0x0004);
        let pkt = PrimaryHeader {
            version_number: 0,
            packet_type: PktType::Telecommand,
            secondary_header_flag: false,
            apid: 0x0754,
            sequence_flags: 0x03,
            sequence_counter: 0x0682,
            data_length: 0x0004,
        };
        let buf = pkt.get_buffer();
        assert_eq!(buf, SP2_HEADER);
    }
}
|
#[macro_use]
extern crate log;
pub mod controller;
pub mod webserver;
|
use std::fs::File;
use std::io;
use std::io::ErrorKind;
use std::io::Read;
// use std::io::File; // removed: does not compile — there is no `File` in
// `std::io`; the type lives in `std::fs` and is already imported above.
/** Topic: Error Handling **/
/// Demonstrates `panic!`: returns `name` unchanged when it is exactly six
/// bytes long.
///
/// # Panics
/// Panics when `name.len() != 6`.
fn panic_example(name: &str) -> &str {
    if name.len() != 6 {
        panic!("The string is not equal to six!!!!!!!!!!!!");
    }
    // Tail expression instead of an explicit `return` (clippy: needless_return).
    name
}
/** Topic: Result Type **/
// Simple usage of Result Type
/// Opens `file_name`, creating it first when it does not exist.
///
/// Tutorial demo of matching on `io::Error::kind()`: any error other than
/// `NotFound` — or a failure while creating — aborts with `panic!`.
fn open_file(file_name: &str) {
    // This uses the Result Type by default
    let f = File::open(file_name);
    let _f: File = match f {
        Ok(file) => file,
        Err(error) => match error.kind() {
            // Fixed: create the *requested* file instead of the hard-coded
            // "test.txt" (now consistent with `open_file_rusty`).
            ErrorKind::NotFound => match File::create(file_name) {
                Ok(fc) => fc,
                Err(e) => panic!("Problems creating the file: {:?}", e)
            },
            other_error => panic!("Problems opening the file: {:?}", other_error),
        }
    };
}
// This is more idiomatic Rust
/// Same contract as `open_file`, written with `unwrap_or_else` combinators —
/// the more idiomatic form: opens `file_name`, creating it when missing, and
/// panics on any other error.
fn open_file_rusty(file_name: &str) {
    let _ = File::open(file_name).unwrap_or_else(|error| match error.kind() {
        // Missing file: create it, panicking if even that fails.
        ErrorKind::NotFound => File::create(file_name).unwrap_or_else(|error| {
            panic!("Problem: {:?}", error);
        }),
        // Anything else is fatal for this demo.
        _ => panic!("Problem opening the file: {:?}", error),
    });
}
/** Example: Error Matching **/
/// Thin demo wrapper: delegates straight to `open_file`, which opens
/// `file_name` and falls back to creating a file when the open fails
/// with `NotFound` (panicking on other errors). Exists only to label
/// the "error matching" topic in this tutorial file.
fn error_matching(file_name: &str) {
    open_file(file_name);
}
/** Topic: Propagating Errors **/
// Read a username from a file
/// Reads the entire contents of `yo.txt` and returns it as the username.
///
/// # Errors
/// Propagates any `io::Error` from opening or reading the file.
fn read_username_from_file() -> Result<String, io::Error> {
    // Idiom: `?` replaces the hand-written match-and-return-Err plumbing
    // for both the open and the read.
    let mut f = File::open("yo.txt")?;
    let mut s = String::new();
    f.read_to_string(&mut s)?;
    Ok(s)
}
// `?` - shortcut for propagating errors
/// Same contract as `read_username_from_file`, using `?` throughout.
///
/// # Errors
/// Propagates any `io::Error` from opening or reading the file.
fn read_username_from_file_shortcut() -> Result<String, io::Error> {
    let mut f = File::open("yo.txt")?;
    let mut s = String::new();
    // BUG FIX: the read result was silently discarded (unused `Result`);
    // a failed read now propagates instead of returning a partial string.
    f.read_to_string(&mut s)?;
    Ok(s)
}
/// Demo driver: walks through each error-handling example in order.
/// `open_file("test.txt")` creates the file, so the final `expect` below
/// finds it on a clean run.
fn main() {
    // Uncomment to watch the panic example abort the process:
    // panic_example("hello");
    open_file("test.txt");
    open_file_rusty("test.txt");
    error_matching("test.txt");
    // `expect` is `unwrap` with a custom panic message.
    let _f = File::open("test.txt").expect("Failed to open");
}
|
use crate::shared::list_node::ListNode;
struct Solution;
/// https://leetcode.com/problems/add-two-numbers/
impl Solution {
    /// Adds two non-negative integers stored as linked lists of digits
    /// (least-significant first), returning the sum in the same form.
    /// 0 ms 2.1 MB
    pub fn add_two_numbers(
        l1: Option<Box<ListNode>>,
        l2: Option<Box<ListNode>>,
    ) -> Option<Box<ListNode>> {
        let (mut a, mut b) = (l1, l2);
        // Dummy head: `head.next` is the real result list.
        let mut head = ListNode::new(0);
        let mut tail = &mut head;
        let mut carry = 0;
        while a.is_some() || b.is_some() {
            // Consume one digit from each list (when present), folding
            // its value into the running digit sum.
            let mut digit_sum = carry;
            if let Some(node) = a.take() {
                digit_sum += node.val;
                a = node.next;
            }
            if let Some(node) = b.take() {
                digit_sum += node.val;
                b = node.next;
            }
            carry = digit_sum / 10;
            tail.next = Some(Box::new(ListNode::new(digit_sum % 10)));
            tail = tail.next.as_deref_mut().unwrap();
        }
        // A leftover carry becomes one final digit.
        if carry != 0 {
            tail.next = Some(Box::new(ListNode::new(carry)));
        }
        head.next
    }
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn test() {
        // Build both operands and the expected sum from digit slices,
        // then compare the resulting list structures directly.
        fn check(lhs: &[i32], rhs: &[i32], want: &[i32]) {
            let got = Solution::add_two_numbers(
                ListNode::in_option_box_from_array(lhs),
                ListNode::in_option_box_from_array(rhs),
            );
            assert_eq!(got, ListNode::in_option_box_from_array(want));
        }
        check(&[2, 4, 3], &[5, 6, 4], &[7, 0, 8]);
        check(&[3], &[1, 2], &[4, 2]);
        // Carry propagating past both inputs: 9 + 9 = 18.
        check(&[9], &[9], &[8, 1]);
    }
}
|
/*
Given a string with friends to visit in different states:
ad3="John Daggett, 341 King Road, Plymouth MA
Alice Ford, 22 East Broadway, Richmond VA
Sal Carpenter, 73 6th Street, Boston MA"
we want to produce a result that sorts the names by state and lists the name of the state followed
by the name of each person residing in that state (people's names sorted). When the result is
printed we get:
Massachusetts
.....^John Daggett 341 King Road Plymouth Massachusetts
.....^Sal Carpenter 73 6th Street Boston Massachusetts
^Virginia
.....^Alice Ford 22 East Broadway Richmond Virginia
Spaces not being always well seen, in the above result ^ means a white space.
The resulting string (when not printed) will be:
"Massachusetts\n..... John Daggett 341 King Road Plymouth Massachusetts\n..... Sal Carpenter 73 6th
Street Boston Massachusetts\n Virginia\n..... Alice Ford 22 East Broadway Richmond Virginia"
or (the separator is \n or \r\n depending on the language)
"Massachusetts\r\n..... John Daggett 341 King Road Plymouth Massachusetts\r\n..... Sal Carpenter 73
6th Street Boston Massachusetts\r\n Virginia\r\n..... Alice Ford 22 East Broadway Richmond Virginia"
Notes
There can be a blank last line in the given string of addresses.
The tests only contains CA, MA, OK, PA, VA, AZ, ID, IN for states.
You can see another example in the "Sample tests".
States
For the lazy ones:
'AZ': 'Arizona',
'CA': 'California',
'ID': 'Idaho',
'IN': 'Indiana',
'MA': 'Massachusetts',
'OK': 'Oklahoma',
'PA': 'Pennsylvania',
'VA': 'Virginia'
*/
use std::collections::BTreeMap;
/// Demo entry point: prints the sample address book grouped by state.
fn main() {
    let sample = "Alice Ford, 22 East Broadway, Richmond VA\nSal Carpenter, 73 6th Street, Boston MA\nJohn Daggett, 341 King Road, Plymouth MA";
    println!("{}", by_state(sample));
}
/// Groups newline-separated address lines by full state name.
///
/// Each input line ends with a two-letter state abbreviation. Output is
/// each state name followed by "\n..... " + each address (commas
/// stripped, abbreviation expanded), states in alphabetical order,
/// addresses sorted within a state, and "\n " between state groups.
///
/// # Panics
/// Panics (via `unwrap`) on an abbreviation outside the eight-state
/// table below, preserving the original contract.
fn by_state(input: &str) -> String {
    // Abbreviation -> full state name lookup table.
    let state_map: BTreeMap<String, String> = [
        ("AZ", "Arizona"), ("CA", "California"), ("ID", "Idaho"),
        ("IN", "Indiana"), ("MA", "Massachusetts"), ("OK", "Oklahoma"),
        ("PA", "Pennsylvania"), ("VA", "Virginia"),
    ]
    .iter()
    .map(|x| (x.0.to_string(), x.1.to_string()))
    .collect();
    // BTreeMap keeps the state groups in alphabetical order for free.
    let mut holding_map: BTreeMap<String, Vec<String>> = BTreeMap::new();
    for line in input.split('\n') {
        // BUG FIX: the old guard compared against the literal "\n",
        // which `split('\n')` can never yield, so the blank trailing
        // line the kata allows reached the lookup below and panicked.
        if line.trim().is_empty() {
            continue;
        }
        // The state abbreviation is the final two characters.
        let mut tail: Vec<char> = line.chars().rev().take(2).collect();
        tail.reverse();
        let abbreviation: String = tail.into_iter().collect();
        let state = state_map.get(abbreviation.as_str()).unwrap().to_string();
        let expanded = line.replace(&abbreviation, &state).replace(",", "");
        holding_map.entry(state).or_insert_with(Vec::new).push(expanded);
    }
    let mut result = String::new();
    let group_count = holding_map.len();
    for (index, (state, mut addresses)) in holding_map.into_iter().enumerate() {
        result.push_str(&state);
        addresses.sort();
        for address in addresses {
            result.push_str("\n..... ");
            result.push_str(&address);
        }
        // "\n " separates consecutive state groups; none after the last.
        if index < group_count - 1 {
            result.push_str("\n ");
        }
    }
    result
}
|
use support::{decl_module, decl_storage, StorageValue, StorageMap};
use codec::{Encode, Decode};
use runtime_io::blake2_128;
use system::ensure_signed;
use rstd::result;
/// Module configuration trait; currently only inherits the system trait.
pub trait Trait: system::Trait {
}
/// A kitty, identified solely by its 128-bit DNA value.
#[derive(Encode, Decode, Default)]
pub struct Kitty(pub [u8; 16]);
// Runtime storage for the kitties module. `decl_storage!` expands this
// DSL into storage accessors; `as Kitties` namespaces the storage keys.
decl_storage! {
    trait Store for Module<T: Trait> as Kitties {
        /// Stores all the kitties, key is the kitty id / index
        pub Kitties get(kitty): map u32 => Option<Kitty>;
        /// Stores the total number of kitties. i.e. the next kitty index
        pub KittiesCount get(kitties_count): u32;
    }
}
// Dispatchable calls for the kitties module. `decl_module!` turns each
// `pub fn` below into an extrinsic callable via a transaction.
decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        /// Create a new kitty
        pub fn create(origin) {
            // Only signed transactions may create kitties.
            let sender = ensure_signed(origin)?;
            let count = Self::kitties_count();
            // The count doubles as the next kitty id; guard the id space.
            if count == u32::max_value() {
                return Err("Kitties count overflow");
            }
            // DNA seed: chain randomness plus per-call entropy
            // (sender, extrinsic index, block number).
            let payload = (<system::Module<T>>::random_seed(), sender, <system::Module<T>>::extrinsic_index(), <system::Module<T>>::block_number());
            let dna = payload.using_encoded(blake2_128);
            let kitty = Kitty(dna);
            Kitties::insert(count, kitty);
            KittiesCount::put(count + 1);
        }
        // Breed a new kitty from two existing parents; errors from
        // `do_breed` propagate as the call's failure reason.
        pub fn breed(origin, kitty_id_1: u32, kitty_id_2: u32) {
            let sender = ensure_signed(origin)?;
            Self::do_breed(&sender, kitty_id_1, kitty_id_2)?;
        }
    }
}
impl<T: Trait> Module<T> {
    /// Breeds a new kitty from two parents and returns its id.
    ///
    /// Errors with a static string when either parent id is unknown or
    /// the kitty id space is exhausted.
    fn do_breed(sender: &T::AccountId, kitty_id_1: u32, kitty_id_2: u32) -> result::Result<u32, &'static str> {
        // BUG FIX: unknown parent ids previously hit `unwrap()` and
        // panicked inside the runtime; they now surface as `Err`.
        let kitty1 = Self::kitty(kitty_id_1).ok_or("Invalid kitty_id_1")?;
        let kitty2 = Self::kitty(kitty_id_2).ok_or("Invalid kitty_id_2")?;
        let kitty_id = Self::next_kitty_id()?;
        let kitty1_dna = kitty1.0;
        let kitty2_dna = kitty2.0;
        // Generate a random 128-bit selector: each selector bit picks
        // the matching DNA bit from parent 1 (set) or parent 2 (clear).
        let selector = Self::random_value(&sender);
        let mut new_dna = [0u8; 16];
        for i in 0..kitty1_dna.len() {
            new_dna[i] = combine_dna(kitty1_dna[i], kitty2_dna[i], selector[i]);
        }
        Self::insert_kitty(kitty_id, Kitty(new_dna));
        Ok(kitty_id)
    }
    /// Stores `kitty` under `kitty_id` and bumps the kitty count.
    fn insert_kitty(kitty_id: u32, kitty: Kitty) {
        <Kitties>::insert(kitty_id, kitty);
        <KittiesCount>::put(kitty_id + 1);
    }
    /// Returns the next free kitty id, or an error when the u32 id
    /// space is exhausted (the count doubles as the next id).
    fn next_kitty_id() -> result::Result<u32, &'static str> {
        let kitty_id = Self::kitties_count();
        if kitty_id == u32::max_value() {
            return Err("Kitties count overflow");
        }
        Ok(kitty_id)
    }
    /// Derives 128 bits of per-call entropy from the chain random seed,
    /// the sender, the extrinsic index and the block number.
    fn random_value(sender: &T::AccountId) -> [u8; 16] {
        let payload = (<system::Module<T>>::random_seed(), sender, <system::Module<T>>::extrinsic_index(), <system::Module<T>>::block_number());
        payload.using_encoded(blake2_128)
    }
}
/// Bitwise blend of two DNA bytes: each bit comes from `dna1` where the
/// matching `selector` bit is 1, and from `dna2` where it is 0.
fn combine_dna(dna1: u8, dna2: u8, selector: u8) -> u8 {
    (dna1 & selector) | (dna2 & !selector)
}
#![allow(warnings)]
use std::fs;
use std::error;
use std::fmt;
use std::io::Cursor;
use byteorder::*;
use std::slice::SliceIndex;
use std::convert::TryInto;
pub mod phdr;
pub mod shdr;
pub mod segment;
pub mod section;
use segment::Segment;
use section::Section;
/// Errors produced while loading or writing an ELF image.
#[derive(Debug, Clone)]
pub enum ParsingError {
    /// The magic bytes 0x7F 'E' 'L' 'F' were not found.
    NotElf,
    /// Catch-all for any other structural failure (also wraps the I/O
    /// error from `Elf::write_file`, which is not carried along).
    ParsingError
}
// Crate-local alias so parsing APIs can return `Result<T>` tersely.
type Result<T> = std::result::Result<T, ParsingError>;
// This is important for other errors to wrap this one.
impl fmt::Display for ParsingError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "invalid first item to double")
}
}
impl error::Error for ParsingError {
    /// Always `None`: these errors originate here and do not wrap a
    /// lower-level cause.
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        // Generic error, underlying cause isn't tracked.
        None
    }
}
/// ELF object file type (`e_type`); discriminants mirror the spec values.
#[derive(Copy, Clone)]
pub enum Elf_type {
    NONE = 0x0,
    REL = 0x1,
    EXEC = 0x2,
    DYN = 0x3,
    CORE = 0x4,
    LOOS = 0xfe00,
    HIOS = 0xfeff,
    LOPROC = 0xff00,
    HIPROC = 0xffff
}
/// Word-size class (`EI_CLASS`): 32- or 64-bit.
#[derive(Copy, Clone)]
pub enum Elf_class {
    ELF32 = 1,
    ELF64 = 2
}
/// Byte order (`EI_DATA`). (Name keeps the existing public spelling.)
#[derive(Copy, Clone)]
pub enum Elf_endiannes {
    LittleEndian = 1,
    BigEndian = 2
}
/// Target machine architecture (`e_machine`); subset of the spec values.
#[derive(Copy, Clone)]
pub enum Elf_arch {
    NONE = 0x0,
    SPARC = 0x2,
    X86 = 0x3,
    MIPS = 0x8,
    POWERPC = 0x14,
    S390 = 0x16,
    ARM = 0x28,
    SUPERH = 0x2A,
    IA64 = 0x32,
    AMD64 = 0x3E,
    AARCH64 = 0xB7,
    RISCV = 0xF3
}
/// OS/ABI identification (`EI_OSABI`).
#[derive(Copy, Clone)]
pub enum Elf_abi {
    NONE = 0x0,
    HPUX = 0x1,
    NetBSD = 0x2,
    Linux = 0x3,
    GNUHurd = 0x4,
    Solaris = 0x6,
    AIX = 0x7,
    IRIX = 0x8,
    FreeBSD = 0x9,
    Tru64 = 0xA,
    NovellModesto = 0xB,
    OpenBSD = 0xC,
    OpenVMS = 0xD,
    NonStopKernel = 0xE,
    AROS = 0xF,
    FenixOS = 0x10,
    CloudABI = 0x11,
    OpenVOS = 0x12
}
/// Parsed ELF64 file header (Elf64_Ehdr); field order matches `to_le`.
pub struct Elf_header {
    e_ident: [u8;4],             // magic bytes: 0x7F 'E' 'L' 'F'
    e_class: Elf_class,          // 32/64-bit class byte
    e_endianness: Elf_endiannes, // byte-order byte
    ei_version: u8,              // e_ident version byte
    e_abi: Elf_abi,              // OS/ABI byte
    e_abi_version: u8,           // ABI version byte
    e_padding: [u8;7],           // unused tail of e_ident
    pub e_type: Elf_type,        // object type (REL/EXEC/DYN/...)
    e_arch: Elf_arch,            // target machine
    e_version: u32,              // file version word
    e_entry: u64,                // entry-point virtual address
    e_flags: u32,                // processor-specific flags
    size: u16,                   // e_ehsize: size of this header
    pub phdr_offset: u64,        // file offset of program header table
    pub phdr_size: u16,          // size of one program header entry
    pub phdr_num: u16,           // number of program header entries
    pub shdr_offset: u64,        // file offset of section header table
    pub shdr_size: u16,          // size of one section header entry
    pub shdr_num: u16,           // number of section header entries
    pub shstrndx: u16            // index of section-name string table
}
impl Elf_header {
    /// Serializes this header into the 64-byte little-endian ELF64
    /// layout; the emission order fixes the byte offsets, annotated
    /// per line (they mirror the offsets read back in `parse`).
    pub fn to_le(self) -> Vec<u8> {
        let mut bin = vec![];
        // ASSEMBLE THE ELF HEADER
        bin.extend_from_slice(&self.e_ident);                            // 0x00: magic
        bin.extend_from_slice(&(self.e_class as u8).to_le_bytes());      // 0x04: class
        bin.extend_from_slice(&(self.e_endianness as u8).to_le_bytes()); // 0x05: endianness
        bin.extend_from_slice(&self.ei_version.to_le_bytes());           // 0x06: e_ident version
        bin.extend_from_slice(&(self.e_abi as u8).to_le_bytes());        // 0x07: OS/ABI
        bin.extend_from_slice(&[self.e_abi_version]);                    // 0x08: ABI version
        bin.extend_from_slice(&self.e_padding);                          // 0x09-0x0F: padding
        bin.extend_from_slice(&(self.e_type as u16).to_le_bytes());      // 0x10: e_type
        bin.extend_from_slice(&(self.e_arch as u16).to_le_bytes());      // 0x12: e_machine
        bin.extend_from_slice(&self.e_version.to_le_bytes());            // 0x14: e_version
        bin.extend_from_slice(&self.e_entry.to_le_bytes());              // 0x18: entry point
        bin.extend_from_slice(&self.phdr_offset.to_le_bytes());          // 0x20: phdr table offset
        bin.extend_from_slice(&self.shdr_offset.to_le_bytes());          // 0x28: shdr table offset
        bin.extend_from_slice(&self.e_flags.to_le_bytes());              // 0x30: e_flags
        bin.extend_from_slice(&self.size.to_le_bytes());                 // 0x34: header size
        bin.extend_from_slice(&self.phdr_size.to_le_bytes());            // 0x36: phdr entry size
        bin.extend_from_slice(&self.phdr_num.to_le_bytes());             // 0x38: phdr count
        bin.extend_from_slice(&self.shdr_size.to_le_bytes());            // 0x3A: shdr entry size
        bin.extend_from_slice(&self.shdr_num.to_le_bytes());             // 0x3C: shdr count
        bin.extend_from_slice(&self.shstrndx.to_le_bytes());             // 0x3E: shstrndx
        return bin;
    }
}
/// In-memory ELF image: parsed header and tables plus the raw bytes
/// the image was parsed from (used as the base when re-serializing).
pub struct Elf {
    pub header: Elf_header, // pub program_hdrs: Vec<phdr::ProgramHeader>,
    pub phdrs: Vec<phdr::ProgramHeader>, // program header table
    pub shdrs: Vec<shdr::SectionHeader>, // section header table
    pub raw: Vec<u8> // original file bytes
    // pub segments: Vec<Segment>,
    // pub section_hdrs: Vec<shdr::SectionHeader>,
}
impl Elf {
    /// Serializes the ELF back to little-endian bytes.
    ///
    /// Re-emits the (possibly modified) ELF header over the first 0x40
    /// bytes and the program header table over its region; every other
    /// byte of the original raw image is returned unchanged.
    /// (A large block of dead, commented-out segment-rewriting code was
    /// removed here; see VCS history if it is ever needed again.)
    pub fn to_le(mut self) -> Vec<u8> {
        let phdrt_start = self.header.phdr_offset;
        // Widen BEFORE multiplying: a u16 * u16 product can overflow
        // (panicking in debug builds) for large program header tables.
        let phdrt_end = phdrt_start
            + (self.header.phdr_num as u64) * (self.header.phdr_size as u64);
        // Overwrite the 64-byte ELF header at the start of the image.
        self.raw.splice(0..0x40, self.header.to_le());
        // Overwrite the program header table in place.
        self.raw.splice(phdrt_start as usize..phdrt_end as usize, phdr::to_le(self.phdrs));
        self.raw
    }
    /// Serializes the ELF and writes it to `path`.
    ///
    /// # Errors
    /// Returns `ParsingError::ParsingError` when the write fails (the
    /// error type does not carry the underlying I/O error).
    pub fn write_file(self, path: &str) -> Result<()> {
        let bin = self.to_le();
        fs::write(path, bin).map_err(|_| ParsingError::ParsingError)
    }
}
/// Returns `size` zero bytes, for use as padding/filler.
fn pad(size: u32) -> Vec<u8> {
    std::iter::repeat(0u8).take(size as usize).collect()
}
impl Elf {
fn parse(bin: Vec<u8>) -> Result<Elf> {
let shstrndx = LittleEndian::read_u16(&bin[0x3E..0x40]);
let program_hdrs = phdr::parse_program_header(&bin)?;
let section_hdrs = shdr::parse_section_header(&bin, shstrndx)?;
return Ok(Elf {
header: Elf_header::parse(&bin)?,
phdrs:program_hdrs,
shdrs: section_hdrs,
raw: bin
})
}
}
impl Elf_header {
    /// Parses the ELF64 header from the first 0x40 bytes of `bin`.
    ///
    /// # Errors
    /// `NotElf` when the magic bytes are wrong or the buffer is shorter
    /// than a full 64-byte ELF64 header.
    fn parse(bin: &Vec<u8>) -> Result<Elf_header> {
        if !is_elf(&bin) {
            return Err(ParsingError::NotElf)
        }
        // BUG FIX: the fixed-offset reads below extend to 0x40; reject a
        // truncated buffer instead of panicking on an out-of-range slice.
        if bin.len() < 0x40 {
            return Err(ParsingError::NotElf);
        }
        // Magic already validated above, so store the canonical bytes.
        let e_ident = [0x7F, 0x45, 0x4C, 0x46];
        let e_endianness = parse_endianness(&bin);
        let e_class = parse_class(&bin);
        let ei_version = bin[0x06];
        let e_abi_version = bin[0x08];
        let e_padding = [bin[0x9],bin[0xA],bin[0xB],bin[0xC],bin[0xD],bin[0xE],bin[0xF]];
        let e_abi = parse_abi(&bin);
        let e_version = LittleEndian::read_u32(&bin[0x14..0x18]);
        let e_arch = parse_arch(&bin);
        let e_type = parse_type(&bin);
        let e_entry = parse_entry64(&bin);
        let shstrndx = LittleEndian::read_u16(&bin[0x3E..0x40]);
        let e_flags = LittleEndian::read_u32(&bin[0x30..0x34]);
        let size = LittleEndian::read_u16(&bin[0x34..0x36]);
        let phdr_offset = LittleEndian::read_u64(&bin[0x20..0x28]);
        let phdr_size = LittleEndian::read_u16(&bin[0x36..0x38]);
        let phdr_num = LittleEndian::read_u16(&bin[0x38..0x3A]);
        let shdr_offset = LittleEndian::read_u64(&bin[0x28..0x30]);
        let shdr_size = LittleEndian::read_u16(&bin[0x3A..0x3C]);
        let shdr_num = LittleEndian::read_u16(&bin[0x3C..0x3E]);
        // Removed: a `shdr::parse_section_header` call whose result was
        // never used; `Elf::parse` (the visible caller) already parses
        // the section header table itself.
        Ok(Elf_header{
            e_ident,
            e_endianness,
            e_class,
            ei_version,
            e_abi_version,
            e_padding,
            e_abi,
            e_version,
            e_arch,
            e_type,
            e_entry,
            e_flags,
            size,
            phdr_offset,
            phdr_size,
            phdr_num,
            shdr_offset,
            shdr_size,
            shdr_num,
            shstrndx
        })
    }
}
/// True when `bin` begins with the ELF magic bytes 0x7F 'E' 'L' 'F'
/// (a buffer shorter than four bytes is never ELF).
fn is_elf(bin: &Vec<u8>) -> bool {
    bin.starts_with(&[0x7F, 0x45, 0x4C, 0x46])
}
/// Reads the 64-bit entry-point address (`e_entry`, offset 0x18).
fn parse_entry64(bin: &Vec<u8>) -> u64 {
    // Idiom fix: tail expression instead of redundant `return`.
    LittleEndian::read_u64(&bin[0x18..0x20])
}
fn parse_type(bin: &Vec<u8>) -> Elf_type {
return match LittleEndian::read_u16(&bin[0x10..0x12]) {
0x0 => return Elf_type::NONE,
0x1 => return Elf_type::REL,
0x2 => return Elf_type::EXEC,
0x3 => return Elf_type::DYN,
0x4 => return Elf_type::CORE,
0xFE00 => return Elf_type::LOOS,
0xFEFF => return Elf_type::HIOS,
0xFF00 => return Elf_type::LOPROC,
0xFFFF => return Elf_type::HIPROC,
_ => return Elf_type::NONE,
}
}
fn parse_arch(bin: &Vec<u8>) -> Elf_arch {
return match LittleEndian::read_u16(&bin[0x12..0x14]) {
0x0 => return Elf_arch::NONE,
0x2 => return Elf_arch::SPARC,
0x3 => return Elf_arch::X86,
0x8 => return Elf_arch::MIPS,
0x14 => return Elf_arch::POWERPC,
0x16 => return Elf_arch::S390,
0x28 => return Elf_arch::ARM,
0x2A => return Elf_arch::SUPERH,
0x32 => return Elf_arch::IA64,
0x3E => return Elf_arch::AMD64,
0xB7 => return Elf_arch::AARCH64,
0xF3 => return Elf_arch::RISCV,
_ => return Elf_arch::NONE,
}
}
fn parse_class(bin: &Vec<u8>) -> Elf_class {
return match bin[4] {
1 => return Elf_class::ELF32,
_ => return Elf_class::ELF64
}
}
fn parse_abi(bin: &Vec<u8>) -> Elf_abi {
return match bin[7] {
0x0 => return Elf_abi::NONE,
0x1 => return Elf_abi::HPUX,
0x2 => return Elf_abi::NetBSD,
0x3 => return Elf_abi::Linux,
0x4 => return Elf_abi::GNUHurd,
0x6 => return Elf_abi::Solaris,
0x7 => return Elf_abi::AIX,
0x8 => return Elf_abi::IRIX,
0x9 => return Elf_abi::FreeBSD,
0x0A => return Elf_abi::Tru64,
0x0B => return Elf_abi::NovellModesto,
0x0C => return Elf_abi::OpenBSD,
0x0D => return Elf_abi::OpenVMS,
0x0E => return Elf_abi::NonStopKernel,
0x0F => return Elf_abi::AROS,
0x10 => return Elf_abi::FenixOS,
0x11 => return Elf_abi::CloudABI,
0x12 => return Elf_abi::OpenVOS,
_ => return Elf_abi::NONE
}
}
fn parse_endianness(bin: &Vec<u8>) -> Elf_endiannes {
return match bin[5] {
1 => return Elf_endiannes::LittleEndian,
_ => return Elf_endiannes::BigEndian
}
}
pub fn from_file(path: &str) -> Result<Elf> {
let bin = fs::read(path).expect("Failed to read path");
Elf::parse(bin)
} |
#[doc = "Reader of register MDMA_GISR0"]
pub type R = crate::R<u32, super::MDMA_GISR0>;
// NOTE: svd2rust-style generated register accessors for the GIF0 bit of
// MDMA_GISR0. The GIF1..GIF15 items below repeat this exact pattern for
// the other channel interrupt flags; do not hand-edit generated code.
#[doc = "GIF0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF0_A {
    #[doc = "0: No interrupt generated by channel\r\n x"]
    B_0X0 = 0,
    #[doc = "1: Interrupt generated by channel\r\n x"]
    B_0X1 = 1,
}
// Lets callers treat the enumerated field value as a plain flag.
impl From<GIF0_A> for bool {
    #[inline(always)]
    fn from(variant: GIF0_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `GIF0`"]
pub type GIF0_R = crate::R<bool, GIF0_A>;
impl GIF0_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> GIF0_A {
        match self.bits {
            false => GIF0_A::B_0X0,
            true => GIF0_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == GIF0_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == GIF0_A::B_0X1
    }
}
#[doc = "GIF1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF1_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF1_A> for bool {
#[inline(always)]
fn from(variant: GIF1_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF1`"]
pub type GIF1_R = crate::R<bool, GIF1_A>;
impl GIF1_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF1_A {
match self.bits {
false => GIF1_A::B_0X0,
true => GIF1_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF1_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF1_A::B_0X1
}
}
#[doc = "GIF2\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF2_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF2_A> for bool {
#[inline(always)]
fn from(variant: GIF2_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF2`"]
pub type GIF2_R = crate::R<bool, GIF2_A>;
impl GIF2_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF2_A {
match self.bits {
false => GIF2_A::B_0X0,
true => GIF2_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF2_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF2_A::B_0X1
}
}
#[doc = "GIF3\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF3_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF3_A> for bool {
#[inline(always)]
fn from(variant: GIF3_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF3`"]
pub type GIF3_R = crate::R<bool, GIF3_A>;
impl GIF3_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF3_A {
match self.bits {
false => GIF3_A::B_0X0,
true => GIF3_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF3_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF3_A::B_0X1
}
}
#[doc = "GIF4\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF4_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF4_A> for bool {
#[inline(always)]
fn from(variant: GIF4_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF4`"]
pub type GIF4_R = crate::R<bool, GIF4_A>;
impl GIF4_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF4_A {
match self.bits {
false => GIF4_A::B_0X0,
true => GIF4_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF4_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF4_A::B_0X1
}
}
#[doc = "GIF5\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF5_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF5_A> for bool {
#[inline(always)]
fn from(variant: GIF5_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF5`"]
pub type GIF5_R = crate::R<bool, GIF5_A>;
impl GIF5_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF5_A {
match self.bits {
false => GIF5_A::B_0X0,
true => GIF5_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF5_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF5_A::B_0X1
}
}
#[doc = "GIF6\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF6_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF6_A> for bool {
#[inline(always)]
fn from(variant: GIF6_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF6`"]
pub type GIF6_R = crate::R<bool, GIF6_A>;
impl GIF6_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF6_A {
match self.bits {
false => GIF6_A::B_0X0,
true => GIF6_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF6_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF6_A::B_0X1
}
}
#[doc = "GIF7\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF7_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF7_A> for bool {
#[inline(always)]
fn from(variant: GIF7_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF7`"]
pub type GIF7_R = crate::R<bool, GIF7_A>;
impl GIF7_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF7_A {
match self.bits {
false => GIF7_A::B_0X0,
true => GIF7_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF7_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF7_A::B_0X1
}
}
#[doc = "GIF8\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF8_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF8_A> for bool {
#[inline(always)]
fn from(variant: GIF8_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF8`"]
pub type GIF8_R = crate::R<bool, GIF8_A>;
impl GIF8_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF8_A {
match self.bits {
false => GIF8_A::B_0X0,
true => GIF8_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF8_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF8_A::B_0X1
}
}
#[doc = "GIF9\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF9_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF9_A> for bool {
#[inline(always)]
fn from(variant: GIF9_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF9`"]
pub type GIF9_R = crate::R<bool, GIF9_A>;
impl GIF9_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF9_A {
match self.bits {
false => GIF9_A::B_0X0,
true => GIF9_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF9_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF9_A::B_0X1
}
}
#[doc = "GIF10\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF10_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF10_A> for bool {
#[inline(always)]
fn from(variant: GIF10_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF10`"]
pub type GIF10_R = crate::R<bool, GIF10_A>;
impl GIF10_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF10_A {
match self.bits {
false => GIF10_A::B_0X0,
true => GIF10_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF10_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF10_A::B_0X1
}
}
#[doc = "GIF11\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF11_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF11_A> for bool {
#[inline(always)]
fn from(variant: GIF11_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF11`"]
pub type GIF11_R = crate::R<bool, GIF11_A>;
impl GIF11_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF11_A {
match self.bits {
false => GIF11_A::B_0X0,
true => GIF11_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF11_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF11_A::B_0X1
}
}
#[doc = "GIF12\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF12_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF12_A> for bool {
#[inline(always)]
fn from(variant: GIF12_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF12`"]
pub type GIF12_R = crate::R<bool, GIF12_A>;
impl GIF12_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF12_A {
match self.bits {
false => GIF12_A::B_0X0,
true => GIF12_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF12_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF12_A::B_0X1
}
}
#[doc = "GIF13\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF13_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF13_A> for bool {
#[inline(always)]
fn from(variant: GIF13_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF13`"]
pub type GIF13_R = crate::R<bool, GIF13_A>;
impl GIF13_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF13_A {
match self.bits {
false => GIF13_A::B_0X0,
true => GIF13_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF13_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF13_A::B_0X1
}
}
#[doc = "GIF14\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF14_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF14_A> for bool {
#[inline(always)]
fn from(variant: GIF14_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF14`"]
pub type GIF14_R = crate::R<bool, GIF14_A>;
impl GIF14_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF14_A {
match self.bits {
false => GIF14_A::B_0X0,
true => GIF14_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF14_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF14_A::B_0X1
}
}
#[doc = "GIF15\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF15_A {
#[doc = "0: No interrupt generated by channel\r\n x"]
B_0X0 = 0,
#[doc = "1: Interrupt generated by channel\r\n x"]
B_0X1 = 1,
}
impl From<GIF15_A> for bool {
#[inline(always)]
fn from(variant: GIF15_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `GIF15`"]
pub type GIF15_R = crate::R<bool, GIF15_A>;
impl GIF15_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GIF15_A {
match self.bits {
false => GIF15_A::B_0X0,
true => GIF15_A::B_0X1,
}
}
#[doc = "Checks if the value of the field is `B_0X0`"]
#[inline(always)]
pub fn is_b_0x0(&self) -> bool {
*self == GIF15_A::B_0X0
}
#[doc = "Checks if the value of the field is `B_0X1`"]
#[inline(always)]
pub fn is_b_0x1(&self) -> bool {
*self == GIF15_A::B_0X1
}
}
#[doc = "Reader of field `GIF16`"]
pub type GIF16_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF17`"]
pub type GIF17_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF18`"]
pub type GIF18_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF19`"]
pub type GIF19_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF20`"]
pub type GIF20_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF21`"]
pub type GIF21_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF22`"]
pub type GIF22_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF23`"]
pub type GIF23_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF24`"]
pub type GIF24_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF25`"]
pub type GIF25_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF26`"]
pub type GIF26_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF27`"]
pub type GIF27_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF28`"]
pub type GIF28_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF29`"]
pub type GIF29_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF30`"]
pub type GIF30_R = crate::R<bool, bool>;
#[doc = "Reader of field `GIF31`"]
pub type GIF31_R = crate::R<bool, bool>;
impl R {
    // Auto-generated (svd2rust-style) field accessors: `gifN()` extracts bit N
    // of the raw register value into a field reader. Only `gif15` carries
    // enumerated values; the others are plain boolean readers.
    #[doc = "Bit 0 - GIF0"]
    #[inline(always)]
    pub fn gif0(&self) -> GIF0_R {
        GIF0_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - GIF1"]
    #[inline(always)]
    pub fn gif1(&self) -> GIF1_R {
        GIF1_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - GIF2"]
    #[inline(always)]
    pub fn gif2(&self) -> GIF2_R {
        GIF2_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - GIF3"]
    #[inline(always)]
    pub fn gif3(&self) -> GIF3_R {
        GIF3_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - GIF4"]
    #[inline(always)]
    pub fn gif4(&self) -> GIF4_R {
        GIF4_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - GIF5"]
    #[inline(always)]
    pub fn gif5(&self) -> GIF5_R {
        GIF5_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - GIF6"]
    #[inline(always)]
    pub fn gif6(&self) -> GIF6_R {
        GIF6_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - GIF7"]
    #[inline(always)]
    pub fn gif7(&self) -> GIF7_R {
        GIF7_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - GIF8"]
    #[inline(always)]
    pub fn gif8(&self) -> GIF8_R {
        GIF8_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - GIF9"]
    #[inline(always)]
    pub fn gif9(&self) -> GIF9_R {
        GIF9_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10 - GIF10"]
    #[inline(always)]
    pub fn gif10(&self) -> GIF10_R {
        GIF10_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11 - GIF11"]
    #[inline(always)]
    pub fn gif11(&self) -> GIF11_R {
        GIF11_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 12 - GIF12"]
    #[inline(always)]
    pub fn gif12(&self) -> GIF12_R {
        GIF12_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 13 - GIF13"]
    #[inline(always)]
    pub fn gif13(&self) -> GIF13_R {
        GIF13_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bit 14 - GIF14"]
    #[inline(always)]
    pub fn gif14(&self) -> GIF14_R {
        GIF14_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 15 - GIF15"]
    #[inline(always)]
    pub fn gif15(&self) -> GIF15_R {
        GIF15_R::new(((self.bits >> 15) & 0x01) != 0)
    }
    #[doc = "Bit 16 - GIF16"]
    #[inline(always)]
    pub fn gif16(&self) -> GIF16_R {
        GIF16_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - GIF17"]
    #[inline(always)]
    pub fn gif17(&self) -> GIF17_R {
        GIF17_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 18 - GIF18"]
    #[inline(always)]
    pub fn gif18(&self) -> GIF18_R {
        GIF18_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 19 - GIF19"]
    #[inline(always)]
    pub fn gif19(&self) -> GIF19_R {
        GIF19_R::new(((self.bits >> 19) & 0x01) != 0)
    }
    #[doc = "Bit 20 - GIF20"]
    #[inline(always)]
    pub fn gif20(&self) -> GIF20_R {
        GIF20_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 21 - GIF21"]
    #[inline(always)]
    pub fn gif21(&self) -> GIF21_R {
        GIF21_R::new(((self.bits >> 21) & 0x01) != 0)
    }
    #[doc = "Bit 22 - GIF22"]
    #[inline(always)]
    pub fn gif22(&self) -> GIF22_R {
        GIF22_R::new(((self.bits >> 22) & 0x01) != 0)
    }
    #[doc = "Bit 23 - GIF23"]
    #[inline(always)]
    pub fn gif23(&self) -> GIF23_R {
        GIF23_R::new(((self.bits >> 23) & 0x01) != 0)
    }
    #[doc = "Bit 24 - GIF24"]
    #[inline(always)]
    pub fn gif24(&self) -> GIF24_R {
        GIF24_R::new(((self.bits >> 24) & 0x01) != 0)
    }
    #[doc = "Bit 25 - GIF25"]
    #[inline(always)]
    pub fn gif25(&self) -> GIF25_R {
        GIF25_R::new(((self.bits >> 25) & 0x01) != 0)
    }
    #[doc = "Bit 26 - GIF26"]
    #[inline(always)]
    pub fn gif26(&self) -> GIF26_R {
        GIF26_R::new(((self.bits >> 26) & 0x01) != 0)
    }
    #[doc = "Bit 27 - GIF27"]
    #[inline(always)]
    pub fn gif27(&self) -> GIF27_R {
        GIF27_R::new(((self.bits >> 27) & 0x01) != 0)
    }
    #[doc = "Bit 28 - GIF28"]
    #[inline(always)]
    pub fn gif28(&self) -> GIF28_R {
        GIF28_R::new(((self.bits >> 28) & 0x01) != 0)
    }
    #[doc = "Bit 29 - GIF29"]
    #[inline(always)]
    pub fn gif29(&self) -> GIF29_R {
        GIF29_R::new(((self.bits >> 29) & 0x01) != 0)
    }
    #[doc = "Bit 30 - GIF30"]
    #[inline(always)]
    pub fn gif30(&self) -> GIF30_R {
        GIF30_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 31 - GIF31"]
    #[inline(always)]
    pub fn gif31(&self) -> GIF31_R {
        GIF31_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
|
extern crate whois;
extern crate rustc_serialize;
use whois::WhoIs;
use rustc_serialize::json::Json;
/// Looks up the WHOIS record for google.com and prints every string-valued
/// entry of the resulting JSON object as `key: value`.
///
/// Panics if the lookup fails, the response is not valid JSON, or the
/// top-level JSON value is not an object.
fn main() {
    let data = WhoIs::new("google.com").lookup();
    let json = Json::from_str(&data.unwrap()).unwrap();
    let object = json.as_object().unwrap();
    for (key, value) in object {
        // Fixed idiom: the original buried a `break` inside a `format!` match
        // arm used as a `println!` argument and used `format!("{}", v)` where
        // the string can be printed directly. Behavior is preserved: printing
        // stops at the first non-string value.
        match *value {
            Json::String(ref v) => println!("{}: {}", key, v),
            _ => break,
        }
    }
}
|
use crate::errors::*;
use tempfile::NamedTempFile;
use std::io::copy;
// GET http://server.com/search/123 -> String(JSON) -> Vec<Struct>
// [{
// name: "123",
// url: jiji / id: 123
// }]
/// A song URL returned to the caller; plain `String` alias for readability.
type Url = String;
// Base address of the search/download backend.
// NOTE(review): hard-coded to localhost — presumably replaced in deployment; confirm.
const SERVER_URL: &str = "http://localhost:8000";
/// Top-level JSON payload returned by `GET {SERVER_URL}/search?q=...`.
#[derive(Deserialize)]
struct ServerResponse {
    pub results: Vec<SearchResult>,
}
/// One search hit as served by the backend.
#[derive(Deserialize)]
struct SearchResult {
    artist: String,
    title: String,
    /// May be absent in the server payload.
    genre: Option<String>,
    /// Server-relative path used to build the download URL.
    path: String,
}
/// Search online for a given keyword and either print a list of songs found or pick one of them and return its Url
///
/// pick: if `None`, the list with all fetched songs will be printed,
/// if `Some(i)`, the Url of the `i`th song will be returned
pub fn search(keyword: &str, pick: Option<usize>) -> Result<Option<Url>> {
    // TODO: add keyword escaping to avoid injections
    let mut response: reqwest::Response = reqwest::get(&format!("{}/search?q={}", SERVER_URL, keyword)).chain_err(|| "server unreachable")?;
    let result: ServerResponse = response.json().chain_err(|| "failed deserializing server response")?;
    if let Some(index) = pick {
        // An out-of-range pick surfaces as an error rather than a silent `None`.
        let path = &result.results.get(index).chain_err(|| "index out of bounds")?.path;
        Ok(Some(format!("{}/{}", SERVER_URL, path)))
    } else {
        // No pick requested: print an indexed listing so the user can choose.
        for (i, file) in result.results.iter().enumerate() {
            if let Some(genre) = file.genre.as_ref() {
                println!("{number:2}: {title} - {artist} ({genre})", number = i, title = file.title, artist = file.artist, genre = genre);
            } else {
                println!("{number:2}: {title} - {artist}", number = i, title = file.title, artist = file.artist);
            }
        }
        Ok(None)
    }
}
/// Try to download the given file
///
/// Streams the HTTP response body for `url` into a fresh temporary file and
/// returns the handle; the file is removed when the handle is dropped.
pub fn download_file(url: String) -> Result<NamedTempFile> {
    let mut dest = NamedTempFile::new()
        .chain_err(|| "could not create temporary file")?;
    let mut response = reqwest::get(&url)
        .chain_err(|| "could not retrieve .txt from server")?;
    // `io::copy` streams the body; avoids buffering the whole file in memory.
    copy(&mut response, dest.as_file_mut())
        .chain_err(|| "could not write to .txt file")?;
    Ok(dest)
}
|
#![feature(test)]
extern crate test;
extern crate delight_book;
use delight_book::chapter5::*;
use delight_book::*;
use std::mem::transmute;
use std::borrow::BorrowMut;
/// https://blog.knoldus.com/safe-way-to-access-private-fields-in-rust/
/// mod delight_book{
/// #[derive(Default)]
/// pub struct c8{
/// }
/// impl c8 {
/// pub fn new(value:c8) -> c8{
/// assert!(value<=c8::MAX.0 && value >=c8::MIN.0);
/// c8(value)
/// }
/// }
/// }
///
/// struct local_c8{}
#[bench]
fn bench_counts(b: &mut test::Bencher) {
    // Benchmark the divide-and-conquer popcount on a fixed input.
    b.iter(|| {
        // `_` instead of `i`: the loop variable was unused (compiler warning).
        for _ in 0..100 {
            assert_eq!(counts_divide_and_conquer(1), 1);
        }
    })
}
#[bench]
fn bench_counts_pop(b: &mut test::Bencher) {
    // Benchmark the plain popcount on a fixed input.
    b.iter(|| {
        // `_` instead of `i`: the loop variable was unused (compiler warning).
        for _ in 0..100 {
            assert_eq!(counts_pop(1), 1);
        }
    })
}
#[bench]
fn bench_counts_pop_array(b: &mut test::Bencher) {
    // Benchmark the array popcount over a two-element slice.
    b.iter(|| {
        // `_` instead of `i`: the loop variable was unused (compiler warning).
        for _ in 0..100 {
            assert_eq!(counts_pop_array([1, 2].borrow_mut(), 1), 1);
        }
    })
}
#[bench]
fn bench_counts_pop_hard(b: &mut test::Bencher) {
    // Flattened (value, expected popcount) pairs.
    // Renamed from non-snake-case `TEST` to silence the compiler warning.
    let cases: Vec<i64> = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7, 0x80, 1, 0x81, 2, 0xfe, 7, 0xff, 8,
        0x4000, 1, 0x4001, 2, 0x7000, 3, 0x7fff, 15,
        0x55555555, 16, 0xAAAAAAAA, 16, 0xFF000000, 8, 0xC0C0C0C0, 8,
        0x0FFFFFF0, 24, 0x80000000, 1, 0xFFFFFFFF, 32];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop(cases[2 * i]), cases[2 * i + 1])
        }
    });
}
#[bench]
fn bench_counts_pop1_hard(b: &mut test::Bencher) {
    // Flattened (value, expected popcount) pairs; `cases` replaces the
    // non-snake-case `TEST` local (compiler warning).
    let cases: Vec<i64> = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7, 0x80, 1, 0x81, 2, 0xfe, 7, 0xff, 8,
        0x4000, 1, 0x4001, 2, 0x7000, 3, 0x7fff, 15,
        0x55555555, 16, 0xAAAAAAAA, 16, 0xFF000000, 8, 0xC0C0C0C0, 8,
        0x0FFFFFF0, 24, 0x80000000, 1, 0xFFFFFFFF, 32];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop1(cases[2 * i]), cases[2 * i + 1])
        }
    });
}
#[bench]
fn bench_counts_pop2_hard(b: &mut test::Bencher) {
    // Flattened (value, expected popcount) pairs; `cases` replaces the
    // non-snake-case `TEST` local (compiler warning).
    let cases: Vec<i64> = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7, 0x80, 1, 0x81, 2, 0xfe, 7, 0xff, 8,
        0x4000, 1, 0x4001, 2, 0x7000, 3, 0x7fff, 15,
        0x55555555, 16, 0xAAAAAAAA, 16, 0xFF000000, 8, 0xC0C0C0C0, 8,
        0x0FFFFFF0, 24, 0x80000000, 1, 0xFFFFFFFF, 32];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop2(cases[2 * i]), cases[2 * i + 1])
        }
    });
}
#[bench]
fn bench_counts_pop3_hard(b: &mut test::Bencher) {
    // Flattened (value, expected popcount) pairs; `cases` replaces the
    // non-snake-case `TEST` local (compiler warning).
    let cases: Vec<i64> = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7, 0x80, 1, 0x81, 2, 0xfe, 7, 0xff, 8,
        0x4000, 1, 0x4001, 2, 0x7000, 3, 0x7fff, 15,
        0x55555555, 16, 0xAAAAAAAA, 16, 0xFF000000, 8, 0xC0C0C0C0, 8,
        0x0FFFFFF0, 24, 0x80000000, 1, 0xFFFFFFFF, 32];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop3(cases[2 * i]), cases[2 * i + 1])
        }
    });
}
#[bench]
fn bench_counts_pop4_hard(b: &mut test::Bencher) {
    // Flattened (value, expected popcount) pairs; `cases` replaces the
    // non-snake-case `TEST` local (compiler warning).
    let cases: Vec<i64> = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7, 0x80, 1, 0x81, 2, 0xfe, 7, 0xff, 8,
        0x4000, 1, 0x4001, 2, 0x7000, 3, 0x7fff, 15,
        0x55555555, 16, 0xAAAAAAAA, 16, 0xFF000000, 8, 0xC0C0C0C0, 8,
        0x0FFFFFF0, 24, 0x80000000, 1, 0xFFFFFFFF, 32];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop4(cases[2 * i]), cases[2 * i + 1])
        }
    });
}
#[bench]
fn bench_counts_pop5_hard(b: &mut test::Bencher) {
    // Flattened (value, expected popcount) pairs; `cases` replaces the
    // non-snake-case `TEST` local (compiler warning). Element type is
    // inferred from `counts_pop5`'s argument type, as before.
    let cases = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7, 0x80, 1, 0x81, 2, 0xfe, 7, 0xff, 8,
        0x4000, 1, 0x4001, 2, 0x7000, 3, 0x7fff, 15,
        0x55555555, 16, 0xAAAAAAAA, 16, 0xFF000000, 8, 0xC0C0C0C0, 8,
        0x0FFFFFF0, 24, 0x80000000, 1, 0xFFFFFFFF, 32];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop5(cases[2 * i]), cases[2 * i + 1] as i32)
        }
    });
}
#[bench]
fn bench_counts_pop5a_hard(b: &mut test::Bencher) {
    // Flattened (value, expected popcount) pairs; `cases` replaces the
    // non-snake-case `TEST` local (compiler warning).
    let cases = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7, 0x80, 1, 0x81, 2, 0xfe, 7, 0xff, 8,
        0x4000, 1, 0x4001, 2, 0x7000, 3, 0x7fff, 15,
        0x55555555, 16, 0xAAAAAAAA, 16, 0xFF000000, 8, 0xC0C0C0C0, 8,
        0x0FFFFFF0, 24, 0x80000000, 1, 0xFFFFFFFF, 32];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop5a(cases[2 * i]), cases[2 * i + 1])
        }
    });
}
#[bench]
fn bench_counts_pop6_hard(b: &mut test::Bencher) {
    // Flattened (value, expected popcount) pairs; `cases` replaces the
    // non-snake-case `TEST` local (compiler warning).
    let cases: Vec<i64> = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7, 0x80, 1, 0x81, 2, 0xfe, 7, 0xff, 8,
        0x4000, 1, 0x4001, 2, 0x7000, 3, 0x7fff, 15,
        0x55555555, 16, 0xAAAAAAAA, 16, 0xFF000000, 8, 0xC0C0C0C0, 8,
        0x0FFFFFF0, 24, 0x80000000, 1, 0xFFFFFFFF, 32];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop6(cases[2 * i]), cases[2 * i + 1])
        }
    });
}
#[bench]
fn bench_counts_pop7_hard(b: &mut test::Bencher) {
    // Smaller case table than the other `_hard` benches — presumably pop7
    // only handles small values; confirm against its implementation.
    // `cases` replaces the non-snake-case `TEST` local (compiler warning).
    let cases = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7, 0x80, 1, 0x81, 2, 0xfe, 7];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop7(cases[2 * i]), cases[2 * i + 1])
        }
    });
}
#[bench]
#[allow(overflowing_literals)]
fn bench_counts_pop8_hard(b: &mut test::Bencher) {
    // `cases` replaces the non-snake-case `TEST` local (compiler warning).
    let cases = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop8(cases[2 * i]), cases[2 * i + 1])
        }
    });
}
#[bench]
fn bench_counts_pop9_hard(b: &mut test::Bencher) {
    // `cases` replaces the non-snake-case `TEST` local (compiler warning).
    let cases = vec![0, 0, 1, 1, 2, 1, 3, 2, 4, 1, 5, 2, 6, 2, 7, 3,
        8, 1, 9, 2, 10, 2, 11, 3, 12, 2, 13, 3, 14, 3, 15, 4, 16, 1, 17, 2,
        0x3F, 6, 0x40, 1, 0x41, 2, 0x7f, 7, 0x80, 1, 0x81, 2, 0xfe, 7];
    b.iter(|| {
        let n = cases.len() / 2;
        for i in 0..n {
            assert_eq!(counts_pop9(cases[2 * i]), cases[2 * i + 1])
        }
    });
}
#[bench]
fn bench_counts_pop_array_hard(b: &mut test::Bencher) {
    let n = 10000;
    // Renamed `A` -> `a` (non-snake-case warning); loop index `i` was unused.
    let mut a: Vec<i64> = Vec::with_capacity(n);
    for _ in 0..n { a.push(0xffffffff); }
    // Reference sum computed once with the scalar popcount.
    let mut s1 = 0;
    for i in 0..n {
        s1 = s1 + counts_pop(a[i]);
    }
    // BUG FIX: the original never called `b.iter`, so this #[bench] timed
    // nothing. The array popcount now runs inside the measured closure and is
    // checked against the scalar reference each iteration.
    b.iter(|| {
        let s2 = counts_pop_array(a.borrow_mut(), n as i64);
        assert_eq!(s1, s2);
    });
}
#[bench]
#[allow(overflowing_literals)]
fn bench_counts_popDiff(b: &mut test::Bencher) {
    // `cases` replaces the non-snake-case `TEST` local (compiler warning).
    let cases = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 0x3F, 0x40, 0x41, 0x7f, 0x80, 0x81, 0xfe, 0xff,
        0x4000, 0x4001, 0x7000, 0x7fff, 0x55555555, 0xAAAAAAAA, 0xFF000000, 0xC0C0C0C0, 0x0FFFFFF0, 0x80000000, 0xFFFFFFFE, 0xFFFFFFFF];
    b.iter(|| {
        // NOTE(review): only the first len/4 values are cross-checked (quadratic
        // cost cap, presumably) — confirm this is intentional.
        let n = cases.len() / 4;
        for x in 0..n {
            for y in 0..n {
                assert_eq!(counts_popDiff(cases[x], cases[y]), (counts_pop(cases[x] as i64) - counts_pop(cases[y] as i64)) as i32);
            }
        }
    });
}
#[bench]
#[allow(overflowing_literals)]
fn bench_counts_popCmpr(b: &mut test::Bencher) {
    // `cases` replaces the non-snake-case `TEST` local (compiler warning).
    let cases = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 0x3F, 0x40, 0x41, 0x7f, 0x80, 0x81, 0xfe, 0xff,
        0x4000, 0x4001, 0x7000, 0x7fff, 0x55555555, 0xAAAAAAAA, 0xFF000000, 0xC0C0C0C0, 0x0FFFFFF0, 0x80000000, 0xFFFFFFFE, 0xFFFFFFFF];
    b.iter(|| {
        let n = cases.len() / 4;
        for x in 0..n {
            for y in 0..n {
                let a = counts_pop(cases[x]);
                let b = counts_pop(cases[y]);
                let c = counts_popCmpr(cases[x] as i32, cases[y] as i32);
                // The comparison's sign must agree with the ordering of the
                // two reference popcounts.
                assert_eq!(a > b && c > 0 || a < b && c < 0 || a == b && c == 0, true);
            }
        }
    });
}
#[bench]
#[allow(overflowing_literals)]
fn bench_counts_parity(b: &mut test::Bencher) {
let TEST = [0,0, 1,1, 2,1, 3,0, 4,1, 5,0,
6,0, 7,1, 8,1, 9,0, 10,0, 11,1, 12,0, 13,1, 14,1,
15,0, 16,1, 17,0, 18,0, 19,1, 20,0, 21,1, 22,1, 23,0,
24,0, 25,1, 26,1, 27,0, 28,1, 29,0, 30,0, 31,1,
0x55555555,0, 0xAAAAAAAA,0, 0x77777770,1,
0x80000000,1, 0x80000001,0, 0xFFFFFFFE,1, 0xFFFFFFFF,0,0,0, 1,0x81, 2,0x82, 3,3, 4,0x84,
5,5, 6,6, 7,0x87, 8,0x88, 9,9, 10,10, 11,0x8B, 12,12,
13,0x8D, 14,0x8E, 15,15, 16,0x90, 0x7E,0x7E, 0x7F,0xFF];
for i in (0..116/2).step_by(2){
assert_eq!(counts_parity1(TEST[i]),TEST[i+1]);
assert_eq!(counts_parity1a(TEST[i]),TEST[i+1]);
assert_eq!(counts_parity2(TEST[i]),TEST[i+1]);
// assert_eq!(counts_parity3(TEST[i] as i32),TEST[i+1] as i32);
// assert_eq!(counts_parity4(TEST[i]),TEST[i+1]);
}
} |
extern crate futures;
extern crate log4rs;
extern crate mta_status;
extern crate net2;
extern crate num_cpus;
extern crate tokio_core;
extern crate hyper;
use tokio_core::reactor::Handle;
use hyper::server::{Request, Response};
use hyper::{Method, StatusCode};
use hyper::server::Service;
use hyper::header::Headers;
use futures::Future;
// Custom typed response header: used below to allow CORS from any origin.
header! { (AccessControl, "Access-Control-Allow-Origin") => [String] }
/// Hyper service that serves the current MTA status.
pub struct GetStatus {
    // Tokio-core reactor handle used to drive the status-fetch future.
    _handle: Handle,
}
impl GetStatus {
    /// Creates the service around an event-loop handle.
    pub fn new(handle: Handle) -> Self {
        GetStatus { _handle: handle }
    }
}
impl Service for GetStatus {
    type Request = hyper::server::Request;
    type Response = hyper::server::Response;
    type Error = hyper::Error;
    // Boxed trait-object future: pre-async/await hyper 0.11 style.
    type Future = Box<Future<Item = hyper::server::Response, Error = hyper::Error>>;
    /// Routes `GET /` to the asynchronous MTA status fetch; every other
    /// method/path combination gets a 404 with body "no path".
    fn call(&self, req: Request) -> Self::Future {
        let resp = Response::new();
        match (req.method(), req.path()) {
            (&Method::Get, "/") => {
                // On success, attach the permissive CORS header and a 200.
                let status = mta_status::get_status(&self._handle).map(|stat| {
                    let mut headers = Headers::new();
                    headers.set(AccessControl("*".to_owned()));
                    resp.with_body(stat).with_headers(headers).with_status(
                        StatusCode::Ok,
                    )
                });
                Box::new(status)
            }
            _ => Box::new(futures::future::ok(
                resp.with_body("no path").with_status(StatusCode::NotFound),
            )),
        }
    }
}
|
/// GLSL ES 3.10 fragment shader: emits the interpolated per-vertex color
/// unchanged, with full opacity.
pub const FRAGMENT_SHADER_SOURCE: &str = r#"
#version 310 es
out highp vec4 FragColor;
in highp vec3 color;
void main()
{
    // Set the fragment color to the color passed from the vertex shader
    FragColor = vec4(color, 1.0);
}
"#;
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
use clap::{Parser, ValueEnum};
use std::path::PathBuf;
use diskann::{
common::ANNResult,
index::create_inmem_index,
model::{
vertex::{DIM_104, DIM_128, DIM_256},
IndexConfiguration, IndexWriteParametersBuilder,
},
utils::round_up,
utils::{load_metadata_from_file, Timer},
};
use vector::{FullPrecisionDistance, Half, Metric};
/// The main function to build an in-memory index
///
/// Builds a DiskANN in-memory index over the vectors in `data_path` using the
/// given graph parameters (`r` = max out-degree, `l` = build candidate list
/// size, `alpha` = pruning density factor), then saves it to `save_path`.
///
/// # Errors
/// Returns an error if the data file metadata can't be read, or if index
/// construction or saving fails.
#[allow(clippy::too_many_arguments)]
fn build_in_memory_index<T>(
    metric: Metric,
    data_path: &str,
    r: u32,
    l: u32,
    alpha: f32,
    save_path: &str,
    num_threads: u32,
    _use_pq_build: bool,
    _num_pq_bytes: usize,
    use_opq: bool,
) -> ANNResult<()>
where
    T: Default + Copy + Sync + Send + Into<f32>,
    [T; DIM_104]: FullPrecisionDistance<T, DIM_104>,
    [T; DIM_128]: FullPrecisionDistance<T, DIM_128>,
    [T; DIM_256]: FullPrecisionDistance<T, DIM_256>,
{
    let index_write_parameters = IndexWriteParametersBuilder::new(l, r)
        .with_alpha(alpha)
        .with_saturate_graph(false)
        .with_num_threads(num_threads)
        .build();
    // (number of points, dimensionality) read from the file header.
    let (data_num, data_dim) = load_metadata_from_file(data_path)?;
    let config = IndexConfiguration::new(
        metric,
        data_dim,
        // Aligned dimension is rounded up to a multiple of 8.
        round_up(data_dim as u64, 8_u64) as usize,
        data_num,
        false,
        0,
        use_opq,
        0,
        1f32,
        index_write_parameters,
    );
    let mut index = create_inmem_index::<T>(config)?;
    // Time only the build phase, not the save.
    let timer = Timer::new();
    index.build(data_path, data_num)?;
    let diff = timer.elapsed();
    println!("Indexing time: {}", diff.as_secs_f64());
    index.save(save_path)?;
    Ok(())
}
/// CLI entry point: parses the arguments, dispatches to the element type's
/// monomorphized build routine, and reports success or failure.
fn main() -> ANNResult<()> {
    let args = BuildMemoryIndexArgs::parse();
    // PQ build is requested whenever a nonzero PQ byte budget is given.
    let _use_pq_build = args.build_pq_bytes > 0;
    println!(
        "Starting index build with R: {} Lbuild: {} alpha: {} #threads: {}",
        args.max_degree, args.l_build, args.alpha, args.num_threads
    );
    let data_path = args.data_path.to_string_lossy();
    let build_result = match args.data_type {
        DataType::Float => build_in_memory_index::<f32>(
            args.dist_fn,
            &data_path,
            args.max_degree,
            args.l_build,
            args.alpha,
            &args.index_path_prefix,
            args.num_threads,
            _use_pq_build,
            args.build_pq_bytes,
            args.use_opq,
        ),
        DataType::FP16 => build_in_memory_index::<Half>(
            args.dist_fn,
            &data_path,
            args.max_degree,
            args.l_build,
            args.alpha,
            &args.index_path_prefix,
            args.num_threads,
            _use_pq_build,
            args.build_pq_bytes,
            args.use_opq,
        ),
    };
    if let Err(err) = &build_result {
        eprintln!("Error: {:?}", err);
    } else {
        println!("Index build completed successfully");
    }
    build_result
}
// Element type of the input vectors; selects the monomorphization of
// `build_in_memory_index` in `main`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum, Debug)]
enum DataType {
    /// Float data type.
    Float,
    /// Half data type.
    FP16,
}
// Command-line arguments for the in-memory index build tool.
// NOTE: the `///` doc comments below become clap's --help text, so they are
// left untouched here; review notes use `//` instead.
#[derive(Debug, Parser)]
struct BuildMemoryIndexArgs {
    // NOTE(review): the help text mentions int8/uint8 and "(required)", but
    // the `DataType` enum only offers float/fp16 and a default exists —
    // confirm and update the help string.
    /// data type <int8/uint8/float / fp16> (required)
    #[arg(long = "data_type", default_value = "float")]
    pub data_type: DataType,
    /// Distance function to use.
    #[arg(long = "dist_fn", default_value = "l2")]
    pub dist_fn: Metric,
    /// Path to the data file. The file should be in the format specified by the `data_type` argument.
    #[arg(long = "data_path", short, required = true)]
    pub data_path: PathBuf,
    /// Path to the index file. The index will be saved to this prefixed name.
    #[arg(long = "index_path_prefix", short, required = true)]
    pub index_path_prefix: String,
    /// Number of max out degree from a vertex.
    #[arg(long = "max_degree", short = 'R', default_value = "64")]
    pub max_degree: u32,
    /// Number of candidates to consider when building out edges
    #[arg(long = "l_build", short = 'L', default_value = "100")]
    pub l_build: u32,
    /// alpha controls density and diameter of graph, set 1 for sparse graph, 1.2 or 1.4 for denser graphs with lower diameter
    #[arg(long, short, default_value = "1.2")]
    pub alpha: f32,
    /// Number of threads to use.
    #[arg(long = "num_threads", short = 'T', default_value = "1")]
    pub num_threads: u32,
    /// Number of PQ bytes to build the index; 0 for full precision build
    #[arg(long = "build_pq_bytes", short, default_value = "0")]
    pub build_pq_bytes: usize,
    /// Set true for OPQ compression while using PQ distance comparisons for building the index, and false for PQ compression
    #[arg(long = "use_opq", short, default_value = "false")]
    pub use_opq: bool,
}
|
use structopt::StructOpt;
use typos_cli::config;
// Output verbosity levels. `arg_enum!` additionally generates `FromStr` and
// `variants()` so the value can be parsed straight from the command line.
arg_enum! {
    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    pub enum Format {
        Silent,
        Brief,
        Long,
        Json,
    }
}
impl Format {
    /// Builds the report sink corresponding to this output format,
    /// wiring in the stdout/stderr color palettes where relevant.
    pub(crate) fn reporter(
        self,
        stdout_palette: crate::report::Palette,
        stderr_palette: crate::report::Palette,
    ) -> Box<dyn typos_cli::report::Report> {
        match self {
            // Silent and Json reporters take no palettes.
            Format::Silent => Box::new(crate::report::PrintSilent),
            Format::Brief => Box::new(crate::report::PrintBrief {
                stdout_palette,
                stderr_palette,
            }),
            Format::Long => Box::new(crate::report::PrintLong {
                stdout_palette,
                stderr_palette,
            }),
            Format::Json => Box::new(crate::report::PrintJson),
        }
    }
}
impl Default for Format {
fn default() -> Self {
Format::Long
}
}
// Top-level CLI arguments. The `///` comments double as --help text under
// StructOpt, so only `//` comments are added here.
#[derive(Debug, StructOpt)]
#[structopt(rename_all = "kebab-case")]
#[structopt(
    setting = structopt::clap::AppSettings::UnifiedHelpMessage,
    setting = structopt::clap::AppSettings::DeriveDisplayOrder,
    setting = structopt::clap::AppSettings::DontCollapseArgsInUsage
)]
// The "mode" group makes diff/write/files/identifiers/words/dump-config/
// type-list mutually exclusive.
#[structopt(group = structopt::clap::ArgGroup::with_name("mode").multiple(false))]
pub(crate) struct Args {
    #[structopt(parse(from_os_str), default_value = ".")]
    /// Paths to check with `-` for stdin
    pub(crate) path: Vec<std::path::PathBuf>,
    #[structopt(short = "c", long = "config")]
    /// Custom config file
    pub(crate) custom_config: Option<std::path::PathBuf>,
    #[structopt(long)]
    /// Ignore implicit configuration files.
    pub(crate) isolated: bool,
    #[structopt(long, group = "mode")]
    /// Print a diff of what would change
    pub(crate) diff: bool,
    #[structopt(long, short = "w", group = "mode")]
    /// Write fixes out
    pub(crate) write_changes: bool,
    #[structopt(long, group = "mode")]
    /// Debug: Print each file that would be spellchecked.
    pub(crate) files: bool,
    #[structopt(long, group = "mode")]
    /// Debug: Print each identifier that would be spellchecked.
    pub(crate) identifiers: bool,
    #[structopt(long, group = "mode")]
    /// Debug: Print each word that would be spellchecked.
    pub(crate) words: bool,
    #[structopt(long, group = "mode")]
    /// Write the current configuration to file with `-` for stdout
    pub(crate) dump_config: Option<std::path::PathBuf>,
    #[structopt(long, group = "mode")]
    /// Show all supported file types.
    pub(crate) type_list: bool,
    #[structopt(
        long,
        possible_values(&Format::variants()),
        case_insensitive(true),
        default_value("long")
    )]
    pub(crate) format: Format,
    #[structopt(short = "j", long = "threads", default_value = "0")]
    /// The approximate number of threads to use.
    pub(crate) threads: usize,
    // Flattened sub-groups of options defined below / in sibling modules.
    #[structopt(flatten)]
    pub(crate) config: ConfigArgs,
    #[structopt(flatten)]
    pub(crate) color: crate::color::ColorArgs,
    #[structopt(flatten)]
    pub(crate) verbose: clap_verbosity_flag::Verbosity,
}
// Per-file behavior flags. Each positive flag has a hidden counterpart
// (via `overrides_with`) so the last one on the command line wins;
// `resolve_bool_arg` collapses each pair into an Option<bool>.
#[derive(Debug, Clone, StructOpt)]
#[structopt(rename_all = "kebab-case")]
pub(crate) struct FileArgs {
    #[structopt(long, overrides_with("no-binary"))]
    /// Search binary files.
    binary: bool,
    #[structopt(long, overrides_with("binary"), hidden(true))]
    no_binary: bool,
    #[structopt(long, overrides_with("check-filenames"))]
    /// Skip verifying spelling in file names.
    no_check_filenames: bool,
    #[structopt(long, overrides_with("no-check-filenames"), hidden(true))]
    check_filenames: bool,
    #[structopt(long, overrides_with("check-files"))]
    /// Skip verifying spelling in files.
    no_check_files: bool,
    #[structopt(long, overrides_with("no-check-files"), hidden(true))]
    check_files: bool,
    #[structopt(long, overrides_with("no-unicode"), hidden(true))]
    unicode: bool,
    #[structopt(long, overrides_with("unicode"))]
    /// Only allow ASCII characters in identifiers
    no_unicode: bool,
    #[structopt(
        long,
        possible_values(&config::Locale::variants()),
    )]
    pub(crate) locale: Option<config::Locale>,
}
impl FileArgs {
    /// Converts the parsed flags into an engine config; unset pairs become
    /// `None` so lower-priority config sources can fill them in.
    pub fn to_config(&self) -> config::EngineConfig {
        config::EngineConfig {
            binary: self.binary(),
            check_filename: self.check_filename(),
            check_file: self.check_file(),
            tokenizer: Some(config::TokenizerConfig {
                unicode: self.unicode(),
                ..Default::default()
            }),
            dict: Some(config::DictConfig {
                locale: self.locale,
                ..Default::default()
            }),
        }
    }
    // Each helper collapses a positive/negative flag pair into a tri-state.
    fn binary(&self) -> Option<bool> {
        resolve_bool_arg(self.binary, self.no_binary)
    }
    fn check_filename(&self) -> Option<bool> {
        resolve_bool_arg(self.check_filenames, self.no_check_filenames)
    }
    fn unicode(&self) -> Option<bool> {
        resolve_bool_arg(self.unicode, self.no_unicode)
    }
    fn check_file(&self) -> Option<bool> {
        resolve_bool_arg(self.check_files, self.no_check_files)
    }
}
// Aggregates walk-related and file-related flag groups into one flattened
// CLI section.
#[derive(Debug, StructOpt)]
#[structopt(rename_all = "kebab-case")]
pub(crate) struct ConfigArgs {
    #[structopt(flatten)]
    walk: WalkArgs,
    #[structopt(flatten)]
    overrides: FileArgs,
}
impl ConfigArgs {
    /// Builds a partial `config::Config` from the CLI flags; unset options
    /// stay at their defaults so file-based config can override them.
    pub fn to_config(&self) -> config::Config {
        config::Config {
            files: self.walk.to_config(),
            overrides: self.overrides.to_config(),
            ..Default::default()
        }
    }
}
// Directory-walking flags. As with FileArgs, every flag has a hidden
// `overrides_with` twin so repeated/conflicting flags resolve to the last one.
#[derive(Debug, StructOpt)]
#[structopt(rename_all = "kebab-case")]
pub(crate) struct WalkArgs {
    #[structopt(long, name = "GLOB")]
    /// Ignore files & directories matching the glob.
    exclude: Vec<String>,
    #[structopt(long, overrides_with("no-hidden"))]
    /// Search hidden files and directories.
    hidden: bool,
    #[structopt(long, overrides_with("hidden"), hidden(true))]
    no_hidden: bool,
    #[structopt(long, overrides_with("ignore"))]
    /// Don't respect ignore files.
    no_ignore: bool,
    #[structopt(long, overrides_with("no-ignore"), hidden(true))]
    ignore: bool,
    #[structopt(long, overrides_with("ignore-dot"))]
    /// Don't respect .ignore files.
    no_ignore_dot: bool,
    #[structopt(long, overrides_with("no-ignore-dot"), hidden(true))]
    ignore_dot: bool,
    #[structopt(long, overrides_with("ignore-global"))]
    /// Don't respect global ignore files.
    no_ignore_global: bool,
    #[structopt(long, overrides_with("no-ignore-global"), hidden(true))]
    ignore_global: bool,
    #[structopt(long, overrides_with("ignore-parent"))]
    /// Don't respect ignore files in parent directories.
    no_ignore_parent: bool,
    #[structopt(long, overrides_with("no-ignore-parent"), hidden(true))]
    ignore_parent: bool,
    #[structopt(long, overrides_with("ignore-vcs"))]
    /// Don't respect ignore files in vcs directories.
    no_ignore_vcs: bool,
    #[structopt(long, overrides_with("no-ignore-vcs"), hidden(true))]
    ignore_vcs: bool,
}
impl WalkArgs {
    /// Converts the parsed walk flags into a partial `config::Walk`;
    /// unset flag pairs become `None`.
    pub fn to_config(&self) -> config::Walk {
        config::Walk {
            extend_exclude: self.exclude.clone(),
            ignore_hidden: self.ignore_hidden(),
            ignore_files: self.ignore_files(),
            ignore_dot: self.ignore_dot(),
            ignore_vcs: self.ignore_vcs(),
            ignore_global: self.ignore_global(),
            ignore_parent: self.ignore_parent(),
        }
    }
    // Note the argument order: the config semantics are "ignore_*", so the
    // flag that ENABLES ignoring maps to the first (yes) slot.
    fn ignore_hidden(&self) -> Option<bool> {
        resolve_bool_arg(self.no_hidden, self.hidden)
    }
    fn ignore_files(&self) -> Option<bool> {
        resolve_bool_arg(self.ignore, self.no_ignore)
    }
    fn ignore_dot(&self) -> Option<bool> {
        resolve_bool_arg(self.ignore_dot, self.no_ignore_dot)
    }
    fn ignore_vcs(&self) -> Option<bool> {
        resolve_bool_arg(self.ignore_vcs, self.no_ignore_vcs)
    }
    fn ignore_global(&self) -> Option<bool> {
        resolve_bool_arg(self.ignore_global, self.no_ignore_global)
    }
    fn ignore_parent(&self) -> Option<bool> {
        resolve_bool_arg(self.ignore_parent, self.no_ignore_parent)
    }
}
/// Collapses a mutually-exclusive `--foo` / `--no-foo` flag pair into a
/// tri-state: `Some(true)` when set, `Some(false)` when negated, and `None`
/// when neither flag was given.
fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
    if yes && no {
        // clap's `overrides_with` guarantees the two flags never co-occur.
        unreachable!("StructOpt should make this impossible")
    }
    if yes {
        Some(true)
    } else if no {
        Some(false)
    } else {
        None
    }
}
|
pub(crate) use std::{
collections::BTreeMap,
env, fs,
io::Write,
iter,
path::Path,
process::{Command, Stdio},
str,
};
pub(crate) use executable_path::executable_path;
pub(crate) use just::unindent;
pub(crate) use libc::{EXIT_FAILURE, EXIT_SUCCESS};
pub(crate) use test_utilities::{assert_stdout, tempdir, tmptree};
pub(crate) use which::which;
|
use crate::enums::{Align, CallbackTrigger, Color, Damage, Event, Font, FrameType, Key, LabelType};
use crate::image::Image;
use crate::prelude::*;
use crate::utils::FlString;
use fltk_sys::text::*;
use std::{
ffi::{CStr, CString},
mem,
os::raw,
sync::atomic::{AtomicUsize, Ordering},
};
/// Defines the text cursor styles supported by fltk
///
/// The `#[repr(i32)]` discriminants (0..=5, in declaration order) match the
/// values expected by the underlying FLTK API.
#[repr(i32)]
// Added `Eq`: a fieldless `PartialEq` enum is trivially `Eq` (clippy:
// derive_partial_eq_without_eq), enabling use in `HashMap`/exact matching.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Cursor {
    /// Normal
    Normal,
    /// Caret
    Caret,
    /// Dim
    Dim,
    /// Block
    Block,
    /// Heavy
    Heavy,
    /// Simple
    Simple,
}
/// Wraps a text buffer, Cloning a text buffer invalidates the underlying pointer, thus the no derive(Clone)
#[derive(Debug)]
pub struct TextBuffer {
    // Raw pointer to the FLTK C++ Fl_Text_Buffer; checked non-null before use.
    inner: *mut Fl_Text_Buffer,
    // Reference count shared across handles to the same buffer.
    refcount: AtomicUsize,
}
impl TextBuffer {
    /// Initializes a default (empty) text buffer
    pub fn default() -> TextBuffer {
        unsafe {
            let text_buffer = Fl_Text_Buffer_new();
            assert!(!text_buffer.is_null());
            TextBuffer {
                inner: text_buffer,
                // NOTE(review): starts at 2, not 1 — presumably accounts for a
                // widget-side reference; confirm against the crate's refcounting.
                refcount: AtomicUsize::new(2),
            }
        }
    }
    /// Deletes the `TextBuffer`
    /// # Safety
    /// The buffer shouldn't be deleted while the Display widget still needs it
    pub unsafe fn delete(mut buf: Self) {
        Fl_Text_Buffer_delete(buf.inner);
        // Null out the pointer so any later (buggy) use trips the asserts.
        buf.inner = std::ptr::null_mut::<Fl_Text_Buffer>();
    }
    /// Deletes the `TextBuffer`
    ///
    /// Functionally identical to [`TextBuffer::delete`]; kept as a separate
    /// name, presumably for API compatibility — confirm before removing.
    /// # Safety
    /// The buffer shouldn't be deleted while the Display widget still needs it
    pub unsafe fn delete_buffer(mut buf: TextBuffer) {
        Fl_Text_Buffer_delete(buf.inner);
        buf.inner = std::ptr::null_mut::<Fl_Text_Buffer>();
    }
    /// Initializes a text buffer from a pointer
    /// # Safety
    /// The pointer must be valid
    pub unsafe fn from_ptr(ptr: *mut Fl_Text_Buffer) -> Self {
        assert!(!ptr.is_null());
        TextBuffer {
            inner: ptr,
            // Same initial refcount as `default()`.
            refcount: AtomicUsize::new(2),
        }
    }
    /// Returns the inner pointer from a text buffer
    /// # Safety
    /// Can return multiple mutable pointers to the same buffer
    pub unsafe fn as_ptr(&self) -> *mut Fl_Text_Buffer {
        self.inner
    }
    /// Sets the text of the buffer, replacing any previous contents
    pub fn set_text(&mut self, txt: &str) {
        assert!(!self.inner.is_null());
        unsafe {
            // safe_new produces a NUL-terminated copy suitable for the C API.
            let txt = CString::safe_new(txt);
            Fl_Text_Buffer_set_text(self.inner, txt.as_ptr())
        }
    }
    /// Returns the text of the buffer as an owned `String`
    /// (lossy UTF-8 conversion of the underlying C string)
    pub fn text(&self) -> String {
        assert!(!self.inner.is_null());
        unsafe {
            let text = Fl_Text_Buffer_text(self.inner);
            assert!(!text.is_null());
            CStr::from_ptr(text as *mut raw::c_char)
                .to_string_lossy()
                .to_string()
        }
    }
    /**
        Appends to the buffer.
        To append and scroll to the end of the buffer:
        ```rust,no_run
        use fltk::{prelude::*, *};
        let txt = "Some long text!";
        let buf = text::TextBuffer::default();
        let mut disp = text::TextDisplay::default();
        disp.set_buffer(Some(buf));
        disp.buffer().unwrap().append(txt);
        disp.set_insert_position(disp.buffer().unwrap().length());
        disp.scroll(
            disp.count_lines(0, disp.buffer().unwrap().length(), true),
            0,
        );
        ```
    */
    pub fn append(&mut self, text: &str) {
        assert!(!self.inner.is_null());
        // NUL-terminated copy for the C API.
        let text = CString::safe_new(text);
        unsafe { Fl_Text_Buffer_append(self.inner, text.as_ptr()) }
    }
    /// Get the length of the buffer in bytes
    pub fn length(&self) -> i32 {
        assert!(!self.inner.is_null());
        unsafe { Fl_Text_Buffer_length(self.inner) as i32 }
    }
/// Removes from the buffer
pub fn remove(&mut self, start: i32, end: i32) {
assert!(!self.inner.is_null());
unsafe {
Fl_Text_Buffer_remove(self.inner, start as i32, end as i32);
}
}
/// Returns the text within the range
pub fn text_range(&self, start: i32, end: i32) -> Option<String> {
assert!(!self.inner.is_null());
unsafe {
let x = Fl_Text_Buffer_text_range(self.inner, start as i32, end as i32);
if x.is_null() {
None
} else {
Some(
CStr::from_ptr(x as *mut raw::c_char)
.to_string_lossy()
.to_string(),
)
}
}
}
/// Inserts text into a position
pub fn insert(&mut self, pos: i32, text: &str) {
assert!(!self.inner.is_null());
let text = CString::safe_new(text);
unsafe { Fl_Text_Buffer_insert(self.inner, pos as i32, text.as_ptr()) }
}
/// Replaces text from position `start` to `end`
pub fn replace(&mut self, start: i32, end: i32, text: &str) {
assert!(!self.inner.is_null());
let text = CString::safe_new(text);
unsafe { Fl_Text_Buffer_replace(self.inner, start as i32, end as i32, text.as_ptr()) }
}
/// Copies text from a source buffer into the current buffer
pub fn copy_from(&mut self, source_buf: &TextBuffer, start: i32, end: i32, to: i32) {
assert!(!self.inner.is_null());
unsafe {
Fl_Text_Buffer_copy(
self.inner,
source_buf.as_ptr(),
start as i32,
end as i32,
to as i32,
)
}
}
/// Copies whole text from a source buffer into a new buffer
pub fn copy(&self) -> TextBuffer {
assert!(!self.inner.is_null());
let mut temp = TextBuffer::default();
temp.copy_from(self, 0, 0, self.length());
temp
}
/// Performs an undo operation on the buffer
/// # Errors
/// Errors on failure to undo
pub fn undo(&mut self) -> Result<(), FltkError> {
assert!(!self.inner.is_null());
unsafe {
match Fl_Text_Buffer_undo(self.inner, std::ptr::null_mut()) {
0 => Err(FltkError::Unknown(String::from("Failed to undo"))),
_ => Ok(()),
}
}
}
/// Sets whether the buffer can undo
pub fn can_undo(&mut self, flag: bool) {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_canUndo(self.inner, flag as raw::c_char) }
}
/// Loads a file into the buffer
/// # Errors
/// Errors on failure to load file
pub fn load_file<P: AsRef<std::path::Path>>(&mut self, path: P) -> Result<(), FltkError> {
assert!(!self.inner.is_null());
if !path.as_ref().exists() {
return Err(FltkError::Internal(FltkErrorKind::ResourceNotFound));
}
let path = path
.as_ref()
.to_str()
.ok_or_else(|| FltkError::Unknown(String::from("Failed to convert path to string")))?;
let path = CString::new(path)?;
unsafe {
match Fl_Text_Buffer_load_file(self.inner, path.as_ptr()) {
0 => Ok(()),
_ => Err(FltkError::Internal(FltkErrorKind::ResourceNotFound)),
}
}
}
/// Saves a buffer into a file
/// # Errors
/// Errors on failure to save file
pub fn save_file<P: AsRef<std::path::Path>>(&mut self, path: P) -> Result<(), FltkError> {
assert!(!self.inner.is_null());
let path = path
.as_ref()
.to_str()
.ok_or_else(|| FltkError::Unknown(String::from("Failed to convert path to string")))?;
let path = CString::new(path)?;
unsafe {
match Fl_Text_Buffer_save_file(self.inner, path.as_ptr()) {
0 => Ok(()),
_ => Err(FltkError::Internal(FltkErrorKind::ResourceNotFound)),
}
}
}
/// Returns the tab distance for the buffer
pub fn tab_distance(&self) -> i32 {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_tab_distance(self.inner) as i32 }
}
/// Sets the tab distance
pub fn set_tab_distance(&mut self, tab_dist: i32) {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_set_tab_distance(self.inner, tab_dist as i32) }
}
/// Selects the text from start to end
pub fn select(&mut self, start: i32, end: i32) {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_select(self.inner, start as i32, end as i32) }
}
/// Returns whether text is selected
pub fn selected(&self) -> bool {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_selected(self.inner) != 0 }
}
/// Unselects text
pub fn unselect(&mut self) {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_unselect(self.inner) }
}
/// Returns the selection position
pub fn selection_position(&mut self) -> Option<(i32, i32)> {
assert!(!self.inner.is_null());
unsafe {
let start: *mut raw::c_int = std::ptr::null_mut();
let end: *mut raw::c_int = std::ptr::null_mut();
let ret = Fl_Text_Buffer_selection_position(self.inner, start, end);
if ret == 0 {
None
} else {
let x = (*start as i32, *end as i32);
Some(x)
}
}
}
/// Returns the selection text
pub fn selection_text(&mut self) -> String {
assert!(!self.inner.is_null());
unsafe {
let x = Fl_Text_Buffer_selection_text(self.inner);
assert!(!x.is_null());
CStr::from_ptr(x as *mut raw::c_char)
.to_string_lossy()
.to_string()
}
}
/// Removes the selection
pub fn remove_selection(&mut self) {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_remove_selection(self.inner) }
}
/// Replaces selection
pub fn replace_selection(&mut self, text: &str) {
assert!(!self.inner.is_null());
let text = CString::safe_new(text);
unsafe { Fl_Text_Buffer_replace_selection(self.inner, text.as_ptr()) }
}
/// Highlights selection
pub fn highlight(&mut self, start: i32, end: i32) {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_highlight(self.inner, start as i32, end as i32) }
}
/// Returns whether text is highlighted
pub fn is_highlighted(&mut self) -> bool {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_is_highlighted(self.inner) != 0 }
}
/// Unhighlights text
pub fn unhighlight(&mut self) {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_unhighlight(self.inner) }
}
/// Returns the highlight position
pub fn highlight_position(&mut self) -> Option<(i32, i32)> {
assert!(!self.inner.is_null());
unsafe {
let start: *mut raw::c_int = std::ptr::null_mut();
let end: *mut raw::c_int = std::ptr::null_mut();
let ret = Fl_Text_Buffer_highlight_position(self.inner, start, end);
if ret == 0 {
None
} else {
let x = (*start as i32, *end as i32);
Some(x)
}
}
}
/// Returns the highlighted text
pub fn highlight_text(&mut self) -> String {
assert!(!self.inner.is_null());
unsafe {
let x = Fl_Text_Buffer_highlight_text(self.inner);
assert!(!x.is_null());
CStr::from_ptr(x as *mut raw::c_char)
.to_string_lossy()
.to_string()
}
}
/// Returns the line at pos
pub fn line_text(&self, pos: i32) -> String {
assert!(!self.inner.is_null());
unsafe {
let x = Fl_Text_Buffer_line_text(self.inner, pos as i32);
assert!(!x.is_null());
CStr::from_ptr(x as *mut raw::c_char)
.to_string_lossy()
.to_string()
}
}
/// Returns the index of the line's start position at pos
pub fn line_start(&self, pos: i32) -> i32 {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_line_start(self.inner, pos as i32) as i32 }
}
/// Returns the index of the first character of a word at pos
pub fn word_start(&self, pos: i32) -> i32 {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_word_start(self.inner, pos as i32) as i32 }
}
/// Returns the index of the last character of a word at pos
pub fn word_end(&self, pos: i32) -> i32 {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_word_end(self.inner, pos as i32) as i32 }
}
/// Counts the lines from start to end
pub fn count_lines(&self, start: i32, end: i32) -> i32 {
assert!(!self.inner.is_null());
unsafe { Fl_Text_Buffer_count_lines(self.inner, start as i32, end as i32) as i32 }
}
    /// Calls the modify callbacks
    ///
    /// Manually triggers every callback registered via `add_modify_callback`.
    pub fn call_modify_callbacks(&mut self) {
        assert!(!self.inner.is_null());
        unsafe { Fl_Text_Buffer_call_modify_callbacks(self.inner) }
    }
    /// Adds a modify callback.
    /// callback args:
    /// pos: i32, inserted items: i32, deleted items: i32, restyled items: i32, `deleted_text`
    ///
    /// NOTE(review): the boxed closure is passed to the C side via `Box::into_raw`
    /// and is never reclaimed on the Rust side, so each registered callback's
    /// allocation lives until process exit — confirm this leak is intentional.
    pub fn add_modify_callback<F: FnMut(i32, i32, i32, i32, &str) + 'static>(&mut self, cb: F) {
        assert!(!self.inner.is_null());
        unsafe {
            // C-compatible trampoline: recovers the double-boxed Rust closure
            // from the `data` pointer and forwards the FFI arguments to it.
            unsafe extern "C" fn shim(
                pos: raw::c_int,
                inserted: raw::c_int,
                deleted: raw::c_int,
                restyled: raw::c_int,
                deleted_text: *const raw::c_char,
                data: *mut raw::c_void,
            ) {
                // `deleted_text` may be null when nothing was deleted.
                let temp = if deleted_text.is_null() {
                    String::from("")
                } else {
                    CStr::from_ptr(deleted_text).to_string_lossy().to_string()
                };
                let a: *mut Box<dyn FnMut(i32, i32, i32, i32, &str)> =
                    data as *mut Box<dyn for<'r> FnMut(i32, i32, i32, i32, &'r str)>;
                let f: &mut (dyn FnMut(i32, i32, i32, i32, &str)) = &mut **a;
                // Never unwind across the FFI boundary: swallow panics here.
                let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
                    f(
                        pos as i32,
                        inserted as i32,
                        deleted as i32,
                        restyled as i32,
                        &temp,
                    )
                }));
            }
            // Double-box so the fat trait-object pointer fits in a thin `*mut c_void`.
            let a: *mut Box<dyn FnMut(i32, i32, i32, i32, &str)> =
                Box::into_raw(Box::new(Box::new(cb)));
            let data: *mut raw::c_void = a as *mut std::ffi::c_void;
            let callback: Fl_Text_Modify_Cb = Some(shim);
            Fl_Text_Buffer_add_modify_callback(self.inner, callback, data);
        }
    }
    /// Removes a modify callback.
    /// callback args:
    /// pos: i32, inserted items: i32, deleted items: i32, restyled items: i32, `deleted_text`
    ///
    /// NOTE(review): this boxes a *fresh* closure, so the `data` pointer passed
    /// to the C side differs from the one registered by `add_modify_callback`.
    /// If FLTK matches callbacks on the (function, data) pair, this call can
    /// never remove the originally registered callback — verify against the
    /// C wrapper's matching semantics.
    pub fn remove_modify_callback<F: FnMut(i32, i32, i32, i32, &str) + 'static>(&mut self, cb: F) {
        assert!(!self.inner.is_null());
        unsafe {
            // Same trampoline shape as in `add_modify_callback`.
            unsafe extern "C" fn shim(
                pos: raw::c_int,
                inserted: raw::c_int,
                deleted: raw::c_int,
                restyled: raw::c_int,
                deleted_text: *const raw::c_char,
                data: *mut raw::c_void,
            ) {
                let temp = if deleted_text.is_null() {
                    String::from("")
                } else {
                    CStr::from_ptr(deleted_text).to_string_lossy().to_string()
                };
                let a: *mut Box<dyn FnMut(i32, i32, i32, i32, &str)> =
                    data as *mut Box<dyn for<'r> FnMut(i32, i32, i32, i32, &'r str)>;
                let f: &mut (dyn FnMut(i32, i32, i32, i32, &str)) = &mut **a;
                let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
                    f(
                        pos as i32,
                        inserted as i32,
                        deleted as i32,
                        restyled as i32,
                        &temp,
                    )
                }));
            }
            let a: *mut Box<dyn FnMut(i32, i32, i32, i32, &str)> =
                Box::into_raw(Box::new(Box::new(cb)));
            let data: *mut raw::c_void = a as *mut std::ffi::c_void;
            let callback: Fl_Text_Modify_Cb = Some(shim);
            Fl_Text_Buffer_remove_modify_callback(self.inner, callback, data);
        }
    }
}
// SAFETY(review): `TextBuffer` wraps a raw FLTK buffer pointer plus an atomic
// refcount. These impls assert that cross-thread use is sound; FLTK widgets
// generally require the toolkit's locking discipline — confirm against the
// crate's threading documentation.
unsafe impl Sync for TextBuffer {}
unsafe impl Send for TextBuffer {}
impl Clone for TextBuffer {
    fn clone(&self) -> TextBuffer {
        assert!(!self.inner.is_null());
        // `fetch_add` returns the *previous* count: this instance's counter is
        // bumped, while the clone starts at the old value.
        // NOTE(review): the counters are per-instance, not shared, so dropping
        // clones out of order can free the underlying Fl_Text_Buffer while
        // other clones still point at it — confirm the intended ownership model.
        let x = self.refcount.fetch_add(1, Ordering::Relaxed);
        TextBuffer {
            inner: self.inner,
            refcount: AtomicUsize::new(x),
        }
    }
}
impl Drop for TextBuffer {
    fn drop(&mut self) {
        assert!(!self.inner.is_null());
        // Decrement this instance's refcount; once it reaches zero, free the
        // underlying FLTK buffer and null the pointer so any later use trips
        // the `is_null` asserts instead of touching freed memory.
        self.refcount.fetch_sub(1, Ordering::Relaxed);
        if *self.refcount.get_mut() < 1 {
            unsafe {
                Fl_Text_Buffer_delete(self.inner);
            }
            self.inner = std::ptr::null_mut();
        }
    }
}
/// Defines wrap modes for text display widgets
#[repr(i32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum WrapMode {
    /// No wrapping
    None,
    /// Wrap text at certain column
    AtColumn,
    /// Wrap text at certain pixel
    AtPixel,
    /// Wrap text at certain bounds
    AtBounds,
}
/// Defines drag types for text display widgets
#[repr(i32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum DragType {
    /// No dragging
    None = -2,
    /// Drag Start "drag n drop" event
    StartDnd = -1,
    /// Drag single character
    Char = 0,
    /// Drag single word
    Word = 1,
    /// Drag single line
    Line = 2,
}
/// Creates a non-editable text display widget
#[derive(WidgetBase, WidgetExt, DisplayExt, Debug)]
pub struct TextDisplay {
    /// Raw pointer to the underlying FLTK widget
    inner: *mut Fl_Text_Display,
    /// Tracks deletion of the widget on the C++ side
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
/// Creates an editable text display widget
#[derive(WidgetBase, WidgetExt, DisplayExt, Debug)]
pub struct TextEditor {
    /// Raw pointer to the underlying FLTK widget
    inner: *mut Fl_Text_Editor,
    /// Tracks deletion of the widget on the C++ side
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
/// Creates an editable text display widget to handle terminal-like behavior, such as
/// logging events or debug information.
/// `SimpleTerminal` already has an internal buffer.
/// It is NOT a full terminal emulator; it does NOT
/// handle stdio redirection, pipes, pseudo ttys, termio character cooking,
/// keyboard input processing, screen addressing, random cursor positioning,
/// curses compatibility, or VT100/xterm emulation.
#[derive(WidgetBase, WidgetExt, DisplayExt, Debug)]
pub struct SimpleTerminal {
    /// Raw pointer to the underlying FLTK widget
    inner: *mut Fl_Simple_Terminal,
    /// Tracks deletion of the widget on the C++ side
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
/// Defines the styles used in the `set_highlight_data`, which is used with style buffers.
/// One entry describes the color, font and size applied to a styled run of text.
#[derive(Debug, Clone, Copy)]
pub struct StyleTableEntry {
    /// Font color
    pub color: Color,
    /// Font type
    pub font: Font,
    /// Font size
    pub size: i32,
}
impl TextEditor {
    // Every method below requires that the widget has not been deleted and
    // that a text buffer is attached; both preconditions are asserted, so a
    // violation panics. The `kf_*` methods expose FLTK's editor key functions.
    /// Set to insert mode
    pub fn set_insert_mode(&mut self, b: bool) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Text_Editor_set_insert_mode(self.inner, b as i32) }
    }
    /// Returns whether insert mode is set
    pub fn insert_mode(&self) -> bool {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Text_Editor_insert_mode(self.inner) != 0 }
    }
    /// Set tab navigation
    pub fn set_tab_nav(&mut self, val: bool) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Text_Editor_set_tab_nav(self.inner, val as i32) }
    }
    /// Returns whether tab navigation is set
    pub fn tab_nav(&self) -> bool {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Text_Editor_tab_nav(self.inner) != 0 }
    }
    /// Copies the text within the `TextEditor` widget
    pub fn copy(&self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_copy(self.inner);
        }
    }
    /// Cuts the text within the `TextEditor` widget
    pub fn cut(&self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_cut(self.inner);
        }
    }
    /// Pastes text from the clipboard into the `TextEditor` widget
    pub fn paste(&self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_paste(self.inner);
        }
    }
    /// Undo changes in the `TextEditor` widget
    pub fn undo(&self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_undo(self.inner);
        }
    }
    /// Inserts the text associated with key 'c'
    pub fn kf_default(&mut self, c: Key) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        // Note the FFI argument order here: key first, widget second.
        unsafe {
            Fl_Text_Editor_kf_default(c.bits() as i32, self.inner);
        }
    }
    /// Ignores the key 'c' in editor
    pub fn kf_ignore(&mut self, c: Key) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_ignore(c.bits() as i32, self.inner);
        }
    }
    /// Does a backspace
    pub fn kf_backspace(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_backspace(self.inner);
        }
    }
    /// Inserts a new line
    pub fn kf_enter(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_enter(self.inner);
        }
    }
    /// Moves the cursor in the direction indicated by the key
    pub fn kf_move(&mut self, c: Key) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_move(c.bits() as i32, self.inner);
        }
    }
    /// Extends the current selection in the direction of key 'c'
    pub fn kf_shift_move(&mut self, c: Key) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_shift_move(c.bits() as i32, self.inner);
        }
    }
    /// Moves the current text cursor in the direction indicated by control key 'c'
    pub fn kf_ctrl_move(&mut self, c: Key) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_ctrl_move(c.bits() as i32, self.inner);
        }
    }
    /// Extends the current selection in the direction indicated by control key 'c'
    pub fn kf_c_s_move(&mut self, c: Key) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_c_s_move(c.bits() as i32, self.inner);
        }
    }
    /// Moves the current text cursor in the direction indicated by meta key 'c'
    pub fn kf_meta_move(&mut self, c: Key) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_meta_move(c.bits() as i32, self.inner);
        }
    }
    /// Extends the current selection in the direction indicated by meta key 'c'
    pub fn kf_m_s_move(&mut self, c: Key) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_m_s_move(c.bits() as i32, self.inner);
        }
    }
    /// Moves the text cursor to the beginning of the current line
    pub fn kf_home(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_home(self.inner);
        }
    }
    /// Moves the text cursor to the end of the current line
    pub fn kf_end(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_end(self.inner);
        }
    }
    /// Moves the text cursor one character to the left
    pub fn kf_left(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_left(self.inner);
        }
    }
    /// Moves the text cursor one line up
    pub fn kf_up(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_up(self.inner);
        }
    }
    /// Moves the text cursor one character to the right
    pub fn kf_right(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_right(self.inner);
        }
    }
    /// Moves the text cursor one line down
    pub fn kf_down(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_down(self.inner);
        }
    }
    /// Moves the text cursor up one page
    pub fn kf_page_up(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_page_up(self.inner);
        }
    }
    /// Moves the text cursor down one page
    pub fn kf_page_down(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_page_down(self.inner);
        }
    }
    /// Toggles the insert mode for the editor
    pub fn kf_insert(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_insert(self.inner);
        }
    }
    /// Does a delete of selected text or the current character in the current buffer
    pub fn kf_delete(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_delete(self.inner);
        }
    }
    /// Selects all text in the associated buffer
    pub fn kf_select_all(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            Fl_Text_Editor_kf_select_all(self.inner);
        }
    }
}
impl SimpleTerminal {
    // Every method asserts the widget is alive and has an attached buffer.
    /// Sets whether the terminal automatically stays at the bottom
    pub fn set_stay_at_bottom(&mut self, arg1: bool) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Simple_Terminal_set_stay_at_bottom(self.inner, arg1 as i32) }
    }
    /// Returns whether the terminal automatically stays at the bottom
    pub fn stay_at_bottom(&self) -> bool {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Simple_Terminal_stay_at_bottom(self.inner) != 0 }
    }
    /// Sets the max lines allowed in history
    pub fn set_history_lines(&mut self, arg1: i32) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Simple_Terminal_set_history_lines(self.inner, arg1 as i32) }
    }
    /// Gets the max lines allowed in history
    pub fn history_lines(&self) -> i32 {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Simple_Terminal_history_lines(self.inner) as i32 }
    }
    /// Enables ANSI sequences within the text to control text colors
    pub fn set_ansi(&mut self, val: bool) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Simple_Terminal_set_ansi(self.inner, val as i32) }
    }
    /// Returns whether ANSI sequences are enabled
    pub fn ansi(&self) -> bool {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Simple_Terminal_ansi(self.inner) != 0 }
    }
    /// Appends text to the terminal buffer
    pub fn append(&mut self, s: &str) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        let s = CString::safe_new(s);
        // Pass a borrowed pointer like every other FFI string call in this
        // file; the previous `into_raw()` handed ownership away and never
        // reclaimed it, leaking the allocation on every call.
        unsafe { Fl_Simple_Terminal_append(self.inner, s.as_ptr()) }
    }
    /// Sets the text of the terminal buffer
    pub fn set_text(&mut self, s: &str) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        let s = CString::safe_new(s);
        // Same leak fix as `append`: borrow instead of `into_raw()`.
        unsafe { Fl_Simple_Terminal_set_text(self.inner, s.as_ptr()) }
    }
    /// Gets the text of the terminal buffer
    pub fn text(&self) -> String {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe {
            let ptr = Fl_Simple_Terminal_text(self.inner);
            assert!(!ptr.is_null());
            CStr::from_ptr(ptr as *mut raw::c_char)
                .to_string_lossy()
                .to_string()
        }
    }
    /// Clears the terminal
    pub fn clear(&mut self) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Simple_Terminal_clear(self.inner) }
    }
    /// Removes `count` lines from `start`
    pub fn remove_lines(&mut self, start: i32, count: i32) {
        assert!(!self.was_deleted());
        assert!(self.buffer().is_some());
        unsafe { Fl_Simple_Terminal_remove_lines(self.inner, start as i32, count as i32) }
    }
}
|
use crate::grammar::ast::Parens;
use crate::grammar::model::HasSourceReference;
use crate::grammar::testing::TestingContext;
use crate::grammar::tracing::input::OptionallyTraceable;
#[test]
fn test_basic() {
    let ctx = TestingContext::with(&["(ident)"]);
    ctx.test_output(Parens::parse, 0, |(remaining, parens)| {
        // Dump the parser trace before asserting, to aid debugging on failure.
        remaining.get_trace().unwrap().print().unwrap();
        assert_eq!(remaining.len(), 0);
        assert_eq!(parens.source, "(ident)");
        assert_eq!(parens.inner.get_source_ref(), "ident");
    });
}
|
use crate::brightnessconverter::BrightnessConversionType;
/// User-selectable rendering options for the converter.
pub struct Options {
    /// How the source image is scaled before rendering
    pub scale_config: ScaleConfig,
    /// Algorithm used to convert pixel color to brightness
    /// (field keeps the existing "convertion" spelling — renaming would break callers)
    pub brightness_convertion_algorithm: BrightnessConversionType,
    /// Whether to invert the brightness of the output
    pub should_invert_colors: bool,
}
/// Controls how the image is scaled before rendering.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ScaleConfig {
    /// Keep the image's original dimensions
    OriginalSize,
    /// Scale the image to fit the terminal
    FitToTerminal,
    /// Scale the image to an explicit (width, height)
    FixedSize(u32, u32),
}
/// Returns the default converter options: fit the image to the terminal,
/// use luminosity-based brightness conversion, and do not invert colors.
pub fn get_default_options() -> Options {
    // Tail expression instead of the non-idiomatic trailing `return ...;`.
    Options {
        scale_config: ScaleConfig::FitToTerminal,
        brightness_convertion_algorithm: BrightnessConversionType::Luminosity,
        should_invert_colors: false,
    }
}
|
//! Determinization and minimization of an NFA into the final DFA used by the engines.
// NOTE: Some comments in this module are outdated, because the minimizer doesn't
// actually produce minimal automata as of now - see #91.
use super::nfa::{self, NfaState, NfaStateId};
use super::small_set::{SmallSet, SmallSet256};
use super::state::StateAttributesBuilder;
use super::{Automaton, NondeterministicAutomaton, State as DfaStateId, StateAttributes, StateTable, TransitionLabel};
use crate::debug;
use crate::query::error::CompilerError;
use smallvec::{smallvec, SmallVec};
use vector_map::VecMap;
/// Turn the [`NondeterministicAutomaton`] to an equivalent minimal* deterministic [`Automaton`].
///
/// *Not actually minimal. See #91
pub(super) fn minimize(nfa: NondeterministicAutomaton) -> Result<Automaton, CompilerError> {
    // Build a fresh minimizer with empty working sets and run it to completion.
    Minimizer {
        nfa,
        superstates: VecMap::new(),
        checkpoints: VecMap::new(),
        active_superstates: smallvec![],
        dfa_states: vec![],
        accepting: SmallSet256::default(),
    }
    .run()
}
/// Working state of the superset-construction algorithm described in the
/// module documentation.
pub(super) struct Minimizer<'q> {
    /// The NFA being minimized.
    nfa: NondeterministicAutomaton<'q>,
    /// All superstates created thus far mapping to their index in the DFA being constructed.
    superstates: VecMap<SmallSet256, DfaStateId>,
    /// Map from superstates to the furthest reachable checkpoint on a path leading to that superstate.
    checkpoints: VecMap<SmallSet256, NfaStateId>,
    /// Superstates that have not been processed and expanded yet.
    active_superstates: SmallVec<[SmallSet256; 2]>,
    /// All superstates created thus far, in order matching the `superstates` map.
    dfa_states: Vec<StateTable<'q>>,
    /// Set of activated DFA states that are accepting.
    accepting: SmallSet256,
}
/// Transition function of a single superstate, expressed in terms of target
/// superstates (before translation into DFA state ids).
#[derive(Debug)]
struct SuperstateTransitionTable<'q> {
    /// Target superstates reached via a specific transition label.
    labelled: VecMap<TransitionLabel<'q>, SmallSet256>,
    /// Target superstate for any input not matching a label.
    wildcard: SmallSet256,
}
/**
 * Minimization proceeds by superset construction; *checkpoints* both simplify
 * the construction and help keep the number of generated states small.
*
* Every state with a self-loop becomes a checkpoint. They have two crucial properties:
* 1. Any path from the initial to the accepting state goes through each checkpoint.
* 2. Each superstate containing
* a) a checkpoint and;
* b) some states on the path from the initial state to that checkpoint,
* is equivalent to a superstate without the b) states.
* This allows on-the-fly minimization with the `normalize` function, vastly reducing
* the number of superstates to consider.
*
* Identifying checkpoints is easy - these are exactly the singleton sets of Recursive
* NFA states.
*
* We expand each superstate by examining all transitions originating from NFA states
* in the superstate. The targets of those transitions are consolidated into superstates.
* If a superstate is encountered for the first time, it becomes active and will be expanded later.
* The algorithm continues while any states are active.
*
* Superstate number 0 is specifically designated as the rejecting state,
* which is used when there is no available checkpoint to return to.
**/
impl<'q> Minimizer<'q> {
/// Main loop of the algorithm. Initialize rejecting and initial states
/// and perform expansion until we run out of active states.
fn run(mut self) -> Result<Automaton<'q>, CompilerError> {
// Rejecting state has no outgoing transitions except for a self-loop.
self.dfa_states.push(StateTable {
transitions: smallvec![],
fallback_state: Self::rejecting_state(),
attributes: StateAttributesBuilder::new().rejecting().into(),
});
self.superstates.insert(SmallSet256::default(), Self::rejecting_state());
// Initial superstate is {0}.
let initial_superstate = [0].into();
self.activate_if_new(initial_superstate)?;
while let Some(superstate) = self.active_superstates.pop() {
self.process_superstate(superstate)?;
}
Ok(Automaton {
states: self.dfa_states,
})
}
fn rejecting_state() -> DfaStateId {
DfaStateId(0)
}
/// Every time a transition to a superstate is created, we need to check if it is
/// discovered for the first time. If so, we need to initialize and activate it.
fn activate_if_new(&mut self, superstate: SmallSet256) -> Result<(), CompilerError> {
if !self.superstates.contains_key(&superstate) {
let identifier = self
.superstates
.len()
.try_into()
.map(DfaStateId)
.map_err(|err| CompilerError::QueryTooComplex(Some(err)))?;
self.superstates.insert(superstate, identifier);
self.active_superstates.push(superstate);
self.dfa_states.push(StateTable::default());
debug!("New superstate created: {superstate:?} {identifier}");
if superstate.contains(self.nfa.accepting_state().0) {
self.accepting.insert(identifier.0);
}
}
Ok(())
}
/// Create the superstate's [`TransitionTable`] by processing all transitions
/// of NFA states within the superstate.
fn process_superstate(&mut self, current_superstate: SmallSet256) -> Result<(), CompilerError> {
let current_checkpoint = self.determine_checkpoint(current_superstate);
debug!("Expanding superstate: {current_superstate:?}, last checkpoint is {current_checkpoint:?}");
let mut transitions = self.process_nfa_transitions(current_superstate, current_checkpoint)?;
debug!("Raw transitions: {:?}", transitions);
self.normalize_superstate_transitions(&mut transitions, current_checkpoint)?;
debug!("Normalized transitions: {:?}", transitions);
// Translate the transitions to the data model expected by TransitionTable.
let translated_transitions: SmallVec<_> = transitions
.labelled
.into_iter()
.map(|(label, state)| (label, self.superstates[&state]))
.collect();
debug!("Translated transitions: {translated_transitions:?}");
// If a checkpoint was reached, its singleton superstate is this DFA state's fallback state.
// Otherwise, we set the fallback to the rejecting state.
let id = self.superstates[¤t_superstate];
let fallback_state = self.superstates[&transitions.wildcard];
let attributes = self.build_attributes(id, &translated_transitions, fallback_state);
let table = &mut self.dfa_states[id.0 as usize];
table.transitions = translated_transitions;
table.fallback_state = fallback_state;
table.attributes = attributes;
Ok(())
}
/// Build attributes of a DFA state after all of its transitions have been
/// determined.
fn build_attributes(
&self,
id: DfaStateId,
transitions: &[(TransitionLabel, DfaStateId)],
fallback: DfaStateId,
) -> StateAttributes {
let mut attrs = StateAttributesBuilder::new();
if self.accepting.contains(id.0) {
debug!("{id} is accepting");
attrs = attrs.accepting();
}
if id == Self::rejecting_state() {
debug!("{id} is rejecting");
attrs = attrs.rejecting();
}
if transitions.len() == 1 && fallback == Self::rejecting_state() {
debug!("{id} is unitary");
attrs = attrs.unitary();
}
if self.accepting.contains(fallback.0) || transitions.iter().any(|(_, s)| self.accepting.contains(s.0)) {
debug!("{id} has transitions to accepting");
attrs = attrs.transitions_to_accepting();
}
attrs.into()
}
/// Determine what is the furthest reachable checkpoint on the path to this
/// superstate. This is either the superstate itself, if it is a checkpoint,
/// or the one flowed into from a previous superstate via the `checkpoints` map.
fn determine_checkpoint(&mut self, superstate: SmallSet256) -> Option<NfaStateId> {
if let Some(nfa_state) = self.as_checkpoint(superstate) {
self.checkpoints.insert(superstate, nfa_state);
Some(nfa_state)
} else {
self.checkpoints.get(&superstate).copied()
}
}
/// Determine whether the `superstate` is a checkpoint, and if yes
/// return the Recursive NFA state it represents. Otherwise, return `None`.
fn as_checkpoint(&self, superstate: SmallSet256) -> Option<NfaStateId> {
if let Some(single_state) = superstate.singleton().map(NfaStateId) {
if matches!(self.nfa[single_state], NfaState::Recursive(_)) {
return Some(single_state);
}
}
None
}
/// Create the transition table for a superstate by traversing all NFA transitions
/// from states within it.
fn process_nfa_transitions(
&self,
current_superstate: SmallSet256,
current_checkpoint: Option<NfaStateId>,
) -> Result<SuperstateTransitionTable<'q>, CompilerError> {
let mut wildcard_targets = current_superstate
.iter()
.map(NfaStateId)
.filter_map(|id| match self.nfa[id] {
NfaState::Recursive(nfa::Transition::Wildcard) | NfaState::Direct(nfa::Transition::Wildcard) => {
Some(id.next().map(|x| x.0))
}
_ => None,
})
.collect::<Result<SmallSet256, _>>()?;
if let Some(checkpoint) = current_checkpoint {
wildcard_targets.insert(checkpoint.0);
}
debug!("Wildcard target: {wildcard_targets:?}");
let mut transitions = SuperstateTransitionTable {
labelled: VecMap::new(),
wildcard: wildcard_targets,
};
for nfa_state in current_superstate.iter().map(NfaStateId) {
match self.nfa[nfa_state] {
// Direct states simply have a single transition to the next state in the NFA.
// Recursive transitions also have a self-loop, but that is handled by the
// checkpoints mechanism - here we only handle the forward transition.
NfaState::Direct(nfa::Transition::Labelled(label))
| NfaState::Recursive(nfa::Transition::Labelled(label)) => {
debug!("Considering transition {nfa_state} --{}-> {}", label, nfa_state.next()?,);
// Add the target NFA state to the target superstate, or create a singleton
// set if this is the first transition via this label encountered in the loop.
if let Some(target) = transitions.labelled.get_mut(&label) {
target.insert(nfa_state.next()?.0);
} else {
let mut new_set = transitions.wildcard;
new_set.insert(nfa_state.next()?.0);
transitions.labelled.insert(label, new_set);
}
}
NfaState::Direct(nfa::Transition::Wildcard)
| NfaState::Recursive(nfa::Transition::Wildcard)
| NfaState::Accepting => (),
}
}
Ok(transitions)
}
/// Use the checkpoints to perform normalization of superstates
/// and activate them if needed.
fn normalize_superstate_transitions(
&mut self,
transitions: &mut SuperstateTransitionTable,
current_checkpoint: Option<NfaStateId>,
) -> Result<(), CompilerError> {
fn normalize_one(
this: &mut Minimizer,
state: &mut SmallSet256,
current_checkpoint: Option<NfaStateId>,
) -> Result<(), CompilerError> {
if let Some(checkpoint) = current_checkpoint {
state.insert(checkpoint.0);
}
this.normalize(state);
this.activate_if_new(*state)?;
if let Some(checkpoint) = current_checkpoint {
this.checkpoints.insert(*state, checkpoint);
}
Ok(())
}
normalize_one(self, &mut transitions.wildcard, current_checkpoint)?;
for (_, state) in &mut transitions.labelled {
normalize_one(self, state, current_checkpoint)?;
}
Ok(())
}
/// If a superstate contains a Recursive NFA state, then all the NFA states
/// prior to that Recursive state can be removed, equalizing many possible
/// combinations.
fn normalize(&self, superstate: &mut SmallSet256) {
let furthest_checkpoint = superstate
.iter()
.map(NfaStateId)
.filter(|&x| matches!(self.nfa[x], NfaState::Recursive(_)))
.max();
if let Some(cutoff) = furthest_checkpoint {
superstate.remove_all_before(cutoff.0);
}
}
}
#[cfg(test)]
mod tests {
use super::super::*;
use super::*;
use nfa::NfaState;
use pretty_assertions::assert_eq;
use smallvec::smallvec;
#[test]
fn empty_query() {
    // Query = $
    // Minimal DFA: state 0 is the rejecting sink, state 1 accepts the root.
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![NfaState::Accepting],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn simple_wildcard() {
    // Query = $.*
    // State 1 has no labelled transitions: every child label falls through
    // to the accepting state 2.
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![NfaState::Direct(nfa::Transition::Wildcard), NfaState::Accepting],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(2),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn simple_nonnegative_indexed() {
    // Query = $[0]
    // A single direct array-index transition; any other label falls back to
    // the rejecting sink.
    let label = TransitionLabel::ArrayIndex(0.try_into().unwrap());
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![NfaState::Direct(nfa::Transition::Labelled(label)), NfaState::Accepting],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![(label, State(2))],
                fallback_state: State(0),
                attributes: StateAttributes::UNITARY | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn simple_descendant_wildcard() {
    // Query = $..*
    // Once accepting, state 2 keeps accepting every deeper node (fallback
    // loops back to itself).
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![NfaState::Recursive(nfa::Transition::Wildcard), NfaState::Accepting],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(2),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(2),
                attributes: StateAttributes::ACCEPTING | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn interstitial_descendant_wildcard() {
    // Query = $..a.b..*.a..b
    // Shadowing converts each owned JsonString into the label type used by
    // the transition tables.
    let label_a = JsonString::new("a");
    let label_a = (&label_a).into();
    let label_b = JsonString::new("b");
    let label_b = (&label_b).into();
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![
            NfaState::Recursive(nfa::Transition::Labelled(label_a)),
            NfaState::Direct(nfa::Transition::Labelled(label_b)),
            NfaState::Recursive(nfa::Transition::Wildcard),
            NfaState::Direct(nfa::Transition::Labelled(label_a)),
            NfaState::Recursive(nfa::Transition::Labelled(label_b)),
            NfaState::Accepting,
        ],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![(label_a, State(2))],
                fallback_state: State(1),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(2)), (label_b, State(3))],
                fallback_state: State(1),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(4),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(5))],
                fallback_state: State(4),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_b, State(6))],
                fallback_state: State(5),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label_b, State(6))],
                fallback_state: State(5),
                attributes: StateAttributes::ACCEPTING | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn interstitial_nondescendant_wildcard() {
    // Query = $..a.b.*.a..b
    // Same query as above except the middle wildcard is a direct (single
    // level) child, which yields one extra DFA state.
    let label_a = JsonString::new("a");
    let label_a = (&label_a).into();
    let label_b = JsonString::new("b");
    let label_b = (&label_b).into();
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![
            NfaState::Recursive(nfa::Transition::Labelled(label_a)),
            NfaState::Direct(nfa::Transition::Labelled(label_b)),
            NfaState::Direct(nfa::Transition::Wildcard),
            NfaState::Direct(nfa::Transition::Labelled(label_a)),
            NfaState::Recursive(nfa::Transition::Labelled(label_b)),
            NfaState::Accepting,
        ],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![(label_a, State(2))],
                fallback_state: State(1),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(2)), (label_b, State(3))],
                fallback_state: State(1),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(5))],
                fallback_state: State(4),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(6))],
                fallback_state: State(1),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(6)), (label_b, State(3))],
                fallback_state: State(1),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_b, State(7))],
                fallback_state: State(6),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label_b, State(7))],
                fallback_state: State(6),
                attributes: StateAttributes::ACCEPTING | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn simple_multi_accepting() {
    // Query = $..a.*
    // Two accepting states (3 and 4) arise because after accepting, the
    // descendant search for `a` continues from different contexts.
    let label = JsonString::new("a");
    let label = (&label).into();
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![
            NfaState::Recursive(nfa::Transition::Labelled(label)),
            NfaState::Direct(nfa::Transition::Wildcard),
            NfaState::Accepting,
        ],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![(label, State(2)),],
                fallback_state: State(1),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label, State(4))],
                fallback_state: State(3),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label, State(2))],
                fallback_state: State(1),
                attributes: StateAttributes::ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label, State(4))],
                fallback_state: State(3),
                attributes: StateAttributes::ACCEPTING | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn simple_multi_accepting_nneg_index() {
    // Query = $..[3]
    // Fixed: the label previously used index 0, contradicting the query in
    // the comment above. The minimized automaton's shape is independent of
    // the concrete index value, because the very same `label` is used to
    // build both the NFA and the expected automaton.
    let label = TransitionLabel::ArrayIndex(3.try_into().unwrap());
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![
            NfaState::Recursive(nfa::Transition::Labelled(label)),
            NfaState::Accepting,
        ],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            // Rejecting sink.
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            // Descendant search: self-loop on fallback, advance on the index label.
            StateTable {
                transitions: smallvec![(label, State(2))],
                fallback_state: State(1),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            // Accepting, while the descendant search keeps running.
            StateTable {
                transitions: smallvec![(label, State(2))],
                fallback_state: State(1),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING | StateAttributes::ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn chained_wildcard_children() {
    // Query = $.a.*.*.*
    // After matching `a`, three consecutive wildcard hops are encoded as a
    // chain of fallback-only states (2 -> 3 -> 4 -> 5).
    let label = JsonString::new("a");
    let label = (&label).into();
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![
            NfaState::Direct(nfa::Transition::Labelled(label)),
            NfaState::Direct(nfa::Transition::Wildcard),
            NfaState::Direct(nfa::Transition::Wildcard),
            NfaState::Direct(nfa::Transition::Wildcard),
            NfaState::Accepting,
        ],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![(label, State(2))],
                fallback_state: State(0),
                attributes: StateAttributes::UNITARY,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(3),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(4),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(5),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn chained_wildcard_children_after_descendant() {
    // Query = $..a.*.*
    // The descendant `a` combined with wildcard children forces the DFA to
    // track how many wildcard hops remain in each of two interleaved
    // contexts, producing the 2/4/6/8 vs 3/5/7 state pairs below.
    let label = JsonString::new("a");
    let label = (&label).into();
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![
            NfaState::Recursive(nfa::Transition::Labelled(label)),
            NfaState::Direct(nfa::Transition::Wildcard),
            NfaState::Direct(nfa::Transition::Wildcard),
            NfaState::Accepting,
        ],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![(label, State(2))],
                fallback_state: State(1),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label, State(4))],
                fallback_state: State(3),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label, State(8))],
                fallback_state: State(7),
                attributes: StateAttributes::EMPTY | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label, State(6))],
                fallback_state: State(5),
                attributes: StateAttributes::EMPTY | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label, State(8))],
                fallback_state: State(7),
                attributes: StateAttributes::ACCEPTING | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label, State(6))],
                fallback_state: State(5),
                attributes: StateAttributes::ACCEPTING | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label, State(2))],
                fallback_state: State(1),
                attributes: StateAttributes::ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label, State(4))],
                fallback_state: State(3),
                attributes: StateAttributes::ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn child_and_descendant() {
    // Query = $.x..a.b.a.b.c..d
    let label_a = JsonString::new("a");
    let label_a = (&label_a).into();
    let label_b = JsonString::new("b");
    let label_b = (&label_b).into();
    let label_c = JsonString::new("c");
    let label_c = (&label_c).into();
    let label_d = JsonString::new("d");
    let label_d = (&label_d).into();
    let label_x = JsonString::new("x");
    let label_x = (&label_x).into();
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![
            NfaState::Direct(nfa::Transition::Labelled(label_x)),
            NfaState::Recursive(nfa::Transition::Labelled(label_a)),
            NfaState::Direct(nfa::Transition::Labelled(label_b)),
            NfaState::Direct(nfa::Transition::Labelled(label_a)),
            NfaState::Direct(nfa::Transition::Labelled(label_b)),
            NfaState::Direct(nfa::Transition::Labelled(label_c)),
            NfaState::Recursive(nfa::Transition::Labelled(label_d)),
            NfaState::Accepting,
        ],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            // The leading `.x` is a mandatory single step, hence UNITARY with
            // a rejecting fallback.
            StateTable {
                transitions: smallvec![(label_x, State(2))],
                fallback_state: State(0),
                attributes: StateAttributes::UNITARY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(3))],
                fallback_state: State(2),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(3)), (label_b, State(4))],
                fallback_state: State(2),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(5))],
                fallback_state: State(2),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(3)), (label_b, State(6))],
                fallback_state: State(2),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(5)), (label_c, State(7))],
                fallback_state: State(2),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_d, State(8))],
                fallback_state: State(7),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label_d, State(8))],
                fallback_state: State(7),
                attributes: StateAttributes::ACCEPTING | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn child_descendant_and_child_wildcard() {
    // Query = $.x.*..a.*.b
    let label_a = JsonString::new("a");
    let label_a = (&label_a).into();
    let label_b = JsonString::new("b");
    let label_b = (&label_b).into();
    let label_x = JsonString::new("x");
    let label_x = (&label_x).into();
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![
            NfaState::Direct(nfa::Transition::Labelled(label_x)),
            NfaState::Direct(nfa::Transition::Wildcard),
            NfaState::Recursive(nfa::Transition::Labelled(label_a)),
            NfaState::Direct(nfa::Transition::Wildcard),
            NfaState::Direct(nfa::Transition::Labelled(label_b)),
            NfaState::Accepting,
        ],
    };
    let result = minimize(nfa).unwrap();
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![(label_x, State(2))],
                fallback_state: State(0),
                attributes: StateAttributes::UNITARY,
            },
            // Wildcard step: no labelled transitions, everything falls through.
            StateTable {
                transitions: smallvec![],
                fallback_state: State(3),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(4))],
                fallback_state: State(3),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(6))],
                fallback_state: State(5),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_a, State(4)), (label_b, State(8))],
                fallback_state: State(3),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label_a, State(6)), (label_b, State(7))],
                fallback_state: State(5),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label_a, State(4)), (label_b, State(8))],
                fallback_state: State(3),
                attributes: StateAttributes::ACCEPTING | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            StateTable {
                transitions: smallvec![(label_a, State(4))],
                fallback_state: State(3),
                attributes: StateAttributes::ACCEPTING,
            },
        ],
    };
    assert_eq!(result, expected);
}
#[test]
fn all_name_and_wildcard_selectors() {
    // Query = $.a.b..c..d.*..*
    // Exercises every selector kind: direct names, descendant names,
    // a direct wildcard, and a trailing descendant wildcard.
    let label_a = JsonString::new("a");
    let label_a = (&label_a).into();
    let label_b = JsonString::new("b");
    let label_b = (&label_b).into();
    let label_c = JsonString::new("c");
    let label_c = (&label_c).into();
    let label_d = JsonString::new("d");
    let label_d = (&label_d).into();
    let nfa = NondeterministicAutomaton {
        ordered_states: vec![
            NfaState::Direct(nfa::Transition::Labelled(label_a)),
            NfaState::Direct(nfa::Transition::Labelled(label_b)),
            NfaState::Recursive(nfa::Transition::Labelled(label_c)),
            NfaState::Recursive(nfa::Transition::Labelled(label_d)),
            NfaState::Direct(nfa::Transition::Wildcard),
            NfaState::Recursive(nfa::Transition::Wildcard),
            NfaState::Accepting,
        ],
    };
    let expected = Automaton {
        states: vec![
            StateTable {
                transitions: smallvec![],
                fallback_state: State(0),
                attributes: StateAttributes::REJECTING,
            },
            StateTable {
                transitions: smallvec![(label_a, State(2)),],
                fallback_state: State(0),
                attributes: StateAttributes::UNITARY,
            },
            StateTable {
                transitions: smallvec![(label_b, State(3))],
                fallback_state: State(0),
                attributes: StateAttributes::UNITARY,
            },
            StateTable {
                transitions: smallvec![(label_c, State(4))],
                fallback_state: State(3),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_d, State(5))],
                fallback_state: State(4),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![(label_d, State(6))],
                fallback_state: State(6),
                attributes: StateAttributes::EMPTY,
            },
            StateTable {
                transitions: smallvec![],
                fallback_state: State(7),
                attributes: StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
            // Final descendant wildcard: accepting and self-sustaining.
            StateTable {
                transitions: smallvec![],
                fallback_state: State(7),
                attributes: StateAttributes::ACCEPTING | StateAttributes::TRANSITIONS_TO_ACCEPTING,
            },
        ],
    };
    let result = minimize(nfa).unwrap();
    assert_eq!(result, expected);
}
}
|
use raylib::prelude::*;
/// Thin wrapper around a raylib window, owning both pieces of state that
/// raylib hands back at initialization.
pub struct Window {
    handle: RaylibHandle,
    // Thread token required by raylib for drawing calls.
    thread: RaylibThread,
}
/// Per-frame drawing context handed to the `draw_loop` callback.
pub type DrawingContext<'a> = RaylibDrawHandle<'a>;
// Re-export input types so callers need not depend on raylib directly.
pub use raylib::prelude::MouseButton;
pub use raylib::prelude::KeyboardKey;
/// Parameters used to create a [`Window`].
pub struct WindowConfig {
    /// Initial window width in pixels.
    pub width: u32,
    /// Initial window height in pixels.
    pub height: u32,
    /// Text shown in the window's title bar.
    pub title: &'static str,
}
impl Window {
pub fn new(WindowConfig { width, height, title }: &WindowConfig) -> Self {
let (handle, thread) = raylib::init()
.title(title)
.size(*width as i32, *height as i32)
.build();
Self { handle, thread }
}
pub fn width(&self) -> u32 {
self.handle.get_screen_width() as u32
}
pub fn height(&self) -> u32 {
self.handle.get_screen_height() as u32
}
pub fn draw_loop<F>(&mut self, mut draw: F)
where F: FnMut(DrawingContext) {
while !self.handle.window_should_close() {
draw(self.handle.begin_drawing(&self.thread));
}
}
pub fn handle(&self) -> &RaylibHandle { &self.handle }
}
/// Commonly used items, re-exported for a single glob import.
pub mod prelude {
    pub use super::{Window, DrawingContext, WindowConfig};
}
mod math;
mod tensor;
mod utils;
// Flatten the submodules' public items into this crate's root namespace.
pub use self::math::*;
pub use self::tensor::*;
pub use self::utils::*;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.