text stringlengths 8 4.13M |
|---|
use super::traits::{Category, ComplexType, DataType, PineType, SecondType, SimpleType};
use std::cell::RefCell;
use std::ops::{Deref, DerefMut};
use std::rc::Rc;
// PineRef holds a pointer to a Pine type object.
/// Smart-pointer wrapper around a Pine type value.
///
/// Simple (value-like) types are stored as an owned `Box`; complex types are
/// stored behind `Rc<RefCell<..>>` so that clones alias the same underlying
/// object (see `PineRef::new`).
#[derive(Debug)]
pub enum PineRef<'a> {
    Box(Box<dyn PineType<'a> + 'a>),
    Rc(Rc<RefCell<dyn PineType<'a> + 'a>>),
}
impl<'a> Clone for PineRef<'a> {
    fn clone(&self) -> Self {
        match *self {
            // Boxed (simple) values are duplicated via the trait's `copy`.
            PineRef::Box(ref item) => item.copy(),
            // Rc (complex) values are shallow-cloned: both handles alias the
            // same RefCell contents.
            PineRef::Rc(ref item) => PineRef::Rc(Rc::clone(item)),
        }
    }
}
impl<'a> PartialEq for PineRef<'a> {
    /// Two refs are equal only when they hold the same variant and the
    /// contained values compare equal; mixed Box/Rc pairs are never equal.
    fn eq(&self, other: &PineRef<'a>) -> bool {
        match (self, other) {
            (PineRef::Box(ref item1), PineRef::Box(ref item2)) => PartialEq::eq(item1, item2),
            (PineRef::Rc(ref item1), PineRef::Rc(ref item2)) => {
                PartialEq::eq(&*item1.borrow(), &*item2.borrow())
            }
            _ => false,
        }
    }
}
impl<'a> PineRef<'a> {
    /// Wrap `item`, choosing the storage from its runtime category: simple
    /// values are boxed, complex values go behind `Rc<RefCell<..>>`.
    pub fn new<T: PineType<'a> + 'a>(item: T) -> PineRef<'a> {
        match item.category() {
            Category::Simple => PineRef::Box(Box::new(item)),
            Category::Complex => PineRef::Rc(Rc::new(RefCell::new(item))),
        }
    }

    /// Wrap a statically-known simple type as the `Box` variant.
    pub fn new_box<T: PineType<'a> + SimpleType + 'a>(item: T) -> PineRef<'a> {
        PineRef::Box(Box::new(item))
    }

    /// Wrap a statically-known complex type as the `Rc` variant.
    pub fn new_rc<T: PineType<'a> + ComplexType + 'a>(item: T) -> PineRef<'a> {
        PineRef::Rc(Rc::new(RefCell::new(item)))
    }

    /// Unwrap the `Box` variant.
    ///
    /// Panics (`unreachable!`) on the `Rc` variant; callers must already
    /// know which variant they hold.
    pub fn into_box(self) -> Box<dyn PineType<'a> + 'a> {
        match self {
            PineRef::Box(item) => item,
            _ => unreachable!(),
        }
    }

    /// Unwrap the `Rc` variant.
    ///
    /// Panics (`unreachable!`) on the `Box` variant.
    pub fn into_rc(self) -> Rc<RefCell<dyn PineType<'a> + 'a>> {
        match self {
            PineRef::Rc(item) => item,
            _ => unreachable!(),
        }
    }

    /// Raw pointer to the contained trait object.
    pub fn as_ptr(&self) -> *const (dyn PineType<'a> + 'a) {
        match self {
            &PineRef::Box(ref item) => item.as_ref(),
            // NOTE(review): reads through `RefCell::as_ptr` without taking a
            // borrow, so the RefCell borrow flags are bypassed here.
            &PineRef::Rc(ref item) => unsafe { item.as_ptr().as_ref().unwrap() },
        }
    }

    /// Deep-copy the contained value regardless of variant (unlike
    /// `clone`/`copy`, which share the `Rc` variant).
    pub fn copy_inner(&self) -> PineRef<'a> {
        match *self {
            PineRef::Box(ref item) => item.copy(),
            PineRef::Rc(ref item) => item.borrow().copy(),
        }
    }
}
impl<'a> PineType<'a> for PineRef<'a> {
    /// Forward the type query to the contained value.
    fn get_type(&self) -> (DataType, SecondType) {
        match *self {
            PineRef::Box(ref item) => item.get_type(),
            PineRef::Rc(ref item) => item.borrow().get_type(),
        }
    }

    /// Same semantics as `Clone`: deep copy for `Box`, shared handle for `Rc`.
    fn copy(&self) -> PineRef<'a> {
        match *self {
            PineRef::Box(ref item) => item.copy(),
            PineRef::Rc(ref item) => PineRef::Rc(Rc::clone(item)),
        }
    }
}
impl<'a> Deref for PineRef<'a> {
    type Target = dyn PineType<'a> + 'a;

    fn deref(&self) -> &Self::Target {
        match self {
            &PineRef::Box(ref item) => item.as_ref(),
            // NOTE(review): goes through `RefCell::as_ptr`, so this borrow is
            // invisible to the RefCell borrow flags — a concurrent
            // `borrow_mut` elsewhere would alias this reference. Confirm this
            // is never reachable while a mutable borrow is live.
            &PineRef::Rc(ref item) => unsafe { item.as_ptr().as_ref().unwrap() },
        }
    }
}
impl<'a> DerefMut for PineRef<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        match self {
            &mut PineRef::Box(ref mut item) => item.as_mut(),
            // NOTE(review): hands out `&mut` through `RefCell::as_ptr`,
            // bypassing the borrow flags entirely — verify no other borrow
            // (including `Deref` above) can be live at the same time.
            &mut PineRef::Rc(ref item) => unsafe { item.as_ptr().as_mut().unwrap() },
        }
    }
}
|
extern crate grpc;
use rand_service;
use std::thread;

/// Start the plain-text gRPC Rand server and keep the process alive.
fn main() {
    // Plain-text (non-TLS) build; flip `tls` to select the TLS port instead.
    let tls = false;
    let port = if tls { 50052 } else { 50051 };

    let mut builder = grpc::ServerBuilder::new_plain();
    builder.http.set_port(port);
    builder.add_service(rand_service::rand_grpc::RandServer::new_service_def(
        rand_service::RandImpl {},
    ));
    builder.http.set_cpu_pool_threads(4);

    // The server object must stay alive; dropping it would shut it down.
    let _server = builder.build().expect("error building server");
    println!("started on port {}", port);

    // The server runs on its own threads — just park the main thread forever.
    loop {
        thread::park();
    }
}
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use alloc::sync::Arc;
use spin::Mutex;
use alloc::string::String;
use alloc::string::ToString;
use alloc::collections::btree_map::BTreeMap;
use super::super::super::qlib::common::*;
use super::super::super::task::*;
use super::super::filesystems::*;
use super::super::host::fs::*;
use super::super::inode::*;
use super::super::mount::*;
use super::proc::*;
/// The `proc` pseudo filesystem.
pub struct ProcFileSystem {}

impl Filesystem for ProcFileSystem {
    fn Name(&self) -> String {
        return "proc".to_string();
    }

    /// No special filesystem flags.
    fn Flags(&self) -> FilesystemFlags {
        return 0;
    }

    /// Mount a new proc instance; rejects every mount option (see below).
    fn Mount(&mut self, task: &Task, _device: &str, flags: &MountSourceFlags, data: &str) -> Result<Inode> {
        info!("proc file system mount ...");
        // Parse generic comma-separated key=value options, this file system expects them.
        let options = WhitelistFileSystem::GenericMountSourceOptions(data);
        // Proc options parsing checks for either a gid= or hidepid= and barfs on
        // anything else, see fs/proc/root.c:proc_parse_options. Since we don't know
        // what to do with gid= or hidepid=, we blow up if we get any options.
        if options.len() > 0 {
            return Err(Error::Common(format!("unsupported mount options: {:?}", &options)))
        }
        // No cgroup information is exposed yet.
        let cgroups = BTreeMap::new();
        let msrc = MountSource::NewCachingMountSource(self, flags);
        let inode = NewProc(task, &Arc::new(Mutex::new(msrc)), cgroups);
        return Ok(inode)
    }

    /// Unprivileged users may mount proc.
    fn AllowUserMount(&self) -> bool {
        return true;
    }

    /// Unprivileged users may list this filesystem.
    fn AllowUserList(&self) -> bool {
        return true;
    }
}
pub mod runner_proto;
|
// camera.rs
//
// Copyright (c) 2019, Univerisity of Minnesota
//
// Author: Bridger Herman (herma582@umn.edu)
//! Scene camera (FPS-style)
use std::f32::consts;
use glam::{Mat4, Quat, Vec2, Vec3};
use crate::traits::Update;
use crate::transform::Transform;
const ANGULAR_VELOCITY: f32 = 0.007;
const LINEAR_VELOCITY: f32 = 0.01;
/// Scene camera state: cached view/projection matrices plus the FPS-style
/// movement state that the `Update` impl integrates each frame.
#[derive(Debug)]
pub struct Camera {
    // Rebuilt every frame from the transform (see the `Update` impl).
    view_matrix: Mat4,
    // Fixed perspective projection, set once in `Default`.
    projection_matrix: Mat4,
    transform: Transform,
    // Linear velocity in the camera's local basis (z = forward, x = right).
    velocity: Vec3,
    /// Rotation about the x and y axes (screen y and x axes)
    rotation: Vec2,
}
impl Camera {
    /// Create a camera at `position` with the default projection and
    /// orientation.
    pub fn new(position: Vec3) -> Self {
        let mut camera = Self::default();
        camera.set_position(position);
        camera
    }

    /// Current transform (copied out).
    pub fn transform(&self) -> Transform {
        self.transform
    }

    /// Current linear velocity (camera-local basis).
    pub fn velocity(&self) -> Vec3 {
        self.velocity
    }

    /// Overwrite the linear velocity.
    pub fn set_velocity(&mut self, velocity: Vec3) {
        self.velocity = velocity;
    }

    /// Accumulated rotation about the x and y axes.
    pub fn rotation(&self) -> Vec2 {
        self.rotation
    }

    /// Overwrite the accumulated rotation.
    pub fn set_rotation(&mut self, rotation: Vec2) {
        self.rotation = rotation;
    }

    /// Move the camera to an absolute position.
    pub fn set_position(&mut self, position: Vec3) {
        self.transform.set_position(position);
    }

    /// Begin moving along `direction` (camera-local basis), scaled by the
    /// fixed linear speed.
    pub fn start_moving(&mut self, direction: Vec3) {
        self.velocity = direction * LINEAR_VELOCITY;
    }

    /// Halt all linear motion.
    pub fn stop_moving(&mut self) {
        self.velocity = Vec3::zero();
    }

    /// Accumulate a rotation delta, scaled by the fixed angular speed.
    pub fn add_rotation(&mut self, delta: Vec2) {
        self.rotation = self.rotation + delta * ANGULAR_VELOCITY;
    }

    /// Most recently computed view matrix.
    pub fn view_matrix(&self) -> Mat4 {
        self.view_matrix
    }

    /// Fixed projection matrix.
    pub fn projection_matrix(&self) -> Mat4 {
        self.projection_matrix
    }
}
impl Default for Camera {
    /// Identity transform, a look-at view built from it, and a fixed
    /// 45-degree / 16:9 perspective projection (near 0.1, far 300).
    fn default() -> Self {
        let transform = Transform::identity();
        Self {
            // Look from the transform's position along its forward axis.
            view_matrix: Mat4::look_at_rh(
                transform.position(),
                transform.position() + transform.forward(),
                transform.up(),
            ),
            projection_matrix: Mat4::perspective_glu_rh(
                consts::FRAC_PI_4,
                16.0 / 9.0,
                0.1,
                300.0,
            ),
            transform,
            rotation: Vec2::zero(),
            velocity: Vec3::zero(),
        }
    }
}
impl Update for Camera {
    /// Advance the camera one frame: integrate velocity, apply the
    /// accumulated yaw/pitch, then rebuild the view matrix.
    fn update(&mut self) {
        // Frame delta time in milliseconds, from the engine's global clock.
        let dt = wre_time!().dt.subsec_millis() as f32;
        // Translate along the camera's local forward/right axes.
        self.transform.set_position(
            self.transform.position()
                + (self.transform.forward() * dt * self.velocity.z()
                    + self.transform.right() * dt * self.velocity.x()),
        );
        // Rotation about the camera's local x and y axes
        let local_rotation =
            Quat::from_rotation_ypr(self.rotation.y(), self.rotation.x(), 0.0);
        let rotation = Mat4::from_quat(local_rotation);
        // NOTE(review): the full accumulated `self.rotation` is multiplied
        // into the transform matrix every frame while it also keeps
        // accumulating — confirm this compounding is intended.
        let new_matrix = rotation * self.transform.matrix();
        self.transform.set_matrix(new_matrix);
        // trace!("Pos: {:?}", self.transform.position());
        // trace!("Rot: {:?}", self.rotation);
        self.view_matrix = Mat4::look_at_rh(
            self.transform.position(),
            self.transform.position() + self.transform.forward(),
            self.transform.up(),
        );
    }
}
|
use core::{Color, DisplayBuffer, Face, Renderable};
use na::{Vector2, Vector3};
/// Get barycentric coordinates for a point P with respect to a triangle ABC
///
/// # Arguments
///
/// * 'a' Vertex A of the triangle ABC
/// * 'b' Vertex B of the triangle ABC
/// * 'c' Vertex C of the triangle ABC
/// * 'p' Point P for which to calculate the barycentric coordinates
///
/// Barycentric coordinates (u, v, w) are defined such that uA + vB + wC = P
/// Some useful properties
/// - If u, v, w all are >= 0 then point P is inside the triangle ABC
/// - If any of u, v, w is < 0 then point P is outside the triangle ABC
/// - u, v, w can be used to interpolate the vertex attributes inside the triangle
/// - u + v + w = 1
///
/// NOTE(review): for a degenerate (zero-area) triangle `denom` is 0 and the
/// result is NaN/inf — callers must not pass collinear vertices.
fn get_barycentric(
    a: Vector2<f32>,
    b: Vector2<f32>,
    c: Vector2<f32>,
    p: Vector2<f32>,
) -> (f32, f32, f32) {
    // Solve the 2x2 system [v0 v1] * [v, w]^T = v2 using Cramer's rule on
    // the dot products.
    let v0 = b - a;
    let v1 = c - a;
    let v2 = p - a;
    let d00 = v0.dot(&v0);
    let d01 = v0.dot(&v1);
    let d11 = v1.dot(&v1);
    let d20 = v2.dot(&v0);
    let d21 = v2.dot(&v1);
    let denom = d00 * d11 - d01 * d01;
    let v = (d11 * d20 - d01 * d21) / denom;
    let w = (d00 * d21 - d01 * d20) / denom;
    let u = 1.0 - v - w;
    (u, v, w)
}
impl Renderable for Face<Vector3<f32>> {
    /// Draw a color-filled face
    ///
    /// Rasterises the triangle by testing every pixel of its bounding box
    /// with barycentric coordinates, interpolating depth and vertex colors.
    fn render(&self, buffer: &mut DisplayBuffer) {
        // Bounding box for the triangle
        // NOTE(review): `as usize` on a negative f32 saturates to 0, and any
        // face whose box exceeds the buffer is skipped entirely rather than
        // clipped — confirm faces are pre-clipped upstream.
        let all_x = [self.v0.position.x, self.v1.position.x, self.v2.position.x];
        let all_y = [self.v0.position.y, self.v1.position.y, self.v2.position.y];
        let min_x = all_x.iter().fold(std::f32::MAX, |a, &b| a.min(b)) as usize;
        let max_x = all_x.iter().fold(std::f32::MIN, |a, &b| a.max(b)) as usize;
        let min_y = all_y.iter().fold(std::f32::MAX, |a, &b| a.min(b)) as usize;
        let max_y = all_y.iter().fold(std::f32::MIN, |a, &b| a.max(b)) as usize;
        if max_x >= buffer.width || max_y >= buffer.height {
            return;
        }
        for y in min_y..=max_y {
            for x in min_x..=max_x {
                // Project to 2D by dropping the z row.
                let v0 = self.v0.position.remove_row(2);
                let v1 = self.v1.position.remove_row(2);
                let v2 = self.v2.position.remove_row(2);
                let p = Vector2::new(x as f32, y as f32);
                let (w0, w1, w2) = get_barycentric(v0, v1, v2, p);
                // All weights non-negative => the pixel lies inside the face.
                if w0 >= 0.0 && w1 >= 0.0 && w2 >= 0.0 {
                    // Interpolated depth, passed on to `set_pixel`.
                    let z =
                        w0 * self.v0.position.z + w1 * self.v1.position.z + w2 * self.v2.position.z;
                    // Interpolate each color channel with the same weights.
                    let color = Color {
                        r: (w0 * self.v0.color.r as f32
                            + w1 * self.v1.color.r as f32
                            + w2 * self.v2.color.r as f32) as u8,
                        g: (w0 * self.v0.color.g as f32
                            + w1 * self.v1.color.g as f32
                            + w2 * self.v2.color.g as f32) as u8,
                        b: (w0 * self.v0.color.b as f32
                            + w1 * self.v1.color.b as f32
                            + w2 * self.v2.color.b as f32) as u8,
                        a: 255,
                    };
                    buffer.set_pixel(x, y, z, color);
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Point (0, 0.5) lies strictly inside the triangle, so all three
    // weights must be strictly between 0 and 1 (counter-clockwise winding).
    #[test]
    fn test_get_barycentric_ccw_inside() {
        let a = Vector2::new(1.0, 0.0);
        let b = Vector2::new(0.0, 1.0);
        let c = Vector2::new(-1.0, 0.0);
        let p = Vector2::new(0.0, 0.5);
        let (w0, w1, w2) = get_barycentric(a, b, c, p);
        assert!(w0 > 0.0);
        assert!(w1 > 0.0);
        assert!(w2 > 0.0);
        assert!(w0 < 1.0);
        assert!(w1 < 1.0);
        assert!(w2 < 1.0);
    }

    // Same point and triangle, clockwise winding — results must still be
    // inside (the formula is winding-independent).
    #[test]
    fn test_get_barycentric_cw_inside() {
        let a = Vector2::new(-1.0, 0.0);
        let b = Vector2::new(0.0, 1.0);
        let c = Vector2::new(1.0, 0.0);
        let p = Vector2::new(0.0, 0.5);
        let (w0, w1, w2) = get_barycentric(a, b, c, p);
        assert!(w0 > 0.0);
        assert!(w1 > 0.0);
        assert!(w2 > 0.0);
        assert!(w0 < 1.0);
        assert!(w1 < 1.0);
        assert!(w2 < 1.0);
    }
}
|
//! Database for the querier that contains all namespaces.
use crate::{
cache::CatalogCache,
ingester::IngesterConnection,
namespace::{QuerierNamespace, QuerierNamespaceArgs},
parquet::ChunkAdapter,
query_log::QueryLog,
table::PruneMetrics,
};
use async_trait::async_trait;
use backoff::{Backoff, BackoffConfig};
use data_types::Namespace;
use iox_catalog::interface::SoftDeletedRows;
use iox_query::exec::Executor;
use service_common::QueryNamespaceProvider;
use snafu::Snafu;
use std::{collections::HashMap, sync::Arc};
use trace::span::{Span, SpanRecorder};
use tracker::{
AsyncSemaphoreMetrics, InstrumentedAsyncOwnedSemaphorePermit, InstrumentedAsyncSemaphore,
};
/// The number of entries to store in the circular query buffer log.
///
/// That buffer is shared between all namespaces, and filtered on query
const QUERY_LOG_SIZE: usize = 10_000;

#[allow(missing_docs)]
#[derive(Debug, Snafu)]
pub enum Error {
    /// Wrapper for errors bubbling up from the catalog layer.
    #[snafu(display("Catalog error: {source}"))]
    Catalog {
        source: iox_catalog::interface::Error,
    },
}
/// Database for the querier.
///
/// Contains all namespaces.
#[derive(Debug)]
pub struct QuerierDatabase {
    /// Backoff config for IO operations.
    backoff_config: BackoffConfig,

    /// Catalog cache.
    catalog_cache: Arc<CatalogCache>,

    /// Adapter to create chunks.
    chunk_adapter: Arc<ChunkAdapter>,

    /// Executor for queries.
    exec: Arc<Executor>,

    /// Connection to ingester(s); `None` when running without ingesters.
    ingester_connection: Option<Arc<dyn IngesterConnection>>,

    /// Query log.
    query_log: Arc<QueryLog>,

    /// Semaphore that limits the number of namespaces in used at the time by the query subsystem.
    ///
    /// This should be a 1-to-1 relation to the number of active queries.
    ///
    /// If the same namespace is requested twice for different queries, it is counted twice.
    query_execution_semaphore: Arc<InstrumentedAsyncSemaphore>,

    /// Chunk prune metrics.
    prune_metrics: Arc<PruneMetrics>,

    /// DataFusion config.
    datafusion_config: Arc<HashMap<String, String>>,
}
#[async_trait]
impl QueryNamespaceProvider for QuerierDatabase {
    type Db = QuerierNamespace;

    /// Resolve a namespace by name (thin wrapper around `Self::namespace`).
    async fn db(
        &self,
        name: &str,
        span: Option<Span>,
        include_debug_info_tables: bool,
    ) -> Option<Arc<Self::Db>> {
        self.namespace(name, span, include_debug_info_tables).await
    }

    /// Acquire one query-execution permit, waiting until one is free.
    async fn acquire_semaphore(&self, span: Option<Span>) -> InstrumentedAsyncOwnedSemaphorePermit {
        Arc::clone(&self.query_execution_semaphore)
            .acquire_owned(span)
            .await
            .expect("Semaphore should not be closed by anyone")
    }
}
impl QuerierDatabase {
    /// The maximum value for `max_concurrent_queries` that is allowed.
    ///
    /// This limit exists because [`tokio::sync::Semaphore`] has an internal limit and semaphore
    /// creation beyond that will panic. The tokio limit is not exposed though so we pick a
    /// reasonable but smaller number.
    pub const MAX_CONCURRENT_QUERIES_MAX: usize = u16::MAX as usize;

    /// Create new database.
    ///
    /// # Panics
    /// Panics when `max_concurrent_queries` exceeds
    /// [`Self::MAX_CONCURRENT_QUERIES_MAX`].
    pub async fn new(
        catalog_cache: Arc<CatalogCache>,
        metric_registry: Arc<metric::Registry>,
        exec: Arc<Executor>,
        ingester_connection: Option<Arc<dyn IngesterConnection>>,
        max_concurrent_queries: usize,
        datafusion_config: Arc<HashMap<String, String>>,
    ) -> Result<Self, Error> {
        assert!(
            max_concurrent_queries <= Self::MAX_CONCURRENT_QUERIES_MAX,
            // NOTE(review): the odd casing `max_concurrent_queries_MAX` is
            // pinned by the should_panic test below — don't "fix" it alone.
            "`max_concurrent_queries` ({}) > `max_concurrent_queries_MAX` ({})",
            max_concurrent_queries,
            Self::MAX_CONCURRENT_QUERIES_MAX,
        );
        let backoff_config = BackoffConfig::default();
        let chunk_adapter = Arc::new(ChunkAdapter::new(
            Arc::clone(&catalog_cache),
            Arc::clone(&metric_registry),
        ));
        let query_log = Arc::new(QueryLog::new(QUERY_LOG_SIZE, catalog_cache.time_provider()));
        let semaphore_metrics = Arc::new(AsyncSemaphoreMetrics::new(
            &metric_registry,
            &[("semaphore", "query_execution")],
        ));
        let query_execution_semaphore =
            Arc::new(semaphore_metrics.new_semaphore(max_concurrent_queries));
        let prune_metrics = Arc::new(PruneMetrics::new(&metric_registry));
        Ok(Self {
            backoff_config,
            catalog_cache,
            chunk_adapter,
            exec,
            ingester_connection,
            query_log,
            query_execution_semaphore,
            prune_metrics,
            datafusion_config,
        })
    }

    /// Get namespace if it exists.
    ///
    /// This will await the internal namespace semaphore. Existence of namespaces is checked AFTER
    /// a semaphore permit was acquired since this lowers the chance that we obtain stale data.
    pub async fn namespace(
        &self,
        name: &str,
        span: Option<Span>,
        include_debug_info_tables: bool,
    ) -> Option<Arc<QuerierNamespace>> {
        let span_recorder = SpanRecorder::new(span);
        let name = Arc::from(name.to_owned());
        let ns = self
            .catalog_cache
            .namespace()
            .get(
                Arc::clone(&name),
                // we have no specific need for any tables or columns at this point, so nothing to cover
                &[],
                span_recorder.child_span("cache GET namespace schema"),
            )
            .await?;
        Some(Arc::new(QuerierNamespace::new(QuerierNamespaceArgs {
            chunk_adapter: Arc::clone(&self.chunk_adapter),
            ns,
            name,
            exec: Arc::clone(&self.exec),
            ingester_connection: self.ingester_connection.clone(),
            query_log: Arc::clone(&self.query_log),
            prune_metrics: Arc::clone(&self.prune_metrics),
            datafusion_config: Arc::clone(&self.datafusion_config),
            include_debug_info_tables,
        })))
    }

    /// Return all namespaces this querier knows about
    pub async fn namespaces(&self) -> Vec<Namespace> {
        let catalog = &self.catalog_cache.catalog();
        // Retry the listing forever; soft-deleted namespaces are excluded.
        Backoff::new(&self.backoff_config)
            .retry_all_errors("listing namespaces", || async {
                catalog
                    .repositories()
                    .await
                    .namespaces()
                    .list(SoftDeletedRows::ExcludeDeleted)
                    .await
            })
            .await
            .expect("retry forever")
    }

    /// Return connection to ingester(s) to get and aggregate information from them
    pub fn ingester_connection(&self) -> Option<Arc<dyn IngesterConnection>> {
        self.ingester_connection.clone()
    }

    /// Executor
    pub(crate) fn exec(&self) -> &Executor {
        &self.exec
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::create_ingester_connection_for_testing;
    use iox_tests::TestCatalog;
    use tokio::runtime::Handle;

    /// Constructing with one-past-the-limit concurrency must panic with the
    /// exact message produced by the assert in `QuerierDatabase::new`.
    #[tokio::test]
    #[should_panic(
        expected = "`max_concurrent_queries` (65536) > `max_concurrent_queries_MAX` (65535)"
    )]
    async fn test_semaphore_limit_is_checked() {
        let catalog = TestCatalog::new();
        let catalog_cache = Arc::new(CatalogCache::new_testing(
            catalog.catalog(),
            catalog.time_provider(),
            catalog.metric_registry(),
            catalog.object_store(),
            &Handle::current(),
        ));
        QuerierDatabase::new(
            catalog_cache,
            catalog.metric_registry(),
            catalog.exec(),
            Some(create_ingester_connection_for_testing()),
            QuerierDatabase::MAX_CONCURRENT_QUERIES_MAX.saturating_add(1),
            Arc::new(HashMap::default()),
        )
        .await
        .unwrap();
    }

    /// Lookup succeeds only for namespaces that exist in the catalog.
    #[tokio::test]
    async fn test_namespace() {
        let catalog = TestCatalog::new();
        let db = new_db(&catalog).await;
        catalog.create_namespace_1hr_retention("ns1").await;
        assert!(db.namespace("ns1", None, true).await.is_some());
        assert!(db.namespace("ns2", None, true).await.is_none());
    }

    /// `namespaces()` returns every (non-deleted) namespace in the catalog.
    #[tokio::test]
    async fn test_namespaces() {
        let catalog = TestCatalog::new();
        let db = new_db(&catalog).await;
        catalog.create_namespace_1hr_retention("ns1").await;
        catalog.create_namespace_1hr_retention("ns2").await;
        let mut namespaces = db.namespaces().await;
        // Listing order is not asserted by the API; sort before comparing.
        namespaces.sort_by_key(|ns| ns.name.clone());
        assert_eq!(namespaces.len(), 2);
        assert_eq!(namespaces[0].name, "ns1");
        assert_eq!(namespaces[1].name, "ns2");
    }

    /// Helper: a `QuerierDatabase` wired to the given test catalog.
    async fn new_db(catalog: &Arc<TestCatalog>) -> QuerierDatabase {
        let catalog_cache = Arc::new(CatalogCache::new_testing(
            catalog.catalog(),
            catalog.time_provider(),
            catalog.metric_registry(),
            catalog.object_store(),
            &Handle::current(),
        ));
        QuerierDatabase::new(
            catalog_cache,
            catalog.metric_registry(),
            catalog.exec(),
            Some(create_ingester_connection_for_testing()),
            QuerierDatabase::MAX_CONCURRENT_QUERIES_MAX,
            Arc::new(HashMap::default()),
        )
        .await
        .unwrap()
    }
}
|
use serde::{Deserialize, Serialize};
use wasm_bindgen::prelude::*;
/// Incoming IPC message requesting a canvas screenshot.
#[derive(Debug, Default, Serialize, Deserialize)]
struct MetaScreenshot {
    pub id: String,
    // Defaults to `false` when absent from the payload.
    #[serde(default)]
    pub preview: bool,
}

/// IPC reply carrying the screenshot as a data URL string.
#[derive(Debug, Default, Serialize, Deserialize)]
struct MetaScreenshotResponse {
    pub id: String,
    // Canvas contents as returned by `HtmlCanvasElement::to_data_url`.
    pub data: String,
    #[serde(default)]
    pub preview: bool,
}
/// Build the IPC callback that answers `"screenshot"` requests with the
/// canvas contents encoded as a data URL.
///
/// Any non-screenshot message, canvas failure, or (de)serialization error
/// results in `None` (no reply).
#[cfg(feature = "composite-renderer")]
pub fn ipc_on_screenshot(
    canvas: web_sys::HtmlCanvasElement,
) -> Box<oxygengine_backend_web::app::WebIpcCallback> {
    Box::new(move |data, _origin| {
        if let Ok(meta) = data.into_serde::<MetaScreenshot>() {
            if meta.id == "screenshot" {
                if let Ok(data) = canvas.to_data_url() {
                    if let Ok(result) = JsValue::from_serde(&MetaScreenshotResponse {
                        id: meta.id,
                        data,
                        preview: meta.preview,
                    }) {
                        return Some(result);
                    }
                }
            }
        }
        None
    })
}
|
//https://leetcode.com/problems/minimum-operations-to-make-the-array-increasing/submissions/
impl Solution {
pub fn min_operations(mut nums: Vec<i32>) -> i32 {
let mut num_of_operations = 0;
for i in 1..nums.len() {
num_of_operations += 0.max(nums[i-1] - nums[i] + 1);
nums[i] += 0.max(nums[i-1] - nums[i] + 1);
}
num_of_operations
}
} |
/// Convert a temperature from degrees Fahrenheit to degrees Celsius.
///
/// Uses C = (F - 32) / 1.8; the trailing `return` was replaced by the
/// idiomatic tail expression.
fn fahrenheit_to_celsius(value: f32) -> f32 {
    (value - 32.0) / 1.8
}
fn main() {
let fahrenheit: f32 = 96.0;
println!("{} degrees fahrenheit is {} degrees celsius!", fahrenheit, fahrenheit_to_celsius(fahrenheit));
} |
// Module-system demo.
mod front_of_house {
    pub mod hosting {
        pub fn add_to_wait_list() {}
        fn seat_at_table() {
        }
    }

    mod serving {
        fn take_order() {}
        pub fn serve_order() {}
        fn take_payment() {}
    }

    pub mod back_of_house {
        fn fix_incorrect_order() {
            cook_order();
            // Call a function of the parent module via the `super` keyword
            // (relative path).
            super::server_order();
        }
        fn cook_order() {}

        pub struct Breakfast {
            pub toast: String,
            // Private field: not settable from outside this module.
            seasonal_fruit: String,
        }

        impl Breakfast {
            pub fn summer(toast: &str) -> Breakfast {
                Breakfast {
                    toast: String::from(toast),
                    seasonal_fruit: String::from("peaches"),
                }
            }
        }

        pub fn create_breakfast(toast: &str, fruit: String) -> Breakfast {
            Breakfast {
                toast: String::from(toast),
                seasonal_fruit: fruit,
            }
        }

        pub enum Appetizer {
            Soup,
            Salad,
        }
    }

    fn server_order() {}
}
// When importing a path via `use`, the import must come after the declaration.
use front_of_house::{hosting, back_of_house};
pub fn eat_at_restaurant() {
    // A parent module cannot call a child module's items unless the child
    // marks them `pub` to expose the path(s).
    hosting::add_to_wait_list();

    let mut meal = back_of_house::Breakfast::summer("Rye");
    // Modify the public field.
    meal.toast = String::from("Wheat");
    println!("like {}, please.", meal.toast);
    // Compile error (private field):
    // meal.season_fruit = String::from("blueberries");
    let m = back_of_house::create_breakfast("tus", String::from("banana"));
    // all public
    let m = back_of_house::Appetizer::Salad;
    hosting::add_to_wait_list();
    hosting::add_to_wait_list();
}
use rand::Rng;
use std::{cmp::Ordering, io, collections};
/// Demo: generate a random integer in [1, 101) and print it.
fn test_rng() {
    let number = rand::thread_rng().gen_range(1..101);
    println!("生成的随机数为: {}", number);
}
extern crate quickersort;
pub mod statests;
/// Mersenne Twister (MT19937-style) generator state: 624 words plus the
/// cursor into the current batch of tempered outputs.
pub struct MT {
    state : [u64; 624],
    index: u32
}
impl MT {
    /// New, unseeded generator: all-zero state, index 0.
    ///
    /// NOTE(review): with index 0, `get` will temper the all-zero state
    /// directly — only `index == 625` triggers the default seed below. Call
    /// `seed` before use.
    pub fn new() -> MT {
        MT {state: [0;624], index: 0}
    }

    /// Initialise the 624-word state from `seed` using the MT19937
    /// recurrence (multiplier 1_812_433_253), masking each word to 32 bits.
    pub fn seed(self: &mut MT, seed: u64) {
        self.state[0] = seed & 0xffffffff ;
        for i in 1..624 {
            let x = i as u64;
            let m :u64 = 1_812_433_253;
            self.state[i] = m.wrapping_mul((self.state[i-1] ^
                (self.state[i-1] >> 30))) + x;
            // Keep only the low 32 bits of each state word.
            self.state[i] = self.state[i] & 0xffffffff;
        }
        // Force a full batch regeneration on the next `get`.
        self.index = 624
    } // end of seed

    /// Produce the next tempered 32-bit output (carried in a u64),
    /// regenerating the whole state batch when it is exhausted.
    pub fn get(self: &mut MT) -> u64 {
        let lower_mask : u64 = 0x7fffffff;
        let upper_mask : u64 = 0x80000000;
        let a : u64 = 0x9908b0df;
        let m = 624;
        let n = 397;
        // XOR with `a` only when the twisted value is odd.
        let k : [u64; 2] = [0, a];
        if self.index >= 624 {
            //generate numbers
            if self.index == 625 {
                self.seed(5489);
            }
            // First stretch: partner word x+n is still inside the array.
            for x in 0..m-n {
                let y = (self.state[x] & upper_mask) | (self.state[x+1 as usize] & lower_mask);
                self.state[x] = self.state[x+n] ^ (y >> 1) ^ k[(y&0x1) as usize];
            }
            let neg: i32 = ( n as i32 - m as i32);
            // Second stretch: the partner index wraps (x + n - 624).
            for x in m-n..m-1{
                let ix = x as i32;
                let y = (self.state[x] & upper_mask) | (self.state[x+1] & lower_mask);
                self.state[x] = self.state[(ix+neg) as usize] ^ (y >> 1) ^ k[(y & 0x1) as usize];
            }
            // Last word pairs with state[0].
            let y = (self.state[m-1] & upper_mask) | (self.state[0] & lower_mask);
            self.state[m-1] = self.state[n-1] ^ (y >> 1) ^ k[(y&0x1) as usize];
            self.index = 0;
        }
        // Tempering
        let mut l:u64 = self.state[self.index as usize];
        self.index = self.index + 1;
        l = l ^ (l >> 11);
        l = l ^ ((l << 7) & 0x9d2c5680);
        l = l ^ ((l << 15) & 0xefc60000);
        l = l ^ (l >> 18);
        l
    }

    /// Next output scaled into [0, 1) by dividing by 2^32.
    pub fn get_real(self: &mut MT) -> f32 {
        let g = self.get();
        (g as f32) * (1.0f32/4294967296.0f32)
    }
}
/// Expose the generator as an endless stream of raw outputs.
impl Iterator for MT {
    type Item = u64;

    /// Never yields `None`; each call advances the generator once.
    fn next(&mut self) -> Option<u64> {
        Some(self.get())
    }
}
|
extern crate fuel_requirements;
use fuel_requirements::*;
/// Sum the fuel requirements over every entry in the puzzle input.
fn main() {
    let sum: isize =
        data::data().into_iter()
            // `Fuel` is used as a constructor function: each input value is
            // expanded into its fuel amounts, all flattened into one stream
            // (presumably an iterator type — see the fuel_requirements crate).
            .flat_map(Fuel)
            .sum();
    println!("{}", sum);
}
|
extern crate seedlink;
extern crate miniseed;
use seedlink::SeedLinkClient;
/// Network smoke test against the public IRIS seedlink server
/// (`#[ignore]`d by default because it needs live network access).
#[test]
#[ignore]
fn read() {
    let mut slc = SeedLinkClient::new("rtserve.iris.washington.edu",18000);
    let mut data = vec![0u8;2048];
    // Say Hello
    slc.hello().expect("bad write");
    // Read Response
    let n = slc.read(&mut data).expect("bad read");
    let v = data[..n].to_vec();
    let s = String::from_utf8(v).expect("Found invalid UTF-8");
    println!("data: {:?}", s);
    // Initiate Data Stream
    slc.start().expect("bad write");
    let mut buf = vec![];
    // Read Response
    loop {
        println!("Waiting on read ...");
        let n = slc.read(&mut data).expect("bad read");
        buf.extend(data[..n].iter().cloned());
        // Accumulate until at least one full record is buffered
        // (presumably 8-byte seedlink header + 512-byte record — confirm
        // against the seedlink crate).
        if buf.len() >= 520 {
            // Parse data
            let (num, rec) = seedlink::parse(&mut buf).unwrap();
            println!("{}: {}", num, rec);
            break;
        }
    }
    // Say Good bye
    slc.bye().expect("bad bye");
}
|
use std::collections::HashMap;
use std::convert::TryFrom;
use anyhow::anyhow;
use anyhow::ensure;
use anyhow::Result;
use insideout::InsideOut;
use rusqlite::types::ToSql;
use rusqlite::OptionalExtension;
use crate::nexus::AttachmentStatus;
use crate::nexus::Doc;
/// A string-interning cache: the table-name prefix plus a map from each
/// interned string to its database row id.
type Cache = (&'static str, HashMap<String, i64>);

/// Incrementally writes `Doc` records into a sqlite database, interning
/// repeated strings (group, artifact, ...) into per-kind side tables.
pub struct DbBuilder<'t> {
    conn: &'t rusqlite::Connection,
    group_cache: Cache,
    artifact_cache: Cache,
    name_cache: Cache,
    desc_cache: Cache,
    packaging_cache: Cache,
    classifier_cache: Cache,
}
impl<'t> DbBuilder<'t> {
    /// Set up a builder: creates the interning tables and pre-seeds the
    /// caches with the bundled lists of common values.
    pub fn new(conn: &rusqlite::Connection) -> Result<DbBuilder> {
        let mut us = DbBuilder {
            conn,
            // Capacities sized for a large index (presumably tuned by
            // observation — confirm before changing).
            group_cache: ("group", HashMap::with_capacity(40 * 1_024)),
            artifact_cache: ("artifact", HashMap::with_capacity(200 * 1_024)),
            name_cache: ("name", HashMap::with_capacity(40 * 1_024)),
            desc_cache: ("desc", HashMap::with_capacity(40 * 1_024)),
            packaging_cache: ("packaging", HashMap::with_capacity(1_024)),
            classifier_cache: ("classifier", HashMap::with_capacity(1_024)),
        };
        us.create_string_tables()?;
        us.write_examples()?;
        Ok(us)
    }

    /// Create one `{name}_names(id, name)` interning table per cache.
    pub fn create_string_tables(&self) -> Result<()> {
        for (name, _cache) in &[
            &self.group_cache,
            &self.artifact_cache,
            &self.name_cache,
            &self.desc_cache,
            &self.packaging_cache,
            &self.classifier_cache,
        ] {
            self.conn.execute(
                &format!(
                    r"
create table if not exists {name}_names (
id integer primary key,
name varchar not null unique
)"
                ),
                [],
            )?;
        }
        Ok(())
    }

    /// Pre-seed the caches/tables with the most common values, bundled into
    /// the binary at compile time from the `top/` lists.
    #[rustfmt::skip]
    pub fn write_examples(&mut self) -> Result<()> {
        write_examples(self.conn, &mut self.group_cache, include_str!("top/top_group.txt"))?;
        write_examples(self.conn, &mut self.artifact_cache, include_str!("top/top_artifact.txt"))?;
        write_examples(self.conn, &mut self.classifier_cache, include_str!("top/top_classifier.txt"))?;
        write_examples(self.conn, &mut self.packaging_cache, include_str!("top/top_packaging.txt"))?;
        write_examples(self.conn, &mut self.name_cache, include_str!("top/top_name.txt"))?;
        write_examples(self.conn, &mut self.desc_cache, include_str!("top/top_desc.txt"))?;
        Ok(())
    }

    /// Insert one `Doc` into the `versions` table, interning its string
    /// fields first.
    pub fn add(&mut self, doc: &Doc) -> Result<()> {
        let group_name = string_write(self.conn, &mut self.group_cache, &doc.id.group)?;
        let artifact_name = string_write(self.conn, &mut self.artifact_cache, &doc.id.artifact)?;
        let name_name = option_write(self.conn, &mut self.name_cache, doc.name.as_ref())?;
        let desc_name = option_write(self.conn, &mut self.desc_cache, doc.description.as_ref())?;
        // Packaging and extension deliberately share one interning cache.
        let shared_cache = &mut self.packaging_cache;
        let pkg_name = option_write(self.conn, shared_cache, Some(&doc.object_info.packaging))?;
        let ext_name = string_write(self.conn, shared_cache, &doc.object_info.extension)?;
        let classifier_name = option_write(
            self.conn,
            &mut self.classifier_cache,
            doc.id.classifier.as_ref(),
        )?;
        self.conn
            .prepare_cached(
                r"
insert into versions
(
group_id,
artifact_id,
version,
classifier_id,
extension_id,
packaging_id,
last_modified,
size,
checksum,
source_attached,
javadoc_attached,
signature_attached,
name_id,
desc_id
) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)
",
            )?
            .insert([
                &group_name as &dyn ToSql,
                &artifact_name,
                &doc.id.version,
                &classifier_name,
                &ext_name,
                &pkg_name,
                // Millisecond timestamp stored with second precision.
                &i64::try_from(doc.object_info.last_modified / 1000)?,
                &doc.object_info.size.map(i64::try_from).inside_out()?,
                &doc.checksum.map(hex::encode),
                &attached_bool(doc.object_info.source_attached),
                &attached_bool(doc.object_info.javadoc_attached),
                &attached_bool(doc.object_info.signature_attached),
                &name_name,
                &desc_name,
            ])?;
        Ok(())
    }
}
/// Intern an optional string: `None` (or an empty/"null" value) maps to
/// `None`; anything else is interned via `string_write`.
#[inline]
fn option_write(
    conn: &rusqlite::Connection,
    cache: &mut Cache,
    val: Option<&String>,
) -> Result<Option<i64>> {
    match val {
        Some(name) if empty_filter(name.as_str()) => {
            Ok(Some(string_write(conn, cache, name)?))
        }
        _ => Ok(None),
    }
}
/// Intern `val`: return its cached row id, or insert it into the
/// `{table}_names` table. A unique-constraint failure (row already present)
/// falls back to a select of the existing id.
#[inline]
fn string_write(conn: &rusqlite::Connection, cache: &mut Cache, val: &str) -> Result<i64> {
    let (table, cache) = cache;
    if let Some(id) = cache.get(val) {
        return Ok(*id);
    }
    // Reject empty / "null" strings outright.
    ensure!(
        empty_filter(val.trim()),
        "illegal string: {}: {:?}",
        table,
        val
    );
    let new_id = match conn
        .prepare_cached(&format!("insert into {table}_names (name) values (?)"))?
        .insert([val])
    {
        Ok(id) => id,
        // Constraint violation: the name already exists, fetch its id.
        Err(rusqlite::Error::SqliteFailure(e, ref _msg))
            if rusqlite::ErrorCode::ConstraintViolation == e.code =>
        {
            conn.prepare_cached(&format!("select id from {table}_names where name=?"))?
                .query_row([val], |row| row.get(0))
                .optional()?
                .ok_or_else(|| anyhow!("constraint violation, but row didn't exist"))?
        }
        Err(e) => return Err(e.into()),
    };
    cache.insert(val.to_string(), new_id);
    Ok(new_id)
}
/// Seed a cache/table from a newline-separated list of known values,
/// stopping at the first failed write.
#[inline]
fn write_examples(
    conn: &rusqlite::Connection,
    cache: &mut Cache,
    contents: &'static str,
) -> Result<()> {
    contents
        .trim()
        .split('\n')
        .try_for_each(|line| string_write(conn, cache, line.trim()).map(|_| ()))
}
/// Map the tri-state attachment flag onto an optional boolean column value;
/// unknown availability is stored as NULL (`None`).
fn attached_bool(status: AttachmentStatus) -> Option<bool> {
    match status {
        AttachmentStatus::Unavailable => None,
        AttachmentStatus::Present => Some(true),
        AttachmentStatus::Absent => Some(false),
    }
}
/// True when `s` carries a real value: neither empty nor the literal "null".
fn empty_filter(s: &str) -> bool {
    !(s.is_empty() || s == "null")
}
|
use crate::{
error::Error::{Indeterminate, OverFlow},
Result,
};
use num::{
bigint::Sign,
integer::{gcd, ExtendedGcd},
traits::Pow,
BigInt, BigUint, Integer, One, ToPrimitive, Zero,
};
/// Raise `a` to the power `b` (`a^b`).
///
/// The early returns are pure shortcuts and now agree with the general
/// `a.pow(u)` fallback (the old code returned 0 for `a^0` and 1 for `a^1`).
///
/// # Errors
/// * `Indeterminate` when both `a` and `b` are zero (`0^0` is undefined here).
/// * `OverFlow` when `b` does not fit into a `u32` exponent.
pub fn power_iu(a: &BigInt, b: &BigUint) -> Result<BigInt> {
    if a.is_zero() && b.is_zero() {
        return Err(Indeterminate);
    }
    if b.is_zero() {
        // a^0 = 1 for any non-zero a.
        return Ok(BigInt::one());
    }
    if a.is_zero() {
        // 0^b = 0 for b > 0.
        return Ok(BigInt::zero());
    }
    if a.is_one() {
        // 1^b = 1.
        return Ok(BigInt::one());
    }
    if b.is_one() {
        // a^1 = a.
        return Ok(a.clone());
    }
    match b.to_u32() {
        None => Err(OverFlow),
        Some(u) => Ok(a.pow(u)),
    }
}
#[test]
fn test_power() {
    // Negative base with an even exponent yields a positive result.
    assert_eq!(BigInt::from(-2).pow(16u64), BigInt::from(65536));
}

#[test]
fn test_gcd() {
    assert_eq!(gcd(BigInt::from(15), BigInt::from(21)), BigInt::from(3));
}
/// Extended GCD of two integers: returns `(g, [x, y])` with
/// `a*x + b*y = g`.
pub fn extended_gcd2(a: &BigInt, b: &BigInt) -> (BigInt, Vec<BigInt>) {
    let gcd = a.extended_gcd(b);
    (gcd.gcd, vec![gcd.x, gcd.y])
}

/// n-ary extended GCD — not implemented yet; always panics after printing
/// its input.
pub fn extended_gcd_n(v: &[BigInt]) -> (BigInt, Vec<BigInt>) {
    println!("{:?}", v);
    unimplemented!()
}

// Smoke test: only prints the result, asserts nothing.
#[test]
fn test_extend_gcd() {
    let ou = extended_gcd2(&BigInt::from(314), &BigInt::from(271));
    println!("{:?}", ou)
}
/// inverse in modulo
/// $ax \equiv 1 \pmod{m}$
///
/// Returns `None` when `a` and `m` are not coprime (no inverse exists).
/// NOTE(review): `x + m` normalises a negative Bezout coefficient, but
/// yields a value larger than `m` when `x` was already positive — confirm
/// callers only need *an* inverse, not the least residue.
pub fn modulo_inverse(a: &BigInt, m: &BigInt) -> Option<BigInt> {
    let ExtendedGcd { gcd, x, .. } = a.extended_gcd(m);
    if gcd.is_one() { Some(x + m) } else { None }
}

// Smoke test: only prints the result, asserts nothing.
#[test]
fn test_modulo_inverse() {
    let ou = modulo_inverse(&BigInt::from(-5), &BigInt::from(7));
    println!("{:?}", ou)
}
/// division in modulo
/// $bx \equiv a \pmod{m}$
///
/// NOTE(review): this uses the Bezout coefficient `y` of `m` (not `x` of
/// `b`) and does not reduce the result mod `m` — verify against the
/// intended algorithm before relying on it.
pub fn modulo_division(a: &BigInt, b: &BigInt, m: &BigInt) -> Option<BigInt> {
    let ExtendedGcd { gcd, x: _, y, .. } = b.extended_gcd(m);
    if (a % &gcd).is_zero() { Some(a / &gcd * y) } else { None }
}

// Smoke test: only prints the result, asserts nothing.
#[test]
fn test_modulo_division() {
    let ou = modulo_division(&BigInt::from(42), &BigInt::from(32), &BigInt::from(98));
    println!("{:?}", ou)
}
/// $`\gcd(x, y) = 1`$
///
/// True when `x` and `y` share no common factor; takes ownership because
/// `num::integer::gcd` consumes its arguments.
pub fn is_coprime(x: BigInt, y: BigInt) -> bool {
    gcd(x, y).is_one()
}
/// Chinese remainder theorem
pub fn chinese_remainder(u: &[BigInt], m: &[BigInt]) -> Option<BigInt> {
if u.len() != m.len() {
return None;
}
let mut v = Vec::with_capacity(u.len());
for (i, (u_i, m_i)) in u.iter().zip(m.iter()).enumerate() {
let c_i = modulo_inverse(&m[0..i].iter().fold(BigInt::one(), |p, v| p * v % m_i), &m_i.clone())?;
let t = v.iter().zip(m.iter()).rev().fold(BigInt::zero(), |t, (v_j, m_j)| m_j * t + v_j % m_i);
v.push((u_i - t) * c_i % m_i);
}
let mut ret = v.pop().unwrap();
for (v_i, m_i) in v.iter().zip(m.iter()).rev() {
ret = ret * m_i + v_i;
}
return Some(ret);
}
#[test]
// Classic Sun Tzu instance: x ≡ 2 (mod 3), 3 (mod 5), 2 (mod 7) → 23.
// Prints rather than asserts.
fn test_crt() {
    let u = vec![BigInt::from(2), BigInt::from(3), BigInt::from(2)];
    let m = vec![BigInt::from(3), BigInt::from(5), BigInt::from(7)];
    let a = chinese_remainder(&u, &m).unwrap();
    println!("{:?}", a)
}
/// CRT variant with an extra parameter `d` — NOT YET IMPLEMENTED; currently
/// prints its inputs and panics via `unimplemented!()`.
pub fn chinese_remainder_d(u: &[BigInt], m: &[BigInt], d: BigInt) -> Option<BigInt> {
    println!("{:?}\n{:?}\n{}", u, m, d);
    unimplemented!()
}
|
use super::cpu;
use super::memory;
/// Top-level console: owns the CPU, which in turn owns the memory map
/// (see `new`, where memory is constructed and handed to the CPU).
pub struct NES {
    cpu: cpu::Cpu,
}
impl NES {
    /// Build a console instance backed by the given ROM image.
    pub fn new(rom: Vec<u8>) -> NES {
        let cpu = cpu::Cpu::new(memory::Memory::new(rom));
        NES { cpu }
    }

    /// Forward the power-on reset signal to the CPU.
    pub fn power_on_reset(&mut self) {
        self.cpu.power_on_reset();
    }

    /// Begin CPU execution.
    pub fn run(&mut self) {
        self.cpu.run();
    }
}
|
use serde::Deserialize;
/// Human-readable names for the weapon classes.
///
/// Implemented as `Display` rather than a direct `ToString` impl: the blanket
/// `impl<T: Display> ToString for T` keeps existing `.to_string()` callers
/// working while also enabling `format!("{}", ...)` usage.
impl std::fmt::Display for WeaponType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Map each data-file variant to the label shown to users.
        let name = match self {
            WeaponType::WeaponBow => "Bow",
            WeaponType::WeaponCatalyst => "Catalyst",
            WeaponType::WeaponClaymore => "Claymore",
            WeaponType::WeaponPole => "Polearm",
            WeaponType::WeaponSwordOneHand => "Sword",
        };
        write!(f, "{}", name)
    }
}
/// Weapon classes as named in the source data; deserialized from
/// SCREAMING_SNAKE_CASE keys (e.g. `WEAPON_BOW`) per the serde rename rule.
#[derive(Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum WeaponType {
    WeaponBow,
    WeaponCatalyst,
    WeaponClaymore,
    WeaponPole,
    WeaponSwordOneHand,
}
use std::time::SystemTime;
/// Minimal event-loop skeleton; most of its methods are unimplemented stubs.
struct EventLoop {
    // Offset applied to timestamps; currently always initialized to 0.
    initial_time_offset: i32
}
impl EventLoop {
    /// Construct an event loop with a zeroed time offset.
    pub fn new() -> EventLoop {
        EventLoop {
            initial_time_offset: 0
        }
    }

    /// Stub — no global initialization is performed yet.
    pub fn init() {
    }

    /// Stub — no teardown is performed yet.
    pub fn shutdown() {
    }

    /// NOTE(review): blocks forever in an empty `loop` — presumably a
    /// placeholder until real event polling is implemented; confirm intent.
    pub fn get_event() {
        loop {
        }
    }

    // Dispatch all pending events and return current time
    // NOTE(review): `command_execution` is currently unused and no events are
    // actually dispatched; only the current wall-clock time is returned.
    pub fn run_event_loop(command_execution: bool) -> SystemTime {
        return SystemTime::now();
    }

    /// Stub — internal hook for fetching a single raw event.
    fn get_real_event() {
    }

    /// Stub — always reports the port as successfully initialized.
    fn init_port() -> bool {
        true
    }
}
|
use crate::compiling::v1::assemble::prelude::*;
/// Compile a `yield` expression.
impl Assemble for ast::ExprYield {
    fn assemble(&self, c: &mut Compiler<'_>, needs: Needs) -> CompileResult<Asm> {
        let span = self.span();
        log::trace!("ExprYield => {:?}", c.source.source(span));

        // `yield expr` evaluates its operand first; a bare `yield` produces
        // the unit value instead.
        match &self.expr {
            Some(expr) => {
                expr.assemble(c, Needs::Value)?.apply(c)?;
                c.asm.push(Inst::Yield, span);
            }
            None => c.asm.push(Inst::YieldUnit, span),
        }

        // Discard the resumed value when the surrounding context ignores it.
        if !needs.value() {
            c.asm.push(Inst::Pop, span);
        }

        Ok(Asm::top(span))
    }
}
|
//! An implementation of the segmented sieve of Eratosthenes.
use std::cmp::min;
use std::slice::from_raw_parts_mut;
use iterator::SieveIterator;
use segment::set_off;
use wheel::Wheel30;
// Integers represented per `u64` word of sieve data: the 30-wheel keeps 8
// candidate residues per 30 integers, so one 64-bit word covers 240 integers.
const MODULUS: u64 = 240;
// `u64` words processed per segment.
// NOTE(review): 32768 words = 256 KiB per segment — presumably sized for
// cache friendliness; confirm before changing.
const SEGMENT_LEN: usize = 32768;
// Number of integers covered by one segment.
const SEGMENT_SIZE: u64 = MODULUS * SEGMENT_LEN as u64;
/// Returns a sequence of `u64`s encoding the primes up to the square root of the given limit, but
/// excluding 2, 3 and 5.
/// Returns a sequence of `u64`s encoding the primes up to the square root of the given limit, but
/// excluding 2, 3 and 5.
fn small_primes(limit: u64) -> Vec<u64> {
    // Start by allocating enough `u64`s to hold information about the numbers up to the required
    // square root.
    let sqrt = (limit as f64).sqrt() as u64;
    let mut sieve = vec![!0; (sqrt / MODULUS + 1) as usize];
    // Actual bound covered: the sieve rounds up to a whole number of words.
    let small_limit = 240 * sieve.len() as u64;
    // Correct the first entry of the sieve to only contain 1's in positions corresponding to true
    // prime numbers - this just speeds things up a little as it prevents the iterator from
    // accidentally considering non-primes early in its life.
    sieve[0] = 0b1111100100111101110110111011011001111110111011111101111111111110;
    // Iterate over the prime numbers held in the sieve and cross of multiples of each one.
    // Since we cannot usually have a mutable borrow and an immutable borrow to the sieve at the
    // same time, there's some unsafe code here to do just that, and we'll make a promise to the
    // compiler that we're not doing anything nasty 😮
    //
    // NOTE(review): that promise is not one the language accepts — holding a `&mut` slice and a
    // shared borrow of the same buffer at once violates Rust's aliasing rules and is undefined
    // behavior even if it appears to work. Restructuring to index-based writes (or splitting the
    // iteration from the marking) would make this sound; flagging rather than rewriting here.
    unsafe {
        let sieve_mut = from_raw_parts_mut(sieve.as_mut_ptr(), sieve.len());
        let iter = SieveIterator::new(&sieve);
        for prime in iter {
            // For each prime p, we cross off the multiples of it larger than p^2 which are not
            // multiples of 2, 3 or 5.
            let mut wheel = Wheel30::new(prime, prime);
            let mut multiple = prime * prime;
            // Once p^2 exceeds the bound, no later prime can mark anything.
            if multiple >= small_limit {
                break;
            }
            while multiple < small_limit {
                set_off(sieve_mut, multiple);
                multiple += wheel.next_diff();
            }
        }
    }
    sieve
}
/// Sieve primes up to the given limit using a segmented sieve of Eratosthenes, and return a
/// vector of `u64`s encoding the primes.
/// Sieve primes up to the given limit using a segmented sieve of Eratosthenes, and return a
/// vector of `u64`s encoding the primes.
pub fn segmented_sieve(limit: u64) -> Vec<u64> {
    // First, we need to sieve the primes up to the square root of the given limit - these will be
    // the primes whose multiples are crossed off the sieve.
    // Round the limit up to a whole multiple of MODULUS so segments align with
    // word boundaries (so slightly more than `limit` may be sieved).
    let lim = limit + MODULUS - (limit % MODULUS);
    let small_primes = small_primes(lim);
    let mut small_primes_iter = SieveIterator::new(&small_primes);
    // Here's the array in which we'll do our sieving of the segments, and a vector in which we'll
    // store the final results.
    let mut segment = [!0; SEGMENT_LEN];
    // Clear the bit corresponding to 1, which is not prime.
    segment[0] ^= 1;
    let mut segments = Vec::with_capacity((lim / MODULUS) as usize);
    // Here are the indices into the segment for the next multiple of each prime whose multiples
    // are being crossed off - the first entry is the index, and the second entry is a wheel which
    // generates the differences between successive indices.
    let mut next_indices = Vec::<(u64, Wheel30)>::new();
    // Iterate over segments for as long as we still have more sieving to do.
    let mut low = 0;
    while low <= lim {
        // Now, add the new sieving primes which we will need for this segment.
        let high = min(low + SEGMENT_SIZE, lim);
        let segment_size = high - low;
        // Each prime is pushed before the `>= high` check, so the first prime
        // whose square lies beyond this segment is still retained (its index
        // simply stays out of range until a later segment reaches it).
        while let Some(prime) = small_primes_iter.next() {
            next_indices.push((prime * prime - low, Wheel30::new(prime, prime)));
            if prime * prime >= high {
                break;
            }
        }
        // Sieve the current segment
        for &mut (ref mut index, ref mut wheel) in &mut next_indices {
            while *index < segment_size {
                set_off(&mut segment, *index);
                *index += wheel.next_diff();
            }
            // Rebase the index so it is relative to the start of the next segment.
            *index -= segment_size;
        }
        // Store the result of this pass and prepare for the next pass.
        segments.extend_from_slice(
            if segment_size < SEGMENT_SIZE {
                // Final, partial segment: only copy the words actually sieved.
                &segment[..(segment_size / MODULUS) as usize]
            } else {
                &segment
            }
        );
        low += SEGMENT_SIZE;
        // Reset all bits for the next pass.
        segment = [!0; SEGMENT_LEN];
    }
    segments
}
#[test]
// The sieve rounds its bound up to whole words (240 integers each), so for
// limit 1_000_000 (sqrt = 1000) the expected list runs past 1000, up to 1193.
// 2, 3 and 5 are excluded by the wheel encoding.
fn test_small_primes() {
    let sieve = small_primes(1000000);
    let primes = SieveIterator::new(&sieve).collect::<Vec<u64>>();
    assert_eq!(primes,
               vec![7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79,
                    83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157,
                    163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239,
                    241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331,
                    337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421,
                    431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503, 509,
                    521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, 607, 613,
                    617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709,
                    719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821,
                    823, 827, 829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911, 919,
                    929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, 1019,
                    1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093,
                    1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163, 1171, 1181, 1187,
                    1193]);
}
#[cfg(test)]
mod tests {
    use super::*;
    use iterator::SieveIterator;

    #[test]
    // All primes reported for limit 1000. The sieve rounds the limit up to a
    // multiple of MODULUS (240), so primes up to 1193 appear; 2, 3 and 5 are
    // excluded by the wheel encoding.
    fn test_small_segmented_sieve() {
        let sieve = segmented_sieve(1000);
        let primes = SieveIterator::new(&sieve).collect::<Vec<u64>>();
        assert_eq!(primes,
                   vec![7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79,
                        83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157,
                        163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239,
                        241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331,
                        337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421,
                        431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503, 509,
                        521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, 607, 613,
                        617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709,
                        719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821,
                        823, 827, 829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911, 919,
                        929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, 1019,
                        1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093,
                        1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163, 1171, 1181, 1187,
                        1193]);
    }

    #[test]
    // Spot-check only the last 100 primes of a 50-million sieve against a
    // known-good list (exercises multiple full segments).
    fn test_large_segmented_sieve() {
        let sieve = segmented_sieve(50000000);
        let primes = SieveIterator::new(&sieve).collect::<Vec<u64>>();
        assert_eq!(primes[primes.len() - 100..].to_vec(),
                   vec![49998539, 49998563, 49998587, 49998593, 49998601, 49998617, 49998623,
                        49998653, 49998659, 49998661, 49998727, 49998743, 49998749, 49998763,
                        49998779, 49998791, 49998811, 49998821, 49998827, 49998841, 49998857,
                        49998869, 49998911, 49998913, 49998917, 49998919, 49998931, 49998947,
                        49998953, 49998983, 49999031, 49999069, 49999111, 49999121, 49999133,
                        49999151, 49999177, 49999207, 49999231, 49999253, 49999267, 49999289,
                        49999291, 49999297, 49999307, 49999349, 49999351, 49999361, 49999387,
                        49999403, 49999409, 49999423, 49999427, 49999441, 49999463, 49999471,
                        49999489, 49999529, 49999553, 49999561, 49999589, 49999597, 49999603,
                        49999613, 49999619, 49999627, 49999637, 49999639, 49999643, 49999667,
                        49999673, 49999693, 49999699, 49999711, 49999739, 49999751, 49999753,
                        49999757, 49999759, 49999777, 49999783, 49999801, 49999819, 49999843,
                        49999847, 49999853, 49999877, 49999883, 49999897, 49999903, 49999921,
                        49999991, 50000017, 50000021, 50000047, 50000059, 50000063, 50000101,
                        50000131, 50000141]);
    }
}
/*
Project Euler Problem 7:
By listing the first six prime numbers: 2, 3, 5, 7, 11, and 13, we can see that the 6th prime is 13.
What is the 10 001st prime number?
*/
/// Trial division using the 6k±1 optimization: after rejecting multiples of
/// 2 and 3, only candidates of the form 6k-1 and 6k+1 need checking
/// (alternating steps of 2 and 4, via `w = 6 - w`).
fn is_prime(x: i64) -> bool {
    // 0, 1 and negatives are not prime. Without this guard the original code
    // returned `true` for 1 (and for negative numbers, since the trial loop
    // never runs when i*i > x).
    if x < 2 {
        return false;
    }
    if x == 2 || x == 3 {
        return true;
    } else if x % 2 == 0 || x % 3 == 0 {
        return false;
    }
    // i walks 5, 7, 11, 13, 17, 19, ... — exactly the 6k±1 candidates.
    let (mut i, mut w) = (5i64, 2i64);
    while i * i <= x {
        if x % i == 0 {
            return false;
        }
        i += w;
        w = 6 - w;
    }
    true
}
fn main() {
let mut n = 0;
for i in 1.. {
if is_prime(i) {
n += 1;
if n == 10001 {
println!("{:?}", i);
return;
}
}
}
} |
//! Rust Builder for [Mongo DB](https://github.com/mongo-db/mongo).
//!
//! This crate is intended for use with
//! [rub](https://github.com/rust-builder/rub).
//!
//! If you don't have `rub` installed, visit https://github.com/rust-builder/rub
//! for installation instructions.
//!
//! # Rub Options
//! <pre>
//! $ rub mongo --help
//! mongo - Rust Builder
//!
//! Usage:
//! rub mongo [options] [<lifecycle>...]
//! rub mongo (-h | --help)
//! rub mongo --version
//!
//! Options:
//! -d --dir <dir> Set the projects directory.
//! -b --branch <branch> Set the build branch. [default: master]
//! -t --enable-test Enable tests.
//! -p --prefix <prefix> Set the installation prefix. [default: /usr/local]
//! -u --url <url> Set the SCM URL.
//! -h --help Show this usage.
//! --version Show rust-rub version.
//! </pre>
//!
//! # Examples
//! ```
//! # extern crate buildable; extern crate mongo_rub; fn main() {
//! use buildable::Buildable;
//! use mongo_rub::MongoRub;
//!
//! // To run lifecycle methods outside of rub...
//! let mut mr = MongoRub::new();
//! let b = Buildable::new(&mut mr, &vec!["rub".to_string(),
//! "mongo".to_string(),
//! "--version".to_string()]);
//! assert_eq!(Ok(0), b.version());
//! # }
//! ```
#![experimental]
#![allow(unstable)]
extern crate buildable;
extern crate commandext;
extern crate docopt;
extern crate regex;
extern crate "rustc-serialize" as rustc_serialize;
extern crate scm;
extern crate utils;
use buildable::{Buildable,BuildConfig,LifeCycle};
use commandext::{CommandExt,to_res};
use docopt::Docopt;
use regex::Regex;
use scm::git::GitCommand;
use std::default::Default;
use std::io::fs;
use std::io::fs::PathExtensions;
use utils::usable_cores;
use utils::empty::to_opt;
/// docopt usage text: drives both argument parsing and `--help` output.
/// NOTE: this string is parsed at runtime by docopt — its wording and layout
/// are load-bearing, not just documentation.
static USAGE: &'static str = "mongo - Rust Builder
Usage:
rub mongo [options] [<lifecycle>...]
rub mongo (-h | --help)
rub mongo --version
Options:
-d --dir <dir> Set the projects directory.
-b --branch <branch> Set the build branch. [default: master]
-t --enable-test Enable tests.
-p --prefix <prefix> Set the installation prefix. [default: /usr/local]
-u --url <url> Set the SCM URL.
-h --help Show this usage.
-l --lint Run the MongoDB linter.
--mst=<mst> MongoDB scons targets.
core, tools, and all supported.
--smoke Runs the “dbtest” test.
--smokec Runs the C++ unit tests.
--smokej Runs (some of!) the Javascript integration tests.
--version Show rust-rub version.";
include!(concat!(env!("OUT_DIR"), "/version.rs"));
/// Command-line arguments decoded by docopt from `USAGE`; field names follow
/// docopt's `flag_*` / `arg_*` naming convention.
#[derive(RustcDecodable)]
struct Args {
    flag_dir: String,          // --dir
    flag_branch: String,       // --branch (default "master")
    flag_enable_test: bool,    // --enable-test
    flag_prefix: String,       // --prefix (default "/usr/local")
    flag_url: String,          // --url
    flag_help: bool,           // --help
    flag_lint: bool,           // --lint
    flag_mst: String,          // --mst, comma-separated scons targets
    flag_smoke: bool,          // --smoke
    flag_smokec: bool,         // --smokec
    flag_smokej: bool,         // --smokej
    flag_version: bool,        // --version
    arg_lifecycle: Vec<String>, // positional <lifecycle>... entries
}
/// Mongo specific configuration for the `Buildable` lifecycle methods.
#[experimental]
#[derive(Clone,Default)]
pub struct MongoRub {
    // Shared lifecycle configuration (dir, project, branch, lifecycle list).
    config: BuildConfig,
    // Installation prefix (--prefix).
    prefix: String,
    // SCM clone URL (--url); empty selects the default GitHub upstream.
    url: String,
    // Whether to run the MongoDB linter (--lint).
    lint: bool,
    // scons targets parsed from the comma-separated --mst value.
    targets: Vec<String>,
    // --smoke: run the "dbtest" tests.
    smoke: bool,
    // --smokec: run the C++ unit tests.
    smokec: bool,
    // --smokej: run (some of) the Javascript integration tests.
    smokej: bool,
}
impl MongoRub {
    /// Create a new default MongoRub.
    /// All fields come from the `Default` derive (empty strings/vec, false
    /// flags); real configuration happens later in `Buildable::new`.
    pub fn new() -> MongoRub {
        Default::default()
    }
}
/// Returns true when `branch` names a 2.6-series branch (`v2.6*` or `r2.6*`).
/// NOTE(review): the regex is recompiled on every call; cheap at this call
/// frequency, but could be hoisted if ever used in a hot path.
fn is_v26(branch: &str) -> bool {
    let re = Regex::new(r"^[rv]2\.6").unwrap();
    re.is_match(branch)
}
impl Buildable for MongoRub {
    /// Update the MongoRub struct after parsing the given args vector.
    ///
    /// Normally, the args vector would be supplied from the command line, but
    /// they can be supplied as in the example below as well.
    ///
    /// # Example
    /// ```
    /// # extern crate buildable; extern crate mongo_rub; fn main() {
    /// use buildable::Buildable;
    /// use mongo_rub::MongoRub;
    ///
    /// // To run lifecycle methods outside of rub...
    /// let mut mr = MongoRub::new();
    /// let b = Buildable::new(&mut mr, &vec!["rub".to_string(),
    ///                                       "mongo".to_string()]);
    /// assert_eq!(Ok(0), b.version());
    /// # }
    /// ```
    fn new(&mut self, args: &Vec<String>) -> &mut MongoRub {
        // Decode the args against USAGE; a parse error exits the process.
        let dargs: Args = Docopt::new(USAGE)
            .and_then(|d| Ok(d.help(false)))
            .and_then(|d| d.argv(args.clone().into_iter()).decode())
            .unwrap_or_else(|e| e.exit());
        self.prefix = dargs.flag_prefix;
        self.url = dargs.flag_url;
        self.lint = dargs.flag_lint;
        self.smoke = dargs.flag_smoke;
        self.smokec = dargs.flag_smokec;
        self.smokej = dargs.flag_smokej;
        // --mst is a comma-separated list of scons targets.
        if !dargs.flag_mst.is_empty() {
            let v: Vec<&str> = dargs.flag_mst.split(',').collect();
            self.targets = v.iter().map(|s| s.to_string()).collect();
        } else {
            self.targets = Vec::new();
        }
        // --version and --help short-circuit the normal build lifecycle.
        if dargs.flag_version {
            let mut cfg = BuildConfig::new();
            cfg.lifecycle(vec!["version"]);
            self.config = cfg;
        } else if dargs.flag_help {
            let mut cfg = BuildConfig::new();
            cfg.lifecycle(vec!["help"]);
            self.config = cfg;
        } else {
            let mut cfg = BuildConfig::new();
            if to_opt(dargs.flag_dir.as_slice()).is_some() {
                cfg.dir(Path::new(dargs.flag_dir.as_slice()));
            }
            cfg.project("mongo");
            if to_opt(dargs.flag_branch.as_slice()).is_some() {
                cfg.branch(dargs.flag_branch.as_slice());
            }
            cfg.test(dargs.flag_enable_test);
            // Any positional lifecycle names override the default lifecycle.
            let lc = dargs.arg_lifecycle;
            if to_opt(lc.clone()).is_some() {
                let mut mylc = Vec::new();
                for lc in lc.iter() {
                    mylc.push(lc.as_slice());
                }
                cfg.lifecycle(mylc);
            }
            self.config = cfg;
        }
        self
    }

    /// Get the `BuildConfig` associated with the `MongoRub`.
    ///
    /// # Example
    /// ```
    /// # extern crate buildable; extern crate mongo_rub; fn main() {
    /// use buildable::Buildable;
    /// use mongo_rub::MongoRub;
    ///
    /// // To run lifecycle methods outside of rub...
    /// let mut mr = MongoRub::new();
    /// let b = Buildable::new(&mut mr, &vec!["rub".to_string(),
    ///                                       "mongo".to_string()]);
    /// let bc = b.get_bc();
    /// assert_eq!("mongo", bc.get_project());
    /// # }
    /// ```
    fn get_bc(&self) -> &BuildConfig {
        &self.config
    }

    /// No lifecycle reorder is necessary for Rust.
    fn reorder<'a>(&self, lc: &'a mut Vec<LifeCycle>) -> &'a mut Vec<LifeCycle> {
        lc
    }

    /// Check for cargo dependencies.
    ///
    /// TODO: Implement
    fn chkdeps(&self) -> Result<u8,u8> {
        Ok(0)
    }

    /// Perform the git operations necessary to get the project directory ready
    /// for the rest of the build lifecycle operations.
    ///
    /// # Notes
    /// * If the project directory doesn't exist, `mongo` will be cloned from
    /// github automatically. You can adjust where it is cloned from by using
    /// the `--url` flag at the command line.
    /// * If the project does exist, the requested branch is updated via
    /// `update_branch` to prepare for the rest of the build cycle.
    fn scm(&self) -> Result<u8,u8> {
        let cfg = &self.config;
        let base = Path::new(cfg.get_dir());
        // Default to the upstream GitHub repo unless --url was given.
        let u = if self.url.is_empty() {
            "git@github.com:mongodb/mongo.git"
        } else {
            self.url.as_slice()
        };
        let mut res: Result<u8,u8> = if !base.join(cfg.get_project()).exists() {
            GitCommand::new()
                .wd(base.clone())
                .verbose(true)
                .clone(Some(vec!["--recursive", u]), to_res())
        } else {
            Ok(0)
        };
        // Only update the branch when the clone (or the skip) succeeded.
        res = if res.is_ok() {
            GitCommand::new()
                .wd(base.join(cfg.get_project()))
                .verbose(true)
                .update_branch(self.config.get_branch())
        } else {
            res
        };
        res
    }

    /// Run `scons -c` in the project directory to remove build artifacts.
    /// (Previous doc comment said "Not yet implemented", which was stale.)
    fn clean(&self) -> Result<u8,u8> {
        let cfg = &self.config;
        let mut jobs = String::from_str("-j");
        jobs.push_str(usable_cores().to_string().as_slice());
        let wd = Path::new(cfg.get_dir()).join(cfg.get_project());
        let mut cmd = CommandExt::new("scons");
        cmd.wd(&wd);
        cmd.arg(jobs.as_slice());
        cmd.arg("-c");
        cmd.exec(to_res())
    }

    /// Remove the dirty file.
    fn configure(&self) -> Result<u8,u8> {
        let cfg = &self.config;
        let wd = Path::new(cfg.get_dir()).join(cfg.get_project());
        // Best effort: a missing "dirty" file is just logged, not fatal.
        fs::unlink(&Path::new(wd.join("dirty"))).unwrap_or_else(|why| {
            println!("{}", why);
        });
        Ok(0)
    }

    /// Build MongoDB with scons (`--release --ssl ...` into /opt/mongo-<branch>),
    /// adding system-v8 include/lib paths for 2.6-series branches, plus any
    /// requested targets, test and lint steps.
    /// (Previous doc comment said "Not yet implemented", which was stale.)
    fn make(&self) -> Result<u8,u8> {
        let cfg = &self.config;
        // Setup jobs
        let mut jobs = String::from_str("-j");
        jobs.push_str(usable_cores().to_string().as_slice());
        // Working directory
        let wd = Path::new(cfg.get_dir()).join(cfg.get_project());
        // scons
        let mut cmd = CommandExt::new("scons");
        cmd.wd(&wd);
        cmd.header(true);
        // Install prefix is /opt/mongo-<branch>.
        let mut b = String::from_str("mongo-");
        b.push_str(cfg.get_branch());
        let ip = Path::new("/opt").join(b);
        let mut prefix = String::from_str("--prefix=");
        prefix.push_str(ip.as_str().unwrap());
        cmd.arg(prefix.as_slice());
        cmd.arg(jobs.as_slice());
        cmd.arg("--release");
        cmd.arg("--ssl");
        cmd.arg("--variant-dir=build");
        cmd.arg("--c++11");
        cmd.arg("--use-system-tcmalloc");
        cmd.arg("--use-system-pcre");
        cmd.arg("--use-system-snappy");
        cmd.arg("--use-system-stemmer");
        // 2.6-series branches build against a locally installed v8.
        if is_v26(cfg.get_branch()) {
            let incp = Path::new(env!("HOME")).join("lib/v8/3.12.19/include");
            let libp = Path::new(env!("HOME")).join("lib/v8/3.12.19/lib");
            let mut incarg = String::from_str("--cpppath=");
            incarg.push_str(incp.as_str().unwrap());
            let mut libarg = String::from_str("--libpath=");
            libarg.push_str(libp.as_str().unwrap());
            cmd.arg("--use-system-v8");
            cmd.arg(incarg.as_slice());
            cmd.arg(libarg.as_slice());
        }
        // Targets
        if self.targets.is_empty() {
            cmd.arg("all");
        } else {
            if self.targets.contains(&"all".to_string()) {
                cmd.arg("all");
            } else {
                // Only "core" and "tools" are recognized; others are ignored.
                for target in self.targets.iter() {
                    match target.as_slice() {
                        "core" | "tools" => {
                            cmd.arg(target.as_slice());
                        },
                        _ => {},
                    }
                }
            }
        }
        if cfg.get_test() {
            cmd.arg("test");
            if self.smoke {
                cmd.arg("smoke");
            }
            if self.smokec {
                cmd.arg("smokeCppUnitTests");
            }
            if self.smokej {
                cmd.arg("smokeJsCore");
            }
        }
        if self.lint {
            cmd.arg("lint");
        }
        cmd.exec(to_res())
    }

    /// Not yet implemented
    fn test(&self) -> Result<u8,u8> {
        Ok(0)
    }

    /// Install MongoDB by running the same scons invocation as `make` (with
    /// `install` appended) under `sudo sg mongo -c '...'` so the install can
    /// write to the group-owned prefix.
    /// (Previous doc comment said "Not yet implemented", which was stale.)
    fn install(&self) -> Result<u8,u8> {
        let cfg = &self.config;
        // The whole scons command line is built into one string because it is
        // passed as a single `-c` argument to `sg`.
        let mut sgarg = String::from_str("scons");
        sgarg.push_str(" -j");
        sgarg.push_str(usable_cores().to_string().as_slice());
        let wd = Path::new(cfg.get_dir()).join(cfg.get_project());
        let mut cmd = CommandExt::new("sudo");
        cmd.wd(&wd);
        cmd.header(true);
        cmd.arg("sg");
        cmd.arg("mongo");
        cmd.arg("-c");
        let mut b = String::from_str("mongo-");
        b.push_str(cfg.get_branch());
        let ip = Path::new("/opt").join(b);
        sgarg.push_str(" --prefix=");
        sgarg.push_str(ip.as_str().unwrap());
        sgarg.push_str(" --release");
        sgarg.push_str(" --ssl");
        sgarg.push_str(" --variant-dir=build");
        sgarg.push_str(" --c++11");
        sgarg.push_str(" --use-system-tcmalloc");
        sgarg.push_str(" --use-system-pcre");
        sgarg.push_str(" --use-system-snappy");
        sgarg.push_str(" --use-system-stemmer");
        // Mirror the v8 flags used by `make` for 2.6-series branches.
        if is_v26(cfg.get_branch()) {
            let incp = Path::new(env!("HOME")).join("lib/v8/3.12.19/include");
            let libp = Path::new(env!("HOME")).join("lib/v8/3.12.19/lib");
            let mut incarg = String::from_str(" --cpppath=");
            incarg.push_str(incp.as_str().unwrap());
            let mut libarg = String::from_str(" --libpath=");
            libarg.push_str(libp.as_str().unwrap());
            sgarg.push_str(" --use-system-v8");
            sgarg.push_str(incarg.as_slice());
            sgarg.push_str(libarg.as_slice());
        }
        // Targets
        if self.targets.is_empty() {
            sgarg.push_str(" all");
        } else {
            if self.targets.contains(&"all".to_string()) {
                sgarg.push_str(" all");
            } else {
                for target in self.targets.iter() {
                    match target.as_slice() {
                        "core" | "tools" => {
                            sgarg.push_str(" ");
                            sgarg.push_str(target.as_slice());
                        },
                        _ => {},
                    }
                }
            }
        }
        if cfg.get_test() {
            sgarg.push_str(" test");
            if self.smoke {
                sgarg.push_str(" smoke");
            }
            if self.smokec {
                sgarg.push_str(" smokeCppUnitTests");
            }
            if self.smokej {
                sgarg.push_str(" smokeJsCore");
            }
        }
        if self.lint {
            sgarg.push_str(" lint");
        }
        sgarg.push_str(" install");
        cmd.arg(sgarg.as_slice());
        cmd.exec(to_res())
    }

    /// Not yet implemented
    fn cleanup(&self) -> Result<u8,u8> {
        Ok(0)
    }

    /// Show the docopt USAGE string on stdout.
    fn help(&self) -> Result<u8,u8> {
        println!("{}", USAGE);
        Ok(0)
    }

    /// Show the crate version on stdout.
    fn version(&self) -> Result<u8,u8> {
        // now()/sha()/branch() come from the generated version.rs include.
        println!("{} {} mongo-rub {}", now(), sha(), branch());
        Ok(0)
    }
}
#[cfg(test)]
mod test {
    use buildable::{Buildable,BuildConfig};
    use super::{MongoRub,is_v26};

    // A freshly-constructed MongoRub has empty prefix/url.
    fn check_mr(mr: &MongoRub) {
        assert_eq!(mr.prefix, "");
        assert_eq!(mr.url, "");
    }

    // A decoded BuildConfig carries the expected defaults plus the given
    // lifecycle ($HOME/projects dir, "mongo" project, "master" branch).
    fn check_bc(bc: &BuildConfig, lc: &Vec<&str>) {
        let mut tdir = env!("HOME").to_string();
        tdir.push_str("/projects");
        assert_eq!(bc.get_lifecycle(), lc);
        assert_eq!(bc.get_dir().as_str().unwrap(), tdir.as_slice());
        assert_eq!(bc.get_project(), "mongo");
        assert_eq!(bc.get_branch(), "master");
        assert!(!bc.get_test());
    }

    #[test]
    fn test_new() {
        let mr = MongoRub::new();
        check_mr(&mr);
    }

    #[test]
    // --version selects the single "version" lifecycle step.
    fn test_version() {
        let args = vec!["rub".to_string(),
                        "mongo".to_string(),
                        "--version".to_string()];
        let mut mr = MongoRub::new();
        check_mr(&mr);
        let b = Buildable::new(&mut mr, &args);
        let bc = b.get_bc();
        assert_eq!(bc.get_lifecycle(), &vec!["version"]);
        assert_eq!(b.version(), Ok(0))
    }

    #[test]
    // -h selects the single "help" lifecycle step.
    fn test_help() {
        let args = vec!["rub".to_string(),
                        "mongo".to_string(),
                        "-h".to_string()];
        let mut mr = MongoRub::new();
        check_mr(&mr);
        let b = Buildable::new(&mut mr, &args);
        let bc = b.get_bc();
        assert_eq!(bc.get_lifecycle(), &vec!["help"]);
        assert_eq!(b.help(), Ok(0))
    }

    #[test]
    // With no positional lifecycle, the default lifecycle is "most".
    fn test_base() {
        let args = vec!["rub".to_string(),
                        "mongo".to_string()];
        let mut mr = MongoRub::new();
        check_mr(&mr);
        let b = Buildable::new(&mut mr, &args);
        let bc = b.get_bc();
        check_bc(bc, &vec!["most"]);
        assert_eq!(b.version(), Ok(0));
    }

    #[test]
    // A positional "scm" argument narrows the lifecycle to just scm.
    fn test_scm() {
        let args = vec!["rub".to_string(),
                        "mongo".to_string(),
                        "scm".to_string()];
        let mut mr = MongoRub::new();
        check_mr(&mr);
        let b = Buildable::new(&mut mr, &args);
        let bc = b.get_bc();
        check_bc(bc, &vec!["scm"]);
        assert_eq!(b.version(), Ok(0));
    }

    #[test]
    fn test_all() {
        let args = vec!["rub".to_string(),
                        "mongo".to_string(),
                        "all".to_string()];
        let mut mr = MongoRub::new();
        check_mr(&mr);
        let b = Buildable::new(&mut mr, &args);
        let bc = b.get_bc();
        check_bc(bc, &vec!["all"]);
        assert_eq!(b.version(), Ok(0));
    }

    #[test]
    // Branch matching accepts both v- and r-prefixed 2.6 names, with suffixes.
    fn test_is_v26() {
        assert!(is_v26("v2.6"));
        assert!(is_v26("v2.6.1"));
        assert!(is_v26("r2.6"));
        assert!(is_v26("r2.6-rc1"));
        assert!(!is_v26("v3.0"));
    }
}
|
use std::rc::Rc;
use screen::dimension::Dimension;
use screen::layout::hlayout::HLayout;
use screen::layout::vlayout::VLayout;
use screen::screen::Screen;
use screen::layout::str_layout::StrLayout;
pub type LayoutRc = Rc<dyn Layout>;
/// A renderable rectangular element; `Dimension` supplies width()/height().
pub trait Layout
where
    Self: Dimension,
{
    /// Draw this element onto `target` with its top-left corner at (x, y).
    fn to_screen(&self, x: usize, y: usize, target: &mut Screen);

    /// Render into a freshly allocated screen exactly large enough to fit.
    fn as_screen(&self) -> Screen {
        let mut scr = Screen::new(self.width(), self.height());
        self.to_screen(0, 0, &mut scr);
        scr
    }

    /// Render to a plain string.
    fn to_screen_str(&self) -> String {
        self.as_screen().to_string()
    }

    /// Render and display via `Screen::show`.
    fn show(&self) {
        self.as_screen().show();
    }
}
pub struct L {}
impl L {
    /// Wrap a string in a leaf layout node.
    pub fn str(data: &str) -> Rc<StrLayout> {
        let leaf = StrLayout::new(data);
        Rc::new(leaf)
    }

    /// Stack the given children vertically.
    pub fn vert(data: Vec<LayoutRc>) -> LayoutRc {
        let column = VLayout::new(data);
        Rc::new(column)
    }

    /// Arrange the given children side by side.
    pub fn hori(data: Vec<LayoutRc>) -> LayoutRc {
        let row = HLayout::new(data);
        Rc::new(row)
    }
}
|
//! Compute expected value for the optimal strategy in every state of
//! Super Yahtzee. Takes between 100 and 140 minutes to compute.
extern crate yahtzeevalue;
extern crate byteorder;
use std::{io, fs};
use byteorder::{LittleEndian, WriteBytesExt};
use yahtzeevalue::compute_state_value;
/// Entry point: compute all state values, stream them to disk as
/// little-endian f64s, then atomically publish via rename.
fn main() {
    // Write to a temp file first so a crash never leaves a truncated
    // state_value.bin behind; the rename at the end is the "commit".
    let file = fs::File::create("state_value.tmp").expect("Could not open file");
    let state_value = compute_state_value(|i, n| {
        // 8 dots in a cluster, 32 dots in a line, BONUS_LIMIT lines
        // each line represents 2**18
        // each dot represents 2**13
        if i == 0 {
            eprintln!("Compute value of {} states", n);
        }
        // Only emit output every 2^13 states (and once at completion).
        if i != n && (i == 0 || i % (1 << 13) != 0) {
            return;
        }
        eprint!("{}", ".");
        // Cluster separator every 2^16; line break with a counter every 2^18.
        if i == n || i % (1 << 16) == 0 {
            eprint!(" ");
            if i == n || i % (1 << 18) == 0 {
                eprint!("{:8}/{}\n", i, n);
            }
        }
    });
    {
        // Scope the writer so it is dropped (and flushed) before the rename.
        // NOTE(review): BufWriter's Drop swallows flush errors — an explicit
        // flush() with error handling would be safer here.
        let mut writer = io::BufWriter::new(file);
        for x in state_value.iter() {
            writer.write_f64::<LittleEndian>(*x).expect("Writing failed");
        }
    }
    fs::rename("state_value.tmp", "state_value.bin").expect("Failed to rename state_value.tmp");
}
|
use std::fmt;
use std::cmp;
use rand::thread_rng;
use rand::Rng;
use model::sectors::Sector;
pub type Ticker = String;
#[derive(Debug)]
pub struct Business {
    /// Legal name of this business
    pub name: String,
    /// Exchange ticker symbol (empty until assigned).
    pub ticker: Ticker,
    /// 0 to 1 estimate of the quality of management
    leadership: f32,
    /// 0 to 1 estimate of the global size and its marketshare
    pub size: f32,
    /// 0 to 1 estimate of the amount invested
    investement: f32,
    /// 0 to 1 estimate of the ability to communicate
    pr: f32,
    /// 0 to 1 estimate of the current performances
    results: f32,
    /// How market actors perceive the performances?
    pub perception: f32,
    /// Real performances of the business
    pub performance: f32,
    /// Outstanding shares times the price, total value of
    /// this business in the stock exchange.
    /// Expressed as an integer, in million.
    /// Beginning values are between 200 millions and one trillion.
    pub capitalisation: u32,
    /// Number of shares of the business on the stock exchange.
    /// Divided by the capitalization, it gives the share value.
    /// Expressed as an integer, in million.
    /// Beginning values are between 10 million and 10 billion.
    pub shares_outstanding: u32,
    pub sector: Sector
}
impl cmp::PartialEq for Business {
    /// Two businesses are the same entity exactly when their legal names match.
    fn eq(&self, other: &Self) -> bool {
        self.name.eq(&other.name)
    }
}
impl cmp::Eq for Business {}
impl cmp::PartialOrd for Business {
    /// Delegates to the total order from `Ord`, so this never returns `None`.
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl cmp::Ord for Business {
    /// Businesses sort lexicographically by legal name.
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        Ord::cmp(&self.name, &other.name)
    }
}
impl fmt::Display for Business {
    /// Renders just the legal name.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.name)
    }
}
impl Business {
    /// Create a business with randomized fundamentals; capitalisation and
    /// shares outstanding both scale with the random size factor.
    pub fn new(name: String, sector: Sector) -> Business {
        let random_size : f32 = thread_rng().gen_range(0.1, 1.);
        let random_cap_factor : u32 = thread_rng().gen_range(2000, 1000000);
        let random_so_factor : u32 = thread_rng().gen_range(100, 10000);
        let capitalisation = random_size as f64 * random_cap_factor as f64;
        let shares_outstanding = random_size as f64 * random_so_factor as f64;
        Business { name: name,
                   ticker: "".to_string(),
                   sector: sector,
                   leadership: thread_rng().gen_range(0., 1.),
                   size: random_size,
                   investement: thread_rng().gen_range(0., 1.),
                   pr: thread_rng().gen_range(0., 1.),
                   results: thread_rng().gen_range(0., 1.),
                   perception: 0.,
                   performance: 0.,
                   capitalisation: capitalisation as u32,
                   shares_outstanding: shares_outstanding as u32 }
    }

    /// Average of the four internal fundamentals and the sector health.
    pub fn compute_performance(&self, sector_health: f32) -> f32 {
        (self.leadership + self.size + self.investement + self.results + sector_health) / 5.0 // TODO: Add business event flags
    }

    /// Average of real performance, PR ability and a caller-supplied random factor.
    pub fn compute_perception(&self, rnd_factor: f32) -> f32 {
        (self.performance + self.pr + rnd_factor) / 3.0
    }

    /// Share price = capitalisation / shares outstanding, rounded to 2 decimals.
    pub fn get_current_stock_value(&self) -> f32 {
        let real = self.capitalisation as f32 / self.shares_outstanding as f32;
        (real * 100.0).round() / 100.0
    }

    /// New stock value is dependent on :
    /// - The difference of perception of the performance, compared to the last
    /// perception, giving the General tendency G.
    /// - The volume of transaction, depending on the outstanding shares and
    /// the spread of G, called V.
    /// This gives a new market capitalization E.
    pub fn compute_capitalisation_change(&self, rnd_factor: f32) -> f32 {
        let new_perception = self.compute_perception(rnd_factor);
        let g = new_perception - self.perception;
        // Volume: magnitude of the swing scaled inversely by float count.
        let v = ((1.0 / self.shares_outstanding as f32) * g * 1000.0).abs();
        self.capitalisation as f32 * g * v
    }
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use common_exception::Result;
use common_expression::types::DataType;
use common_expression::TableDataType;
use crate::binder::satisfied_by;
use crate::optimizer::RelExpr;
use crate::optimizer::RelationalProperty;
use crate::optimizer::SExpr;
use crate::plans::AggregateFunction;
use crate::plans::AndExpr;
use crate::plans::BoundColumnRef;
use crate::plans::CastExpr;
use crate::plans::ComparisonExpr;
use crate::plans::Filter;
use crate::plans::FunctionCall;
use crate::plans::Join;
use crate::plans::JoinType;
use crate::plans::NotExpr;
use crate::plans::OrExpr;
use crate::plans::WindowFunc;
use crate::ColumnBinding;
use crate::ColumnEntry;
use crate::ColumnSet;
use crate::IndexType;
use crate::MetadataRef;
use crate::ScalarExpr;
/// Try to strengthen an outer join that sits beneath a `Filter`: if a filter
/// predicate rejects NULLs coming from the NULL-padded side of the join, the
/// padded rows can never survive the filter, so the join type can be
/// tightened (LEFT/RIGHT → INNER, FULL → LEFT/RIGHT/INNER). Returns the
/// (possibly rewritten) expression plus a flag indicating whether it changed.
pub fn convert_outer_to_inner_join(s_expr: &SExpr) -> Result<(SExpr, bool)> {
    // Expected shape: Filter(Join(left, right)).
    let filter: Filter = s_expr.plan().clone().try_into()?;
    let mut join: Join = s_expr.child(0)?.plan().clone().try_into()?;
    let origin_join_type = join.join_type.clone();
    if !origin_join_type.is_outer_join() {
        return Ok((s_expr.clone(), false));
    }
    let s_join_expr = s_expr.child(0)?;
    let join_expr = RelExpr::with_s_expr(s_join_expr);
    let left_child_output_column = join_expr.derive_relational_prop_child(0)?.output_columns;
    let right_child_output_column = join_expr.derive_relational_prop_child(1)?.output_columns;
    // Collect columns whose NULLs the filter predicates would reject.
    let predicates = &filter.predicates;
    let mut nullable_columns: Vec<IndexType> = vec![];
    for predicate in predicates {
        find_nullable_columns(
            predicate,
            &left_child_output_column,
            &right_child_output_column,
            &mut nullable_columns,
        )?;
    }
    if join.join_type == JoinType::Left
        || join.join_type == JoinType::Right
        || join.join_type == JoinType::Full
    {
        // A null-rejected column from the LEFT child rules out padding of the
        // left side (relevant to RIGHT joins), and symmetrically for the
        // right child — hence the crossed flag assignments below.
        let mut left_join = false;
        let mut right_join = false;
        for col in nullable_columns.iter() {
            if left_child_output_column.contains(col) {
                right_join = true;
            }
            if right_child_output_column.contains(col) {
                left_join = true;
            }
        }
        match join.join_type {
            JoinType::Left => {
                if left_join {
                    join.join_type = JoinType::Inner
                }
            }
            JoinType::Right => {
                if right_join {
                    join.join_type = JoinType::Inner
                }
            }
            JoinType::Full => {
                // FULL can drop either side's padding independently.
                if left_join && right_join {
                    join.join_type = JoinType::Inner
                } else if left_join {
                    join.join_type = JoinType::Right
                } else if right_join {
                    join.join_type = JoinType::Left
                }
            }
            _ => unreachable!(),
        }
    }
    let changed_join_type = join.join_type.clone();
    if origin_join_type == changed_join_type {
        return Ok((s_expr.clone(), false));
    }
    // Rebuild the join node with the strengthened type, reusing the children.
    let mut result = SExpr::create_binary(
        join.into(),
        s_join_expr.child(0)?.clone(),
        s_join_expr.child(1)?.clone(),
    );
    // wrap filter s_expr
    result = SExpr::create_unary(filter.into(), result);
    Ok((result, true))
}
#[allow(clippy::only_used_in_recursion)]
/// Walk `predicate` and append to `nullable_columns` the column indexes whose
/// NULL values the predicate rejects (i.e. a NULL in that column makes the
/// predicate non-true, so padded outer-join rows cannot pass the filter).
fn find_nullable_columns(
    predicate: &ScalarExpr,
    left_output_columns: &ColumnSet,
    right_output_columns: &ColumnSet,
    nullable_columns: &mut Vec<IndexType>,
) -> Result<()> {
    match predicate {
        ScalarExpr::BoundColumnRef(column_binding) => {
            // A bare column reference used as a predicate is non-true on NULL.
            nullable_columns.push(column_binding.column.index);
        }
        ScalarExpr::AndExpr(expr) => {
            // NOTE(review): both recursion results are collected into local
            // vectors and then discarded, so an AND contributes nothing to
            // `nullable_columns`. That is conservative (it can only miss a
            // conversion opportunity, not produce a wrong plan) — confirm
            // whether the intent was to recurse with `nullable_columns`
            // directly, as the comparison branch does.
            let mut left_cols = vec![];
            let mut right_cols = vec![];
            find_nullable_columns(
                &expr.left,
                left_output_columns,
                right_output_columns,
                &mut left_cols,
            )?;
            find_nullable_columns(
                &expr.right,
                left_output_columns,
                right_output_columns,
                &mut right_cols,
            )?;
        }
        ScalarExpr::OrExpr(expr) => {
            // An OR only rejects NULLs when *both* branches do, and only for
            // columns that come from the same side of the join.
            let mut left_cols = vec![];
            let mut right_cols = vec![];
            find_nullable_columns(
                &expr.left,
                left_output_columns,
                right_output_columns,
                &mut left_cols,
            )?;
            find_nullable_columns(
                &expr.right,
                left_output_columns,
                right_output_columns,
                &mut right_cols,
            )?;
            if !left_cols.is_empty() && !right_cols.is_empty() {
                for left_col in left_cols.iter() {
                    for right_col in right_cols.iter() {
                        if (left_output_columns.contains(left_col)
                            && left_output_columns.contains(right_col))
                            || (right_output_columns.contains(left_col)
                                && right_output_columns.contains(right_col))
                        {
                            nullable_columns.push(*left_col);
                            break;
                        }
                    }
                }
            }
        }
        ScalarExpr::NotExpr(expr) => {
            find_nullable_columns(
                &expr.argument,
                left_output_columns,
                right_output_columns,
                nullable_columns,
            )?;
        }
        ScalarExpr::ComparisonExpr(expr) => {
            // For any comparison expr, if input is null, the compare result is false
            find_nullable_columns(
                &expr.left,
                left_output_columns,
                right_output_columns,
                nullable_columns,
            )?;
            find_nullable_columns(
                &expr.right,
                left_output_columns,
                right_output_columns,
                nullable_columns,
            )?;
        }
        ScalarExpr::CastExpr(expr) => {
            // Casting preserves NULL, so recurse into the operand.
            find_nullable_columns(
                &expr.argument,
                left_output_columns,
                right_output_columns,
                nullable_columns,
            )?;
        }
        // Other expression kinds are not known to reject NULLs; ignore them.
        _ => {}
    }
    Ok(())
}
/// If an outer join was converted to an inner join, the datatypes of the
/// filter predicate's column references must have their join-introduced
/// nullability stripped. Rewrites `predicate` accordingly.
pub fn remove_nullable(
    s_expr: &SExpr,
    predicate: &ScalarExpr,
    join_type: &JoinType,
    metadata: MetadataRef,
) -> Result<ScalarExpr> {
    // Derive both join children's relational properties, then rewrite the
    // predicate bottom-up.
    let rel_expr = RelExpr::with_s_expr(s_expr.child(0)?);
    let left = rel_expr.derive_relational_prop_child(0)?;
    let right = rel_expr.derive_relational_prop_child(1)?;
    remove_column_nullable(predicate, &left, &right, join_type, metadata)
}
/// Rebuild `scalar_expr` with the nullability added by the (former) outer
/// join removed from column datatypes. Structure-preserving: every variant is
/// reconstructed with recursively rewritten children.
fn remove_column_nullable(
    scalar_expr: &ScalarExpr,
    left_prop: &RelationalProperty,
    right_prop: &RelationalProperty,
    join_type: &JoinType,
    metadata: MetadataRef,
) -> Result<ScalarExpr> {
    Ok(match scalar_expr {
        ScalarExpr::BoundColumnRef(column) => {
            let mut data_type = column.column.data_type.clone();
            let metadata = metadata.read();
            let column_entry = metadata.column(column.column.index);
            let mut need_remove = true;
            // If the column type is nullable when the table is created
            // Do not need to remove nullable.
            match column_entry {
                ColumnEntry::BaseTableColumn(base) => {
                    if let TableDataType::Nullable(_) = base.data_type {
                        need_remove = false;
                    }
                }
                ColumnEntry::DerivedColumn(derived) => {
                    if let DataType::Nullable(_) = derived.data_type {
                        need_remove = false;
                    }
                }
                // None of internal columns will be nullable, so just ignore internal column type entry
                ColumnEntry::InternalColumn(..) => {}
            }
            // Only strip nullability from the side whose NULL padding the
            // strengthened join no longer produces.
            match join_type {
                JoinType::Left => {
                    if satisfied_by(scalar_expr, right_prop) && need_remove {
                        data_type = Box::new(column.column.data_type.remove_nullable());
                    }
                }
                JoinType::Right => {
                    if satisfied_by(scalar_expr, left_prop) && need_remove {
                        data_type = Box::new(column.column.data_type.remove_nullable());
                    }
                }
                JoinType::Full => {
                    if need_remove {
                        data_type = Box::new(column.column.data_type.remove_nullable())
                    }
                }
                _ => {}
            };
            ScalarExpr::BoundColumnRef(BoundColumnRef {
                span: column.span,
                column: ColumnBinding {
                    database_name: column.column.database_name.clone(),
                    table_name: column.column.table_name.clone(),
                    column_name: column.column.column_name.clone(),
                    index: column.column.index,
                    data_type,
                    visibility: column.column.visibility.clone(),
                },
            })
        }
        ScalarExpr::BoundInternalColumnRef(_) => {
            // internal column will never be null
            unreachable!()
        }
        // The remaining arms just rebuild the node around recursively
        // rewritten children.
        ScalarExpr::AndExpr(expr) => {
            let left_expr = remove_column_nullable(
                &expr.left,
                left_prop,
                right_prop,
                join_type,
                metadata.clone(),
            )?;
            let right_expr =
                remove_column_nullable(&expr.right, left_prop, right_prop, join_type, metadata)?;
            ScalarExpr::AndExpr(AndExpr {
                left: Box::new(left_expr),
                right: Box::new(right_expr),
            })
        }
        ScalarExpr::OrExpr(expr) => {
            let left_expr = remove_column_nullable(
                &expr.left,
                left_prop,
                right_prop,
                join_type,
                metadata.clone(),
            )?;
            let right_expr =
                remove_column_nullable(&expr.right, left_prop, right_prop, join_type, metadata)?;
            ScalarExpr::OrExpr(OrExpr {
                left: Box::new(left_expr),
                right: Box::new(right_expr),
            })
        }
        ScalarExpr::NotExpr(expr) => {
            let new_expr =
                remove_column_nullable(&expr.argument, left_prop, right_prop, join_type, metadata)?;
            ScalarExpr::NotExpr(NotExpr {
                argument: Box::new(new_expr),
            })
        }
        ScalarExpr::ComparisonExpr(expr) => {
            let left_expr = remove_column_nullable(
                &expr.left,
                left_prop,
                right_prop,
                join_type,
                metadata.clone(),
            )?;
            let right_expr =
                remove_column_nullable(&expr.right, left_prop, right_prop, join_type, metadata)?;
            ScalarExpr::ComparisonExpr(ComparisonExpr {
                op: expr.op.clone(),
                left: Box::new(left_expr),
                right: Box::new(right_expr),
            })
        }
        ScalarExpr::WindowFunction(expr) => {
            // NOTE(review): only the aggregate arguments are rewritten here;
            // `partition_by` expressions are cloned untouched — confirm that
            // is intentional.
            let mut args = Vec::with_capacity(expr.agg_func.args.len());
            for arg in expr.agg_func.args.iter() {
                args.push(remove_column_nullable(
                    arg,
                    left_prop,
                    right_prop,
                    join_type,
                    metadata.clone(),
                )?);
            }
            ScalarExpr::WindowFunction(WindowFunc {
                agg_func: AggregateFunction {
                    display_name: expr.agg_func.display_name.clone(),
                    func_name: expr.agg_func.func_name.clone(),
                    distinct: expr.agg_func.distinct,
                    params: expr.agg_func.params.clone(),
                    args,
                    return_type: expr.agg_func.return_type.clone(),
                },
                partition_by: expr.partition_by.clone(),
                frame: expr.frame.clone(),
            })
        }
        ScalarExpr::AggregateFunction(expr) => {
            let mut args = Vec::with_capacity(expr.args.len());
            for arg in expr.args.iter() {
                args.push(remove_column_nullable(
                    arg,
                    left_prop,
                    right_prop,
                    join_type,
                    metadata.clone(),
                )?);
            }
            ScalarExpr::AggregateFunction(AggregateFunction {
                display_name: expr.display_name.clone(),
                func_name: expr.func_name.clone(),
                distinct: expr.distinct,
                params: expr.params.clone(),
                args,
                return_type: expr.return_type.clone(),
            })
        }
        ScalarExpr::FunctionCall(expr) => {
            let mut args = Vec::with_capacity(expr.arguments.len());
            for arg in expr.arguments.iter() {
                args.push(remove_column_nullable(
                    arg,
                    left_prop,
                    right_prop,
                    join_type,
                    metadata.clone(),
                )?);
            }
            ScalarExpr::FunctionCall(FunctionCall {
                span: expr.span,
                params: expr.params.clone(),
                arguments: args,
                func_name: expr.func_name.clone(),
            })
        }
        ScalarExpr::CastExpr(expr) => {
            let new_expr =
                remove_column_nullable(&expr.argument, left_prop, right_prop, join_type, metadata)?;
            ScalarExpr::CastExpr(CastExpr {
                span: expr.span,
                is_try: expr.is_try,
                argument: Box::new(new_expr),
                target_type: expr.target_type.clone(),
            })
        }
        // Constants and subqueries contain no outer-join columns to rewrite.
        ScalarExpr::ConstantExpr(_) | ScalarExpr::SubqueryExpr(_) => scalar_expr.clone(),
    })
}
|
extern crate tcod;
extern crate rand;
use util::{Point};
use util::Contains::{ DoesContain, DoesNotContain };
use game::Game;
use traits::Updates;
use self::tcod::input::KeyCode;
use self::rand::{ thread_rng, Rng };
use self::tcod::console::{ Console, Root, BackgroundFlag };
/// A non-player character that wanders the map randomly.
pub struct NPC {
    /// Current location on the map grid.
    pub position: Point,
    /// Glyph drawn for this NPC when rendering.
    pub display_char: char,
}
impl NPC {
    /// Build an NPC located at `position`, rendered as `display_char`.
    pub fn new(position: Point, display_char: char) -> NPC {
        NPC {
            position,
            display_char,
        }
    }
}
impl Updates for NPC {
fn update(&mut self, _key: KeyCode, game: &Game) {
let offset_x: i32 = thread_rng().gen_range(0, 3) - 1;
match game.window_bounds.contains(self.position.offset_x(offset_x)) {
DoesContain => self.position = self.position.offset_x(offset_x),
DoesNotContain => (),
}
let offset_y: i32 = thread_rng().gen_range(0, 3) - 1;
match game.window_bounds.contains(self.position.offset_y(offset_y)) {
DoesContain => self.position = self.position.offset_y(offset_y),
DoesNotContain => (),
}
}
fn render(&self, root: &mut Root) {
root.put_char(self.position.x, self.position.y, self.display_char, BackgroundFlag::Set);
}
} |
use bitcoin::{Transaction, Script, TxOut, TxIn, OutPoint};
pub(crate) use inner::Weight;
// The tuple field is private to this module so a `Weight` can only be built
// through its explicit constructors (ensure explicit constructor).
mod inner {
    use std::ops::{Add, Sub, AddAssign, SubAssign, Mul, Div};
    use crate::fee_rate::FeeRate;
    /// Transaction weight in weight units. Non-witness bytes are counted
    /// four times (see `from_non_witness_data_size`), witness bytes once.
    #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
    pub(crate) struct Weight(u64);
    impl Weight {
        /// Zero weight — identity for the `Add`/`AddAssign` impls below.
        pub(crate) const ZERO: Weight = Weight(0);
        /// Witness bytes count once per byte.
        pub(crate) fn from_witness_data_size(size: u64) -> Self {
            Weight(size)
        }
        /// Non-witness bytes count four weight units per byte.
        pub(crate) fn from_non_witness_data_size(size: u64) -> Self {
            Weight(size * 4)
        }
        /// Wrap an already-computed weight value without any scaling.
        pub(crate) fn manual_from_u64(weight: u64) -> Self {
            Weight(weight)
        }
    }
    impl From<Weight> for u64 {
        fn from(value: Weight) -> Self {
            value.0
        }
    }
    impl Add for Weight {
        type Output = Weight;
        fn add(self, rhs: Weight) -> Self::Output {
            Weight(self.0 + rhs.0)
        }
    }
    impl Sub for Weight {
        type Output = Weight;
        // NOTE(review): underflow panics in debug / wraps in release, as with
        // plain u64 subtraction.
        fn sub(self, rhs: Weight) -> Self::Output {
            Weight(self.0 - rhs.0)
        }
    }
    impl AddAssign for Weight {
        fn add_assign(&mut self, rhs: Weight) {
            self.0 += rhs.0
        }
    }
    impl SubAssign for Weight {
        fn sub_assign(&mut self, rhs: Weight) {
            self.0 -= rhs.0
        }
    }
    impl Mul<u64> for Weight {
        type Output = Weight;
        fn mul(self, rhs: u64) -> Self::Output {
            Weight(self.0 * rhs)
        }
    }
    impl Div<Weight> for bitcoin::Amount {
        type Output = FeeRate;
        // Fee divided by weight gives a fee rate (integer division; panics on
        // a zero weight like any u64 division).
        fn div(self, rhs: Weight) -> Self::Output {
            FeeRate::from_sat_per_wu(self.as_sat() / rhs.0)
        }
    }
}
/// Weight contributed by a txin's witness: the item-count prefix plus, for
/// every item, its own length prefix and payload. An empty witness
/// contributes nothing at all (not even the count prefix).
fn witness_weight(witness: &Vec<Vec<u8>>) -> Weight {
    if witness.is_empty() {
        return Weight::ZERO;
    }
    let total = witness
        .iter()
        .fold(varint_size(witness.len() as u64), |acc, item| {
            acc + varint_size(item.len() as u64) + item.len() as u64
        });
    Weight::from_witness_data_size(total)
}
/// Types whose serialized form has a computable transaction weight.
pub(crate) trait ComputeWeight {
    /// Weight of this value's serialization, in weight units.
    fn weight(&self) -> Weight;
}
/// Types with a computable serialized size in bytes.
pub(crate) trait ComputeSize {
    /// Number of bytes this value occupies when encoded, including any
    /// length prefix.
    fn encoded_size(&self) -> u64;
}
/// Number of bytes Bitcoin's CompactSize (varint) encoding uses for
/// `number`: 1, 3, 5, or 9 depending on magnitude.
fn varint_size(number: u64) -> u64 {
    if number <= 0xfc {
        1
    } else if number <= 0xffff {
        3
    } else if number <= 0xffff_ffff {
        5
    } else {
        9
    }
}
impl ComputeSize for Script {
    /// Encoded size of a script: its bytes plus the varint length prefix.
    fn encoded_size(&self) -> u64 {
        let len = self.len() as u64;
        len + varint_size(len)
    }
}
impl ComputeWeight for TxOut {
    /// An output is entirely non-witness data: script plus the 8-byte value.
    fn weight(&self) -> Weight {
        Weight::from_non_witness_data_size(self.script_pubkey.encoded_size() + 8 /* bytes encoding u64 value */)
    }
}
impl ComputeWeight for TxIn {
    /// Input weight = non-witness parts (script_sig, sequence, previous
    /// outpoint) plus the discounted witness data.
    fn weight(&self) -> Weight {
        Weight::from_non_witness_data_size(self.script_sig.encoded_size() + 4 /* bytes encoding u32 sequence number */) + self.previous_output.weight() + witness_weight(&self.witness)
    }
}
impl ComputeWeight for OutPoint {
    /// An outpoint has a fixed serialization: 32-byte txid + 4-byte index.
    fn weight(&self) -> Weight {
        Weight::from_non_witness_data_size(32 /* bytes encoding previous hash */ + 4 /* bytes encoding u32 output index */)
    }
}
impl ComputeWeight for Transaction {
    /// Delegate to the library's own weight computation and wrap it.
    fn weight(&self) -> Weight {
        Weight::manual_from_u64(self.get_weight() as u64)
    }
}
|
//! Parsers recognizing numbers
#![allow(clippy::match_same_arms)]
pub mod bits;
#[cfg(test)]
mod tests;
use crate::combinator::repeat;
use crate::error::ErrMode;
use crate::error::ErrorKind;
use crate::error::Needed;
use crate::error::ParserError;
use crate::lib::std::ops::{Add, Shl};
use crate::stream::Accumulate;
use crate::stream::{AsBytes, Stream, StreamIsPartial};
use crate::stream::{ToUsize, UpdateSlice};
use crate::token::take;
use crate::trace::trace;
use crate::PResult;
use crate::Parser;
/// Configurable endianness
///
/// Selects the byte order used when interpreting multi-byte integers.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Endianness {
    /// Big endian
    Big,
    /// Little endian
    Little,
    /// Will match the host's endianness
    Native,
}
/// Recognizes an unsigned 1 byte integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_u8;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u8> {
///     be_u8.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03abcefg"[..]), Ok((&b"\x03abcefg"[..], 0x00)));
/// assert_eq!(parser(&b""[..]), Err(ErrMode::Backtrack(InputError::new(&[][..], ErrorKind::Token))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_u8;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u8> {
///     be_u8.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01abcd"[..])), Ok((Partial::new(&b"\x01abcd"[..]), 0x00)));
/// assert_eq!(parser(Partial::new(&b""[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn be_u8<I, E: ParserError<I>>(input: &mut I) -> PResult<u8, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
{
    // Endianness is irrelevant for a single byte: delegate to the plain
    // `u8` parser.
    u8(input)
}
/// Recognizes a big endian unsigned 2 bytes integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_u16;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u16> {
///     be_u16.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03abcefg"[..]), Ok((&b"abcefg"[..], 0x0003)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_u16;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u16> {
///     be_u16.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x0001)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn be_u16<I, E: ParserError<I>>(input: &mut I) -> PResult<u16, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Take exactly two bytes and fold them big-endian into a u16.
    let mut parser = trace("be_u16", |input: &mut I| be_uint(input, 2));
    parser.parse_next(input)
}
/// Recognizes a big endian unsigned 3 byte integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_u24;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u32> {
///     be_u24.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03\x05abcefg"[..]), Ok((&b"abcefg"[..], 0x000305)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_u24;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u32> {
///     be_u24.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x000102)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(2))));
/// ```
#[inline(always)]
pub fn be_u24<I, E: ParserError<I>>(input: &mut I) -> PResult<u32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Trace label fixed: was mislabeled "be_u23", which made debug traces
    // misleading and inconsistent with every sibling parser.
    trace("be_u24", move |input: &mut I| be_uint(input, 3)).parse_next(input)
}
/// Recognizes a big endian unsigned 4 bytes integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_u32;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u32> {
///     be_u32.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03\x05\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x00030507)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_u32;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u32> {
///     be_u32.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x00010203)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(3))));
/// ```
#[inline(always)]
pub fn be_u32<I, E: ParserError<I>>(input: &mut I) -> PResult<u32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Four bytes, most-significant first.
    let inner = move |input: &mut I| be_uint(input, 4);
    trace("be_u32", inner).parse_next(input)
}
/// Recognizes a big endian unsigned 8 bytes integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_u64;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u64> {
///     be_u64.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x0001020304050607)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_u64;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u64> {
///     be_u64.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x0001020304050607)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(7))));
/// ```
#[inline(always)]
pub fn be_u64<I, E: ParserError<I>>(input: &mut I) -> PResult<u64, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Eight bytes, most-significant first.
    let mut parser = trace("be_u64", |input: &mut I| be_uint(input, 8));
    parser.parse_next(input)
}
/// Recognizes a big endian unsigned 16 bytes integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_u128;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u128> {
///     be_u128.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x00010203040506070001020304050607)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_u128;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u128> {
///     be_u128.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x00010203040506070809101112131415)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(15))));
/// ```
#[inline(always)]
pub fn be_u128<I, E: ParserError<I>>(input: &mut I) -> PResult<u128, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Sixteen bytes, most-significant first.
    let mut parser = trace("be_u128", |input: &mut I| be_uint(input, 16));
    parser.parse_next(input)
}
/// Shared helper behind the `be_u16`..`be_u128` parsers: take `bound` bytes
/// from the stream and fold them big-endian into `Uint`.
#[inline]
fn be_uint<I, Uint, E: ParserError<I>>(input: &mut I, bound: usize) -> PResult<Uint, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
    Uint: Default + Shl<u8, Output = Uint> + Add<Uint, Output = Uint> + From<u8>,
{
    // Single bytes are handled by `u8`/`be_u8`; the accumulator shift in
    // `to_be_uint` would need overflow handling for a 1-byte `Uint`.
    debug_assert_ne!(bound, 1, "to_be_uint needs extra work to avoid overflow");
    take(bound)
        .map(|n: <I as Stream>::Slice| to_be_uint(n.as_bytes()))
        .parse_next(input)
}
/// Fold a big-endian byte slice into `Uint`: each byte becomes the new
/// least-significant 8 bits of the accumulator.
#[inline]
fn to_be_uint<Uint>(number: &[u8]) -> Uint
where
    Uint: Default + Shl<u8, Output = Uint> + Add<Uint, Output = Uint> + From<u8>,
{
    number
        .iter()
        .copied()
        .fold(Uint::default(), |acc, byte| (acc << 8) + Uint::from(byte))
}
/// Recognizes a signed 1 byte integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_i8;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i8> {
///     be_i8.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03abcefg"[..]), Ok((&b"\x03abcefg"[..], 0x00)));
/// assert_eq!(parser(&b""[..]), Err(ErrMode::Backtrack(InputError::new(&[][..], ErrorKind::Token))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_i8;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i8> {
///     be_i8.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01abcd"[..])), Ok((Partial::new(&b"\x01abcd"[..]), 0x00)));
/// assert_eq!(parser(Partial::new(&b""[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn be_i8<I, E: ParserError<I>>(input: &mut I) -> PResult<i8, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
{
    // Endianness is irrelevant for a single byte: delegate to the signed
    // `i8` parser.
    i8(input)
}
/// Recognizes a big endian signed 2 bytes integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_i16;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i16> {
///     be_i16.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03abcefg"[..]), Ok((&b"abcefg"[..], 0x0003)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_i16;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i16> {
///     be_i16.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x0001)));
/// assert_eq!(parser(Partial::new(&b""[..])), Err(ErrMode::Incomplete(Needed::new(2))));
/// ```
#[inline(always)]
pub fn be_i16<I, E: ParserError<I>>(input: &mut I) -> PResult<i16, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Parse the unsigned value, then reinterpret the bits as signed.
    trace("be_i16", |input: &mut I| {
        let n: u16 = be_uint(input, 2)?;
        Ok(n as i16)
    })
    .parse_next(input)
}
/// Recognizes a big endian signed 3 bytes integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_i24;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i32> {
///     be_i24.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03\x05abcefg"[..]), Ok((&b"abcefg"[..], 0x000305)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_i24;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i32> {
///     be_i24.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x000102)));
/// assert_eq!(parser(Partial::new(&b""[..])), Err(ErrMode::Incomplete(Needed::new(3))));
/// ```
#[inline(always)]
pub fn be_i24<I, E: ParserError<I>>(input: &mut I) -> PResult<i32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    trace("be_i24", move |input: &mut I| {
        be_uint::<_, u32, _>(input, 3).map(|n| {
            // Same as the unsigned version but we need to sign-extend
            // manually here: replicate bit 23 into the top byte.
            // (Removed a redundant `let n = ...; n` binding.)
            if n & 0x80_00_00 != 0 {
                (n | 0xff_00_00_00) as i32
            } else {
                n as i32
            }
        })
    })
    .parse_next(input)
}
/// Recognizes a big endian signed 4 bytes integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_i32;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i32> {
///     be_i32.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03\x05\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x00030507)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_i32;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i32> {
///     be_i32.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x00010203)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(4))));
/// ```
#[inline(always)]
pub fn be_i32<I, E: ParserError<I>>(input: &mut I) -> PResult<i32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Parse the unsigned value, then reinterpret the bits as signed.
    trace("be_i32", |input: &mut I| {
        let n: u32 = be_uint(input, 4)?;
        Ok(n as i32)
    })
    .parse_next(input)
}
/// Recognizes a big endian signed 8 bytes integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_i64;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i64> {
///     be_i64.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x0001020304050607)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_i64;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i64> {
///     be_i64.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x0001020304050607)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(7))));
/// ```
#[inline(always)]
pub fn be_i64<I, E: ParserError<I>>(input: &mut I) -> PResult<i64, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Parse the unsigned value, then reinterpret the bits as signed.
    trace("be_i64", |input: &mut I| {
        let n: u64 = be_uint(input, 8)?;
        Ok(n as i64)
    })
    .parse_next(input)
}
/// Recognizes a big endian signed 16 bytes integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_i128;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i128> {
///     be_i128.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x00010203040506070001020304050607)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_i128;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i128> {
///     be_i128.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x00010203040506070809101112131415)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(15))));
/// ```
#[inline(always)]
pub fn be_i128<I, E: ParserError<I>>(input: &mut I) -> PResult<i128, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Parse the unsigned value, then reinterpret the bits as signed.
    trace("be_i128", |input: &mut I| {
        let n: u128 = be_uint(input, 16)?;
        Ok(n as i128)
    })
    .parse_next(input)
}
/// Recognizes an unsigned 1 byte integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_u8;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u8> {
///     le_u8.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03abcefg"[..]), Ok((&b"\x03abcefg"[..], 0x00)));
/// assert_eq!(parser(&b""[..]), Err(ErrMode::Backtrack(InputError::new(&[][..], ErrorKind::Token))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_u8;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u8> {
///     le_u8.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01abcd"[..])), Ok((Partial::new(&b"\x01abcd"[..]), 0x00)));
/// assert_eq!(parser(Partial::new(&b""[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn le_u8<I, E: ParserError<I>>(input: &mut I) -> PResult<u8, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
{
    // Endianness is irrelevant for a single byte: delegate to the plain
    // `u8` parser.
    u8(input)
}
/// Parses an unsigned 2-byte integer stored in little-endian byte order.
///
/// *Complete version*: Fails with an error when fewer than 2 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_u16;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u16> {
///     le_u16.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03abcefg"[..]), Ok((&b"abcefg"[..], 0x0300)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_u16;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u16> {
///     le_u16::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x0100)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn le_u16<I, E: ParserError<I>>(input: &mut I) -> PResult<u16, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Delegate to the generic little-endian accumulator with a width of 2 bytes.
    trace("le_u16", |input: &mut I| le_uint::<_, u16, _>(input, 2)).parse_next(input)
}
/// Parses an unsigned 3-byte integer stored in little-endian byte order.
///
/// *Complete version*: Fails with an error when fewer than 3 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_u24;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u32> {
///     le_u24.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03\x05abcefg"[..]), Ok((&b"abcefg"[..], 0x050300)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_u24;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u32> {
///     le_u24::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x020100)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(2))));
/// ```
#[inline(always)]
pub fn le_u24<I, E: ParserError<I>>(input: &mut I) -> PResult<u32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Only 3 bytes are consumed; the result is widened into a `u32`.
    trace("le_u24", |input: &mut I| le_uint::<_, u32, _>(input, 3)).parse_next(input)
}
/// Parses an unsigned 4-byte integer stored in little-endian byte order.
///
/// *Complete version*: Fails with an error when fewer than 4 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_u32;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u32> {
///     le_u32.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03\x05\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x07050300)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_u32;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u32> {
///     le_u32::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x03020100)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(3))));
/// ```
#[inline(always)]
pub fn le_u32<I, E: ParserError<I>>(input: &mut I) -> PResult<u32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Delegate to the generic little-endian accumulator with a width of 4 bytes.
    trace("le_u32", |input: &mut I| le_uint::<_, u32, _>(input, 4)).parse_next(input)
}
/// Parses an unsigned 8-byte integer stored in little-endian byte order.
///
/// *Complete version*: Fails with an error when fewer than 8 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_u64;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u64> {
///     le_u64.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x0706050403020100)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_u64;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u64> {
///     le_u64::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x0706050403020100)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(7))));
/// ```
#[inline(always)]
pub fn le_u64<I, E: ParserError<I>>(input: &mut I) -> PResult<u64, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Delegate to the generic little-endian accumulator with a width of 8 bytes.
    trace("le_u64", |input: &mut I| le_uint::<_, u64, _>(input, 8)).parse_next(input)
}
/// Parses an unsigned 16-byte integer stored in little-endian byte order.
///
/// *Complete version*: Fails with an error when fewer than 16 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_u128;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u128> {
///     le_u128.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x07060504030201000706050403020100)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_u128;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u128> {
///     le_u128::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x15141312111009080706050403020100)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(15))));
/// ```
#[inline(always)]
pub fn le_u128<I, E: ParserError<I>>(input: &mut I) -> PResult<u128, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Delegate to the generic little-endian accumulator with a width of 16 bytes.
    trace("le_u128", |input: &mut I| le_uint::<_, u128, _>(input, 16)).parse_next(input)
}
/// Takes exactly `bound` bytes from the stream and folds them into `Uint`,
/// treating the first byte as least significant (little-endian).
///
/// Callers must pick a `bound` small enough that every byte's shift
/// (`8 * index`) fits in `Uint`.
#[inline]
fn le_uint<I, Uint, E: ParserError<I>>(input: &mut I, bound: usize) -> PResult<Uint, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
    Uint: Default + Shl<u8, Output = Uint> + Add<Uint, Output = Uint> + From<u8>,
{
    // Grab the raw slice first, then assemble it outside the parser chain.
    let raw = take(bound).parse_next(input)?;
    Ok(to_le_uint(raw.as_bytes()))
}
/// Folds a byte slice into an unsigned integer, little-endian:
/// byte `i` contributes `byte << (8 * i)`.
#[inline]
fn to_le_uint<Uint>(number: &[u8]) -> Uint
where
    Uint: Default + Shl<u8, Output = Uint> + Add<Uint, Output = Uint> + From<u8>,
{
    number
        .iter_offsets()
        .fold(Uint::default(), |acc, (index, byte)| {
            acc + (Uint::from(byte) << (8 * index as u8))
        })
}
/// Recognizes a signed 1 byte integer.
///
/// **Note:** endianness does not apply to 1 byte numbers; this simply delegates to `i8`.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_i8;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i8> {
///     le_i8.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03abcefg"[..]), Ok((&b"\x03abcefg"[..], 0x00)));
/// assert_eq!(parser(&b""[..]), Err(ErrMode::Backtrack(InputError::new(&[][..], ErrorKind::Token))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_i8;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i8> {
///     le_i8.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01abcd"[..])), Ok((Partial::new(&b"\x01abcd"[..]), 0x00)));
/// assert_eq!(parser(Partial::new(&b""[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn le_i8<I, E: ParserError<I>>(input: &mut I) -> PResult<i8, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
{
    // A single byte reads the same in either byte order.
    i8(input)
}
/// Parses a signed 2-byte integer stored in little-endian byte order.
///
/// *Complete version*: Fails with an error when fewer than 2 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_i16;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i16> {
///     le_i16.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03abcefg"[..]), Ok((&b"abcefg"[..], 0x0300)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_i16;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i16> {
///     le_i16::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x0100)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn le_i16<I, E: ParserError<I>>(input: &mut I) -> PResult<i16, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    trace("le_i16", |input: &mut I| {
        // Decode as unsigned, then reinterpret the bit pattern as signed.
        let n = le_uint::<_, u16, _>(input, 2)?;
        Ok(n as i16)
    })
    .parse_next(input)
}
/// Recognizes a little endian signed 3 bytes integer.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_i24;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i32> {
///     le_i24.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03\x05abcefg"[..]), Ok((&b"abcefg"[..], 0x050300)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_i24;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i32> {
///     le_i24::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x020100)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(2))));
/// ```
#[inline(always)]
pub fn le_i24<I, E: ParserError<I>>(input: &mut I) -> PResult<i32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    trace("le_i24", move |input: &mut I| {
        le_uint::<_, u32, _>(input, 3).map(|n| {
            // Same as the unsigned version, but the 24-bit value must be
            // sign-extended to 32 bits by hand: when the sign bit (bit 23) is
            // set, fill the top byte with ones before reinterpreting.
            if n & 0x80_00_00 != 0 {
                (n | 0xff_00_00_00) as i32
            } else {
                n as i32
            }
        })
    })
    .parse_next(input)
}
/// Parses a signed 4-byte integer stored in little-endian byte order.
///
/// *Complete version*: Fails with an error when fewer than 4 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_i32;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i32> {
///     le_i32.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03\x05\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x07050300)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_i32;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i32> {
///     le_i32::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x03020100)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(3))));
/// ```
#[inline(always)]
pub fn le_i32<I, E: ParserError<I>>(input: &mut I) -> PResult<i32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    trace("le_i32", |input: &mut I| {
        // Decode as unsigned, then reinterpret the bit pattern as signed.
        let n = le_uint::<_, u32, _>(input, 4)?;
        Ok(n as i32)
    })
    .parse_next(input)
}
/// Parses a signed 8-byte integer stored in little-endian byte order.
///
/// *Complete version*: Fails with an error when fewer than 8 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_i64;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i64> {
///     le_i64.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x0706050403020100)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_i64;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i64> {
///     le_i64::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x0706050403020100)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(7))));
/// ```
#[inline(always)]
pub fn le_i64<I, E: ParserError<I>>(input: &mut I) -> PResult<i64, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    trace("le_i64", |input: &mut I| {
        // Decode as unsigned, then reinterpret the bit pattern as signed.
        let n = le_uint::<_, u64, _>(input, 8)?;
        Ok(n as i64)
    })
    .parse_next(input)
}
/// Parses a signed 16-byte integer stored in little-endian byte order.
///
/// *Complete version*: Fails with an error when fewer than 16 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_i128;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i128> {
///     le_i128.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x07060504030201000706050403020100)));
/// assert_eq!(parser(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_i128;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i128> {
///     le_i128::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15abcd"[..])), Ok((Partial::new(&b"abcd"[..]), 0x15141312111009080706050403020100)));
/// assert_eq!(parser(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(15))));
/// ```
#[inline(always)]
pub fn le_i128<I, E: ParserError<I>>(input: &mut I) -> PResult<i128, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    trace("le_i128", |input: &mut I| {
        // Decode as unsigned, then reinterpret the bit pattern as signed.
        let n = le_uint::<_, u128, _>(input, 16)?;
        Ok(n as i128)
    })
    .parse_next(input)
}
/// Parses an unsigned 1-byte integer.
///
/// **Note:** endianness does not apply to 1 byte numbers.
///
/// *Complete version*: Fails with an error when the input is empty.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::u8;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], u8> {
///     u8.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03abcefg"[..]), Ok((&b"\x03abcefg"[..], 0x00)));
/// assert_eq!(parser(&b""[..]), Err(ErrMode::Backtrack(InputError::new(&[][..], ErrorKind::Token))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::u8;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, u8> {
///     u8::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x03abcefg"[..])), Ok((Partial::new(&b"\x03abcefg"[..]), 0x00)));
/// assert_eq!(parser(Partial::new(&b""[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn u8<I, E: ParserError<I>>(input: &mut I) -> PResult<u8, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
{
    trace("u8", |input: &mut I| {
        // Select the monomorphization matching the stream's partial support.
        match <I as StreamIsPartial>::is_partial_supported() {
            true => u8_::<_, _, true>(input),
            false => u8_::<_, _, false>(input),
        }
    })
    .parse_next(input)
}
/// Implementation of `u8`, monomorphized over `PARTIAL` so the complete-input
/// path carries no `is_partial` check at runtime.
fn u8_<I, E: ParserError<I>, const PARTIAL: bool>(input: &mut I) -> PResult<u8, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
{
    match input.next_token() {
        Some(token) => Ok(token),
        // Out of data on a partial stream: ask the caller for one more byte.
        None if PARTIAL && input.is_partial() => Err(ErrMode::Incomplete(Needed::new(1))),
        // Out of data on a complete stream: a plain backtrackable failure.
        None => Err(ErrMode::Backtrack(E::from_error_kind(input, ErrorKind::Token))),
    }
}
/// Parses an unsigned 2-byte integer with runtime-selected endianness.
///
/// `winnow::binary::Endianness::Big` selects big-endian decoding,
/// `winnow::binary::Endianness::Little` selects little-endian decoding,
/// and `Endianness::Native` follows the target's byte order.
///
/// *Complete version*: Fails with an error when fewer than 2 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::u16;
///
/// let be_u16 = |s| {
///     u16(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_u16(&b"\x00\x03abcefg"[..]), Ok((&b"abcefg"[..], 0x0003)));
/// assert_eq!(be_u16(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
///
/// let le_u16 = |s| {
///     u16(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_u16(&b"\x00\x03abcefg"[..]), Ok((&b"abcefg"[..], 0x0300)));
/// assert_eq!(le_u16(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::u16;
///
/// let be_u16 = |s| {
///     u16::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_u16(Partial::new(&b"\x00\x03abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x0003)));
/// assert_eq!(be_u16(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(1))));
///
/// let le_u16 = |s| {
///     u16::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_u16(Partial::new(&b"\x00\x03abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x0300)));
/// assert_eq!(le_u16(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn u16<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, u16, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // All arms share one signature, so they coerce to a common fn pointer.
        let selected = match endian {
            Endianness::Big => be_u16,
            Endianness::Little => le_u16,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_u16,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_u16,
        };
        selected(input)
    }
}
/// Parses an unsigned 3-byte integer with runtime-selected endianness.
///
/// `winnow::binary::Endianness::Big` selects big-endian decoding,
/// `winnow::binary::Endianness::Little` selects little-endian decoding,
/// and `Endianness::Native` follows the target's byte order.
///
/// *Complete version*: Fails with an error when fewer than 3 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::u24;
///
/// let be_u24 = |s| {
///     u24(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_u24(&b"\x00\x03\x05abcefg"[..]), Ok((&b"abcefg"[..], 0x000305)));
/// assert_eq!(be_u24(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
///
/// let le_u24 = |s| {
///     u24(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_u24(&b"\x00\x03\x05abcefg"[..]), Ok((&b"abcefg"[..], 0x050300)));
/// assert_eq!(le_u24(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::u24;
///
/// let be_u24 = |s| {
///     u24::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_u24(Partial::new(&b"\x00\x03\x05abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x000305)));
/// assert_eq!(be_u24(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(2))));
///
/// let le_u24 = |s| {
///     u24::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_u24(Partial::new(&b"\x00\x03\x05abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x050300)));
/// assert_eq!(le_u24(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(2))));
/// ```
#[inline(always)]
pub fn u24<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, u32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // All arms share one signature, so they coerce to a common fn pointer.
        let selected = match endian {
            Endianness::Big => be_u24,
            Endianness::Little => le_u24,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_u24,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_u24,
        };
        selected(input)
    }
}
/// Parses an unsigned 4-byte integer with runtime-selected endianness.
///
/// `winnow::binary::Endianness::Big` selects big-endian decoding,
/// `winnow::binary::Endianness::Little` selects little-endian decoding,
/// and `Endianness::Native` follows the target's byte order.
///
/// *Complete version*: Fails with an error when fewer than 4 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::u32;
///
/// let be_u32 = |s| {
///     u32(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_u32(&b"\x00\x03\x05\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x00030507)));
/// assert_eq!(be_u32(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
///
/// let le_u32 = |s| {
///     u32(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_u32(&b"\x00\x03\x05\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x07050300)));
/// assert_eq!(le_u32(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::u32;
///
/// let be_u32 = |s| {
///     u32::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_u32(Partial::new(&b"\x00\x03\x05\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x00030507)));
/// assert_eq!(be_u32(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(3))));
///
/// let le_u32 = |s| {
///     u32::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_u32(Partial::new(&b"\x00\x03\x05\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x07050300)));
/// assert_eq!(le_u32(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(3))));
/// ```
#[inline(always)]
pub fn u32<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, u32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // All arms share one signature, so they coerce to a common fn pointer.
        let selected = match endian {
            Endianness::Big => be_u32,
            Endianness::Little => le_u32,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_u32,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_u32,
        };
        selected(input)
    }
}
/// Parses an unsigned 8-byte integer with runtime-selected endianness.
///
/// `winnow::binary::Endianness::Big` selects big-endian decoding,
/// `winnow::binary::Endianness::Little` selects little-endian decoding,
/// and `Endianness::Native` follows the target's byte order.
///
/// *Complete version*: Fails with an error when fewer than 8 bytes remain.
///
/// *Partial version*: Returns `Err(winnow::error::ErrMode::Incomplete(_))` when more data is needed.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::u64;
///
/// let be_u64 = |s| {
///     u64(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_u64(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x0001020304050607)));
/// assert_eq!(be_u64(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
///
/// let le_u64 = |s| {
///     u64(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_u64(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x0706050403020100)));
/// assert_eq!(le_u64(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::u64;
///
/// let be_u64 = |s| {
///     u64::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_u64(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x0001020304050607)));
/// assert_eq!(be_u64(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(7))));
///
/// let le_u64 = |s| {
///     u64::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_u64(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x0706050403020100)));
/// assert_eq!(le_u64(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(7))));
/// ```
#[inline(always)]
pub fn u64<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, u64, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // All arms share one signature, so they coerce to a common fn pointer.
        let selected = match endian {
            Endianness::Big => be_u64,
            Endianness::Little => le_u64,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_u64,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_u64,
        };
        selected(input)
    }
}
/// Recognizes an unsigned 16 byte integer
///
/// If the parameter is `winnow::binary::Endianness::Big`, parse a big endian u128 integer,
/// otherwise if `winnow::binary::Endianness::Little` parse a little endian u128 integer.
///
/// *Complete version*: returns an error if there is not enough input data
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::u128;
///
/// let be_u128 = |s| {
/// u128(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_u128(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x00010203040506070001020304050607)));
/// assert_eq!(be_u128(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
///
/// let le_u128 = |s| {
/// u128(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_u128(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x07060504030201000706050403020100)));
/// assert_eq!(le_u128(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::u128;
///
/// let be_u128 = |s| {
/// u128::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_u128(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x00010203040506070001020304050607)));
/// assert_eq!(be_u128(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(15))));
///
/// let le_u128 = |s| {
/// u128::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_u128(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x07060504030201000706050403020100)));
/// assert_eq!(le_u128(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(15))));
/// ```
#[inline(always)]
pub fn u128<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, u128, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // Pick the concrete byte-order parser; `Native` is resolved at
        // compile time via `cfg(target_endian)`.
        let parser = match endian {
            Endianness::Big => be_u128,
            Endianness::Little => le_u128,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_u128,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_u128,
        };
        parser(input)
    }
}
/// Recognizes a signed 1 byte integer
///
/// **Note:** that endianness does not apply to 1 byte numbers.
///
/// *Complete version*: returns an error if there is not enough input data
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::i8;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], i8> {
/// i8.parse_peek(s)
/// }
///
/// assert_eq!(parser(&b"\x00\x03abcefg"[..]), Ok((&b"\x03abcefg"[..], 0x00)));
/// assert_eq!(parser(&b""[..]), Err(ErrMode::Backtrack(InputError::new(&[][..], ErrorKind::Token))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::i8;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, i8> {
/// i8.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&b"\x00\x03abcefg"[..])), Ok((Partial::new(&b"\x03abcefg"[..]), 0x00)));
/// assert_eq!(parser(Partial::new(&b""[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn i8<I, E: ParserError<I>>(input: &mut I) -> PResult<i8, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
{
    trace("i8", move |input: &mut I| {
        // Read one byte with the partial/complete variant matching the
        // stream, then reinterpret it as signed.
        let byte = if <I as StreamIsPartial>::is_partial_supported() {
            u8_::<_, _, true>(input)
        } else {
            u8_::<_, _, false>(input)
        };
        byte.map(|value| value as i8)
    })
    .parse_next(input)
}
/// Recognizes a signed 2 byte integer
///
/// If the parameter is `winnow::binary::Endianness::Big`, parse a big endian i16 integer,
/// otherwise if `winnow::binary::Endianness::Little` parse a little endian i16 integer.
///
/// *Complete version*: returns an error if there is not enough input data
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::i16;
///
/// let be_i16 = |s| {
/// i16(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_i16(&b"\x00\x03abcefg"[..]), Ok((&b"abcefg"[..], 0x0003)));
/// assert_eq!(be_i16(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
///
/// let le_i16 = |s| {
/// i16(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_i16(&b"\x00\x03abcefg"[..]), Ok((&b"abcefg"[..], 0x0300)));
/// assert_eq!(le_i16(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::i16;
///
/// let be_i16 = |s| {
/// i16::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_i16(Partial::new(&b"\x00\x03abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x0003)));
/// assert_eq!(be_i16(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(1))));
///
/// let le_i16 = |s| {
/// i16::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_i16(Partial::new(&b"\x00\x03abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x0300)));
/// assert_eq!(le_i16(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn i16<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, i16, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // Pick the concrete byte-order parser; `Native` is resolved at
        // compile time via `cfg(target_endian)`.
        let parser = match endian {
            Endianness::Big => be_i16,
            Endianness::Little => le_i16,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_i16,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_i16,
        };
        parser(input)
    }
}
/// Recognizes a signed 3 byte integer
///
/// If the parameter is `winnow::binary::Endianness::Big`, parse a big endian i24 integer,
/// otherwise if `winnow::binary::Endianness::Little` parse a little endian i24 integer.
///
/// *Complete version*: returns an error if there is not enough input data
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::i24;
///
/// let be_i24 = |s| {
/// i24(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_i24(&b"\x00\x03\x05abcefg"[..]), Ok((&b"abcefg"[..], 0x000305)));
/// assert_eq!(be_i24(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
///
/// let le_i24 = |s| {
/// i24(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_i24(&b"\x00\x03\x05abcefg"[..]), Ok((&b"abcefg"[..], 0x050300)));
/// assert_eq!(le_i24(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::i24;
///
/// let be_i24 = |s| {
/// i24::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_i24(Partial::new(&b"\x00\x03\x05abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x000305)));
/// assert_eq!(be_i24(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(2))));
///
/// let le_i24 = |s| {
/// i24::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_i24(Partial::new(&b"\x00\x03\x05abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x050300)));
/// assert_eq!(le_i24(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(2))));
/// ```
#[inline(always)]
pub fn i24<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, i32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // Pick the concrete byte-order parser; `Native` is resolved at
        // compile time via `cfg(target_endian)`. The 3-byte value is
        // widened to `i32` by the underlying parser.
        let parser = match endian {
            Endianness::Big => be_i24,
            Endianness::Little => le_i24,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_i24,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_i24,
        };
        parser(input)
    }
}
/// Recognizes a signed 4 byte integer
///
/// If the parameter is `winnow::binary::Endianness::Big`, parse a big endian i32 integer,
/// otherwise if `winnow::binary::Endianness::Little` parse a little endian i32 integer.
///
/// *Complete version*: returns an error if there is not enough input data
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::i32;
///
/// let be_i32 = |s| {
/// i32(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_i32(&b"\x00\x03\x05\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x00030507)));
/// assert_eq!(be_i32(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
///
/// let le_i32 = |s| {
/// i32(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_i32(&b"\x00\x03\x05\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x07050300)));
/// assert_eq!(le_i32(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::i32;
///
/// let be_i32 = |s| {
/// i32::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_i32(Partial::new(&b"\x00\x03\x05\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x00030507)));
/// assert_eq!(be_i32(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(3))));
///
/// let le_i32 = |s| {
/// i32::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_i32(Partial::new(&b"\x00\x03\x05\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x07050300)));
/// assert_eq!(le_i32(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(3))));
/// ```
#[inline(always)]
pub fn i32<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, i32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // Pick the concrete byte-order parser; `Native` is resolved at
        // compile time via `cfg(target_endian)`.
        let parser = match endian {
            Endianness::Big => be_i32,
            Endianness::Little => le_i32,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_i32,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_i32,
        };
        parser(input)
    }
}
/// Recognizes a signed 8 byte integer
///
/// If the parameter is `winnow::binary::Endianness::Big`, parse a big endian i64 integer,
/// otherwise if `winnow::binary::Endianness::Little` parse a little endian i64 integer.
///
/// *Complete version*: returns an error if there is not enough input data
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::i64;
///
/// let be_i64 = |s| {
/// i64(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_i64(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x0001020304050607)));
/// assert_eq!(be_i64(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
///
/// let le_i64 = |s| {
/// i64(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_i64(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x0706050403020100)));
/// assert_eq!(le_i64(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::i64;
///
/// let be_i64 = |s| {
/// i64::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_i64(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x0001020304050607)));
/// assert_eq!(be_i64(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(7))));
///
/// let le_i64 = |s| {
/// i64::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_i64(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x0706050403020100)));
/// assert_eq!(le_i64(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(7))));
/// ```
#[inline(always)]
pub fn i64<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, i64, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // Pick the concrete byte-order parser; `Native` is resolved at
        // compile time via `cfg(target_endian)`.
        let parser = match endian {
            Endianness::Big => be_i64,
            Endianness::Little => le_i64,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_i64,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_i64,
        };
        parser(input)
    }
}
/// Recognizes a signed 16 byte integer
///
/// If the parameter is `winnow::binary::Endianness::Big`, parse a big endian i128 integer,
/// otherwise if `winnow::binary::Endianness::Little` parse a little endian i128 integer.
///
/// *Complete version*: returns an error if there is not enough input data
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::i128;
///
/// let be_i128 = |s| {
/// i128(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_i128(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x00010203040506070001020304050607)));
/// assert_eq!(be_i128(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
///
/// let le_i128 = |s| {
/// i128(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_i128(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..]), Ok((&b"abcefg"[..], 0x07060504030201000706050403020100)));
/// assert_eq!(le_i128(&b"\x01"[..]), Err(ErrMode::Backtrack(InputError::new(&[0x01][..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::i128;
///
/// let be_i128 = |s| {
/// i128::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_i128(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x00010203040506070001020304050607)));
/// assert_eq!(be_i128(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(15))));
///
/// let le_i128 = |s| {
/// i128::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_i128(Partial::new(&b"\x00\x01\x02\x03\x04\x05\x06\x07\x00\x01\x02\x03\x04\x05\x06\x07abcefg"[..])), Ok((Partial::new(&b"abcefg"[..]), 0x07060504030201000706050403020100)));
/// assert_eq!(le_i128(Partial::new(&b"\x01"[..])), Err(ErrMode::Incomplete(Needed::new(15))));
/// ```
#[inline(always)]
pub fn i128<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, i128, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // Pick the concrete byte-order parser; `Native` is resolved at
        // compile time via `cfg(target_endian)`.
        let parser = match endian {
            Endianness::Big => be_i128,
            Endianness::Little => le_i128,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_i128,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_i128,
        };
        parser(input)
    }
}
/// Recognizes a big endian 4 bytes floating point number.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_f32;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], f32> {
/// be_f32.parse_peek(s)
/// }
///
/// assert_eq!(parser(&[0x41, 0x48, 0x00, 0x00][..]), Ok((&b""[..], 12.5)));
/// assert_eq!(parser(&b"abc"[..]), Err(ErrMode::Backtrack(InputError::new(&b"abc"[..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_f32;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, f32> {
/// be_f32.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&[0x40, 0x29, 0x00, 0x00][..])), Ok((Partial::new(&b""[..]), 2.640625)));
/// assert_eq!(parser(Partial::new(&[0x01][..])), Err(ErrMode::Incomplete(Needed::new(3))));
/// ```
#[inline(always)]
pub fn be_f32<I, E: ParserError<I>>(input: &mut I) -> PResult<f32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    trace("be_f32", move |input: &mut I| {
        // Read 4 big-endian bytes as a u32, then reinterpret the bit
        // pattern as an IEEE-754 single-precision float.
        let bits = be_uint::<_, u32, _>(input, 4)?;
        Ok(f32::from_bits(bits))
    })
    .parse_next(input)
}
/// Recognizes a big endian 8 bytes floating point number.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::be_f64;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], f64> {
/// be_f64.parse_peek(s)
/// }
///
/// assert_eq!(parser(&[0x40, 0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00][..]), Ok((&b""[..], 12.5)));
/// assert_eq!(parser(&b"abc"[..]), Err(ErrMode::Backtrack(InputError::new(&b"abc"[..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::be_f64;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, f64> {
/// be_f64::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&[0x40, 0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00][..])), Ok((Partial::new(&b""[..]), 12.5)));
/// assert_eq!(parser(Partial::new(&[0x01][..])), Err(ErrMode::Incomplete(Needed::new(7))));
/// ```
#[inline(always)]
pub fn be_f64<I, E: ParserError<I>>(input: &mut I) -> PResult<f64, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    trace("be_f64", move |input: &mut I| {
        // Read 8 big-endian bytes as a u64, then reinterpret the bit
        // pattern as an IEEE-754 double-precision float.
        let bits = be_uint::<_, u64, _>(input, 8)?;
        Ok(f64::from_bits(bits))
    })
    .parse_next(input)
}
/// Recognizes a little endian 4 bytes floating point number.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_f32;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], f32> {
/// le_f32.parse_peek(s)
/// }
///
/// assert_eq!(parser(&[0x00, 0x00, 0x48, 0x41][..]), Ok((&b""[..], 12.5)));
/// assert_eq!(parser(&b"abc"[..]), Err(ErrMode::Backtrack(InputError::new(&b"abc"[..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_f32;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, f32> {
/// le_f32::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&[0x00, 0x00, 0x48, 0x41][..])), Ok((Partial::new(&b""[..]), 12.5)));
/// assert_eq!(parser(Partial::new(&[0x01][..])), Err(ErrMode::Incomplete(Needed::new(3))));
/// ```
#[inline(always)]
pub fn le_f32<I, E: ParserError<I>>(input: &mut I) -> PResult<f32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    trace("le_f32", move |input: &mut I| {
        // Read 4 little-endian bytes as a u32, then reinterpret the bit
        // pattern as an IEEE-754 single-precision float.
        let bits = le_uint::<_, u32, _>(input, 4)?;
        Ok(f32::from_bits(bits))
    })
    .parse_next(input)
}
/// Recognizes a little endian 8 bytes floating point number.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::le_f64;
///
/// fn parser(s: &[u8]) -> IResult<&[u8], f64> {
/// le_f64.parse_peek(s)
/// }
///
/// assert_eq!(parser(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x40][..]), Ok((&b""[..], 12.5)));
/// assert_eq!(parser(&b"abc"[..]), Err(ErrMode::Backtrack(InputError::new(&b"abc"[..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::Partial;
/// use winnow::binary::le_f64;
///
/// fn parser(s: Partial<&[u8]>) -> IResult<Partial<&[u8]>, f64> {
/// le_f64::<_, InputError<_>>.parse_peek(s)
/// }
///
/// assert_eq!(parser(Partial::new(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x41][..])), Ok((Partial::new(&b""[..]), 3145728.0)));
/// assert_eq!(parser(Partial::new(&[0x01][..])), Err(ErrMode::Incomplete(Needed::new(7))));
/// ```
#[inline(always)]
pub fn le_f64<I, E: ParserError<I>>(input: &mut I) -> PResult<f64, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    // Parse 8 little-endian bytes as a u64 and reinterpret the bits as an
    // IEEE-754 double-precision float.
    //
    // Fix: the trace label previously read "be_f64", which mislabeled this
    // little-endian parser in debug traces (copy/paste from `be_f64`).
    trace("le_f64", move |input: &mut I| {
        le_uint::<_, u64, _>(input, 8).map(f64::from_bits)
    })
    .parse_next(input)
}
/// Recognizes a 4 byte floating point number
///
/// If the parameter is `winnow::binary::Endianness::Big`, parse a big endian f32 float,
/// otherwise if `winnow::binary::Endianness::Little` parse a little endian f32 float.
///
/// *Complete version*: returns an error if there is not enough input data
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::f32;
///
/// let be_f32 = |s| {
/// f32(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_f32(&[0x41, 0x48, 0x00, 0x00][..]), Ok((&b""[..], 12.5)));
/// assert_eq!(be_f32(&b"abc"[..]), Err(ErrMode::Backtrack(InputError::new(&b"abc"[..], ErrorKind::Slice))));
///
/// let le_f32 = |s| {
/// f32(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_f32(&[0x00, 0x00, 0x48, 0x41][..]), Ok((&b""[..], 12.5)));
/// assert_eq!(le_f32(&b"abc"[..]), Err(ErrMode::Backtrack(InputError::new(&b"abc"[..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::f32;
///
/// let be_f32 = |s| {
/// f32::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_f32(Partial::new(&[0x41, 0x48, 0x00, 0x00][..])), Ok((Partial::new(&b""[..]), 12.5)));
/// assert_eq!(be_f32(Partial::new(&b"abc"[..])), Err(ErrMode::Incomplete(Needed::new(1))));
///
/// let le_f32 = |s| {
/// f32::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_f32(Partial::new(&[0x00, 0x00, 0x48, 0x41][..])), Ok((Partial::new(&b""[..]), 12.5)));
/// assert_eq!(le_f32(Partial::new(&b"abc"[..])), Err(ErrMode::Incomplete(Needed::new(1))));
/// ```
#[inline(always)]
pub fn f32<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, f32, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // Pick the concrete byte-order parser; `Native` is resolved at
        // compile time via `cfg(target_endian)`.
        let parser = match endian {
            Endianness::Big => be_f32,
            Endianness::Little => le_f32,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_f32,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_f32,
        };
        parser(input)
    }
}
/// Recognizes an 8 byte floating point number
///
/// If the parameter is `winnow::binary::Endianness::Big`, parse a big endian f64 float,
/// otherwise if `winnow::binary::Endianness::Little` parse a little endian f64 float.
///
/// *Complete version*: returns an error if there is not enough input data
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// use winnow::binary::f64;
///
/// let be_f64 = |s| {
/// f64(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_f64(&[0x40, 0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00][..]), Ok((&b""[..], 12.5)));
/// assert_eq!(be_f64(&b"abc"[..]), Err(ErrMode::Backtrack(InputError::new(&b"abc"[..], ErrorKind::Slice))));
///
/// let le_f64 = |s| {
/// f64(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_f64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x40][..]), Ok((&b""[..], 12.5)));
/// assert_eq!(le_f64(&b"abc"[..]), Err(ErrMode::Backtrack(InputError::new(&b"abc"[..], ErrorKind::Slice))));
/// ```
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::InputError, error::Needed};
/// # use winnow::prelude::*;
/// # use winnow::error::Needed::Size;
/// # use winnow::Partial;
/// use winnow::binary::f64;
///
/// let be_f64 = |s| {
/// f64::<_, InputError<_>>(winnow::binary::Endianness::Big).parse_peek(s)
/// };
///
/// assert_eq!(be_f64(Partial::new(&[0x40, 0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00][..])), Ok((Partial::new(&b""[..]), 12.5)));
/// assert_eq!(be_f64(Partial::new(&b"abc"[..])), Err(ErrMode::Incomplete(Needed::new(5))));
///
/// let le_f64 = |s| {
/// f64::<_, InputError<_>>(winnow::binary::Endianness::Little).parse_peek(s)
/// };
///
/// assert_eq!(le_f64(Partial::new(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x40][..])), Ok((Partial::new(&b""[..]), 12.5)));
/// assert_eq!(le_f64(Partial::new(&b"abc"[..])), Err(ErrMode::Incomplete(Needed::new(5))));
/// ```
#[inline(always)]
pub fn f64<I, E: ParserError<I>>(endian: Endianness) -> impl Parser<I, f64, E>
where
    I: StreamIsPartial,
    I: Stream<Token = u8>,
    <I as Stream>::Slice: AsBytes,
{
    move |input: &mut I| {
        // Pick the concrete byte-order parser; `Native` is resolved at
        // compile time via `cfg(target_endian)`.
        let parser = match endian {
            Endianness::Big => be_f64,
            Endianness::Little => le_f64,
            #[cfg(target_endian = "big")]
            Endianness::Native => be_f64,
            #[cfg(target_endian = "little")]
            Endianness::Native => le_f64,
        };
        parser(input)
    }
}
/// Gets a number from the parser and returns a
/// subslice of the input of that size.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Arguments
/// * `f` The parser to apply.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::ErrorKind, error::Needed, stream::Partial};
/// # use winnow::prelude::*;
/// use winnow::Bytes;
/// use winnow::binary::be_u16;
/// use winnow::binary::length_data;
/// use winnow::token::tag;
///
/// type Stream<'i> = Partial<&'i Bytes>;
///
/// fn stream(b: &[u8]) -> Stream<'_> {
/// Partial::new(Bytes::new(b))
/// }
///
/// fn parser(s: Stream<'_>) -> IResult<Stream<'_>, &[u8]> {
/// length_data(be_u16).parse_peek(s)
/// }
///
/// assert_eq!(parser(stream(b"\x00\x03abcefg")), Ok((stream(&b"efg"[..]), &b"abc"[..])));
/// assert_eq!(parser(stream(b"\x00\x03a")), Err(ErrMode::Incomplete(Needed::new(2))));
/// ```
pub fn length_data<I, N, E, F>(mut f: F) -> impl Parser<I, <I as Stream>::Slice, E>
where
    I: StreamIsPartial,
    I: Stream,
    N: ToUsize,
    F: Parser<I, N, E>,
    E: ParserError<I>,
{
    trace("length_data", move |input: &mut I| {
        // First parse the length prefix, then take exactly that many
        // tokens from the remaining input.
        let count = f.parse_next(input)?;
        crate::token::take(count).parse_next(input)
    })
}
/// Gets a number from the first parser,
/// takes a subslice of the input of that size,
/// then applies the second parser on that subslice.
/// If the second parser returns `Incomplete`,
/// `length_value` will return an error.
///
/// *Complete version*: Returns an error if there is not enough input data.
///
/// *Partial version*: Will return `Err(winnow::error::ErrMode::Incomplete(_))` if there is not enough data.
///
/// # Arguments
/// * `f` The parser to apply.
/// * `g` The parser to apply on the subslice.
///
/// # Example
///
/// ```rust
/// # use winnow::{error::ErrMode, error::{InputError, ErrorKind}, error::Needed, stream::{Partial, StreamIsPartial}};
/// # use winnow::prelude::*;
/// use winnow::Bytes;
/// use winnow::binary::be_u16;
/// use winnow::binary::length_value;
/// use winnow::token::tag;
///
/// type Stream<'i> = Partial<&'i Bytes>;
///
/// fn stream(b: &[u8]) -> Stream<'_> {
/// Partial::new(Bytes::new(b))
/// }
///
/// fn complete_stream(b: &[u8]) -> Stream<'_> {
/// let mut p = Partial::new(Bytes::new(b));
/// let _ = p.complete();
/// p
/// }
///
/// fn parser(s: Stream<'_>) -> IResult<Stream<'_>, &[u8]> {
/// length_value(be_u16, "abc").parse_peek(s)
/// }
///
/// assert_eq!(parser(stream(b"\x00\x03abcefg")), Ok((stream(&b"efg"[..]), &b"abc"[..])));
/// assert_eq!(parser(stream(b"\x00\x03123123")), Err(ErrMode::Backtrack(InputError::new(complete_stream(&b"123"[..]), ErrorKind::Tag))));
/// assert_eq!(parser(stream(b"\x00\x03a")), Err(ErrMode::Incomplete(Needed::new(2))));
/// ```
pub fn length_value<I, O, N, E, F, G>(mut f: F, mut g: G) -> impl Parser<I, O, E>
where
    I: StreamIsPartial,
    I: Stream + UpdateSlice + Clone,
    N: ToUsize,
    F: Parser<I, N, E>,
    G: Parser<I, O, E>,
    E: ParserError<I>,
{
    trace("length_value", move |input: &mut I| {
        // Take the length-prefixed slice, wrap it back up as a stream of
        // the same type, and mark it complete so `g` cannot ask for more
        // data than the prefix promised (`Incomplete` becomes an error).
        let slice = length_data(f.by_ref()).parse_next(input)?;
        let mut inner = I::update_slice(input.clone(), slice);
        let _ = inner.complete();
        g.by_ref().complete_err().parse_next(&mut inner)
    })
}
/// Gets a number from the first parser,
/// then applies the second parser that many times.
///
/// # Arguments
/// * `f` The parser to apply to obtain the count.
/// * `g` The parser to apply repeatedly.
///
/// # Example
///
/// ```rust
/// # #[cfg(feature = "std")] {
/// # use winnow::prelude::*;
/// # use winnow::{error::ErrMode, error::{InputError, ErrorKind}, error::Needed};
/// # use winnow::prelude::*;
/// use winnow::Bytes;
/// use winnow::binary::u8;
/// use winnow::binary::length_count;
/// use winnow::token::tag;
///
/// type Stream<'i> = &'i Bytes;
///
/// fn stream(b: &[u8]) -> Stream<'_> {
/// Bytes::new(b)
/// }
///
/// fn parser(s: Stream<'_>) -> IResult<Stream<'_>, Vec<&[u8]>> {
/// length_count(u8.map(|i| {
/// println!("got number: {}", i);
/// i
/// }), "abc").parse_peek(s)
/// }
///
/// assert_eq!(parser(stream(b"\x02abcabcabc")), Ok((stream(b"abc"), vec![&b"abc"[..], &b"abc"[..]])));
/// assert_eq!(parser(stream(b"\x03123123123")), Err(ErrMode::Backtrack(InputError::new(stream(b"123123123"), ErrorKind::Tag))));
/// # }
/// ```
pub fn length_count<I, O, C, N, E, F, G>(mut f: F, mut g: G) -> impl Parser<I, C, E>
where
    I: Stream,
    N: ToUsize,
    C: Accumulate<O>,
    F: Parser<I, N, E>,
    G: Parser<I, O, E>,
    E: ParserError<I>,
{
    trace("length_count", move |input: &mut I| {
        // Parse the element count, then apply `g` exactly that many times.
        let count = f.parse_next(input)?.to_usize();
        repeat(count, g.by_ref()).parse_next(input)
    })
}
|
// temporary, need to figure out why applying this before the include doesn't work
#![cfg_attr(feature = "cargo-clippy", allow(suspicious_else_formatting, single_match, cyclomatic_complexity, unit_arg, naive_bytecount, len_zero))]
/// A top-level declaration of the data model: either a scalar or a full type.
#[derive(Serialize)]
pub enum RootTypes {
    Scalar(DataModelScalarDeclaration),
    Type(DataModelTypeDeclaration)
}
/// Declaration of a named scalar in the data model.
#[derive(Serialize)]
pub struct DataModelScalarDeclaration {
    pub name: String,
}
/// Declaration of a composite type: a name plus its ordered fields.
#[derive(Serialize)]
pub struct DataModelTypeDeclaration {
    pub name: String,
    pub fields: Vec<DataModelFieldDeclaration>,
}
/// A single field of a type declaration: its name, type reference, and directives.
#[derive(Serialize)]
pub struct DataModelFieldDeclaration {
    pub name: String,
    pub field_type: DataModelTypeRef,
    pub directives: Vec<DataModelFieldDirective>,
}
/// Reference to a type, possibly nested (e.g. a list wrapping an inner type).
#[derive(Serialize)]
pub struct DataModelTypeRef {
    pub name: String,
    // Present when this type wraps another (container types); None for leaves.
    pub inner_type: Option<Box<DataModelTypeRef>>,
    pub required: bool,
}
/// A directive attached to a field, with its arguments.
#[derive(Serialize)]
pub struct DataModelFieldDirective {
    pub name: String,
    pub arguments: Vec<DataModelFieldDirectiveArg>,
}
/// One named argument of a field directive.
#[derive(Serialize)]
pub struct DataModelFieldDirectiveArg {
    pub name: String,
    pub value: String,
    // True when the value was written as a quoted string in the source.
    pub quoted: bool,
}
// need to figure out why this doesn't work.
// #[cfg_attr(feature = "cargo-clippy", allow(suspicious_else_formatting, single_match, cyclomatic_complexity, unit_arg, naive_bytecount, len_zero))]
include!(concat!(env!("OUT_DIR"), "/datamodel_grammar.rs"));
|
//! Truth tables.
use crate::ir;
use fxhash::FxHashMap;
use itertools::Itertools;
use log::debug;
use std;
use std::collections::hash_map;
use utils::*;
/// Lists the rules to apply for each combination of input.
#[derive(Debug)]
struct TruthTable {
    /// For each table dimension: the input id and the value sets indexing it.
    values: Vec<(usize, Vec<ir::ValueSet>)>,
    /// One `Cell` per combination of input values.
    rules: NDArray<Cell>,
}
impl TruthTable {
    /// Creates the truth table from the set of rules.
    ///
    /// Only enum-typed, non-fragile inputs become table dimensions; counter
    /// and number choices are skipped. Each cell holds the rules instantiated
    /// for the corresponding combination of input values.
    fn build(
        inputs: &[ir::ChoiceInstance],
        rules: &[ir::Rule],
        ir_desc: &ir::IrDesc,
    ) -> Self {
        let values = inputs
            .iter()
            .enumerate()
            .flat_map(|(id, input)| {
                let choice = ir_desc.get_choice(&input.choice);
                // Fragile values cannot be enumerated exhaustively, so the
                // input cannot index a table dimension.
                if !choice.fragile_values().is_empty() {
                    return None;
                }
                match *choice.choice_def() {
                    ir::ChoiceDef::Enum(ref name) => {
                        // One singleton value set per enum variant.
                        let sets = ir_desc
                            .get_enum(name)
                            .values()
                            .keys()
                            .map(|v| {
                                let values = std::iter::once(v.clone()).collect();
                                ir::ValueSet::enum_values(name.clone(), values)
                            })
                            .collect_vec();
                        Some((id, sets))
                    }
                    ir::ChoiceDef::Counter { .. } | ir::ChoiceDef::Number { .. } => None,
                }
            })
            .collect_vec();
        let sizes = values.iter().map(|&(_, ref v)| v.len()).collect_vec();
        // Build one cell per point of the cartesian product of input values.
        let data = NDRange::new(&sizes)
            .map(|index| {
                let input_mapping = index
                    .iter()
                    .zip_eq(&values)
                    .map(|(&idx, &(input, ref values))| (input, &values[idx]))
                    .collect();
                let rules = rules
                    .iter()
                    .flat_map(|x| x.instantiate(inputs, &input_mapping, ir_desc))
                    .collect_vec();
                Cell::build(rules, inputs, ir_desc)
            })
            .collect_vec();
        TruthTable {
            values,
            rules: NDArray::new(sizes, data),
        }
    }
    /// Returns a view on the entire table.
    fn view(&mut self) -> TableView {
        TableView {
            values: &self.values,
            dim_map: (0..self.values.len()).collect(),
            view: self.rules.view_mut(),
        }
    }
}
/// The condition and set constraints of a rule, used as a grouping key in `Cell`.
type RuleConds = (Vec<ir::Condition>, ir::SetConstraints);
/// The rules that apply to a particular combination of choices.
#[derive(Debug, PartialEq, Eq)]
struct Cell {
    /// Maps (conditions, set constraints) to the alternative sets they allow.
    rules: FxHashMap<RuleConds, Vec<ir::ValueSet>>,
}
impl Cell {
    /// Creates a cell from the given rules.
    ///
    /// Rules are normalized, then grouped by (conditions, set constraints);
    /// for a given key, each new alternative set is intersected into an
    /// existing set when possible, otherwise stored separately.
    fn build(
        rules: Vec<ir::Rule>,
        inputs: &[ir::ChoiceInstance],
        ir_desc: &ir::IrDesc,
    ) -> Cell {
        let mut rule_map: FxHashMap<_, Vec<ir::ValueSet>> = FxHashMap::default();
        for mut rule in rules {
            rule.normalize(inputs, ir_desc);
            match rule_map.entry((rule.conditions, rule.set_constraints)) {
                hash_map::Entry::Occupied(mut entry) => {
                    // Try to merge into one of the stored sets by intersection.
                    let mut success = false;
                    for set in entry.get_mut() {
                        if set.intersect(rule.alternatives.clone()) {
                            success = true;
                            break;
                        }
                    }
                    // No set accepted the intersection: keep it as a new set.
                    if !success {
                        entry.get_mut().push(rule.alternatives);
                    }
                }
                hash_map::Entry::Vacant(entry) => {
                    entry.insert(vec![rule.alternatives]);
                }
            };
        }
        Cell { rules: rule_map }
    }
    /// Extracts the rules from the cell. Leaves the cell empty.
    fn extract_rules(&mut self) -> Vec<ir::Rule> {
        self.rules
            .drain()
            .flat_map(|((conds, set_conds), alternatives)| {
                alternatives.into_iter().map(move |alts| ir::Rule {
                    conditions: conds.clone(),
                    alternatives: alts,
                    set_constraints: set_conds.clone(),
                })
            })
            .collect_vec()
    }
    /// Indicates if the cell forbids every alternative: returns `true` when
    /// the unconditional entry contains an empty value set.
    fn is_empty(&self) -> bool {
        self.rules
            .get(&(vec![], ir::SetConstraints::default()))
            .map(|sets| sets.iter().any(|set| set.is_empty()))
            .unwrap_or(false)
    }
}
/// A `TruthTable` with some input fixed.
struct TableView<'a> {
    /// The dimensions of the original table (input id, value sets).
    values: &'a Vec<(usize, Vec<ir::ValueSet>)>,
    /// Maps the id of a view dimension to the id of dimension of the original table.
    dim_map: Vec<usize>,
    /// Mutable window into the cells still covered by this view.
    view: ndarray::ViewMut<'a, Cell>,
}
impl<'a> TableView<'a> {
    /// Returns the number of inputs of the table.
    fn num_inputs(&self) -> usize {
        self.dim_map.len()
    }
    /// Returns the input_id associated to a position.
    fn input_from_pos(&self, input: usize) -> usize {
        self.values[self.dim_map[input]].0
    }
    /// Instantiate the truth table for each value of a given dimension.
    ///
    /// Returns one sub-view per value of the input at `pos`; the dimension is
    /// removed from each sub-view's `dim_map`.
    fn instantiate(&mut self, pos: usize) -> Vec<(&'a ir::ValueSet, TableView)> {
        let dim_map = &self.dim_map;
        let values = &self.values;
        self.values[self.dim_map[pos]]
            .1
            .iter()
            // One split of the underlying view per value of the dimension.
            .zip_eq(self.view.split(pos))
            .map(|(value, view)| {
                let mut dim_map = dim_map.clone();
                dim_map.remove(pos);
                (
                    value,
                    TableView {
                        values,
                        dim_map,
                        view,
                    },
                )
            })
            .collect()
    }
    /// Indicates if the two views have the same cells.
    fn equal_content(&self, other: &Self) -> bool {
        ::itertools::equal(self.into_iter(), other.into_iter())
    }
    /// Indicates if all the cells are empty.
    fn is_empty(&self) -> bool {
        self.into_iter().all(|c| c.is_empty())
    }
}
// Shared iteration over the view's cells (used by `equal_content` and `is_empty`).
impl<'a, 'b> IntoIterator for &'b TableView<'a>
where
    'a: 'b,
{
    type Item = &'b Cell;
    type IntoIter = ndarray::ViewMutIter<'b, Cell>;
    fn into_iter(self) -> Self::IntoIter {
        (&self.view).into_iter()
    }
}
// Mutable iteration over the view's cells.
impl<'a, 'b> IntoIterator for &'b mut TableView<'a>
where
    'a: 'b,
{
    type Item = &'b mut Cell;
    type IntoIter = ndarray::ViewIterMut<'a, 'b, Cell>;
    fn into_iter(self) -> Self::IntoIter {
        (&mut self.view).into_iter()
    }
}
/// Optimizes the given rules into a sub-filter.
///
/// A single rule is emitted as-is; otherwise the rules are compiled into a
/// truth table which is then lowered to a (possibly nested) filter.
pub fn opt_rules(
    inputs: &[ir::ChoiceInstance],
    rules: Vec<ir::Rule>,
    ir_desc: &ir::IrDesc,
) -> ir::SubFilter {
    match rules.len() {
        1 => ir::SubFilter::Rules(rules),
        _ => {
            let mut table = TruthTable::build(inputs, &rules, ir_desc);
            debug!("truth table: {:?}", table);
            truth_table_to_filter(&mut table.view())
        }
    }
}
/// Implements a truth table as a filter.
///
/// Recursively splits the table on inputs (via `table_min_split`), emitting a
/// `Switch` per split and plain `Rules` once no input dimension remains.
fn truth_table_to_filter(table: &mut TableView) -> ir::SubFilter {
    if table.num_inputs() == 0 {
        // Zero-dimensional view: a single cell remains; drain its rules.
        let cell = table.into_iter().next().unwrap();
        let rules = cell.extract_rules();
        return ir::SubFilter::Rules(rules);
    }
    match table_min_split(table).unwrap() {
        // All branches were identical: recurse without emitting a switch.
        TableSplit::Forward { mut sub_view } => truth_table_to_filter(&mut sub_view),
        TableSplit::Switch { input, cases } => {
            let sub_filters = cases
                .into_iter()
                .map(|(values, mut view)| (values, truth_table_to_filter(&mut view)))
                .collect();
            ir::SubFilter::Switch {
                switch: input,
                cases: sub_filters,
            }
        }
    }
}
/// Outcome of splitting a `TableView` on one input.
enum TableSplit<'a> {
    /// The input discriminates the rules: one sub-view per merged value set.
    Switch {
        input: usize,
        cases: Vec<(ir::ValueSet, TableView<'a>)>,
    },
    /// Every non-empty branch had the same content: forward to that single
    /// sub-view without emitting a switch.
    Forward {
        sub_view: TableView<'a>,
    },
}
/// Find the input instantiation with the minimal number of instances.
fn table_min_split<'a, 'b>(table: &'a mut TableView<'b>) -> Option<TableSplit<'a>> {
    let mut min_split: Option<(usize, Vec<_>)> = None;
    // Find the best splitting configuration
    for pos in 0..table.num_inputs() {
        let mut forward = true;
        {
            let mut instances: Vec<(ir::ValueSet, usize, TableView)> = Vec::new();
            let split = table.instantiate(pos);
            // Merge similar branches.
            'instances: for (pos, (value, sub_table)) in split.into_iter().enumerate() {
                // Empty branches are dropped, so this dimension cannot be a
                // transparent forward.
                if sub_table.is_empty() {
                    forward = false;
                    continue 'instances;
                }
                // Fold this branch into an existing instance with identical
                // cells, widening that instance's value set.
                for &mut (ref mut other_values, _, ref other_table) in &mut instances {
                    if sub_table.equal_content(other_table) {
                        other_values.extend(value.clone());
                        continue 'instances;
                    }
                }
                instances.push((value.clone(), pos, sub_table));
            }
            forward &= instances.len() == 1;
            // Update the best split.
            if min_split
                .as_ref()
                .map(|x| x.1.len() > instances.len())
                .unwrap_or(true)
            {
                // Keep only (value set, branch index); the sub-views are
                // rebuilt below once the winner is known.
                let config = instances
                    .into_iter()
                    .map(|(values, vpos, _)| (values, vpos));
                min_split = Some((pos, config.collect_vec()));
            }
        }
        // Early exit if the table is not split.
        if forward {
            let sub_view = table.instantiate(pos).pop().unwrap().1;
            return Some(TableSplit::Forward { sub_view });
        }
    }
    // Replicate the best splitting
    min_split.map(move |(pos, config)| {
        let input = table.input_from_pos(pos);
        let mut views = table.instantiate(pos).into_iter().enumerate();
        let cases = config
            .into_iter()
            .map(|(values, pos)| {
                // `config` is ordered by branch index, so one forward scan
                // over `views` locates each retained branch.
                let view = views.find(|&(other_pos, _)| pos == other_pos).unwrap().1;
                assert!(!view.1.is_empty());
                (values, view.1)
            })
            .collect();
        TableSplit::Switch { input, cases }
    })
}
#[cfg(test)]
/// Exhaustive checks: the truth table and the generated filter must allow
/// exactly the same alternatives as directly evaluating the rules.
pub mod test {
    use super::*;
    use crate::constraint::Constraint;
    use crate::ir;
    use crate::ir::test::{mk_enum_values_set, EvalContext};
    use itertools::Itertools;
    /// Returns the values allowed by the given `Cell`.
    fn eval_cell(cell: &Cell, context: &ir::test::EvalContext) -> ir::ValueSet {
        let enum_name = context.enum_.name().clone();
        let values = context.enum_.values().keys().cloned().collect();
        // Start from the full value set and restrict it with each rule.
        let mut valid_values = ir::ValueSet::enum_values(enum_name, values);
        for (&(ref conds, ref set_constraints), value_sets) in &cell.rules {
            for set in value_sets.clone() {
                context.eval_rule_aux(conds, set_constraints, set, &mut valid_values);
            }
        }
        valid_values
    }
    /// Returns the valid alternatives according to a given table.
    fn eval_table(table: &TruthTable, context: &ir::test::EvalContext) -> ir::ValueSet {
        // For each dimension, the indexes of values compatible with the context.
        let valid_indexes = table
            .values
            .iter()
            .map(|&(input, ref table_values)| {
                let ctx_values = &context.input_values[input];
                table_values
                    .iter()
                    .enumerate()
                    .filter(|&(_, value)| ctx_values.is(value).maybe_true())
                    .map(|(pos, _)| pos)
                    .collect_vec()
            })
            .collect_vec();
        let num_indexes = valid_indexes.iter().map(|x| x.len()).collect_vec();
        let t = ir::ValueType::Enum(context.enum_.name().clone());
        let mut value_set = ir::ValueSet::empty(&t);
        // Union of the results of every reachable cell.
        for indexes in NDRange::new(&num_indexes) {
            let table_index = indexes
                .iter()
                .zip_eq(&valid_indexes)
                .map(|(&idx, table_indexes)| table_indexes[idx])
                .collect_vec();
            value_set.extend(eval_cell(&table.rules[&table_index[..]], context));
        }
        value_set
    }
    /// Ensures the generation of filters works when no rule is present.
    #[test]
    fn no_rules() {
        let _ = ::env_logger::try_init();
        let mut ir_desc = ir::IrDesc::default();
        ir::test::gen_enum("A", 3, &mut ir_desc);
        ir::test::gen_enum("B", 3, &mut ir_desc);
        ir::test::gen_enum("C", 3, &mut ir_desc);
        let enum_ = ir_desc.get_enum("EnumA");
        let inputs = [mk_input("enum_b"), mk_input("enum_c")];
        test_filter(&inputs, &[], enum_, &ir_desc);
    }
    /// Ensures the generation of static condition works correctly.
    #[test]
    fn no_inputs_filter() {
        let _ = ::env_logger::try_init();
        let mut ir_desc = ir::IrDesc::default();
        ir::test::gen_enum("A", 4, &mut ir_desc);
        let enum_ = ir_desc.get_enum("EnumA");
        let rule0 = mk_rule(vec![], "EnumA", &["A_0", "A_1", "A_2"]);
        let rule1 = mk_rule(
            vec![mk_code_cond("code_0")],
            "EnumA",
            &["A_1", "A_2", "A_3"],
        );
        let rules = [rule0, rule1];
        test_filter(&[], &rules, enum_, &ir_desc);
    }
    /// Ensures the generation of filters with a single input works correctly.
    #[test]
    fn single_input_filter() {
        let _ = ::env_logger::try_init();
        let mut ir_desc = ir::IrDesc::default();
        ir::test::gen_enum("A", 4, &mut ir_desc);
        ir::test::gen_enum("B", 4, &mut ir_desc);
        let enum_a = ir_desc.get_enum("EnumA");
        let rule0 = mk_rule(vec![], "EnumA", &["A_0", "A_1", "A_2"]);
        let rule1 = mk_rule(
            vec![mk_enum_cond(0, &["B_0", "B_1"])],
            "EnumA",
            &["A_0", "A_1"],
        );
        let rule2 = mk_rule(
            vec![mk_enum_cond(0, &["B_0", "B_2"]), mk_code_cond("code_0")],
            "EnumA",
            &["A_0", "A_2"],
        );
        let rule3 = mk_rule(vec![mk_enum_cond(0, &["B_3"])], "EnumA", &[]);
        let rules = [rule0, rule1, rule2, rule3];
        test_filter(&[mk_input("enum_b")], &rules, enum_a, &ir_desc)
    }
    /// Ensures the generation of filters with multiple inputs works correctly.
    #[test]
    fn two_inputs_filter() {
        let _ = ::env_logger::try_init();
        let mut ir_desc = ir::IrDesc::default();
        ir::test::gen_enum("A", 4, &mut ir_desc);
        ir::test::gen_enum("B", 3, &mut ir_desc);
        ir::test::gen_enum("C", 3, &mut ir_desc);
        let enum_a = ir_desc.get_enum("EnumA");
        let cond_b1 = mk_enum_cond(0, &["B_1"]);
        let cond_c01 = mk_enum_cond(1, &["C_0", "C_1"]);
        let cond_b12 = mk_enum_cond(0, &["B_1", "B_2"]);
        let cond_c12 = mk_enum_cond(1, &["C_1", "C_2"]);
        let cond_code0 = mk_code_cond("code_0");
        let rules = [
            mk_rule(vec![mk_enum_cond(0, &["B_0"])], "EnumA", &["A_0", "A_1"]),
            mk_rule(vec![cond_b1, cond_c01], "EnumA", &["A_1", "A_2"]),
            mk_rule(
                vec![cond_b12, cond_c12, cond_code0],
                "EnumA",
                &["A_2", "A_3"],
            ),
        ];
        let inputs = [mk_input("enum_b"), mk_input("enum_c")];
        test_filter(&inputs, &rules, enum_a, &ir_desc)
    }
    /// Checks, over every evaluation context, that the truth table and the
    /// filter generated from it both agree with direct rule evaluation.
    fn test_filter(
        inputs: &[ir::ChoiceInstance],
        rules: &[ir::Rule],
        enum_: &ir::Enum,
        ir_desc: &ir::IrDesc,
    ) {
        let static_conds = rules
            .iter()
            .flat_map(|rule| {
                rule.conditions
                    .iter()
                    .flat_map(|x| x.as_static_cond())
                    .map(|x| x.0)
            })
            .unique()
            .collect_vec();
        // Test the table correctness.
        let mut table = TruthTable::build(inputs, &rules, ir_desc);
        for ctx in EvalContext::iter_contexts(ir_desc, enum_, inputs, &static_conds[..]) {
            let table_res = eval_table(&table, &ctx);
            let rules_res = ctx.eval_rules(rules);
            debug!("Context{}", ctx);
            debug!("table res: {:?}", table_res);
            debug!("rules res: {:?}", rules_res);
            assert_eq!(table_res, rules_res);
        }
        // Test the generated filter correctness.
        let filter = truth_table_to_filter(&mut table.view());
        for ctx in EvalContext::iter_contexts(ir_desc, enum_, inputs, &static_conds[..]) {
            let filter_res = ctx.eval_subfilter(&filter);
            let rules_res = ctx.eval_rules(rules);
            debug!("Context{}", ctx);
            debug!("filter res: {:?}", filter_res);
            debug!("rules res: {:?}", rules_res);
            assert_eq!(filter_res, rules_res);
        }
    }
    /// Ensures similar inputs are correctly merged.
    #[test]
    fn normalize_equal_inputs() {
        let mut constraint = Constraint {
            restrict_fragile: true,
            vars: vec![],
            inputs: vec![mk_input("enum_b"), mk_input("enum_b")],
            conditions: vec![mk_enum_cond(0, &["B_0"]), mk_enum_cond(1, &["B_1"])],
        };
        let mut ir_desc = ir::IrDesc::default();
        ir::test::gen_enum("B", 2, &mut ir_desc);
        constraint.dedup_inputs(&ir_desc);
        // The two identical inputs must collapse into one.
        assert_eq!(constraint.inputs.len(), 1);
    }
    /// Creates a code condition.
    fn mk_code_cond(code: &str) -> ir::Condition {
        ir::Condition::Code {
            code: ir::Code {
                code: code.into(),
                vars: vec![],
            },
            negate: false,
        }
    }
    /// Creates an enum condition.
    fn mk_enum_cond(input: usize, values: &[&str]) -> ir::Condition {
        let values = values.iter().map(|&s| s.into()).collect();
        ir::Condition::Enum {
            input,
            values,
            negate: false,
            inverse: false,
        }
    }
    /// Creates a rule.
    fn mk_rule(
        conds: Vec<ir::Condition>,
        enum_: &str,
        alternatives: &[&str],
    ) -> ir::Rule {
        ir::Rule {
            conditions: conds,
            alternatives: mk_enum_values_set(enum_, alternatives),
            set_constraints: ir::SetConstraints::default(),
        }
    }
    /// Creates an input definition.
    fn mk_input(name: &str) -> ir::ChoiceInstance {
        ir::ChoiceInstance {
            choice: name.into(),
            vars: Vec::new(),
        }
    }
}
|
use proconio::{input, marker::Bytes};
use rolling_hash::RollingHash;
fn main() {
    input! {
        n: usize,
        t: Bytes,
    };
    // Reversed copy of the input bytes.
    let mut rev = t.clone();
    rev.reverse();
    let forward: Vec<u64> = t.into_iter().map(u64::from).collect();
    let backward: Vec<u64> = rev.into_iter().map(u64::from).collect();
    let fwd_hash = RollingHash::new(&forward);
    let rev_hash = RollingHash::new(&backward);
    for i in 0..=n {
        // Candidate: prefix of length i glued to the tail of length n - i,
        // compared against the matching window of the reversed string.
        let u = fwd_hash.connect(
            fwd_hash.get(0..i),
            fwd_hash.get((n * 2 - (n - i))..(n * 2)),
            n - i,
        );
        let v = rev_hash.get((n - i)..(n - i + n));
        if u == v {
            let answer: String = backward[(n - i)..(n - i + n)]
                .iter()
                .map(|&b| b as u8 as char)
                .collect();
            print!("{}", answer);
            println!();
            println!("{}", i);
            return;
        }
    }
    println!("-1");
}
|
// Placeholder entry point: directs the user to the real `produce`/`consume`
// binaries instead of doing any work itself.
fn main() {
    println!("Usage: cargo run --bin (produce|consume)");
}
|
#[doc = r"Value read from the register"]
pub struct R {
    // Raw snapshot of the 8-bit register contents.
    bits: u8,
}
#[doc = r"Value to write to the register"]
pub struct W {
    // Staged 8-bit value, committed by `modify`/`write`.
    bits: u8,
}
impl super::RXCSRH4 {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: W is seeded with the current value so fields the
        // closure does not touch are preserved.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, W starts from the reset value, not the current one.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u8 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
/// Value of the field
pub struct USB_RXCSRH4_INCOMPRXR {
    bits: bool,
}
impl USB_RXCSRH4_INCOMPRXR {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy
pub struct _USB_RXCSRH4_INCOMPRXW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH4_INCOMPRXW<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then OR in the new value, in a single masked update.
        self.w.bits = (self.w.bits & !(1 << 0)) | (((value as u8) & 1) << 0);
        self.w
    }
}
/// Value of the field
pub struct USB_RXCSRH4_DTR {
    bits: bool,
}
impl USB_RXCSRH4_DTR {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy
pub struct _USB_RXCSRH4_DTW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH4_DTW<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 1, then OR in the new value, in a single masked update.
        self.w.bits = (self.w.bits & !(1 << 1)) | (((value as u8) & 1) << 1);
        self.w
    }
}
/// Value of the field
pub struct USB_RXCSRH4_DTWER {
    bits: bool,
}
impl USB_RXCSRH4_DTWER {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy
pub struct _USB_RXCSRH4_DTWEW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH4_DTWEW<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 2, then OR in the new value, in a single masked update.
        self.w.bits = (self.w.bits & !(1 << 2)) | (((value as u8) & 1) << 2);
        self.w
    }
}
/// Value of the field
pub struct USB_RXCSRH4_DMAMODR {
    bits: bool,
}
impl USB_RXCSRH4_DMAMODR {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy
pub struct _USB_RXCSRH4_DMAMODW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH4_DMAMODW<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 3, then OR in the new value, in a single masked update.
        self.w.bits = (self.w.bits & !(1 << 3)) | (((value as u8) & 1) << 3);
        self.w
    }
}
/// Value of the field
pub struct USB_RXCSRH4_PIDERRR {
    bits: bool,
}
impl USB_RXCSRH4_PIDERRR {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy
pub struct _USB_RXCSRH4_PIDERRW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH4_PIDERRW<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 4, then OR in the new value, in a single masked update.
        self.w.bits = (self.w.bits & !(1 << 4)) | (((value as u8) & 1) << 4);
        self.w
    }
}
/// Value of the field
pub struct USB_RXCSRH4_DMAENR {
    bits: bool,
}
impl USB_RXCSRH4_DMAENR {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy
pub struct _USB_RXCSRH4_DMAENW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH4_DMAENW<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 5, then OR in the new value, in a single masked update.
        self.w.bits = (self.w.bits & !(1 << 5)) | (((value as u8) & 1) << 5);
        self.w
    }
}
/// Value of the field
pub struct USB_RXCSRH4_AUTORQR {
    bits: bool,
}
impl USB_RXCSRH4_AUTORQR {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy
pub struct _USB_RXCSRH4_AUTORQW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH4_AUTORQW<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 6, then OR in the new value, in a single masked update.
        self.w.bits = (self.w.bits & !(1 << 6)) | (((value as u8) & 1) << 6);
        self.w
    }
}
/// Value of the field
pub struct USB_RXCSRH4_AUTOCLR {
    bits: bool,
}
impl USB_RXCSRH4_AUTOCLR {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy
pub struct _USB_RXCSRH4_AUTOCLW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH4_AUTOCLW<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 7, then OR in the new value, in a single masked update.
        self.w.bits = (self.w.bits & !(1 << 7)) | (((value as u8) & 1) << 7);
        self.w
    }
}
/// Value of the field
pub struct USB_RXCSRH4_DISNYETR {
    bits: bool,
}
impl USB_RXCSRH4_DISNYETR {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy
pub struct _USB_RXCSRH4_DISNYETW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH4_DISNYETW<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 4, then OR in the new value, in a single masked update.
        // NOTE(review): shares bit 4 with PIDERR -- presumably mode-dependent
        // fields per the USB controller datasheet; confirm against the SVD.
        self.w.bits = (self.w.bits & !(1 << 4)) | (((value as u8) & 1) << 4);
        self.w
    }
}
/// Value of the field
pub struct USB_RXCSRH4_ISOR {
    bits: bool,
}
impl USB_RXCSRH4_ISOR {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy
pub struct _USB_RXCSRH4_ISOW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXCSRH4_ISOW<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 6, then OR in the new value, in a single masked update.
        self.w.bits = (self.w.bits & !(1 << 6)) | (((value as u8) & 1) << 6);
        self.w
    }
}
impl R {
    /// Value of the register as raw bits
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
    /// Bit 0 - Incomplete RX Transmission Status
    #[inline(always)]
    pub fn usb_rxcsrh4_incomprx(&self) -> USB_RXCSRH4_INCOMPRXR {
        USB_RXCSRH4_INCOMPRXR { bits: self.bits & (1 << 0) != 0 }
    }
    /// Bit 1 - Data Toggle
    #[inline(always)]
    pub fn usb_rxcsrh4_dt(&self) -> USB_RXCSRH4_DTR {
        USB_RXCSRH4_DTR { bits: self.bits & (1 << 1) != 0 }
    }
    /// Bit 2 - Data Toggle Write Enable
    #[inline(always)]
    pub fn usb_rxcsrh4_dtwe(&self) -> USB_RXCSRH4_DTWER {
        USB_RXCSRH4_DTWER { bits: self.bits & (1 << 2) != 0 }
    }
    /// Bit 3 - DMA Request Mode
    #[inline(always)]
    pub fn usb_rxcsrh4_dmamod(&self) -> USB_RXCSRH4_DMAMODR {
        USB_RXCSRH4_DMAMODR { bits: self.bits & (1 << 3) != 0 }
    }
    /// Bit 4 - PID Error
    #[inline(always)]
    pub fn usb_rxcsrh4_piderr(&self) -> USB_RXCSRH4_PIDERRR {
        USB_RXCSRH4_PIDERRR { bits: self.bits & (1 << 4) != 0 }
    }
    /// Bit 5 - DMA Request Enable
    #[inline(always)]
    pub fn usb_rxcsrh4_dmaen(&self) -> USB_RXCSRH4_DMAENR {
        USB_RXCSRH4_DMAENR { bits: self.bits & (1 << 5) != 0 }
    }
    /// Bit 6 - Auto Request
    #[inline(always)]
    pub fn usb_rxcsrh4_autorq(&self) -> USB_RXCSRH4_AUTORQR {
        USB_RXCSRH4_AUTORQR { bits: self.bits & (1 << 6) != 0 }
    }
    /// Bit 7 - Auto Clear
    #[inline(always)]
    pub fn usb_rxcsrh4_autocl(&self) -> USB_RXCSRH4_AUTOCLR {
        USB_RXCSRH4_AUTOCLR { bits: self.bits & (1 << 7) != 0 }
    }
    /// Bit 4 - Disable NYET
    #[inline(always)]
    pub fn usb_rxcsrh4_disnyet(&self) -> USB_RXCSRH4_DISNYETR {
        USB_RXCSRH4_DISNYETR { bits: self.bits & (1 << 4) != 0 }
    }
    /// Bit 6 - Isochronous Transfers
    #[inline(always)]
    pub fn usb_rxcsrh4_iso(&self) -> USB_RXCSRH4_ISOR {
        USB_RXCSRH4_ISOR { bits: self.bits & (1 << 6) != 0 }
    }
}
impl W {
    /// Writes raw bits to the register
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
        self.bits = bits;
        self
    }
    /// Bit 0 - Incomplete RX Transmission Status
    #[inline(always)]
    pub fn usb_rxcsrh4_incomprx(&mut self) -> _USB_RXCSRH4_INCOMPRXW {
        _USB_RXCSRH4_INCOMPRXW { w: self }
    }
    /// Bit 1 - Data Toggle
    #[inline(always)]
    pub fn usb_rxcsrh4_dt(&mut self) -> _USB_RXCSRH4_DTW {
        _USB_RXCSRH4_DTW { w: self }
    }
    /// Bit 2 - Data Toggle Write Enable
    #[inline(always)]
    pub fn usb_rxcsrh4_dtwe(&mut self) -> _USB_RXCSRH4_DTWEW {
        _USB_RXCSRH4_DTWEW { w: self }
    }
    /// Bit 3 - DMA Request Mode
    #[inline(always)]
    pub fn usb_rxcsrh4_dmamod(&mut self) -> _USB_RXCSRH4_DMAMODW {
        _USB_RXCSRH4_DMAMODW { w: self }
    }
    /// Bit 4 - PID Error
    #[inline(always)]
    pub fn usb_rxcsrh4_piderr(&mut self) -> _USB_RXCSRH4_PIDERRW {
        _USB_RXCSRH4_PIDERRW { w: self }
    }
    /// Bit 5 - DMA Request Enable
    #[inline(always)]
    pub fn usb_rxcsrh4_dmaen(&mut self) -> _USB_RXCSRH4_DMAENW {
        _USB_RXCSRH4_DMAENW { w: self }
    }
    /// Bit 6 - Auto Request
    #[inline(always)]
    pub fn usb_rxcsrh4_autorq(&mut self) -> _USB_RXCSRH4_AUTORQW {
        _USB_RXCSRH4_AUTORQW { w: self }
    }
    /// Bit 7 - Auto Clear
    #[inline(always)]
    pub fn usb_rxcsrh4_autocl(&mut self) -> _USB_RXCSRH4_AUTOCLW {
        _USB_RXCSRH4_AUTOCLW { w: self }
    }
    /// Bit 4 - Disable NYET
    #[inline(always)]
    pub fn usb_rxcsrh4_disnyet(&mut self) -> _USB_RXCSRH4_DISNYETW {
        _USB_RXCSRH4_DISNYETW { w: self }
    }
    /// Bit 6 - Isochronous Transfers
    #[inline(always)]
    pub fn usb_rxcsrh4_iso(&mut self) -> _USB_RXCSRH4_ISOW {
        _USB_RXCSRH4_ISOW { w: self }
    }
}
|
//https://rust-lang-nursery.github.io/rust-cookbook/science/mathematics/statistics.html
fn main() {
    let data = [3, 1, 6, 1, 5, 8, 1, 8, 10, 11];
    let total = data.iter().sum::<i32>() as f32;
    let count = data.len();
    // The mean is undefined for an empty sample, hence the Option.
    let mean = if count > 0 {
        Some(total / count as f32)
    } else {
        None
    };
    println!("The mean is {:?}", mean);
}
use std::collections::HashMap;
use super::{common::SocketId, socket::Socket};
/// Registry of sockets, keyed by the id each socket reports via `get_id`.
#[derive(Default)]
pub struct Sockets {
    sockets: HashMap<SocketId, Box<dyn Socket>>,
}
impl Sockets {
    /// Registers a socket under its own id.
    // NOTE(review): silently replaces any socket already stored under the
    // same id -- confirm callers never register duplicate ids.
    pub fn add(&mut self, socket: Box<dyn Socket>) {
        self.sockets.insert(socket.get_id(), socket);
    }
    /// Looks up a socket by id.
    pub fn get(&self, id: SocketId) -> Option<&Box<dyn Socket>> {
        self.sockets.get(&id)
    }
    /// Looks up a socket by id, mutably.
    pub fn get_mut(&mut self, id: SocketId) -> Option<&mut Box<dyn Socket>> {
        self.sockets.get_mut(&id)
    }
    /// Iterates over all registered sockets (unspecified order).
    pub fn iter(&self) -> impl Iterator<Item = &Box<dyn Socket>> {
        self.sockets.values()
    }
    /// Iterates mutably over all registered sockets (unspecified order).
    pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut Box<dyn Socket>> {
        self.sockets.values_mut()
    }
}
|
//! Compressor mod
//! All compressors going to be here
pub mod compressed;
pub mod uncompressed;
use serde_derive::Deserialize;
use std::boxed::Box;
use std::path::Path;
use crate::backup::backup::Backup;
use crate::compressors::compressed::Zip;
use crate::compressors::uncompressed::Uncompressed;
/// The type of compression used for backup files
#[derive(Copy, Clone, Debug, Deserialize)]
pub enum CompressType {
    /// Zip archive with Deflate compression
    Zip = 1,
    /// Zip archive with Bzip2 compression
    Bzip = 2,
}
/// Result type returned by compression operations.
type CompressResult = std::io::Result<()>;
/// Comprensable trait
///
/// Implemented by every compressor backend (zip-based or pass-through).
pub trait Comprensable {
    /// Initializes the compressor from the backup configuration.
    fn init(&mut self, bkp: &Backup);
    /// Compresses `org` into the backup destination `dest`.
    fn compress(&mut self, org: &Path, dest: &Path) -> CompressResult;
    /// Finishes the compression, flushing any pending output.
    fn finish(&mut self);
}
/// Get the compressor implementation for the given compression type.
///
/// `None` yields the pass-through `Uncompressed` compressor; otherwise a
/// `Zip` compressor is configured with the matching compression method.
pub fn get_compress_by_type(t: Option<CompressType>) -> Box<dyn Comprensable + 'static> {
    // No compression requested: copy files as-is.
    let compress_type = match t {
        None => {
            let c: Uncompressed = Default::default();
            return Box::new(c);
        }
        Some(ct) => ct,
    };
    // Both variants are zip archives; only the compression method differs.
    let method = match compress_type {
        CompressType::Zip => zip::CompressionMethod::Deflated,
        CompressType::Bzip => zip::CompressionMethod::Bzip2,
    };
    let mut c: Zip = Default::default();
    c.options = Some(
        zip::write::FileOptions::default()
            .compression_method(method)
            .unix_permissions(0o755),
    );
    Box::new(c)
}
|
use ahash::AHashMap;
use cogs_gamedev::grids::{Direction4, ICoord};
use crate::simulator::transport::{Cable, TransferError};
use super::{
board::Board,
solutions::Metrics,
transport::{Port, Resource},
};
/// This lets us do a floodfill over several frames.
#[derive(Clone, Debug)]
pub struct FloodFiller {
    /// Active "tips", or frontiers we're moving resources at.
    /// Becomes None when the tip is satisfied at a Sink.
    pub tips: Vec<Option<Tip>>,
    /// Spaces we've already visited. (This should never overlap with any Tip.)
    /// The boolean is for horizontality; were we horizontal when we were in this space?
    ///
    /// This is purely for drawing purposes and NOT for the flood-fill itself!
    pub visited: AHashMap<(ICoord, bool), Resource>,
    /// Number of `step` calls made so far.
    pub cycles: u64,
    /// Cycle count at which the first tip reached its sink, if any has.
    pub min_cycles: Option<u64>,
}
/// A single active frontier of the flood fill.
#[derive(Clone, Debug)]
pub struct Tip {
    /// Current position
    pub pos: ICoord,
    /// Direction this entered the current coordinate from.
    pub facing: Direction4,
    /// The resource this is carrying
    pub resource: Resource,
}
impl FloodFiller {
    /// Make a new FloodFiller operating on the given board.
    pub fn new(board: &Board) -> Self {
        let mut tips = Vec::new();
        // Seed one tip per Source port on the left and right edges,
        // pointed into the board.
        for (conn, dir, x) in [
            // Ports on the left push their stuff east at column 0
            (&board.left, Direction4::East, 0),
            // Ports on the right push their stuff west at column (width-1)
            (&board.right, Direction4::West, board.width - 1),
        ] {
            for (y, port) in conn.ports.iter().enumerate() {
                if let Some(Port::Source(res)) = port {
                    tips.push(Some(Tip {
                        pos: ICoord::new(x as isize, y as isize),
                        facing: dir,
                        resource: res.clone(),
                    }));
                }
            }
        }
        Self {
            tips,
            visited: AHashMap::new(),
            cycles: 0,
            min_cycles: None,
        }
    }
    /// Do one flood-fill step.
    ///
    /// If any problems happened we return them in the vector.
    /// If it's empty, we're all set!
    pub fn step(&mut self, board: &Board) -> Vec<FloodFillError> {
        self.cycles += 1;
        let mut errors = Vec::new();
        // clippy is overzealous here
        #[allow(clippy::manual_flatten)]
        for tip_slot in self.tips.iter_mut() {
            if let Some(tip) = tip_slot {
                // Mark (pos, horizontality) visited; a prior entry means we
                // looped back over our own path.
                if self
                    .visited
                    .insert((tip.pos, tip.facing.is_horizontal()), tip.resource.clone())
                    .is_some()
                {
                    errors.push(FloodFillError::Backtrack(tip.pos));
                    continue;
                }
                if let Some(current_cable) = board.cables.get(&tip.pos) {
                    let out_dir = match current_cable.exit_dir(&tip.resource, tip.facing) {
                        Ok(it) => it,
                        Err(ono) => {
                            // Translate transport-layer errors into
                            // position-tagged flood-fill errors.
                            let err = match ono {
                                TransferError::BadCableKind => {
                                    FloodFillError::BadCableKind(tip.pos)
                                }
                                TransferError::NoEntrance => FloodFillError::NoEntrance(tip.pos),
                            };
                            errors.push(err);
                            continue;
                        }
                    };
                    let target_pos = tip.pos + out_dir;
                    // NOTE(review): `target_cable` is never used — this only
                    // tests that a cable exists at the target position.
                    if let Some(target_cable) = board.cables.get(&target_pos) {
                        tip.pos = target_pos;
                        tip.facing = out_dir;
                    } else {
                        // Perhaps we are "spilling" into an exit.
                        if let Some((Port::Sink(res), _)) = board.get_port(target_pos) {
                            if res != &tip.resource {
                                // oh no...
                                errors.push(FloodFillError::BadOutput(target_pos, res.clone()))
                            } else {
                                // we are done here poggers
                                *tip_slot = None;
                                // Record the cycle on which the first tip finished.
                                if self.min_cycles.is_none() {
                                    self.min_cycles = Some(self.cycles);
                                }
                            }
                        } else {
                            // Nope we spill into space
                            errors.push(FloodFillError::SpilledIntoSpace(target_pos));
                        }
                    }
                } else {
                    // Really don't know how we got here but uh
                    errors.push(FloodFillError::SpilledIntoSpace(tip.pos));
                }
            }
        }
        errors
    }
    /// Did we win? If so return our metrics
    pub fn did_win(&self, board: &Board) -> Option<Metrics> {
        // Winning == every tip slot has been satisfied (set to None).
        if self.tips.iter().all(Option::is_none) {
            Some(Metrics {
                total_cycles: self.cycles,
                min_cycles: self.min_cycles.unwrap_or(0),
                crossovers: board
                    .cables
                    .values()
                    .filter(|x| matches!(x, Cable::Crossover { .. }))
                    .count() as u64,
            })
        } else {
            None
        }
    }
}
/// Problems that can occur during a flood-fill step.
#[derive(Clone)]
pub enum FloodFillError {
    /// `TransferError::BadCableKind` raised at this position.
    BadCableKind(ICoord),
    /// `TransferError::NoEntrance` raised at this position.
    NoEntrance(ICoord),
    /// We spilled something into the vacuum of space.
    SpilledIntoSpace(ICoord),
    /// We somehow tried to go back along a pipe we previously went on
    Backtrack(ICoord),
    /// The port didn't like the resource given
    BadOutput(ICoord, Resource),
}
|
use std::env;
use std::error::Error;
use std::fs;
/// Count (possibly overlapping) occurrences of `pattern` in `text`.
///
/// Returns 0 when `pattern` is longer than `text` instead of panicking on
/// `usize` underflow as the previous `text.len() - pattern.len() + 1` did.
/// Comparison is done on bytes, so non-ASCII text no longer panics on
/// char-boundary slicing. An empty pattern matches at every position
/// boundary (`text.len() + 1` times), preserving the original behavior.
pub fn pattern_count(text: &str, pattern: &str) -> u64 {
    if pattern.is_empty() {
        // `windows(0)` would panic; preserve the original result for "".
        return text.len() as u64 + 1;
    }
    if pattern.len() > text.len() {
        return 0;
    }
    text.as_bytes()
        .windows(pattern.len())
        .filter(|window| *window == pattern.as_bytes())
        .count() as u64
}
/// Read `<text>\n<pattern>` from the file named by the first CLI argument
/// (default: data/rosalind_ba1a.txt) and print the pattern count.
fn main() -> Result<(), Box<dyn Error>> {
    let path: String = env::args()
        .nth(1)
        .unwrap_or_else(|| "data/rosalind_ba1a.txt".into());
    let contents = fs::read_to_string(path)?;
    let mut lines = contents.lines();
    let text = lines.next().unwrap();
    let pattern = lines.next().unwrap();
    println!("{}", pattern_count(text, pattern));
    Ok(())
}
|
pub mod null;
//pub mod sled;
pub mod sqlite;
//pub mod rocks;
use blake3::hash;
use std::borrow::Cow;
use thiserror::Error;
use crate::commit;
use crate::key;
use crate::key::TypedKey;
use crate::Keyish;
use crate::Object;
/// Errors raised while resolving a user-supplied `Keyish` to a concrete key.
#[derive(Debug, Error)]
pub enum CanonicalizeError {
    #[error("Invalid object id '{_0}'")]
    InvalidHex(String),
    #[error("Object '{_0}' not found")]
    NotFound(String),
    /// More than one key matched; the candidates are carried alongside.
    #[error("Object '{_0}' is ambiguous")]
    Ambigious(String, Vec<key::Key>),
    #[error("error when converting db key: {_0}")]
    FromDbKeyError(#[from] key::FromDbKeyError),
    #[error("error when getting reflog: {_0}")]
    GetReflogError(#[from] GetReflogError),
}
/// A reflog entry: a named ref (optionally remote-qualified) pointing at a
/// commit key.
#[derive(Debug)]
pub struct Reflog {
    pub refname: String,
    pub key: TypedKey<commit::Commit>,
    pub remote: Option<String>,
}
/// Errors from looking up a ref in the reflog.
#[derive(Debug, Error)]
pub enum GetReflogError {
    #[error("Ref not found")]
    NotFound,
    #[error("error parsing db key: {_0}")]
    FromDbKeyError(#[from] key::FromDbKeyError),
    #[error(transparent)]
    DSerror(#[from] DSError),
}
/// Low-level datastore error wrapping the backing store's own error type.
#[derive(Debug, Error)]
pub enum DSError {
    #[error("sqlite error: {_0}")]
    SqliteError(#[from] rusqlite::Error),
}
/// Convert an error into the unified `DSError`.
pub trait ToDSError {
    fn to_ds(self) -> DSError;
}
/// Convert a `Result`'s error side into `DSError`.
pub trait ToDSErrorResult<T> {
    fn to_ds_r(self) -> Result<T, DSError>;
}
// Anything already convertible via `Into<DSError>` gets `to_ds` for free.
impl<T: Into<DSError>> ToDSError for T {
    fn to_ds(self) -> DSError {
        self.into()
    }
}
impl<T, E: ToDSError> ToDSErrorResult<T> for Result<T, E> {
    fn to_ds_r(self) -> Result<T, DSError> {
        self.map_err(|x| x.to_ds())
    }
}
// Per-operation error types. Each datastore operation gets its own error
// enum (most currently just wrap `DSError`) so signatures stay precise if
// operations later grow distinct failure modes.
#[derive(Debug, Error)]
pub enum BeginTransError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum RollbackTransError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum CommitTransError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum RawGetError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum RawPutError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum RawExistsError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum RawGetStateError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum RawPutStateError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum ReflogPushError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum RawBetweenError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum RawGetHeadError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
#[derive(Debug, Error)]
pub enum RawPutHeadError {
    #[error(transparent)]
    DSerror(#[from] DSError),
}
/// Errors from reading HEAD (raw fetch plus UTF-8 decoding).
#[derive(Debug, Error)]
pub enum GetHeadError {
    #[error("error when getting state: {_0}")]
    RawGetStateError(#[from] RawGetStateError),
    #[error("error decoding utf8 string: {_0}")]
    FromUtf8Error(#[from] std::string::FromUtf8Error),
}
/// Errors from fetching and CBOR-decoding an object.
#[derive(Debug, Error)]
pub enum GetObjError {
    #[error("error getting object: {_0}")]
    RawGetError(#[from] RawGetError),
    #[error("error decoding object: {_0}")]
    DecodeError(#[from] serde_cbor::error::Error),
}
/// Errors from CBOR-encoding and storing an object.
#[derive(Debug, Error)]
pub enum PutObjError {
    #[error("error putting object: {_0}")]
    RawPutError(#[from] RawPutError),
    #[error("error encoding object: {_0}")]
    EncodeError(#[from] serde_cbor::error::Error),
}
/// Optional transaction hooks; the defaults are no-ops so stores without
/// transaction support need not implement anything.
pub trait Transactional {
    /// Begin a transaction (no-op by default).
    fn begin_trans(&mut self) -> Result<(), BeginTransError> {
        Ok(())
    }
    /// Commit the current transaction (no-op by default).
    fn commit(&mut self) -> Result<(), CommitTransError> {
        Ok(())
    }
    /// Roll back the current transaction (no-op by default).
    fn rollback(&mut self) -> Result<(), RollbackTransError> {
        Ok(())
    }
}
// DataStore must remain usable as a trait object.
static_assertions::assert_obj_safe!(DataStore);
/// Content-addressed object store plus small amounts of mutable state
/// (HEAD, reflog). Backends implement the `raw_*` byte-level primitives;
/// the default methods build keyed/object-level access on top of them.
pub trait DataStore: Transactional {
    fn raw_get<'a>(&'a self, key: &[u8]) -> Result<Cow<'a, [u8]>, RawGetError>;
    fn raw_put<'a>(&'a self, key: &[u8], data: &[u8]) -> Result<(), RawPutError>;
    fn raw_exists(&self, key: &[u8]) -> Result<bool, RawExistsError>;
    fn raw_get_state<'a>(&'a self, key: &[u8]) -> Result<Option<Vec<u8>>, RawGetStateError>;
    fn raw_put_state<'a>(&'a self, key: &[u8], data: &[u8]) -> Result<(), RawPutStateError>;
    /// Fetch the bytes stored under a typed key.
    fn get(&self, key: key::Key) -> Result<Cow<'_, [u8]>, RawGetError> {
        let results = self.raw_get(&key.as_db_key())?;
        Ok(results)
    }
    /// Content-address `data` with BLAKE3.
    fn hash(&self, data: &[u8]) -> key::Key {
        let b3 = hash(data);
        key::Key::Blake3B(*b3.as_bytes())
    }
    /// Store `data` under its content hash and return the resulting key.
    fn put(&self, data: Vec<u8>) -> Result<key::Key, RawPutError> {
        let keybuf = self.hash(&data);
        self.raw_put(&keybuf.as_db_key(), &data)?;
        Ok(keybuf)
    }
    /// Read HEAD as a UTF-8 string, if set.
    fn get_head(&self) -> Result<Option<String>, GetHeadError> {
        let bytes = self.raw_get_state(b"HEAD")?;
        Ok(match bytes {
            Some(b) => Some(String::from_utf8(b)?),
            None => None,
        })
    }
    /// Overwrite HEAD.
    fn put_head(&self, head: &str) -> Result<(), RawPutStateError> {
        self.raw_put_state(b"HEAD", head.as_bytes())?;
        Ok(())
    }
    fn reflog_push(&self, data: &Reflog) -> Result<(), ReflogPushError>;
    fn reflog_get(
        &self,
        refname: &str,
        remote: Option<&str>,
    ) -> Result<TypedKey<commit::Commit>, GetReflogError>;
    fn reflog_walk(
        &self,
        refname: &str,
        remote: Option<&str>,
    ) -> Result<Vec<TypedKey<commit::Commit>>, WalkReflogError>;
    fn raw_between(
        &self,
        start: &[u8],
        end: Option<&[u8]>,
    ) -> Result<Vec<Vec<u8>>, RawBetweenError>;
    /// Resolve a user-supplied `Keyish` (exact key, key-prefix range, or
    /// reflog name) to exactly one concrete key.
    fn canonicalize(&self, search: Keyish) -> Result<key::Key, CanonicalizeError> {
        let mut results: Vec<Vec<u8>> = Vec::new();
        let err_str;
        match search {
            Keyish::Key(s, key) => {
                err_str = s;
                // NOTE(review): this unwrap (and the one below) panics on
                // backend errors instead of surfacing them through
                // CanonicalizeError — consider dedicated variants.
                let k = self.raw_get(&key).unwrap();
                results.push(k.to_vec());
            }
            Keyish::Range(s, start, end) => {
                err_str = s;
                results = self.raw_between(&start, end.as_deref()).unwrap();
            }
            Keyish::Reflog {
                orig,
                remote,
                keyname,
            } => match self.reflog_get(&keyname, remote.as_deref()) {
                Ok(key) => return Ok(key.inner()),
                Err(GetReflogError::NotFound) => return Err(CanonicalizeError::NotFound(orig)),
                Err(e) => return Err(e.into()),
            },
        };
        match results.len() {
            0 => Err(CanonicalizeError::NotFound(err_str)),
            // This is okay since we know it will have one item.
            #[allow(clippy::option_unwrap_used)]
            1 => Ok(key::Key::from_db_key(&results.pop().unwrap())?),
            _ => {
                let strs: Result<_, _> = results
                    .into_iter()
                    .map(|x| key::Key::from_db_key(&x))
                    .collect();
                Err(CanonicalizeError::Ambigious(err_str, strs?))
            }
        }
    }
    /// Fetch and CBOR-decode an object.
    fn get_obj(&self, key: key::Key) -> Result<Object, GetObjError> {
        let data = self.get(key)?;
        Ok(serde_cbor::from_slice(&data)?)
    }
    /// CBOR-encode and store an object, returning its content key.
    fn put_obj(&self, data: &Object) -> Result<key::Key, PutObjError> {
        let data = serde_cbor::to_vec(data)?;
        Ok(self.put(data)?)
    }
}
/// Errors from walking a ref's reflog history.
#[derive(Debug, Error)]
pub enum WalkReflogError {
    #[error("error parsing db key: {_0}")]
    FromDbKeyError(#[from] key::FromDbKeyError),
    #[error(transparent)]
    DSerror(#[from] DSError),
}
|
#![allow(clippy::wildcard_imports)]
use image::{DynamicImage, ImageFormat};
use rand::seq::SliceRandom;
use rand::thread_rng;
use seed::{prelude::*, *};
use std::collections::BTreeMap;
use ulid::Ulid;
use web_sys::{self, DragEvent, Event, FileList};
// Thumbnail edge length (px) used when resizing dropped images.
const THUMB_SIZE: u32 = 250;
// Number of cards per row on the game board.
const COLUMNS_NUMBER: usize = 6;
// Image shown on the back of a face-down card.
const QUESTION_IMG: &str = "/matching-seed/q.png";
// Placeholder image used when a card has no photo.
const ARROW_IMAGE: &str = "/matching-seed/arrow.png";
// ------ ------
//     Init
// ------ ------
/// Seed init hook: start from an empty default model.
fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {
    Model::default()
}
// ------ ------
//     Models
// ------ ------
/// Whether a board card is currently face up or face down.
#[derive(PartialOrd, PartialEq, Clone)]
enum CardState {
    FaceUp,
    FaceDown,
}
/// How a new card is created: seeded with a photo data-URL, or blank.
enum NewCardType {
    OnePhoto(String),
    Empty,
}
/// A card definition in the word list (`photo` holds a data: URL).
#[derive(Clone)]
struct Card {
    text: Option<String>,
    photo: Option<String>,
    id: Ulid,
}
/// A card instance on the game board with its current display state.
struct PlayedCard {
    card: Card,
    displayed: CardState,
    matched: bool,
}
/// Application state for both the editor and the game board.
struct Model {
    game_started: bool,
    words_list: BTreeMap<Ulid, Card>,
    board: Vec<PlayedCard>,
    // Card id of the single face-up, unmatched card, if any.
    last: Option<Ulid>,
    // Set after a wrong guess; the next click flips everything back down.
    needs_reset: bool,
    // for drag and drop
    drop_zone_active: bool,
}
impl Model {
    /// Flip every board card face down and clear the pending-guess state
    /// (`needs_reset` and `last`).
    fn all_face_down(&mut self) {
        self.board
            .iter_mut()
            .for_each(|card| card.displayed = CardState::FaceDown);
        self.needs_reset = false;
        self.last = None;
    }
}
impl Default for Model {
    // A fresh app: editor view, empty word list, empty board, no drag.
    fn default() -> Self {
        Self {
            game_started: false,
            words_list: BTreeMap::new(),
            board: Vec::new(),
            last: None,
            needs_reset: false,
            drop_zone_active: false,
        }
    }
}
// ------ ------
//    Update
// ------ ------
/// All UI events the app reacts to.
enum Msg {
    /// Add a card to the word list (blank or pre-filled with a photo).
    NewCard(NewCardType),
    /// Set the text of the card with the given id.
    UpdateCardText { id: Ulid, text: String },
    /// Remove a card from the word list.
    DeleteCard(Ulid),
    /// Player clicked the board card at this index.
    GuessCard(usize),
    /// Toggle all cards face up / face down.
    ShowHideAll,
    /// Build the board from the word list and begin playing.
    StartGame,
    /// Clear everything and return to the editor.
    ExitGame,
    /// Flip all cards face down after a failed guess.
    ResetClick,
    // Drag-and-drop file upload events.
    DragEnter,
    DragOver,
    DragLeave,
    Drop(FileList),
}
#[cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_lines))]
#[cfg_attr(
feature = "cargo-clippy",
allow(clippy::case_sensitive_file_extension_comparisons)
)]
// update, and make clippy allow too many lines since I don't feel like making this more readable
fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
match msg {
// create a new card based on NewCardType
Msg::NewCard(card_type) => {
let new_id = Ulid::new();
match card_type {
NewCardType::Empty => {
let new_card = Card {
id: new_id,
photo: None,
text: None,
};
model.words_list.entry(new_id).or_insert(new_card);
}
NewCardType::OnePhoto(content) => {
let new_card = Card {
id: new_id,
photo: Some(content),
text: None,
};
model.words_list.entry(new_id).or_insert(new_card);
}
}
}
// update a card with new text
Msg::UpdateCardText { id, text } => {
if !text.is_empty() {
if let Some(card) = model.words_list.get_mut(&id) {
card.text = Some(text);
}
}
}
// delete a card from the BTree
Msg::DeleteCard(id) => {
let _garbage = model.words_list.remove(&id);
}
// let me guess the card
Msg::GuessCard(index) => {
if model.needs_reset {
model.all_face_down();
return;
}
// do whatever based on whether there's a model.last or not
if let Some(last_guessed) = model.last {
// two IDs
let just_guessed = model.board[index].card.id;
if just_guessed == last_guessed {
// the person guessed correctly!
// set the cards to displayed and to matched = true
for card in &mut model.board {
if card.card.id == just_guessed || card.card.id == last_guessed {
// card.displayed = CardState::FaceUp;
card.matched = true;
}
}
// set all to face down (to make toggle less messed up)
// if card.matched == true, the card will be displayed regardless
model.all_face_down();
// set the last to none again, since it was a correct guess.
model.last = None;
} else {
// guessed incorrectly :(
model.board[index].displayed = CardState::FaceUp;
model.needs_reset = true;
}
} else {
// this will be the only flipped card, so set the last value to this one
model.last = Some(model.board[index].card.id);
// and flip the card so we can see it
model.board[index].displayed = CardState::FaceUp;
}
}
// show/hide all
Msg::ShowHideAll => {
// see if any are flipped already
let any_flipped = model
.board
.iter()
.any(|card| card.displayed == CardState::FaceUp);
// set an all_state. make all cards this next
let new_state: CardState = if any_flipped {
// one or more are already flipped, so face down
CardState::FaceDown
} else {
CardState::FaceUp
};
// get a copy of the board vec
for card in &mut model.board {
card.displayed = new_state.clone();
}
// clear last (if not cleared it will cause some weirdness)
model.last = None;
}
// start the game
Msg::StartGame => {
if model.words_list.len() < 2 {
return;
}
let mut new_board: Vec<PlayedCard> = vec![];
for card_pair in model.words_list.values() {
// skip the card if both photo and text are empty
if card_pair.text == None && card_pair.photo == None {
continue;
}
new_board.push(PlayedCard {
displayed: CardState::FaceDown,
matched: false,
card: card_pair.clone(),
});
new_board.push(PlayedCard {
displayed: CardState::FaceDown,
matched: false,
card: card_pair.clone(),
});
}
// now shuffle it to make it random
new_board.shuffle(&mut thread_rng());
// copy new_board to model.board
model.board = new_board;
// board is made, now set the model to show the game has started
model.game_started = true;
}
// set the model to all the default values to start over
Msg::ExitGame => {
model.words_list = BTreeMap::new();
model.game_started = false;
model.board = vec![];
model.last = None;
model.needs_reset = false;
}
// ResetClick will let me turn off the click listener and turn all cards FaceDown
Msg::ResetClick => {
// set all to face down
model.all_face_down();
}
// ******
// the following is for dragging files
// from https://github.com/seed-rs/seed/blob/master/examples/drop_zone/src/lib.rs
// ******
Msg::DragEnter => model.drop_zone_active = true,
Msg::DragOver => (),
Msg::DragLeave => model.drop_zone_active = false,
Msg::Drop(file_list) => {
model.drop_zone_active = false;
let files = (0..file_list.length())
.filter_map(|index| {
let file = file_list.get(index).expect("get file with given index");
if file.name().to_lowercase().ends_with(".png")
|| file.name().to_lowercase().ends_with(".gif")
|| file.name().to_lowercase().ends_with(".jpg")
|| file.name().to_lowercase().ends_with(".jpeg")
{
Some(file)
} else {
None
}
})
.collect::<Vec<_>>();
for file in files {
// go through files, process them
orders.perform_cmd(async move {
let result: JsValue = wasm_bindgen_futures::JsFuture::from(file.array_buffer())
.await
.expect("expected result from promise");
let array: Vec<u8> = js_sys::Uint8Array::new(&result).to_vec();
let pic: DynamicImage =
image::load_from_memory(&array).expect("load pic from js array");
let format: ImageFormat = image::guess_format(&array).expect("guess format");
let pic = pic.resize(THUMB_SIZE, THUMB_SIZE, image::imageops::Gaussian);
// from https://stackoverflow.com/questions/57457818/how-to-convert-dynamicimage-to-base64
let mut blob_buf = vec![];
let _garbage = pic.write_to(&mut blob_buf, format);
let resized_pic_b64: String = base64::encode(&blob_buf);
// make a nice url here
let format_string = match format {
ImageFormat::Gif => "image/gif",
ImageFormat::Png => "image/png",
ImageFormat::Jpeg => "image/jpeg",
_ => "image",
};
let nice_url_string =
format!("data:{};base64,{}", format_string, resized_pic_b64);
Msg::NewCard(NewCardType::OnePhoto(nice_url_string))
});
}
}
}
}
// ------ ------
//     View
// ------ ------
// from https://github.com/seed-rs/seed/blob/master/examples/drop_zone/src/lib.rs
// set up drag events
/// Cast a generic DOM `Event` into a `DragEvent`.
trait IntoDragEvent {
    fn into_drag_event(self) -> DragEvent;
}
impl IntoDragEvent for Event {
    // Panics if the event is not actually a drag event.
    fn into_drag_event(self) -> DragEvent {
        self.dyn_into::<web_sys::DragEvent>()
            .expect("cannot cast given event into DragEvent")
    }
}
// Stop propagation and cancel default handling for `$event`
// (drag/drop event plumbing).
macro_rules! stop_and_prevent {
    { $event:expr } => {
        {
            $event.stop_propagation();
            $event.prevent_default();
        }
    };
}
/// Root view: the board once a game is running, otherwise the word editor.
fn view(model: &Model) -> Vec<Node<Msg>> {
    if !model.game_started {
        new_words_page(model)
    } else {
        game_page(model)
    }
}
// play the game page
fn game_page(model: &Model) -> Vec<Node<Msg>> {
let all_cards: Vec<Node<Msg>> = model
.board
.iter()
.enumerate()
.map(|(index, played_card)| print_card(played_card, index))
.collect();
// take cards and put them into divs for columns
let mut row: Vec<Node<Msg>> = vec![];
let mut all: Vec<Node<Msg>> = vec![];
for (index, card) in all_cards.iter().enumerate() {
row.push(card.clone());
// put the correct number of cards in a row
if (index + 1) % COLUMNS_NUMBER == 0 {
all.push(div![C!["columns"], &row]);
row.clear();
}
// for the last row if it has less than columns number
// add empty divs as placeholders
if index == all_cards.len() - 1 {
let remaining = COLUMNS_NUMBER - row.len();
for _ in 0..remaining {
row.push(div![C!["column"]]);
}
all.push(div![C!["columns"], &row]);
}
}
// decide whether to show a button that says show all or hide all
let show_hide_all_button_text: &str = if model
.board
.iter()
.any(|card| card.displayed == CardState::FaceUp)
{
// one or more are face up so display face down
"hide all"
} else {
"show all"
};
// just add a couple of buttons at the bottom to make navigation easier
all.push(div![
button![
show_hide_all_button_text,
C!["button is-large is-danger"],
ev(Ev::Click, move |_| { Msg::ShowHideAll })
],
br!(),
button![
"Play again!",
C!["button is-large is-success"],
ev(Ev::Click, move |_| { Msg::StartGame })
],
button![
"Create New",
C!["button is-large is-warning"],
ev(Ev::Click, move |_| { Msg::ExitGame })
]
]);
all
}
// print a card
fn print_card(played_card: &PlayedCard, index: usize) -> Node<Msg> {
// make a more usable photo string
let card_image = match &played_card.card.photo {
Some(blob) => format!("<img src=\"{}\">", blob),
None => format!("<img src=\"{}\">", ARROW_IMAGE),
};
let card_text = match &played_card.card.text {
Some(text) => text,
None => "",
};
let question_image = format!("<img src=\"{}\">", QUESTION_IMG);
let show_card = played_card.displayed == CardState::FaceUp || played_card.matched;
if show_card {
div![
C!["column"],
div![
C!["card"],
div![
C!["card-image"],
figure!(C!["image is-square is-fullwidth"], raw!(&card_image),)
],
div![
C!["card-content"],
div![
C!["media"],
div![C!["media-content"], p!(C!["title is-4"], card_text,)]
]
],
ev(Ev::Click, move |_| Msg::ResetClick),
]
]
} else {
div![
C!["column"],
div![
C!["card"],
div![
C!["card-image"],
figure!(C!["image is-square is-fullwidth"], raw!(&question_image),)
],
div![
C!["card-content"],
div![
C!["media"],
div![C!["media-content"], p!(C!["title is-4"], index + 1,)]
]
],
ev(Ev::Click, move |_| Msg::GuessCard(index))
]
]
}
}
// show the new words page
fn new_words_page(model: &Model) -> Vec<Node<Msg>> {
/*
the list of the words and formatted
*/
let existing_words = model
.words_list
.iter()
.map(|(id, card)| {
/*
information for the html: image blob and flashcard word title
*/
let image_blob = match &card.photo {
Some(text) => format!("<img src=\"{}\">", text),
None => "".to_string(),
};
let card_text = match &card.text {
Some(text) => text,
None => "",
};
let this_id = *id;
tr!(
td!(div![
IF!(!image_blob.is_empty() => raw!(&image_blob)),
style![
St::Margin => "5px",
]
],),
td!(div![
"show vocab word (optional)",
br!(),
input![
card_text,
input_ev(Ev::Input, move |word| Msg::UpdateCardText {
id: this_id,
text: word
}),
],
button![
"delete",
ev(Ev::Click, move |_| Msg::DeleteCard(this_id)),
C!["button is-small is-danger"]
],
style![
St::Margin => "5px"
]
])
)
})
.collect::<Vec<Node<Msg>>>();
/*
other stuff: add_new button, start_game button
*/
let add_new_button: Node<Msg> = button![
"Add New",
C!["button is-large is-link"],
ev(Ev::Click, move |_| { Msg::NewCard(NewCardType::Empty) })
];
let clear_list_button: Node<Msg> = button![
"Clear List",
C!["button is-large is-danger"],
ev(Ev::Click, move |_| Msg::ExitGame),
];
// add a start game button
let start_game: Node<Msg> = button![
"Start Game",
C!["button is-large is-success"],
ev(Ev::Click, move |_| { Msg::StartGame })
];
/*
put it all into a Vec to return
*/
vec![
drag_and_drop_area(model),
br!(),
table![existing_words, C!["table is-striped"]],
add_new_button,
clear_list_button,
br!(),
start_game,
]
}
// drag and drop area
// https://github.com/seed-rs/seed/blob/master/examples/drop_zone/src/lib.rs
/// Render the drop zone; its background reflects `drop_zone_active` and its
/// drag/drop handlers feed the corresponding `Msg` variants.
fn drag_and_drop_area(model: &Model) -> Node<Msg> {
    div![div![
        style![
            St::Height => px(200),
            St::Width => px(200),
            St::Margin => "auto",
            St::Background => if model.drop_zone_active { "lightgreen" } else { "lightgray" },
            St::FontFamily => "sans-serif",
            St::Display => "flex",
            St::FlexDirection => "column",
            St::JustifyContent => "center",
            St::AlignItems => "center",
            // NOTE(review): trailing ';' instead of ',' on the next entry —
            // presumably accepted by seed's style! macro; confirm.
            St::Border => [&px(2), "dashed", "black"].join(" ");
            St::BorderRadius => px(20),
        ],
        ev(Ev::DragEnter, |event| {
            stop_and_prevent!(event);
            Msg::DragEnter
        }),
        ev(Ev::DragOver, |event| {
            let drag_event = event.into_drag_event();
            stop_and_prevent!(drag_event);
            drag_event.data_transfer().unwrap().set_drop_effect("copy");
            Msg::DragOver
        }),
        ev(Ev::DragLeave, |event| {
            stop_and_prevent!(event);
            Msg::DragLeave
        }),
        ev(Ev::Drop, |event| {
            let drag_event = event.into_drag_event();
            stop_and_prevent!(drag_event);
            let file_list = drag_event.data_transfer().unwrap().files().unwrap();
            Msg::Drop(file_list)
        }),
        div![
            style! {
                // we don't want to fire `DragLeave` when we are dragging over drop-zone children
                St::PointerEvents => "none",
            },
            div!["Drop jpg/png/gif here"],
        ],
    ],]
}
// ------ ------
//     Start
// ------ ------
/// WASM entry point.
#[wasm_bindgen(start)]
pub fn start() {
    // Mount the `app` to the element with the `id` "app".
    App::start("app", init, update, view);
}
|
#![allow(dead_code)]
/// An Intcode virtual machine: integer memory plus an instruction pointer.
pub struct Computer {
    mem: Vec<i32>,
    ip: i32,
}
/// Outcome of executing one instruction.
enum State {
    /// The program requests an input value, to be written to this register.
    Input(i32),
    /// The program emitted this output value.
    Output(i32),
    /// Keep executing.
    Continue,
    /// Clean halt (opcode 99).
    Halt,
    /// An unknown opcode was encountered.
    Error,
}
// Opcode numbers (the low two digits of an instruction).
const OP_ADD: i32 = 1;
const OP_MULTIPLY: i32 = 2;
const OP_STORE_INPUT: i32 = 3;
const OP_EMIT_OUTPUT: i32 = 4;
const OP_JUMP_TRUE: i32 = 5;
const OP_JUMP_FALSE: i32 = 6;
const OP_LESS_THAN: i32 = 7;
const OP_EQUAL_TO: i32 = 8;
// skip some...
const OP_HALT: i32 = 99;
/// A decoded instruction with its (already mode-resolved) operands.
enum Op {
    Add(i32, i32, i32),
    Multiply(i32, i32, i32),
    Input(i32),
    Output(i32),
    JumpIfTrue(i32, i32),
    JumpIfFalse(i32, i32),
    LessThan(i32, i32, i32),
    EqualTo(i32, i32, i32),
    Halt,
    Error, // unknown code
}
/// Default input handler for programs that never read input.
fn no_input() -> i32 {
    panic!("no input handler provided");
}
/// Default output handler for programs that never write output.
fn no_output(_: i32) {
    panic!("no output handler provided");
}
impl Computer {
    /// Create a computer with the given initial memory; ip starts at 0.
    pub fn new(mem: Vec<i32>) -> Computer {
        Computer { mem, ip: 0 }
    }
    /// Replace memory with a new program and reset the instruction pointer.
    pub fn load(&mut self, mem: Vec<i32>) {
        self.mem = mem;
        self.ip = 0;
    }
    /// Run a program that performs no I/O; `true` on clean halt.
    pub fn run_no_io(&mut self) -> bool {
        self.run(no_input, no_output)
    }
    /// Run until halt or error, delegating I/O to the given handlers.
    /// Returns `true` on a clean halt, `false` on an unknown opcode.
    pub fn run<FI, FO>(&mut self, mut input_handler: FI, mut output_handler: FO) -> bool
    where
        FI: FnMut() -> i32,
        FO: FnMut(i32),
    {
        loop {
            match self.tick() {
                State::Input(a) => self.write(a, input_handler()),
                State::Output(a) => output_handler(a),
                State::Continue => (),        // tick again
                State::Halt => return true,   // clean exit
                State::Error => return false, // fail condition
            }
        }
    }
    /// Read the value stored at `register`.
    pub fn read(&self, register: i32) -> i32 {
        self.mem[register as usize]
    }
    /// Write `value` into `register`.
    pub fn write(&mut self, register: i32, value: i32) {
        self.mem[register as usize] = value
    }
    /// Decode and execute a single instruction.
    fn tick(&mut self) -> State {
        let op = self.read_opcode();
        match op {
            Op::Add(a, b, c) => self.write(c, a + b),
            Op::Multiply(a, b, c) => self.write(c, a * b),
            Op::Input(a) => return State::Input(a),
            Op::Output(a) => return State::Output(a),
            Op::JumpIfFalse(a, b) => {
                if a == 0 {
                    self.ip = b
                }
            }
            Op::JumpIfTrue(a, b) => {
                if a != 0 {
                    self.ip = b
                }
            }
            // comparison opcodes store 1 for true, 0 for false
            Op::LessThan(a, b, c) => self.write(c, (a < b) as i32),
            Op::EqualTo(a, b, c) => self.write(c, (a == b) as i32),
            Op::Halt => return State::Halt,
            _ => return State::Error,
        }
        State::Continue
    }
    /// Consume and decode the instruction at `ip`, resolving parameter modes.
    fn read_opcode(&mut self) -> Op {
        let op = self.consume();
        // the actual opcode is the lower two digits
        match op % 100 {
            OP_ADD => Op::Add(self.parameter(op, 1), self.parameter(op, 2), self.consume()),
            OP_MULTIPLY => {
                Op::Multiply(self.parameter(op, 1), self.parameter(op, 2), self.consume())
            }
            OP_STORE_INPUT => Op::Input(self.consume()),
            OP_EMIT_OUTPUT => Op::Output(self.parameter(op, 1)),
            OP_JUMP_FALSE => Op::JumpIfFalse(self.parameter(op, 1), self.parameter(op, 2)),
            OP_JUMP_TRUE => Op::JumpIfTrue(self.parameter(op, 1), self.parameter(op, 2)),
            OP_LESS_THAN => {
                Op::LessThan(self.parameter(op, 1), self.parameter(op, 2), self.consume())
            }
            OP_EQUAL_TO => {
                Op::EqualTo(self.parameter(op, 1), self.parameter(op, 2), self.consume())
            }
            OP_HALT => Op::Halt,
            _ => Op::Error,
        }
    }
    /// Return the value at `ip` and advance `ip` by one.
    fn consume(&mut self) -> i32 {
        let v = self.mem[self.ip as usize];
        self.ip += 1;
        v
    }
    /// Fetch parameter `n` (1-based) of `op`, honoring its mode digit:
    /// 0 = positional (the value names a register), 1 = immediate (literal).
    fn parameter(&mut self, op: i32, n: u32) -> i32 {
        // Take the full decimal digit (`% 10`, not `% 2`) so an unsupported
        // mode is reported loudly instead of silently aliased to 0 or 1.
        let mode = (op / 10i32.pow(n + 1)) % 10;
        match mode {
            0 => {
                let reg = self.consume();
                self.read(reg)
            }
            1 => self.consume(),
            _ => panic!("What sort of crazy number was that? {}", mode),
        }
    }
}
// Unit tests exercising the computer against AoC 2019 day 2 and day 5.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_day_2_part_1() {
        let mut pc = Computer::new(vec![
            1, 0, 0, 3, 1, 1, 2, 3, 1, 3, 4, 3, 1, 5, 0, 3, 2, 1, 6, 19, 1, 19, 6, 23, 2, 23, 6, 27, 2,
            6, 27, 31, 2, 13, 31, 35, 1, 9, 35, 39, 2, 10, 39, 43, 1, 6, 43, 47, 1, 13, 47, 51, 2, 6, 51,
            55, 2, 55, 6, 59, 1, 59, 5, 63, 2, 9, 63, 67, 1, 5, 67, 71, 2, 10, 71, 75, 1, 6, 75, 79, 1,
            79, 5, 83, 2, 83, 10, 87, 1, 9, 87, 91, 1, 5, 91, 95, 1, 95, 6, 99, 2, 10, 99, 103, 1, 5,
            103, 107, 1, 107, 6, 111, 1, 5, 111, 115, 2, 115, 6, 119, 1, 119, 6, 123, 1, 123, 10, 127, 1,
            127, 13, 131, 1, 131, 2, 135, 1, 135, 5, 0, 99, 2, 14, 0, 0,
        ]);
        // restore the "1202 program alarm" state before running
        pc.write(1, 12);
        pc.write(2, 2);
        pc.run_no_io();
        assert_eq!(pc.read(0), 3224742);
    }
    #[test]
    fn test_day_2_part_2() {
        let code = vec![
            1, 0, 0, 3, 1, 1, 2, 3, 1, 3, 4, 3, 1, 5, 0, 3, 2, 1, 6, 19, 1, 19, 6, 23, 2, 23, 6, 27, 2,
            6, 27, 31, 2, 13, 31, 35, 1, 9, 35, 39, 2, 10, 39, 43, 1, 6, 43, 47, 1, 13, 47, 51, 2, 6, 51,
            55, 2, 55, 6, 59, 1, 59, 5, 63, 2, 9, 63, 67, 1, 5, 67, 71, 2, 10, 71, 75, 1, 6, 75, 79, 1,
            79, 5, 83, 2, 83, 10, 87, 1, 9, 87, 91, 1, 5, 91, 95, 1, 95, 6, 99, 2, 10, 99, 103, 1, 5,
            103, 107, 1, 107, 6, 111, 1, 5, 111, 115, 2, 115, 6, 119, 1, 119, 6, 123, 1, 123, 10, 127, 1,
            127, 13, 131, 1, 131, 2, 135, 1, 135, 5, 0, 99, 2, 14, 0, 0,
        ];
        let mut pc = Computer::new(vec![]);
        let target = 19690720;
        // brute-force the noun/verb pair that produces the target output
        for n in 1..100 {
            for v in 1..100 {
                // clone and load
                pc.load(code.to_vec());
                // set the 2 register
                pc.write(1, n);
                pc.write(2, v);
                pc.run_no_io();
                if pc.read(0) == target {
                    // we are done.
                    assert_eq!(n * 100 + v, 7960);
                    return;
                }
            }
        }
        panic!("should have found a solution")
    }
    #[test]
    fn test_day_5_part_2() {
        // aka is-eight
        // the example program uses an input instruction to ask for a single number.
        // The program will then output 999 if the input value is below 8,
        // output 1000 if the input value is equal to 8,
        // or output 1001 if the input value is greater than 8.
        let code = vec![
            3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36, 98, 0, 0,
            1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101, 1000, 1, 20, 4, 20, 1105,
            1, 46, 98, 99,
        ];
        let mut pc = Computer::new(code.to_vec());
        let mut last_output: i32 = 0;
        pc.run(|| 8, |x| last_output = x);
        assert_eq!(last_output, 1000);
        pc.load(code.to_vec());
        pc.run(|| 9, |x| last_output = x);
        assert_eq!(last_output, 1001);
        pc.load(code.to_vec());
        pc.run(|| 0, |x| last_output = x);
        assert_eq!(last_output, 999);
    }
}
|
use std::collections::HashMap;
use ::FieldReference;
pub mod parser;
pub mod printer;
mod value_helpers;
/// A sequence of statements (the body of an item, or the whole file).
#[derive(Debug, Clone)]
pub struct Block {
    pub statements: Vec<Statement>,
}
/// One statement: a list of values plus named attributes.
#[derive(Debug, Clone)]
pub struct Statement {
    pub attributes: HashMap<String, Value>,
    pub items: Vec<Value>,
}
/// A value is either a string or a nested item.
#[derive(Debug, Clone)]
pub enum Value {
    String {
        string: String,
        // whether the string was written in block form — TODO confirm
        is_block: bool,
    },
    Item(Item),
}
/// A named item with arguments and a nested block.
#[derive(Debug, Clone)]
pub struct Item {
    pub name: Ident,
    pub args: Vec<ItemArg>,
    pub block: Block,
}
/// An argument to an item, optionally tagged with a name.
#[derive(Debug, Clone)]
pub struct ItemArg {
    pub tag: Option<String>,
    pub value: Value,
}
/// An identifier: a bare name, or a root-namespaced path.
#[derive(Debug, Clone)]
pub enum Ident {
    Simple(String),
    RootNs(Vec<String>),
    // Nc(Vec<String>),
}
// Helpers
impl Block {
    /// Returns a block containing no statements.
    pub fn empty() -> Block {
        Block { statements: Vec::new() }
    }
}
impl Value {
    /// Borrows the nested `Item` when this value is an item.
    pub fn item(&self) -> Option<&Item> {
        match *self {
            Value::Item(ref item) => Some(item),
            _ => None,
        }
    }
    /// Borrows the raw text when this value is a string.
    pub fn string(&self) -> Option<&str> {
        match *self {
            Value::String { ref string, .. } => Some(string),
            _ => None,
        }
    }
    /// Parses this value as a field reference, when it is a string that
    /// `FieldReference::parse` accepts.
    pub fn field_reference(&self) -> Option<FieldReference> {
        self.string().and_then(|string| FieldReference::parse(string))
    }
}
impl Ident {
    /// Borrows the name when this identifier is the `Simple` variant.
    pub fn simple_str(&self) -> Option<&str> {
        if let Ident::Simple(ref string) = *self {
            Some(string)
        } else {
            None
        }
    }
}
impl Item {
    /// Returns the positional argument at `pos`, if any.
    pub fn arg(&self, pos: usize) -> Option<&Value> {
        self.args.get(pos).map(|arg| &arg.value)
    }
    /// Returns the first argument whose tag equals `tag`, if any.
    pub fn tagged_arg(&self, tag: &str) -> Option<&Value> {
        self.args
            .iter()
            .find(|arg| arg.tag.as_ref().map(String::as_str) == Some(tag))
            .map(|arg| &arg.value)
    }
}
impl ItemArg {
    /// Creates an untagged argument.
    pub fn new(val: Value) -> ItemArg {
        ItemArg { tag: None, value: val }
    }
    /// Creates an argument labelled with `tag`.
    pub fn with_tag(tag: String, val: Value) -> ItemArg {
        ItemArg { tag: Some(tag), value: val }
    }
}
impl From<Value> for ItemArg {
fn from(item: Value) -> ItemArg {
ItemArg::new(item)
}
}
|
use tower_lsp::lsp_types::*;
/// Builds completion items for language keywords.
///
/// An entry with an empty second field becomes a plain keyword completion;
/// otherwise the second field is inserted as snippet text (snippet format).
pub fn keyword_completions(keywords: &[(&str, &str)]) -> Vec<CompletionItem> {
    keywords
        .iter()
        .map(|&(label, snippet)| {
            let mut item = CompletionItem {
                label: label.to_string(),
                kind: Some(CompletionItemKind::Keyword),
                ..CompletionItem::default()
            };
            if !snippet.is_empty() {
                item.insert_text = Some(snippet.to_string());
                item.insert_text_format = Some(InsertTextFormat::Snippet);
            }
            item
        })
        .collect()
}
/// Builds plain function-kind completion items from a list of names.
pub fn other_completions(tasks: &[&str]) -> Vec<CompletionItem> {
    let mut items = Vec::with_capacity(tasks.len());
    for task in tasks.iter() {
        items.push(CompletionItem {
            label: task.to_string(),
            kind: Some(CompletionItemKind::Function),
            ..CompletionItem::default()
        });
    }
    items
}
/// SystemVerilog keywords paired with an optional snippet body.
///
/// An empty second field yields a plain keyword completion; a non-empty one
/// is inserted as snippet text (`$1`/`$2` mark tab stops). Consumed by
/// `keyword_completions`.
pub const KEYWORDS: &[(&str, &str)] = &[
    ("accept_on", ""),
    ("alias", ""),
    ("always", "always @($1) begin\nend"),
    ("always_comb", "always_comb begin\n\t$1\nend"),
    ("always_ff", "always_ff @($1) begin\nend"),
    ("always_latch", "always_latch begin\n\t$1\nend"),
    ("and", ""),
    ("assert", ""),
    ("assign", ""),
    ("assume", ""),
    ("automatic", ""),
    ("before", ""),
    ("begin", "begin\n\t$1\nend"),
    ("bind", ""),
    ("bins", ""),
    ("binsof", ""),
    ("bit", ""),
    ("break", ""),
    ("buf", ""),
    ("bufif0", ""),
    ("bufif1", ""),
    ("byte", ""),
    ("case", "case $1;\nendcase"),
    ("casex", "casex $1;\nendcase"),
    ("casez", "casez $1;\nendcase"),
    ("cell", ""),
    ("chandle", ""),
    ("checker", "checker $1;\nendchecker"),
    ("class", "class $1;\nendclass"),
    ("clocking", "clocking $1;\nendclocking"),
    ("cmos", ""),
    ("config", "config $1;\nendconfig"),
    ("const", ""),
    ("constraint", ""),
    ("context", ""),
    ("continue", ""),
    ("cover", ""),
    ("covergroup", ""),
    ("coverpoint", ""),
    ("cross", ""),
    ("deassign", ""),
    ("default", ""),
    ("defparam", ""),
    ("design", ""),
    ("disable", ""),
    ("dist", ""),
    ("do", ""),
    ("edge", ""),
    ("else", ""),
    ("end", ""),
    ("endcase", ""),
    ("endchecker", ""),
    ("endclass", ""),
    ("endclocking", ""),
    ("endconfig", ""),
    ("endfunction", ""),
    ("endgenerate", ""),
    ("endgroup", ""),
    ("endinterface", ""),
    ("endmodule", ""),
    ("endpackage", ""),
    ("endprimitive", ""),
    ("endprogram", ""),
    ("endproperty", ""),
    ("endspecify", ""),
    ("endsequence", ""),
    ("endtable", ""),
    ("endtask", ""),
    ("enum", ""),
    ("event", ""),
    ("eventually", ""),
    ("expect", ""),
    ("export", ""),
    ("extends", ""),
    ("extern", ""),
    ("final", ""),
    ("first_match", ""),
    ("for", ""),
    ("force", ""),
    ("foreach", ""),
    ("forever", ""),
    ("fork", ""),
    ("forkjoin", ""),
    ("function", "function $1;\nendfunction"),
    ("generate", "generate\n\t$1\nendgenerate"),
    ("genvar", ""),
    ("global", ""),
    ("highz0", ""),
    ("highz1", ""),
    ("if", ""),
    ("iff", ""),
    ("ifnone", ""),
    ("ignore_bins", ""),
    ("illegal_bins", ""),
    ("implements", ""),
    ("implies", ""),
    ("import", ""),
    ("incdir", ""),
    ("include", ""),
    ("initial", ""),
    ("inout", ""),
    ("input", ""),
    ("inside", ""),
    ("instance", ""),
    ("int", ""),
    ("integer", ""),
    ("interconnect", ""),
    ("interface", "interface $1;\nendinterface"),
    ("intersect", ""),
    ("join", ""),
    ("join_any", ""),
    ("join_none", ""),
    ("large", ""),
    ("let", ""),
    ("liblist", ""),
    ("library", ""),
    ("local", ""),
    ("localparam", ""),
    ("logic", ""),
    ("longint", ""),
    ("macromodule", ""),
    ("matches", ""),
    ("medium", ""),
    ("modport", ""),
    ("module", "module $1 ($2);\nendmodule"),
    ("nand", ""),
    ("negedge", ""),
    ("nettype", ""),
    ("new", ""),
    ("nexttime", ""),
    ("nmos", ""),
    ("nor", ""),
    ("noshowcancelled", ""),
    ("not", ""),
    ("notif0", ""),
    ("notif1", ""),
    ("null", ""),
    ("or", ""),
    ("output", ""),
    ("package", "package $1;\nendpackage"),
    ("packed", ""),
    ("parameter", ""),
    ("pmos", ""),
    ("posedge", ""),
    ("primitive", "primitive $1;\nendprimitive"),
    ("priority", ""),
    ("program", "program $1;\nendprogram"),
    ("property", "property $1;\nendproperty"),
    ("protected", ""),
    ("pull0", ""),
    ("pull1", ""),
    ("pulldown", ""),
    ("pullup", ""),
    ("pulsestyle_ondetect", ""),
    ("pulsestyle_onevent", ""),
    ("pure", ""),
    ("rand", ""),
    ("randc", ""),
    ("randcase", ""),
    ("randsequence", ""),
    ("rcmos", ""),
    ("real", ""),
    ("realtime", ""),
    ("ref", ""),
    ("reg", ""),
    ("reject_on", ""),
    ("release", ""),
    ("repeat", ""),
    ("restrict", ""),
    ("return", ""),
    ("rnmos", ""),
    ("rpmos", ""),
    ("rtran", ""),
    ("rtranif0", ""),
    ("rtranif1", ""),
    ("s_always", ""),
    ("s_eventually", ""),
    ("s_nexttime", ""),
    ("s_until", ""),
    ("s_until_with", ""),
    ("scalared", ""),
    ("sequence", "sequence $1;\nendsequence"),
    ("shortint", ""),
    ("shortreal", ""),
    ("showcancelled", ""),
    ("signed", ""),
    ("small", ""),
    ("soft", ""),
    ("solve", ""),
    ("specify", "specify $1;\nendspecify"),
    ("specparam", ""),
    ("static", ""),
    ("string", ""),
    ("strong", ""),
    ("strong0", ""),
    ("strong1", ""),
    ("struct", ""),
    ("super", ""),
    ("supply0", ""),
    ("supply1", ""),
    ("sync_accept_on", ""),
    ("sync_reject_on", ""),
    ("table", "table $1;\nendtable"),
    ("tagged", ""),
    ("task", "task $1;\nendtask"),
    ("this", ""),
    ("throughout", ""),
    ("time", ""),
    ("timeprecision", ""),
    ("timeunit", ""),
    ("tran", ""),
    ("tranif0", ""),
    ("tranif1", ""),
    ("tri", ""),
    ("tri0", ""),
    ("tri1", ""),
    ("triand", ""),
    ("trior", ""),
    ("trireg", ""),
    ("type", ""),
    ("typedef", ""),
    ("union", ""),
    ("unique", ""),
    ("unique0", ""),
    ("unsigned", ""),
    ("until", ""),
    ("until_with", ""),
    ("untyped", ""),
    ("use", ""),
    ("uwire", ""),
    ("var", ""),
    ("vectored", ""),
    ("virtual", ""),
    ("void", ""),
    ("wait", ""),
    ("wait_order", ""),
    ("wand", ""),
    ("weak", ""),
    ("weak0", ""),
    ("weak1", ""),
    ("while", ""),
    ("wildcard", ""),
    ("wire", ""),
    ("with", ""),
    ("within", ""),
    ("wor", ""),
    ("xnor", ""),
    ("xor", ""),
];
/// Names of built-in system tasks/functions offered as completions
/// (presumably completed after a `$` prefix — confirm in the server code).
///
/// NOTE(review): contains duplicates (`fatal`, `warning`, `error`, `info`
/// appear twice) — harmless for completion, but could be deduplicated.
pub const SYS_TASKS: &[&str] = &[
    "finish",
    "exit",
    "fatal",
    "warning",
    "stop",
    "error",
    "info",
    "realtime",
    "time",
    "asserton",
    "assertkill",
    "assertpasson",
    "assertfailon",
    "assertnonvacuouson",
    "stime",
    "printtimescale",
    "timeformat",
    "bitstoreal",
    "bitstoshortreal",
    "itor",
    "signed",
    "cast",
    "realtobits",
    "shortrealtobits",
    "rtoi",
    "unsigned",
    "sampled",
    "fell",
    "changed",
    "past_gclk",
    "fell_gclk",
    "changed_gclk",
    "rising_gclk",
    "steady_gclk",
    "bits",
    "typename",
    "isunbounded",
    "coverage_control",
    "coverage_get",
    "coverage_save",
    "set_coverage_db_name",
    "dimensions",
    "right",
    "high",
    "size",
    "random",
    "dist_erlang",
    "dist_normal",
    "dist_t",
    "asin",
    "acos",
    "atan",
    "atan2",
    "hypot",
    "sinh",
    "cosh",
    "tanh",
    "asinh",
    "acosh",
    "atanh",
    "q_initialize",
    "q_remove",
    "q_exam",
    "q_add",
    "q_full",
    "async$and$array",
    "async$nand$array",
    "async$or$array",
    "async$nor$array",
    "sync$and$array",
    "sync$nand$array",
    "sync$or$array",
    "sync$nor$array",
    "countones",
    "onehot0",
    "fatal",
    "warning",
    "dist_chi_square",
    "dist_exponential",
    "dist_poisson",
    "dist_uniform",
    "countbits",
    "onehot",
    "isunknown",
    "coverage_get_max",
    "coverage_merge",
    "get_coverage",
    "load_coverage_db",
    "clog2",
    "ln",
    "log10",
    "exp",
    "sqrt",
    "pow",
    "floor",
    "ceil",
    "sin",
    "cos",
    "tan",
    "rose",
    "stable",
    "past",
    "rose_gclk",
    "stable_gclk",
    "future_gclk",
    "falling_gclk",
    "changing_gclk",
    "unpacked_dimensions",
    "left",
    "low",
    "increment",
    "assertoff",
    "assertcontrol",
    "assertpassoff",
    "assertfailoff",
    "assertvacuousoff",
    "error",
    "info",
    "async$and$plane",
    "async$nand$plane",
    "async$or$plane",
    "async$nor$plane",
    "sync$and$plane",
    "sync$nand$plane",
    "sync$or$plane",
    "sync$nor$plane",
    "system",
    "countdrivers",
    "getpattern",
    "incsave",
    "input",
    "key",
    "list",
    "log",
    "nokey",
    "nolog",
    "reset",
    "reset_count",
    "reset_value",
    "restart",
    "save",
    "scale",
    "scope",
    "showscopes",
    "showvars",
    "sreadmemb",
    "sreadmemh",
];
/// Compiler directive names offered as completions (presumably completed
/// after a backtick prefix — confirm in the server code).
pub const DIRECTIVES: &[&str] = &[
    "__FILE__",
    "__LINE__",
    "begin_keywords",
    "celldefine",
    "default_nettype",
    "define",
    "else",
    "elsif",
    "end_keywords",
    "endcelldefine",
    "endif",
    "ifdef",
    "ifndef",
    "include",
    "line",
    "nounconnected_drive",
    "pragma",
    "resetall",
    "timescale",
    "unconnected_drive",
    "undef",
    "undefineall",
    "default_decay_time",
    "default_trireg_strength",
    "delay_mode_distributed",
    "delay_mode_path",
    "delay_mode_unit",
    "delay_mode_zero",
];
|
fn main() {
let input = include_str!("day6.txt");
let split = input.split("\n");
let v: Vec<&str> = split.collect();
let alphabet: Vec<char> = vec!['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'];
let mut vsub: Vec<&str> = vec![""];
let mut answers: Vec<char> = vec![' '];
let mut total = 0;
for thing in v {
if thing != "" {
vsub.push(thing);
} else {
for subthing in vsub {
for (i, c) in subthing.chars().enumerate() {
answers.push(c);
}
}
for letter in &alphabet {
if answers.contains(letter) {
total += 1;
}
}
vsub = vec![""];
answers = vec![' '];
}
}
println!("{}", total);
} |
#[cfg(test)]
mod test;
mod background;
pub(crate) mod conn;
mod establish;
pub(crate) mod options;
mod wait_queue;
use std::{sync::Arc, time::Duration};
use derivative::Derivative;
use tokio::sync::Mutex;
pub use self::conn::ConnectionInfo;
pub(crate) use self::conn::{Command, CommandResponse, Connection, StreamDescription};
use self::{
establish::ConnectionEstablisher,
options::{ConnectionOptions, ConnectionPoolOptions},
wait_queue::WaitQueue,
};
use crate::{
error::{ErrorKind, Result},
event::cmap::{
CmapEventHandler,
ConnectionCheckoutFailedEvent,
ConnectionCheckoutFailedReason,
ConnectionCheckoutStartedEvent,
ConnectionClosedReason,
PoolClearedEvent,
PoolClosedEvent,
PoolCreatedEvent,
},
options::StreamAddress,
runtime::HttpClient,
RUNTIME,
};
/// Fallback for `max_pool_size` when the pool options do not specify one.
const DEFAULT_MAX_POOL_SIZE: u32 = 100;
/// A pool of connections implementing the CMAP spec. All state is kept internally in an `Arc`, and
/// internal state that is mutable is additionally wrapped by a lock.
#[derive(Clone, Debug)]
pub(crate) struct ConnectionPool {
    // Shared state; cloning the pool clones this `Arc`, not the state itself.
    inner: Arc<ConnectionPoolInner>,
}
impl From<Arc<ConnectionPoolInner>> for ConnectionPool {
    /// Re-wraps shared pool state in a `ConnectionPool` handle.
    fn from(inner: Arc<ConnectionPoolInner>) -> Self {
        Self { inner }
    }
}
/// A struct used to manage the creation, closing, and storage of connections for a
/// `ConnectionPool`.
#[derive(Debug)]
// All fields are mutated only while holding the `Mutex<ConnectionManager>`
// stored in `ConnectionPoolInner`.
struct ConnectionManager {
    /// The set of available connections in the pool. Because the CMAP spec requires that
    /// connections are checked out in a FIFO manner, connections are pushed/popped from the back
    /// of the Vec.
    // NOTE(review): push + pop at the back is LIFO, yet the comment above
    // says FIFO — confirm which behavior the spec actually requires here.
    checked_in_connections: Vec<Connection>,
    /// The total number of connections managed by the pool, including connections which are
    /// currently checked out of the pool.
    total_connection_count: u32,
    /// The ID of the next connection created by the pool.
    next_connection_id: u32,
    /// The current generation of the pool. The generation is incremented whenever the pool is
    /// cleared. Connections belonging to a previous generation are considered stale and will be
    /// closed when checked back in or when popped off of the set of available connections.
    generation: u32,
    /// The address to create connections to.
    address: StreamAddress,
    /// The options used to create connections.
    connection_options: Option<ConnectionOptions>,
    /// Contains the logic for "establishing" a connection. This includes handshaking and
    /// authenticating a connection when it's first created.
    establisher: ConnectionEstablisher,
}
impl ConnectionManager {
    /// Creates an empty manager for `address`; connection ids start at 1 and
    /// the generation at 0.
    fn new(
        address: StreamAddress,
        http_client: HttpClient,
        options: Option<ConnectionPoolOptions>,
    ) -> Self {
        // Derive per-connection options from the pool-level options, if any.
        let connection_options: Option<ConnectionOptions> = options
            .as_ref()
            .map(|pool_options| ConnectionOptions::from(pool_options.clone()));
        Self {
            checked_in_connections: Vec::new(),
            total_connection_count: 0,
            next_connection_id: 1,
            generation: 0,
            establisher: ConnectionEstablisher::new(http_client, options.as_ref()),
            address,
            connection_options,
        }
    }
    /// Emits an event from the event handler if one is present, where `emit` is a closure that uses
    /// the event handler.
    fn emit_event<F>(&self, emit: F)
    where
        F: FnOnce(&Arc<dyn CmapEventHandler>),
    {
        if let Some(handler) = self
            .connection_options
            .as_ref()
            .and_then(|options| options.event_handler.as_ref())
        {
            emit(handler);
        }
    }
    /// Fetches the next connection id, incrementing it for the next connection.
    fn next_connection_id(&mut self) -> u32 {
        let id = self.next_connection_id;
        self.next_connection_id += 1;
        id
    }
    /// Increments the generation of the pool. Rather than eagerly removing stale connections from
    /// the pool, they are left for the background thread to clean up.
    fn clear(&mut self) {
        self.generation += 1;
        self.emit_event(|handler| {
            let event = PoolClearedEvent {
                address: self.address.clone(),
            };
            handler.handle_pool_cleared_event(event);
        });
    }
    /// Create a connection, incrementing the total connection count and emitting the appropriate
    /// monitoring events.
    ///
    /// On failure the count is NOT incremented, since the increment only
    /// happens after successful establishment below.
    async fn create_connection(&mut self) -> Result<Connection> {
        let mut connection = Connection::new(
            self.next_connection_id(),
            self.address.clone(),
            self.generation,
            self.connection_options.clone(),
        )
        .await?;
        self.emit_event(|handler| {
            handler.handle_connection_created_event(connection.created_event())
        });
        let establish_result = self.establisher.establish_connection(&mut connection).await;
        if let Err(e) = establish_result {
            if e.is_authentication_error() {
                // auth spec requires that the pool be cleared when encountering an auth error
                // during establishment.
                self.clear();
            }
            return Err(e);
        }
        self.total_connection_count += 1;
        self.emit_event(|handler| handler.handle_connection_ready_event(connection.ready_event()));
        Ok(connection)
    }
    /// Close a connection, emit the event for it being closed, and decrement the
    /// total connection count.
    fn close_connection(&mut self, connection: Connection, reason: ConnectionClosedReason) {
        connection.close_and_drop(reason);
        self.total_connection_count -= 1;
    }
}
/// The internal state of a connection pool.
#[derive(Derivative)]
#[derivative(Debug)]
pub(crate) struct ConnectionPoolInner {
    /// The address the pool's connections will connect to.
    address: StreamAddress,
    /// The structure used to manage connection creation, closing, storage, and generation.
    connection_manager: Arc<Mutex<ConnectionManager>>,
    /// The event handler specified by the user to process CMAP events.
    // Skipped in the derived `Debug` output via the derivative attribute.
    #[derivative(Debug = "ignore")]
    event_handler: Option<Arc<dyn CmapEventHandler>>,
    /// Connections that have been ready for usage in the pool for longer than `max_idle_time` will
    /// be closed either by the background thread or when popped off of the set of available
    /// connections. If `max_idle_time` is `None`, then connections will not be closed due to being
    /// idle.
    max_idle_time: Option<Duration>,
    /// The minimum number of connections that the pool can have at a given time. This includes
    /// connections which are currently checked out of the pool. If fewer than `min_pool_size`
    /// connections are in the pool, the background thread will create more connections and add
    /// them to the pool.
    min_pool_size: Option<u32>,
    /// The queue that operations wait in to check out a connection.
    ///
    /// A thread will only reach the front of the queue if a connection is available to be checked
    /// out or if one could be created without going over `max_pool_size`.
    wait_queue: WaitQueue,
}
impl ConnectionPool {
    /// Creates a new pool for `address`, emits the pool-created event, and
    /// starts the background maintenance task.
    pub(crate) fn new(
        address: StreamAddress,
        http_client: HttpClient,
        options: Option<ConnectionPoolOptions>,
    ) -> Self {
        let connection_manager =
            ConnectionManager::new(address.clone(), http_client, options.clone());
        let event_handler = options.as_ref().and_then(|opts| opts.event_handler.clone());
        // The CMAP spec indicates that a max idle time of zero means that connections should not be
        // closed due to idleness.
        let mut max_idle_time = options.as_ref().and_then(|opts| opts.max_idle_time);
        if max_idle_time == Some(Duration::from_millis(0)) {
            max_idle_time = None;
        }
        let max_pool_size = options
            .as_ref()
            .and_then(|opts| opts.max_pool_size)
            .unwrap_or(DEFAULT_MAX_POOL_SIZE);
        let min_pool_size = options.as_ref().and_then(|opts| opts.min_pool_size);
        let wait_queue_timeout = options.as_ref().and_then(|opts| opts.wait_queue_timeout);
        let inner = ConnectionPoolInner {
            address: address.clone(),
            event_handler,
            max_idle_time,
            min_pool_size,
            connection_manager: Arc::new(Mutex::new(connection_manager)),
            wait_queue: WaitQueue::new(address.clone(), max_pool_size, wait_queue_timeout),
        };
        let pool = Self {
            inner: Arc::new(inner),
        };
        pool.inner.emit_event(move |handler| {
            let event = PoolCreatedEvent { address, options };
            handler.handle_pool_created_event(event);
        });
        // The background task only holds a weak reference, so it cannot keep
        // the pool alive on its own (see the `Drop` impl below).
        background::start_background_task(Arc::downgrade(&pool.inner));
        pool
    }
    /// Checks out a connection from the pool. This method will block until this thread is at the
    /// front of the wait queue, and then will block again if no available connections are in the
    /// pool and the total number of connections is not less than the max pool size. If the method
    /// blocks for longer than `wait_queue_timeout`, a `WaitQueueTimeoutError` will be returned.
    pub(crate) async fn check_out(&self) -> Result<Connection> {
        let mut conn = self.inner.check_out().await?;
        conn.mark_checked_out(Arc::downgrade(&self.inner));
        Ok(conn)
    }
    /// Checks a connection back into the pool and notifies the wait queue that a connection is
    /// ready. If the connection is stale, it will be closed instead of being added to the set of
    /// available connections. The time that the connection is checked in will be marked to
    /// facilitate detecting if the connection becomes idle.
    #[cfg(test)]
    pub(crate) async fn check_in(&self, conn: Connection) {
        self.inner.check_in(conn).await;
    }
    /// Increments the generation of the pool. Rather than eagerly removing stale connections from
    /// the pool, they are left for the background thread to clean up.
    pub(crate) async fn clear(&self) {
        self.inner.clear().await;
    }
}
impl ConnectionPoolInner {
    /// Emits an event from the event handler if one is present, where `emit` is a closure that uses
    /// the event handler.
    fn emit_event<F>(&self, emit: F)
    where
        F: FnOnce(&Arc<dyn CmapEventHandler>),
    {
        if let Some(ref handler) = self.event_handler {
            emit(handler);
        }
    }
    /// Checks a connection back in: emits the checked-in event, closes the
    /// connection if it is stale, otherwise returns it to the available set,
    /// and finally wakes the front of the wait queue.
    async fn check_in(&self, mut conn: Connection) {
        self.emit_event(|handler| {
            handler.handle_connection_checked_in_event(conn.checked_in_event());
        });
        conn.mark_checked_in();
        let mut connection_manager = self.connection_manager.lock().await;
        // Close the connection if it's stale.
        if conn.is_stale(connection_manager.generation) {
            connection_manager.close_connection(conn, ConnectionClosedReason::Stale);
        } else {
            connection_manager.checked_in_connections.push(conn);
        }
        self.wait_queue.wake_front();
    }
    /// Checks a connection out, emitting the checkout started/failed/succeeded
    /// events around `acquire_or_create_connection`.
    async fn check_out(&self) -> Result<Connection> {
        self.emit_event(|handler| {
            let event = ConnectionCheckoutStartedEvent {
                address: self.address.clone(),
            };
            handler.handle_connection_checkout_started_event(event);
        });
        let result = self.acquire_or_create_connection().await;
        let conn = match result {
            Ok(conn) => conn,
            Err(e) => {
                // Map the error kind to the reason reported in the failure event.
                let failure_reason =
                    if let ErrorKind::WaitQueueTimeoutError { .. } = e.kind.as_ref() {
                        ConnectionCheckoutFailedReason::Timeout
                    } else {
                        ConnectionCheckoutFailedReason::ConnectionError
                    };
                self.emit_event(|handler| {
                    handler.handle_connection_checkout_failed_event(ConnectionCheckoutFailedEvent {
                        address: self.address.clone(),
                        reason: failure_reason,
                    })
                });
                return Err(e);
            }
        };
        self.emit_event(|handler| {
            handler.handle_connection_checked_out_event(conn.checked_out_event());
        });
        Ok(conn)
    }
    /// Waits for the thread to reach the front of the wait queue, then attempts to check out a
    /// connection. If none are available in the pool, one is created and checked out instead.
    async fn acquire_or_create_connection(&self) -> Result<Connection> {
        // Handle that will wake up the front of the queue when dropped.
        // Before returning a valid connection, this handle must be disarmed to prevent the front
        // from waking up early.
        let mut wait_queue_handle = self.wait_queue.wait_until_at_front().await?;
        // Try to get the most recent available connection.
        let mut connection_manager = self.connection_manager.lock().await;
        while let Some(conn) = connection_manager.checked_in_connections.pop() {
            // Close the connection if it's stale.
            if conn.is_stale(connection_manager.generation) {
                connection_manager.close_connection(conn, ConnectionClosedReason::Stale);
                continue;
            }
            // Close the connection if it's idle.
            if conn.is_idle(self.max_idle_time) {
                connection_manager.close_connection(conn, ConnectionClosedReason::Idle);
                continue;
            }
            // Otherwise, return the connection.
            wait_queue_handle.disarm();
            return Ok(conn);
        }
        // There are no connections in the pool, so open a new one.
        let connection = connection_manager.create_connection().await?;
        wait_queue_handle.disarm();
        Ok(connection)
    }
    /// Increments the pool generation, invalidating all existing connections.
    async fn clear(&self) {
        self.connection_manager.lock().await.clear();
    }
}
impl Drop for ConnectionPoolInner {
    /// Automatic cleanup for the connection pool. This is defined on `ConnectionPoolInner` rather
    /// than `ConnectionPool` so that it only gets run once all (non-weak) references to the
    /// `ConnectionPoolInner` are dropped.
    fn drop(&mut self) {
        // Clone everything the async cleanup needs, since `self` cannot be
        // moved into the spawned future.
        let address = self.address.clone();
        let connection_manager = self.connection_manager.clone();
        let event_handler = self.event_handler.clone();
        RUNTIME.execute(async move {
            // Close every connection still checked in, then report the pool
            // as closed.
            let mut connection_manager = connection_manager.lock().await;
            while let Some(connection) = connection_manager.checked_in_connections.pop() {
                connection_manager.close_connection(connection, ConnectionClosedReason::PoolClosed);
            }
            if let Some(ref handler) = event_handler {
                handler.handle_pool_closed_event(PoolClosedEvent {
                    address: address.clone(),
                });
            }
        });
    }
}
|
/// Convenience re-exports so call sites can glob-import the page-fragment
/// builders in one `use`.
pub mod prelude {
    pub use super::header;
    pub use super::navigation;
    pub use super::footer;
}
use maud::{ DOCTYPE, Markup };
/// Renders the HTML doctype and `<head>` (charset, title, Bootstrap CSS/JS).
///
/// `title_opt` is an optional page title folded into the `<title>` tag by
/// `compose_title`.
pub fn header(title_opt: Option<&str>) -> Markup {
    html! {
        (DOCTYPE);
        head {
            meta charset="utf-8";
            title { (compose_title(title_opt)) }
            link rel="stylesheet" type="text/css" href="/res/lib/css/bootstrap.min.css";
            script type="text/javascript" src="/res/lib/js/bootstrap.min.js" {}
        }
    }
}
/// Builds the `<title>` text: "<page> | Hello Rocket" when a page title is
/// given, or just the site name otherwise.
fn compose_title(title_opt: Option<&str>) -> String {
    const PAGE_TITLE: &str = "Hello Rocket";
    match title_opt {
        Some(title) => format!("{} | {}", title, PAGE_TITLE),
        None => PAGE_TITLE.to_string(),
    }
}
/// Renders the top navigation pill bar, marking the entry whose URI matches
/// `current_uri` as active.
///
/// NOTE(review): the active-state checks use "/one" and "/two" while the
/// links point at "/posts" and "/posts/new" — confirm which is intended.
pub fn navigation(current_uri: Option<&str>) -> Markup {
    html! {
        div.container {
            ul.nav.nav-pills {
                li.active[is_uri(current_uri, "/")] {
                    a href="/" { "Home" }
                }
                li.active[is_uri(current_uri, "/one")] {
                    a href="/posts" { "Posts" }
                }
                li.active[is_uri(current_uri, "/two")] {
                    a href="/posts/new" { "New Post" }
                }
            }
        }
    }
}
/// Returns true when `current_uri` is present and equals `to_check`.
fn is_uri(current_uri: Option<&str>, to_check: &str) -> bool {
    match current_uri {
        Some(uri) => uri == to_check,
        None => false,
    }
}
/// Renders the (currently empty) page footer.
pub fn footer() -> Markup {
    html! { }
}
|
use std::fs;
use std::time::Instant;
use std::collections::{HashMap};
/// AoC 2020 day 9 part 1: returns the first number (after the 25-number
/// preamble) that is not the sum of two of the 25 numbers before it, or 0 if
/// every number is valid.
///
/// A multiset of the pairwise sums of the current 25-wide window is kept in
/// `map`, so sliding the window only touches the sums that involve the
/// element entering or leaving it.
fn part1(numbers: Vec<u64>) -> u64 {
    let size: usize = 25;
    let mut map: HashMap<u64, u32> = HashMap::new();
    // Seed with each unordered pair of the preamble exactly once. (The
    // previous version seeded both (i, j) and (j, i) but removed each pair
    // only once when sliding, leaving stale sums in the map that could
    // wrongly validate later numbers.)
    for i in 0..size {
        for j in i + 1..size {
            *map.entry(numbers[i] + numbers[j]).or_insert(0) += 1;
        }
    }
    for i in size..numbers.len() {
        let cur = numbers[i];
        if !map.contains_key(&cur) {
            return cur;
        }
        // Slide the window: remove the sums involving the outgoing element
        // and add the sums involving the element that just entered.
        let first = numbers[i - size];
        for j in i - size + 1..i {
            let sum = first + numbers[j];
            let remaining = {
                let count = map
                    .get_mut(&sum)
                    .expect("ERROR, key should be contained");
                *count -= 1;
                *count
            };
            if remaining == 0 {
                map.remove(&sum);
            }
            *map.entry(numbers[i] + numbers[j]).or_insert(0) += 1;
        }
    }
    0
}
/// AoC 2020 day 9 part 2: finds a contiguous run of at least two numbers
/// summing to `target` and returns min + max of that run, or 0 if none exists.
///
/// Uses a sliding window over a `VecDeque` so shrinking from the front is
/// O(1) (the previous `Vec::remove(0)` was O(n) per removal).
fn part2(numbers: Vec<u64>, target: u64) -> u64 {
    use std::collections::VecDeque;
    let mut seq: VecDeque<u64> = VecDeque::new();
    let mut sum: u64 = 0;
    for number in numbers {
        seq.push_back(number);
        sum += number;
        // Shrink from the front until the window sum no longer exceeds target.
        while sum > target {
            let first = seq.pop_front().expect("window is non-empty while sum > 0");
            sum -= first;
        }
        if sum == target && seq.len() > 1 {
            return seq.iter().min().unwrap() + seq.iter().max().unwrap();
        }
    }
    0
}
fn main() {
let input = fs::read_to_string("input/test.txt")
.expect("Something went wrong reading the file");
let lines = input.lines();
let mut numbers: Vec<u64> = vec![];
for line in lines {
numbers.push(line.parse::<u64>().expect("Ouf that's not a number !"))
}
println!("Running part1");
let now = Instant::now();
let target = part1(numbers.clone());
println!("Found {}", target);
println!("Took {}us", now.elapsed().as_micros());
println!("Running part2");
let now = Instant::now();
println!("Found {}", part2(numbers.clone(), target));
println!("Took {}us", now.elapsed().as_micros());
} |
pub mod hosting;
/// Serving-related operations (stub functions).
pub mod serving {
    // Private: only callable from within this module tree.
    fn take_order() {}
    pub fn serve_order() {}
    pub fn take_payment() {}
}
|
extern crate chrono;
extern crate cpd;
#[macro_use]
extern crate failure;
extern crate las;
#[macro_use]
extern crate log;
extern crate nalgebra;
// An n-row, 3-column matrix of f64 point coordinates (one row per point).
type Matrix3D = nalgebra::MatrixMN<f64, nalgebra::Dynamic, nalgebra::U3>;
// NOTE(review): despite the name, this is a 4x4 square matrix
// (`MatrixN<f64, U4>`), used for the homogeneous transforms in the .dat files.
type Vector4 = nalgebra::MatrixN<f64, nalgebra::U4>;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
pub mod velocities;
mod vector;
use chrono::{DateTime, Utc};
use failure::Error;
use las::Point;
use serde_json::Value;
use std::path::Path;
pub use vector::Vector;
/// An error returned if a .dat file doesn't contain 16 entries.
///
/// The payload is the number of entries actually found.
#[derive(Debug, Fail)]
#[fail(display = "Invalid matrix length: {}", _0)]
pub struct InvalidMatrixLen(usize);
/// The path cannot be turned into a datetime.
///
/// The payload is the offending path, formatted for display.
#[derive(Debug, Fail)]
#[fail(display = "Date and time from path: {}", _0)]
pub struct DateTimeFromPath(String);
/// Reads a .dat file and returns the underlying matrix.
///
/// The file must contain exactly 16 whitespace-separated floats
/// (a 4x4 matrix in row-major order); otherwise `InvalidMatrixLen` is
/// returned. Parse and I/O errors are propagated.
///
/// # Examples
///
/// ```
/// let matrix = ape::matrix_from_path("data/sop.dat").unwrap();
/// assert_eq!(1001.7951549705150000, matrix[(0, 3)]);
/// ```
pub fn matrix_from_path<P: AsRef<Path>>(path: P) ->
    Result<nalgebra::Projective3<f64>, Error>
{
    use std::fs::File;
    use std::io::Read;
    let mut file = File::open(path)?;
    let mut string = String::new();
    file.read_to_string(&mut string)?;
    let numbers = string
        .split_whitespace()
        .map(|s| s.parse::<f64>())
        .collect::<Result<Vec<_>, _>>()?;
    if numbers.len() != 16 {
        return Err(InvalidMatrixLen(numbers.len()).into());
    }
    // `from_iterator` fills the matrix column-major, so transpose to recover
    // the row-major layout of the file.
    let v = Vector4::from_iterator(numbers.into_iter());
    Ok(nalgebra::Projective3::from_matrix_unchecked(v.transpose()))
}
/// Returns a matrix from a las path.
///
/// Reads every point in the .las file (propagating the first open/read
/// error) and packs the x/y/z coordinates into an n-by-3 matrix.
pub fn matrix_from_las_path<P: AsRef<Path>>(path: P) ->
    Result<Matrix3D, Error>
{
    use las::Read;
    let points = las::Reader::from_path(path)?
        .points()
        .collect::<Result<Vec<_>, _>>()?;
    Ok(matrix_from_points(&points))
}
/// Creates a dat string from a matrix: four lines of four space-separated
/// values, one line per matrix row.
///
/// # Examples
///
/// ```
/// let matrix = ape::matrix_from_path("data/sop.dat").unwrap();
/// let string = ape::string_from_matrix(&matrix);
/// ```
pub fn string_from_matrix(matrix: &Vector4) -> String {
    (0..4)
        .map(|i| {
            let row = matrix.row(i);
            format!("{} {} {} {}\n", row[0], row[1], row[2], row[3])
        })
        .collect()
}
/// Turns las points into an n-by-3 matrix with one row per point and the
/// columns holding x, y, and z respectively.
///
/// Takes a slice rather than `&Vec<Point>` (clippy `ptr_arg`): strictly more
/// general, and existing `&vec` call sites still compile via deref coercion.
pub fn matrix_from_points(points: &[Point]) -> Matrix3D
{
    let mut matrix = Matrix3D::zeros(points.len());
    for (i, point) in points.iter().enumerate() {
        matrix[(i, 0)] = point.x;
        matrix[(i, 1)] = point.y;
        matrix[(i, 2)] = point.z;
    }
    matrix
}
/// Returns the center of gravity of this matrix as a vector: the mean of
/// each of the three coordinate columns.
pub fn center_of_gravity(matrix: &Matrix3D) -> Vector {
    let n = matrix.nrows() as f64;
    (0..3)
        .map(|dimension| matrix.column(dimension).iter().sum::<f64>() / n)
        .collect()
}
/// Calculates a date time from a path.
pub fn datetime_from_path<P: AsRef<Path>>(path: P)
-> Result<DateTime<Utc>, Error>
{
use chrono::TimeZone;
if let Some(file_stem) =
path.as_ref().file_stem().map(|s| s.to_string_lossy()) {
Utc.datetime_from_str(&file_stem[0..13], "%y%m%d_%H%M%S")
.map_err(Error::from)
} else {
Err(DateTimeFromPath(path.as_ref().display().to_string()).into())
}
}
/// Returns the magic bucket configuration for the three matrices.
///
/// Builds a JSON pipeline that applies the three transformations in order
/// (sop, adjustment, pop), then crops, range-filters, removes outliers, and
/// color-ramps the points, writing compressed .laz output.
pub fn magic_bucket_config(
    sop: &nalgebra::Projective3<f64>,
    adjustment: &nalgebra::Projective3<f64>,
    pop: &nalgebra::Projective3<f64>,
) -> Value {
    json!({
        "filters": [
            {
                "type": "filters.transformation",
                "matrix": string_from_matrix(sop.matrix()),
            },
            {
                "type": "filters.transformation",
                "matrix": string_from_matrix(adjustment.matrix()),
            },
            {
                "type": "filters.transformation",
                "matrix": string_from_matrix(pop.matrix()),
            },
            {
                "type": "filters.crop",
                "polygon": "POLYGON ((535508.04019199998584 7356923.27050799969584, 526852.992188 7363507.49072299990803, 533350.83911099995021 7365850.74902299977839, 541962.312012 7365547.070313, 545282.91503899998497 7360871.8720699995756, 542695.264648 7358447.21875, 537531.614136 7357506.45642099995166, 536543.26751699997112 7357541.5081789996475, 535508.04019199998584 7356923.27050799969584))"
            },
            {
                "type": "filters.range",
                "limits": "Z[0:250]",
            },
            {
                "type": "filters.outlier",
            },
            {
                "type": "filters.colorinterp",
                "ramp": "pestel_shades",
                "minimum": 0,
                "maximum": 175,
            }
        ],
        "output_ext": ".laz",
        "args": [
            "--writers.las.scale_x=0.0025",
            "--writers.las.scale_y=0.0025",
            "--writers.las.scale_z=0.0025",
            "--writers.las.offset_x=auto",
            "--writers.las.offset_y=auto",
            "--writers.las.offset_z=auto",
            "--writers.las.a_srs=EPSG:32624+5773",
        ]
    })
}
|
extern crate nannou;
extern crate rand;
mod field;
mod cow;
mod evolution;
mod ui;
mod traits;
use nannou::prelude::*;
use nannou::event::SimpleWindowEvent;
use ui::UserInterface;
use evolution::Evolver;
/// Entry point: hands the model/event/view callbacks to nannou and runs the app.
fn main() {
    nannou::app(model, event, view).run();
}
/// Top-level application state passed between nannou callbacks.
struct Model {
    // Runs the simulation and evolution steps.
    evolver: Evolver,
    ui: UserInterface,
}
/// Builds the initial application state: creates the window (sized 720x720
/// after creation), the UI, and an evolver whose field is initialized and
/// evolved once before the first frame.
fn model(app: &App) -> Model {
    let _ = app.new_window().with_title("Graze - Evolved").build().unwrap();
    let (width, height) = (720.0, 720.0);
    app.main_window().set_inner_size_points(width, height);
    let ui = app.new_ui().build().unwrap();
    let ui = UserInterface::new(ui);
    let mut evolver = Evolver::new(width, height, 50);
    evolver.field.init(10);
    evolver.evolve();
    Model { evolver: evolver, ui: ui }
}
/// Per-event update: advances the simulation on update ticks, resizes the
/// field on window resize, and toggles the freeze state on the space key.
/// Returns the (possibly) updated model.
fn event(_: &App, mut model: Model, event: Event) -> Model {
    match event {
        Event::Update(update) => {
            // Step the simulation and UI by the elapsed wall-clock time.
            let dt = update.since_last.secs() as f32;
            model.evolver.step(dt);
            model.ui.update(dt);
        },
        Event::WindowEvent { simple: Some(SimpleWindowEvent::Resized(size)), .. } => {
            model.evolver.field.update_size(size);
        },
        Event::WindowEvent { simple: Some(SimpleWindowEvent::KeyPressed(nannou::VirtualKeyCode::Space)), .. } => {
            model.evolver.field.toggle_freeze();
        },
        _ => (),
    }
    model
}
/// Renders one frame: a white background with the field drawn on top.
fn view(app: &App, model: &Model, frame: Frame) -> Frame {
    let draw = app.draw();
    draw.background().color(WHITE);
    model.evolver.field.draw(&draw);
    draw.to_frame(app, &frame).unwrap();
    frame
}
|
use std::collections::HashMap;
use std::env;
use std::io;
use std::ops;
/// A cell coordinate in the 3-D Conway-cube grid.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct Point3d {
    x: i64,
    y: i64,
    z: i64,
}
impl Point3d {
    pub fn new(x: i64, y: i64, z: i64) -> Self {
        Self { x, y, z }
    }
    /// Iterates over the 26 cells adjacent to `self`: every offset in
    /// {-1, 0, 1}^3 except the all-zero offset.
    ///
    /// (Idiom cleanup: `flat_map` instead of nested `map(..).flatten()`, and
    /// trailing expressions instead of `return` statements.)
    pub fn neighbours(&self) -> impl Iterator<Item = Self> + '_ {
        (-1..2)
            .flat_map(move |z| {
                (-1..2).flat_map(move |y| (-1..2).map(move |x| *self + Self::new(x, y, z)))
            })
            .filter(move |p| *p != *self)
    }
}
impl ops::Add<Self> for Point3d {
    type Output = Self;
    /// Component-wise addition.
    fn add(self, rhs: Self) -> Self {
        Self {
            x: self.x + rhs.x,
            y: self.y + rhs.y,
            z: self.z + rhs.z,
        }
    }
}
/// Active/inactive state per 3-D cell; only cells present in the map are tracked.
type CubeMap = HashMap<Point3d, bool>;
/// AoC 2020 day 17 part 1: boots the 3-D Conway-cube grid for six cycles and
/// returns the number of active cubes at the end.
fn part1(text: &str) -> usize {
    let mut cubes = CubeMap::new();
    // Parse the initial 2-D slice at z == 0; '#' marks an active cube.
    for (row, line) in text.lines().enumerate() {
        for (col, c) in line.chars().enumerate() {
            if c == '#' {
                cubes.insert(Point3d::new(col as i64, row as i64, 0), true);
            }
        }
    }
    for _ in 0..6 {
        // Expand: track every neighbour of a tracked cell, defaulting to
        // inactive, so the simulation below can visit it.
        let mut expanded = cubes.clone();
        for cube in cubes.keys() {
            for n in cube.neighbours() {
                expanded.entry(n).or_insert(false);
            }
        }
        cubes = expanded;
        // Simulate one step; only cubes that end up active are carried over.
        let mut next = CubeMap::new();
        for (cube, state) in &cubes {
            let n_active = cube
                .neighbours()
                .filter(|n| cubes.get(n).copied().unwrap_or(false))
                .count();
            let active = if *state {
                n_active == 2 || n_active == 3
            } else {
                n_active == 3
            };
            if active {
                next.insert(*cube, true);
            }
        }
        cubes = next;
    }
    // `cubes` now holds only active entries, so its length is the answer.
    cubes.len()
}
/// A point in 4-dimensional integer space.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct Point4d {
    x: i64,
    y: i64,
    z: i64,
    w: i64,
}
impl Point4d {
    /// Creates a point from its four coordinates.
    pub fn new(x: i64, y: i64, z: i64, w: i64) -> Self {
        Self { x, y, z, w }
    }
    /// Returns an iterator over the 80 points adjacent to `self`
    /// (all offsets in {-1, 0, 1}^4 except the zero offset).
    pub fn neighbours(&self) -> impl Iterator<Item = Self> + '_ {
        (-1..=1)
            .flat_map(move |w| {
                (-1..=1).flat_map(move |z| {
                    (-1..=1)
                        .flat_map(move |y| (-1..=1).map(move |x| *self + Self::new(x, y, z, w)))
                })
            })
            .filter(move |p| *p != *self)
    }
}
impl ops::Add<Self> for Point4d {
    type Output = Self;
    /// Component-wise addition.
    fn add(self, rhs: Self) -> Self {
        Self {
            x: self.x + rhs.x,
            y: self.y + rhs.y,
            z: self.z + rhs.z,
            w: self.w + rhs.w,
        }
    }
}
/// Sparse grid: maps a 4-D position to whether the cube there is active.
type HyperCubeMap = HashMap<Point4d, bool>;
/// Part 2: same automaton as `part1` but in four dimensions ('#' cells start
/// active at z = w = 0); returns the active-cube count after six cycles.
fn part2(text: &str) -> usize {
    let mut cubes = HyperCubeMap::new();
    for (row, line) in text.lines().enumerate() {
        for (col, c) in line.chars().enumerate() {
            if c == '#' {
                cubes.insert(Point4d::new(col as i64, row as i64, 0, 0), true);
            }
        }
    }
    for _ in 0..6 {
        // Expand: ensure every neighbour of a known cube is present so the
        // simulation below can activate currently-unknown positions.
        let mut next = cubes.clone();
        for cube in cubes.keys() {
            for n in cube.neighbours() {
                next.entry(n).or_insert(false);
            }
        }
        cubes = next;
        // Simulate one step; `next` only ever holds active cubes.
        let mut next = HyperCubeMap::new();
        for (cube, state) in &cubes {
            let n_active = cube
                .neighbours()
                .filter(|n| cubes.get(n).copied().unwrap_or(false))
                .count();
            let active = if *state {
                n_active == 2 || n_active == 3
            } else {
                n_active == 3
            };
            if active {
                next.insert(*cube, true);
            }
        }
        cubes = next;
    }
    cubes.len()
}
/// Entry point: reads the input file named by the first CLI argument and
/// prints the answer for the part ("1" or "2") named by the second.
///
/// Returns an `InvalidInput` error instead of panicking when arguments are
/// missing, and propagates read errors via `?` rather than `expect`.
fn main() -> Result<(), io::Error> {
    let args: Vec<String> = env::args().collect();
    if args.len() < 3 {
        return Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            "usage: <program> <input-file> <part: 1|2>",
        ));
    }
    let text = std::fs::read_to_string(&args[1])?;
    match args[2].as_str() {
        "1" => println!("{}", part1(&text)),
        "2" => println!("{}", part2(&text)),
        // Any other part selector is silently ignored, as before.
        _ => (),
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    // use super::*;
    // TODO: stub — add assertions exercising `part1` (e.g. the puzzle's
    // worked 3x3 example) once fixture data is checked in.
    #[test]
    fn test_part1() {}
}
|
mod add_iterable;
mod add_self;
mod create;
mod deref;
mod extend;
mod from_iterator;
mod index;
mod intersection;
mod into_iterator;
mod sub_iterable;
mod sub_self;
mod union;
|
extern crate reqwest;
extern crate url;
extern crate prettytable;
mod cli;
mod api;
mod benchmark;
use observer_ward::{scan, strings_to_urls, read_file_to_target, download_fingerprints_from_github};
use api::{api_server};
use cli::{WardArgs};
use std::process;
use std::io::{self, Read};
use std::thread;
use colored::Colorize;
use prettytable::{Table, Cell, Row, Attr, color};
use std::fs::File;
use futures::stream::FuturesUnordered;
use futures::StreamExt;
use benchmark::{Benchmark, NamedTimer};
#[macro_use]
extern crate log;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // CLI entry point: collect targets (stdin / single target / file),
    // optionally update fingerprints, scan with bounded concurrency, then
    // render results as a table and optionally export JSON/CSV.
    env_logger::init();
    let config = WardArgs::new();
    let mut targets = vec![];
    if !config.server_host_port.is_empty() {
        let server_host_port: String = config.server_host_port;
        // NOTE(review): `join()` blocks right here until the API server
        // thread exits, so in server mode the scanning code below only runs
        // after the server stops — confirm this is intended.
        thread::spawn(|| {
            api_server(server_host_port).unwrap();
        }).join().expect("Thread panicked")
    }
    // Target sources, in priority order: stdin, then -t/--target, then file.
    if config.stdin {
        let mut buffer = String::new();
        io::stdin().read_to_string(&mut buffer)?;
        targets.extend(strings_to_urls(buffer));
    } else if !config.target.is_empty() {
        targets.push(String::from(config.target));
    } else if !config.file.is_empty() {
        targets.extend(read_file_to_target(config.file));
    }
    if config.update {
        // Update-only mode: fetch fingerprints and exit without scanning.
        download_fingerprints_from_github().await;
        process::exit(0);
    }
    let mut benchmarks = Benchmark::init();
    let mut observer_ward_bench = NamedTimer::start("ObserverWard");
    if !targets.is_empty() {
        // Sliding window of at most 100 in-flight scans: prime the pool,
        // then push one new target for each completed scan.
        let mut worker = FuturesUnordered::new();
        let mut targets_iter = targets.iter();
        let mut results = vec![];
        for _ in 0..100 {
            match targets_iter.next() {
                Some(target) => {
                    worker.push(scan(target.to_string()))
                }
                None => { break; }
            }
        }
        while let Some(result) = worker.next().await {
            results.push(result);
            if let Some(target) = targets_iter.next() {
                worker.push(scan(target.to_string()));
            }
        }
        // Only sort smaller result sets by priority; large sets are left in
        // completion order (presumably to cap output-processing cost —
        // confirm the 2000 threshold is intentional).
        if results.len() < 2000 {
            results.sort_by(|a, b| b.priority.cmp(&a.priority));
        }
        if !config.json.is_empty() {
            serde_json::to_writer(&File::create(config.json)?, &results)?
        }
        // First table: every result (also used as the CSV export source).
        let mut table = Table::new();
        table.set_titles(Row::new(vec![Cell::new("Url"), Cell::new("Name"), Cell::new("Length"), Cell::new("Title"), Cell::new("Priority")]));
        for res in &results {
            let wwn: Vec<String> = res.what_web_name.iter().map(String::from).collect();
            table.add_row(
                Row::new(vec![
                    Cell::new(&res.url.as_str()),
                    Cell::new(&wwn.join("\n")).with_style(Attr::ForegroundColor(color::GREEN)),
                    Cell::new(&res.length.to_string()),
                    Cell::new(&textwrap::fill(res.title.as_str(), 40)),
                    Cell::new(&res.priority.to_string()),
                ]));
        }
        if !config.csv.is_empty() {
            let out = File::create(config.csv)?;
            table.to_csv(out)?;
        }
        // Second table: only priority > 0 results, printed to stdout.
        let mut table = Table::new();
        table.set_titles(Row::new(vec![Cell::new("Url"), Cell::new("Name"), Cell::new("Length"), Cell::new("Title"), Cell::new("Priority")]));
        for res in &results {
            if res.priority > 0 {
                let wwn: Vec<String> = res.what_web_name.iter().map(String::from).collect();
                table.add_row(
                    Row::new(vec![
                        Cell::new(&res.url.as_str()),
                        Cell::new(&wwn.join("\n")).with_style(Attr::ForegroundColor(color::GREEN)),
                        Cell::new(&res.length.to_string()),
                        Cell::new(&textwrap::fill(res.title.as_str(), 40)),
                        Cell::new(&res.priority.to_string()),
                    ]));
            }
        }
        if table.len() > 0 {
            println!("{}", "Important technology:".red());
            table.printstd();
        }
    }
    observer_ward_bench.end();
    benchmarks.push(observer_ward_bench);
    debug!("Benchmarks raw {:?}", benchmarks);
    info!("{}", benchmarks.summary());
    Ok(())
}
/// Kinds of terrain a map cell can hold.
pub enum Tile {
    GrassTile,
    SandTile,
}
/// Structures that can be placed on the map (no variants defined yet).
pub enum Structure {
}
/// A map of tiles stored as nested vectors (outer/inner axis orientation
/// not established here — confirm against the loader once implemented).
pub struct Map {
    tiles: Vec<Vec<Tile>>
}
impl Map {
    /// Loads a map from the file at `path`.
    ///
    /// Not implemented yet. The original body was empty, which did not
    /// compile because the signature promises a `Map`; `todo!()` keeps this
    /// an explicit, compiling stub until the file format is defined.
    #[allow(non_snake_case)] // keep the existing name so callers don't break
    fn fromFile(path: &str) -> Map {
        todo!("parse a Map from the file at {:?}", path)
    }
}
|
use std::cell::{Ref, RefCell};
use std::collections::BTreeMap;
use std::ops::{Index, IndexMut};
use std::rc::Rc;
use cranelift_entity::{EntityRef, PrimaryMap, SecondaryMap};
use intrusive_collections::UnsafeRef;
use firefly_diagnostics::{SourceSpan, Span};
use firefly_intern::Symbol;
use firefly_syntax_base::*;
use super::*;
#[derive(Clone)]
pub struct DataFlowGraph {
    /// Function signatures, shared with the owner via `Rc<RefCell<_>>`.
    pub signatures: Rc<RefCell<PrimaryMap<FuncRef, Signature>>>,
    /// Known callable functions by name, shared with the owner.
    pub callees: Rc<RefCell<BTreeMap<FunctionName, FuncRef>>>,
    /// Shared pool of interned constants.
    pub constants: Rc<RefCell<ConstantPool>>,
    /// Blocks in layout order.
    pub blocks: OrderedArenaMap<Block, BlockData>,
    /// Instruction storage.
    pub insts: ArenaMap<Inst, InstNode>,
    /// Per-instruction annotations keyed by symbol.
    pub inst_annotations: SecondaryMap<Inst, Annotations>,
    /// Result values of each instruction (indices into `value_lists`).
    pub results: SecondaryMap<Inst, ValueList>,
    /// Value definitions (instruction results and block parameters).
    pub values: PrimaryMap<Value, ValueData>,
    /// Backing storage for all value lists (results, block params, args).
    pub value_lists: ValueListPool,
}
impl DataFlowGraph {
    /// Creates an empty graph that shares `signatures`, `callees` and
    /// `constants` with its owner (hence the `Rc<RefCell<_>>` wrappers).
    pub fn new(
        signatures: Rc<RefCell<PrimaryMap<FuncRef, Signature>>>,
        callees: Rc<RefCell<BTreeMap<FunctionName, FuncRef>>>,
        constants: Rc<RefCell<ConstantPool>>,
    ) -> Self {
        Self {
            signatures,
            callees,
            constants,
            insts: ArenaMap::new(),
            inst_annotations: SecondaryMap::new(),
            results: SecondaryMap::new(),
            blocks: OrderedArenaMap::new(),
            values: PrimaryMap::new(),
            value_lists: ValueListPool::new(),
        }
    }
    /// Returns the signature of the given function reference
    ///
    /// Panics if `callee` was never registered.
    pub fn callee_signature(&self, callee: FuncRef) -> Ref<'_, Signature> {
        Ref::map(self.signatures.borrow(), |sigs| sigs.get(callee).unwrap())
    }
    /// Looks up the concrete function for the given MFA (module of None indicates that it is a local or imported function)
    pub fn get_callee(&self, mfa: FunctionName) -> Option<FuncRef> {
        self.callees.borrow().get(&mfa).copied()
    }
    /// Registers an MFA as a callable function with a default signature
    pub fn register_callee(&self, mfa: FunctionName) -> FuncRef {
        let mut callees = self.callees.borrow_mut();
        // Don't register duplicates
        if let Some(func) = callees.get(&mfa).copied() {
            return func;
        }
        let mut signatures = self.signatures.borrow_mut();
        let func = signatures.push(Signature::generate(&mfa));
        callees.insert(mfa, func);
        func
    }
    /// Interns `data` in the shared constant pool, returning its handle.
    pub fn make_constant(&mut self, data: ConstantItem) -> Constant {
        let mut constants = self.constants.borrow_mut();
        constants.insert(data)
    }
    /// Borrows the constant item behind `handle`.
    pub fn constant(&self, handle: Constant) -> Ref<'_, ConstantItem> {
        Ref::map(self.constants.borrow(), |pool| pool.get(handle))
    }
    /// Returns the type of the constant behind `handle`.
    pub fn constant_type(&self, handle: Constant) -> Type {
        let constants = self.constants.borrow();
        constants.get(handle).ty()
    }
    /// Allocates a new value definition and returns its key.
    pub fn make_value(&mut self, data: ValueData) -> Value {
        self.values.push(data)
    }
    /// Returns an iterator over all value definitions in the graph.
    pub fn values<'a>(&'a self) -> Values {
        Values {
            inner: self.values.iter(),
        }
    }
    /// Returns true if `v` refers to a value defined in this graph.
    pub fn value_is_valid(&self, v: Value) -> bool {
        self.values.is_valid(v)
    }
    /// Returns the type of value `v`.
    pub fn value_type(&self, v: Value) -> Type {
        self.values[v].ty()
    }
    /// Overwrites the type of value `v`.
    pub fn set_value_type(&mut self, v: Value, ty: Type) {
        self.values[v].set_type(ty)
    }
    /// Returns a clone of the definition data for value `v`.
    pub fn get_value(&self, v: Value) -> ValueData {
        self.values[v].clone()
    }
    /// Allocates `data` as a new instruction and appends it to `block`,
    /// returning the new instruction key.
    pub fn push_inst(&mut self, block: Block, data: InstData, span: SourceSpan) -> Inst {
        let inst = self.insts.alloc_key();
        let node = InstNode::new(inst, block, Span::new(span, data));
        self.insts.append(inst, node);
        // Make room in the results map so `results[inst]` is addressable.
        self.results.resize(inst.index() + 1);
        // The block's intrusive list holds a raw reference into the arena;
        // assumes arena slots are never moved after insertion — TODO confirm
        // that invariant is upheld by ArenaMap.
        let item = unsafe { UnsafeRef::from_raw(&self.insts[inst]) };
        unsafe {
            self.block_data_mut(block).append(item);
        }
        inst
    }
    /// Returns the argument values of `inst`.
    pub fn inst_args(&self, inst: Inst) -> &[Value] {
        self.insts[inst].arguments(&self.value_lists)
    }
    /// Returns the argument values of `inst`, mutably.
    pub fn inst_args_mut(&mut self, inst: Inst) -> &mut [Value] {
        self.insts[inst].arguments_mut(&mut self.value_lists)
    }
    /// Appends `args` to the argument list of `inst`.
    ///
    /// Panics if the instruction kind has no argument value list.
    pub fn append_inst_args(&mut self, inst: Inst, args: &[Value]) {
        let vlist = self.insts[inst]
            .arguments_list()
            .expect("cannot append arguments to instruction with no valuelist");
        vlist.extend(args.iter().copied(), &mut self.value_lists);
    }
    /// Attaches annotation `data` to `inst` under `key`.
    pub fn annotate_inst<A: Into<Annotation>>(&mut self, inst: Inst, key: Symbol, data: A) {
        self.inst_annotations[inst].insert_mut(key, data);
    }
    /// Clears and recreates the result values of `inst`, returning how many
    /// were created. For direct calls the callee signature decides the
    /// results; otherwise the opcode does. `ty` is the controlling type for
    /// opcodes whose result type is context-dependent.
    pub fn make_inst_results(&mut self, inst: Inst, ty: Type) -> usize {
        self.results[inst].clear(&mut self.value_lists);
        let opcode = self.insts[inst].opcode();
        if let Some(fdata) = self.call_signature(inst) {
            // Tail calls are equivalent to return, they don't have results that are materialized as values
            if opcode == Opcode::Enter || opcode == Opcode::EnterIndirect {
                return 0;
            }
            // Erlang functions use a multi-value return calling convention
            let mut num_results = 0;
            for ty in fdata.results() {
                self.append_result(inst, ty.clone());
                num_results += 1;
            }
            num_results
        } else {
            // Create result values corresponding to the opcode's constraints.
            match self.insts[inst].opcode() {
                // Tail calls have no materialized results
                Opcode::EnterIndirect => 0,
                // An indirect call has no signature, but we know it must be Erlang
                // convention, and thus multi-value return
                Opcode::CallIndirect => {
                    self.append_result(inst, Type::Primitive(PrimitiveType::I1));
                    self.append_result(inst, ty);
                    2
                }
                // Initializing a binary match is a fallible operation that produces a match context when successful
                Opcode::BitsMatchStart => {
                    self.append_result(inst, Type::Primitive(PrimitiveType::I1));
                    self.append_result(inst, Type::Term(TermType::Any));
                    2
                }
                // Binary matches produce three results, an error flag, the matched value, and the rest of the binary
                Opcode::BitsMatch => {
                    self.append_result(inst, Type::Primitive(PrimitiveType::I1));
                    self.append_result(inst, ty);
                    self.append_result(inst, Type::Term(TermType::Any));
                    3
                }
                // This is an optimized form of BitsMatch that skips extraction of the term to be matched and just
                // advances the position in the underlying match context
                Opcode::BitsMatchSkip => {
                    self.append_result(inst, Type::Primitive(PrimitiveType::I1));
                    self.append_result(inst, Type::Term(TermType::Any));
                    2
                }
                // Binary construction produces two results, an error flag and the new binary value
                Opcode::BitsPush => {
                    self.append_result(inst, Type::Primitive(PrimitiveType::I1));
                    // This value is either the none term or an exception, depending on the is_err flag
                    self.append_result(inst, Type::Term(TermType::Any));
                    2
                }
                Opcode::BitsTestTail => {
                    self.append_result(inst, Type::Primitive(PrimitiveType::I1));
                    1
                }
                // Constants/immediates have known types
                Opcode::ImmInt
                | Opcode::ImmFloat
                | Opcode::ImmBool
                | Opcode::ImmAtom
                | Opcode::ImmNil
                | Opcode::ImmNone
                | Opcode::ImmNull
                | Opcode::ConstBigInt
                | Opcode::ConstBinary => {
                    self.append_result(inst, ty);
                    1
                }
                Opcode::IsNull => {
                    self.append_result(inst, Type::Primitive(PrimitiveType::I1));
                    1
                }
                // These arithmetic operators always return integers
                Opcode::Bnot
                | Opcode::Band
                | Opcode::Bor
                | Opcode::Bsl
                | Opcode::Bsr
                | Opcode::Div
                | Opcode::Rem => {
                    self.append_result(inst, Type::Term(TermType::Integer));
                    1
                }
                // These arithmetic operators always return floats
                Opcode::Fdiv => {
                    self.append_result(inst, Type::Term(TermType::Float));
                    1
                }
                // These binary arithmetic operators are polymorphic on their argument types
                Opcode::Add | Opcode::Sub | Opcode::Mul => {
                    let (lhs, rhs) = {
                        let args = self.inst_args(inst);
                        (args[0], args[1])
                    };
                    let lhs_ty = self.value_type(lhs);
                    let rhs_ty = self.value_type(rhs);
                    let ty = lhs_ty
                        .as_term()
                        .unwrap()
                        .coerce_to_numeric_with(rhs_ty.as_term().unwrap());
                    self.append_result(inst, Type::Term(ty));
                    1
                }
                // These unary arithmetic operators are polymorphic on their argument type
                Opcode::Neg => {
                    let arg = self.inst_args(inst)[0];
                    let ty = self.value_type(arg).as_term().unwrap().coerce_to_numeric();
                    self.append_result(inst, Type::Term(ty));
                    1
                }
                // Casts produce a single output from a single input
                Opcode::Cast => {
                    self.append_result(inst, ty);
                    1
                }
                // These unary integer operators always produce primitive type outputs
                Opcode::Trunc | Opcode::Zext => {
                    self.append_result(inst, ty);
                    1
                }
                // These boolean operators always produce primitive boolean outputs
                Opcode::IcmpEq
                | Opcode::IcmpNeq
                | Opcode::IcmpGt
                | Opcode::IcmpGte
                | Opcode::IcmpLt
                | Opcode::IcmpLte
                | Opcode::IsType
                | Opcode::IsTaggedTuple => {
                    self.append_result(inst, Type::Primitive(PrimitiveType::I1));
                    1
                }
                // These boolean operators always produce boolean term outputs
                Opcode::Eq
                | Opcode::EqExact
                | Opcode::Neq
                | Opcode::NeqExact
                | Opcode::Gt
                | Opcode::Gte
                | Opcode::Lt
                | Opcode::Lte
                | Opcode::And
                | Opcode::AndAlso
                | Opcode::Or
                | Opcode::OrElse
                | Opcode::Not => {
                    self.append_result(inst, Type::Term(TermType::Bool));
                    1
                }
                // These ops have specific types they produce
                Opcode::Cons => {
                    self.append_result(inst, Type::Term(TermType::Cons));
                    1
                }
                Opcode::ListConcat | Opcode::ListSubtract => {
                    self.append_result(inst, Type::Term(TermType::List(None)));
                    1
                }
                Opcode::Head | Opcode::GetElement => {
                    self.append_result(inst, Type::Term(TermType::Any));
                    1
                }
                Opcode::Tail => {
                    self.append_result(inst, Type::Term(TermType::MaybeImproperList));
                    1
                }
                Opcode::Tuple | Opcode::SetElement | Opcode::SetElementMut => {
                    self.append_result(inst, Type::Term(TermType::Tuple(None)));
                    1
                }
                Opcode::MakeFun => {
                    self.append_result(inst, Type::Primitive(PrimitiveType::I1));
                    self.append_result(inst, Type::Term(TermType::Any));
                    2
                }
                Opcode::UnpackEnv => {
                    self.append_result(inst, ty);
                    1
                }
                Opcode::RecvStart => {
                    // This primop returns a receive context
                    self.append_result(inst, Type::RecvContext);
                    1
                }
                Opcode::RecvNext => {
                    // This opcode returns the receive state machine state
                    self.append_result(inst, Type::RecvState);
                    1
                }
                Opcode::RecvPeek => {
                    // This primop returns the current message which the receive is inspecting
                    self.append_result(inst, Type::Term(TermType::Any));
                    1
                }
                Opcode::ExceptionClass => {
                    self.append_result(inst, Type::Term(TermType::Atom));
                    1
                }
                Opcode::ExceptionReason => {
                    self.append_result(inst, Type::Term(TermType::Any));
                    1
                }
                Opcode::ExceptionTrace => {
                    self.append_result(inst, Type::ExceptionTrace);
                    1
                }
                // All remaining opcodes produce no results.
                _ => 0,
            }
        }
    }
    /// Appends one result value of type `ty` to `inst` and returns it.
    pub fn append_result(&mut self, inst: Inst, ty: Type) -> Value {
        let res = self.values.next_key();
        let num = self.results[inst].push(res, &mut self.value_lists);
        // Result positions are stored as u16 in ValueData::Inst below.
        debug_assert!(num <= u16::MAX as usize, "too many result values");
        self.make_value(ValueData::Inst {
            ty,
            inst,
            num: num as u16,
        })
    }
    /// Returns the first result of `inst`; panics if it has none.
    pub fn first_result(&self, inst: Inst) -> Value {
        self.results[inst]
            .first(&self.value_lists)
            .expect("instruction has no results")
    }
    /// Returns true if `inst` produces at least one result value.
    pub fn has_results(&self, inst: Inst) -> bool {
        !self.results[inst].is_empty()
    }
    /// Returns the result values of `inst` as a slice.
    pub fn inst_results(&self, inst: Inst) -> &[Value] {
        self.results[inst].as_slice(&self.value_lists)
    }
    /// Returns the callee signature for a direct call instruction; `None`
    /// for non-calls and indirect calls (which have no static signature).
    pub fn call_signature(&self, inst: Inst) -> Option<Signature> {
        match self.insts[inst].analyze_call(&self.value_lists) {
            CallInfo::NotACall => None,
            CallInfo::Indirect(_, _) => None,
            CallInfo::Direct(f, _) => Some(self.callee_signature(f).clone()),
        }
    }
    /// Returns branch information for `inst` (targets and arguments).
    pub fn analyze_branch(&self, inst: Inst) -> BranchInfo {
        self.insts[inst].analyze_branch(&self.value_lists)
    }
    /// Returns an iterator over all blocks in layout order.
    pub fn blocks<'f>(&'f self) -> impl Iterator<Item = (Block, &'f BlockData)> {
        Blocks {
            cursor: self.blocks.cursor(),
        }
    }
    /// Returns an iterator over the instructions of `block` in order.
    pub fn block_insts<'f>(&'f self, block: Block) -> impl Iterator<Item = Inst> + 'f {
        self.blocks[block].insts()
    }
    /// Borrows the data of `block`.
    pub fn block_data(&self, block: Block) -> &BlockData {
        &self.blocks[block]
    }
    /// Mutably borrows the data of `block`.
    pub fn block_data_mut(&mut self, block: Block) -> &mut BlockData {
        &mut self.blocks[block]
    }
    /// Returns the last instruction of `block`, if any.
    pub fn last_inst(&self, block: Block) -> Option<Inst> {
        self.blocks[block].last()
    }
    /// Returns true if `block` is present in the layout.
    pub fn is_block_inserted(&self, block: Block) -> bool {
        self.blocks.contains(block)
    }
    /// Returns true if `block` contains no instructions.
    pub fn is_block_empty(&self, block: Block) -> bool {
        self.blocks[block].is_empty()
    }
    /// Creates a new, empty block and returns its key.
    pub fn make_block(&mut self) -> Block {
        self.blocks.push(BlockData::new())
    }
    /// Removes `block` from the layout.
    pub fn remove_block(&mut self, block: Block) {
        self.blocks.remove(block);
    }
    /// Returns the number of parameters of `block`.
    pub fn num_block_params(&self, block: Block) -> usize {
        self.blocks[block].params.len(&self.value_lists)
    }
    /// Returns the parameter values of `block` as a slice.
    pub fn block_params(&self, block: Block) -> &[Value] {
        self.blocks[block].params.as_slice(&self.value_lists)
    }
    /// Returns the types of `block`'s parameters, in order.
    pub fn block_param_types(&self, block: Block) -> Vec<Type> {
        self.block_params(block)
            .iter()
            .map(|&v| self.value_type(v))
            .collect()
    }
    /// Appends a new parameter of type `ty` to `block` and returns its value.
    pub fn append_block_param(&mut self, block: Block, ty: Type, span: SourceSpan) -> Value {
        let param = self.values.next_key();
        let num = self.blocks[block].params.push(param, &mut self.value_lists);
        // Parameter positions are stored as u16 in ValueData::Param below.
        debug_assert!(num <= u16::MAX as usize, "too many parameters on block");
        self.make_value(ValueData::Param {
            ty,
            num: num as u16,
            block,
            span,
        })
    }
}
// Allow `dfg[inst]` to index directly into instruction storage.
impl Index<Inst> for DataFlowGraph {
    type Output = Span<InstData>;
    fn index(&self, inst: Inst) -> &Span<InstData> {
        &self.insts[inst]
    }
}
// Mutable counterpart: `dfg[inst] = …` / in-place instruction edits.
impl IndexMut<Inst> for DataFlowGraph {
    fn index_mut(&mut self, inst: Inst) -> &mut Span<InstData> {
        &mut self.insts[inst]
    }
}
/// Iterator over a graph's blocks in layout order, driven by a cursor into
/// the intrusive block list.
struct Blocks<'f> {
    cursor: intrusive_collections::linked_list::Cursor<'f, LayoutAdapter<Block, BlockData>>,
}
impl<'f> Iterator for Blocks<'f> {
    type Item = (Block, &'f BlockData);
    /// Yields `(key, data)` for the block under the cursor, then advances.
    fn next(&mut self) -> Option<Self::Item> {
        // A null cursor means we have walked off the end of the list;
        // `Cursor::get` returns `None` exactly in that case.
        let data = self.cursor.get()?;
        let item = (data.key(), data.value());
        self.cursor.move_next();
        Some(item)
    }
}
|
mod player_movement;
pub mod animations;
pub use self::player_movement::PlayerSystem;
|
use crate::comms::{CommsMessage, CommsVerifier};
use crate::manager::AccountStatus;
use crate::primitives::{Account, AccountType, ChallengeStatus, NetAccount, Result};
use crate::Database;
use strsim::jaro;
/// Maximum number of similar-name violations reported per check, to keep
/// the persisted/sent violation list bounded.
pub const VIOLATIONS_CAP: usize = 5;
pub struct DisplayNameHandler {
    // Storage for display names, violations and account/challenge statuses.
    db: Database,
    // Channel used to receive verification requests and publish results.
    comms: CommsVerifier,
    // Jaro-similarity threshold above which two names count as too similar.
    limit: f64,
}
impl DisplayNameHandler {
    /// Creates a handler around the given database, comms channel and
    /// similarity limit (scores above `limit` count as violations).
    pub fn new(db: Database, comms: CommsVerifier, limit: f64) -> Self {
        // Field-init shorthand instead of the redundant `db: db, …`.
        DisplayNameHandler { db, comms, limit }
    }
    /// Runs the message loop forever, logging (and otherwise swallowing)
    /// errors so one failed iteration does not stop the handler.
    pub async fn start(self) {
        loop {
            let _ = self.local().await.map_err(|err| {
                error!("{}", err);
                err
            });
        }
    }
    /// Waits for one comms message and dispatches it.
    pub async fn local(&self) -> Result<()> {
        use CommsMessage::*;
        match self.comms.recv().await {
            AccountToVerify {
                net_account,
                account,
            } => {
                self.handle_display_name_matching(net_account, account)
                    .await?
            }
            _ => error!("Received unrecognized message type"),
        }
        Ok(())
    }
    /// Checks `account`'s display name against all stored display names,
    /// persists any violations (capped at `VIOLATIONS_CAP`), records the
    /// resulting account/challenge status, and notifies via comms.
    pub async fn handle_display_name_matching(
        &self,
        net_account: NetAccount,
        account: Account,
    ) -> Result<()> {
        let display_names = self.db.select_display_names(&net_account).await?;
        let mut violations = vec![];
        for display_name in &display_names {
            if Self::is_too_similar(display_name, &account, self.limit) {
                violations.push(display_name.clone());
            }
            // Cap the violation list, prevent sending oversized buffers.
            if violations.len() == VIOLATIONS_CAP {
                break;
            }
        }
        self.db.delete_display_name_violations(&net_account).await?;
        // The display name does obviously not need to be verified by
        // signing a challenge or having to contact an address. But we just
        // treat it as any other "account".
        if violations.is_empty() {
            // Keep track of display names for future matching.
            self.db.insert_display_name(&net_account, &account).await?;
            self.db
                .set_account_status(&account, &AccountType::DisplayName, &AccountStatus::Valid)
                .await?;
            self.db
                .set_challenge_status(
                    &net_account,
                    &AccountType::DisplayName,
                    &ChallengeStatus::Accepted,
                )
                .await?;
        } else {
            self.db
                .insert_display_name_violations(&net_account, &violations)
                .await?;
            self.db
                .set_account_status(&account, &AccountType::DisplayName, &AccountStatus::Invalid)
                .await?;
            self.db
                .set_challenge_status(
                    &net_account,
                    &AccountType::DisplayName,
                    &ChallengeStatus::Rejected,
                )
                .await?;
        }
        self.comms.notify_status_change(net_account);
        Ok(())
    }
    /// Returns true if the two names are too similar under `limit`, using
    /// the higher of whole-string Jaro and word-wise Jaro (case-insensitive).
    fn is_too_similar(display_name: &Account, account: &Account, limit: f64) -> bool {
        let name_str = display_name.as_str().to_lowercase();
        let account_str = account.as_str().to_lowercase();
        let similarities = [
            jaro(&name_str, &account_str),
            jaro_words(&name_str, &account_str, &[" ", "-", "_"]),
        ];
        similarities.iter().any(|&s| s > limit)
    }
}
/// Word-wise Jaro similarity: splits both strings, scores each left word
/// against its best-matching right word, and averages over the longer list.
///
/// NOTE(review): each string is split once *per* delimiter and all pieces
/// are concatenated, so a string's words are effectively duplicated for
/// every delimiter that does not occur in it — confirm this weighting is
/// intended before changing it.
fn jaro_words(left: &str, right: &str, delimiter: &[&str]) -> f64 {
    // Splits `string` on every delimiter separately, collecting all
    // non-empty trimmed pieces from each pass.
    fn splitter<'a>(string: &'a str, delimiter: &[&str]) -> Vec<&'a str> {
        let mut all = vec![];
        for del in delimiter {
            let mut words: Vec<&str> = string
                .split(del)
                .map(|s| s.trim())
                .filter(|s| !s.is_empty())
                .collect();
            all.append(&mut words);
        }
        all
    }
    let left_words = splitter(left, delimiter);
    let right_words = splitter(right, delimiter);
    // Guard against 0/0 (both inputs empty or all-delimiter): report zero
    // similarity instead of the NaN the plain division would yield. The only
    // caller compares the result with `> limit`, for which NaN and 0.0
    // behave identically.
    if left_words.is_empty() || right_words.is_empty() {
        return 0.0;
    }
    let mut total = 0.0;
    for left_word in &left_words {
        // Best similarity of this word against any word on the right.
        let mut best = 0.0;
        for right_word in &right_words {
            let sim = jaro(left_word, right_word);
            if sim > best {
                best = sim;
            }
        }
        total += best;
    }
    // `total` is already f64 — no cast needed; average over the longer list.
    total / left_words.len().max(right_words.len()) as f64
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::primitives::Account;
    const LIMIT: f64 = 0.85;
    // NOTE: the loops below originally read `for account in ¤t {` — a
    // mojibake of the HTML entity `&curren;` — which does not compile. They
    // have been restored to `for account in &current {`.
    #[test]
    fn is_too_similar() {
        let current = [
            Account::from("dave"),
            Account::from("Dave"),
            Account::from("daev"),
            Account::from("Daev"),
        ];
        let new = Account::from("dave");
        for account in &current {
            let res = DisplayNameHandler::is_too_similar(account, &new, LIMIT);
            assert!(res);
        }
        let current = [
            Account::from("David"),
            Account::from("alice"),
            Account::from("Alice"),
            Account::from("bob"),
            Account::from("Bob"),
            Account::from("eve"),
            Account::from("Eve"),
            Account::from("David"),
        ];
        for account in &current {
            let res = DisplayNameHandler::is_too_similar(account, &new, LIMIT);
            assert!(!res);
        }
    }
    #[test]
    fn is_too_similar_words() {
        let current = [
            Account::from("adam & eve"),
            Account::from("Adam & Eve"),
            Account::from("aadm & Eve"),
            Account::from("Aadm & Eve"),
            Account::from("adam & ev"),
            Account::from("Adam & Ev"),
            Account::from("eve & adam"),
            Account::from("Eve & Adam"),
        ];
        let new = Account::from("Adam & Eve");
        for account in &current {
            let res = DisplayNameHandler::is_too_similar(account, &new, LIMIT);
            assert!(res);
        }
        let current = [
            Account::from("alice & bob"),
            Account::from("Alice & Bob"),
            Account::from("jeff & john"),
            Account::from("Jeff & John"),
        ];
        let new = Account::from("Adam & Eve");
        for account in &current {
            let res = DisplayNameHandler::is_too_similar(account, &new, LIMIT);
            assert!(!res);
        }
    }
    #[test]
    fn is_too_similar_words_special_delimiter() {
        let current = [
            Account::from("adam & eve"),
            Account::from("Adam & Eve"),
            Account::from("aadm & Eve"),
            Account::from("Aadm & Eve"),
            Account::from("adam & ev"),
            Account::from("Adam & Ev"),
            Account::from("eve & adam"),
            Account::from("Eve & Adam"),
            //
            Account::from("adam-&-eve"),
            Account::from("Adam-&-Eve"),
            Account::from("aadm-&-Eve"),
            Account::from("Aadm-&-Eve"),
            Account::from("adam-&-ev"),
            Account::from("Adam-&-Ev"),
            Account::from("eve-&-adam"),
            Account::from("Eve-&-Adam"),
            //
            Account::from("adam_&_eve"),
            Account::from("Adam_&_Eve"),
            Account::from("aadm_&_Eve"),
            Account::from("Aadm_&_Eve"),
            Account::from("adam_&_ev"),
            Account::from("Adam_&_Ev"),
            Account::from("eve_&_adam"),
            Account::from("Eve_&_Adam"),
        ];
        let new = Account::from("Adam & Eve");
        for account in &current {
            let res = DisplayNameHandler::is_too_similar(account, &new, LIMIT);
            assert!(res);
        }
        let current = [
            Account::from("alice & bob"),
            Account::from("Alice & Bob"),
            Account::from("jeff & john"),
            Account::from("Jeff & John"),
            //
            Account::from("alice_&_bob"),
            Account::from("Alice_&_Bob"),
            Account::from("jeff_&_john"),
            Account::from("Jeff_&_John"),
            //
            Account::from("alice-&-bob"),
            Account::from("Alice-&-Bob"),
            Account::from("jeff-&-john"),
            Account::from("Jeff-&-John"),
        ];
        let new = Account::from("Adam & Eve");
        for account in &current {
            let res = DisplayNameHandler::is_too_similar(account, &new, LIMIT);
            assert!(!res);
        }
    }
    #[test]
    fn is_too_similar_unicode() {
        let current = [Account::from("👻🥺👌 Alice")];
        let new = Account::from("👻🥺👌 Alice");
        for account in &current {
            let res = DisplayNameHandler::is_too_similar(account, &new, LIMIT);
            assert!(res);
        }
        let current = [
            Account::from("Alice"),
            Account::from("👻🥺👌 Johnny 💀"),
            Account::from("🤖👈👈 Alice"),
            Account::from("👻🥺👌 Bob"),
            Account::from("👻🥺👌 Eve"),
        ];
        for account in &current {
            let res = DisplayNameHandler::is_too_similar(account, &new, LIMIT);
            assert!(!res);
        }
    }
}
|
use std::{net::SocketAddr, sync::Arc};
use thiserror::Error;
use bevy::prelude::{AppBuilder, Plugin};
use quinn::{crypto::rustls::TlsSession, generic::Incoming};
use bevy::prelude::IntoQuerySystem;
use futures::StreamExt;
use tokio::sync::mpsc::{unbounded_channel, UnboundedSender};
use tracing::info;
use crate::networking::{
events::{
ReceiveEvent,
SendEvent
},
systems::{
Connecting,
NetworkConnections,
SessionEventListenerState,
receive_net_events_system,
send_net_events_system,
SEND_NET_EVENT_STAGE,
RECEIVE_NET_EVENT_STAGE
}
};
/// Add this plugin to start a server which sends and receives packets to a large number of network connections
#[derive(Debug)]
pub struct Network {
    /// TLS private key matching `certificate`.
    pub private_key: quinn::PrivateKey,
    /// TLS certificate chain presented to connecting clients.
    pub certificate: quinn::CertificateChain,
    /// Socket address the QUIC endpoint listens on.
    pub addr: SocketAddr,
}
impl Plugin for Network {
    /// Registers networking resources/events with the app and spawns the
    /// connection-polling task (assumes a tokio runtime is already running —
    /// `tokio::spawn` panics otherwise; TODO confirm).
    fn build(&self, app: &mut AppBuilder) {
        // Create mpsc endpoints for received network events and store them in a resources
        let (send, recv) = unbounded_channel();
        app.add_resource(SessionEventListenerState {
            event_sender: send.clone(),
            event_receiver: recv,
            stream_senders: Default::default(),
            send_event_reader: Default::default(),
        });
        app.init_resource::<NetworkConnections>();
        app.add_event::<ReceiveEvent>();
        app.add_event::<SendEvent>();
        // Create listen socket
        let listening = create_endpoint(
            self.addr,
            self.private_key.clone(),
            self.certificate.clone(),
        )
        .expect("Failed to create socket");
        // Spawn a task that polls the socket for events and sends them into an mspc
        tokio::spawn(poll_new_connections(listening, send));
        // Add a system that consumes all network events from an MPSC and publishes them as ECS events
        app.add_system_to_stage(RECEIVE_NET_EVENT_STAGE, receive_net_events_system.system());
        // Add a system that consumes ECS events and forwards them to MPSCs which will eventually be sent over the network
        app.add_system_to_stage(SEND_NET_EVENT_STAGE, send_net_events_system.system());
    }
}
/// Poll for new incoming connection requests
///
/// Runs until the endpoint's incoming stream ends, spawning one handshake
/// task per connection attempt; then reports `SocketClosed` to the ECS.
async fn poll_new_connections(
    mut incoming: Incoming<TlsSession>,
    event_sender: UnboundedSender<ReceiveEvent>,
) {
    info!("Polling for incoming connections");
    // Keep polling for new incoming connections being opened
    while let Some(conn) = incoming.next().await {
        tokio::spawn(Connecting::new(conn, event_sender.clone()).run());
    }
    // Once the socket has closed notify the ECS about it. If sending this fails (because the ECS has stopped listening) just silently give up.
    info!("Socket closed");
    let _ = event_sender.send(ReceiveEvent::SocketClosed);
}
/// Failures that can occur while building the listening endpoint.
#[derive(Error, Debug)]
enum CreateEndpointError {
    // TLS/certificate configuration failed.
    #[error(transparent)]
    TLSError(#[from] rustls::TLSError),
    // Binding or configuring the QUIC endpoint failed.
    #[error(transparent)]
    EndpointError(#[from] quinn::EndpointError)
}
/// Create a network endpoint
///
/// Binds a QUIC endpoint on `listen` with the given TLS certificate chain
/// and private key, returning the stream of incoming connections.
fn create_endpoint(
    listen: SocketAddr,
    private_key: quinn::PrivateKey,
    certificate: quinn::CertificateChain,
) -> Result<Incoming<TlsSession>, CreateEndpointError>
{
    // Configure endpoint
    let mut transport_config = quinn::TransportConfig::default();
    transport_config.stream_window_uni(128);
    let mut server_config = quinn::ServerConfig::default();
    server_config.transport = Arc::new(transport_config);
    let mut server_config = quinn::ServerConfigBuilder::new(server_config);
    // Advertise the ALPN protocol id expected by clients.
    server_config.protocols(&[b"hq-29"]);
    // Configure encryption
    server_config.certificate(
        certificate,
        private_key,
    )?;
    // Begin listening for connections, drop the endpoint because we don't need to establish any outgoing connections
    let mut endpoint = quinn::Endpoint::builder();
    endpoint.listen(server_config.build());
    let (_, incoming) = endpoint.bind(&listen)?;
    Ok(incoming)
}
|
use std::convert::TryInto;
use crate::{
generation::WorldGenerator, Region, RegionWorldPosition, Tile, TileWorldCoordinate,
TileWorldPosition,
};
/// World generator that fills regions with a single tile, optionally only
/// up to a fixed height.
#[derive(Clone, Copy, Debug, Hash)]
pub struct FlatWorldGenerator {
    // Tile used to fill every generated position.
    fill: Tile,
    // If `Some`, only world rows with y below this value are filled;
    // if `None`, the whole region is filled.
    fill_height: Option<TileWorldCoordinate>,
}
impl FlatWorldGenerator {
    /// Creates a generator that fills with `fill`, limited to `fill_height`
    /// when given.
    pub fn new(fill: Tile, fill_height: Option<TileWorldCoordinate>) -> Self {
        FlatWorldGenerator { fill, fill_height }
    }
}
impl WorldGenerator for FlatWorldGenerator {
    /// Fills `region` with the configured tile: everywhere when no fill
    /// height is set, otherwise only at world rows below `fill_height`
    /// (assumes y grows "upward" from 0 — TODO confirm axis orientation).
    fn populate_region(&mut self, region_position: RegionWorldPosition, region: &mut Region) {
        match self.fill_height {
            None => {
                // Unbounded fill: every position in the region.
                for position in Region::BOUNDS.iter_positions() {
                    if let Ok(tile) = region.get_mut(position) {
                        *tile = Some(self.fill);
                    }
                }
            }
            Some(fill_height) => {
                // Bounded fill: work in world coordinates, convert back to
                // region-local coordinates, and skip out-of-region rows via
                // the fallible conversions.
                let world_position: TileWorldPosition = region_position.into();
                for x in (0..Region::WIDTH.into()).map(|x| x + world_position.x) {
                    for y in 0..fill_height {
                        let tile_region_position = TileWorldPosition::new(x, y) - world_position;
                        if let Ok(tile_region_position) = tile_region_position.try_into() {
                            if let Ok(tile) = region.get_mut(tile_region_position) {
                                *tile = Some(self.fill);
                            }
                        }
                    }
                }
            }
        }
    }
}
|
use crate::rocket::State;
use crate::rocket_contrib::json;
use crate::todos::{MaybeTodo, Todo};
use crate::Connection;
/// GET /todos — returns every stored todo as JSON.
#[get("/todos")]
pub fn get_todos(connection: State<Connection>) -> json::JsonValue {
    let conn = connection.lock().unwrap();
    json! {Todo::get_all(&conn)}
}
/// POST /todo — stores the todo supplied in the request body.
#[post("/todo", data = "<todo>")]
pub fn create_todo(todo: json::Json<Todo>, connection: State<Connection>) {
    let conn = connection.lock().unwrap();
    todo.create(&conn);
}
/// PUT /todo/<id> — replaces the todo with the given id.
#[put("/todo/<id>", data = "<todo>")]
pub fn update_todo(id: i32, todo: json::Json<Todo>, connection: State<Connection>) {
    let conn = connection.lock().unwrap();
    todo.update(id, &conn);
}
/// PATCH /todo/<id> — partially updates the todo with the given id using
/// only the fields present in the body.
#[patch("/todo/<id>", data = "<maybe_todo>")]
pub fn patch_todo(id: i32, maybe_todo: json::Json<MaybeTodo>, connection: State<Connection>) {
    let conn = connection.lock().unwrap();
    Todo::patch(id, maybe_todo.clone(), &conn);
}
/// DELETE /todo/<id> — removes the todo with the given id.
#[delete("/todo/<id>")]
pub fn delete_todo(id: i32, connection: State<Connection>) {
    let conn = connection.lock().unwrap();
    Todo::delete(id, &conn);
}
/// All todo CRUD routes, ready to be mounted on a rocket instance.
pub fn get_api_routes() -> Vec<rocket::Route> {
    routes![get_todos, create_todo, delete_todo, update_todo, patch_todo]
}
|
use {
bench_minplus_convolutions::*,
criterion::{
criterion_group, criterion_main, AxisScale, BenchmarkId, Criterion, PlotConfiguration,
Throughput,
},
rand::{prelude::StdRng, SeedableRng},
};
/// Benchmarks the three min-plus convolution implementations on randomly
/// generated convex inputs of increasing size.
fn minplus_convolutions(c: &mut Criterion) {
    let plot_config = PlotConfiguration::default().summary_scale(AxisScale::Logarithmic);
    let mut group = c.benchmark_group("Min-plus convolutions");
    group.plot_config(plot_config);
    // Fixed seed so every run benchmarks identical inputs.
    let mut rng = StdRng::seed_from_u64(42);
    for &size in &[10, 100, 1000, 10000, 1000000] {
        group.throughput(Throughput::Elements(size as u64));
        let input = (
            generate_convex_vec(&mut rng, size),
            generate_convex_vec(&mut rng, size),
        );
        // Brute force is only run on small sizes to keep run times sane.
        if size <= 1000 {
            group.bench_with_input(BenchmarkId::new("brute", size), &input, |bench, (a, b)| {
                bench.iter(|| drop(brute_minplus_convolution(a, b)))
            });
        }
        group.bench_with_input(
            BenchmarkId::new("monotone_minima", size),
            &input,
            |bench, (a, b)| bench.iter(|| drop(monotone_minima_minplus_convolution(a, b))),
        );
        group.bench_with_input(BenchmarkId::new("smawk", size), &input, |bench, (a, b)| {
            bench.iter(|| drop(smawk_minplus_convolution(a, b)))
        });
    }
    group.finish();
}
/// Benchmarks the three implementations on degenerate all-zero inputs.
fn minplus_convolutions_all_zeros(c: &mut Criterion) {
    let mut group = c.benchmark_group("Min-plus convolutions all zeros");
    group.plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic));
    for &size in [10, 100, 1000, 10000, 1000000].iter() {
        group.throughput(Throughput::Elements(size as u64));
        // Two zero vectors of matching length.
        let input = (vec![0; size], vec![0; size]);
        if size <= 1000 {
            group.bench_with_input(BenchmarkId::new("brute", size), &input, |bench, (a, b)| {
                bench.iter(|| drop(brute_minplus_convolution(a, b)))
            });
        }
        group.bench_with_input(
            BenchmarkId::new("monotone_minima", size),
            &input,
            |bench, (a, b)| bench.iter(|| drop(monotone_minima_minplus_convolution(a, b))),
        );
        group.bench_with_input(BenchmarkId::new("smawk", size), &input, |bench, (a, b)| {
            bench.iter(|| drop(smawk_minplus_convolution(a, b)))
        });
    }
    group.finish();
}
/// Benchmarks small input sizes on a linear plot scale.
fn minplus_convolutions_small(c: &mut Criterion) {
    let mut group = c.benchmark_group("Min-plus convolutions small");
    group.plot_config(PlotConfiguration::default().summary_scale(AxisScale::Linear));
    let mut rng = StdRng::seed_from_u64(42);
    for &size in [40, 60, 80, 100].iter() {
        group.throughput(Throughput::Elements(size as u64));
        let lhs = generate_convex_vec(&mut rng, size);
        let rhs = generate_convex_vec(&mut rng, size);
        let input = (lhs, rhs);
        group.bench_with_input(BenchmarkId::new("brute", size), &input, |bench, (a, b)| {
            bench.iter(|| drop(brute_minplus_convolution(a, b)))
        });
        group.bench_with_input(
            BenchmarkId::new("monotone_minima", size),
            &input,
            |bench, (a, b)| bench.iter(|| drop(monotone_minima_minplus_convolution(a, b))),
        );
        group.bench_with_input(BenchmarkId::new("smawk", size), &input, |bench, (a, b)| {
            bench.iter(|| drop(smawk_minplus_convolution(a, b)))
        });
    }
    group.finish();
}
/// Benchmarks tiny (1–4 element) inputs on a linear plot scale.
fn minplus_convolutions_very_small(c: &mut Criterion) {
    let mut group = c.benchmark_group("Min-plus convolutions very small");
    group.plot_config(PlotConfiguration::default().summary_scale(AxisScale::Linear));
    let mut rng = StdRng::seed_from_u64(42);
    for &size in [1, 2, 3, 4].iter() {
        group.throughput(Throughput::Elements(size as u64));
        let lhs = generate_convex_vec(&mut rng, size);
        let rhs = generate_convex_vec(&mut rng, size);
        let input = (lhs, rhs);
        group.bench_with_input(BenchmarkId::new("brute", size), &input, |bench, (a, b)| {
            bench.iter(|| drop(brute_minplus_convolution(a, b)))
        });
        group.bench_with_input(
            BenchmarkId::new("monotone_minima", size),
            &input,
            |bench, (a, b)| bench.iter(|| drop(monotone_minima_minplus_convolution(a, b))),
        );
        group.bench_with_input(BenchmarkId::new("smawk", size), &input, |bench, (a, b)| {
            bench.iter(|| drop(smawk_minplus_convolution(a, b)))
        });
    }
    group.finish();
}
// Register all benchmark functions under one `benches` group and generate
// the criterion harness entry point.
criterion_group!(
    benches,
    minplus_convolutions,
    minplus_convolutions_all_zeros,
    minplus_convolutions_small,
    minplus_convolutions_very_small
);
criterion_main!(benches);
|
use super::filer::{read_dir_entries, FilerItem, FilerItemWithoutIcon};
use super::Direction;
use crate::stdio_server::handler::{CachedPreviewImpl, Preview, PreviewTarget};
use crate::stdio_server::input::{KeyEvent, KeyEventType};
use crate::stdio_server::provider::{ClapProvider, Context, SearcherControl};
use crate::stdio_server::vim::preview_syntax;
use anyhow::{anyhow, Result};
use matcher::MatchScope;
use pattern::extract_grep_position;
use printer::Printer;
use serde_json::json;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use types::{ClapItem, Query};
/// Owns the background grep searcher task, ensuring at most one search runs
/// at a time.
#[derive(Debug)]
struct Grepper {
    // Handle to the currently running search, if any.
    searcher_control: Option<SearcherControl>,
}
impl Grepper {
    fn new() -> Self {
        Self {
            searcher_control: None,
        }
    }
    /// Stops any in-flight search. The kill runs on a blocking thread so the
    /// async caller is not stalled.
    fn kill_last_searcher(&mut self) {
        if let Some(control) = self.searcher_control.take() {
            tokio::task::spawn_blocking(move || {
                control.kill();
            });
        }
    }
    /// Starts a fresh grep of `query` under `path`, cancelling any previous
    /// search first and switching the display buffer to grep syntax.
    fn grep(&mut self, query: String, path: PathBuf, ctx: &Context) {
        // Cancel the previous search (same pattern as kill_last_searcher).
        if let Some(control) = self.searcher_control.take() {
            tokio::task::spawn_blocking(move || {
                control.kill();
            });
        }
        let matcher = ctx
            .matcher_builder()
            .match_scope(MatchScope::Full) // Force using MatchScope::Full.
            .build(Query::from(&query));
        let new_control = {
            let stop_signal = Arc::new(AtomicBool::new(false));
            let mut search_context = ctx.search_context(stop_signal.clone());
            // Restrict the search to the single requested path.
            search_context.paths = vec![path];
            let join_handle = tokio::spawn(async move {
                crate::searcher::grep::search(query, matcher, search_context).await
            });
            SearcherControl {
                stop_signal,
                join_handle,
            }
        };
        self.searcher_control.replace(new_control);
        // Best-effort: ignore failures when setting the display syntax.
        let _ = ctx
            .vim
            .setbufvar(ctx.env.display.bufnr, "&syntax", "clap_grep");
    }
}
/// File-explorer state: the directory currently being browsed plus cached
/// entries for every directory visited so far.
#[derive(Debug)]
struct Explorer {
    printer: Printer,
    // Directory currently shown in the display window.
    current_dir: PathBuf,
    // Entries per visited directory; revisiting avoids re-reading the disk.
    dir_entries_cache: HashMap<PathBuf, Vec<Arc<dyn ClapItem>>>,
    // Lines last rendered in the display window.
    current_lines: Vec<String>,
    icon_enabled: bool,
    winwidth: usize,
}
impl Explorer {
    /// Creates an explorer rooted at the context's working directory,
    /// caching the icon setting and display window width up front.
    async fn new(ctx: &Context) -> Result<Self> {
        let current_dir = ctx.cwd.to_path_buf();
        let printer = Printer::new(ctx.env.display_winwidth, icon::Icon::Null);
        let icon_enabled = ctx.vim.get_var_bool("clap_enable_icon").await?;
        let winwidth = ctx.vim.winwidth(ctx.env.display.winid).await?;
        Ok(Self {
            printer,
            current_dir,
            dir_entries_cache: HashMap::new(),
            current_lines: Vec::new(),
            icon_enabled,
            winwidth,
        })
    }
    /// Reads the initial directory listing, shows it when the input is
    /// empty, caches the entries, and sets the display buffer syntax.
    async fn init(&mut self, ctx: &Context) -> Result<()> {
        let cwd = &ctx.cwd;
        let entries = match read_dir_entries(cwd, ctx.env.icon.enabled(), None) {
            Ok(entries) => entries,
            Err(err) => {
                // Reading failed (e.g. permissions): surface the error in the
                // preview window instead of failing the provider.
                tracing::error!(?cwd, "Failed to read directory entries");
                ctx.vim.exec("show_lines_in_preview", [err.to_string()])?;
                return Ok(());
            }
        };
        let query: String = ctx.vim.input_get().await?;
        if query.is_empty() {
            let response = json!({ "entries": &entries, "dir": cwd, "total": entries.len() });
            ctx.vim
                .exec("clap#file_explorer#handle_on_initialize", response)?;
            self.current_lines = entries.clone();
        }
        self.dir_entries_cache.insert(
            cwd.to_path_buf(),
            entries
                .into_iter()
                .map(|line| Arc::new(FilerItem(line)) as Arc<dyn ClapItem>)
                .collect(),
        );
        ctx.vim
            .setbufvar(ctx.env.display.bufnr, "&syntax", "clap_filer")?;
        Ok(())
    }
    // Strip the leading filer icon.
    async fn current_line(&self, ctx: &Context) -> Result<String> {
        let curline = ctx.vim.display_getcurline().await?;
        let curline = if self.icon_enabled {
            // The icon and its separator occupy the first two chars.
            curline.chars().skip(2).collect()
        } else {
            curline
        };
        Ok(curline)
    }
    /// Tab on a directory enters it (then previews); Tab on a file previews it.
    async fn expand_dir_or_preview(&mut self, ctx: &mut Context) -> Result<()> {
        let curline = self.current_line(ctx).await?;
        let target_dir = self.current_dir.join(curline);
        if target_dir.is_dir() {
            self.goto_dir(target_dir, ctx)?;
            self.preview_current_line(ctx).await?;
        } else if target_dir.is_file() {
            let preview_target = PreviewTarget::File(target_dir);
            self.update_preview_with_target(preview_target, ctx).await?;
        }
        Ok(())
    }
    /// Moves up one directory, refreshing the prompt, listing and preview.
    async fn goto_parent(&mut self, ctx: &mut Context) -> Result<()> {
        self.load_parent(ctx)?;
        ctx.vim.exec(
            "clap#file_explorer#set_prompt",
            serde_json::json!([&self.current_dir, self.winwidth]),
        )?;
        self.current_lines = self.display_dir_entries(ctx)?;
        self.preview_current_line(ctx).await?;
        Ok(())
    }
    /// Enter: descend into a directory, open a file via the sink, or hand
    /// any other (non-existent) entry to the special-entry handler.
    async fn apply_sink(&mut self, ctx: &Context) -> Result<()> {
        let curline = self.current_line(ctx).await?;
        let target_dir = self.current_dir.join(curline);
        if target_dir.is_dir() {
            self.goto_dir(target_dir, ctx)?;
        } else if target_dir.is_file() {
            ctx.vim.exec("execute", ["stopinsert"])?;
            ctx.vim.exec("clap#file_explorer#sink", [target_dir])?;
        } else {
            // Neither a dir nor a file: pass the raw input through.
            let input = ctx.vim.input_get().await?;
            let target_file = self.current_dir.join(input);
            ctx.vim
                .exec("clap#file_explorer#handle_special_entries", [target_file])?;
        }
        Ok(())
    }
    /// Re-renders the cached entries of the current directory.
    fn show_dir_entries(&mut self, ctx: &Context) -> Result<()> {
        self.current_lines = self.display_dir_entries(ctx)?;
        Ok(())
    }
    /// Display the file explorer.
    fn display_dir_entries(&self, ctx: &Context) -> Result<Vec<String>> {
        let current_items = self
            .dir_entries_cache
            .get(&self.current_dir)
            .ok_or_else(|| anyhow!("Entries for {} not loaded", self.current_dir.display()))?;
        let processed = current_items.len();
        // Only the first 200 entries are rendered.
        let printer::DisplayLines {
            lines,
            mut indices,
            truncated_map: _,
            icon_added,
        } = self.printer.to_display_lines(
            current_items
                .iter()
                .take(200)
                .cloned()
                .map(Into::into)
                .collect(),
        );
        if ctx.env.icon.enabled() {
            // Shift match indices left to compensate for the 2-char icon prefix.
            indices.iter_mut().for_each(|v| {
                v.iter_mut().for_each(|x| {
                    *x -= 2;
                })
            });
        }
        let result = json!({
            "lines": &lines,
            "indices": indices,
            "matched": 0,
            "processed": processed,
            "icon_added": icon_added,
            "display_syntax": "clap_filer",
        });
        ctx.vim
            .exec("clap#state#process_filter_message", json!([result, true]))?;
        Ok(lines)
    }
    /// Previews whatever the cursor line points at: a directory listing or
    /// file contents.
    async fn preview_current_line(&self, ctx: &mut Context) -> Result<()> {
        let curline = self.current_line(ctx).await?;
        let target_dir = self.current_dir.join(curline);
        let preview_target = if target_dir.is_dir() {
            PreviewTarget::Directory(target_dir)
        } else {
            PreviewTarget::File(target_dir)
        };
        self.update_preview_with_target(preview_target, ctx).await
    }
    /// Renders the preview for `preview_target` and applies a syntax:
    /// directories use the filer syntax, files use detected file syntax.
    async fn update_preview_with_target(
        &self,
        preview_target: PreviewTarget,
        ctx: &mut Context,
    ) -> Result<()> {
        let preview_height = ctx.preview_height().await?;
        let preview_impl = CachedPreviewImpl {
            ctx,
            preview_height,
            preview_target,
            cache_line: None,
        };
        match preview_impl.get_preview().await {
            Ok((_preview_target, preview)) => {
                ctx.render_preview(preview)?;
                let maybe_syntax = preview_impl.preview_target.path().and_then(|path| {
                    if path.is_dir() {
                        Some("clap_filer")
                    } else if path.is_file() {
                        preview_syntax(path)
                    } else {
                        None
                    }
                });
                if let Some(syntax) = maybe_syntax {
                    ctx.vim.set_preview_syntax(syntax)?;
                }
            }
            Err(err) => {
                // Preview failure is non-fatal: show the error text instead.
                ctx.render_preview(Preview::new(vec![err.to_string()]))?;
            }
        }
        Ok(())
    }
    /// Switches to `dir`, clearing the input, updating the prompt and
    /// rendering that directory's entries.
    fn goto_dir(&mut self, dir: PathBuf, ctx: &Context) -> Result<()> {
        self.current_dir = dir.clone();
        if let Err(err) = self.read_entries_if_not_in_cache(dir) {
            ctx.vim.exec("show_lines_in_preview", [err.to_string()])?;
        }
        ctx.vim.exec("input_set", [""])?;
        ctx.vim.exec(
            "clap#file_explorer#set_prompt",
            serde_json::json!([&self.current_dir, self.winwidth]),
        )?;
        self.current_lines = self.display_dir_entries(ctx)?;
        Ok(())
    }
    /// Moves `current_dir` to its parent (no-op at the filesystem root) and
    /// ensures its entries are cached.
    fn load_parent(&mut self, ctx: &Context) -> Result<()> {
        let parent_dir = match self.current_dir.parent() {
            Some(parent) => parent,
            None => return Ok(()),
        };
        self.current_dir = parent_dir.to_path_buf();
        if let Err(err) = self.read_entries_if_not_in_cache(self.current_dir.clone()) {
            ctx.vim.exec("show_lines_in_preview", [err.to_string()])?;
        }
        Ok(())
    }
    /// Populates the cache entry for `target_dir` if it is not present.
    /// NOTE(review): entries are read from `self.current_dir`, not
    /// `target_dir` — callers set `current_dir` first; confirm this coupling.
    fn read_entries_if_not_in_cache(&mut self, target_dir: PathBuf) -> Result<()> {
        if let Entry::Vacant(v) = self.dir_entries_cache.entry(target_dir) {
            let entries = read_dir_entries(&self.current_dir, self.icon_enabled, None)?;
            v.insert(
                entries
                    .into_iter()
                    .map(|line| {
                        if self.icon_enabled {
                            Arc::new(FilerItem(line)) as Arc<dyn ClapItem>
                        } else {
                            Arc::new(FilerItemWithoutIcon(line)) as Arc<dyn ClapItem>
                        }
                    })
                    .collect(),
            );
        }
        Ok(())
    }
}
/// Active provider behavior: browsing directories (empty query) or
/// grepping (non-empty query).
#[derive(Debug)]
enum Mode {
    FileExplorer,
    FileSearcher,
}
/// Grep in an interactive way.
///
/// Combines a file explorer (shown while the input is empty) with an
/// interactive grep over the current directory (while the input is
/// non-empty); `mode` tracks which behavior is active.
#[derive(Debug)]
pub struct IgrepProvider {
    explorer: Explorer,
    grepper: Grepper,
    mode: Mode,
}
impl IgrepProvider {
    pub async fn new(ctx: &Context) -> Result<Self> {
        Ok(Self {
            explorer: Explorer::new(ctx).await?,
            grepper: Grepper::new(),
            mode: Mode::FileExplorer,
        })
    }
    /// Tab expands/previews in explorer mode (empty input), otherwise
    /// toggles the current selection.
    async fn on_tab(&mut self, ctx: &mut Context) -> Result<()> {
        let input = ctx.vim.input_get().await?;
        if input.is_empty() {
            self.explorer.expand_dir_or_preview(ctx).await?;
        } else {
            ctx.vim.bare_exec("clap#selection#toggle")?;
        }
        Ok(())
    }
    /// Backspace cancels the running search, then either goes up one
    /// directory (input already empty) or removes one char and re-greps /
    /// re-lists accordingly.
    async fn on_backspace(&mut self, ctx: &mut Context) -> Result<()> {
        self.grepper.kill_last_searcher();
        // Vim popup mode reads the input saved before the backspace was
        // applied (see variable name); nvim reports the input directly.
        let mut input: String = if ctx.env.is_nvim {
            ctx.vim.input_get().await?
        } else {
            ctx.vim
                .eval("g:__clap_popup_input_before_backspace_applied")
                .await?
        };
        if input.is_empty() {
            self.explorer.goto_parent(ctx).await?;
        } else {
            input.pop();
            ctx.vim.exec("input_set", [&input])?;
            if input.is_empty() {
                self.explorer.show_dir_entries(ctx)?;
            } else {
                self.grepper
                    .grep(input, self.explorer.current_dir.clone(), ctx)
            }
        }
        Ok(())
    }
    /// Enter applies the explorer sink, or — in searcher mode — parses the
    /// grep line under the cursor and opens that file at that position.
    async fn on_carriage_return(&mut self, ctx: &Context) -> Result<()> {
        match self.mode {
            Mode::FileExplorer => {
                self.explorer.apply_sink(ctx).await?;
            }
            Mode::FileSearcher => {
                let curline = ctx.vim.display_getcurline().await?;
                let grep_line = self.explorer.current_dir.join(curline);
                let (fpath, lnum, col, _line_content) = grep_line
                    .to_str()
                    .and_then(pattern::extract_grep_position)
                    .ok_or_else(|| {
                        anyhow!("Can not extract grep position: {}", grep_line.display())
                    })?;
                if !std::path::Path::new(fpath).is_file() {
                    ctx.vim.echo_info(format!("{fpath} is not a file"))?;
                    return Ok(());
                }
                ctx.vim.exec(
                    "clap#handler#sink_with",
                    json!(["clap#sink#open_file", fpath, lnum, col]),
                )?;
            }
        }
        Ok(())
    }
    /// Previews the file/line referenced by the grep result under the cursor.
    async fn preview_grep_line(&self, ctx: &mut Context) -> Result<()> {
        let curline = ctx.vim.display_getcurline().await?;
        if let Some((fpath, lnum, _col, _cache_line)) = extract_grep_position(&curline) {
            // Paths may be emitted relative to the search root as "./…".
            let fpath = fpath.strip_prefix("./").unwrap_or(fpath);
            let path = self.explorer.current_dir.join(fpath);
            let preview_target = PreviewTarget::LineInFile {
                path,
                line_number: lnum,
            };
            ctx.update_preview(Some(preview_target)).await?;
        }
        Ok(())
    }
}
#[async_trait::async_trait]
impl ClapProvider for IgrepProvider {
    /// Loads and displays the initial directory listing.
    async fn on_initialize(&mut self, ctx: &mut Context) -> Result<()> {
        self.explorer.init(ctx).await
    }
    /// Updates the preview for the line under the cursor: explorer preview
    /// for an empty query, grep-line preview otherwise.
    async fn on_move(&mut self, ctx: &mut Context) -> Result<()> {
        if !ctx.env.preview_enabled {
            return Ok(());
        }
        let query: String = ctx.vim.input_get().await?;
        if query.is_empty() {
            self.explorer.preview_current_line(ctx).await
        } else {
            self.preview_grep_line(ctx).await
        }
    }
    /// Every keystroke switches mode: empty query shows the directory
    /// listing, non-empty query starts a grep of the current directory.
    async fn on_typed(&mut self, ctx: &mut Context) -> Result<()> {
        let query: String = ctx.vim.input_get().await?;
        if query.is_empty() {
            self.mode = Mode::FileExplorer;
            self.explorer.show_dir_entries(ctx)?;
        } else {
            self.mode = Mode::FileSearcher;
            self.grepper
                .grep(query, self.explorer.current_dir.clone(), ctx);
        }
        Ok(())
    }
    /// Dispatches special keys to navigation, preview scrolling, or the
    /// Tab/Backspace/Enter handlers above.
    async fn on_key_event(&mut self, ctx: &mut Context, key_event: KeyEvent) -> Result<()> {
        let (key_event_type, _params) = key_event;
        match key_event_type {
            KeyEventType::CtrlN => ctx.next_input().await,
            KeyEventType::CtrlP => ctx.prev_input().await,
            KeyEventType::ShiftUp => ctx.scroll_preview(Direction::Up).await,
            KeyEventType::ShiftDown => ctx.scroll_preview(Direction::Down).await,
            KeyEventType::Tab => self.on_tab(ctx).await,
            KeyEventType::Backspace => self.on_backspace(ctx).await,
            KeyEventType::CarriageReturn => self.on_carriage_return(ctx).await,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): asserts a fixed listing, so this only passes when run
    // from a crate directory containing exactly `Cargo.toml` and `src/`.
    #[test]
    fn test_dir() {
        // /home/xlc/.vim/plugged/vim-clap/crates/stdio_server
        let entries = read_dir_entries(
            std::env::current_dir()
                .unwrap()
                .into_os_string()
                .into_string()
                .unwrap(),
            false,
            None,
        )
        .unwrap();
        assert_eq!(entries, vec!["Cargo.toml", "src/"]);
    }
}
|
use ted_interface::TerrainSprite;
use gvec::*;
use id_types::*;
// Number of terrain squares along one side of a chunk.
pub const CHUNK_WIDTH: i64=10;
// Edge length of one terrain square, in pixels.
pub const SQUARE_SIZE: f64=40.0;
// Pixel width of a whole chunk (derived from the two constants above).
pub const CHUNK_WIDTH_PIXELS: f64=(CHUNK_WIDTH as f64)*SQUARE_SIZE;
/// One terrain layer entry: which sprite to draw.
pub struct Terrain{
    pub sprite: TerrainSprite
}
/// Link from a terrain cell to its damageable entity.
pub struct DamageableTerrain{
    pub damageable_id: DamageableID
}
#[derive(PartialEq, Eq)]
pub enum BiomeType{
    GrassBiome
}
/// A CHUNK_WIDTH x CHUNK_WIDTH patch of the world.
pub struct Chunk{
    pub loading: bool,
    pub initialised: bool,
    // Stacked terrain sprites per square.
    pub terrain: Square<GVec<Terrain>>,
    // Per-square solidity value — units/semantics defined by consumers.
    pub solid: Square<u32>,
    // (x, y, sprites) changes accumulated since the last rendered frame.
    pub updates_since_last_frame: Vec<(usize,usize,Vec<TerrainSprite>)>,
    pub damageable_terrain: Square<GVec<DamageableTerrain>>,
    pub damaged_since_last_frame: bool,
    pub biome_type: BiomeType
}
/// A chunk-loading entity that follows a mover.
pub struct BasicChunkLoader{
    pub loader_id: ChunkLoaderID,
    pub mover_id: MoverID
}
/// Bookkeeping for which chunk coordinates are loaded / pending changes.
pub struct ChunkLoader{
    pub loaded_chunks: Vec<(i64,i64)>,
    pub keep_loaded: Vec<(i64, i64)>,
    pub terrain_updates: Vec<(i64,i64,Vec<TerrainSprite>)>,
    pub chunks_added: Vec<(i64, i64)>,
    pub chunks_added_stack: Vec<(i64, i64)>,
    pub chunks_removed: Vec<(i64,i64)>
}
// Fixed-size 2D array covering one chunk.
pub type Square<X>=[[X; CHUNK_WIDTH as usize]; CHUNK_WIDTH as usize];
|
// Copyright 2020 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fs;
use std::io;
use std::io::Write;
use clap::App;
use clap_generate::generate;
use clap_generate::generators::Bash;
use clap_generate::generators::Zsh;
use clap_generate::Generator;
use rustyline::error::ReadlineError;
use rustyline::Editor;
use crate::cmds::command::Command;
use crate::cmds::queries::query::QueryCommand;
use crate::cmds::ClusterCommand;
use crate::cmds::CommentCommand;
use crate::cmds::Config;
use crate::cmds::Env;
use crate::cmds::HelpCommand;
use crate::cmds::PackageCommand;
use crate::cmds::VersionCommand;
use crate::cmds::Writer;
use crate::error::Result;
/// Interactive CLI driver: dispatches parsed subcommands and runs the
/// rustyline-based REPL when no subcommand is given.
pub struct Processor {
    env: Env,
    readline: Editor<()>,
    // All registered commands, including the help command.
    commands: Vec<Box<dyn Command>>,
}
/// Writes a shell-completion script for `app` to stdout using generator `G`.
fn print_completions<G: Generator>(app: &mut App) {
    let bin_name = app.get_name().to_string();
    generate::<G, _>(app, bin_name, &mut io::stdout());
}
impl Processor {
    /// Builds the processor: ensures the databend directory exists, then
    /// registers all subcommands plus a help command that wraps them.
    pub fn create(conf: Config) -> Self {
        fs::create_dir_all(conf.databend_dir.clone()).unwrap();
        let sub_commands: Vec<Box<dyn Command>> = vec![
            Box::new(VersionCommand::create()),
            Box::new(CommentCommand::create()),
            Box::new(PackageCommand::create(conf.clone())),
            Box::new(QueryCommand::create(conf.clone())),
            Box::new(ClusterCommand::create(conf.clone())),
        ];
        let mut commands: Vec<Box<dyn Command>> = sub_commands.clone();
        // Help is given the plain subcommand list so it can describe them.
        commands.push(Box::new(HelpCommand::create(sub_commands)));
        Processor {
            env: Env::create(conf),
            readline: Editor::<()>::new(),
            commands,
        }
    }
    /// Dispatches on the parsed CLI subcommand; with no subcommand the
    /// interactive REPL is started.
    pub fn process_run(&mut self) -> Result<()> {
        let mut writer = Writer::create();
        match self.env.conf.clone().clap.into_inner().subcommand_name() {
            Some("package") => {
                let cmd = PackageCommand::create(self.env.conf.clone());
                return cmd.exec_match(
                    &mut writer,
                    self.env
                        .conf
                        .clone()
                        .clap
                        .into_inner()
                        .subcommand_matches("package"),
                );
            }
            Some("version") => {
                let cmd = VersionCommand::create();
                // Version takes no arguments; pass an empty string.
                cmd.exec(&mut writer, "".parse().unwrap())
            }
            Some("cluster") => {
                let cmd = ClusterCommand::create(self.env.conf.clone());
                return cmd.exec_match(
                    &mut writer,
                    self.env
                        .conf
                        .clone()
                        .clap
                        .into_inner()
                        .subcommand_matches("cluster"),
                );
            }
            Some("query") => {
                let cmd = QueryCommand::create(self.env.conf.clone());
                cmd.exec_match(
                    &mut writer,
                    self.env
                        .conf
                        .clone()
                        .clap
                        .into_inner()
                        .subcommand_matches("query"),
                )
            }
            Some("completion") => {
                // Emit a completion script for the shell named by the
                // "completion" argument.
                if let Some(generator) = self
                    .env
                    .conf
                    .clone()
                    .clap
                    .into_inner()
                    .subcommand_matches("completion")
                    .unwrap()
                    .value_of("completion")
                {
                    let mut app = Config::build_cli();
                    eprintln!("Generating completion file for {}...", generator);
                    match generator {
                        "bash" => print_completions::<Bash>(&mut app),
                        "zsh" => print_completions::<Zsh>(&mut app),
                        _ => panic!("Unknown generator"),
                    }
                }
                Ok(())
            }
            None => self.process_run_interactive(),
            _ => {
                println!("Some other subcommand was used");
                Ok(())
            }
        }
    }
    /// REPL loop: reads lines (with '\' continuation), records history and
    /// dispatches each completed line to `processor_line`.
    pub fn process_run_interactive(&mut self) -> Result<()> {
        let hist_path = format!("{}/history.txt", self.env.conf.databend_dir.clone());
        // History may not exist yet on first run; ignore load errors.
        let _ = self.readline.load_history(hist_path.as_str());
        let mut content = String::new();
        loop {
            let writer = Writer::create();
            // Show the multiline prompt while a '\'-continued line is open.
            let prompt = if content.is_empty() {
                self.env.prompt.as_str()
            } else {
                self.env.multiline_prompt.as_str()
            };
            let readline = self.readline.readline(prompt);
            match readline {
                Ok(line) => {
                    let line = line.trim();
                    if line.ends_with('\\') {
                        // Continuation: accumulate without the trailing '\'.
                        content.push_str(&line[0..line.len() - 1]);
                        continue;
                    }
                    content.push_str(line);
                    self.readline.history_mut().add(content.clone());
                    self.processor_line(writer, content.clone())?;
                    content.clear();
                }
                Err(ReadlineError::Interrupted) => {
                    println!("CTRL-C");
                    break;
                }
                Err(ReadlineError::Eof) => {
                    println!("CTRL-D");
                    break;
                }
                Err(err) => {
                    println!("Error: {:?}", err);
                    break;
                }
            }
        }
        self.readline.save_history(hist_path.as_str()).unwrap();
        Ok(())
    }
    /// Runs the first registered command that claims `line`, or prints a
    /// usage hint when none matches.
    pub fn processor_line(&self, mut writer: Writer, line: String) -> Result<()> {
        if let Some(cmd) = self.commands.iter().find(|c| c.is(&*line)) {
            cmd.exec(&mut writer, line.trim().to_string())?;
        } else {
            writeln!(writer, "Unknown command, usage: help").unwrap();
        }
        writer.flush()?;
        Ok(())
    }
}
|
// IOKit backend split into device / iterator / manager implementations,
// raw bindings (`iokit`) and shared traits.
mod device;
mod iokit;
mod iterator;
mod manager;
mod traits;
// Public surface of this backend.
pub use self::device::IoKitDevice;
pub use self::iterator::IoKitIterator;
pub use self::manager::IoKitManager;
#[cfg(test)]
mod tests;
|
mod error;
mod checksum;
mod cmd_line;
use std::fs::{OpenOptions, File};
use std::io::{Write, BufRead, BufReader, Read};
use std::path::{PathBuf, Path};
use std::sync::mpsc::channel;
use anyhow::Result;
use itertools::{join, Itertools};
use structopt::StructOpt;
use threadpool::ThreadPool;
use walkdir::{WalkDir, DirEntry};
use crate::checksum::{calculate_checksum, verify_checksum};
use crate::cmd_line::{AppArgs, Commands, GenerationOpt, VerificationOpt};
use crate::error::AppError;
/// Computes the checksum of a regular file, rejecting anything else
/// (directories, special files).
fn output_checksum(entry: DirEntry, opts: &GenerationOpt) -> Result<(PathBuf, Vec<u8>)> {
    let path = entry.path();
    let is_regular_file = path.is_file() && !path.is_dir();
    if !is_regular_file {
        return Err(AppError::InvalidFileError(path.to_path_buf()).into());
    }
    let digest = calculate_checksum(path, opts.algorithm)?;
    Ok((path.to_owned(), digest))
}
/// Set of canonicalized paths to skip during checksum generation
/// (typically the checksum output file itself).
struct Exclusion {
    // Canonicalized, de-duplicated exclusion paths.
    e: Vec<PathBuf>,
}
impl Exclusion {
    /// Builds the exclusion set.
    ///
    /// An exclude entry of "-" stands for the checksum output file: it
    /// resolves to `checksum_file` unless that is itself "-" (stdout), in
    /// which case it is dropped. Paths that fail to canonicalize (e.g. that
    /// do not exist) are silently skipped.
    ///
    /// Parameters are `&[PathBuf]` / `&Path` (instead of `&Vec<PathBuf>` /
    /// `&PathBuf`) — existing callers still work via deref coercion.
    fn new(excludes: &[PathBuf], checksum_file: &Path) -> Self {
        let mut seen = HashSet::new();
        Self {
            e: excludes
                .iter()
                .filter_map(|p| {
                    if p.to_string_lossy() == "-" {
                        if checksum_file.to_string_lossy() == "-" {
                            None
                        } else {
                            checksum_file.canonicalize().ok()
                        }
                    } else {
                        p.canonicalize().ok()
                    }
                })
                // De-duplicate with the standard library, keeping first-seen
                // order (replaces the itertools `unique()` call).
                .filter(|p| seen.insert(p.clone()))
                .collect(),
        }
    }
    /// Returns true when `path` canonicalizes to an excluded path.
    /// Canonicalization failures are reported and treated as not excluded.
    fn is_excluded(&self, path: &Path) -> bool {
        let canonical = match path.canonicalize() {
            Ok(p) => p,
            Err(e) => {
                eprintln!("{}", e);
                return false;
            }
        };
        self.e.contains(&canonical)
    }
}
/// Walks the configured directories, computes file checksums in a thread
/// pool, and writes sorted "checksum path" lines to the checksum file
/// (or stdout when the configured file is "-").
///
/// Returns Ok(false) when any individual file failed to checksum.
fn generate_checksums(opts: &GenerationOpt) -> Result<bool> {
    let pool = ThreadPool::new(opts.num_threads.into());
    let dot_prefix = format!(".{}", std::path::MAIN_SEPARATOR);
    let mut all_succeeded: bool = true;
    {
        let (tx, rx) = channel();
        // Number of checksum jobs actually scheduled. This must exactly match
        // the number of messages that will arrive on `rx`, or the receive
        // loop below would block forever.
        let mut count: usize = 0;
        let exclusion = Exclusion::new(&opts.exclude, &opts.checksum_file);
        for entry in opts.directory.iter().map(|d| WalkDir::new(d).follow_links(true).same_file_system(true)).flatten() {
            match entry {
                Ok(e) => {
                    if e.path().is_dir() || !e.path().is_file() || exclusion.is_excluded(e.path()) {
                        continue;
                    }
                    let tx = tx.clone();
                    let opts = opts.clone();
                    pool.execute(move || {
                        tx.send(output_checksum(e, &opts)).expect("Internal error.");
                    });
                    // BUGFIX: only count scheduled jobs. Previously walk errors
                    // were also counted even though they never send a message,
                    // which deadlocked the receive loop below.
                    count += 1;
                }
                Err(e) => {
                    eprintln!("{}", e);
                }
            };
        }
        let mut output: Box<dyn Write> = if opts.checksum_file == PathBuf::from("-") {
            Box::new(std::io::stdout())
        } else {
            Box::new(OpenOptions::new().create(true).write(true).truncate(true).open(&opts.checksum_file)?)
        };
        let mut results: Vec<(PathBuf, String)> = Vec::new();
        for _ in 0..count {
            match rx.iter().next().ok_or(AppError::UnknownError)? {
                Ok((path, checksum)) => {
                    // Strip a leading "./" so output paths are clean.
                    let path = path.strip_prefix(&dot_prefix).unwrap_or(&path);
                    let checksum_str = join(checksum.into_iter().map(|b| format!("{:02x}", b)), "");
                    results.push((path.to_owned(), checksum_str));
                }
                Err(e) => {
                    eprintln!("{}", e);
                    all_succeeded = false
                }
            }
        }
        // Deterministic output order regardless of worker completion order.
        results.sort_by(|e1, e2| e1.0.partial_cmp(&e2.0).unwrap());
        for e in results.into_iter() {
            // BUGFIX: write_all instead of write — `write` may perform a short
            // write and silently truncate a line.
            output.write_all(format!("{} {}\n", e.1, e.0.display()).as_bytes())?;
        }
    }
    pool.join();
    Ok(all_succeeded)
}
// Pulls the next whitespace-separated token out of $parts; on a malformed
// checksum line it reports the error and `continue`s the enclosing loop
// (so this macro may only be used inside a loop).
macro_rules! next_part {
    ($parts:expr, $line:expr) => {
        match $parts.next().ok_or(AppError::InvalidHashValue($line.to_string())) {
            Ok(s) => s,
            Err(e) => {
                eprintln!("{:?}", e);
                continue;
            }
        }.to_owned()
    }
}
/// Reads "checksum path" lines from the checksum file (or stdin for "-")
/// and verifies each file's checksum in a thread pool.
///
/// Returns Ok(false) when any file failed verification or could not be
/// checked at all.
fn verify_checksums(opts: &VerificationOpt) -> Result<bool> {
    let pool = ThreadPool::new(opts.num_threads.into());
    let mut all_succeeded: bool = true;
    {
        let input: Box<dyn Read> = if opts.checksum_file == PathBuf::from("-") {
            Box::new(std::io::stdin())
        } else {
            Box::new(File::open(&opts.checksum_file)?)
        };
        let (tx, rx) = channel();
        // Every scheduled verification sends exactly one message; `count`
        // tracks how many to receive below.
        let mut count: usize = 0;
        for line in BufReader::new(input).lines() {
            let line = line?;
            let mut parts = line.split_whitespace();
            let checksum = next_part!(parts, line);
            let path = PathBuf::from(next_part!(parts, line));
            let algorithm = opts.algorithm;
            let tx = tx.clone();
            pool.execute(move || {
                tx.send(verify_checksum(&path, &checksum, algorithm)).expect("Internal error.");
            });
            count += 1;
        }
        for _ in 0..count {
            match rx.iter().next().ok_or(AppError::UnknownError)? {
                Ok((path, is_ok)) => {
                    if is_ok {
                        if !opts.quiet {
                            println!("{}: OK", path.display());
                        }
                    } else {
                        println!("{}: FAILED", path.display());
                    }
                    all_succeeded &= is_ok;
                }
                Err(e) => {
                    eprintln!("{}", e);
                    // BUGFIX: a file that could not be checked at all (e.g.
                    // missing or unreadable) must fail the overall run —
                    // matching generate_checksums and `sha256sum -c` behavior.
                    // Previously the error was printed but the run still
                    // reported success.
                    all_succeeded = false;
                }
            }
        }
    }
    pool.join();
    Ok(all_succeeded)
}
/// Entry point: parse the CLI arguments, dispatch to generation or
/// verification, and exit with status 1 when the run reported failure.
fn main() -> Result<()> {
    let args = AppArgs::from_args();
    let succeeded = match &args.cmd {
        Commands::G { generation_opts: opts } => generate_checksums(opts)?,
        Commands::V { verification_opts: opts } => verify_checksums(opts)?,
    };
    if !succeeded {
        std::process::exit(1);
    }
    Ok(())
}
|
use boards::print::{print};
use Semaphore;
use Kernel;
/// Task body for button 1: logs and posts the Button1 semaphore to the
/// kernel. The semaphore argument is unused.
pub fn post_b1(_: Option<Semaphore>) {
    print("task posting button1");
    // Tail expression instead of an explicit `return` (idiomatic Rust);
    // behavior is unchanged.
    Kernel::os_post(Semaphore::Button1)
}
|
use std::time::Duration;
use std::sync::mpsc::{Receiver,Sender, channel};
use std::thread;
use std::thread::{Builder,sleep};
use def::*;
//---------------------------------------------------------
//Auxiliar functions
/// Formats bytes as uppercase, space-separated hex pairs, e.g. "0A FF".
///
/// Takes `&[u8]` instead of `&Vec<u8>` (existing `&Vec<u8>` callers still
/// work via deref coercion).
pub fn to_hex_string(bytes: &[u8]) -> String {
    let strs: Vec<String> = bytes.iter()
        .map(|b| format!("{:02X}", b))
        .collect();
    // BUGFIX: `connect` was deprecated in Rust 1.3 and later removed;
    // `join` is its direct replacement.
    strs.join(" ")
}
/// Returns true when `stream_id` has hit the protocol's maximum stream id
/// for the given native-protocol `version` (v1/v2 share one limit, v3 has
/// its own).
///
/// NOTE(review): the v1/v2 arm compares with `>=` while the v3 arm uses
/// `==` — this asymmetry looks unintentional; confirm against the
/// CQL_MAX_STREAM_ID_* definitions in `def`.
pub fn max_stream_id(stream_id: i16,version: u8) -> bool{
    (stream_id as i32 >= CQL_MAX_STREAM_ID_V1_V2 as i32 && (version == 1 || version == 2))
    || (stream_id as i32 == CQL_MAX_STREAM_ID_V3 as i32 && version == 3)
}
/// Spawns a "tick" thread that calls `f` every `delay`, until a message is
/// sent on the returned channel — or the returned Sender is dropped.
pub fn set_interval<F>(delay: Duration,f: F) -> Sender<()>
    where F: Fn(), F: Send + 'static + Sync{
    use std::sync::mpsc::TryRecvError;
    // BUGFIX: the channel type is `()`, not the redundant `(())`; and the
    // loop now also terminates on `Disconnected` — previously, dropping the
    // Sender without sending left the thread ticking forever (`try_recv`
    // returns Err(Disconnected), which `!is_ok()` treated as "keep going").
    let (tx, rx) = channel::<()>();
    thread::Builder::new().name("tick".to_string()).spawn(move || {
        loop {
            match rx.try_recv() {
                // Stop on an explicit signal or when the sender is gone.
                Ok(()) | Err(TryRecvError::Disconnected) => break,
                Err(TryRecvError::Empty) => {}
            }
            sleep(delay);
            f(); //Do stuff here
        }
    }).unwrap();
    tx
}
use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
use clippy_utils::higher::VecArgs;
use clippy_utils::source::snippet_opt;
use clippy_utils::usage::local_used_after_expr;
use clippy_utils::{get_enclosing_loop_or_closure, higher, path_to_local, path_to_local_id};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
use rustc_hir::{Expr, ExprKind, Param, PatKind, Unsafety};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::{self, ClosureKind, Ty, TypeFoldable};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
    /// ### What it does
    /// Checks for closures which just call another function where
    /// the function can be called directly. `unsafe` functions or calls where types
    /// get adjusted are ignored.
    ///
    /// ### Why is this bad?
    /// Needlessly creating a closure adds code for no benefit
    /// and gives the optimizer more work.
    ///
    /// ### Known problems
    /// If creating the closure inside the closure has a side-
    /// effect then moving the closure creation out will change when that side-
    /// effect runs.
    /// See [#1439](https://github.com/rust-lang/rust-clippy/issues/1439) for more details.
    ///
    /// ### Example
    /// ```rust,ignore
    /// // Bad
    /// xs.map(|x| foo(x))
    ///
    /// // Good
    /// xs.map(foo)
    /// ```
    /// where `foo(_)` is a plain function that takes the exact argument type of
    /// `x`.
    #[clippy::version = "pre 1.29.0"]
    pub REDUNDANT_CLOSURE,
    style,
    "redundant closures, i.e., `|a| foo(a)` (which can be written as just `foo`)"
}
declare_clippy_lint! {
    /// ### What it does
    /// Checks for closures which only invoke a method on the closure
    /// argument and can be replaced by referencing the method directly.
    ///
    /// ### Why is this bad?
    /// It's unnecessary to create the closure.
    ///
    /// ### Example
    /// ```rust,ignore
    /// Some('a').map(|s| s.to_uppercase());
    /// ```
    /// may be rewritten as
    /// ```rust,ignore
    /// Some('a').map(char::to_uppercase);
    /// ```
    #[clippy::version = "1.35.0"]
    pub REDUNDANT_CLOSURE_FOR_METHOD_CALLS,
    pedantic,
    "redundant closures for method calls"
}
// Both lints are implemented by the single EtaReduction pass below.
declare_lint_pass!(EtaReduction => [REDUNDANT_CLOSURE, REDUNDANT_CLOSURE_FOR_METHOD_CALLS]);
impl<'tcx> LateLintPass<'tcx> for EtaReduction {
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
        // Only inspect closures written directly in source code.
        if expr.span.from_expansion() {
            return;
        }
        let body = match expr.kind {
            ExprKind::Closure(_, _, id, _, _) => cx.tcx.hir().body(id),
            _ => return,
        };
        if body.value.span.from_expansion() {
            if body.params.is_empty() {
                if let Some(VecArgs::Vec(&[])) = higher::VecArgs::hir(cx, &body.value) {
                    // replace `|| vec![]` with `Vec::new`
                    span_lint_and_sugg(
                        cx,
                        REDUNDANT_CLOSURE,
                        expr.span,
                        "redundant closure",
                        "replace the closure with `Vec::new`",
                        "std::vec::Vec::new".into(),
                        Applicability::MachineApplicable,
                    );
                }
            }
            // skip `foo(|| macro!())`
            return;
        }
        let closure_ty = cx.typeck_results().expr_ty(expr);
        // Case 1: the closure body is a plain function call `|a, b| foo(a, b)`.
        if_chain!(
            if let ExprKind::Call(callee, args) = body.value.kind;
            if let ExprKind::Path(_) = callee.kind;
            if check_inputs(cx, body.params, args);
            let callee_ty = cx.typeck_results().expr_ty_adjusted(callee);
            let call_ty = cx.typeck_results().type_dependent_def_id(body.value.hir_id)
                .map_or(callee_ty, |id| cx.tcx.type_of(id));
            if check_sig(cx, closure_ty, call_ty);
            let substs = cx.typeck_results().node_substs(callee.hir_id);
            // This fixes some false positives that I don't entirely understand
            if substs.is_empty() || !cx.typeck_results().expr_ty(expr).has_late_bound_regions();
            // A type param function ref like `T::f` is not 'static, however
            // it is if cast like `T::f as fn()`. This seems like a rustc bug.
            if !substs.types().any(|t| matches!(t.kind(), ty::Param(_)));
            then {
                span_lint_and_then(cx, REDUNDANT_CLOSURE, expr.span, "redundant closure", |diag| {
                    if let Some(mut snippet) = snippet_opt(cx, callee.span) {
                        if_chain! {
                            if let ty::Closure(_, substs) = callee_ty.peel_refs().kind();
                            if substs.as_closure().kind() == ClosureKind::FnMut;
                            if get_enclosing_loop_or_closure(cx.tcx, expr).is_some()
                                || path_to_local(callee).map_or(false, |l| local_used_after_expr(cx, l, callee));
                            then {
                                // Mutable closure is used after current expr; we cannot consume it.
                                snippet = format!("&mut {}", snippet);
                            }
                        }
                        diag.span_suggestion(
                            expr.span,
                            "replace the closure with the function itself",
                            snippet,
                            Applicability::MachineApplicable,
                        );
                    }
                });
            }
        );
        // Case 2: the closure body is a method call `|a| a.foo()`, suggested
        // as a UFCS path like `Type::foo`.
        if_chain!(
            if let ExprKind::MethodCall(path, _, args, _) = body.value.kind;
            if check_inputs(cx, body.params, args);
            let method_def_id = cx.typeck_results().type_dependent_def_id(body.value.hir_id).unwrap();
            let substs = cx.typeck_results().node_substs(body.value.hir_id);
            let call_ty = cx.tcx.type_of(method_def_id).subst(cx.tcx, substs);
            if check_sig(cx, closure_ty, call_ty);
            then {
                span_lint_and_then(cx, REDUNDANT_CLOSURE_FOR_METHOD_CALLS, expr.span, "redundant closure", |diag| {
                    let name = get_ufcs_type_name(cx, method_def_id);
                    diag.span_suggestion(
                        expr.span,
                        "replace the closure with the method itself",
                        format!("{}::{}", name, path.ident.name),
                        Applicability::MachineApplicable,
                    );
                })
            }
        );
    }
}
/// True when every closure parameter is forwarded, unmodified and in order,
/// as the corresponding argument of the inner call.
fn check_inputs(cx: &LateContext<'_>, params: &[Param<'_>], call_args: &[Expr<'_>]) -> bool {
    if params.len() != call_args.len() {
        return false;
    }
    std::iter::zip(params, call_args).all(|(param, arg)| {
        // The parameter must be a plain binding that the argument refers to directly.
        let forwards_binding = match param.pat.kind {
            PatKind::Binding(_, id, ..) => path_to_local_id(arg, id),
            _ => false,
        };
        if !forwards_binding {
            return false;
        }
        // The argument may undergo no adjustment at all, or exactly a deref
        // followed by a re-borrow that keeps the original mutability;
        // anything else changes the type being passed.
        match *cx.typeck_results().expr_adjustments(arg) {
            [] => true,
            [
                Adjustment {
                    kind: Adjust::Deref(None),
                    ..
                },
                Adjustment {
                    kind: Adjust::Borrow(AutoBorrow::Ref(_, mu2)),
                    ..
                },
            ] => {
                // re-borrow with the same mutability is allowed
                let arg_ty = cx.typeck_results().expr_ty(arg);
                matches!(*arg_ty.kind(), ty::Ref(.., mu1) if mu1 == mu2.into())
            },
            _ => false,
        }
    })
}
/// True when the closure can be replaced by the callee without changing the
/// signature seen by callers.
fn check_sig<'tcx>(cx: &LateContext<'tcx>, closure_ty: Ty<'tcx>, call_ty: Ty<'tcx>) -> bool {
    let call_sig = call_ty.fn_sig(cx.tcx);
    // Never suggest substituting an `unsafe fn` for a (safe) closure.
    if call_sig.unsafety() == Unsafety::Unsafe {
        return false;
    }
    // Without late-bound regions on the closure, any safe signature works.
    if !closure_ty.has_late_bound_regions() {
        return true;
    }
    // Otherwise compare the two signatures with late-bound regions erased.
    if let ty::Closure(_, substs) = closure_ty.kind() {
        let closure_sig = cx.tcx.signature_unclosure(substs.as_closure().sig(), Unsafety::Normal);
        cx.tcx.erase_late_bound_regions(closure_sig) == cx.tcx.erase_late_bound_regions(call_sig)
    } else {
        false
    }
}
/// Renders the path a caller must use for UFCS: the trait path for trait
/// methods, otherwise the path (or textual form) of the implementing type.
fn get_ufcs_type_name(cx: &LateContext<'_>, method_def_id: DefId) -> String {
    match cx.tcx.associated_item(method_def_id).container {
        ty::TraitContainer(def_id) => cx.tcx.def_path_str(def_id),
        ty::ImplContainer(def_id) => {
            let self_ty = cx.tcx.type_of(def_id);
            if let ty::Adt(adt, _) = self_ty.kind() {
                cx.tcx.def_path_str(adt.did)
            } else {
                // Non-ADT self types (primitives, etc.) have no def path.
                self_ty.to_string()
            }
        },
    }
}
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-wasm32
#![feature(decl_macro, macros_in_extern)]
// Expands to a foreign declaration `fn $ident() -> isize;` — invoked inside
// the `extern` block below to exercise `macros_in_extern`.
macro_rules! returns_isize(
    ($ident:ident) => (
        fn $ident() -> isize;
    )
);
// Same idea as `returns_isize!`, but written as a 2.0-style `macro`
// (enabled by the `decl_macro` feature above).
macro takes_u32_returns_u32($ident:ident) {
    fn $ident (arg: u32) -> u32;
}
// Expands to nothing — checks that an empty expansion is accepted inside
// an `extern` block.
macro_rules! emits_nothing(
    () => ()
);
fn main() {
    // Both functions are declared via macro expansion in the extern block
    // below and resolved from the static `rust_test_helpers` library; the
    // calls are `unsafe` because they cross the FFI boundary.
    assert_eq!(unsafe { rust_get_test_int() }, 1isize);
    assert_eq!(unsafe { rust_dbg_extern_identity_u32(0xDEADBEEF) }, 0xDEADBEEFu32);
}
#[link(name = "rust_test_helpers", kind = "static")]
extern {
    // Each macro invocation must expand to zero or more valid foreign items.
    returns_isize!(rust_get_test_int);
    takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
    emits_nothing!();
}
|
use csv::Reader;
use csv::{ByteRecord, ByteRecordsIntoIter};
use encoding::all::ISO_8859_1;
use encoding::{DecoderTrap, EncodingRef};
use std::clone::Clone;
use std::fs::File;
use std::iter::FromIterator;
use super::Row;
/// Decode every field of `data` from `encoding` into a UTF-8 `Row`.
///
/// Undecodable bytes are substituted (`DecoderTrap::Replace`), so the
/// decode call itself does not fail here.
fn decode(data: ByteRecord, encoding: EncodingRef) -> Row {
    let mut row = Row::with_capacity(data.as_slice().len(), data.len());
    for field in data.iter() {
        let text = encoding.decode(field, DecoderTrap::Replace).unwrap();
        row.push_field(&text);
    }
    row
}
/// A CSV reader paired with the path it was opened from and the encoding
/// its bytes should be decoded with.
pub struct ReaderSource {
    pub reader: Reader<File>,
    // Original file path; appended to each row as the `_source` column.
    pub path: String,
    pub encoding: EncodingRef,
}
impl ReaderSource {
    /// The decoded header row, with the synthetic `_source` column appended.
    fn headers(&mut self) -> Row {
        let raw = self.reader.byte_headers().unwrap().clone();
        let mut row = decode(raw, self.encoding);
        row.push_field("_source");
        row
    }
}
/// The record iterator of the file currently being consumed, kept together
/// with its path and encoding (the reader itself was consumed by
/// `into_byte_records`).
pub struct ByteRecordsIntoIterSource {
    pub records: ByteRecordsIntoIter<File>,
    pub path: String,
    pub encoding: EncodingRef,
}
/// Concatenates several CSV sources into one stream of `Row`s.
pub struct InputStream {
    // Sources not yet started; popped (LIFO) once the current one is drained.
    readers: Vec<ReaderSource>,
    // The source currently being read.
    current_records: ByteRecordsIntoIterSource,
    // Headers taken from the first source; later sources must match them.
    headers: Row,
}
impl InputStream {
    /// Start a stream from a single source; its headers become the
    /// reference headers every later source must match.
    fn new(mut source: ReaderSource) -> InputStream {
        let headers = source.headers();
        let current_records = ByteRecordsIntoIterSource {
            path: source.path,
            records: source.reader.into_byte_records(),
            encoding: source.encoding,
        };
        InputStream {
            readers: Vec::new(),
            current_records,
            headers,
        }
    }

    /// Queue another source to be read once the current one is exhausted.
    fn add(&mut self, item: ReaderSource) {
        self.readers.push(item);
    }

    /// Headers shared by all sources (including the `_source` column).
    pub fn headers(&self) -> &Row {
        &self.headers
    }
}
impl FromIterator<ReaderSource> for InputStream {
    /// Builds a stream from one or more sources; the first source seeds the
    /// stream (and its headers), the rest are queued.
    fn from_iter<I: IntoIterator<Item = ReaderSource>>(iter: I) -> Self {
        let mut sources = iter.into_iter();
        let first = sources.next().expect("At least one input is required");
        let mut stream = InputStream::new(first);
        for source in sources {
            stream.add(source);
        }
        stream
    }
}
impl Iterator for InputStream {
    type Item = Row;

    /// Yields the next decoded row, with the originating file path appended
    /// as the trailing `_source` field.
    ///
    /// Records that fail to parse are skipped; when the current file is
    /// exhausted, the next queued reader takes over.
    ///
    /// # Panics
    /// Panics when a row's width differs from the headers, or when a later
    /// file's headers differ from the first file's.
    fn next(&mut self) -> Option<Self::Item> {
        // Iterative rather than recursive: a long run of unparsable records
        // or many empty queued files must not grow the call stack.
        loop {
            match self.current_records.records.next() {
                Some(Ok(record)) => {
                    let mut row = decode(record, self.current_records.encoding);
                    row.push_field(&self.current_records.path);
                    if row.len() != self.headers.len() {
                        panic!("Inconsistent size of rows");
                    }
                    return Some(row);
                }
                // Skip unparsable records. TODO warn something here
                Some(Err(_)) => continue,
                None => match self.readers.pop() {
                    Some(mut rs) => {
                        let new_headers = rs.headers();
                        if new_headers != self.headers {
                            panic!("Inconsistent headers among files");
                        }
                        self.current_records = ByteRecordsIntoIterSource {
                            path: rs.path,
                            records: rs.reader.into_byte_records(),
                            encoding: rs.encoding,
                        };
                        // Loop around and read from the new source.
                    }
                    None => return None,
                },
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::{InputStream, ReaderSource, Row};
    use encoding::all::{UTF_8, WINDOWS_1252};

    /// Rows from consecutive files are concatenated, each tagged with the
    /// path of the file it came from.
    #[test]
    fn test_read_concatenated() {
        let filenames = ["test/assets/1.csv", "test/assets/2.csv"];
        // `map`, not `filter_map(|f| Some(..))`: the closure never filters
        // anything out (clippy: unnecessary_filter_map).
        let mut input_stream: InputStream = filenames
            .iter()
            .map(|f| ReaderSource {
                reader: csv::Reader::from_path(f).unwrap(),
                path: f.to_string(),
                encoding: UTF_8,
            })
            .collect();
        assert_eq!(
            *input_stream.headers(),
            Row::from(vec!["a", "b", "_source"])
        );
        assert_eq!(
            input_stream.next(),
            Some(Row::from(vec!["1", "3", "test/assets/1.csv"]))
        );
        assert_eq!(
            input_stream.next(),
            Some(Row::from(vec!["5", "2", "test/assets/1.csv"]))
        );
        assert_eq!(
            input_stream.next(),
            Some(Row::from(vec!["2", "2", "test/assets/2.csv"]))
        );
        assert_eq!(
            input_stream.next(),
            Some(Row::from(vec!["4", "3", "test/assets/2.csv"]))
        );
    }

    /// Non-UTF-8 input (Windows-1252) is decoded according to the source's
    /// configured encoding.
    #[test]
    fn different_encoding() {
        let filenames = ["test/assets/windows1252/data.csv"];
        let mut input_stream: InputStream = filenames
            .iter()
            .map(|f| ReaderSource {
                reader: csv::Reader::from_path(f).unwrap(),
                path: f.to_string(),
                encoding: WINDOWS_1252,
            })
            .collect();
        assert_eq!(*input_stream.headers(), Row::from(vec!["name", "_source"]));
        assert_eq!(
            input_stream.next(),
            Some(Row::from(vec![
                "árbol",
                "test/assets/windows1252/data.csv"
            ]))
        );
    }
}
|
use std::ops::Index;
/// A set of small integers (0..=31), stored as the bits of a `u32`.
pub struct SmallBitSet(u32);

// `Index` must return a reference, so we hand out references to these
// static bool constants.
const TRUE: bool = true;
const FALSE: bool = false;

impl Index<usize> for SmallBitSet {
    type Output = bool;

    /// `set[i]` is `true` iff bit `i` is set.
    fn index(&self, index: usize) -> &bool {
        if self.0 & (1 << index) == 0 {
            &FALSE
        } else {
            &TRUE
        }
    }
}

impl SmallBitSet {
    /// Number of elements (set bits) in the set.
    pub fn count(&self) -> usize {
        self.0.count_ones() as usize
    }
}

/// Returns power of bitset (n <= 31)
pub fn power_bitset(n: usize) -> impl Iterator<Item = SmallBitSet> {
    assert!(n <= 31);
    PowerBitSetIter {
        n: n as u32,
        cur: 0,
    }
}

/// Counts `cur` from 0 up to (but excluding) `2^n`, yielding each value as
/// a `SmallBitSet` — i.e. every subset of `{0, .., n-1}`.
struct PowerBitSetIter {
    cur: u32,
    n: u32,
}

impl Iterator for PowerBitSetIter {
    type Item = SmallBitSet;

    fn next(&mut self) -> Option<SmallBitSet> {
        if self.cur >= (1 << self.n) {
            return None;
        }
        let item = SmallBitSet(self.cur);
        self.cur += 1;
        Some(item)
    }
}
|
fn main() {
    use text_grid::*;

    // Build a two-row grid and render it under two separator configurations.
    let mut grid = GridBuilder::new();
    grid.push(|b| {
        b.push("A");
        b.push("B");
        b.push("C");
    });
    grid.push(|b| {
        b.push("AAA");
        b.push("BBB");
        b.push("CCC");
    });
    for separators in [vec![true, true], vec![false, true]] {
        grid.set_column_separators(separators.clone());
        println!("{:?}", separators);
        println!("{}", grid);
    }
}
|
//! A randomised, globally unique identifier of a single ingester instance.
//!
//! The value of this ID is expected to change between restarts of the ingester.
use std::fmt::Display;
use uuid::Uuid;
/// A unique, random, opaque UUID assigned at startup of an ingester.
///
/// This [`IngesterId`] uniquely identifies a single ingester process - it
/// changes each time an ingester starts, reflecting the change of in-memory
/// state between crashes/restarts.
// `Copy` is cheap and safe here: the only state is the 16-byte UUID.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(crate) struct IngesterId(Uuid);
impl Display for IngesterId {
    /// Delegates to the inner UUID's `Display` implementation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}
impl IngesterId {
    /// Creates a fresh ID from a random (version-4) UUID.
    pub(crate) fn new() -> Self {
        Self(Uuid::new_v4())
    }
}

/// `Default` mirrors [`IngesterId::new`], letting the ID participate in
/// `#[derive(Default)]` containers (and satisfying clippy's
/// `new_without_default`). Note the "default" is still a fresh random value.
impl Default for IngesterId {
    fn default() -> Self {
        Self::new()
    }
}
|
// Generated (svd2rust-style) register API: `R` and `W` wrap the raw 32-bit
// value of the MPCBB1_VCTR61 register.
#[doc = "Reader of register MPCBB1_VCTR61"]
pub type R = crate::R<u32, super::MPCBB1_VCTR61>;
#[doc = "Writer for register MPCBB1_VCTR61"]
pub type W = crate::W<u32, super::MPCBB1_VCTR61>;
#[doc = "Register MPCBB1_VCTR61 `reset()`'s with value 0"]
#[doc = "Register MPCBB1_VCTR61 `reset()`'s with value 0"]
impl crate::ResetValue for super::MPCBB1_VCTR61 {
    type Type = u32;
    // Reset state: all 32 bit fields cleared.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `B1952`"]
pub type B1952_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1952`"]
// One write proxy exists per bit field; this one targets bit 0. The
// B1953..B1983 proxies that follow repeat this exact generated pattern,
// each shifted to its own bit position (1..31).
pub struct B1952_W<'a> {
    w: &'a mut W,
}
impl<'a> B1952_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then OR in the new value; returns `w` for chaining.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `B1953`"]
pub type B1953_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1953`"]
pub struct B1953_W<'a> {
w: &'a mut W,
}
impl<'a> B1953_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Reader of field `B1954`"]
pub type B1954_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1954`"]
pub struct B1954_W<'a> {
w: &'a mut W,
}
impl<'a> B1954_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Reader of field `B1955`"]
pub type B1955_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1955`"]
pub struct B1955_W<'a> {
w: &'a mut W,
}
impl<'a> B1955_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "Reader of field `B1956`"]
pub type B1956_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1956`"]
pub struct B1956_W<'a> {
w: &'a mut W,
}
impl<'a> B1956_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `B1957`"]
pub type B1957_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1957`"]
pub struct B1957_W<'a> {
w: &'a mut W,
}
impl<'a> B1957_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Reader of field `B1958`"]
pub type B1958_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1958`"]
pub struct B1958_W<'a> {
w: &'a mut W,
}
impl<'a> B1958_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Reader of field `B1959`"]
pub type B1959_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1959`"]
pub struct B1959_W<'a> {
w: &'a mut W,
}
impl<'a> B1959_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
#[doc = "Reader of field `B1960`"]
pub type B1960_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1960`"]
pub struct B1960_W<'a> {
w: &'a mut W,
}
impl<'a> B1960_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
self.w
}
}
#[doc = "Reader of field `B1961`"]
pub type B1961_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1961`"]
pub struct B1961_W<'a> {
w: &'a mut W,
}
impl<'a> B1961_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "Reader of field `B1962`"]
pub type B1962_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1962`"]
pub struct B1962_W<'a> {
w: &'a mut W,
}
impl<'a> B1962_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
#[doc = "Reader of field `B1963`"]
pub type B1963_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1963`"]
pub struct B1963_W<'a> {
w: &'a mut W,
}
impl<'a> B1963_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
self.w
}
}
#[doc = "Reader of field `B1964`"]
pub type B1964_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1964`"]
pub struct B1964_W<'a> {
w: &'a mut W,
}
impl<'a> B1964_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
self.w
}
}
#[doc = "Reader of field `B1965`"]
pub type B1965_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1965`"]
pub struct B1965_W<'a> {
w: &'a mut W,
}
impl<'a> B1965_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
self.w
}
}
#[doc = "Reader of field `B1966`"]
pub type B1966_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1966`"]
pub struct B1966_W<'a> {
w: &'a mut W,
}
impl<'a> B1966_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
self.w
}
}
#[doc = "Reader of field `B1967`"]
pub type B1967_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1967`"]
pub struct B1967_W<'a> {
w: &'a mut W,
}
impl<'a> B1967_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
self.w
}
}
#[doc = "Reader of field `B1968`"]
pub type B1968_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1968`"]
pub struct B1968_W<'a> {
w: &'a mut W,
}
impl<'a> B1968_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
self.w
}
}
#[doc = "Reader of field `B1969`"]
pub type B1969_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1969`"]
pub struct B1969_W<'a> {
w: &'a mut W,
}
impl<'a> B1969_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
self.w
}
}
#[doc = "Reader of field `B1970`"]
pub type B1970_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1970`"]
pub struct B1970_W<'a> {
w: &'a mut W,
}
impl<'a> B1970_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
self.w
}
}
#[doc = "Reader of field `B1971`"]
pub type B1971_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1971`"]
pub struct B1971_W<'a> {
w: &'a mut W,
}
impl<'a> B1971_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);
self.w
}
}
#[doc = "Reader of field `B1972`"]
pub type B1972_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1972`"]
pub struct B1972_W<'a> {
w: &'a mut W,
}
impl<'a> B1972_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
self.w
}
}
#[doc = "Reader of field `B1973`"]
pub type B1973_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1973`"]
pub struct B1973_W<'a> {
w: &'a mut W,
}
impl<'a> B1973_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);
self.w
}
}
#[doc = "Reader of field `B1974`"]
pub type B1974_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1974`"]
pub struct B1974_W<'a> {
w: &'a mut W,
}
impl<'a> B1974_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);
self.w
}
}
#[doc = "Reader of field `B1975`"]
pub type B1975_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1975`"]
pub struct B1975_W<'a> {
w: &'a mut W,
}
impl<'a> B1975_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);
self.w
}
}
#[doc = "Reader of field `B1976`"]
pub type B1976_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1976`"]
pub struct B1976_W<'a> {
w: &'a mut W,
}
impl<'a> B1976_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
self.w
}
}
#[doc = "Reader of field `B1977`"]
pub type B1977_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1977`"]
pub struct B1977_W<'a> {
w: &'a mut W,
}
impl<'a> B1977_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);
self.w
}
}
#[doc = "Reader of field `B1978`"]
pub type B1978_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1978`"]
pub struct B1978_W<'a> {
w: &'a mut W,
}
impl<'a> B1978_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26);
self.w
}
}
#[doc = "Reader of field `B1979`"]
pub type B1979_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1979`"]
pub struct B1979_W<'a> {
w: &'a mut W,
}
impl<'a> B1979_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u32) & 0x01) << 27);
self.w
}
}
#[doc = "Reader of field `B1980`"]
pub type B1980_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1980`"]
pub struct B1980_W<'a> {
w: &'a mut W,
}
impl<'a> B1980_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
self.w
}
}
#[doc = "Reader of field `B1981`"]
pub type B1981_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1981`"]
pub struct B1981_W<'a> {
w: &'a mut W,
}
impl<'a> B1981_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
self.w
}
}
#[doc = "Reader of field `B1982`"]
pub type B1982_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1982`"]
pub struct B1982_W<'a> {
w: &'a mut W,
}
impl<'a> B1982_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
self.w
}
}
#[doc = "Reader of field `B1983`"]
pub type B1983_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `B1983`"]
pub struct B1983_W<'a> {
w: &'a mut W,
}
impl<'a> B1983_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
self.w
}
}
impl R {
#[doc = "Bit 0 - B1952"]
#[inline(always)]
pub fn b1952(&self) -> B1952_R {
B1952_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - B1953"]
#[inline(always)]
pub fn b1953(&self) -> B1953_R {
B1953_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - B1954"]
#[inline(always)]
pub fn b1954(&self) -> B1954_R {
B1954_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - B1955"]
#[inline(always)]
pub fn b1955(&self) -> B1955_R {
B1955_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - B1956"]
#[inline(always)]
pub fn b1956(&self) -> B1956_R {
B1956_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - B1957"]
#[inline(always)]
pub fn b1957(&self) -> B1957_R {
B1957_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - B1958"]
#[inline(always)]
pub fn b1958(&self) -> B1958_R {
B1958_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - B1959"]
#[inline(always)]
pub fn b1959(&self) -> B1959_R {
B1959_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 8 - B1960"]
#[inline(always)]
pub fn b1960(&self) -> B1960_R {
B1960_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 9 - B1961"]
#[inline(always)]
pub fn b1961(&self) -> B1961_R {
B1961_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 10 - B1962"]
#[inline(always)]
pub fn b1962(&self) -> B1962_R {
B1962_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 11 - B1963"]
#[inline(always)]
pub fn b1963(&self) -> B1963_R {
B1963_R::new(((self.bits >> 11) & 0x01) != 0)
}
#[doc = "Bit 12 - B1964"]
#[inline(always)]
pub fn b1964(&self) -> B1964_R {
B1964_R::new(((self.bits >> 12) & 0x01) != 0)
}
#[doc = "Bit 13 - B1965"]
#[inline(always)]
pub fn b1965(&self) -> B1965_R {
B1965_R::new(((self.bits >> 13) & 0x01) != 0)
}
#[doc = "Bit 14 - B1966"]
#[inline(always)]
pub fn b1966(&self) -> B1966_R {
B1966_R::new(((self.bits >> 14) & 0x01) != 0)
}
#[doc = "Bit 15 - B1967"]
#[inline(always)]
pub fn b1967(&self) -> B1967_R {
B1967_R::new(((self.bits >> 15) & 0x01) != 0)
}
#[doc = "Bit 16 - B1968"]
#[inline(always)]
pub fn b1968(&self) -> B1968_R {
B1968_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 17 - B1969"]
#[inline(always)]
pub fn b1969(&self) -> B1969_R {
B1969_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 18 - B1970"]
#[inline(always)]
pub fn b1970(&self) -> B1970_R {
B1970_R::new(((self.bits >> 18) & 0x01) != 0)
}
#[doc = "Bit 19 - B1971"]
#[inline(always)]
pub fn b1971(&self) -> B1971_R {
B1971_R::new(((self.bits >> 19) & 0x01) != 0)
}
#[doc = "Bit 20 - B1972"]
#[inline(always)]
pub fn b1972(&self) -> B1972_R {
B1972_R::new(((self.bits >> 20) & 0x01) != 0)
}
#[doc = "Bit 21 - B1973"]
#[inline(always)]
pub fn b1973(&self) -> B1973_R {
B1973_R::new(((self.bits >> 21) & 0x01) != 0)
}
#[doc = "Bit 22 - B1974"]
#[inline(always)]
pub fn b1974(&self) -> B1974_R {
B1974_R::new(((self.bits >> 22) & 0x01) != 0)
}
#[doc = "Bit 23 - B1975"]
#[inline(always)]
pub fn b1975(&self) -> B1975_R {
B1975_R::new(((self.bits >> 23) & 0x01) != 0)
}
#[doc = "Bit 24 - B1976"]
#[inline(always)]
pub fn b1976(&self) -> B1976_R {
B1976_R::new(((self.bits >> 24) & 0x01) != 0)
}
#[doc = "Bit 25 - B1977"]
#[inline(always)]
pub fn b1977(&self) -> B1977_R {
B1977_R::new(((self.bits >> 25) & 0x01) != 0)
}
#[doc = "Bit 26 - B1978"]
#[inline(always)]
pub fn b1978(&self) -> B1978_R {
B1978_R::new(((self.bits >> 26) & 0x01) != 0)
}
#[doc = "Bit 27 - B1979"]
#[inline(always)]
pub fn b1979(&self) -> B1979_R {
B1979_R::new(((self.bits >> 27) & 0x01) != 0)
}
#[doc = "Bit 28 - B1980"]
#[inline(always)]
pub fn b1980(&self) -> B1980_R {
B1980_R::new(((self.bits >> 28) & 0x01) != 0)
}
#[doc = "Bit 29 - B1981"]
#[inline(always)]
pub fn b1981(&self) -> B1981_R {
B1981_R::new(((self.bits >> 29) & 0x01) != 0)
}
#[doc = "Bit 30 - B1982"]
#[inline(always)]
pub fn b1982(&self) -> B1982_R {
B1982_R::new(((self.bits >> 30) & 0x01) != 0)
}
#[doc = "Bit 31 - B1983"]
#[inline(always)]
pub fn b1983(&self) -> B1983_R {
B1983_R::new(((self.bits >> 31) & 0x01) != 0)
}
}
// NOTE(review): svd2rust-style generated writer accessors. Each method
// returns a single-bit field-writer proxy that mutably borrows the whole
// register writer `W`; the proxy's own methods perform the actual bit
// manipulation. Code intentionally left byte-identical to the generator
// output — do not hand-edit.
impl W {
    #[doc = "Bit 0 - B1952"]
    #[inline(always)]
    pub fn b1952(&mut self) -> B1952_W {
        B1952_W { w: self }
    }
    #[doc = "Bit 1 - B1953"]
    #[inline(always)]
    pub fn b1953(&mut self) -> B1953_W {
        B1953_W { w: self }
    }
    #[doc = "Bit 2 - B1954"]
    #[inline(always)]
    pub fn b1954(&mut self) -> B1954_W {
        B1954_W { w: self }
    }
    #[doc = "Bit 3 - B1955"]
    #[inline(always)]
    pub fn b1955(&mut self) -> B1955_W {
        B1955_W { w: self }
    }
    #[doc = "Bit 4 - B1956"]
    #[inline(always)]
    pub fn b1956(&mut self) -> B1956_W {
        B1956_W { w: self }
    }
    #[doc = "Bit 5 - B1957"]
    #[inline(always)]
    pub fn b1957(&mut self) -> B1957_W {
        B1957_W { w: self }
    }
    #[doc = "Bit 6 - B1958"]
    #[inline(always)]
    pub fn b1958(&mut self) -> B1958_W {
        B1958_W { w: self }
    }
    #[doc = "Bit 7 - B1959"]
    #[inline(always)]
    pub fn b1959(&mut self) -> B1959_W {
        B1959_W { w: self }
    }
    #[doc = "Bit 8 - B1960"]
    #[inline(always)]
    pub fn b1960(&mut self) -> B1960_W {
        B1960_W { w: self }
    }
    #[doc = "Bit 9 - B1961"]
    #[inline(always)]
    pub fn b1961(&mut self) -> B1961_W {
        B1961_W { w: self }
    }
    #[doc = "Bit 10 - B1962"]
    #[inline(always)]
    pub fn b1962(&mut self) -> B1962_W {
        B1962_W { w: self }
    }
    #[doc = "Bit 11 - B1963"]
    #[inline(always)]
    pub fn b1963(&mut self) -> B1963_W {
        B1963_W { w: self }
    }
    #[doc = "Bit 12 - B1964"]
    #[inline(always)]
    pub fn b1964(&mut self) -> B1964_W {
        B1964_W { w: self }
    }
    #[doc = "Bit 13 - B1965"]
    #[inline(always)]
    pub fn b1965(&mut self) -> B1965_W {
        B1965_W { w: self }
    }
    #[doc = "Bit 14 - B1966"]
    #[inline(always)]
    pub fn b1966(&mut self) -> B1966_W {
        B1966_W { w: self }
    }
    #[doc = "Bit 15 - B1967"]
    #[inline(always)]
    pub fn b1967(&mut self) -> B1967_W {
        B1967_W { w: self }
    }
    #[doc = "Bit 16 - B1968"]
    #[inline(always)]
    pub fn b1968(&mut self) -> B1968_W {
        B1968_W { w: self }
    }
    #[doc = "Bit 17 - B1969"]
    #[inline(always)]
    pub fn b1969(&mut self) -> B1969_W {
        B1969_W { w: self }
    }
    #[doc = "Bit 18 - B1970"]
    #[inline(always)]
    pub fn b1970(&mut self) -> B1970_W {
        B1970_W { w: self }
    }
    #[doc = "Bit 19 - B1971"]
    #[inline(always)]
    pub fn b1971(&mut self) -> B1971_W {
        B1971_W { w: self }
    }
    #[doc = "Bit 20 - B1972"]
    #[inline(always)]
    pub fn b1972(&mut self) -> B1972_W {
        B1972_W { w: self }
    }
    #[doc = "Bit 21 - B1973"]
    #[inline(always)]
    pub fn b1973(&mut self) -> B1973_W {
        B1973_W { w: self }
    }
    #[doc = "Bit 22 - B1974"]
    #[inline(always)]
    pub fn b1974(&mut self) -> B1974_W {
        B1974_W { w: self }
    }
    #[doc = "Bit 23 - B1975"]
    #[inline(always)]
    pub fn b1975(&mut self) -> B1975_W {
        B1975_W { w: self }
    }
    #[doc = "Bit 24 - B1976"]
    #[inline(always)]
    pub fn b1976(&mut self) -> B1976_W {
        B1976_W { w: self }
    }
    #[doc = "Bit 25 - B1977"]
    #[inline(always)]
    pub fn b1977(&mut self) -> B1977_W {
        B1977_W { w: self }
    }
    #[doc = "Bit 26 - B1978"]
    #[inline(always)]
    pub fn b1978(&mut self) -> B1978_W {
        B1978_W { w: self }
    }
    #[doc = "Bit 27 - B1979"]
    #[inline(always)]
    pub fn b1979(&mut self) -> B1979_W {
        B1979_W { w: self }
    }
    #[doc = "Bit 28 - B1980"]
    #[inline(always)]
    pub fn b1980(&mut self) -> B1980_W {
        B1980_W { w: self }
    }
    #[doc = "Bit 29 - B1981"]
    #[inline(always)]
    pub fn b1981(&mut self) -> B1981_W {
        B1981_W { w: self }
    }
    #[doc = "Bit 30 - B1982"]
    #[inline(always)]
    pub fn b1982(&mut self) -> B1982_W {
        B1982_W { w: self }
    }
    #[doc = "Bit 31 - B1983"]
    #[inline(always)]
    pub fn b1983(&mut self) -> B1983_W {
        B1983_W { w: self }
    }
}
|
pub use bevy::{prelude::*, reflect::TypeRegistry, utils::Duration};
/// System for loading scenes. Should be based on a save-game scene id.
///
/// NOTE(review): unimplemented stub — TODO fill in once the save-game
/// format is decided.
pub fn load_scene_system() {
}
/// System for saving scenes. Should open up a UI that shows past save
/// games, unless this is an autosave.
pub fn save_scene_system() {
    // Planned behaviour (from the original notes): make a scene for the new
    // game, separate from the master scene if there is one; call
    // update_tilemap(), which returns the generated tilemap name; record the
    // updated tilemap name and path as part of the scene, and use this when
    // showing the load-scene UI. TODO: implement.
}
// Design question: should scenes be coupled with tilemap systems? Probably
// not, because tilemaps will be more complicated — but scenes might alter
// tilemaps.
/// System for loading tilemaps with spritesheets.
///
/// NOTE(review): unimplemented stub.
pub fn load_tilemap() {
}
/// System for updating tilemaps when an event acts upon a tile.
pub fn update_tilemap() {
    // Planned behaviour (from the original notes): given some tile position,
    // update that tsx file's specific value with the new tile (e.g. once a
    // door is destroyed, the tilemap changes). The new tsx file then acts as
    // this save game's main tsx file rather than the default one. The
    // original tsx file is kept for reference: CurrentTileMap vs
    // MasterTileMap, where CurrentTileMap is named after the current save
    // game so it is not overwritten every time a new game is made.
    // TODO: implement.
}
#[cfg(test)]
mod tests {
    // Placeholder smoke test confirming the test harness runs.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
|
extern crate interpolate;
extern crate num;
pub mod vector;
pub mod rect;
pub mod circle;
pub mod grid;
pub mod wrapping_grid;
pub use vector::Vec2;
pub use rect::Rect;
pub use circle::Circle;
pub use grid::Grid;
pub use wrapping_grid::WrappingGrid;
|
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use lever::table::prelude::*;
use std::sync::Arc;
/// Maximum number of worker threads exercised by the parameterized benchmark.
const MAX_THREADS: usize = 8;
/// Operation counts used as the benchmark's parameter axis.
// `'static` is implicit on references in `const` items, so it is omitted
// (clippy::redundant_static_lifetimes).
const OP_RANGES: &[usize] = &[100, 300, 500, 700, 1000, 3000, 5000];
/// Performs `op_count` lookups of `key` against `lotable`, discarding the
/// results.
///
/// This is the measured body of the pure-read benchmark; it exists only to
/// generate read load, so the returned values are intentionally ignored.
fn pure_read(lotable: Arc<LOTable<String, u64>>, key: String, op_count: usize) {
    // Ranges are already iterators — the previous `.into_iter()` call was a
    // no-op (clippy::useless_conversion).
    (0..op_count).for_each(|_| {
        let _ = lotable.get(&key);
    })
}
/// Criterion benchmark: parameterized pure-read throughput.
///
/// For each thread count and each operation count in `OP_RANGES`, builds a
/// rayon pool of `tid` threads and measures `pure_read` inside it.
fn bench_pure_reads(c: &mut Criterion) {
    let lotable: Arc<LOTable<String, u64>> = Arc::new(LOTable::new());
    let key: String = "CORE".into();
    let _ = lotable.insert(key.clone(), 123_456);
    let mut group = c.benchmark_group("parameterized_read");
    // NOTE(review): `1..MAX_THREADS` is exclusive, so only 1..=7 threads are
    // benchmarked — confirm whether `1..=MAX_THREADS` was intended.
    (1..MAX_THREADS).for_each(|tid| {
        for ops in OP_RANGES {
            group.throughput(Throughput::Elements((tid * *ops) as u64));
            group.bench_with_input(BenchmarkId::new(format!("{}", tid), ops), ops, |b, &i| {
                // A fresh pool per (threads, ops) pair; pool construction
                // happens outside the measured `b.iter` closure.
                let pool = rayon::ThreadPoolBuilder::new()
                    .num_threads(tid)
                    .build()
                    .unwrap();
                pool.install(|| b.iter(|| pure_read(lotable.clone(), key.clone(), i)));
            });
        }
    });
}
////////////////////////////////
//
// fn rw_pareto(lotable: Arc<LOTable<String, u64>>, key: String, dist: f64, thread_count: u64) {
// let mut threads = vec![];
//
// for thread_no in 0..thread_count {
// let lotable = lotable.clone();
// let key = key.clone();
//
// let t = std::thread::Builder::new()
// .name(format!("t_{}", thread_no))
// .spawn(move || {
// if dist < 0.8_f64 {
// lotable.get(&key);
// } else {
// let data = lotable.get(&key).unwrap();
// let _ = lotable.insert(key, data + 1);
// }
// })
// .unwrap();
//
// threads.push(t);
// }
//
// for t in threads.into_iter() {
// t.join().unwrap();
// }
// }
//
// fn bench_rw_pareto(c: &mut Criterion) {
// let lotable = {
// let table: LOTable<String, u64> = LOTable::new();
// table.insert("data".into(), 1_u64);
// Arc::new(table)
// };
// let key: String = "CORE".into();
// let _ = lotable.insert(key.clone(), 123_456);
//
// let threads = 8;
//
// let mut group = c.benchmark_group("lotable_threaded_join_rw_pareto_throughput");
// group.throughput(Throughput::Elements(threads as u64));
// group.bench_function("rw_pareto", move |b| {
// b.iter_batched(
// || {
// let dist: f64 =
// 1. / thread_rng().sample(Pareto::new(1., 5.0_f64.log(4.0_f64)).unwrap());
// (lotable.clone(), key.clone(), dist)
// },
// |vars| rw_pareto(vars.0, vars.1, vars.2, threads),
// BatchSize::SmallInput,
// )
// });
// }
//
// ////////////////////////////////
//
// fn pure_writes(lotable: Arc<LOTable<String, u64>>, key: String, thread_count: u64) {
// let mut threads = vec![];
//
// for thread_no in 0..thread_count {
// let lotable = lotable.clone();
// let key = key.clone();
//
// let t = std::thread::Builder::new()
// .name(format!("t_{}", thread_no))
// .spawn(move || {
// let _ = lotable.insert(key, thread_no);
// })
// .unwrap();
//
// threads.push(t);
// }
//
// for t in threads.into_iter() {
// t.join().unwrap();
// }
// }
//
// fn bench_lotable_pure_writes(c: &mut Criterion) {
// let lotable = {
// let table: LOTable<String, u64> = LOTable::new();
// table.insert("data".into(), 1_u64);
// Arc::new(table)
// };
// let key: String = "CORE".into();
// let _ = lotable.insert(key.clone(), 123_456);
//
// let threads = 8;
//
// let mut group = c.benchmark_group("lotable_threaded_join_write_throughput");
// group.throughput(Throughput::Elements(threads as u64));
// group.bench_function("pure writes", move |b| {
// b.iter_batched(
// || (lotable.clone(), key.clone()),
// |vars| pure_writes(vars.0, vars.1, threads),
// BatchSize::SmallInput,
// )
// });
// }
// Register the benchmark entry points with criterion's harness; the extra
// targets stay disabled alongside their commented-out definitions above.
criterion_group! {
    name = parameterized_benches;
    config = Criterion::default();
    targets = bench_pure_reads
    // targets = bench_pure_reads, bench_rw_pareto, bench_lotable_pure_writes
}
criterion_main!(parameterized_benches);
|
use bytes::Buf;
use futures::{Async, Poll};
use http::HeaderMap;
use super::internal::{FullDataArg, FullDataRet};
/// This trait represents a streaming body of a `Request` or `Response`.
///
/// The built-in implementation of this trait is [`Body`](::Body), in case you
/// don't need to customize a send stream for your own application.
pub trait Payload: Send + 'static {
    /// A buffer of bytes representing a single chunk of a body.
    type Data: Buf + Send;
    /// The error type of this stream.
    // NOTE(review): the bare (pre-`dyn`) trait-object syntax here and the
    // anonymous `FullDataArg` parameter below are 2015-edition Rust; leave
    // as-is unless the crate migrates editions.
    type Error: Into<Box<::std::error::Error + Send + Sync>>;
    /// Poll for a `Data` buffer.
    ///
    /// Similar to `Stream::poll_next`, this yields `Some(Data)` until
    /// the body ends, when it yields `None`.
    fn poll_data(&mut self) -> Poll<Option<Self::Data>, Self::Error>;
    /// Poll for an optional **single** `HeaderMap` of trailers.
    ///
    /// This should **only** be called after `poll_data` has ended.
    ///
    /// Note: Trailers aren't currently used for HTTP/1, only for HTTP/2.
    fn poll_trailers(&mut self) -> Poll<Option<HeaderMap>, Self::Error> {
        // Default: no trailers.
        Ok(Async::Ready(None))
    }
    /// A hint that the `Body` is complete, and doesn't need to be polled more.
    ///
    /// This can be useful to determine if the there is any body or trailers
    /// without having to poll. An empty `Body` could return `true` and hyper
    /// would be able to know that only the headers need to be sent. Or, it can
    /// also be checked after each `poll_data` call, to allow hyper to try to
    /// end the underlying stream with the last chunk, instead of needing to
    /// send an extra `DATA` frame just to mark the stream as finished.
    ///
    /// As a hint, it is used to try to optimize, and thus is OK for a default
    /// implementation to return `false`.
    fn is_end_stream(&self) -> bool {
        false
    }
    /// Return a length of the total bytes that will be streamed, if known.
    ///
    /// If an exact size of bytes is known, this would allow hyper to send a
    /// `Content-Length` header automatically, not needing to fall back to
    /// `Transfer-Encoding: chunked`.
    ///
    /// This does not need to be kept updated after polls, it will only be
    /// called once to create the headers.
    fn content_length(&self) -> Option<u64> {
        None
    }
    // This API is unstable, and is impossible to use outside of hyper. Some
    // form of it may become stable in a later version.
    //
    // The only thing a user *could* do is reference the method, but DON'T
    // DO THAT! :)
    #[doc(hidden)]
    fn __hyper_full_data(&mut self, FullDataArg) -> FullDataRet<Self::Data> {
        FullDataRet(None)
    }
}
/// Blanket delegation: a boxed payload is itself a payload.
///
/// Every method forwards to the inner `E` via `(**self)`, so boxing never
/// changes behavior (including the unstable `__hyper_full_data` hook).
impl<E: Payload> Payload for Box<E> {
    type Data = E::Data;
    type Error = E::Error;
    fn poll_data(&mut self) -> Poll<Option<Self::Data>, Self::Error> {
        (**self).poll_data()
    }
    fn poll_trailers(&mut self) -> Poll<Option<HeaderMap>, Self::Error> {
        (**self).poll_trailers()
    }
    fn is_end_stream(&self) -> bool {
        (**self).is_end_stream()
    }
    fn content_length(&self) -> Option<u64> {
        (**self).content_length()
    }
    #[doc(hidden)]
    fn __hyper_full_data(&mut self, arg: FullDataArg) -> FullDataRet<Self::Data> {
        (**self).__hyper_full_data(arg)
    }
}
|
use std::error::Error;
use crate::ecs::Ecs;
/// A type-erased system: a boxed closure over the ECS plus caller-supplied
/// additional data of type `AD`.
type BoxedSystem<AD> = Box<dyn FnMut(&mut Ecs, &mut AD) -> SystemResult>;
/// Systems report failure via a boxed error.
pub type SystemResult = Result<(), Box<dyn Error>>;
/// An ordered collection of systems, executed together by `step`.
pub struct SystemBundle<AD> {
    // Systems run in insertion order.
    systems: Vec<BoxedSystem<AD>>,
}
impl<AD> SystemBundle<AD> {
    /// Appends `system` to the bundle after converting it into the boxed,
    /// type-erased form.
    pub fn add_system<T, S: IntoSystem<T, AD>>(&mut self, system: S) {
        let boxed = system.into_system();
        self.systems.push(boxed);
    }
    /// Runs every registered system once, in insertion order, stopping at
    /// the first system that returns an error.
    pub fn step(&mut self, ecs: &mut Ecs, additional_data: &mut AD) -> Result<(), Box<dyn Error>> {
        self.systems
            .iter_mut()
            .try_for_each(|run| run(ecs, additional_data))
    }
}
impl<T> Default for SystemBundle<T> {
fn default() -> Self {
Self { systems: vec![] }
}
}
/// Conversion of plain functions/closures into boxed systems.
///
/// The otherwise-unused `T` parameter is a disambiguation marker: each
/// blanket impl below picks a distinct tuple shape for `T`, so the impls do
/// not overlap even though they all apply to closure types.
pub trait IntoSystem<T, AD> {
    fn into_system(self) -> BoxedSystem<AD>;
}
// Fallible system that takes additional data: already the canonical shape,
// so it is boxed directly.
impl<F, AD> IntoSystem<F, AD> for F
where
    F: 'static + FnMut(&mut Ecs, &mut AD) -> SystemResult,
{
    fn into_system(self) -> BoxedSystem<AD> {
        Box::new(self)
    }
}
// Fallible system without additional data: adapted by ignoring the `()`
// data argument.
impl<F> IntoSystem<(F, (), ()), ()> for F
where
    F: 'static + FnMut(&mut Ecs) -> SystemResult,
{
    fn into_system(mut self) -> BoxedSystem<()> {
        Box::new(move |ecs: &mut Ecs, _: &mut ()| (self)(ecs))
    }
}
// Infallible system with additional data: adapted by returning `Ok(())`.
impl<F, AD> IntoSystem<(F,), AD> for F
where
    F: 'static + FnMut(&mut Ecs, &mut AD),
{
    fn into_system(mut self) -> BoxedSystem<AD> {
        Box::new(move |ecs: &mut Ecs, additional_data: &mut AD| {
            (self)(ecs, additional_data);
            Ok(())
        })
    }
}
// Infallible system without additional data: both adaptations combined.
impl<F> IntoSystem<(F, ()), ()> for F
where
    F: 'static + FnMut(&mut Ecs),
{
    fn into_system(mut self) -> BoxedSystem<()> {
        Box::new(move |ecs: &mut Ecs, _: &mut ()| {
            (self)(ecs);
            Ok(())
        })
    }
}
#[cfg(test)]
mod tests {
    use std::collections::HashSet;
    use std::fmt::{Display, Formatter};
    use super::*;
    // Error type used to exercise failing systems.
    #[derive(Debug)]
    struct AtrociousFailure;
    impl Display for AtrociousFailure {
        fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
            write!(f, "ATROCIOUS ERROR")
        }
    }
    impl Error for AtrociousFailure {}
    // A failing system does not affect another system invoked afterwards:
    // each boxed system stands alone when called directly.
    #[test]
    fn failing_system() {
        let mut ecs = Ecs::default();
        let mut system = (|_: &mut Ecs| Err(Box::new(AtrociousFailure) as _)).into_system();
        let mut second_system = (|_: &mut Ecs| {}).into_system();
        let result = (system)(&mut ecs, &mut ());
        let second_result = (second_system)(&mut ecs, &mut ());
        assert!(result.is_err());
        assert!(second_result.is_ok());
    }
    // Compile-level check: a fallible no-data closure converts.
    #[test]
    fn system_into_system() {
        let _ = (|_: &mut Ecs| Ok(())).into_system();
    }
    #[test]
    fn system_bundle_add() {
        let mut system_bundle = SystemBundle::default();
        system_bundle.add_system(|_: &mut Ecs| Ok(()));
        assert_eq!(system_bundle.systems.len(), 1)
    }
    // Systems run in insertion order: +35 then -6 applied to both entities.
    #[test]
    fn system_bundle_step() {
        #[derive(PartialEq, Debug, Eq, Hash, Copy, Clone)]
        struct Value(i32);
        struct OtherComponent;
        let mut ecs = Ecs::default();
        ecs.insert((Value(12),));
        ecs.insert((Value(18), OtherComponent));
        let mut system_bundle = SystemBundle::default();
        system_bundle.add_system(|ecs: &mut Ecs| {
            for (_, (mut v,)) in ecs.query::<(&mut Value,)>() {
                v.0 += 35;
            }
            Ok(())
        });
        system_bundle.add_system(|ecs: &mut Ecs| {
            for (_, (mut v,)) in ecs.query::<(&mut Value,)>() {
                v.0 -= 6;
            }
            Ok(())
        });
        let _ = system_bundle.step(&mut ecs, &mut ());
        let query_result = ecs.query::<(&Value,)>();
        let result_set: HashSet<Value> = query_result.map(|result| *result.1 .0).collect();
        // 12 + 35 - 6 = 41 and 18 + 35 - 6 = 47.
        assert!(result_set.contains(&Value(41)));
        assert!(result_set.contains(&Value(47)));
    }
    // Additional data is threaded through systems that declare it; two
    // steps means two increments.
    #[test]
    fn system_bundle_with_additional_data() {
        struct ComponentA;
        struct ComponentB;
        struct AdditionalData {
            some_value: i32,
        }
        let mut additional_data = AdditionalData { some_value: 0 };
        let mut ecs = Ecs::default();
        ecs.insert((ComponentA, ComponentB));
        ecs.insert((ComponentB,));
        let mut system_bundle = SystemBundle::default();
        system_bundle.add_system(|_ecs: &mut Ecs, additional_data: &mut AdditionalData| {
            additional_data.some_value += 1
        });
        let _ = system_bundle.step(&mut ecs, &mut additional_data);
        let _ = system_bundle.step(&mut ecs, &mut additional_data);
        assert_eq!(additional_data.some_value, 2);
    }
}
|
// Macro-generated integration tests. NOTE(review): `test_stdout!` is
// defined elsewhere in the crate; presumably each invocation expands to a
// test asserting that the child process prints exactly the given string —
// confirm against the macro definition.
test_stdout!(
    without_environment_runs_function_in_child_process,
    "from_fun\n"
);
test_stdout!(
    with_environment_runs_function_in_child_process,
    "from_environment\n"
);
|
extern crate sdl2;
use crate::cpu::Cpu;
use crate::png;
use crate::ppu;
use crate::ppu::Ppu;
use crate::record;
use std::time::Instant;
use sdl2::audio::AudioSpecDesired;
use sdl2::controller::Button;
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::render::Canvas;
use sdl2::video::Window;
// NES visible frame: 256 x 240 pixels.
//
// These are compile-time constants; immutable primitive `static`s carry no
// benefit over `const` here (clippy prefers `const` for such items).
pub const DISP_WIDTH: u32 = 256;
pub const DISP_HEIGHT: u32 = 240;
// Integer up-scaling factors per emulated pixel.
// Pixel aspect ratio is supposed to be 8:7? Approximated as 6:5 blocks.
const BLOCK_W: usize = 6;
const BLOCK_H: usize = 5;
// Window dimensions in physical pixels.
const SCREEN_W: u32 = DISP_WIDTH * BLOCK_W as u32;
const SCREEN_H: u32 = DISP_HEIGHT * BLOCK_H as u32;
/// Translates the PPU's color struct into an SDL color (opaque alpha).
fn convert_color(color: ppu::Color) -> Color {
    Color::RGB(color.r, color.g, color.b)
}
/// Draws the PPU's frame onto the SDL canvas and presents it.
///
/// Only pixels that differ from the previously rendered frame are repainted
/// (dirty-pixel redraw); the canvas is intentionally never cleared, so
/// unchanged pixels keep their color from the prior frame. Each emulated
/// pixel is drawn as a `BLOCK_W` x `BLOCK_H` rectangle.
fn render(canvas: &mut Canvas<Window>, ppu: &mut Ppu) {
    let bg_color = Color::RGB(0, 0, 0);
    canvas.set_draw_color(bg_color);
    for y in 0..(DISP_HEIGHT as usize) {
        for x in 0..(DISP_WIDTH as usize) {
            let index = y * DISP_WIDTH as usize + x;
            if ppu.canvas[index] != ppu.prev_canvas[index] {
                let fg_color = ppu.canvas[index];
                canvas.set_draw_color(convert_color(fg_color));
                canvas
                    .fill_rect(Rect::new(
                        (x * BLOCK_W) as i32,
                        (y * BLOCK_H) as i32,
                        BLOCK_W as u32,
                        BLOCK_H as u32,
                    ))
                    .unwrap();
            }
        }
    }
    // `clone_from` reuses prev_canvas's existing allocation instead of
    // allocating a fresh buffer every frame (was `= ppu.canvas.clone()`).
    ppu.prev_canvas.clone_from(&ppu.canvas);
    canvas.present();
}
/// Main emulator loop.
///
/// Sets up SDL video, audio, and game-controller subsystems, then loops:
/// polls input roughly every 16 ms, renders a finished PPU frame when one
/// is ready, paces wall time against the amount of queued audio, and steps
/// the CPU once per iteration. Returns when the window is closed or Escape
/// is pressed.
pub fn execute(cpu: &mut Cpu) {
    let sdl_context = sdl2::init().unwrap();
    let video_subsystem = sdl_context.video().unwrap();
    let audio_subsystem = sdl_context.audio().unwrap();
    let controller_subsystem = sdl_context.game_controller().unwrap();
    let desired_spec = AudioSpecDesired {
        freq: Some(44_100),
        channels: Some(1), // mono
        samples: Some(4),  // default sample size
    };
    let audio_queue = audio_subsystem
        .open_queue::<f32, _>(None, &desired_spec)
        .unwrap();
    audio_queue.resume();
    // Open the first game controller, if any; kept alive for the whole loop.
    let _gamepad = controller_subsystem.open(0);
    println!(
        "Game controllers: {}",
        controller_subsystem.num_joysticks().unwrap()
    );
    let window = video_subsystem
        .window("RNES Emulator", SCREEN_W, SCREEN_H)
        .position_centered()
        .build()
        .unwrap();
    let mut canvas = window.into_canvas().build().unwrap();
    canvas.set_draw_color(Color::RGB(0, 0, 0));
    canvas.clear();
    canvas.present();
    // Some(encoder) while GIF recording is active (toggled by Guide button).
    let mut gif_encoder: Option<record::GifEncoder> = None;
    let mut frame_tick = Instant::now();
    let mut input_poll = Instant::now();
    let mut event_pump = sdl_context.event_pump().unwrap();
    'running: loop {
        let msec_since_poll = input_poll.elapsed().as_millis();
        // Approx 16 ms between frames
        if msec_since_poll > 16 {
            input_poll = Instant::now();
            for event in event_pump.poll_iter() {
                //println!("{:?}", event);
                match event {
                    // Controller buttons map onto the emulated NES gamepad;
                    // shoulder buttons save/load state, Guide toggles GIF
                    // recording.
                    Event::ControllerButtonDown { button, .. } => match button {
                        Button::DPadLeft => cpu.gamepad.left = true,
                        Button::DPadRight => cpu.gamepad.right = true,
                        Button::DPadUp => cpu.gamepad.up = true,
                        Button::DPadDown => cpu.gamepad.down = true,
                        Button::Back => cpu.gamepad.select = true,
                        Button::Start => cpu.gamepad.start = true,
                        Button::A => cpu.gamepad.a = true,
                        Button::X => cpu.gamepad.b = true,
                        Button::LeftShoulder => {
                            cpu.save_state();
                        }
                        Button::RightShoulder => {
                            cpu.load_state();
                        }
                        Button::Guide => {
                            // Toggle: start a new encoder if idle, drop the
                            // current one if recording.
                            let res = match gif_encoder {
                                Some(_) => None,
                                None => Some(record::new_gif_encoder(
                                    DISP_WIDTH as u16,
                                    DISP_HEIGHT as u16,
                                )),
                            };
                            gif_encoder = res;
                        }
                        _ => println!("{:?}", button),
                    },
                    Event::ControllerButtonUp { button, .. } => match button {
                        Button::DPadLeft => cpu.gamepad.left = false,
                        Button::DPadRight => cpu.gamepad.right = false,
                        Button::DPadUp => cpu.gamepad.up = false,
                        Button::DPadDown => cpu.gamepad.down = false,
                        Button::Back => cpu.gamepad.select = false,
                        Button::Start => cpu.gamepad.start = false,
                        Button::A => cpu.gamepad.a = false,
                        Button::X => cpu.gamepad.b = false,
                        _ => println!("{:?}", button),
                    },
                    Event::Quit { .. }
                    | Event::KeyDown {
                        keycode: Some(Keycode::Escape),
                        ..
                    } => break 'running,
                    // Keyboard: number keys toggle debug features, letter
                    // keys dump internal state or act as gamepad buttons.
                    Event::KeyDown {
                        keycode: Some(kc), ..
                    } => match kc {
                        Keycode::Num1 => cpu.tracing = !cpu.tracing,
                        Keycode::Num2 => png::write_png_frame(&cpu.ppu),
                        Keycode::Num5 => cpu.ppu.use_ntsc = !cpu.ppu.use_ntsc,
                        Keycode::A => cpu.ppu.dump_pattern_tables(),
                        Keycode::O => cpu.ppu.dump_oam(),
                        Keycode::E => cpu.ppu.dump_nametables(),
                        Keycode::U => cpu.gamepad.dump_buttons(),
                        Keycode::Left => cpu.gamepad.left = true,
                        Keycode::Right => cpu.gamepad.right = true,
                        Keycode::Up => cpu.gamepad.up = true,
                        Keycode::Down => cpu.gamepad.down = true,
                        Keycode::Space => cpu.gamepad.select = true,
                        Keycode::Return => cpu.gamepad.start = true,
                        Keycode::J => cpu.gamepad.a = true,
                        Keycode::Q => cpu.gamepad.b = true,
                        _ => (),
                    },
                    Event::KeyUp {
                        keycode: Some(kc), ..
                    } => match kc {
                        Keycode::Left => cpu.gamepad.left = false,
                        Keycode::Right => cpu.gamepad.right = false,
                        Keycode::Up => cpu.gamepad.up = false,
                        Keycode::Down => cpu.gamepad.down = false,
                        Keycode::Space => cpu.gamepad.select = false,
                        Keycode::Return => cpu.gamepad.start = false,
                        Keycode::J => cpu.gamepad.a = false,
                        Keycode::Q => cpu.gamepad.b = false,
                        _ => (),
                    },
                    _ => {}
                }
            }
        }
        if cpu.ppu.updated {
            render(&mut canvas, &mut cpu.ppu);
            // While recording, write every 4th frame to the GIF.
            match &mut gif_encoder {
                Some(encoder) => {
                    if cpu.ppu.frame % 4 == 0 {
                        record::write_gif_frame(encoder, &cpu.ppu);
                    }
                }
                None => (),
            }
            // Pace against audio: `delay` is how long (µs) the drained
            // samples should take to play at 44.1 kHz.
            let buffer = cpu.apu.drain();
            let delay = buffer.len() as u128 * 1_000_000 / 44_100;
            let elapsed = frame_tick.elapsed();
            if elapsed.as_micros() > delay {
                // Frame overran its audio budget — report and dump PPU state.
                println!("Frame took: {} msec", elapsed.as_millis());
                cpu.ppu.dump();
            } else {
                // NOTE(review): busy-wait spin until the audio budget is
                // used up — burns a core; a sleep-based pace may be kinder.
                while frame_tick.elapsed().as_micros() < delay {}
            }
            audio_queue.queue(&buffer);
            frame_tick = Instant::now();
            cpu.ppu.updated = false;
        }
        cpu.cycle();
    }
}
|
// NOTE(review): svd2rust-style generated read accessors for the DMA C13ISR
// status register; each method extracts one flag bit. Code intentionally
// left byte-identical to the generator output — do not hand-edit.
#[doc = "Reader of register C13ISR"]
pub type R = crate::R<u32, super::C13ISR>;
#[doc = "Reader of field `TEIF13`"]
pub type TEIF13_R = crate::R<bool, bool>;
#[doc = "Reader of field `CTCIF13`"]
pub type CTCIF13_R = crate::R<bool, bool>;
#[doc = "Reader of field `BRTIF13`"]
pub type BRTIF13_R = crate::R<bool, bool>;
#[doc = "Reader of field `BTIF13`"]
pub type BTIF13_R = crate::R<bool, bool>;
#[doc = "Reader of field `TCIF13`"]
pub type TCIF13_R = crate::R<bool, bool>;
#[doc = "Reader of field `CRQA13`"]
pub type CRQA13_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bit 0 - Channel x transfer error interrupt flag This bit is set by hardware. It is cleared by software writing 1 to the corresponding bit in the DMA_IFCRy register."]
    #[inline(always)]
    pub fn teif13(&self) -> TEIF13_R {
        TEIF13_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Channel x Channel Transfer Complete interrupt flag This bit is set by hardware. It is cleared by software writing 1 to the corresponding bit in the DMA_IFCRy register. CTC is set when the last block was transferred and the channel has been automatically disabled. CTC is also set when the channel is suspended, as a result of writing EN bit to 0."]
    #[inline(always)]
    pub fn ctcif13(&self) -> CTCIF13_R {
        CTCIF13_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Channel x block repeat transfer complete interrupt flag This bit is set by hardware. It is cleared by software writing 1 to the corresponding bit in the DMA_IFCRy register."]
    #[inline(always)]
    pub fn brtif13(&self) -> BRTIF13_R {
        BRTIF13_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Channel x block transfer complete interrupt flag This bit is set by hardware. It is cleared by software writing 1 to the corresponding bit in the DMA_IFCRy register."]
    #[inline(always)]
    pub fn btif13(&self) -> BTIF13_R {
        BTIF13_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - channel x buffer transfer complete"]
    #[inline(always)]
    pub fn tcif13(&self) -> TCIF13_R {
        TCIF13_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 16 - channel x request active flag"]
    #[inline(always)]
    pub fn crqa13(&self) -> CRQA13_R {
        CRQA13_R::new(((self.bits >> 16) & 0x01) != 0)
    }
}
|
/// Prints the constant `0xff` (255) to stdout.
///
/// Fix: `println!` requires a format-string literal as its first argument;
/// `println!(0xff)` does not compile.
fn helloworld() {
    println!("{}", 0xff);
}
// Copyright 2020-2021, The Tremor Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! # File Offramp
//!
//! Writes events to a file, one event per line
//!
//! ## Configuration
//!
//! See [Config](struct.Config.html) for details.
#![cfg(not(tarpaulin_include))]
use crate::connectors::gcp::{
auth::{self, GcsClient},
storage,
};
use crate::connectors::qos::{self, QoSFacilities, SinkQoS};
use crate::sink::prelude::*;
use halfbrown::HashMap;
use http::HeaderMap;
use tremor_pipeline::{EventIdGenerator, OpMeta};
use tremor_value::Value;
/// Offramp that executes Google Cloud Storage commands carried in events.
pub struct GoogleCloudStorage {
    // Lazily (re)created HTTP client for the GCS JSON API; `None` until
    // (re)connected in `on_event`.
    remote: Option<GcsClient>,
    // True while the remote endpoint is considered unreachable.
    is_down: bool,
    // Probes endpoint health for circuit-breaker signalling in `on_signal`.
    qos_facility: Box<dyn SinkQoS>,
    // Where command responses are sent when the offramp is linked.
    reply_channel: Option<Sender<sink::Reply>>,
    is_linked: bool,
    // Applied around codec decode/encode for downloads/uploads.
    preprocessors: Preprocessors,
    postprocessors: Postprocessors,
    sink_url: TremorUrl,
    // Seeded with a fake uid in `from_config`, replaced in `init`.
    event_id_gen: EventIdGenerator,
}
/// One GCS operation parsed from an event's `command` field.
enum StorageCommand {
    // (project_id, bucket)
    Create(String, String),
    // (bucket, object, body)
    Add(String, String, Value<'static>),
    // (bucket, object)
    RemoveObject(String, String),
    // (project_id)
    ListBuckets(String),
    // (bucket, object) — fetches object metadata/content via get_object.
    Fetch(String, String),
    // (bucket, object) — downloads and decodes the object bytes.
    Download(String, String),
    // (bucket)
    RemoveBucket(String),
    // (bucket)
    ListObjects(String),
    // Unrecognized command name; rejected in `on_event`.
    Unknown,
}
impl std::fmt::Display for StorageCommand {
    /// Formats the command as its wire-level command name.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let name = match self {
            StorageCommand::Create(..) => "create_bucket",
            StorageCommand::Add(..) => "upload_object",
            StorageCommand::RemoveObject(..) => "remove_object",
            StorageCommand::ListBuckets(..) => "list_buckets",
            StorageCommand::Fetch(..) => "get_object",
            StorageCommand::Download(..) => "download_object",
            StorageCommand::RemoveBucket(..) => "delete_bucket",
            StorageCommand::ListObjects(..) => "list_objects",
            StorageCommand::Unknown => "Unknown",
        };
        write!(f, "{}", name)
    }
}
impl offramp::Impl for GoogleCloudStorage {
    /// Builds the offramp wrapped in a `SinkManager`, with a freshly
    /// authenticated GCS JSON API client.
    ///
    /// The config argument is currently ignored.
    fn from_config(_config: &Option<OpConfig>) -> Result<Box<dyn Offramp>> {
        let headers = HeaderMap::new();
        let remote = Some(auth::json_api_client(&headers)?);
        let hostport = "storage.googleapis.com:443";
        Ok(SinkManager::new_box(Self {
            remote,
            is_down: false,
            qos_facility: Box::new(QoSFacilities::recoverable(hostport.to_string())),
            reply_channel: None,
            is_linked: false,
            preprocessors: vec![],
            postprocessors: vec![],
            sink_url: TremorUrl::from_offramp_id("gcs")?,
            event_id_gen: EventIdGenerator::new(0), // Fake ID overwritten in init
        }))
    }
}
/// Extracts the string field `field_name` from `o` as an owned `String`,
/// or an error message naming the missing field.
fn parse_arg(field_name: &'static str, o: &Value) -> std::result::Result<String, String> {
    match o.get_str(field_name) {
        Some(s) => Ok(s.to_string()),
        None => Err(format!("Invalid Command, expected `{}` field", field_name)),
    }
}
/// Parses a storage command out of an event `value`.
///
/// The `command` field selects the operation; remaining fields are
/// command-specific. An unrecognized command name maps to
/// `StorageCommand::Unknown` (rejected later in `on_event`) rather than
/// failing here; a missing `command` field is an error.
fn parse_command(value: &Value) -> Result<StorageCommand> {
    let cmd_name: &str = value
        .get_str("command")
        .ok_or("Invalid Command, expected `command` field")?;
    let command = match cmd_name {
        "fetch" => StorageCommand::Fetch(parse_arg("bucket", value)?, parse_arg("object", value)?),
        "list_buckets" => StorageCommand::ListBuckets(parse_arg("project_id", value)?),
        "list_objects" => StorageCommand::ListObjects(parse_arg("bucket", value)?),
        "upload_object" => StorageCommand::Add(
            parse_arg("bucket", value)?,
            parse_arg("object", value)?,
            // The body is detached from the event's lifetime so the command
            // can own it.
            value
                .get("body")
                .map(Value::clone_static)
                .ok_or("Invalid Command, expected `body` field")?,
        ),
        "remove_object" => {
            StorageCommand::RemoveObject(parse_arg("bucket", value)?, parse_arg("object", value)?)
        }
        "create_bucket" => {
            StorageCommand::Create(parse_arg("project_id", value)?, parse_arg("bucket", value)?)
        }
        "remove_bucket" => StorageCommand::RemoveBucket(parse_arg("bucket", value)?),
        "download_object" => {
            StorageCommand::Download(parse_arg("bucket", value)?, parse_arg("object", value)?)
        }
        _ => StorageCommand::Unknown,
    };
    Ok(command)
}
#[async_trait::async_trait]
impl Sink for GoogleCloudStorage {
    async fn terminate(&mut self) {}
    /// Handles one event: each value in the (possibly batched) event is
    /// parsed as a storage command and executed against GCS. When the
    /// offramp is linked, the collected responses are emitted as a single
    /// event on the OUT port. The event is acked on success.
    #[allow(clippy::too_many_lines)]
    async fn on_event(
        &mut self,
        _input: &str,
        codec: &mut dyn Codec,
        _codec_map: &HashMap<String, Box<dyn Codec>>,
        mut event: Event,
    ) -> ResultVec {
        // Reconnect lazily if the client is missing.
        let remote = if let Some(remote) = &self.remote {
            remote
        } else {
            self.remote = Some(auth::json_api_client(&HeaderMap::new())?);
            let remote = self.remote.as_ref().ok_or("Client error!")?;
            remote
            // TODO - Qos checks
        };
        let mut response = Vec::new();
        // Preserved so the reply can be correlated to the request.
        let maybe_correlation = event.correlation_meta();
        for value in event.value_iter() {
            let command = parse_command(value)?;
            match command {
                StorageCommand::Fetch(bucket_name, object_name) => {
                    response.push(make_command_response(
                        "fetch",
                        storage::get_object(remote, &bucket_name, &object_name).await?,
                    ));
                }
                StorageCommand::ListBuckets(project_id) => {
                    response.push(make_command_response(
                        "list_buckets",
                        storage::list_buckets(remote, &project_id).await?,
                    ));
                }
                StorageCommand::ListObjects(bucket_name) => {
                    response.push(make_command_response(
                        "list_objects",
                        storage::list_objects(remote, &bucket_name).await?,
                    ));
                }
                StorageCommand::Add(bucket_name, object, body) => {
                    response.push(make_command_response(
                        "upload_object",
                        upload_object(
                            remote,
                            &bucket_name,
                            &object,
                            &body,
                            codec,
                            event.ingest_ns,
                            &mut self.postprocessors,
                        )
                        .await?,
                    ));
                }
                StorageCommand::RemoveObject(bucket_name, object) => {
                    response.push(make_command_response(
                        "remove_object",
                        storage::delete_object(remote, &bucket_name, &object).await?,
                    ));
                }
                StorageCommand::Create(project_id, bucket_name) => {
                    response.push(make_command_response(
                        "create_bucket",
                        storage::create_bucket(remote, &project_id, &bucket_name).await?,
                    ));
                }
                StorageCommand::RemoveBucket(bucket_name) => {
                    response.push(make_command_response(
                        "remove_bucket",
                        storage::delete_bucket(remote, &bucket_name).await?,
                    ));
                }
                StorageCommand::Download(bucket_name, object_name) => {
                    response.push(make_command_response(
                        "download_object",
                        download_object(
                            remote,
                            &bucket_name,
                            &object_name,
                            &self.sink_url,
                            codec,
                            &mut self.preprocessors,
                        )
                        .await?,
                    ));
                }
                StorageCommand::Unknown => {
                    // An unknown command fails the whole event, even if
                    // earlier commands in the batch already ran.
                    warn!(
                        "Unknown Google Cloud Storage command: `{}` attempted",
                        command.to_string()
                    );
                    return Err(format!(
                        "Unknown Google Cloud Storage command: `{}` attempted",
                        command.to_string()
                    )
                    .into());
                }
            };
        }
        if self.is_linked {
            if let Some(reply_channel) = &self.reply_channel {
                let mut meta = Object::with_capacity(1);
                if let Some(correlation) = maybe_correlation {
                    meta.insert_nocheck("correlation".into(), correlation);
                }
                reply_channel
                    .send(sink::Reply::Response(
                        OUT,
                        Event {
                            id: self.event_id_gen.next_id(),
                            data: (response, meta).into(),
                            ingest_ns: nanotime(),
                            origin_uri: Some(EventOriginUri {
                                uid: 0,
                                scheme: "gRPC".into(),
                                host: "".into(),
                                port: None,
                                path: vec![],
                            }),
                            kind: None,
                            is_batch: false,
                            cb: CbAction::None,
                            op_meta: OpMeta::default(),
                            transactional: false,
                        },
                    ))
                    .await?;
            }
        }
        self.is_down = false;
        return Ok(Some(vec![qos::ack(&mut event)]));
    }
    /// Events are treated as JSON unless a codec override is supplied.
    fn default_codec(&self) -> &str {
        "json"
    }
    /// Wires up the real event-id generator, pre/postprocessors, and the
    /// reply channel; called once before events flow.
    #[allow(clippy::too_many_arguments)]
    async fn init(
        &mut self,
        sink_uid: u64,
        _sink_url: &TremorUrl,
        _codec: &dyn Codec,
        _codec_map: &HashMap<String, Box<dyn Codec>>,
        processors: Processors<'_>,
        is_linked: bool,
        reply_channel: Sender<sink::Reply>,
    ) -> Result<()> {
        self.event_id_gen = EventIdGenerator::new(sink_uid);
        self.postprocessors = make_postprocessors(processors.post)?;
        self.preprocessors = make_preprocessors(processors.pre)?;
        self.reply_channel = Some(reply_channel);
        self.is_linked = is_linked;
        Ok(())
    }
    /// Health probe: if the sink was marked down and the endpoint probes
    /// healthy again, clear the flag and emit a circuit-breaker `open`.
    async fn on_signal(&mut self, mut signal: Event) -> ResultVec {
        if self.is_down && self.qos_facility.probe(signal.ingest_ns) {
            self.is_down = false;
            // This means the port is connectable
            info!("Google Cloud Storage - sink remote endpoint - recovered and contactable");
            // NOTE(review): redundant — `is_down` was already cleared above.
            self.is_down = false;
            return Ok(Some(vec![qos::open(&mut signal)]));
        }
        Ok(None)
    }
    fn is_active(&self) -> bool {
        true
    }
    // Events are acked explicitly in `on_event`, not automatically.
    fn auto_ack(&self) -> bool {
        false
    }
}
/// Encodes `data` with `codec`, runs it through the postprocessor chain,
/// and uploads the concatenated bytes as `object_name` in `bucket_name`.
///
/// Returns the storage API's response value.
async fn upload_object(
    client: &GcsClient,
    bucket_name: &str,
    object_name: &str,
    data: &Value<'_>,
    codec: &dyn Codec,
    ingest_ns: u64,
    postprocessors: &mut [Box<dyn Postprocessor>],
) -> Result<Value<'static>> {
    // (Removed a dead `codec_in_use = None` placeholder that always fell
    // back to the passed-in codec via `unwrap_or`.)
    let encoded = codec.encode(data)?;
    let mut processed = postprocess(postprocessors, ingest_ns, encoded)?;
    // Flatten the postprocessed chunks into one request body.
    let mut body: Vec<u8> = vec![];
    for processed_elem in &mut processed {
        body.append(processed_elem);
    }
    storage::add_object_with_slice(client, bucket_name, object_name, body).await
}
/// Downloads `object_name` from `bucket_name`, runs the bytes through the
/// preprocessor chain and decodes every resulting chunk with `codec`.
///
/// Chunks that decode to nothing become empty objects; the decoded values
/// are returned as one owned array.
async fn download_object(
    client: &GcsClient,
    bucket_name: &str,
    object_name: &str,
    sink_url: &TremorUrl,
    codec: &mut dyn Codec,
    preprocessors: &mut [Box<dyn Preprocessor>],
) -> Result<Value<'static>> {
    let raw = storage::download_object(client, bucket_name, object_name).await?;
    let mut ingest_ns = nanotime();
    let chunks = preprocess(preprocessors, &mut ingest_ns, raw, sink_url)?;
    let mut decoded = Vec::with_capacity(chunks.len());
    for mut chunk in chunks {
        let value = codec
            .decode(&mut chunk, ingest_ns)?
            .unwrap_or_else(Value::object);
        decoded.push(value.into_static());
    }
    Ok(Value::Array(decoded))
}
/// Wraps a command result in the sink's response envelope:
/// `{"cmd": <cmd>, "data": <value>}`, made owned for the reply event.
fn make_command_response(cmd: &str, value: Value) -> Value<'static> {
    literal!({
        "cmd": cmd,
        "data": value
    })
    .into_static()
}
|
use super::Eye;
use super::Side;
use super::Vive;
use std::f32;
// NaN triples mark a pose that has not been set by the tracking system yet.
const INVALID_POSITION: (f32, f32, f32) = (f32::NAN, f32::NAN, f32::NAN);
const INVALID_ROTATION: (f32, f32, f32) = (f32::NAN, f32::NAN, f32::NAN);
/// Head-mounted display state: one eye per side plus the head pose.
pub struct Head {
    pub left_eye: Eye,
    pub right_eye: Eye,
    // (x, y, z) — NaN until tracking provides a value.
    pub position: (f32, f32, f32),
    // Euler angles — NaN until tracking provides a value.
    pub rotation: (f32, f32, f32),
}
impl Head {
    /// Creates a head with both eyes constructed and an invalid (NaN) pose.
    pub fn new() -> Self {
        Head {
            left_eye: Eye::new(Side::Left),
            right_eye: Eye::new(Side::Right),
            position: INVALID_POSITION,
            rotation: INVALID_ROTATION,
        }
    }
    /// Refreshes both eyes.
    // NOTE(review): this does not touch `position`/`rotation` — presumably
    // the pose is updated elsewhere; confirm against callers.
    pub fn update(&mut self) {
        self.left_eye.update();
        self.right_eye.update();
    }
    /// Recommended render-target size (width, height) queried from the VR API.
    pub fn texture_dimensions(&self) -> (u32, u32) {
        Vive::api().system.get_recommended_render_target_size()
    }
}
|
// Auto-generated (svd2rust-style) TWIM peripheral layout. The `_reservedN`
// byte arrays pad the gaps between register addresses so that each field
// lands at the offset given in its doc comment; do not edit by hand.
#[doc = r" Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Start TWI receive sequence"]
    pub tasks_startrx: TASKS_STARTRX,
    _reserved1: [u8; 4usize],
    #[doc = "0x08 - Start TWI transmit sequence"]
    pub tasks_starttx: TASKS_STARTTX,
    _reserved2: [u8; 8usize],
    #[doc = "0x14 - Stop TWI transaction. Must be issued while the TWI master is not suspended."]
    pub tasks_stop: TASKS_STOP,
    _reserved3: [u8; 4usize],
    #[doc = "0x1c - Suspend TWI transaction"]
    pub tasks_suspend: TASKS_SUSPEND,
    #[doc = "0x20 - Resume TWI transaction"]
    pub tasks_resume: TASKS_RESUME,
    _reserved5: [u8; 224usize],
    #[doc = "0x104 - TWI stopped"]
    pub events_stopped: EVENTS_STOPPED,
    _reserved6: [u8; 28usize],
    #[doc = "0x124 - TWI error"]
    pub events_error: EVENTS_ERROR,
    _reserved7: [u8; 32usize],
    #[doc = "0x148 - Last byte has been sent out after the SUSPEND task has been issued, TWI traffic is now suspended."]
    pub events_suspended: EVENTS_SUSPENDED,
    #[doc = "0x14c - Receive sequence started"]
    pub events_rxstarted: EVENTS_RXSTARTED,
    #[doc = "0x150 - Transmit sequence started"]
    pub events_txstarted: EVENTS_TXSTARTED,
    _reserved10: [u8; 8usize],
    #[doc = "0x15c - Byte boundary, starting to receive the last byte"]
    pub events_lastrx: EVENTS_LASTRX,
    #[doc = "0x160 - Byte boundary, starting to transmit the last byte"]
    pub events_lasttx: EVENTS_LASTTX,
    _reserved12: [u8; 156usize],
    #[doc = "0x200 - Shortcut register"]
    pub shorts: SHORTS,
    _reserved13: [u8; 252usize],
    #[doc = "0x300 - Enable or disable interrupt"]
    pub inten: INTEN,
    #[doc = "0x304 - Enable interrupt"]
    pub intenset: INTENSET,
    #[doc = "0x308 - Disable interrupt"]
    pub intenclr: INTENCLR,
    _reserved16: [u8; 440usize],
    #[doc = "0x4c4 - Error source"]
    pub errorsrc: ERRORSRC,
    _reserved17: [u8; 56usize],
    #[doc = "0x500 - Enable TWIM"]
    pub enable: ENABLE,
    _reserved18: [u8; 4usize],
    #[doc = "0x508 - Unspecified"]
    pub psel: PSEL,
    _reserved19: [u8; 20usize],
    #[doc = "0x524 - TWI frequency. Accuracy depends on the HFCLK source selected."]
    pub frequency: FREQUENCY,
    _reserved20: [u8; 12usize],
    #[doc = "0x534 - RXD EasyDMA channel"]
    pub rxd: RXD,
    #[doc = "0x544 - TXD EasyDMA channel"]
    pub txd: TXD,
    _reserved22: [u8; 52usize],
    #[doc = "0x588 - Address used in the TWI transfer"]
    pub address: ADDRESS,
}
// Auto-generated register and cluster declarations. Each register type wraps
// a single 32-bit `VolatileCell`; the accompanying `pub mod` of the same name
// (generated elsewhere) describes its bit fields. Do not edit by hand.
#[doc = r" Register block"]
#[repr(C)]
pub struct PSEL {
    #[doc = "0x00 - Pin select for SCL signal"]
    pub scl: self::psel::SCL,
    #[doc = "0x04 - Pin select for SDA signal"]
    pub sda: self::psel::SDA,
}
#[doc = r" Register block"]
#[doc = "Unspecified"]
pub mod psel;
#[doc = r" Register block"]
#[repr(C)]
pub struct RXD {
    #[doc = "0x00 - Data pointer"]
    pub ptr: self::rxd::PTR,
    #[doc = "0x04 - Maximum number of bytes in receive buffer"]
    pub maxcnt: self::rxd::MAXCNT,
    #[doc = "0x08 - Number of bytes transferred in the last transaction"]
    pub amount: self::rxd::AMOUNT,
    #[doc = "0x0c - EasyDMA list type"]
    pub list: self::rxd::LIST,
}
#[doc = r" Register block"]
#[doc = "RXD EasyDMA channel"]
pub mod rxd;
#[doc = r" Register block"]
#[repr(C)]
pub struct TXD {
    #[doc = "0x00 - Data pointer"]
    pub ptr: self::txd::PTR,
    #[doc = "0x04 - Maximum number of bytes in transmit buffer"]
    pub maxcnt: self::txd::MAXCNT,
    #[doc = "0x08 - Number of bytes transferred in the last transaction"]
    pub amount: self::txd::AMOUNT,
    #[doc = "0x0c - EasyDMA list type"]
    pub list: self::txd::LIST,
}
#[doc = r" Register block"]
#[doc = "TXD EasyDMA channel"]
pub mod txd;
#[doc = "Start TWI receive sequence"]
pub struct TASKS_STARTRX {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Start TWI receive sequence"]
pub mod tasks_startrx;
#[doc = "Start TWI transmit sequence"]
pub struct TASKS_STARTTX {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Start TWI transmit sequence"]
pub mod tasks_starttx;
#[doc = "Stop TWI transaction. Must be issued while the TWI master is not suspended."]
pub struct TASKS_STOP {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Stop TWI transaction. Must be issued while the TWI master is not suspended."]
pub mod tasks_stop;
#[doc = "Suspend TWI transaction"]
pub struct TASKS_SUSPEND {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Suspend TWI transaction"]
pub mod tasks_suspend;
#[doc = "Resume TWI transaction"]
pub struct TASKS_RESUME {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Resume TWI transaction"]
pub mod tasks_resume;
#[doc = "TWI stopped"]
pub struct EVENTS_STOPPED {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "TWI stopped"]
pub mod events_stopped;
#[doc = "TWI error"]
pub struct EVENTS_ERROR {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "TWI error"]
pub mod events_error;
#[doc = "Last byte has been sent out after the SUSPEND task has been issued, TWI traffic is now suspended."]
pub struct EVENTS_SUSPENDED {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Last byte has been sent out after the SUSPEND task has been issued, TWI traffic is now suspended."]
pub mod events_suspended;
#[doc = "Receive sequence started"]
pub struct EVENTS_RXSTARTED {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Receive sequence started"]
pub mod events_rxstarted;
#[doc = "Transmit sequence started"]
pub struct EVENTS_TXSTARTED {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Transmit sequence started"]
pub mod events_txstarted;
#[doc = "Byte boundary, starting to receive the last byte"]
pub struct EVENTS_LASTRX {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Byte boundary, starting to receive the last byte"]
pub mod events_lastrx;
#[doc = "Byte boundary, starting to transmit the last byte"]
pub struct EVENTS_LASTTX {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Byte boundary, starting to transmit the last byte"]
pub mod events_lasttx;
#[doc = "Shortcut register"]
pub struct SHORTS {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Shortcut register"]
pub mod shorts;
#[doc = "Enable or disable interrupt"]
pub struct INTEN {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Enable or disable interrupt"]
pub mod inten;
#[doc = "Enable interrupt"]
pub struct INTENSET {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Enable interrupt"]
pub mod intenset;
#[doc = "Disable interrupt"]
pub struct INTENCLR {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Disable interrupt"]
pub mod intenclr;
#[doc = "Error source"]
pub struct ERRORSRC {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Error source"]
pub mod errorsrc;
#[doc = "Enable TWIM"]
pub struct ENABLE {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Enable TWIM"]
pub mod enable;
#[doc = "TWI frequency. Accuracy depends on the HFCLK source selected."]
pub struct FREQUENCY {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "TWI frequency. Accuracy depends on the HFCLK source selected."]
pub mod frequency;
#[doc = "Address used in the TWI transfer"]
pub struct ADDRESS {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Address used in the TWI transfer"]
pub mod address;
|
/* origin: FreeBSD /usr/src/lib/msun/src/s_tanf.c */
/*
* Conversion to float by Ian Lance Taylor, Cygnus Support, ian@cygnus.com.
* Optimized by Bruce D. Evans.
*/
/*
* ====================================================
* Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved.
*
* Developed at SunPro, a Sun Microsystems, Inc. business.
* Permission to use, copy, modify, and distribute this
* software is freely granted, provided that this notice
* is preserved.
* ====================================================
*/
use super::{k_tanf, rem_pio2f};
use core::f64::consts::FRAC_PI_2;
/* Small multiples of pi/2 rounded to double precision. */
const T1_PIO2: f64 = 1. * FRAC_PI_2; /* 0x3FF921FB, 0x54442D18 */
const T2_PIO2: f64 = 2. * FRAC_PI_2; /* 0x400921FB, 0x54442D18 */
const T3_PIO2: f64 = 3. * FRAC_PI_2; /* 0x4012D97C, 0x7F3321D2 */
const T4_PIO2: f64 = 4. * FRAC_PI_2; /* 0x401921FB, 0x54442D18 */
/// Single-precision tangent.
///
/// Strategy: for small |x| use the polynomial kernel `k_tanf` directly; for
/// |x| up to 9*pi/4 subtract a precomputed multiple of pi/2 (the hex bounds
/// below are the bit patterns of the pi/4-multiple thresholds); otherwise
/// fall back to the general payne-hanek style reduction in `rem_pio2f`.
/// The second argument of `k_tanf` selects tan vs. -1/tan (odd quadrant).
#[cfg_attr(all(test, assert_no_panic), no_panic::no_panic)]
pub fn tanf(x: f32) -> f32 {
    let x64 = x as f64;
    let x1p120 = f32::from_bits(0x7b800000); // 0x1p120f === 2 ^ 120
    let mut ix = x.to_bits();
    let sign = (ix >> 31) != 0;
    ix &= 0x7fffffff;
    if ix <= 0x3f490fda {
        /* |x| ~<= pi/4 */
        if ix < 0x39800000 {
            /* |x| < 2**-12 */
            /* raise inexact if x!=0 and underflow if subnormal */
            force_eval!(if ix < 0x00800000 {
                x / x1p120
            } else {
                x + x1p120
            });
            return x;
        }
        return k_tanf(x64, false);
    }
    if ix <= 0x407b53d1 {
        /* |x| ~<= 5*pi/4 */
        if ix <= 0x4016cbe3 {
            /* |x| ~<= 3pi/4 */
            return k_tanf(if sign { x64 + T1_PIO2 } else { x64 - T1_PIO2 }, true);
        } else {
            return k_tanf(if sign { x64 + T2_PIO2 } else { x64 - T2_PIO2 }, false);
        }
    }
    if ix <= 0x40e231d5 {
        /* |x| ~<= 9*pi/4 */
        if ix <= 0x40afeddf {
            /* |x| ~<= 7*pi/4 */
            return k_tanf(if sign { x64 + T3_PIO2 } else { x64 - T3_PIO2 }, true);
        } else {
            return k_tanf(if sign { x64 + T4_PIO2 } else { x64 - T4_PIO2 }, false);
        }
    }
    /* tan(Inf or NaN) is NaN */
    if ix >= 0x7f800000 {
        return x - x;
    }
    /* argument reduction */
    let (n, y) = rem_pio2f(x);
    k_tanf(y, n & 1 != 0)
}
|
use crate::semantic;
use crate::semantic::column_class;
use yew::prelude::*;
use super::model;
use yew::{Component, ComponentLink, Html};
/// Yew component properties: one `(team, score)` pair per team to display.
/// The component uses its own properties as its state (`Properties = Self`).
#[derive(PartialEq, Clone, Properties)]
pub struct Score {
    pub score: Vec<(model::Team, model::Score)>,
}
impl Component for Score {
    type Message = ();
    type Properties = Self;
    fn create(props: Self::Properties, _link: ComponentLink<Self>) -> Self {
        props
    }
    /// Adopts the new properties and re-renders only when they differ.
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        if self != &props {
            // Plain assignment instead of `std::mem::replace(self, props);`
            // whose (must-use) return value — the old props — was discarded.
            *self = props;
            true
        } else {
            false
        }
    }
    fn update(&mut self, _msg: Self::Message) -> ShouldRender {
        false
    }
    /// Renders one colored statistic column per team, with a "vs" divider
    /// between consecutive columns; empty markup when there are no teams.
    fn view(&self) -> Html {
        let answ = self
            .score
            .iter()
            .map(|(team, score)| {
                html! {
                    <div class="column">
                        <div class=("ui", semantic::color_class(team.into()), "statistic")>
                            <div class="value">
                                { score }
                            </div>
                            <div class="label">
                                {"cards left"}
                            </div>
                        </div>
                    </div>
                }
            })
            // Fold keeps a running (column count, accumulated markup) pair so
            // the final wrapper can pick the right semantic-ui column class.
            .fold(None, |acc, x| match acc {
                None => Some((1, x)),
                Some((count, html)) => Some((
                    count + 1,
                    html! {
                        <>
                            { html }
                            <div class="ui vertical divider">{ "vs" }</div>
                            { x }
                        </>
                    },
                )),
            });
        match answ {
            None => html! {},
            Some((count, html)) => html! {
                <div class=("ui", column_class(count).unwrap_or(""), "column stackable center aligned grid basic segment")>
                    { html }
                </div>
            },
        }
    }
}
|
impl Solution {
pub fn num_jewels_in_stones(jewels: String, stones: String) -> i32 {
let mut valid_chars = [false; 123];
for c in jewels.chars() {
valid_chars[c as usize] = true;
}
let mut ans = 0;
for c in stones.chars() {
ans += match valid_chars[(c-'A') as usize] {
true => 1,
false => 0
}
}
ans
}
} |
use std::convert::TryInto;
use std::net::Shutdown;
use byteorder::NetworkEndian;
use futures_channel::mpsc::UnboundedSender;
use crate::io::{Buf, BufStream, MaybeTlsStream};
use crate::postgres::protocol::{Message, NotificationResponse, Response, Write};
use crate::postgres::PgError;
use crate::url::Url;
use futures_util::SinkExt;
/// Buffered, optionally-TLS connection to a PostgreSQL server.
pub struct PgStream {
    pub(super) stream: BufStream<MaybeTlsStream>,
    // When set, asynchronous NOTIFY payloads are forwarded here instead of
    // being surfaced to the caller of `receive`.
    pub(super) notifications: Option<UnboundedSender<NotificationResponse<'static>>>,
    // Most recently received message
    // Is referenced by our buffered stream
    // Is initialized to ReadyForQuery/0 at the start
    // Tuple is (message type, body length in bytes still in the read buffer).
    pub(super) message: (Message, u32),
}
impl PgStream {
    /// Opens a (possibly TLS) TCP connection to the server named in `url`,
    /// defaulting to port 5432.
    pub(super) async fn new(url: &Url) -> crate::Result<Self> {
        let stream = MaybeTlsStream::connect(&url, 5432).await?;
        Ok(Self {
            notifications: None,
            stream: BufStream::new(stream),
            message: (Message::ReadyForQuery, 0),
        })
    }
    /// Shuts down both halves of the underlying socket.
    pub(super) fn shutdown(&self) -> crate::Result<()> {
        Ok(self.stream.shutdown(Shutdown::Both)?)
    }
    /// Serializes `message` into the write buffer; nothing is sent until
    /// `flush` is called.
    #[inline]
    pub(super) fn write<M>(&mut self, message: M)
    where
        M: Write,
    {
        message.write(self.stream.buffer_mut());
    }
    /// Sends everything accumulated in the write buffer.
    #[inline]
    pub(super) async fn flush(&mut self) -> crate::Result<()> {
        Ok(self.stream.flush().await?)
    }
    /// Reads the next message header, records its type/length in
    /// `self.message`, and waits until the full body is buffered.
    /// The body itself is accessed later through [`buffer`].
    pub(super) async fn read(&mut self) -> crate::Result<Message> {
        // https://www.postgresql.org/docs/12/protocol-overview.html#PROTOCOL-MESSAGE-CONCEPTS
        // All communication is through a stream of messages. The first byte of a message
        // identifies the message type, and the next four bytes specify the length of the rest of
        // the message (this length count includes itself, but not the message-type byte).
        if self.message.1 > 0 {
            // If there is any data in our read buffer we need to make sure we flush that
            // so reading will return the *next* message
            self.stream.consume(self.message.1 as usize);
        }
        let mut header = self.stream.peek(4 + 1).await?;
        let type_ = header.get_u8()?.try_into()?;
        // The wire length includes the 4 length bytes themselves; subtract
        // them to get the body length.
        let length = header.get_u32::<NetworkEndian>()? - 4;
        self.message = (type_, length);
        self.stream.consume(4 + 1);
        // Wait until there is enough data in the stream. We then return without actually
        // inspecting the data. This is then looked at later through the [buffer] function
        let _ = self.stream.peek(length as usize).await?;
        Ok(type_)
    }
    /// Like [`read`], but transparently handles protocol-level messages:
    /// errors are raised, notices are skipped, notifications are forwarded
    /// to the registered channel. Loops until a regular message arrives.
    pub(super) async fn receive(&mut self) -> crate::Result<Message> {
        loop {
            let type_ = self.read().await?;
            match type_ {
                Message::ErrorResponse | Message::NoticeResponse => {
                    let response = Response::read(self.stream.buffer())?;
                    if response.severity.is_error() {
                        // This is an error, bubble up as one immediately
                        return Err(crate::Error::Database(Box::new(PgError(response))));
                    }
                    // TODO: Provide some way of receiving these non-critical
                    // notices from postgres
                    continue;
                }
                Message::NotificationResponse => {
                    if let Some(buffer) = &mut self.notifications {
                        let notification = NotificationResponse::read(self.stream.buffer())?;
                        let _ = buffer.send(notification.into_owned()).await;
                        continue;
                    }
                }
                _ => {}
            }
            return Ok(type_);
        }
    }
    /// Returns a reference to the internally buffered message.
    ///
    /// This is the body of the message identified by the most recent call
    /// to `read`.
    #[inline]
    pub(super) fn buffer(&self) -> &[u8] {
        &self.stream.buffer()[..(self.message.1 as usize)]
    }
}
|
// Copyright 2018 Evgeniy Reizner
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::str;
use std::collections::HashMap;
pub use self::options::*;
use roxmltree;
use svgtypes::{
Paint,
PaintFallback,
StreamExt,
StyleParser,
};
use svgtypes::xmlparser::{
Stream,
StrSpan,
};
use super::*;
mod css;
mod options;
mod text;
// Local shorthand: all parser entry points fail with `ParserError`.
type Result<T> = ::std::result::Result<T, ParserError>;
/// A node paired with a piece of text (a class name or a style string)
/// that must be processed after the whole tree has been built.
pub struct NodeStringData {
    pub node: Node,
    pub text: String,
}
/// One unresolved IRI/FuncIRI occurrence, remembered during parsing and
/// resolved in `resolve_links` once all ids are known.
pub struct LinkData {
    // Attribute the link appeared in (e.g. `fill`, `href`).
    attr_id: AttributeId,
    // Target id, without the '#'/url(#...) wrapper.
    iri: String,
    // Paint fallback to use when the target cannot be resolved.
    fallback: Option<PaintFallback>,
    // Node that carries the attribute.
    node: Node,
}
/// Collects every IRI reference seen during parsing together with an
/// id → node index, so links can be resolved in one pass afterwards.
pub struct Links {
    /// List of all parsed IRI and FuncIRI.
    pub list: Vec<LinkData>,
    /// Store all nodes with id's.
    ///
    /// For performance reasons only.
    pub elems_with_id: HashMap<String, Node>,
}
impl Links {
    /// Records one IRI/FuncIRI occurrence for deferred resolution.
    fn append(
        &mut self,
        id: AttributeId,
        iri: &str,
        fallback: Option<PaintFallback>,
        node: &Node,
    ) {
        let data = LinkData {
            attr_id: id,
            iri: iri.to_owned(),
            fallback,
            node: node.clone(),
        };
        self.list.push(data);
    }
}
/// Everything that cannot be handled during the first parsing pass and is
/// finished after the whole tree exists (links, CSS classes, style attrs).
pub struct PostData {
    pub links: Links,
    // List of element with 'class' attribute.
    // We can't process it inplace, because styles can be set after usage.
    pub class_attrs: Vec<NodeStringData>,
    // List of style attributes.
    pub style_attrs: Vec<NodeStringData>,
}
/// Parses SVG `text` into a `Document`.
///
/// Multi-pass: build the node tree, apply CSS, resolve style attributes,
/// resolve IRI links, then prepare text nodes. The pass order matters.
///
/// # Errors
/// Fails on XML errors, on a missing root `svg` element, and — depending on
/// `opt` — on invalid CSS or attribute values.
pub fn parse_svg(text: &str, opt: &ParseOptions) -> Result<Document> {
    let ro_doc = roxmltree::Document::parse(text)?;
    // Since we not only parsing, but also converting an SVG structure,
    // we can't do everything in one take.
    // At first, we create nodes structure with attributes.
    // Than apply CSS. And then ungroup style attributes.
    // Order is important, otherwise we get rendering error.
    let mut post_data = PostData {
        links: Links {
            list: Vec::new(),
            elems_with_id: HashMap::new(),
        },
        class_attrs: Vec::new(),
        style_attrs: Vec::new(),
    };
    let mut doc = Document::new();
    let root = doc.root();
    let mut parent = root.clone();
    for child in ro_doc.root().children() {
        process_node(child, opt, &mut post_data, &mut doc, &mut parent)?;
    }
    // First element must be an 'svg' element.
    if doc.svg_element().is_none() {
        return Err(ParserError::NoSvgElement);
    }
    // Remove 'style' elements, because their content (CSS)
    // is stored separately and will be processed later.
    doc.drain(root.clone(), |n| n.is_tag_name(ElementId::Style));
    if let Err(e) = css::resolve_css(&ro_doc, &doc, &mut post_data, opt) {
        if opt.skip_invalid_css {
            warn!("{}.", e);
        } else {
            return Err(e.into());
        }
    }
    // Resolve styles.
    for d in &mut post_data.style_attrs {
        parse_style_attribute(&d.text, opt, &mut d.node, &mut post_data.links)?;
    }
    resolve_links(&mut post_data.links);
    text::prepare_text(&mut doc);
    Ok(doc)
}
/// Recursively converts one `roxmltree` node (and its subtree) into `doc`
/// nodes under `parent`, recording deferred work in `post_data`.
///
/// Non-SVG-namespace elements and unknown element names are silently
/// skipped; text and comment nodes are copied over.
fn process_node(
    xml_node: roxmltree::Node,
    opt: &ParseOptions,
    post_data: &mut PostData,
    doc: &mut Document,
    parent: &mut Node,
) -> Result<()> {
    match xml_node.node_type() {
        roxmltree::NodeType::Element => {
            if xml_node.tag_name().namespace() != "http://www.w3.org/2000/svg" {
                return Ok(());
            }
            let tag_name = xml_node.tag_name();
            let local = tag_name.name();
            let mut e = match ElementId::from_str(local) {
                Some(eid) => {
                    doc.create_element(eid)
                }
                None => {
                    // Unknown element: skip it together with its subtree.
                    return Ok(());
                }
            };
            for attr in xml_node.attributes() {
                // Only attributes from the SVG/XLink/XML namespaces (or
                // without a namespace) are taken over.
                match attr.namespace() {
                    "" |
                    "http://www.w3.org/2000/svg" |
                    "http://www.w3.org/1999/xlink" |
                    "http://www.w3.org/XML/1998/namespace" => {}
                    _ => continue,
                }
                let local = attr.name();
                let value = StrSpan::from(attr.value());
                if let Some(aid) = AttributeId::from_str(local) {
                    if e.is_svg_element() {
                        parse_svg_attribute(aid, value, opt, &mut e, post_data)?;
                    }
                }
            }
            parent.append(e.clone());
            if xml_node.is_element() && xml_node.has_children() {
                for child in xml_node.children() {
                    process_node(child, opt, post_data, doc, &mut e)?;
                }
            }
        }
        roxmltree::NodeType::Text => {
            let text = xml_node.text().unwrap();
            if text.trim().is_empty() {
                // Whitespaces inside text elements are important.
                if let Some(id) = parent.tag_id() {
                    match id {
                        ElementId::Text
                        | ElementId::Tspan
                        | ElementId::Tref => {
                            let n = doc.create_node(NodeType::Text, text);
                            parent.append(n);
                        }
                        _ => {}
                    }
                }
            } else {
                let n = doc.create_node(NodeType::Text, xml_node.text().unwrap());
                parent.append(n);
            }
        }
        roxmltree::NodeType::Comment => {
            let n = doc.create_node(NodeType::Comment, xml_node.text().unwrap());
            parent.append(n);
        }
        _ => {}
    }
    // Check that the first element of the doc is 'svg'.
    //
    // Check only when we parsing the root nodes, which is faster.
    if parent.is_root() {
        if let Some((id, _)) = doc.root().children().svg().nth(0) {
            if id != ElementId::Svg {
                return Err(ParserError::NoSvgElement);
            }
        }
    }
    Ok(())
}
/// Handles one attribute of an SVG element: `id`, `style` and `class` get
/// special deferred treatment; everything else is parsed immediately via
/// `parse_svg_attribute_value`.
fn parse_svg_attribute<'a>(
    id: AttributeId,
    value: StrSpan<'a>,
    opt: &ParseOptions,
    node: &mut Node,
    post_data: &mut PostData,
) -> Result<()> {
    match id {
        AttributeId::Id => {
            node.set_id(value.to_str());
            // Index the node by id so IRI links can be resolved later.
            post_data.links.elems_with_id.insert(value.to_str().to_owned(), node.clone());
        }
        AttributeId::Style => {
            // We store 'style' attributes for later use.
            post_data.style_attrs.push(NodeStringData {
                node: node.clone(),
                text: value.to_string(),
            });
        }
        AttributeId::Class => {
            // TODO: to svgtypes
            // We store 'class' attributes for later use.
            // Split the whitespace-separated class list into single classes.
            let mut s = Stream::from(value);
            while !s.at_end() {
                s.skip_spaces();
                let class = s.consume_bytes(|s2, _| !s2.starts_with_space());
                post_data.class_attrs.push(NodeStringData {
                    node: node.clone(),
                    text: class.to_string(),
                });
                s.skip_spaces();
            }
        }
        _ => {
            parse_svg_attribute_value(id, value, opt, node, &mut post_data.links)?;
        }
    }
    Ok(())
}
/// Parses an attribute value and sets it on `node`.
///
/// Empty number/length lists are dropped. Invalid values are either logged
/// and skipped (`opt.skip_invalid_attributes`) or turned into an error.
pub fn parse_svg_attribute_value<'a>(
    id: AttributeId,
    value: StrSpan<'a>,
    opt: &ParseOptions,
    node: &mut Node,
    links: &mut Links,
) -> Result<()> {
    let av = _parse_svg_attribute_value(id, value, node, links);
    match av {
        Ok(av) => {
            if let Some(av) = av {
                match av {
                    // Empty lists carry no information; don't store them.
                    AttributeValue::NumberList(ref list) if list.is_empty() => {}
                    AttributeValue::LengthList(ref list) if list.is_empty() => {}
                    _ => node.set_attribute((id, av)),
                }
            }
        }
        Err(e) => {
            if opt.skip_invalid_attributes {
                warn!("Attribute '{}' has an invalid value: '{}'.", id, value.to_str());
            } else {
                return Err(e.into());
            }
        }
    }
    Ok(())
}
#[inline]
/// Clamps `val` into `[min, max]`.
///
/// Both comparisons are false for NaN, so a NaN `val` falls through to the
/// final arm and is returned unchanged.
fn f64_bound(min: f64, val: f64, max: f64) -> f64 {
    match val {
        v if v > max => max,
        v if v < min => min,
        v => v,
    }
}
pub fn _parse_svg_attribute_value<'a>(
aid: AttributeId,
value: StrSpan<'a>,
node: &mut Node,
links: &mut Links,
) -> Result<Option<AttributeValue>> {
use AttributeId as AId;
let eid = node.tag_id().unwrap();
// 'unicode' attribute can contain spaces.
let value = if aid != AId::Unicode { value.trim() } else { value };
if aid == AId::Href {
let mut s = Stream::from(value);
match s.parse_iri() {
Ok(link) => {
// Collect links for later processing.
links.append(aid, link, None, node);
return Ok(None);
}
Err(_) => {
return Ok(Some(AttributeValue::String(value.to_str().to_string())));
}
}
}
let av = match aid {
AId::X | AId::Y
| AId::Dx | AId::Dy => {
// Some attributes can contain different data based on the element type.
match eid {
ElementId::AltGlyph
| ElementId::Text
| ElementId::Tref
| ElementId::Tspan => {
AttributeValue::LengthList(LengthList::from_span(value)?)
}
_ => {
AttributeValue::Length(Length::from_span(value)?)
}
}
}
AId::X1 | AId::Y1
| AId::X2 | AId::Y2
| AId::R
| AId::Rx | AId::Ry
| AId::Cx | AId::Cy
| AId::Fx | AId::Fy
| AId::Offset
| AId::Width | AId::Height => {
AttributeValue::Length(Length::from_span(value)?)
}
AId::StrokeDashoffset
| AId::StrokeMiterlimit
| AId::StrokeWidth => {
match value.to_str() {
"inherit" => AttributeValue::Inherit,
_ => Length::from_span(value)?.into(),
}
}
AId::Opacity
| AId::FillOpacity
| AId::FloodOpacity
| AId::StrokeOpacity
| AId::StopOpacity => {
match value.to_str() {
"inherit" => AttributeValue::Inherit,
_ => {
let mut s = Stream::from(value);
let mut n = s.parse_number()?;
n = f64_bound(0.0, n, 1.0);
AttributeValue::Number(n)
}
}
}
AId::StrokeDasharray => {
match value.to_str() {
"none" => AttributeValue::None,
"inherit" => AttributeValue::Inherit,
_ => AttributeValue::LengthList(LengthList::from_span(value)?),
}
}
AId::Fill => {
// 'fill' in animate-based elements it's another 'fill'
// https://www.w3.org/TR/SVG/animate.html#FillAttribute
match eid {
ElementId::Set
| ElementId::Animate
| ElementId::AnimateColor
| ElementId::AnimateMotion
| ElementId::AnimateTransform
=> AttributeValue::String(value.to_str().to_string()),
_ => {
match Paint::from_span(value)? {
Paint::None => AttributeValue::None,
Paint::Inherit => AttributeValue::Inherit,
Paint::CurrentColor => AttributeValue::CurrentColor,
Paint::Color(color) => AttributeValue::Color(color),
Paint::FuncIRI(link, fallback) => {
// collect links for later processing
links.append(aid, link, fallback, node);
return Ok(None);
}
}
}
}
}
AId::Stroke => {
match Paint::from_span(value)? {
Paint::None => AttributeValue::None,
Paint::Inherit => AttributeValue::Inherit,
Paint::CurrentColor => AttributeValue::CurrentColor,
Paint::Color(color) => AttributeValue::Color(color),
Paint::FuncIRI(link, fallback) => {
// Collect links for later processing.
links.append(aid, link, fallback, node);
return Ok(None);
}
}
}
AId::ClipPath
| AId::Filter
| AId::Marker
| AId::MarkerEnd
| AId::MarkerMid
| AId::MarkerStart
| AId::Mask => {
match value.to_str() {
"none" => AttributeValue::None,
"inherit" => AttributeValue::Inherit,
_ => {
let mut s = Stream::from(value);
let link = s.parse_func_iri()?;
// collect links for later processing
links.append(aid, link, None, node);
return Ok(None);
}
}
}
AId::Color => {
match value.to_str() {
"inherit" => AttributeValue::Inherit,
_ => AttributeValue::Color(Color::from_span(value)?),
}
}
AId::LightingColor
| AId::FloodColor
| AId::StopColor => {
match value.to_str() {
"inherit" => AttributeValue::Inherit,
"currentColor" => AttributeValue::CurrentColor,
_ => AttributeValue::Color(Color::from_span(value)?),
}
}
AId::StdDeviation
| AId::BaseFrequency
| AId::Rotate => {
// TODO: 'stdDeviation' can contain only one or two numbers
AttributeValue::NumberList(NumberList::from_span(value)?)
}
AId::Points => {
AttributeValue::Points(Points::from_span(value)?)
}
AId::D => {
AttributeValue::Path(Path::from_span(value)?)
}
AId::Transform
| AId::GradientTransform
| AId::PatternTransform => {
let ts = Transform::from_span(value)?;
if !ts.is_default() {
AttributeValue::Transform(Transform::from_span(value)?)
} else {
return Ok(None);
}
}
AId::FontSize => {
let mut s = Stream::from(value);
match s.parse_length() {
Ok(l) => AttributeValue::Length(l),
Err(_) => {
if value.to_str() == "inherit" {
AttributeValue::Inherit
} else {
AttributeValue::String(value.to_str().to_string())
}
}
}
}
AId::FontSizeAdjust => {
match value.to_str() {
"none" => AttributeValue::None,
"inherit" => AttributeValue::Inherit,
_ => {
let mut s = Stream::from(value);
AttributeValue::Number(s.parse_number()?)
}
}
}
AId::Display
| AId::PointerEvents
| AId::TextDecoration => {
match value.to_str() {
"none" => AttributeValue::None,
"inherit" => AttributeValue::Inherit,
_ => AttributeValue::String(value.to_str().to_string()),
}
}
AId::BaselineShift
| AId::ClipRule
| AId::ColorInterpolation
| AId::ColorInterpolationFilters
| AId::ColorProfile
| AId::ColorRendering
| AId::Direction
| AId::DominantBaseline
| AId::EnableBackground
| AId::FillRule
| AId::FontFamily
| AId::FontStretch
| AId::FontStyle
| AId::FontVariant
| AId::FontWeight
| AId::GlyphOrientationVertical
| AId::ImageRendering
| AId::Kerning
| AId::LetterSpacing
| AId::Overflow
| AId::ShapeRendering
| AId::StrokeLinecap
| AId::StrokeLinejoin
| AId::TextAnchor
| AId::TextRendering
| AId::UnicodeBidi
| AId::Visibility
| AId::WordSpacing
| AId::WritingMode => {
match value.to_str() {
"inherit" => AttributeValue::Inherit,
_ => AttributeValue::String(value.to_str().to_string()),
}
}
AId::ViewBox => {
AttributeValue::ViewBox(ViewBox::from_span(value)?)
}
AId::PreserveAspectRatio => {
AttributeValue::AspectRatio(AspectRatio::from_span(value)?)
}
_ => {
AttributeValue::String(value.to_str().to_string())
}
};
Ok(Some(av))
}
/// Parses a `style="..."` declaration list and applies each property to
/// `node`. Known property names go through the typed attribute parser;
/// unknown ones are stored as plain string attributes.
fn parse_style_attribute(
    text: &str,
    opt: &ParseOptions,
    node: &mut Node,
    links: &mut Links,
) -> Result<()> {
    for token in StyleParser::from(text) {
        let (name, value) = token?;
        match AttributeId::from_str(name.to_str()) {
            Some(aid) => {
                parse_svg_attribute_value(aid, value, opt, node, links)?;
            }
            None => {
                node.set_attribute((name.to_str(), value.to_str()));
            }
        }
    }
    Ok(())
}
fn resolve_links(links: &mut Links) {
for d in &mut links.list {
match links.elems_with_id.get(&d.iri) {
Some(node) => {
let res = if d.attr_id == AttributeId::Fill || d.attr_id == AttributeId::Stroke {
d.node.set_attribute_checked((d.attr_id, (node.clone(), d.fallback)))
} else {
d.node.set_attribute_checked((d.attr_id, node.clone()))
};
match res {
Ok(_) => {}
Err(Error::ElementMustHaveAnId) => {
// TODO: unreachable?
let attr = Attribute::from((d.attr_id, node.clone()));
warn!("Element without an ID cannot be linked. \
Attribute {} ignored.", attr);
}
Err(Error::ElementCrosslink) => {
let attr = Attribute::from((d.attr_id, node.clone()));
warn!("Crosslink detected. Attribute {} ignored.", attr);
}
}
}
None => {
let av = match d.fallback {
Some(PaintFallback::None) => AttributeValue::None,
Some(PaintFallback::CurrentColor) => AttributeValue::CurrentColor,
Some(PaintFallback::Color(c)) => AttributeValue::Color(c),
None => {
if d.attr_id == AttributeId::Fill {
warn!("Could not resolve the 'fill' IRI reference: {}. \
Fallback to 'none'.", d.iri);
AttributeValue::None
} else if d.attr_id == AttributeId::Href {
warn!("Could not resolve IRI reference: {}.", d.iri);
AttributeValue::String(format!("#{}", d.iri))
} else {
warn!("Could not resolve FuncIRI reference: {}.", d.iri);
AttributeValue::String(format!("url(#{})", d.iri))
}
}
};
d.node.set_attribute((d.attr_id, av));
}
}
}
}
|
use async_trait::async_trait;
use crate::protobuf::Command;
/// State-machine interface that Raft uses to deliver commands to the replicated state-machine
#[async_trait]
pub trait StateMachine {
    /// Delivers the replicated command (eg. an order request) to the state machine
    // `is_leader` tells the implementation whether this node is currently
    // the Raft leader when the command is applied.
    async fn apply_command(&self, command: &Command, is_leader: bool) -> ();
}
|
//==============================================================================
// Notes
//==============================================================================
// drivers::log.rs
// The logger library is meant to be a scrolling circular buffer of entries.
// The log can be updated in the background in real-time, as needed.
// When the log page is shown, only the current entries will be shown.
//==============================================================================
// Crates and Mods
//==============================================================================
use crate::app::{info, page};
use super::lcd::{lcd_api, font};
//==============================================================================
// Enums, Structs, and Types
//=============================================================================
/// One row of the on-screen log buffer.
#[derive(Copy, Clone)]
struct LogLine{
    // True while the slot holds a message.
    active: bool,
    // True when the row must be redrawn on the next refresh.
    stale: bool,
    // NUL-terminated ASCII text, at most LOG_MAX_LENGTH bytes.
    line: [u8; 24]
}
//==============================================================================
// Variables
//==============================================================================
const LOG_INITIAL_X: u16 = 0;
const LOG_INITIAL_Y: u16 = 0;
const LOG_SCALE: u16 = 2;
const LOG_BACKGROUND: lcd_api::Color = lcd_api::Color::Black;
const LOG_FOREGROUND: lcd_api::Color = lcd_api::Color::White;
const LOG_WELCOME: &'static str = "** Log Output Window **";
// Room reserved at the start of each rendered line (e.g. for a marker).
const LOG_PREFIX_LENGTH: usize = 3;
const LOG_MAX_LENGTH: usize = 24;
// Payload bytes available per line after the prefix is reserved.
const LOG_ACTUAL_LEN: usize = LOG_MAX_LENGTH - LOG_PREFIX_LENGTH;
const LOG_LINE_ENTRIES: usize = 15;
// SAFETY NOTE: these `static mut`s are only sound on a single-threaded
// (bare-metal, no-interrupt-reentrancy) target — every access below is
// wrapped in `unsafe` with that assumption.
static mut LOG_LINES_ACTIVE: usize = 0;
static mut LOG_LINES:[LogLine; LOG_LINE_ENTRIES] = [
    LogLine { active: false, stale: true, line: [ 0x00; LOG_MAX_LENGTH ] };
    LOG_LINE_ENTRIES
];
//==============================================================================
// Public Functions
//==============================================================================
/// Initializes the log window by pushing the welcome banner as line 0.
pub fn init() {
    // Push the welcome message
    push_log(LOG_WELCOME);
}
/// Marks every active line stale so the whole log is redrawn on the next
/// refresh (used after the page is switched back to the log view).
pub fn make_stale() {
    unsafe {
        for i in 0..LOG_LINES_ACTIVE {
            LOG_LINES[i].stale = true;
        }
    }
}
#[allow(dead_code)]
/// Appends `string` as a new log line, truncated to LOG_ACTUAL_LEN bytes and
/// NUL-terminated; the oldest line is dropped when the buffer is full.
pub fn push_log(string: &'static str) {
    unsafe {
        if LOG_LINES_ACTIVE == LOG_LINE_ENTRIES {
            pop_log();
        }
        let index = LOG_LINES_ACTIVE;
        LOG_LINES_ACTIVE = LOG_LINES_ACTIVE + 1;
        // Truncate to the usable payload width.
        let len = if string.len() < LOG_ACTUAL_LEN { string.len()} else { LOG_ACTUAL_LEN };
        let string = string.as_bytes();
        LOG_LINES[index].active = true;
        LOG_LINES[index].stale = true;
        // Copy bytes from string into the log lines object
        for i in 0..len {
            LOG_LINES[index].line[i] = string[i];
        }
        // NUL-terminate when the text did not fill the whole line.
        if len < LOG_ACTUAL_LEN {
            LOG_LINES[index].line[len] = 0;
        }
    }
}
/// Append a log line consisting of `string` followed by `num` rendered in
/// decimal, evicting the oldest entry when the window is full.
///
/// Fixes from review:
/// * digits were previously written least-significant first, so numbers
///   appeared reversed on screen (e.g. 123 displayed as "321");
/// * the NUL terminator was written at `string_len + num_len + 3`, which can
///   index past the 24-byte line buffer and panic.
///
/// Generalized from `&'static str` to `&str` (bytes are copied; backward
/// compatible).
#[allow(dead_code)]
pub fn push_log_number(string: &str, num: &u32) {
	unsafe {
		// Make room by discarding the oldest line (row 0 header is kept).
		if LOG_LINES_ACTIVE == LOG_LINE_ENTRIES {
			pop_log();
		}
		let index = LOG_LINES_ACTIVE;
		LOG_LINES_ACTIVE += 1;
		let string_len = core::cmp::min(string.len(), LOG_ACTUAL_LEN);
		let bytes = string.as_bytes();
		let num_len = get_num_len(*num);
		LOG_LINES[index].active = true;
		LOG_LINES[index].stale = true;
		// Copy the (possibly truncated) text prefix.
		LOG_LINES[index].line[..string_len].copy_from_slice(&bytes[..string_len]);
		// Render the number most-significant digit first: start the divisor
		// at 10^(num_len-1) and shrink it by a decade per digit.
		let mut div: u32 = 10u32.pow((num_len - 1) as u32);
		for i in string_len..(string_len + num_len) {
			if i == LOG_ACTUAL_LEN {
				// No room left for further digits.
				break;
			}
			LOG_LINES[index].line[i] = (0x30 + ((num / div) % 10)) as u8;
			div /= 10;
		}
		// Terminate the line; `end` is at most LOG_ACTUAL_LEN, which is
		// strictly less than LOG_MAX_LENGTH, so this never goes out of bounds.
		let end = core::cmp::min(string_len + num_len, LOG_ACTUAL_LEN);
		LOG_LINES[index].line[end] = 0;
	}
}
//==============================================================================
// Private Functions
//==============================================================================
/// Blank the full-width rectangle occupied by one log row.
fn clear_line(line_number: usize) {
	// Each row is one minimal-font character cell high, after scaling.
	let row_height = font::MINIMAL_CHARACTER_HEIGHT * LOG_SCALE;
	let top = LOG_INITIAL_Y + ((line_number as u16) * row_height);
	lcd_api::fill_rectangle(0, 240, top, row_height, LOG_BACKGROUND);
}
/// Length of the stored text in a log slot.
///
/// NOTE(review): when a NUL terminator is present the count includes it
/// (index + 1), matching the original behaviour — confirm the renderer
/// tolerates the trailing 0 byte.
fn get_line_length(line_number: usize) -> usize {
	let line = unsafe { LOG_LINES[line_number].line };
	match line.iter().position(|&b| b == 0) {
		Some(terminator) => terminator + 1,
		None => line.len(),
	}
}
/// Number of decimal digits needed to print `num` (0 counts as one digit).
fn get_num_len(mut num: u32) -> usize {
	let mut digits: usize = 1;
	while num >= 10 {
		num /= 10;
		digits += 1;
	}
	digits
}
/// Drop the oldest visible log entry and shift the remainder up one slot.
///
/// Row 0 is deliberately left untouched so the header/welcome line stays at
/// the top of the window; the shift therefore starts at row 1, and row 1's
/// previous content is what effectively gets discarded.
fn pop_log() {
	unsafe {
		// Show that a line has just been popped
		LOG_LINES_ACTIVE = LOG_LINES_ACTIVE - 1;
		// Shift all entries up one - leaving the bottom entry available
		// Start at 1 to always show the header on row 0
		for i in 1..(LOG_LINES_ACTIVE) {
			LOG_LINES[i].active = true;
			LOG_LINES[i].stale = true;
			LOG_LINES[i].line = LOG_LINES[i+1].line;
		}
		// The freed bottom slot is no longer active.
		LOG_LINES[LOG_LINES_ACTIVE].active = false;
	}
}
/// Draw one log line to the LCD and clear its stale flag.
fn write_line(line_number: usize) {
	// Vertical position: one scaled character cell per row.
	let y = LOG_INITIAL_Y + ((line_number as u16) * font::MINIMAL_CHARACTER_HEIGHT * LOG_SCALE);
	let len = get_line_length(line_number);
	unsafe {
		// NOTE(review): `len` includes the NUL terminator when one is present —
		// confirm write_minimal_line tolerates a trailing 0 byte.
		font::write_minimal_line(&LOG_LINES[line_number].line[0..len], LOG_INITIAL_X, y, LOG_FOREGROUND, LOG_BACKGROUND, LOG_SCALE);
		// Update the stale line flag showing it has been displayed
		LOG_LINES[line_number].stale = false;
	}
}
//==============================================================================
// Interrupt Handler
//==============================================================================
//==============================================================================
// Task Handler
//==============================================================================
/// Periodic task: redraw any stale log lines, but only while the Log page is
/// the one currently shown on the device.
pub fn task_handler(d: &info::DeviceInfo) {
	if let page::AppPage::Log = d.app_page {
		unsafe {
			for i in 0..LOG_LINES_ACTIVE {
				// If log lines are current, do nothing
				// NOTE(review): this `return` bails out at the first inactive
				// slot; since active slots are filled contiguously from 0 it
				// behaves like `break`, but confirm that was the intent.
				if !LOG_LINES[i].active {
					return;
				}
				// Repaint only lines flagged as stale.
				if LOG_LINES[i].stale {
					clear_line(i);
					write_line(i);
				}
			}
		}
	}
}
|
// NOTE: svd2rust-generated peripheral layout — field order and the
// `_reserved3` padding mirror the OPAMP register map offsets (0x00..0x18)
// and must not be edited by hand.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - OPAMP1 control/status register"]
    pub opamp1_csr: OPAMP1_CSR,
    #[doc = "0x04 - OPAMP1 offset trimming register in normal mode"]
    pub opamp1_otr: OPAMP1_OTR,
    #[doc = "0x08 - OPAMP1 offset trimming register in low-power mode"]
    pub opamp1_lpotr: OPAMP1_LPOTR,
    _reserved3: [u8; 4usize],
    #[doc = "0x10 - OPAMP2 control/status register"]
    pub opamp2_csr: OPAMP2_CSR,
    #[doc = "0x14 - OPAMP2 offset trimming register in normal mode"]
    pub opamp2_otr: OPAMP2_OTR,
    #[doc = "0x18 - OPAMP2 offset trimming register in low-power mode"]
    pub opamp2_lpotr: OPAMP2_LPOTR,
}
// NOTE: svd2rust-generated register marker types. Each `Reg<u32, _X>` alias
// pairs a zero-sized marker struct with `Readable`/`Writable` impls and a
// field-description module of the same name; generated code, do not hand-edit.
#[doc = "OPAMP1 control/status register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [opamp1_csr](opamp1_csr) module"]
pub type OPAMP1_CSR = crate::Reg<u32, _OPAMP1_CSR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _OPAMP1_CSR;
#[doc = "`read()` method returns [opamp1_csr::R](opamp1_csr::R) reader structure"]
impl crate::Readable for OPAMP1_CSR {}
#[doc = "`write(|w| ..)` method takes [opamp1_csr::W](opamp1_csr::W) writer structure"]
impl crate::Writable for OPAMP1_CSR {}
#[doc = "OPAMP1 control/status register"]
pub mod opamp1_csr;
#[doc = "OPAMP1 offset trimming register in normal mode\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [opamp1_otr](opamp1_otr) module"]
pub type OPAMP1_OTR = crate::Reg<u32, _OPAMP1_OTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _OPAMP1_OTR;
#[doc = "`read()` method returns [opamp1_otr::R](opamp1_otr::R) reader structure"]
impl crate::Readable for OPAMP1_OTR {}
#[doc = "`write(|w| ..)` method takes [opamp1_otr::W](opamp1_otr::W) writer structure"]
impl crate::Writable for OPAMP1_OTR {}
#[doc = "OPAMP1 offset trimming register in normal mode"]
pub mod opamp1_otr;
#[doc = "OPAMP1 offset trimming register in low-power mode\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [opamp1_lpotr](opamp1_lpotr) module"]
pub type OPAMP1_LPOTR = crate::Reg<u32, _OPAMP1_LPOTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _OPAMP1_LPOTR;
#[doc = "`read()` method returns [opamp1_lpotr::R](opamp1_lpotr::R) reader structure"]
impl crate::Readable for OPAMP1_LPOTR {}
#[doc = "`write(|w| ..)` method takes [opamp1_lpotr::W](opamp1_lpotr::W) writer structure"]
impl crate::Writable for OPAMP1_LPOTR {}
#[doc = "OPAMP1 offset trimming register in low-power mode"]
pub mod opamp1_lpotr;
#[doc = "OPAMP2 control/status register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [opamp2_csr](opamp2_csr) module"]
pub type OPAMP2_CSR = crate::Reg<u32, _OPAMP2_CSR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _OPAMP2_CSR;
#[doc = "`read()` method returns [opamp2_csr::R](opamp2_csr::R) reader structure"]
impl crate::Readable for OPAMP2_CSR {}
#[doc = "`write(|w| ..)` method takes [opamp2_csr::W](opamp2_csr::W) writer structure"]
impl crate::Writable for OPAMP2_CSR {}
#[doc = "OPAMP2 control/status register"]
pub mod opamp2_csr;
#[doc = "OPAMP2 offset trimming register in normal mode\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [opamp2_otr](opamp2_otr) module"]
pub type OPAMP2_OTR = crate::Reg<u32, _OPAMP2_OTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _OPAMP2_OTR;
#[doc = "`read()` method returns [opamp2_otr::R](opamp2_otr::R) reader structure"]
impl crate::Readable for OPAMP2_OTR {}
#[doc = "`write(|w| ..)` method takes [opamp2_otr::W](opamp2_otr::W) writer structure"]
impl crate::Writable for OPAMP2_OTR {}
#[doc = "OPAMP2 offset trimming register in normal mode"]
pub mod opamp2_otr;
#[doc = "OPAMP2 offset trimming register in low-power mode\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [opamp2_lpotr](opamp2_lpotr) module"]
pub type OPAMP2_LPOTR = crate::Reg<u32, _OPAMP2_LPOTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _OPAMP2_LPOTR;
#[doc = "`read()` method returns [opamp2_lpotr::R](opamp2_lpotr::R) reader structure"]
impl crate::Readable for OPAMP2_LPOTR {}
#[doc = "`write(|w| ..)` method takes [opamp2_lpotr::W](opamp2_lpotr::W) writer structure"]
impl crate::Writable for OPAMP2_LPOTR {}
#[doc = "OPAMP2 offset trimming register in low-power mode"]
pub mod opamp2_lpotr;
|
{{!-- Template: restrict `value_var` inside the generated loop nest. Exactly
      one of the `filter_ref` variants is expected to be present: `Inline`
      expands each rule in place, `Call` delegates to a generated
      `filter_<id>` function. Emitted output is Rust code, so the extra
      comment lines here only add insignificant whitespace. --}}
{{#>loop_nest loop_nest~}}
{{#with filter_ref.Inline~}}
{{#each rules~}}
{{>rule}}
trace!("inline filter restricts to {:?}", values);
{{/each~}}
{{/with~}}
{{#with filter_ref.Call~}}
let filter_res = {{choice}}::filter_{{id}}({{>choice.arg_names}}ir_instance, store);
{{value_var}}.restrict(filter_res);
{{/with}}
{{/loop_nest~}}
|
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fmt;
use std::io::{Cursor, Read};
use std::net::Ipv4Addr;
use byteorder::{NetworkEndian, ReadBytesExt};
use thiserror::Error;
pub use options::DhcpOption;
use super::id::Mac;
pub mod encode;
pub mod options;
/// Failures that can occur while decoding a BOOTP/DHCP packet.
#[derive(Error, Debug)]
pub enum Error {
    /// The buffer ended before the fixed header or an option was complete.
    #[error("packet truncated")]
    Truncated,
    /// Hardware-address length other than the 6 bytes this parser accepts.
    #[error("invalid hlen, expected {expected} got {got}")]
    InvalidHLen { expected: u8, got: u8 },
    /// The options area did not begin with the 99.130.83.99 magic cookie.
    #[error("invalid cookie: {0}.{1}.{2}.{3}")]
    InvalidCookie(u8, u8, u8, u8),
    /// Unknown BOOTP `op` value (only 1 = request, 2 = reply are accepted).
    #[error("invalid message type: {0}")]
    InvalidMessageType(u8),
    /// A single option's payload failed to decode; thiserror treats the
    /// field named `source` as the underlying error source.
    #[error("option type={tag} len={len} parse failed")]
    OptionParseFailed {
        tag: u8,
        len: u8,
        source: options::Error,
    },
}
/// BOOTP `op` header field: the direction of the message on the wire.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u8)]
pub enum BootpMessageType {
    // Discriminants match the on-wire encoding.
    Request = 1,
    Reply = 2,
}
impl TryFrom<u8> for BootpMessageType {
    type Error = u8;

    /// Decode the raw BOOTP `op` byte; an unrecognised value is handed back
    /// unchanged as the error.
    fn try_from(x: u8) -> Result<Self, Self::Error> {
        if x == 1 {
            Ok(Self::Request)
        } else if x == 2 {
            Ok(Self::Reply)
        } else {
            Err(x)
        }
    }
}
impl Into<u8> for BootpMessageType {
fn into(self) -> u8 {
self as u8
}
}
impl fmt::Display for BootpMessageType {
    /// Human-readable direction name ("request" / "reply").
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            Self::Request => "request",
            Self::Reply => "reply",
        };
        f.write_str(name)
    }
}
/// A decoded BOOTP/DHCP message (fixed header fields plus parsed options).
pub struct Packet {
    /// Direction of the message (`op` field): request or reply.
    pub bootp_message_type: BootpMessageType,
    /// Hardware address type (`htype`); not validated by the parser.
    pub htype: u8,
    /// Hardware address length; the parser requires this to be 6.
    pub hlen: u8,
    /// Client hardware (MAC) address, taken from the 16-byte `chaddr` field.
    pub mac: Mac,
    pub hops: u8,
    /// Transaction identifier.
    pub xid: u32,
    pub secs: u16,
    pub flags: u16,
    /// Client / "your" / server / relay IPv4 addresses from the fixed header.
    pub ciaddr: Ipv4Addr,
    pub yiaddr: Ipv4Addr,
    pub siaddr: Ipv4Addr,
    pub giaddr: Ipv4Addr,
    /// `sname` field; `None` when the on-wire field starts with a NUL byte.
    pub server_name: Option<String>,
    /// `file` field; `None` when the on-wire field starts with a NUL byte.
    pub boot_file_name: Option<String>,
    /// Parsed options keyed by tag; padding (0) and end (255) are skipped.
    pub options: BTreeMap<u8, DhcpOption>,
}
impl Packet {
pub fn parse(buf: &[u8]) -> Result<Self, Error> {
let mut cursor = Cursor::new(buf);
let bootp_message_type =
BootpMessageType::try_from(cursor.read_u8().map_err(|_| Error::Truncated)?)
.map_err(|x| Error::InvalidMessageType(x))?;
let htype = cursor.read_u8().map_err(|_| Error::Truncated)?;
let hlen = cursor.read_u8().map_err(|_| Error::Truncated)?;
if hlen != 6 {
return Err(Error::InvalidHLen {
expected: 6,
got: hlen,
});
}
let hops = cursor.read_u8().map_err(|_| Error::Truncated)?;
let xid = cursor
.read_u32::<NetworkEndian>()
.map_err(|_| Error::Truncated)?;
let secs = cursor
.read_u16::<NetworkEndian>()
.map_err(|_| Error::Truncated)?;
let flags = cursor
.read_u16::<NetworkEndian>()
.map_err(|_| Error::Truncated)?;
let mut ciaddr = [0u8; 4];
let mut yiaddr = [0u8; 4];
let mut siaddr = [0u8; 4];
let mut giaddr = [0u8; 4];
cursor
.read_exact(&mut ciaddr[..])
.map_err(|_| Error::Truncated)?;
cursor
.read_exact(&mut yiaddr[..])
.map_err(|_| Error::Truncated)?;
cursor
.read_exact(&mut siaddr[..])
.map_err(|_| Error::Truncated)?;
cursor
.read_exact(&mut giaddr[..])
.map_err(|_| Error::Truncated)?;
let mut hardware_address = [0u8; 16];
cursor
.read_exact(&mut hardware_address[..])
.map_err(|_| Error::Truncated)?;
let ciaddr = Ipv4Addr::from(ciaddr);
let yiaddr = Ipv4Addr::from(yiaddr);
let siaddr = Ipv4Addr::from(siaddr);
let giaddr = Ipv4Addr::from(giaddr);
let hardware_address = Mac::from(hardware_address);
let server_name = Self::parse_str(&mut cursor, 64)?;
let boot_file_name = Self::parse_str(&mut cursor, 128)?;
trace!("type = {}", bootp_message_type);
trace!("htype = {}", htype);
trace!("hlen = {}", hlen);
trace!("hops = {}", hops);
trace!("xid = {:x}", xid);
trace!("secs = {}", secs);
trace!("flags = {:x}", flags);
trace!("ciaddr = {}", ciaddr);
trace!("yiaddr = {}", yiaddr);
trace!("siaddr = {}", siaddr);
trace!("giaddr = {}", giaddr);
trace!("mac = {}", hardware_address);
trace!("server = {}", server_name.as_deref().unwrap_or("<NONE>"));
trace!("file = {}", boot_file_name.as_deref().unwrap_or("<NONE>"));
let options_len = buf.len() - cursor.position() as usize;
trace!("options length = {}", options_len);
let mut options: BTreeMap<u8, DhcpOption> = BTreeMap::new();
if options_len > 0 {
Self::parse_options(&mut cursor, &mut options, options_len)?;
}
Ok(Self {
bootp_message_type,
htype,
hlen,
mac: hardware_address,
hops,
xid,
secs,
flags,
ciaddr,
yiaddr,
siaddr,
giaddr,
server_name,
boot_file_name,
options,
})
}
fn parse_str(cursor: &mut Cursor<&[u8]>, len: usize) -> Result<Option<String>, Error> {
let raw = cursor
.get_ref()
.get(cursor.position() as usize..cursor.position() as usize + len)
.ok_or(Error::Truncated)?;
cursor.set_position(cursor.position() + len as u64);
if raw[0] == 0 {
return Ok(None);
}
let terminator_offset = raw
.iter()
.copied()
.enumerate()
.find(|(x, _)| *x == 0)
.map(|(i, _)| i)
.unwrap_or(len);
Ok(Some(
String::from_utf8_lossy(&raw[..terminator_offset]).to_string(),
))
}
fn parse_options(
cursor: &mut Cursor<&[u8]>,
options: &mut BTreeMap<u8, DhcpOption>,
mut left: usize,
) -> Result<(), Error> {
// options always start at offset 236 from packet start
debug_assert_eq!(cursor.position(), 236);
let mut cookie = [0u8; 4];
cursor
.read_exact(&mut cookie)
.map_err(|_| Error::Truncated)?;
if &cookie[..] != &[99, 130, 83, 99][..] {
return Err(Error::InvalidCookie(
cookie[0], cookie[1], cookie[2], cookie[3],
));
}
while left > 0 {
let tag = cursor.read_u8().map_err(|_| Error::Truncated)?;
// padding
if tag == 0 {
left -= 1;
continue;
}
// end of options field
if tag == 255 {
break;
}
let len = cursor.read_u8().map_err(|_| Error::Truncated)?;
left -= 2;
trace!("option tag={} len={}", tag, len);
assert!(len as usize <= left);
let data = &cursor.get_ref()
[cursor.position() as usize..cursor.position() as usize + len as usize];
cursor.set_position(cursor.position() + Into::<u64>::into(len));
left -= Into::<usize>::into(len);
options.insert(
tag,
DhcpOption::parse(tag, data).map_err(|source| Error::OptionParseFailed {
tag,
len,
source,
})?,
);
}
Ok(())
}
}
|
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::ty::is_type_diagnostic_item;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, NestedVisitorMap, Visitor};
use rustc_hir::{Arm, Expr, ExprKind, MatchSource, PatKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::symbol::SymbolStr;
use rustc_span::{sym, Span};
// Lint declaration: name, category, MSRV and user-facing docs consumed by
// clippy's lint-registration machinery.
declare_clippy_lint! {
    /// ### What it does
    /// Checks for `match` expressions modifying the case of a string with non-compliant arms
    ///
    /// ### Why is this bad?
    /// The arm is unreachable, which is likely a mistake
    ///
    /// ### Example
    /// ```rust
    /// # let text = "Foo";
    ///
    /// match &*text.to_ascii_lowercase() {
    ///     "foo" => {},
    ///     "Bar" => {},
    ///     _ => {},
    /// }
    /// ```
    /// Use instead:
    /// ```rust
    /// # let text = "Foo";
    ///
    /// match &*text.to_ascii_lowercase() {
    ///     "foo" => {},
    ///     "bar" => {},
    ///     _ => {},
    /// }
    /// ```
    #[clippy::version = "1.58.0"]
    pub MATCH_STR_CASE_MISMATCH,
    correctness,
    "creation of a case altering match expression with non-compliant arms"
}
// Boilerplate `LintPass` impl for the pass type driving this lint.
declare_lint_pass!(MatchStrCaseMismatch => [MATCH_STR_CASE_MISMATCH]);
/// Which case-normalising `str`/`String` method guards the match scrutinee.
// NOTE(review): variant naming is inconsistent (`AsciiLowerCase` vs
// `AsciiUppercase`); left as-is here because renaming would touch every
// use site in this file.
#[derive(Debug)]
enum CaseMethod {
    LowerCase,
    AsciiLowerCase,
    UpperCase,
    AsciiUppercase,
}
impl LateLintPass<'_> for MatchStrCaseMismatch {
    /// Entry point: fires on `match` expressions whose scrutinee is a `&str`
    /// produced somewhere inside the expression by a case-altering method.
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
        if_chain! {
            // Skip code expanded from macros defined outside this crate.
            if !in_external_macro(cx.tcx.sess, expr.span);
            if let ExprKind::Match(match_expr, arms, MatchSource::Normal) = expr.kind;
            // The scrutinee must be a reference to `str`.
            if let ty::Ref(_, ty, _) = cx.typeck_results().expr_ty(match_expr).kind();
            if let ty::Str = ty.kind();
            then {
                // Walk the scrutinee looking for a to_lowercase()/to_uppercase()
                // style call; the visitor records which one it found.
                let mut visitor = MatchExprVisitor {
                    cx,
                    case_method: None,
                };
                visitor.visit_expr(match_expr);
                if let Some(case_method) = visitor.case_method {
                    // Report the first arm literal whose case disagrees.
                    if let Some((bad_case_span, bad_case_str)) = verify_case(&case_method, arms) {
                        lint(cx, &case_method, bad_case_span, &bad_case_str);
                    }
                }
            }
        }
    }
}
/// Expression visitor that records the first case-altering method call found
/// inside a `match` scrutinee.
struct MatchExprVisitor<'a, 'tcx> {
    cx: &'a LateContext<'tcx>,
    // Set once a relevant call has been seen; stops further descent.
    case_method: Option<CaseMethod>,
}
impl<'a, 'tcx> Visitor<'tcx> for MatchExprVisitor<'a, 'tcx> {
    type Map = Map<'tcx>;

    /// This check never needs to visit nested bodies.
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::None
    }

    fn visit_expr(&mut self, ex: &'tcx Expr<'_>) {
        match ex.kind {
            // A single-receiver method call that alters case ends the search
            // (the guard records it); anything else is walked recursively.
            ExprKind::MethodCall(segment, _, [receiver], _)
                if self.case_altered(&*segment.ident.as_str(), receiver) => {},
            _ => walk_expr(self, ex),
        }
    }
}
impl<'a, 'tcx> MatchExprVisitor<'a, 'tcx> {
    /// Returns `true` (and records the method) when `segment_ident` names a
    /// case-altering call and the receiver is a `String` or `str`.
    fn case_altered(&mut self, segment_ident: &str, receiver: &Expr<'_>) -> bool {
        if let Some(case_method) = get_case_method(segment_ident) {
            // Peel references so both `String` and `&&str` receivers match.
            let ty = self.cx.typeck_results().expr_ty(receiver).peel_refs();
            if is_type_diagnostic_item(self.cx, ty, sym::String) || ty.kind() == &ty::Str {
                self.case_method = Some(case_method);
                return true;
            }
        }
        false
    }
}
/// Map a method name to the case normalisation it performs, if any.
fn get_case_method(segment_ident_str: &str) -> Option<CaseMethod> {
    let method = match segment_ident_str {
        "to_lowercase" => CaseMethod::LowerCase,
        "to_ascii_lowercase" => CaseMethod::AsciiLowerCase,
        "to_uppercase" => CaseMethod::UpperCase,
        "to_ascii_uppercase" => CaseMethod::AsciiUppercase,
        _ => return None,
    };
    Some(method)
}
/// Check every string-literal arm against the case `case_method` produces;
/// returns the span and text of the first mismatching arm, if any.
fn verify_case<'a>(case_method: &'a CaseMethod, arms: &'a [Arm<'_>]) -> Option<(Span, SymbolStr)> {
    // Predicate answering "is this string already in the expected case?".
    // NOTE(review): the Unicode variants compare only the first char of each
    // char's case mapping (`.next()`), so multi-char mappings are only
    // partially checked — presumably adequate for this lint; confirm upstream.
    let case_check = match case_method {
        CaseMethod::LowerCase => |input: &str| -> bool { input.chars().all(|c| c.to_lowercase().next() == Some(c)) },
        CaseMethod::AsciiLowerCase => |input: &str| -> bool { !input.chars().any(|c| c.is_ascii_uppercase()) },
        CaseMethod::UpperCase => |input: &str| -> bool { input.chars().all(|c| c.to_uppercase().next() == Some(c)) },
        CaseMethod::AsciiUppercase => |input: &str| -> bool { !input.chars().any(|c| c.is_ascii_lowercase()) },
    };
    for arm in arms {
        if_chain! {
            // Only arms whose pattern is a plain string literal are checkable.
            if let PatKind::Lit(Expr {
                kind: ExprKind::Lit(lit),
                ..
            }) = arm.pat.kind;
            if let LitKind::Str(symbol, _) = lit.node;
            let input = symbol.as_str();
            if !case_check(&input);
            then {
                return Some((lit.span, input));
            }
        }
    }
    None
}
/// Emit the diagnostic with a machine-applicable suggestion replacing the
/// offending arm literal with its correctly-cased form.
fn lint(cx: &LateContext<'_>, case_method: &CaseMethod, bad_case_span: Span, bad_case_str: &str) {
    // Pair each method with the transformation producing the fixed literal.
    let (method_str, suggestion) = match case_method {
        CaseMethod::LowerCase => ("to_lowercase", bad_case_str.to_lowercase()),
        CaseMethod::AsciiLowerCase => ("to_ascii_lowercase", bad_case_str.to_ascii_lowercase()),
        CaseMethod::UpperCase => ("to_uppercase", bad_case_str.to_uppercase()),
        CaseMethod::AsciiUppercase => ("to_ascii_uppercase", bad_case_str.to_ascii_uppercase()),
    };
    span_lint_and_sugg(
        cx,
        MATCH_STR_CASE_MISMATCH,
        bad_case_span,
        "this `match` arm has a differing case than its expression",
        &*format!("consider changing the case of this arm to respect `{}`", method_str),
        format!("\"{}\"", suggestion),
        Applicability::MachineApplicable,
    );
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.