text
stringlengths
8
4.13M
#![allow(clippy::comparison_chain)] #![allow(clippy::collapsible_if)] use std::cmp::Reverse; use std::cmp::{max, min}; use std::collections::{BTreeSet, HashMap, HashSet}; use std::fmt::Debug; use itertools::Itertools; use whiteread::parse_line; const ten97: usize = 1000_000_007; /// 2の逆元 mod ten97.割りたいときに使う const inv2ten97: u128 = 500_000_004; fn main() { let n: usize = parse_line().unwrap(); let cc: Vec<String> = parse_line().unwrap(); let cc: Vec<char> = cc.into_iter().map(|a| a.chars().collect_vec()[0]).collect(); let mut paths: Vec<Vec<usize>> = vec![vec![]; n + 1]; for _ in 0..n - 1 { let (a, b): (usize, usize) = parse_line().unwrap(); paths[a].push(b); paths[b].push(a); } let mut memo = vec![vec![0; 3]; n + 1]; let mut already = vec![false; n + 1]; already[1] = true; ki_dp(&cc, &paths, &mut already, &mut memo, 1); println!("{}", memo[1][2]); // dbg!(memo); } /// 各ノードが持つ部下の数のを求める(自分含む) /// indexは1始まり /// dp[x] = 1 + dp[y_1] + dp[y_2] + dp[y_3] ... /// Error: The argument 'file' cannot be empty. 
Received ''Error: Command failed: /// let mut memo = vec![0; n + 1]; /// let mut already = vec![false; n + 1]; /// already[0] = true; /// already[1] = true; /// ki_dp(&paths, &mut already, &mut memo, 1); fn ki_dp( cc: &Vec<char>, paths: &Vec<Vec<usize>>, already: &mut Vec<bool>, memo: &mut Vec<Vec<usize>>, now: usize, ) { // leaf if paths[now].iter().all(|p| already[*p]) { if cc[now - 1] == 'a' { memo[now][0] = 1; } else { memo[now][1] = 1; } return; } let mut child = vec![]; for &p in paths[now].iter() { if already[p] { continue; } child.push(p); already[p] = true; ki_dp(cc, paths, already, memo, p); } if cc[now - 1] == 'a' { let mut tmp = 1; for mut i in child.iter().map(|&p| memo[p][0] + memo[p][2]) { i %= ten97; tmp *= i; tmp %= ten97; } memo[now][0] = tmp; let mut tmp = 1; for mut i in child .iter() .map(|&p| memo[p][0] + memo[p][1] + 2 * memo[p][2]) { i %= ten97; tmp *= i; tmp %= ten97; } memo[now][2] = tmp + inv(memo[now][0]); memo[now][2] %= ten97; } else { let mut tmp = 1; for mut i in child.iter().map(|&p| memo[p][1] + memo[p][2]) { i %= ten97; tmp *= i; tmp %= ten97; } memo[now][1] = tmp; let mut tmp = 1; for mut i in child .iter() .map(|&p| memo[p][0] + memo[p][1] + 2 * memo[p][2]) { i %= ten97; tmp *= i; tmp %= ten97; } memo[now][2] = tmp + inv(memo[now][1]); memo[now][2] %= ten97; } } fn inv(a: usize) -> usize { let tmp = a / ten97; ((tmp + 1) * ten97 - a) % ten97 }
use crate::module::Module;

/// Errors raised while resolving module references into the MIR.
///
/// NOTE(review): the per-variant notes below are read from variant names and
/// payloads only — confirm against the resolver that constructs them.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub enum MirError {
    /// A `use` in `module` whose `path` could not be interpreted.
    UseWithInvalidPath { module: Module, path: String },
    /// A `use` path that navigates above the module-tree root.
    UseHasTooManyParentNavigations { module: Module, path: String },
    /// `path` did not resolve to any existing module from `module`.
    ModuleNotFound { module: Module, path: String },
    /// A `use` in `containing_module` that cannot be resolved statically.
    UseNotStaticallyResolvable { containing_module: Module },
    /// A dependency cycle between the modules named in `cycle`.
    ModuleHasCycle { cycle: Vec<String> },
}
//! Decoding of lines from a Make A Lisp (MAL) reference spec file.

pub mod checker;
pub mod parser;

/// A single decoded line in a Make A Lisp reference spec.
#[derive(Clone, Debug)]
pub enum MalTestingLine {
    /// Set all the following lines in the current file as optional or not.
    ///
    /// Optional functionality is not needed for self hosting.
    ToggleOptional(bool),
    /// Declare an exact input and the corresponding exact output after evaluation.
    InputShouldOutput(Vec<String>, String),
    /// Declare an exact input and the corresponding expected error
    /// to occur during evaluation.
    ///
    /// TODO: replace Result<T, String> with more granular error propagation
    /// (eg. failure crate) to better unit test against the spec
    InputShouldThrow(Vec<String>),
    /// (Cosmetic) Declare the current section name for the next lines.
    BeginSection(String),
}
pub mod builder; mod deserializer; mod headers; use crate::{ message::{Error, MessageDeserializer}, Event, }; use deserializer::Deserializer; pub use headers::Headers; mod serializer; pub use builder::Builder; pub use serializer::Serializer; pub static SPEC_VERSION_HEADER: &str = "ce-specversion"; /// Turn a pile of HTTP headers and a body into a CloudEvent pub fn to_event<'a, T: Headers<'a>>( headers: &'a T, body: Vec<u8>, ) -> std::result::Result<Event, Error> { MessageDeserializer::into_event(Deserializer::new(headers, body)) } pub fn header_prefix(name: &str) -> String { super::header_prefix("ce-", name) }
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use crate::errors::{Error, ErrorKind};
use failure::ResultExt;
use fidl_fuchsia_io;
use fidl_fuchsia_sys::{LauncherMarker, LauncherProxy};
use fuchsia_async::{
    self as fasync,
    futures::{future::BoxFuture, FutureExt},
};
use fuchsia_component::client::{connect_to_service, launch};
use fuchsia_merkle::Hash;
use fuchsia_syslog::{fx_log_err, fx_log_info};
use fuchsia_zircon as zx;
#[cfg(test)]
use proptest_derive::Arbitrary;

// Component URL of the updater launched to perform the actual update.
const SYSTEM_UPDATER_RESOURCE_URL: &str =
    "fuchsia-pkg://fuchsia.com/amber#meta/system_updater.cmx";

/// Who requested the update; rendered into the `-initiator=` flag below.
#[derive(Debug, Clone, Copy)]
#[cfg_attr(test, derive(Arbitrary))]
pub enum Initiator {
    Manual,
    Automatic,
}

impl std::fmt::Display for Initiator {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match *self {
            Initiator::Manual => write!(f, "manual"),
            Initiator::Automatic => write!(f, "automatic"),
        }
    }
}

// On success, system will reboot before this function returns
/// Wires up the real launcher, service connector and clock, then delegates
/// to `apply_system_update_impl`.
pub async fn apply_system_update(
    current_system_image: Hash,
    latest_system_image: Hash,
    initiator: Initiator,
) -> Result<(), Error> {
    let launcher = connect_to_service::<LauncherMarker>().context(ErrorKind::ConnectToLauncher)?;
    let mut component_runner = RealComponentRunner { launcher_proxy: launcher };
    apply_system_update_impl(
        current_system_image,
        latest_system_image,
        &RealServiceConnector,
        &mut component_runner,
        initiator,
        &RealTimeSource,
    )
    .await
}

// For mocking
trait ServiceConnector {
    fn service_connect(&self, service_path: &str, channel: zx::Channel) -> Result<(), zx::Status>;
}

struct RealServiceConnector;

impl ServiceConnector for RealServiceConnector {
    fn service_connect(&self, service_path: &str, channel: zx::Channel) -> Result<(), zx::Status> {
        fdio::service_connect(service_path, channel)
    }
}

// For mocking
trait ComponentRunner {
    fn run_until_exit(
        &mut self,
        url: String,
        arguments: Option<Vec<String>>,
    ) -> BoxFuture<'_, Result<(), Error>>;
}

struct RealComponentRunner {
    launcher_proxy: LauncherProxy,
}

impl ComponentRunner for RealComponentRunner {
    // Launches `url` and resolves when the component exits; a failed launch,
    // a failed wait, and a non-zero exit each map to a distinct ErrorKind.
    fn run_until_exit(
        &mut self,
        url: String,
        arguments: Option<Vec<String>>,
    ) -> BoxFuture<'_, Result<(), Error>> {
        let app_res = launch(&self.launcher_proxy, url, arguments);
        async move {
            let mut app = app_res.context(ErrorKind::LaunchSystemUpdater)?;
            let exit_status = app.wait().await.context(ErrorKind::WaitForSystemUpdater)?;
            exit_status.ok().context(ErrorKind::SystemUpdaterFailed)?;
            Ok(())
        }
        .boxed()
    }
}

// For mocking
trait TimeSource {
    fn get_nanos(&self) -> i64;
}

struct RealTimeSource;

impl TimeSource for RealTimeSource {
    fn get_nanos(&self) -> i64 {
        zx::Time::get(zx::ClockId::UTC).into_nanos()
    }
}

/// Best-effort pkgfs GC followed by one run of the system updater component.
/// Note: returns `Err(SystemUpdaterFinished)` even when the updater component
/// exits successfully — on success the device is expected to reboot before
/// this is reached (see the comment on `apply_system_update`).
async fn apply_system_update_impl<'a>(
    current_system_image: Hash,
    latest_system_image: Hash,
    service_connector: &'a impl ServiceConnector,
    component_runner: &'a mut impl ComponentRunner,
    initiator: Initiator,
    time_source: &'a impl TimeSource,
) -> Result<(), Error> {
    // GC failure is logged but deliberately does not abort the update.
    if let Err(err) = pkgfs_gc(service_connector).await {
        fx_log_err!("failed to garbage collect pkgfs, will still attempt system update: {}", err);
    }
    fx_log_info!("starting system_updater");
    let fut = component_runner.run_until_exit(
        SYSTEM_UPDATER_RESOURCE_URL.to_string(),
        Some(vec![
            format!("-initiator={}", initiator),
            format!("-start={}", time_source.get_nanos()),
            format!("-source={}", current_system_image),
            format!("-target={}", latest_system_image),
        ]),
    );
    fut.await?;
    Err(ErrorKind::SystemUpdaterFinished)?
}

/// Triggers a pkgfs garbage collection by unlinking the synthetic
/// "garbage" entry under /pkgfs/ctl.
async fn pkgfs_gc(service_connector: &impl ServiceConnector) -> Result<(), Error> {
    fx_log_info!("triggering pkgfs GC");
    let (dir_end, dir_server_end) =
        fidl::endpoints::create_endpoints::<fidl_fuchsia_io::DirectoryMarker>()
            .context(ErrorKind::PkgfsGc)?;
    service_connector
        .service_connect("/pkgfs/ctl", dir_server_end.into_channel())
        .context(ErrorKind::PkgfsGc)?;
    let dir_proxy = fidl_fuchsia_io::DirectoryProxy::new(
        fasync::Channel::from_channel(dir_end.into_channel()).context(ErrorKind::PkgfsGc)?,
    );
    let status = dir_proxy.unlink("garbage").await.context(ErrorKind::PkgfsGc)?;
    zx::Status::ok(status).context(ErrorKind::PkgfsGc)?;
    Ok(())
}

#[cfg(test)]
mod test_apply_system_update_impl {
    use super::*;
    use fuchsia_async::futures::future;
    use matches::assert_matches;
    use proptest::prelude::*;
    use std::fs;

    const ACTIVE_SYSTEM_IMAGE_MERKLE: [u8; 32] = [0u8; 32];
    const NEW_SYSTEM_IMAGE_MERKLE: [u8; 32] = [1u8; 32];

    // ServiceConnector backed by a temp dir so "/pkgfs/..." paths resolve to
    // local files instead of real services.
    struct TempDirServiceConnector {
        temp_dir: tempfile::TempDir,
    }

    impl TempDirServiceConnector {
        fn new() -> TempDirServiceConnector {
            TempDirServiceConnector { temp_dir: tempfile::tempdir().expect("create temp dir") }
        }

        // Variant whose temp dir already contains pkgfs/ctl/garbage.
        fn new_with_pkgfs_garbage() -> TempDirServiceConnector {
            let service_connector = Self::new();
            let pkgfs = service_connector.temp_dir.path().join("pkgfs");
            fs::create_dir(&pkgfs).expect("create pkgfs dir");
            fs::create_dir(pkgfs.join("ctl")).expect("create pkgfs/ctl dir");
            fs::File::create(pkgfs.join("ctl/garbage")).expect("create garbage file");
            service_connector
        }
    }

    impl TempDirServiceConnector {
        fn has_garbage_file(&self) -> bool {
            self.temp_dir.path().join("pkgfs/ctl/garbage").exists()
        }
    }

    impl ServiceConnector for TempDirServiceConnector {
        fn service_connect(
            &self,
            service_path: &str,
            channel: zx::Channel,
        ) -> Result<(), zx::Status> {
            // Strip the leading '/' so the path resolves inside the temp dir.
            fdio::service_connect(
                self.temp_dir.path().join(&service_path[1..]).to_str().expect("paths are utf8"),
                channel,
            )
        }
    }

    // Runner whose launches always "succeed" immediately.
    struct DoNothingComponentRunner;

    impl ComponentRunner for DoNothingComponentRunner {
        fn run_until_exit(
            &mut self,
            _url: String,
            _arguments: Option<Vec<String>>,
        ) -> BoxFuture<'_, Result<(), Error>> {
            future::ok(()).boxed()
        }
    }

    // Runner that records whether it was invoked at all.
    struct WasCalledComponentRunner {
        was_called: bool,
    }

    impl ComponentRunner for WasCalledComponentRunner {
        fn run_until_exit(
            &mut self,
            _url: String,
            _arguments: Option<Vec<String>>,
        ) -> BoxFuture<'_, Result<(), Error>> {
            self.was_called = true;
            future::ok(()).boxed()
        }
    }

    // Deterministic clock for the "-start=" argument.
    struct FakeTimeSource {
        now: i64,
    }

    impl TimeSource for FakeTimeSource {
        fn get_nanos(&self) -> i64 {
            self.now
        }
    }

    #[fasync::run_singlethreaded(test)]
    async fn test_trigger_pkgfs_gc_if_update_available() {
        let service_connector = TempDirServiceConnector::new_with_pkgfs_garbage();
        let mut component_runner = DoNothingComponentRunner;
        let time_source = FakeTimeSource { now: 0 };
        assert!(service_connector.has_garbage_file());
        let result = apply_system_update_impl(
            ACTIVE_SYSTEM_IMAGE_MERKLE.into(),
            NEW_SYSTEM_IMAGE_MERKLE.into(),
            &service_connector,
            &mut component_runner,
            Initiator::Manual,
            &time_source,
        )
        .await;
        assert_matches!(result.map_err(|e| e.kind()), Err(ErrorKind::SystemUpdaterFinished));
        // GC must have removed the garbage file.
        assert!(!service_connector.has_garbage_file());
    }

    #[fasync::run_singlethreaded(test)]
    async fn test_launch_system_updater_if_update_available() {
        let service_connector = TempDirServiceConnector::new();
        let mut component_runner = WasCalledComponentRunner { was_called: false };
        let time_source = FakeTimeSource { now: 0 };
        let result = apply_system_update_impl(
            ACTIVE_SYSTEM_IMAGE_MERKLE.into(),
            NEW_SYSTEM_IMAGE_MERKLE.into(),
            &service_connector,
            &mut component_runner,
            Initiator::Manual,
            &time_source,
        )
        .await;
        assert_matches!(result.map_err(|e| e.kind()), Err(ErrorKind::SystemUpdaterFinished));
        assert!(component_runner.was_called);
    }

    // Same as above but the connector has no pkgfs dir, so GC fails;
    // the updater must still be launched.
    #[fasync::run_singlethreaded(test)]
    async fn test_launch_system_updater_even_if_gc_fails() {
        let service_connector = TempDirServiceConnector::new();
        let mut component_runner = WasCalledComponentRunner { was_called: false };
        let time_source = FakeTimeSource { now: 0 };
        let result = apply_system_update_impl(
            ACTIVE_SYSTEM_IMAGE_MERKLE.into(),
            NEW_SYSTEM_IMAGE_MERKLE.into(),
            &service_connector,
            &mut component_runner,
            Initiator::Manual,
            &time_source,
        )
        .await;
        assert_matches!(result.map_err(|e| e.kind()), Err(ErrorKind::SystemUpdaterFinished));
        assert!(component_runner.was_called);
    }

    proptest! {
        #[test]
        fn test_values_passed_through_to_component_launcher(
            initiator: Initiator,
            start_time in proptest::num::i64::ANY,
            source_merkle in "[A-Fa-f0-9]{64}",
            target_merkle in "[A-Fa-f0-9]{64}")
        {
            prop_assume!(source_merkle != target_merkle);

            #[derive(Debug, PartialEq, Eq)]
            struct Args {
                url: String,
                arguments: Option<Vec<String>>,
            }

            // Runner that records every (url, arguments) pair it is given.
            struct ArgumentCapturingComponentRunner {
                captured_args: Vec<Args>,
            }

            impl ComponentRunner for ArgumentCapturingComponentRunner {
                fn run_until_exit(
                    &mut self,
                    url: String,
                    arguments: Option<Vec<String>>,
                ) -> BoxFuture<'_, Result<(), Error>> {
                    self.captured_args.push(Args { url, arguments });
                    future::ok(()).boxed()
                }
            }

            let service_connector = TempDirServiceConnector::new();
            let mut component_runner =
                ArgumentCapturingComponentRunner { captured_args: vec![] };
            let time_source = FakeTimeSource { now: start_time };

            let mut executor = fasync::Executor::new().expect("create executor in test");
            let result = executor.run_singlethreaded(apply_system_update_impl(
                source_merkle.parse().expect("source merkle string literal"),
                target_merkle.parse().expect("target merkle string literal"),
                &service_connector,
                &mut component_runner,
                initiator,
                &time_source,
            ));

            prop_assert!(result.is_err());
            prop_assert_eq!(
                result.err().unwrap().kind(),
                ErrorKind::SystemUpdaterFinished
            );
            // Merkle hashes are lower-cased by Hash's Display impl.
            prop_assert_eq!(
                component_runner.captured_args,
                vec![Args {
                    url: SYSTEM_UPDATER_RESOURCE_URL.to_string(),
                    arguments: Some(vec![
                        format!("-initiator={}", initiator),
                        format!("-start={}", start_time),
                        format!("-source={}", source_merkle.to_lowercase()),
                        format!("-target={}", target_merkle.to_lowercase()),
                    ])
                }]
            );
        }
    }
}
// Integration tests for RealServiceConnector against a real temp directory.
#[cfg(test)]
mod test_real_service_connector {
    use super::*;
    use matches::assert_matches;
    use std::fs;

    #[fasync::run_singlethreaded(test)]
    async fn test_connect_to_directory_and_unlink_file() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let file_name = "the-file";
        let file_path = dir.path().join(file_name);
        fs::File::create(&file_path).expect("create file");
        let (dir_end, dir_server_end) =
            fidl::endpoints::create_endpoints::<fidl_fuchsia_io::DirectoryMarker>()
                .expect("create endpoints");
        RealServiceConnector
            .service_connect(
                dir.path().to_str().expect("paths are utf8"),
                dir_server_end.into_channel(),
            )
            .expect("service_connect");
        let dir_proxy = fidl_fuchsia_io::DirectoryProxy::new(
            fasync::Channel::from_channel(dir_end.into_channel()).expect("create async channel"),
        );
        assert!(file_path.exists());
        // Unlink through the FIDL directory proxy and verify on-disk effect.
        let status = dir_proxy.unlink(file_name).await.expect("unlink the file fidl");
        zx::Status::ok(status).expect("unlink the file");
        assert!(!file_path.exists());
    }

    #[fasync::run_singlethreaded(test)]
    async fn test_connect_to_missing_directory_errors() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let (dir_end, dir_server_end) =
            fidl::endpoints::create_endpoints::<fidl_fuchsia_io::DirectoryMarker>()
                .expect("create endpoints");
        // service_connect itself succeeds; the failure surfaces as a closed
        // channel on the first proxy call.
        RealServiceConnector
            .service_connect(
                dir.path().join("non-existent-directory").to_str().expect("paths are utf8"),
                dir_server_end.into_channel(),
            )
            .expect("service_connect");
        let dir_proxy = fidl_fuchsia_io::DirectoryProxy::new(
            fasync::Channel::from_channel(dir_end.into_channel()).expect("create async channel"),
        );
        let read_dirents_res = dir_proxy
            .read_dirents(1000 /*size shouldn't matter, as this should immediately fail*/)
            .await;
        assert_matches!(
            read_dirents_res,
            Err(e) if e.is_closed()
        );
    }
}

// Integration tests for RealComponentRunner against a real test component.
#[cfg(test)]
mod test_real_component_runner {
    use super::*;

    const TEST_SHELL_COMMAND_RESOURCE_URL: &str =
        "fuchsia-pkg://fuchsia.com/system-update-checker-tests/0#meta/test-shell-command.cmx";

    #[fasync::run_singlethreaded(test)]
    async fn test_run_a_component_that_exits_0() {
        let launcher_proxy = connect_to_service::<LauncherMarker>().expect("connect to launcher");
        let mut runner = RealComponentRunner { launcher_proxy };
        let run_res = runner
            .run_until_exit(
                TEST_SHELL_COMMAND_RESOURCE_URL.to_string(),
                Some(vec!["!".to_string()]),
            )
            .await;
        assert!(run_res.is_ok(), "{:?}", run_res.err().unwrap());
    }

    #[fasync::run_singlethreaded(test)]
    async fn test_run_a_component_that_exits_1() {
        let launcher_proxy = connect_to_service::<LauncherMarker>().expect("connect to launcher");
        let mut runner = RealComponentRunner { launcher_proxy };
        // No arguments: the test component is expected to exit non-zero.
        let run_res =
            runner.run_until_exit(TEST_SHELL_COMMAND_RESOURCE_URL.to_string(), Some(vec![])).await;
        assert_eq!(run_res.err().expect("run should fail").kind(), ErrorKind::SystemUpdaterFailed);
    }
}
/// Builds a type that implements [List](crate::list::List).
#[macro_export]
macro_rules! List {
    // Empty list: the Nil type.
    [] => { $crate::list::Nil };
    // Cons the first type onto the list type built from the remaining types.
    [$name:ty $(, $names:ty)* $(,)?] => { $crate::list::Cons<$name, $crate::List![$($names),*]> };
}

/// Value-level counterpart of `List!`: builds a `Cons`/`Nil` value.
#[macro_export]
macro_rules! list {
    [] => { $crate::list::Nil };
    [$name:expr $(, $names:expr)* $(,)?] => { $crate::list::Cons { head: $name, tail: $crate::list![$($names),*] } };
}

/// Builds a type that implements [List](crate::list::List) with extra appending list.
#[macro_export]
macro_rules! PrependList {
    // Nothing left to prepend: the result is the tail type itself.
    [; $tail:ty] => { $tail };
    [$name:ty $(, $names:ty)* $(,)?; $tail:ty] => { $crate::list::Cons<$name, $crate::PrependList![$($names),*; $tail]> };
}

/// Value-level counterpart of `PrependList!`.
#[macro_export]
macro_rules! prepend_list {
    [; $tail:expr] => { $tail };
    [$name:expr $(, $names:expr)* $(,)?; $tail:expr] => { $crate::list::Cons { head: $name, tail: $crate::prepend_list![$($names),*; $tail] } };
}

/// Like `List!` but with parenthesised (tuple-style) invocation syntax.
#[macro_export]
macro_rules! ListFromTuple {
    () => { $crate::list::Nil };
    ($head:ty $(, $elems:ty)* $(,)?) => { $crate::list::Cons<$head, $crate::ListFromTuple!($($elems),*)> };
}

/// Value-level counterpart of `ListFromTuple!`.
#[macro_export]
macro_rules! list_from_tuple {
    () => { $crate::list::Nil };
    ($head:expr $(, $elems:expr)* $(,)?) => { $crate::list::Cons { head: $head, tail: $crate::list_from_tuple!($($elems),*) } };
}

#[cfg(test)]
mod tests {
    use crate::control::SameOp;
    use crate::list::{Cons, Nil};

    struct A(usize);
    struct B(String);
    struct C(bool);
    struct D([u8; 2]);
    enum E {
        E1,
        E2,
    }

    #[test]
    fn list_macros() {
        // NOTE(review): these are compile-time checks — the `let` only
        // type-checks when both SameOp parameters are the same type.
        let _: SameOp<List![A, B, C], Cons<A, Cons<B, Cons<C, Nil>>>> = ();
        let _: SameOp<
            PrependList![D, E; List![A, B, C]],
            Cons<D, Cons<E, Cons<A, Cons<B, Cons<C, Nil>>>>>,
        > = ();
        {
            let a = A(2);
            let b = B("text".into());
            let c = C(false);
            let _: List![A, B, C] = list![A(3), B("string".into()), C(true)];
            let _: List![A, B, C] = list![a, b, c];
        }
        {
            let tail: List![A, B, C] = list![A(3), B("string".into()), C(true)];
            let _: PrependList![D, E; List![A, B, C]] = prepend_list![D([1, 2]), E::E1; tail];
        }
        {
            let tail: List![A, B, C] = list![A(3), B("string".into()), C(true)];
            let d = D([3, 4]);
            let e = E::E2;
            let _: PrependList![D, E; List![A, B, C]] = prepend_list![d, e; tail];
        }
    }
}
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use common_meta_app::storage::StorageFsConfig;
use common_meta_app::storage::StorageGcsConfig;
use common_meta_app::storage::StorageOssConfig;
use common_meta_app::storage::StorageS3Config;
use common_meta_app::storage::StorageWebhdfsConfig;
use common_protos::pb;

use crate::reader_check_msg;
use crate::FromToProto;
use crate::Incompatible;
use crate::MIN_READER_VER;
use crate::VER;

// Protobuf <-> domain conversion for the S3 storage config.
// Every from_pb first rejects messages whose version is incompatible with
// this reader, then maps fields one-to-one.
impl FromToProto for StorageS3Config {
    type PB = pb::S3StorageConfig;

    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.version
    }

    fn from_pb(p: pb::S3StorageConfig) -> Result<Self, Incompatible>
    where Self: Sized {
        reader_check_msg(p.version, p.min_reader_ver)?;
        Ok(StorageS3Config {
            region: p.region,
            endpoint_url: p.endpoint_url,
            access_key_id: p.access_key_id,
            secret_access_key: p.secret_access_key,
            security_token: p.security_token,
            bucket: p.bucket,
            root: p.root,
            master_key: p.master_key,
            disable_credential_loader: p.disable_credential_loader,
            enable_virtual_host_style: p.enable_virtual_host_style,
            role_arn: p.role_arn,
            external_id: p.external_id,
        })
    }

    fn to_pb(&self) -> Result<pb::S3StorageConfig, Incompatible> {
        Ok(pb::S3StorageConfig {
            version: VER,
            min_reader_ver: MIN_READER_VER,
            region: self.region.clone(),
            endpoint_url: self.endpoint_url.clone(),
            access_key_id: self.access_key_id.clone(),
            secret_access_key: self.secret_access_key.clone(),
            security_token: self.security_token.clone(),
            bucket: self.bucket.clone(),
            root: self.root.clone(),
            master_key: self.master_key.clone(),
            disable_credential_loader: self.disable_credential_loader,
            enable_virtual_host_style: self.enable_virtual_host_style,
            role_arn: self.role_arn.clone(),
            external_id: self.external_id.clone(),
        })
    }
}

// Protobuf <-> domain conversion for the GCS storage config.
impl FromToProto for StorageGcsConfig {
    type PB = pb::GcsStorageConfig;

    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.version
    }

    fn from_pb(p: Self::PB) -> Result<Self, Incompatible>
    where Self: Sized {
        reader_check_msg(p.version, p.min_reader_ver)?;
        Ok(StorageGcsConfig {
            credential: p.credential,
            endpoint_url: p.endpoint_url,
            bucket: p.bucket,
            root: p.root,
        })
    }

    fn to_pb(&self) -> Result<Self::PB, Incompatible> {
        Ok(pb::GcsStorageConfig {
            version: VER,
            min_reader_ver: MIN_READER_VER,
            credential: self.credential.clone(),
            endpoint_url: self.endpoint_url.clone(),
            bucket: self.bucket.clone(),
            root: self.root.clone(),
        })
    }
}

// Protobuf <-> domain conversion for the local-filesystem storage config.
impl FromToProto for StorageFsConfig {
    type PB = pb::FsStorageConfig;

    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.version
    }

    fn from_pb(p: pb::FsStorageConfig) -> Result<Self, Incompatible>
    where Self: Sized {
        reader_check_msg(p.version, p.min_reader_ver)?;
        Ok(StorageFsConfig { root: p.root })
    }

    fn to_pb(&self) -> Result<pb::FsStorageConfig, Incompatible> {
        Ok(pb::FsStorageConfig {
            version: VER,
            min_reader_ver: MIN_READER_VER,
            root: self.root.clone(),
        })
    }
}

// Protobuf <-> domain conversion for the OSS storage config.
impl FromToProto for StorageOssConfig {
    type PB = pb::OssStorageConfig;

    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.version
    }

    fn from_pb(p: pb::OssStorageConfig) -> Result<Self, Incompatible>
    where Self: Sized {
        reader_check_msg(p.version, p.min_reader_ver)?;
        Ok(StorageOssConfig {
            endpoint_url: p.endpoint_url,
            // Not carried in the protobuf message; always restored as empty.
            presign_endpoint_url: "".to_string(),
            bucket: p.bucket,
            root: p.root,
            access_key_id: p.access_key_id,
            access_key_secret: p.access_key_secret,
        })
    }

    fn to_pb(&self) -> Result<pb::OssStorageConfig, Incompatible> {
        Ok(pb::OssStorageConfig {
            version: VER,
            min_reader_ver: MIN_READER_VER,
            endpoint_url: self.endpoint_url.clone(),
            bucket: self.bucket.clone(),
            root: self.root.clone(),
            access_key_id: self.access_key_id.clone(),
            access_key_secret: self.access_key_secret.clone(),
        })
    }
}

// Protobuf <-> domain conversion for the WebHDFS storage config.
impl FromToProto for StorageWebhdfsConfig {
    type PB = pb::WebhdfsStorageConfig;

    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.version
    }

    fn from_pb(p: Self::PB) -> Result<Self, Incompatible>
    where Self: Sized {
        reader_check_msg(p.version, p.min_reader_ver)?;
        Ok(StorageWebhdfsConfig {
            endpoint_url: p.endpoint_url,
            root: p.root,
            delegation: p.delegation,
        })
    }

    fn to_pb(&self) -> Result<pb::WebhdfsStorageConfig, Incompatible> {
        Ok(pb::WebhdfsStorageConfig {
            version: VER,
            min_reader_ver: MIN_READER_VER,
            endpoint_url: self.endpoint_url.clone(),
            root: self.root.clone(),
            delegation: self.delegation.clone(),
            username: String::new(), // reserved for future use
            password: String::new(), // reserved for future use
        })
    }
}
/// RGB colour as a tuple struct.
struct Color(u8, u8, u8);

/// Newtype wrapper around a distance in kilometres.
struct Kilometers(i32);

enum _Day {
    Sunday,
    Monday,
    Tuesday,
    Wednesday,
    Thursday,
    Friday,
    Saturday,
}

/// Three kinds of form-submission feedback.
enum FlashMessage {
    Success,
    Warning { category: i32, message: String },
    Error(String),
}

fn main() {
    // Destructure tuple structs straight into their components.
    let Color(r, g, b) = Color(0, 0, 0);
    println!("Black = rgb({}, {}, {})", r, g, b);

    let Kilometers(distance_in_km) = Kilometers(20);
    println!("The distance: {} km", distance_in_km);

    // Print one message of each kind, in the same order as before.
    let statuses = vec![
        FlashMessage::Success,
        FlashMessage::Warning { category: 2, message: String::from("Field X is required") },
        FlashMessage::Error(String::from("Connection Error")),
    ];
    for status in statuses {
        print_flash_message(status);
    }
}

/// Renders a FlashMessage to stdout, one line per message.
fn print_flash_message(m: FlashMessage) {
    let line = match m {
        FlashMessage::Success => String::from("Form Submitted correctly"),
        FlashMessage::Warning { category, message } => {
            format!("Warning : {} - {}", category, message)
        }
        FlashMessage::Error(msg) => format!("Error : {}", msg),
    };
    println!("{}", line);
}
#[cfg(feature = "std")]
use fslock::LockFile;
#[cfg(feature = "std")]
use std::{env, io, io::Read, process};

/// Locks the file named by the single CLI argument, then holds the lock
/// until one byte arrives on stdin (or stdin reaches EOF).
#[cfg(feature = "std")]
fn main() -> Result<(), fslock::Error> {
    // Exactly one argument (after the program name) is required.
    let cli_args: Vec<String> = env::args().skip(1).collect();
    if cli_args.len() != 1 {
        eprintln!("Expected one argument");
        process::exit(1);
    }
    let mut lockfile = LockFile::open(&cli_args[0])?;
    lockfile.lock()?;
    // Block while holding the lock.
    io::stdin().read(&mut [0; 1])?;
    Ok(())
}

// Without std there is nothing to do.
#[cfg(not(feature = "std"))]
fn main() {}
use std::fmt;
use std::io;
use std::pin::Pin;

use actix_web::web::Bytes;
use tokio::io::{AsyncWrite, AsyncWriteExt};
use tokio_stream::{Stream, StreamExt};

use crate::error::Error;

/// A stream of MPEG-TS byte chunks tagged with an `id` (used for logging),
/// optionally restricted to a byte `range` and flagged as decoded or not.
#[cfg_attr(test, derive(Debug))]
pub struct MpegTsStream<T, S> {
    id: T,
    stream: S,
    range: Option<MpegTsStreamRange>,
    decoded: bool,
}

impl<T, S> MpegTsStream<T, S> {
    /// Wraps `stream` with no range restriction; not decoded.
    pub fn new(id: T, stream: S) -> Self {
        MpegTsStream {
            id,
            stream,
            decoded: false,
            range: None,
        }
    }

    /// Like `new`, but records the byte range this stream covers.
    pub fn with_range(id: T, stream: S, range: MpegTsStreamRange) -> Self {
        MpegTsStream {
            id,
            stream,
            decoded: false,
            range: Some(range),
        }
    }

    /// Builder-style: marks the stream as decoded and returns it.
    pub fn decoded(mut self) -> Self {
        self.decoded = true;
        self
    }

    pub fn is_decoded(&self) -> bool {
        self.decoded
    }

    pub fn range(&self) -> Option<MpegTsStreamRange> {
        self.range.clone()
    }
}

impl<T, S> MpegTsStream<T, S>
where
    T: Clone,
{
    pub fn id(&self) -> T {
        self.id.clone()
    }
}

impl<T, S> MpegTsStream<T, S>
where
    T: fmt::Display + Clone + Unpin,
    S: Stream<Item = io::Result<Bytes>> + Unpin,
{
    /// Copies every chunk into `writer` via the free function `pipe`
    /// defined later in this file.
    pub async fn pipe<W>(self, writer: W)
    where
        W: AsyncWrite + Unpin,
    {
        pipe(self, writer).await;
    }
}

// Stream impl simply forwards polling to the wrapped stream.
impl<T, S> Stream for MpegTsStream<T, S>
where
    T: Unpin,
    S: Stream + Unpin,
{
    type Item = S::Item;

    fn poll_next(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context
    ) -> std::task::Poll<Option<Self::Item>> {
        Pin::new(&mut self.stream).poll_next(cx)
    }
}

/// Inclusive byte range `[first, last]`, with the total `size` either known
/// ("bound") or unknown ("unbound").
#[derive(Clone)]
#[cfg_attr(test, derive(Debug))]
pub struct MpegTsStreamRange {
    pub first: u64,
    pub last: u64,
    pub size: Option<u64>,
}

impl MpegTsStreamRange {
    /// Range starting at `first` within a known total `size`.
    /// Errors: NoContent when `size == 0`; OutOfRange when `first >= size`.
    pub fn bound(first: u64, size: u64) -> Result<Self, Error> {
        if size == 0 {
            return Err(Error::NoContent);
        }
        if first >= size {
            return Err(Error::OutOfRange);
        }
        Ok(Self::new(first, size - 1, Some(size)))
    }

    /// Same validation as `bound`, but the stored range has no total size
    /// (Content-Range will end in "/*").
    pub fn unbound(first: u64, size: u64) -> Result<Self, Error> {
        if size == 0 {
            return Err(Error::NoContent);
        }
        if first >= size {
            return Err(Error::OutOfRange);
        }
        Ok(Self::new(first, size - 1, None))
    }

    /// True unless the range covers the whole known size; always true when
    /// the total size is unknown.
    pub fn is_partial(&self) -> bool {
        if let Some(size) = self.size {
            self.first != 0 || self.last + 1 != size
        } else {
            true
        }
    }

    /// Number of bytes in the (inclusive) range.
    pub fn bytes(&self) -> u64 {
        self.last - self.first + 1
    }

    /// Value suitable for an HTTP Content-Range header.
    pub fn make_content_range(&self) -> String {
        if let Some(size) = self.size {
            format!("bytes {}-{}/{}", self.first, self.last, size)
        } else {
            format!("bytes {}-{}/*", self.first, self.last)
        }
    }

    fn new(first: u64, last: u64, size: Option<u64>) -> Self {
        MpegTsStreamRange {
            first,
            last,
            size,
        }
    }
}

// terminator
//
// A terminator is attached on the output-side endpoint of an MPEG-TS packets
// filtering pipeline in order to shutting down streaming quickly when a HTTP
// transaction ends.
//
// There is a delay from the HTTP transaction end to the tuner release when
// using filters. On some environments, the delay is about 40ms. On those
// environments, the next streaming request may be processed before the tuner is
// released.
//
// It's impossible to eliminate the delay completely, but it's possible to
// reduce the delay as much as possible.
//
// See a discussion in Japanese on:
// https://github.com/mirakc/mirakc/issues/4#issuecomment-583818912.
/// Stream adapter that holds a stop trigger alongside the inner stream —
/// presumably so dropping the adapter drops the trigger and shuts the
/// upstream pipeline down promptly (see the "terminator" comment above).
pub struct MpegTsStreamTerminator<S, T> {
    inner: S,
    _stop_trigger: T,
}

impl<S, T> MpegTsStreamTerminator<S, T> {
    pub fn new(inner: S, _stop_trigger: T) -> Self {
        Self { inner, _stop_trigger }
    }
}

// Stream impl forwards polling to the inner stream; the trigger is only held.
impl<S, T> Stream for MpegTsStreamTerminator<S, T>
where
    S: Stream + Unpin,
    T: Unpin,
{
    type Item = S::Item;

    fn poll_next(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context
    ) -> std::task::Poll<Option<Self::Item>> {
        Pin::new(&mut self.inner).poll_next(cx)
    }
}

/// Copies chunks from `stream` into `writer` until EOF or the first error,
/// then shuts the writer down. BrokenPipe in either direction is treated as
/// a normal disconnect (debug log); other errors are logged at error level.
async fn pipe<T, S, W>(mut stream: MpegTsStream<T, S>, mut writer: W)
where
    T: fmt::Display + Clone + Unpin,
    S: Stream<Item = io::Result<Bytes>> + Unpin,
    W: AsyncWrite + Unpin,
{
    loop {
        match stream.next().await {
            Some(Ok(chunk)) => {
                log::trace!("{}: Received a chunk of {} bytes", stream.id(), chunk.len());
                if let Err(err) = writer.write_all(&chunk).await {
                    if err.kind() == io::ErrorKind::BrokenPipe {
                        log::debug!("{}: Downstream has been closed", stream.id());
                    } else {
                        log::error!("{}: Failed to write to downstream: {}", stream.id(), err);
                    }
                    break;
                }
            }
            Some(Err(err)) => {
                if err.kind() == io::ErrorKind::BrokenPipe {
                    log::debug!("{}: Upstream has been closed", stream.id());
                } else {
                    log::error!("{}: Failed to read from upstream: {}", stream.id(), err);
                }
                break;
            }
            None => {
                log::debug!("{}: EOF reached", stream.id());
                break;
            }
        }
        // TODO: Should yield here like web::streaming()?
    }
    // Best-effort shutdown; a failure is only logged.
    if let Err(err) = writer.shutdown().await {
        log::warn!("{}: Failed to shutdown: {}", stream.id(), err);
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tokio_stream::wrappers::ReceiverStream;

    #[actix::test]
    async fn test_pipe() {
        let (tx, rx) = tokio::sync::mpsc::channel(1);
        let stream = MpegTsStream::new(0, ReceiverStream::new(rx));
        // TestWriter asserts on drop that it received exactly "hello".
        let writer = TestWriter::new(b"hello");
        let handle = tokio::spawn(stream.pipe(writer));
        let result = tx.send(Ok(Bytes::from("hello"))).await;
        assert!(result.is_ok());
        // Dropping the sender ends the stream, letting pipe() finish.
        drop(tx);
        let _ = handle.await.unwrap();
    }

    // AsyncWrite sink that accumulates writes and checks the total on drop.
    struct TestWriter {
        buf: Vec<u8>,
        expected: &'static [u8],
    }

    impl TestWriter {
        fn new(expected: &'static [u8]) -> Self {
            Self { buf: Vec::new(), expected }
        }
    }

    impl AsyncWrite for TestWriter {
        fn poll_write(
            mut self: Pin<&mut Self>,
            _: &mut std::task::Context,
            buf: &[u8]
        ) -> std::task::Poll<io::Result<usize>> {
            self.buf.extend_from_slice(buf);
            std::task::Poll::Ready(Ok(buf.len()))
        }

        fn poll_flush(
            self: Pin<&mut Self>,
            _: &mut std::task::Context
        ) -> std::task::Poll<io::Result<()>> {
            std::task::Poll::Ready(Ok(()))
        }

        fn poll_shutdown(
            self: Pin<&mut Self>,
            _: &mut std::task::Context
        ) -> std::task::Poll<io::Result<()>> {
            std::task::Poll::Ready(Ok(()))
        }
    }

    impl Drop for TestWriter {
        fn drop(&mut self) {
            assert_eq!(self.buf.as_slice(), self.expected);
        }
    }
}
use gl;
use gl::types::*;
use std::mem;
use std::vec;

/// Owns one VAO + VBO + EBO triple and uploads interleaved vertex data.
pub struct VertexBuffers {
    vao: GLuint,
    vbo: GLuint,
    ebo: GLuint,
    pub vertex_width: u8, // this is the width of the concrete struct that satisfies the Vertex trait
}

/// Anything that can describe itself as vertices + element triangles.
pub trait VertexSpecable {
    fn get_vertex_specification(&self) -> VertexSpecification;
}

/// A batch of vertices plus the triangles indexing into them.
pub struct VertexSpecification {
    pub vertices: Vec<Box<Vertex>>,
    pub elements: Vec<ElementTriangle>,
}

/// A vertex that can flatten itself into a run of GLfloats.
pub trait Vertex {
    fn get_vec(&self) -> Vec<GLfloat>;
}

/// Position + RGB color vertex (width 5).
pub struct ColorVertex {
    pub x: GLfloat,
    pub y: GLfloat,
    pub red: GLfloat,
    pub green: GLfloat,
    pub blue: GLfloat,
}

impl Vertex for ColorVertex {
    fn get_vec(&self) -> Vec<GLfloat> {
        return vec![self.x, self.y, self.red, self.green, self.blue];
    }
}

/// Position + texture-coordinate vertex (width 4).
pub struct TextureVertex {
    pub x: GLfloat,
    pub y: GLfloat,
    pub tex_x: GLfloat,
    pub tex_y: GLfloat,
}

impl Vertex for TextureVertex {
    fn get_vec(&self) -> Vec<GLfloat> {
        return vec![self.x, self.y, self.tex_x, self.tex_y];
    }
}

/// One triangle expressed as three indices into the vertex list.
pub struct ElementTriangle {
    pub p1: GLint,
    pub p2: GLint,
    pub p3: GLint,
}

impl ElementTriangle {
    fn get_vec(&self) -> Vec<GLint> {
        return vec![self.p1, self.p2, self.p3];
    }

    /// Shifts all three indices by `vertex_offset`, used when several
    /// objects' vertices are concatenated into one buffer.
    fn add_vertex_offset(&self, vertex_offset: i32) -> ElementTriangle {
        return ElementTriangle {
            p1: self.p1 + vertex_offset,
            p2: self.p2 + vertex_offset,
            p3: self.p3 + vertex_offset,
        };
    }
}

impl VertexBuffers {
    /// Generates and binds a VAO, VBO and EBO; both buffers stay bound
    /// so gen_vertex_buffers can upload into them later.
    pub fn new(vertex_width: u8) -> VertexBuffers {
        let mut vao = 0;
        let mut vbo = 0;
        let mut ebo = 0;
        unsafe {
            // Create Vertex Array Object
            gl::GenVertexArrays(1, &mut vao);
            gl::BindVertexArray(vao);
            // Create a Vertex Buffer Object
            gl::GenBuffers(1, &mut vbo);
            // Create a Element Buffer Object
            gl::GenBuffers(1, &mut ebo);
        }
        let v = VertexBuffers {
            vao: vao,
            vbo: vbo,
            ebo: ebo,
            vertex_width: vertex_width,
        };
        unsafe {
            gl::BindBuffer(gl::ARRAY_BUFFER, v.vbo);
            gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, v.ebo);
        }
        return v;
    }

    /// Flattens `rects` into one vertex/element upload and returns the
    /// number of element indices (what glDrawElements needs).
    pub fn gen_vertex_buffers<V: VertexSpecable + ?Sized>(&self, rects: &Vec<Box<V>>) -> GLsizei {
        let vertex_spec = full_vertex_spec(rects);
        let vertex_structs = vertex_spec.vertices;
        let element_triangles = vertex_spec.elements;

        let mut vertices: vec::Vec<GLfloat> = vec::Vec::new();
        for vertex in vertex_structs.iter() {
            vertices.append(&mut vertex.get_vec());
        }

        let mut elements: vec::Vec<GLint> = vec::Vec::new();
        for element_triangle in element_triangles.iter() {
            elements.append(&mut element_triangle.get_vec());
        }
        let elem_count = elements.len() as GLsizei;

        unsafe {
            // copy the vertex data to the Vertex Buffer Object
            // (buffers were bound in new(); &vertices[0] requires non-empty
            // input — assumes rects produce at least one vertex, TODO confirm)
            gl::BufferData(gl::ARRAY_BUFFER,
                           (vertices.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
                           mem::transmute(&vertices[0]),
                           gl::STATIC_DRAW);
            gl::BufferData(gl::ELEMENT_ARRAY_BUFFER,
                           (elements.len() * mem::size_of::<GLint>()) as GLsizeiptr,
                           mem::transmute(&elements[0]),
                           gl::STATIC_DRAW);
        }
        return elem_count;
    }

    /// Releases the GL objects. Not called automatically (no Drop impl).
    pub fn close(&self) {
        unsafe {
            // NOTE(review): `vao` is a vertex-array name, not a buffer name —
            // this first DeleteBuffers call looks unintended (GL silently
            // ignores non-buffer names); confirm and consider removing it.
            gl::DeleteBuffers(1, &self.vao);
            gl::DeleteVertexArrays(1, &self.vao);
            gl::DeleteBuffers(1, &self.vbo);
            gl::DeleteBuffers(1, &self.ebo);
        }
    }
}

/// Concatenates each rect's vertex spec, re-basing every triangle's indices
/// onto the combined vertex list.
fn full_vertex_spec<V: VertexSpecable + ?Sized>(rects: &Vec<Box<V>>) -> VertexSpecification {
    let mut vertices = vec::Vec::new();
    let mut elements = vec::Vec::new();
    let mut vertex_count_offset = 0;
    for rect in rects.iter() {
        let mut vert_spec = rect.get_vertex_specification();
        let vertex_count = vert_spec.vertices.len() as i32;
        vertices.append(&mut vert_spec.vertices);
        elements.append(&mut vert_spec.elements
                        .iter()
                        .map(|x| x.add_vertex_offset(vertex_count_offset))
                        .collect());
        vertex_count_offset += vertex_count;
    }
    return VertexSpecification {
        vertices: vertices,
        elements: elements,
    };
}
use procon_reader::ProconReader; fn main() { let stdin = std::io::stdin(); let mut rd = ProconReader::new(stdin.lock()); let n: usize = rd.get(); let mut a = Vec::new(); let mut b = Vec::new(); for _ in 0..n { let x: u64 = rd.get(); let y: u64 = rd.get(); a.push(x); b.push(y); } let (a, b, s) = { let mut aa = vec![a[0]]; let mut bb = vec![b[0]]; let mut s = 0; for i in 1..n { if bb[bb.len() - 1] < b[i] { aa.push(a[i] + s); bb.push(b[i]); s = 0; } else { s += a[i]; } } (aa, bb, s) }; let n = a.len(); let mut cum_sum = vec![0]; for i in 0..n { cum_sum.push(cum_sum[i] + a[i]); } let inf = std::u64::MAX; let mut dp = vec![inf; n + 1]; dp[0] = 0; use std::cmp::Reverse; use std::collections::BinaryHeap; let mut heap = BinaryHeap::new(); heap.push((Reverse(dp[0]), a[0] - dp[0])); for i in 0..n { while let Some(&(_, y)) = heap.peek() { if y < b[i] { heap.pop(); } else { break; } } match heap.peek() { Some(&(Reverse(x), _)) => { dp[i + 1] = x + b[i]; if i + 2 <= n { assert!(cum_sum[i + 2] >= dp[i + 1]); heap.push((Reverse(dp[i + 1]), cum_sum[i + 2] - dp[i + 1])); } } None => { println!("-1"); return; } } } assert!(dp[n] < inf); println!("{}", s + cum_sum[n] - dp[n]); }
pub mod kalman_filter {
    extern crate nalgebra as na;

    use crate::sensor;
    use sensor::measurement::{DeviceSensor, SensorMeasurement};
    use sensor::measurement::{LidarMeasurement, LidarSensor, RadarMeasurement, RadarSensor};
    use sensor::measurement::{MeasurementPackage, SensorType};

    use crate::ukf_type;
    use ukf_type::ukf::*;

    use crate::util::helper;
    use helper::negative_normalize;

    /// Unscented Kalman Filter over a CTRV-style 5D state
    /// [px, py, v, yaw, yaw_rate] fused from lidar and radar measurements.
    #[allow(non_snake_case)]
    #[derive(Debug)]
    pub struct UnscentedKalmanFilter {
        // initially set to false, set to true in first call of process_measurement
        is_initiliased: bool,
        // if this is false, lidar measurements will be ignored (except for init)
        use_laser: bool,
        // if this is false, radar measurements will be ignored (except for init)
        use_radar: bool,
        lidar_sensor: LidarSensor,
        radar_sensor: RadarSensor,
        // previous timestamp (microseconds — see process_measurement's /1e6)
        prev_timestamp: u64,
        // Covariance Matrix
        P: CovarMatrix,
        // StateVector
        x: StateVector,
        // Normalised Innovation Squared, per sensor
        pub nis_lidar: f64,
        pub nis_radar: f64,
    }

    /// UKF prediction step: sigma-point generation, process-model
    /// propagation, and recovery of the predicted mean/covariance.
    #[allow(non_snake_case)]
    pub trait UKFPredict {
        /// Full prediction pipeline for one time step of `delta_t` seconds.
        fn prediction(
            x: &StateVector,
            P: &CovarMatrix,
            delta_t: f64,
        ) -> (SigmaPoints, StateVector, CovarMatrix) {
            let X_sig_aug = Self::augmented_sigma_points(STD_A, STD_YAWDD, &x, &P);
            let X_sig_pred = Self::predict_sigma_points(delta_t, &X_sig_aug);
            Self::predict_mean_and_covar(X_sig_pred)
        }

        /// Builds the augmented sigma-point matrix: state extended with the
        /// process-noise terms (accel std `std_a`, yaw-accel std `std_yawdd`).
        fn augmented_sigma_points(
            std_a: f64,
            std_yawdd: f64,
            x: &StateVector,
            P: &CovarMatrix,
        ) -> AugSigmaPoints {
            // create augmented state vector (state padded with zero noise means)
            let mut x_aug = AugStateVector::zeros();
            let n_aug = x_aug.shape().0;
            x_aug.copy_from(&x.clone().resize(n_aug, 1, 0.0));

            // create augmented covariance matrix with noise variances on the diagonal
            let mut P_aug = AugCovarMatrix::zeros();
            P_aug.copy_from(&P.clone().resize(n_aug, n_aug, 0.0));
            P_aug[(5, 5)] = std_a * std_a;
            P_aug[(6, 6)] = std_yawdd * std_yawdd;

            // square root of P (falls back to zeros if P_aug is not SPD)
            let L: CholeskyMatrix = match P_aug.cholesky() {
                Some(x) => x.l(),
                None => {
                    warn!("no_cholesky!");
                    CholeskyMatrix::zeros()
                }
            };

            // define spreading parameter
            let spread = (LAMBDA + n_aug as f64).sqrt();

            // column 0 is the mean; columns 1..=n and n+1..=2n are mean ± spread*L_i
            let mut X_sig_aug = AugSigmaPoints::zeros();
            X_sig_aug.column_mut(0).copy_from(&x_aug);
            for i in 0..n_aug {
                let mut x_aug1_col = x_aug.clone();
                let mut x_aug2_col = x_aug.clone();
                for j in 0..n_aug {
                    x_aug1_col[j] += spread * L.column(i)[j];
                    x_aug2_col[j] -= spread * L.column(i)[j];
                }
                X_sig_aug.column_mut(i + 1).copy_from(&x_aug1_col);
                X_sig_aug.column_mut(i + 1 + n_aug).copy_from(&x_aug2_col);
            }
            X_sig_aug
        }

        /// Runs each augmented sigma point through the CTRV process model.
        fn predict_sigma_points(delta_t: f64, X_sig_aug: &AugSigmaPoints) -> SigmaPoints {
            let mut X_sig_pred = SigmaPoints::zeros();
            let n_aug = X_sig_aug.shape().0;
            for i in 0..2 * n_aug + 1 {
                // extract values for better readability
                let p_x = X_sig_aug[(0, i)];
                let p_y = X_sig_aug[(1, i)];
                let v = X_sig_aug[(2, i)];
                let yaw = X_sig_aug[(3, i)];
                let yawd = X_sig_aug[(4, i)];
                let nu_a = X_sig_aug[(5, i)];
                let nu_yawdd = X_sig_aug[(6, i)];

                // predicted state values in *_p
                let mut px_p: f64;
                let mut py_p: f64;

                // avoid division by zero: straight-line model when yaw rate ~ 0
                if yawd.is_normal() {
                    px_p = p_x + v / yawd * ((yaw + yawd * delta_t).sin() - yaw.sin());
                    py_p = p_y + v / yawd * (yaw.cos() - (yaw + yawd * delta_t).cos());
                } else {
                    px_p = p_x + v * delta_t * yaw.cos();
                    py_p = p_y + v * delta_t * yaw.sin();
                }

                let mut v_p = v;
                let mut yaw_p = yaw + yawd * delta_t;
                let mut yawd_p = yawd;

                // add process noise contribution
                px_p = px_p + 0.5 * nu_a * delta_t * delta_t * yaw.cos();
                py_p = py_p + 0.5 * nu_a * delta_t * delta_t * yaw.sin();
                v_p = v_p + nu_a * delta_t;
                yaw_p = yaw_p + 0.5 * nu_yawdd * delta_t * delta_t;
                yawd_p = yawd_p + nu_yawdd * delta_t;

                // write predicted sigma point into right column
                X_sig_pred[(0, i)] = px_p;
                X_sig_pred[(1, i)] = py_p;
                X_sig_pred[(2, i)] = v_p;
                X_sig_pred[(3, i)] = yaw_p;
                X_sig_pred[(4, i)] = yawd_p;
            }
            X_sig_pred
        }

        /// Collapses the predicted sigma points back into a weighted mean
        /// and covariance (yaw residuals normalized to [-pi, pi]).
        fn predict_mean_and_covar(
            X_sig_pred: SigmaPoints,
        ) -> (SigmaPoints, StateVector, CovarMatrix) {
            // create the predicted state vector
            let mut x = StateVector::zeros();
            // create the predicted covariance matrix
            let mut P = CovarMatrix::zeros();
            // create the sigma point weights
            let weights = SigmaPointWeights::new();
            let n_sig_cols = X_sig_pred.cols();

            // predicted state mean
            for i in 0..n_sig_cols {
                x += weights[i] * X_sig_pred.state_from_col(i);
            }

            // predicted state covariance matrix
            for i in 1..n_sig_cols {
                // state difference relative to the mean sigma point
                let mut x_diff = X_sig_pred.state_from_col(i) - X_sig_pred.state_from_col(0);
                // angle normalization -pi to pi
                x_diff[3] = negative_normalize(x_diff[3]);
                P += weights[i] * x_diff * x_diff.transpose();
            }
            (X_sig_pred, x, P)
        }
    }

    /// Generic measurement-update step, parameterized over the
    /// sensor-specific measurement, sigma-point, vector, covariance,
    /// cross-correlation and Kalman-gain types.
    #[allow(non_snake_case)]
    trait UKFUpdate<T, U, V, W, X, Y>: UnscentedKalmanUpdate<Y, V, W>
    where
        T: SensorMeasurement<T> + HasMeasurementFactory<T, V, W>,
        U: SensorSigmaPoints<U>
            + HasSensorSigmaPointsFactory<U, X>
            + HasSensorVector<V>
            + HasSensorCovar<W>,
        V: SensorVector<V> + HasSensorVectorFactory<V, U>,
        W: SensorCovar<W> + HasSensorCovarFactory<W, U>,
        X: SensorSigmaPointsCrossCorrelation<X> + HasTcFactory<X, W, Y>,
        Y: KalmanGain<Y>,
    {
        /// Fuses one measurement `m` into the predicted state, returning the
        /// updated state, covariance and the NIS consistency score.
        fn update(
            &self,
            m: &T,
            X_sig_pred: &SigmaPoints,
            x: &StateVector,
            P: &CovarMatrix,
        ) -> (StateVector, CovarMatrix, f64)
        where
            SigmaPoints: HasSigmaPointsMeasurementSpace<U>,
        {
            // measurement
            let z = m.z();
            // project the predicted sigma points into this sensor's
            // measurement space
            let Z_sig = X_sig_pred.measurement_space();
            // mean predicted measurement
            let z_pred = Z_sig.predicted_measurement();
            // measurement covariance matrix S
            let S = Z_sig.measurement_covar();
            // cross correlation matrix
            let Tc = Z_sig.Tc(&X_sig_pred);
            // Kalman gain K
            let K = Tc.mul(S.inverse());
            // residual
            let z_diff = z.diff(&z_pred);
            // update state and covariance: x' = x + K*z_diff, P' = P - K*S*K^T
            let x = Self::update_state(&x, &K, &z_diff);
            let P = Self::update_covariance(&P, &K, &S);
            let nis = m.nis(&z_pred, &S);
            (x, P, nis)
        }
    }

    // Both sensors rely entirely on the trait's default `update`.
    // (A previous hand-rolled lidar update/predict_measurement lived here as
    // commented-out code; removed as dead.)
    impl
        UKFUpdate<
            LidarMeasurement,
            LidarSigmaPoints,
            LidarStateVector,
            LidarCovarMatrix,
            LidarCrossCorrelationMatrix,
            LidarKalmanGain,
        > for LidarSensor
    {
    }

    impl
        UKFUpdate<
            RadarMeasurement,
            RadarSigmaPoints,
            RadarStateVector,
            RadarCovarMatrix,
            RadarCrossCorrelationMatrix,
            RadarKalmanGain,
        > for RadarSensor
    {
    }

    impl UKFPredict for UnscentedKalmanFilter {}

    /// Public filter API: construction, initialisation from the first
    /// measurement, and the predict+update cycle.
    pub trait UKF: UKFPredict {
        fn new() -> Self;
        fn init_state_lidar(m: &LidarMeasurement) -> StateVector;
        fn init_state_radar(m: &RadarMeasurement) -> StateVector;
        fn initialise(&mut self, m: &MeasurementPackage);
        fn process_measurement(&mut self, m: &MeasurementPackage) -> StateVector;
    }

    #[allow(non_snake_case, dead_code)]
    impl UKF for UnscentedKalmanFilter {
        fn new() -> UnscentedKalmanFilter {
            // Identity covariance and zero state until the first measurement.
            let P = CovarMatrix::from_diagonal_element(1.0);
            let x = StateVector::zeros();
            UnscentedKalmanFilter {
                is_initiliased: false,
                use_laser: true,
                use_radar: true,
                lidar_sensor: LidarSensor::new(),
                radar_sensor: RadarSensor::new(),
                prev_timestamp: 0,
                P: P,
                x: x,
                nis_lidar: 0.0,
                nis_radar: 0.0,
            }
        }

        /// Lidar measures position directly; all other state starts at zero.
        fn init_state_lidar(m: &LidarMeasurement) -> StateVector {
            let mut x = StateVector::zeros();
            x[0] = m.px;
            x[1] = m.py;
            trace!("init_state_lidar x:{:?}", x);
            x
        }

        /// Radar gives polar coordinates; convert to the Cartesian state.
        fn init_state_radar(m: &RadarMeasurement) -> StateVector {
            let mut x = StateVector::zeros();
            let rho = m.rho; // Range - radial distance from origin
            let phi = m.theta; // bearing - angle between rho and x
            let rho_dot = m.rho_dot; // Radial Velocity - change of p (range rate)

            let px = rho * phi.cos(); // metres
            let py = rho * phi.sin();
            let v = rho_dot; // metres/sec
            let yaw = phi; // radians
            let yaw_dot = 0.0; // radians/sec

            x[0] = px;
            x[1] = py;
            x[2] = v;
            x[3] = yaw;
            x[4] = yaw_dot;
            trace!("init_state_radar x:{:?}", x);
            x
        }

        /// Seeds the state from the first measurement of either sensor.
        fn initialise(&mut self, m: &MeasurementPackage) {
            self.x = match m.sensor_type {
                SensorType::Lidar => Self::init_state_lidar(m.lidar_data.as_ref().unwrap()),
                SensorType::Radar => Self::init_state_radar(m.radar_data.as_ref().unwrap()),
            };
            self.prev_timestamp = m.timestamp;
            self.is_initiliased = true;
            debug!("init x:{}", self.x);
            debug!("init P:{}", self.P);
            debug!("init lidar_sensor:{:?}", self.lidar_sensor);
            debug!("init radar_sensor:{:?}", self.radar_sensor);
        }

        /// One full filter cycle; returns the (possibly just-initialised)
        /// state estimate.
        fn process_measurement(&mut self, m: &MeasurementPackage) -> StateVector {
            // initialisation
            if !self.is_initiliased {
                self.initialise(m);
                return self.x;
            }

            // timestamps appear to be microseconds (divided by 1e6 to get
            // seconds) — TODO confirm with the data source
            let delta_t: f64 = (m.timestamp - self.prev_timestamp) as f64 / 1000000.0;
            self.prev_timestamp = m.timestamp;

            // prediction step
            let (X_sig_pred, x, P) = Self::prediction(&self.x, &self.P, delta_t);

            // update step, dispatched to the sensor that produced `m`
            let (x, P, nis) = match m.sensor_type {
                SensorType::Lidar => {
                    self.lidar_sensor
                        .update(m.lidar_data.as_ref().unwrap(), &X_sig_pred, &x, &P)
                }
                SensorType::Radar => {
                    self.radar_sensor
                        .update(m.radar_data.as_ref().unwrap(), &X_sig_pred, &x, &P)
                }
            };

            match m.sensor_type {
                SensorType::Lidar => self.nis_lidar = nis,
                SensorType::Radar => self.nis_radar = nis,
            }
            self.x = x;
            self.P = P;
            self.x
        }
    }
}
use model::controll::*; use protocol::*; use service::{console::ConsoleMail, SamotopService}; use grammar::SmtpParser; use tokio; use tokio::io; use tokio::net::TcpStream; use tokio::prelude::*; use tokio_codec::Decoder; #[derive(Clone)] pub struct MailService { name: String, parser: SmtpParser, } impl MailService { pub fn new(name: impl ToString) -> Self { Self { name: name.to_string(), parser: SmtpParser, } } } impl SamotopService for MailService { fn handle(self, socket: TcpStream) { let local = socket.local_addr().ok(); let peer = socket.peer_addr().ok(); let (dst, src) = SmtpCodec::new().framed(socket).split(); let task = src .peer(peer) .parse(SmtpParser) .mail(ConsoleMail::new()) // prevent polling after shutdown .fuse_shutdown() // prevent polling of completed stream .fuse() // forward to client .forward(dst) .map(move |_| info!("peer {:?} gone from {:?}", peer, local)) .map_err(move|e:io::Error| warn!("peer {:?} gone from {:?} with error {:?}", peer, local, e)); tokio::spawn(task); } }
#![no_main] #[macro_use] extern crate libfuzzer_sys; extern crate random_access_disk as rad; extern crate tempdir; use self::tempdir::TempDir; fuzz_target!(|data: &[u8]| { let dir = TempDir::new("random-access-disk").unwrap(); let mut file = rad::Sync::new(dir.path().join("2.db")); file.write(0, data).unwrap(); });
use crate::prelude::*; use std::os::raw::c_void; #[repr(C)] #[derive(Debug)] pub struct VkBindSparseInfo { pub sType: VkStructureType, pub pNext: *const c_void, pub waitSemaphoreCount: u32, pub pWaitSemaphores: *const VkSemaphore, pub bufferBindCount: u32, pub pBufferBinds: *const VkSparseBufferMemoryBindInfo, pub imageOpaqueBindCount: u32, pub pImageOpaqueBinds: *const VkSparseImageOpaqueMemoryBindInfo, pub imageBindCount: u32, pub pImageBinds: *const VkSparseImageMemoryBindInfo, pub signalSemaphoreCount: u32, pub pSignalSemaphores: *const VkSemaphore, }
use crate::event::GameEvent;
use crate::render::ui::gui::GuiContext;
use crate::render::ui::Gui;
use crate::resources::Resources;
use hecs::World;
use std::time::Duration;

/// The stack will keep track of the states in the game.
/// The top of the stack will be used for the update loop. The states below
/// are still kept in memory so to go back to a previous state, you just have
/// to pop the stack.
pub struct SceneStack<I> {
    states: Vec<Box<dyn Scene<I>>>,
}

impl<I> Default for SceneStack<I> {
    fn default() -> Self {
        Self { states: vec![] }
    }
}

/// Transition requested by a scene's `update`, interpreted by
/// `SceneStack::apply_result`.
pub enum SceneResult<I> {
    ReplaceScene(Box<dyn Scene<I>>),
    Push(Box<dyn Scene<I>>),
    Pop,
    /// Remove all existing scenes and create the new one.
    ReplaceAll(Box<dyn Scene<I>>),
    Noop,
}

impl<I> SceneStack<I> {
    /// Applies a transition produced by the active scene.
    pub fn apply_result(
        &mut self,
        res: SceneResult<I>,
        world: &mut hecs::World,
        resources: &mut Resources,
    ) {
        match res {
            SceneResult::ReplaceScene(state) => self.replace(state, world, resources),
            SceneResult::Push(state) => self.push(state, world, resources),
            SceneResult::Pop => {
                self.pop(world);
            }
            SceneResult::ReplaceAll(state) => {
                // NOTE(review): draining via pop() fires `on_enter` on each
                // intermediate scene just before it is destroyed — confirm
                // this is intended.
                while !self.states.is_empty() {
                    self.pop(world);
                }
                self.push(state, world, resources);
            }
            SceneResult::Noop => (),
        }
    }

    /// Add a state to the game. Will be used for updating.
    ///
    /// The callback on_create will be executed for the new state; the
    /// previous top gets on_exit first.
    pub fn push(
        &mut self,
        state: Box<dyn Scene<I>>,
        world: &mut hecs::World,
        resources: &mut Resources,
    ) {
        if let Some(current) = self.states.last_mut() {
            current.on_exit();
        }
        self.states.push(state);
        if let Some(current) = self.states.last_mut() {
            current.on_create(world, resources);
        }
    }

    /// Remove the current state and execute its exit callback.
    /// The newly-exposed state (if any) gets on_enter.
    pub fn pop(&mut self, world: &mut hecs::World) -> Option<Box<dyn Scene<I>>> {
        if let Some(mut s) = self.states.pop() {
            s.on_destroy(world);
            if let Some(current) = self.states.last() {
                current.on_enter();
            }
            Some(s)
        } else {
            None
        }
    }

    /// Replace the current state (destroy old top, create new top).
    pub fn replace(
        &mut self,
        state: Box<dyn Scene<I>>,
        world: &mut hecs::World,
        resources: &mut Resources,
    ) {
        if let Some(mut s) = self.states.pop() {
            s.on_destroy(world);
        }
        self.states.push(state);
        if let Some(current) = self.states.last_mut() {
            current.on_create(world, resources);
        }
    }

    /// Get the current state as a mut reference.
    #[allow(clippy::borrowed_box)]
    pub fn current_mut(&mut self) -> Option<&mut Box<dyn Scene<I>>> {
        self.states.last_mut()
    }
}

/// Lifecycle + per-frame hooks for one game state. `I` is the input type
/// fed to `process_input`.
pub trait Scene<I> {
    /// Will be called when the state is added to the state stack.
    fn on_create(&mut self, _world: &mut hecs::World, _resources: &mut Resources) {
        info!("Create state");
    }

    /// Will be called when the state is removed from the state stack.
    fn on_destroy(&mut self, _world: &mut hecs::World) {
        info!("Destroy state");
    }

    /// Will be called when the state becomes active. This is called
    /// on stack.pop
    ///
    /// Careful, this is not called on stack.push. Use the on_create callback instead.
    fn on_enter(&self) {
        info!("Enter state");
    }

    /// Will be called when the state becomes inactive. This is called on
    /// stack.push
    fn on_exit(&self) {
        info!("Exit state");
    }

    //fn on_new_world(&mut self);

    /// Update gameplay systems.
    fn update(&mut self, dt: Duration, world: &mut World, resources: &Resources) -> SceneResult<I>;

    /// Builds this frame's GUI, or None when the scene has no GUI.
    fn prepare_gui(
        &mut self,
        _dt: Duration,
        _world: &mut World,
        _resources: &Resources,
        _gui_context: &GuiContext,
    ) -> Option<Gui> {
        None
    }

    /// React to game events.
    fn process_event(&mut self, _world: &mut World, _ev: GameEvent, _resources: &Resources) {}

    /// Process input from keyboard/mouse
    fn process_input(&mut self, _world: &mut World, _input: I, _resources: &Resources) {}
}
struct Solution; impl Solution { pub fn longest_common_prefix(strs: Vec<String>) -> String { let n = strs.iter().map(|x| x.len()).min().unwrap(); let mut res = String::with_capacity(n); for i in 0..n { let c = &strs[0][i..=i]; for j in 1..strs.len() { if &strs[j][i..=i] != c { return res; } } res.push_str(c); } res } } #[cfg(test)] mod tests { use super::*; fn helper(strs: Vec<&str>, want: &str) { let strs: Vec<String> = strs.iter().map(|x| x.to_string()).collect(); let got = Solution::longest_common_prefix(strs.clone()); eprintln!("longest_common_prefix({:?}) = {:?}", strs, got); assert_eq!(got, want); } #[test] fn test_longest_common_prefix_1() { helper(vec!["flower", "flow", "flight"], "fl"); } #[test] fn test_longest_common_prefix_2() { helper(vec!["dog", "racecar", "car"], ""); } #[test] fn test_longest_common_prefix_empty() { helper(vec![""], ""); } }
use anyhow::{anyhow, bail, Context};
use nom::{
    call,
    character::complete::{anychar, char, digit1},
    combinator::{complete, map},
    do_parse, map_opt, map_res,
    multi::separated_list0,
    named, IResult,
};

const INPUT: &str = include_str!("input.txt");

/// One wire step: a direction plus a distance.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
enum Instruction {
    Left(i32),
    Right(i32),
    Up(i32),
    Down(i32),
}

/// An axis-aligned wire segment. The paired coordinate is stored as
/// (start, end) and may run in either direction.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
enum Segment {
    Horizontal { x: (i32, i32), y: i32 },
    Vertical { x: i32, y: (i32, i32) },
}

impl Segment {
    /// The point where the segment begins (traversal order).
    pub fn start(self) -> (i32, i32) {
        match self {
            Segment::Horizontal { x: (x, _), y } => (x, y),
            Segment::Vertical { x, y: (y, _) } => (x, y),
        }
    }

    /// The segment's length in grid steps.
    pub fn len(self) -> i32 {
        match self {
            Segment::Horizontal { x: (x1, x2), .. } => (x1 - x2).abs(),
            Segment::Vertical { y: (y1, y2), .. } => (y1 - y2).abs(),
        }
    }

    /// Intersection point of a horizontal and a vertical segment, if any.
    /// Parallel segments never intersect here (collinear overlap is ignored).
    pub fn point_intersection(self, other: Segment) -> Option<(i32, i32)> {
        match (self, other) {
            (Segment::Horizontal { x: x1, y: y1 }, Segment::Vertical { x: x2, y: y2 })
                if x2 >= x1.0.min(x1.1)
                    && x2 <= x1.0.max(x1.1)
                    && y1 >= y2.0.min(y2.1)
                    && y1 <= y2.0.max(y2.1) =>
            {
                Some((x2, y1))
            }
            (Segment::Vertical { x: x1, y: y1 }, Segment::Horizontal { x: x2, y: y2 })
                if x1 >= x2.0.min(x2.1)
                    && x1 <= x2.0.max(x2.1)
                    && y2 >= y1.0.min(y1.1)
                    && y2 <= y1.0.max(y1.1) =>
            {
                Some((x1, y2))
            }
            _ => None,
        }
    }
}

// Parses one "<dir><digits>" token, e.g. "R75"; rejects unknown directions.
named!(
    parse_instruction<&str, Instruction>,
    map_opt!(
        do_parse!(
            direction: call!(anychar)
                >> amount: map_res!(call!(digit1), |s: &str| s.parse())
                >> (direction, amount)
        ),
        |(direction, amount)| match direction {
            'L' => Some(Instruction::Left(amount)),
            'R' => Some(Instruction::Right(amount)),
            'U' => Some(Instruction::Up(amount)),
            'D' => Some(Instruction::Down(amount)),
            _ => None,
        }
    )
);

/// Parses a comma-separated wire description into segments, tracking the
/// running pen position starting at the origin. Note: y grows downward for
/// `Down` and shrinks for `Up`.
fn parse_segments(input: &str) -> IResult<&str, Vec<Segment>> {
    let mut position = (0, 0);
    let (input, segments) = separated_list0(
        complete(char(',')),
        map(
            complete(parse_instruction),
            |instruction| match instruction {
                Instruction::Left(amount) => {
                    let segment = Segment::Horizontal {
                        x: (position.0, position.0 - amount),
                        y: position.1,
                    };
                    position = (position.0 - amount, position.1);
                    segment
                }
                Instruction::Right(amount) => {
                    let segment = Segment::Horizontal {
                        x: (position.0, position.0 + amount),
                        y: position.1,
                    };
                    position = (position.0 + amount, position.1);
                    segment
                }
                Instruction::Up(amount) => {
                    let segment = Segment::Vertical {
                        x: position.0,
                        y: (position.1, position.1 - amount),
                    };
                    position = (position.0, position.1 - amount);
                    segment
                }
                Instruction::Down(amount) => {
                    let segment = Segment::Vertical {
                        x: position.0,
                        y: (position.1, position.1 + amount),
                    };
                    position = (position.0, position.1 + amount);
                    segment
                }
            },
        ),
    )(input)?;
    Ok((input, segments))
}

/// Taxicab distance between two grid points.
fn manhattan_dist((x1, y1): (i32, i32), (x2, y2): (i32, i32)) -> i32 {
    (x1 - x2).abs() + (y1 - y2).abs()
}

/// Part 1: Manhattan distance from the origin to the closest crossing of
/// the two wires.
fn part1(input: &str) -> anyhow::Result<i32> {
    let paths = input
        .split('\n')
        .map(|line| {
            parse_segments(&line)
                .map(|(_, segment)| segment)
                .map_err(|error| anyhow!("failure parsing instructions: {}", error))
        })
        .collect::<Result<Vec<_>, _>>()?;
    if paths.len() != 2 {
        bail!("two paths required, found {}", paths.len());
    }
    let first_path = paths.get(0).context("not enough paths")?;
    let second_path = paths.get(1).context("not enough paths")?;
    first_path
        .iter()
        .flat_map(|s1| second_path.iter().map(move |s2| (s1, s2)))
        .filter_map(|(s1, s2)| s1.point_intersection(*s2))
        .map(|(x, y)| x.abs() + y.abs())
        // both wires start at the origin; that trivial crossing is ignored
        .filter(|n| *n != 0)
        .fold(None, |smallest, cur| match (smallest, cur) {
            (None, cur) => Some(cur),
            (Some(a), cur) if cur < a => Some(cur),
            _ => smallest,
        })
        .context("not enough path segments")
}

/// Part 2: minimal combined number of wire steps to reach a crossing.
fn part2(input: &str) -> anyhow::Result<i32> {
    let paths = input
        .split('\n')
        .map(|line| {
            parse_segments(&line)
                .map(|(_, segment)| segment)
                .map_err(|error| anyhow!("failure parsing instructions: {}", error))
        })
        .collect::<Result<Vec<_>, _>>()?;
    if paths.len() != 2 {
        bail!("two paths required, found {}", paths.len());
    }
    let first_path = paths.get(0).context("not enough paths")?;
    let second_path = paths.get(1).context("not enough paths")?;
    first_path
        .iter()
        // scan accumulates steps to the END of each segment...
        .scan(0i32, |steps: &mut i32, segment: &Segment| {
            *steps += segment.len();
            Some((*steps, segment))
        })
        .flat_map(|s1| {
            second_path
                .iter()
                .scan(0i32, |steps: &mut i32, segment: &Segment| {
                    *steps += segment.len();
                    Some((*steps, segment))
                })
                .map(move |s2| (s1, s2))
        })
        .filter_map(|((steps1, &segment1), (steps2, &segment2))| {
            segment1
                .point_intersection(segment2)
                .filter(|&(x, y)| x != 0 || y != 0)
                .map(|intersection| {
                    // ...so back off each segment's full length and add the
                    // distance from its start to the intersection instead.
                    steps1 + steps2
                        + manhattan_dist(segment1.start(), intersection)
                        + manhattan_dist(segment2.start(), intersection)
                        - segment1.len()
                        - segment2.len()
                })
        })
        .fold(None, |smallest, cur| match (smallest, cur) {
            (None, cur) => Some(cur),
            (Some(a), cur) if cur < a => Some(cur),
            _ => smallest,
        })
        .context("not enough path segments")
}

fn main() -> anyhow::Result<()> {
    println!("part 1: {}", part1(INPUT)?);
    println!("part 2: {}", part2(INPUT)?);
    Ok(())
}
use CliResult; use config::{Config, Delimiter}; use util; static USAGE: &'static str = " Read CSV data with special quoting rules. Generally, all xsv commands support basic options like specifying the delimiter used in CSV data. This does not cover all possible types of CSV data. For example, some CSV files don't use '\"' for quotes or use different escaping styles. Usage: xsv input [options] [<input>] input options: --quote <arg> The quote character to use. [default: \"] --escape <arg> The escape character to use. When not specified, quotes are escaped by doubling them. Common options: -h, --help Display this message -o, --output <file> Write output to <file> instead of stdout. -d, --delimiter <arg> The field delimiter for reading CSV data. Must be a single character. [default: ,] "; #[derive(RustcDecodable)] struct Args { arg_input: Option<String>, flag_output: Option<String>, flag_delimiter: Option<Delimiter>, flag_quote: Delimiter, flag_escape: Option<Delimiter>, } pub fn run(argv: &[&str]) -> CliResult<()> { let args: Args = try!(util::get_args(USAGE, argv)); let rconfig = Config::new(&args.arg_input) .delimiter(args.flag_delimiter) .no_headers(true); let wconfig = Config::new(&args.flag_output); let mut rdr = try!(rconfig.reader()); let mut wtr = try!(wconfig.writer()); rdr = rdr.quote(args.flag_quote.as_byte()); if let Some(escape) = args.flag_escape { rdr = rdr.escape(Some(escape.as_byte())).double_quote(false); } for r in rdr.byte_records() { try!(wtr.write(try!(r).into_iter())); } try!(wtr.flush()); Ok(()) }
use proconio::{input, marker::Usize1}; use detect_cycle::detect_cycle_directed; fn main() { input! { n: usize, a: [Usize1; n], }; let mut edges = Vec::new(); for i in 0..n { edges.push((i, a[i])); } let cycle = detect_cycle_directed(n, &edges).unwrap(); let mut ans = Vec::new(); for i in cycle { let (s, _) = edges[i]; ans.push(s); } println!("{}", ans.len()); for i in 0..ans.len() { print!("{}", ans[i] + 1); if i + 1 < ans.len() { print!(" "); } else { print!("\n"); } } }
use std::ops::{Neg, Add, Sub, Mul, Div};

/// A 3-component `f64` vector.
#[derive(Debug, Clone, Copy)]
pub struct Vector {
    pub x: f64,
    pub y: f64,
    pub z: f64,
}

impl Vector {
    /// Builds a vector from its three components.
    pub fn new(x: f64, y: f64, z: f64) -> Vector {
        Vector { x, y, z }
    }
}

/// Inner (dot) product.
pub trait Dot: Sized + Copy + Div<f64, Output = Self> {
    fn dot(self, other: Self) -> f64;
}

/// Cross product.
pub trait Cross {
    fn cross(self, other: Self) -> Self;
}

/// Euclidean length and its square.
pub trait Norm {
    fn norm(self) -> f64;
    fn sqr_norm(self) -> f64;
}

/// Scaling to unit length.
pub trait Normalize {
    fn normalize(self) -> Self;
}

// Every dot-product type gets a norm for free: |v|^2 = v . v.
impl<T> Norm for T
where
    T: Copy + Dot,
{
    fn norm(self) -> f64 {
        self.sqr_norm().sqrt()
    }

    fn sqr_norm(self) -> f64 {
        self.dot(self)
    }
}

// Every normed, scalar-divisible type can be normalized.
impl<T> Normalize for T
where
    T: Copy + Norm + Div<f64, Output = Self>,
{
    fn normalize(self) -> T {
        self / self.norm()
    }
}

impl Dot for Vector {
    fn dot(self, other: Vector) -> f64 {
        self.x * other.x + self.y * other.y + self.z * other.z
    }
}

impl Cross for Vector {
    fn cross(self, other: Vector) -> Vector {
        Vector::new(
            self.y * other.z - self.z * other.y,
            self.z * other.x - self.x * other.z,
            self.x * other.y - self.y * other.x,
        )
    }
}

impl Neg for Vector {
    type Output = Vector;

    fn neg(self) -> Vector {
        Vector::new(-self.x, -self.y, -self.z)
    }
}

impl Add for Vector {
    type Output = Vector;

    fn add(self, other: Vector) -> Vector {
        Vector::new(self.x + other.x, self.y + other.y, self.z + other.z)
    }
}

impl Sub for Vector {
    type Output = Vector;

    fn sub(self, other: Vector) -> Vector {
        Vector::new(self.x - other.x, self.y - other.y, self.z - other.z)
    }
}

// Scalar scaling from either side.
impl Mul<f64> for Vector {
    type Output = Vector;

    fn mul(self, scale: f64) -> Vector {
        Vector::new(self.x * scale, self.y * scale, self.z * scale)
    }
}

impl Mul<Vector> for f64 {
    type Output = Vector;

    fn mul(self, v: Vector) -> Vector {
        Vector::new(self * v.x, self * v.y, self * v.z)
    }
}

// Component-wise (Hadamard) product.
impl Mul for Vector {
    type Output = Vector;

    fn mul(self, other: Vector) -> Vector {
        Vector::new(self.x * other.x, self.y * other.y, self.z * other.z)
    }
}

impl Div<f64> for Vector {
    type Output = Vector;

    fn div(self, scale: f64) -> Vector {
        Vector::new(self.x / scale, self.y / scale, self.z / scale)
    }
}

impl Div<Vector> for f64 {
    type Output = Vector;

    fn div(self, v: Vector) -> Vector {
        Vector::new(self / v.x, self / v.y, self / v.z)
    }
}

// Component-wise division.
impl Div for Vector {
    type Output = Vector;

    fn div(self, other: Vector) -> Vector {
        Vector::new(self.x / other.x, self.y / other.y, self.z / other.z)
    }
}
fn parse(input: &str) -> Vec<u32> { input .lines() .map(|l| l.trim().parse()) .collect::<Result<Vec<u32>, _>>() .unwrap() } fn part1(input: &str) -> u32 { let nums = parse(input); let (a, b) = find_sum_operands_2(&nums, 2020); a * b } fn part2(input: &str) -> u32 { let nums = parse(input); let (a, b, c) = find_sum_operands_3(&nums, 2020); a * b * c } fn find_sum_operands_2(nums: &[u32], sum: u32) -> (u32, u32) { for &n1 in nums { for &n2 in nums { if n1 + n2 == sum { return (n1, n2); } } } panic!("Could not find sum operands"); } fn find_sum_operands_3(nums: &[u32], sum: u32) -> (u32, u32, u32) { for &n1 in nums { for &n2 in nums { for &n3 in nums { if n1 + n2 + n3 == sum { return (n1, n2, n3); } } } } panic!("Could not find sum operands"); } #[cfg(test)] const TEST_INPUT: &str = "1721 979 366 299 675 1456"; #[cfg(test)] const TEST_NUMS: [u32; 6] = [1721, 979, 366, 299, 675, 1456]; #[test] fn test_find_sum_operands_3() { assert_eq!(find_sum_operands_3(&TEST_NUMS, 2020), (979, 366, 675)); } #[test] fn test_find_sum_operands_2() { assert_eq!(find_sum_operands_2(&TEST_NUMS, 2020), (1721, 299)); } aoc::tests! { fn part1: TEST_INPUT => 514579; in => 974304; fn part2: in => 236430480; } aoc::main!(part1, part2);
// Copyright © 2016-2017 VMware, Inc. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

//- Message definitions for the viewstamped-replication (VR) protocol layer.

use rabble::{self, Pid};
use msg::Msg;
use api::{ApiReq, ApiRsp};
use super::replica::VersionedReplicas;
use api::Backend;
use std::convert::From;

/// Generate a message struct: `$struct_name` from a set of fields
///
/// Generate `impl From<$struct_name> for VrMsg`
///
/// Also generates `impl From<$struct_name> for rabble::Msg<Msg>` so any
/// message struct can be handed directly to rabble. Relies on `VrMsg`
/// having a variant with the exact same name as the struct.
macro_rules! msg {
    ($struct_name:ident { $( $field:ident: $ty:ty),+ }) => {
        #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
        pub struct $struct_name {
            $( pub $field: $ty ),+
        }

        impl From<$struct_name> for VrMsg {
            fn from(msg: $struct_name) -> VrMsg {
                VrMsg::$struct_name(msg)
            }
        }

        impl From<$struct_name> for rabble::Msg<Msg> {
            fn from(msg: $struct_name) -> rabble::Msg<Msg> {
                rabble::Msg::User(Msg::Vr(msg.into()))
            }
        }
    }
}

impl From<VrMsg> for rabble::Msg<Msg> {
    fn from(msg: VrMsg) -> rabble::Msg<Msg> {
        rabble::Msg::User(Msg::Vr(msg))
    }
}

/// An operation that can be appended to the replicated log: either a client
/// request or a cluster reconfiguration.
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub enum ClientOp {
    Request(ClientRequest),
    Reconfiguration(Reconfiguration)
}

impl From<ClientRequest> for ClientOp {
    fn from(req: ClientRequest) -> ClientOp {
        ClientOp::Request(req)
    }
}

impl From<Reconfiguration> for ClientOp {
    fn from(reconfig: Reconfiguration) -> ClientOp {
        ClientOp::Reconfiguration(reconfig)
    }
}

/// The full set of VR protocol messages. Every variant except `Tick` wraps a
/// struct of the same name generated by the `msg!` macro below.
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub enum VrMsg {
    Tick, // A message that drives the state of the fsm during periods of inactivity
    ClientRequest(ClientRequest),
    Reconfiguration(Reconfiguration),
    ClientReply(ClientReply),
    StartViewChange(StartViewChange),
    DoViewChange(DoViewChange),
    StartView(StartView),
    Prepare(Prepare),
    PrepareOk(PrepareOk),
    Commit(Commit),
    GetState(GetState),
    NewState(NewState),
    Recovery(Recovery),
    RecoveryResponse(RecoveryResponse),
    StartEpoch(StartEpoch),
    EpochStarted(EpochStarted),
}

/// Payload-less marker type so `Tick` can be converted like the other
/// messages even though its `VrMsg` variant carries no data.
pub struct Tick;

impl From<Tick> for VrMsg {
    fn from(_: Tick) -> VrMsg {
        VrMsg::Tick
    }
}

msg!(ClientRequest {
    op: ApiReq,
    client_id: String,
    request_num: u64
});

msg!(Reconfiguration {
    epoch: u64,
    client_req_num: u64,
    replicas: Vec<Pid>
});

msg!(ClientReply {
    epoch: u64,
    view: u64,
    request_num: u64,
    value: ApiRsp
});

msg!(StartViewChange {
    epoch: u64,
    view: u64,
    op: u64
});

msg!(DoViewChange {
    epoch: u64,
    view: u64,
    op: u64,
    last_normal_view: u64,
    log_start: u64,
    log_tail: Vec<ClientOp>,
    commit_num: u64
});

msg!(StartView {
    epoch: u64,
    view: u64,
    op: u64,
    log_start: u64,
    log_tail: Vec<ClientOp>,
    commit_num: u64
});

msg!(Prepare {
    epoch: u64,
    view: u64,
    op: u64,
    commit_num: u64,
    global_min_accept: u64,
    msg: ClientOp
});

msg!(PrepareOk {
    epoch: u64,
    view: u64,
    op: u64
});

msg!(Commit {
    epoch: u64,
    view: u64,
    commit_num: u64,
    global_min_accept: u64
});

msg!(GetState {
    epoch: u64,
    view: u64,
    op: u64
});

msg!(NewState {
    epoch: u64,
    view: u64,
    op: u64,
    commit_num: u64,
    log_tail: Vec<ClientOp>
});

msg!(Recovery {
    epoch: u64,
    nonce: u64
});

msg!(RecoveryResponse {
    epoch: u64,
    view: u64,
    nonce: u64,
    global_min_accept: u64,
    // The following fields are only valid when sent by the Primary
    op: Option<u64>,
    commit_num: Option<u64>,
    state: Option<Backend>,
    log_start: Option<u64>,
    log_tail: Option<Vec<ClientOp>>,
    // The following fields aren't in the paper, but they allow recovery in a later epoch
    // This is required because a replica may be started from an old config via gossip.
    // When a recovery response with a greater epoch than the epoch from when the replica was started
    // is received, it will use these configurations and restart recovery so that it can properly
    // recover from a quorum. This also allows the replica to shutdown if it isn't in the new
    // config.
    old_config: Option<VersionedReplicas>,
    new_config: Option<VersionedReplicas>
});

msg!(StartEpoch {
    epoch: u64,
    op: u64,
    old_config: VersionedReplicas,
    new_config: VersionedReplicas
});

msg!(EpochStarted {
    epoch: u64
});
//use proconio::{input, fastout}; use proconio::input; //#[fastout] fn main() { input! { a: i32, b: i32, } println!("{}", a * b); }
// Integration tests for replicator (cloner) startup against a local cluster.

#[macro_use]
extern crate log;
#[macro_use]
extern crate morgan;

use bincode::{deserialize, serialize};
use morgan::blockBufferPool::{create_new_tmp_ledger, Blocktree};
use morgan::clusterMessage::{ClusterInfo, Node, FULLNODE_PORT_RANGE};
use morgan::connectionInfo::ContactInfo;
use morgan::gossipService::discover_cluster;
use morgan::localCluster::{ClusterConfig, LocalCluster};
use morgan::cloner::Replicator;
use morgan::cloner::ReplicatorRequest;
use morgan::storageStage::STORAGE_ROTATE_TEST_COUNT;
use morgan::streamer::blob_receiver;
use morgan::verifier::ValidatorConfig;
use morgan_client::thin_client::create_client;
use morgan_interface::genesis_block::create_genesis_block;
use morgan_interface::hash::Hash;
use morgan_interface::signature::{Keypair, KeypairUtil};
use std::fs::remove_dir_all;
use std::net::SocketAddr;
use std::net::UdpSocket;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::channel;
use std::sync::Arc;
use std::thread::sleep;
use std::time::Duration;
use morgan_helper::logHelper::*;

// Asks the replicator listening at `to` for its slot height over UDP.
// Retries up to 10 times with a 500ms pause; panics if no reply arrives.
fn get_slot_height(to: SocketAddr) -> u64 {
    let socket = UdpSocket::bind("0.0.0.0:0").unwrap();
    socket
        .set_read_timeout(Some(Duration::from_secs(5)))
        .unwrap();
    let req = ReplicatorRequest::GetSlotHeight(socket.local_addr().unwrap());
    let serialized_req = serialize(&req).unwrap();
    for _ in 0..10 {
        socket.send_to(&serialized_req, to).unwrap();
        let mut buf = [0; 1024];
        if let Ok((size, _addr)) = socket.recv_from(&mut buf) {
            return deserialize(&buf[..size]).unwrap();
        }
        sleep(Duration::from_millis(500));
    }
    panic!("Couldn't get slot height!");
}

fn download_from_replicator(replicator_info: &ContactInfo) {
    // Create a client which downloads from the replicator and see that it
    // can respond with blobs.
    let tn = Node::new_localhost();
    let cluster_info = ClusterInfo::new_with_invalid_keypair(tn.info.clone());
    let mut repair_index = get_slot_height(replicator_info.storage_addr);
    println!("{}",
        printLn(
            format!("repair index: {}", repair_index).to_string(),
            module_path!().to_string()
        )
    );
    // NOTE(review): the fetched slot height is logged but then discarded —
    // the request below always asks for index 0.
    repair_index = 0;
    let req = cluster_info
        .window_index_request_bytes(0, repair_index)
        .unwrap();
    let exit = Arc::new(AtomicBool::new(false));
    let (s_reader, r_reader) = channel();
    let repair_socket = Arc::new(tn.sockets.repair);
    let t_receiver = blob_receiver(repair_socket.clone(), &exit, s_reader);
    println!("{}",
        printLn(
            format!("Sending repair requests from: {} to: {}",
                tn.info.id, replicator_info.gossip).to_string(),
            module_path!().to_string()
        )
    );
    // Send the repair request up to 5 times; stop after the first batch of
    // blobs arrives and verify each decoded entry has a non-default hash.
    let mut received_blob = false;
    for _ in 0..5 {
        repair_socket.send_to(&req, replicator_info.gossip).unwrap();
        let x = r_reader.recv_timeout(Duration::new(1, 0));
        if let Ok(blobs) = x {
            for b in blobs {
                let br = b.read().unwrap();
                assert!(br.index() == repair_index);
                println!("{}",
                    printLn(
                        format!("br: {:?}", br).to_string(),
                        module_path!().to_string()
                    )
                );
                let entries = Blocktree::deserialize_blob_data(&br.data()).unwrap();
                for entry in &entries {
                    println!("{}",
                        printLn(
                            format!("entry: {:?}", entry).to_string(),
                            module_path!().to_string()
                        )
                    );
                    assert_ne!(entry.hash, Hash::default());
                    received_blob = true;
                }
            }
            break;
        }
    }
    exit.store(true, Ordering::Relaxed);
    t_receiver.join().unwrap();
    assert!(received_blob);
}

/// Start the cluster with the given configuration and wait till the replicators are discovered
/// Then download blobs from one of them.
fn run_replicator_startup_basic(num_nodes: usize, num_replicators: usize) {
    morgan_logger::setup();
    println!("{}",
        printLn(
            format!("starting replicator test").to_string(),
            module_path!().to_string()
        )
    );
    let mut validator_config = ValidatorConfig::default();
    validator_config.storage_rotate_count = STORAGE_ROTATE_TEST_COUNT;
    let config = ClusterConfig {
        validator_config,
        num_replicators,
        node_stakes: vec![100; num_nodes],
        cluster_difs: 10_000,
        ..ClusterConfig::default()
    };
    let cluster = LocalCluster::new(&config);
    let (cluster_nodes, cluster_replicators) = discover_cluster(
        &cluster.entry_point_info.gossip,
        num_nodes + num_replicators,
    )
    .unwrap();
    assert_eq!(
        cluster_nodes.len() + cluster_replicators.len(),
        num_nodes + num_replicators
    );
    // Count replicators that advertise a usable storage address and remember
    // the last one so we can download from it below.
    let mut replicator_count = 0;
    let mut replicator_info = ContactInfo::default();
    for node in &cluster_replicators {
        println!("{}",
            printLn(
                format!("storage: {:?} rpc: {:?}", node.storage_addr, node.rpc).to_string(),
                module_path!().to_string()
            )
        );
        if ContactInfo::is_valid_address(&node.storage_addr) {
            replicator_count += 1;
            replicator_info = node.clone();
        }
    }
    assert_eq!(replicator_count, num_replicators);
    download_from_replicator(&replicator_info);
}

#[test]
fn test_replicator_startup_1_node() {
    run_replicator_startup_basic(1, 1);
}

#[test]
fn test_replicator_startup_2_nodes() {
    run_replicator_startup_basic(2, 1);
}

// Replicator pointed at an unreachable (0.0.0.0:0) leader must fail to start.
#[test]
fn test_replicator_startup_leader_hang() {
    use std::net::{IpAddr, Ipv4Addr, SocketAddr};
    morgan_logger::setup();
    println!("{}",
        printLn(
            format!("starting replicator test").to_string(),
            module_path!().to_string()
        )
    );
    let leader_ledger_path = "replicator_test_leader_ledger";
    let (genesis_block, _mint_keypair) = create_genesis_block(10_000);
    let (replicator_ledger_path, _blockhash) = create_new_tmp_ledger!(&genesis_block);
    {
        let replicator_keypair = Arc::new(Keypair::new());
        let storage_keypair = Arc::new(Keypair::new());
        println!("{}",
            printLn(
                format!("starting replicator node").to_string(),
                module_path!().to_string()
            )
        );
        let replicator_node = Node::new_localhost_with_pubkey(&replicator_keypair.pubkey());
        let fake_gossip = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0);
        let leader_info = ContactInfo::new_gossip_entry_point(&fake_gossip);
        let replicator_res = Replicator::new(
            &replicator_ledger_path,
            replicator_node,
            leader_info,
            replicator_keypair,
            storage_keypair,
        );
        assert!(replicator_res.is_err());
    }
    // Best-effort cleanup; errors deliberately ignored.
    let _ignored = Blocktree::destroy(&leader_ledger_path);
    let _ignored = Blocktree::destroy(&replicator_ledger_path);
    let _ignored = remove_dir_all(&leader_ledger_path);
    let _ignored = remove_dir_all(&replicator_ledger_path);
}

// Replicator with sabotaged TVU sockets cannot download the ledger and must
// fail to start.
#[test]
fn test_replicator_startup_ledger_hang() {
    morgan_logger::setup();
    println!("{}",
        printLn(
            format!("starting replicator test").to_string(),
            module_path!().to_string()
        )
    );
    let mut validator_config = ValidatorConfig::default();
    validator_config.storage_rotate_count = STORAGE_ROTATE_TEST_COUNT;
    let cluster = LocalCluster::new_with_equal_stakes(2, 10_000, 100);;
    println!("{}",
        printLn(
            format!("starting replicator node").to_string(),
            module_path!().to_string()
        )
    );
    let bad_keys = Arc::new(Keypair::new());
    let storage_keypair = Arc::new(Keypair::new());
    let mut replicator_node = Node::new_localhost_with_pubkey(&bad_keys.pubkey());
    // Pass bad TVU sockets to prevent successful ledger download
    replicator_node.sockets.tvu = vec![std::net::UdpSocket::bind("0.0.0.0:0").unwrap()];
    let (replicator_ledger_path, _blockhash) = create_new_tmp_ledger!(&cluster.genesis_block);
    let replicator_res = Replicator::new(
        &replicator_ledger_path,
        replicator_node,
        cluster.entry_point_info.clone(),
        bad_keys,
        storage_keypair,
    );
    assert!(replicator_res.is_err());
}

// Starting a cluster with a replicator must create its storage account with
// balance 1.
#[test]
fn test_account_setup() {
    let num_nodes = 1;
    let num_replicators = 1;
    let mut validator_config = ValidatorConfig::default();
    validator_config.storage_rotate_count = STORAGE_ROTATE_TEST_COUNT;
    let config = ClusterConfig {
        validator_config,
        num_replicators,
        node_stakes: vec![100; num_nodes],
        cluster_difs: 10_000,
        ..ClusterConfig::default()
    };
    let cluster = LocalCluster::new(&config);
    let _ = discover_cluster(
        &cluster.entry_point_info.gossip,
        num_nodes + num_replicators as usize,
    )
    .unwrap();
    // now check that the cluster actually has accounts for the replicator.
    let client = create_client(
        cluster.entry_point_info.client_facing_addr(),
        FULLNODE_PORT_RANGE,
    );
    cluster.replicator_infos.iter().for_each(|(_, value)| {
        assert_eq!(
            client
                .poll_get_balance(&value.replicator_storage_pubkey)
                .unwrap(),
            1
        );
    });
}
// Each submodule defines one mode; its `Mode*` entry type is re-exported at
// this module's root so callers don't need to name the submodule.

mod logo;
pub use logo::ModeLogo;

mod playing;
pub use playing::ModePlaying;

mod level_select;
pub use level_select::ModeLevelSelect;

mod ending;
pub use ending::ModeEnding;
use notify;
use notify::{DebouncedEvent, RecommendedWatcher, RecursiveMode, Watcher};
use std::path::Path;
use std::sync::mpsc::Sender;
use std::time::Duration;

/// Creates a filesystem watcher that recursively monitors `track_path`,
/// delivering debounced events (0-second debounce window) through `tx`.
///
/// Takes `&Path` instead of `&PathBuf` (clippy `ptr_arg`): existing
/// `&PathBuf` call sites still compile via deref coercion, and callers
/// holding a plain `&Path` no longer need to allocate a `PathBuf`.
///
/// # Errors
///
/// Returns `notify::Error` if the watcher cannot be created or if
/// `track_path` cannot be watched (e.g. it does not exist).
pub fn get_watcher(
    track_path: &Path,
    tx: Sender<DebouncedEvent>,
) -> Result<RecommendedWatcher, notify::Error> {
    let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_secs(0))?;
    watcher.watch(track_path, RecursiveMode::Recursive)?;
    Ok(watcher)
}
use async_std::io;
use async_std::net::TcpListener;
use rustls::ServerConfig;

use super::{TcpConnection, TlsListener, TlsListenerConfig};

use std::net::{SocketAddr, ToSocketAddrs};
use std::path::{Path, PathBuf};

/// # A builder for TlsListeners
///
/// This is created with a call to
/// [`TlsListener::build`](crate::TlsListener::build). This also can
/// be passed directly to [`tide::Server::listen`], skipping the
/// [`TlsListenerBuilder::finish`] call.
///
/// # Examples
///
/// ```rust
/// # use tide_rustls::TlsListener;
/// let listener = TlsListener::build()
///     .addrs("localhost:4433")
///     .cert("./tls/localhost-4433.cert")
///     .key("./tls/localhost-4433.key")
///     .finish();
/// ```
///
/// ```rust
/// # use tide_rustls::TlsListener;
/// let listener = TlsListener::build()
///     .tcp(std::net::TcpListener::bind("localhost:4433").unwrap())
///     .config(rustls::ServerConfig::new(rustls::NoClientAuth::new()))
///     .finish();
/// ```
#[derive(Default)]
pub struct TlsListenerBuilder {
    key: Option<PathBuf>,
    cert: Option<PathBuf>,
    config: Option<ServerConfig>,
    tcp: Option<TcpListener>,
    addrs: Option<Vec<SocketAddr>>,
}

// Manual Debug impl: ServerConfig is not Debug, so it is summarized as
// "Some(ServerConfig { .. })" / "None" instead of being derived.
impl std::fmt::Debug for TlsListenerBuilder {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("TlsListenerBuilder")
            .field("key", &self.key)
            .field("cert", &self.cert)
            .field(
                "config",
                &if self.config.is_some() {
                    "Some(ServerConfig { .. })"
                } else {
                    "None"
                },
            )
            .field("tcp", &self.tcp)
            .field("addrs", &self.addrs)
            .finish()
    }
}

impl TlsListenerBuilder {
    pub(crate) fn new() -> Self {
        TlsListenerBuilder::default()
    }

    /// Provide a path to a key file, in either pkcs8 or rsa
    /// formats. This is mutually exclusive with providing a server
    /// config with [`TlsListenerBuilder::config`], but must be used
    /// in conjunction with [`TlsListenerBuilder::cert`]
    pub fn key(mut self, path: impl AsRef<Path>) -> Self {
        self.key = Some(path.as_ref().into());
        self
    }

    /// Provide a path to a cert file. This is mutually exclusive with
    /// providing a server config with [`TlsListenerBuilder::config`],
    /// but must be used in conjunction with
    /// [`TlsListenerBuilder::key`]
    pub fn cert(mut self, path: impl AsRef<Path>) -> Self {
        self.cert = Some(path.as_ref().into());
        self
    }

    /// Provide a prebuilt
    /// [`rustls::ServerConfig`](::rustls::ServerConfig) with any
    /// options. This is mutually exclusive with both
    /// [`TlsListenerBuilder::key`] and [`TlsListenerBuilder::cert`],
    /// but provides the opportunity for more configuration choices.
    pub fn config(mut self, config: ServerConfig) -> Self {
        self.config = Some(config);
        self
    }

    /// Provides a bound tcp listener (either async-std or std) to
    /// build this tls listener on. This is mutually exclusive with
    /// [`TlsListenerBuilder::addrs`], but one of them is mandatory.
    pub fn tcp(mut self, tcp: impl Into<TcpListener>) -> Self {
        self.tcp = Some(tcp.into());
        self
    }

    /// Provides a [`std::net::ToSocketAddrs`] specification for this
    /// tls listener. This is mutually exclusive with
    /// [`TlsListenerBuilder::tcp`] but one of them is mandatory.
    ///
    /// NOTE(review): a failed address resolution is silently swallowed here
    /// (`addrs` stays `None`); the error only surfaces later from
    /// [`TlsListenerBuilder::finish`] as "either tcp or addrs are required".
    pub fn addrs(mut self, addrs: impl ToSocketAddrs) -> Self {
        if let Ok(socket_addrs) = addrs.to_socket_addrs() {
            self.addrs = Some(socket_addrs.collect());
        }
        self
    }

    /// finishes building a TlsListener from this TlsListenerBuilder.
    ///
    /// # Errors
    ///
    /// this will return an error unless all of the following conditions are met:
    /// * either of these is provided, but not both
    ///   * [`TlsListenerBuilder::tcp`]
    ///   * [`TlsListenerBuilder::addrs`]
    /// * either of these is provided, but not both
    ///   * both [`TlsListenerBuilder::cert`] AND [`TlsListenerBuilder::key`]
    ///   * [`TlsListenerBuilder::config`]
    pub fn finish(self) -> io::Result<TlsListener> {
        let Self {
            key,
            cert,
            config,
            tcp,
            addrs,
        } = self;

        // Exactly one TLS-credential source: key+cert paths XOR a ServerConfig.
        let config = match (key, cert, config) {
            (Some(key), Some(cert), None) => TlsListenerConfig::Paths { key, cert },
            (None, None, Some(config)) => TlsListenerConfig::ServerConfig(config),
            _ => {
                return Err(io::Error::new(
                    io::ErrorKind::InvalidInput,
                    "either cert + key are required or a ServerConfig",
                ))
            }
        };

        // Exactly one transport source: an already-bound listener XOR addrs.
        let connection = match (tcp, addrs) {
            (Some(tcp), None) => TcpConnection::Connected(tcp),
            (None, Some(addrs)) => TcpConnection::Addrs(addrs),
            _ => {
                return Err(io::Error::new(
                    io::ErrorKind::InvalidInput,
                    "either tcp or addrs are required",
                ))
            }
        };

        Ok(TlsListener::new(connection, config))
    }
}
// Converts unsigned integers to English words. NOTE(review): this is written
// in a pre-1.0 Rust dialect (`~str` owned strings, bare `a..b` match ranges
// which appear to be inclusive in this vintage) — it will not compile on a
// modern toolchain.

// Maps an exact power of ten to its "-illion" prefix ("m", "b", ...);
// anything not listed falls through to the joke prefix "gaz".
fn million_prefix(num : u64) -> ~str {
    match num {
        1000000 => ~"m",
        1000000000 => ~"b",
        1000000000000 => ~"tr",
        1000000000000000 => ~"quadr",
        // ...
        _ => ~"gaz"
    }
}

// Names a power-of-ten group: 1000 -> "thousand", otherwise prefix + "illion".
fn power_of_ten(num : u64) -> ~str {
    if num == 1000 {
        ~"thousand"
    } else {
        million_prefix(num) + "illion"
    }
}

// Recursively spells `num` in (British-style, with "and") English words.
pub fn number_to_english(num : u64) -> ~str {
    // hundreds
    let h = num/100;
    let hm = num%100;
    if h > 0 && h < 10 {
        let mut partial = number_to_english(h) + " hundred";
        if hm != 0 {
            partial = partial + " and " + number_to_english(hm);
        }
        return partial;
    }
    // thousands + *illions
    // Find the largest power-of-1000 group <= num whose leading digits are
    // 1..999, spell that group, then recurse on the remainder.
    let mut threshold = 1000;
    while threshold <= num {
        let d = num/threshold;
        let m = num%threshold;
        if d > 0 && d < 1000 {
            let mut partial = number_to_english(d) + " " + power_of_ten(threshold);
            if m != 0 {
                // "and" only joins a remainder below one hundred.
                if m < 100 {
                    partial = partial + " and";
                }
                partial = partial + " " + number_to_english(m);
            }
            return partial;
        }
        threshold *= 1000;
    }
    match num {
        // simple numbers
        0 => ~"zero",
        1 => ~"one",
        2 => ~"two",
        3 => ~"three",
        4 => ~"four",
        5 => ~"five",
        6 => ~"six",
        7 => ~"seven",
        8 => ~"eight",
        9 => ~"nine",
        10 => ~"ten",
        11 => ~"eleven",
        12 => ~"twelve",
        13 => ~"thirteen",
        15 => ~"fifteen",
        18 => ~"eighteen",
        // Regular teens: "four" + "teen", etc.
        14 | 16 | 17 | 19 => number_to_english(num-10) + "teen",
        20 => ~"twenty",
        30 => ~"thirty",
        40 => ~"forty",
        50 => ~"fifty",
        80 => ~"eighty",
        // Regular tens: "six" + "ty", etc.
        60 | 70 | 90 => number_to_english(num/10) + "ty",
        // Hyphenate tens and units, e.g. "twenty-one".
        21..99 => number_to_english((num/10)*10) + "-" + number_to_english(num%10),
        _ => ~"TODO"
    }
}
// This file was generated by gir (https://github.com/gtk-rs/gir) // from ../gir-files // DO NOT EDIT use crate::SessionFeature; use std::fmt; glib::wrapper! { #[doc(alias = "SoupContentDecoder")] pub struct ContentDecoder(Object<ffi::SoupContentDecoder, ffi::SoupContentDecoderClass>) @implements SessionFeature; match fn { type_ => || ffi::soup_content_decoder_get_type(), } } impl ContentDecoder {} pub const NONE_CONTENT_DECODER: Option<&ContentDecoder> = None; impl fmt::Display for ContentDecoder { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("ContentDecoder") } }
#[allow(unused_imports)]
use proconio::{marker::*, *};

/// Reads `n` integers and reports whether any three distinct entries
/// (by position, i < j < k) sum to exactly 1000.
#[fastout]
fn main() {
    input! {
        n: i32,
        a: [i32; n],
    }
    let len = a.len();
    for i in 0..len {
        for j in (i + 1)..len {
            for k in (j + 1)..len {
                if a[i] + a[j] + a[k] == 1000 {
                    println!("Yes");
                    return;
                }
            }
        }
    }
    println!("No");
}
#![deny(warnings)]

#[allow(unknown_lints, unused_imports)]
use thiserror::Error;

// Raw FFI bindings, re-exported for callers that need the C API directly.
pub use flann_sys as raw;

mod enums;
mod index;
mod indexable;
mod indices;
mod parameters;
mod slice_index;
mod vec_index;

pub use enums::{Algorithm, CentersInit, Checks, DistanceType, LogLevel};
pub use generic_array::typenum;
pub use index::Index;
pub use indexable::Indexable;
pub use parameters::Parameters;
pub use slice_index::SliceIndex;
pub use vec_index::VecIndex;

/// Errors produced while building or querying a FLANN index.
#[derive(Copy, Clone, Debug, Error)]
pub enum FlannError {
    /// A point had the wrong number of dimensions for this index.
    #[error("expected {} dimensions in point, but got {} dimensions", expected, got)]
    InvalidPointDimensionality { expected: usize, got: usize },
    /// A flat point buffer's length was not a multiple of the dimensionality.
    #[error("expected number divisible by {}, but got {}, which is not", expected, got)]
    InvalidFlatPointsLen { expected: usize, got: usize },
    /// FLANN itself reported a failure while building the index.
    #[error("FLANN failed to build index")]
    FailedToBuildIndex,
    /// An operation requiring points received an empty input.
    #[error("input must have at least one point")]
    ZeroInputPoints,
}

/// A nearest-neighbor search result.
#[derive(Copy, Clone, Debug)]
pub struct Neighbor<D> {
    // Index of the matched point — presumably into the indexed data set;
    // confirm against the `Index` query docs.
    pub index: usize,
    // Squared distance from the query point to this neighbor.
    pub distance_squared: D,
}
//! Wake plumbing for unicycle.
//!
//! We provide two different forms of wakers:
//!
//! * Stack allocated - A lightweight waker stored on the stack that converts into a waker owned
//!   by a [Shared] when cloned.
//! * [Shared]-owned [InternalWaker] - Takes full ownership of the plumbing necessary to
//!   wake the task from another thread. These must be stored in an [InternalWakers] collection
//!   that is owned by a [Shared] task structure.

#![warn(clippy::undocumented_unsafe_blocks)]

use std::cell::UnsafeCell;
use std::mem::{self, ManuallyDrop};
use std::ptr::{self, NonNull};
use std::sync::Arc;
use std::task::{Context, RawWaker, RawWakerVTable, Waker};

use crate::lock::{Mutex, RwLock};
use crate::pin_vec::PinVec;
use crate::Shared;

/// Wrap the current context in one that updates the local WakeSet.
/// This takes the shared data by reference and uses `RefWaker::VTABLE`.
///
/// It works because we don't drop the waker inside of this function.
pub(crate) fn poll_with_ref<F, R>(shared: &Arc<Shared>, index: usize, f: F) -> R
where
    F: FnOnce(&mut Context<'_>) -> R,
{
    // Need to assigned owned a fixed location, so do not move it from here for the duration of the poll.
    let internals = InternalWaker::new(NonNull::from(shared.as_ref()), index);

    // Safety: as_raw_waker() returns an object that upholds the right RawWaker contract.
    let waker = unsafe { Waker::from_raw(internals.as_waker_ref().into()) };
    // ManuallyDrop: this stack waker holds no strong count, so its drop glue
    // (which decrements one) must never run.
    let waker = ManuallyDrop::new(waker);
    let mut cx = Context::from_waker(&waker);
    f(&mut cx)
}

/// A collection of [InternalWaker]s owned by a [Shared] task structure.
pub(crate) struct InternalWakers {
    // PinVec keeps each InternalWaker at a stable address as the collection
    // grows, which the raw pointers handed to RawWaker rely on.
    wakers: Mutex<PinVec<InternalWaker>>,
}

impl InternalWakers {
    pub fn new() -> Self {
        Self {
            wakers: Mutex::new(PinVec::new()),
        }
    }
}

struct InternalWaker {
    /// A pointer to the [Shared] task data.
    ///
    /// This is actually an Arc, so it's possible to do increment_strong_count and
    /// decrement_strong_count on it.
    ///
    /// Note that the [InternalWaker] itself doesn't have a strong reference to [Shared], because
    /// the [InternalWaker] is assumed to be contained in the [Shared] via [InternalWakers]. Using
    /// a strong reference would create a cycle leading to the [Shared] and all its wakers leaking.
    ///
    /// Instead, we increment and decrement the ref count on [Shared] when creating and dropping
    /// an [InternalWakerRef] that refers to this waker. This is done so that cloning an
    /// [InternalWaker] (technically cloning an [InternalWakerRef]) can be done without any new
    /// heap allocations.
    shared: NonNull<Shared>,
    /// The index of the task this waker will wake.
    index: usize,
}

impl InternalWaker {
    /// Construct a new waker.
    fn new(shared: NonNull<Shared>, index: usize) -> Self {
        Self { shared, index }
    }

    /// Construct a new internal waker reference.
    /// Caller must ensure that the internal waker has the appropriate lifetime.
    /// Unlike `as_waker_ref`, this takes a strong count on [Shared].
    fn as_shared_waker_ref(&self) -> InternalWakerRef {
        InternalWakerRef::from_waker(self)
    }

    // Borrowed reference: does NOT bump the [Shared] strong count; only safe
    // for the stack waker in `poll_with_ref`, which never drops it.
    fn as_waker_ref(&self) -> InternalWakerRef {
        InternalWakerRef(NonNull::from(self))
    }

    fn shared(&self) -> &Shared {
        // Safety: self.shared is known to point to a valid Shared by construction
        unsafe { self.shared.as_ref() }
    }
}

#[repr(transparent)]
struct InternalWakerRef(NonNull<InternalWaker>);

impl InternalWakerRef {
    /// Get a new reference counted internal waker.
    fn get_shared_waker(shared: &Shared, index: usize) -> Self {
        let mut all_wakers = shared.all_wakers.wakers.lock();
        // Grow the table on demand so every index up to `index` has a waker.
        if all_wakers.len() <= index {
            let len = all_wakers.len();
            all_wakers.extend((len..index + 1).map(|i| InternalWaker::new(shared.into(), i)));
        }
        all_wakers[index].as_shared_waker_ref()
    }

    /// Construct from an existing internal waker.
    ///
    /// # Safety
    ///
    /// Caller must ensure that the internal waker has the appropriate lifetime.
    fn from_waker(waker: &InternalWaker) -> Self {
        // SAFETY: we know that this is constructed from a legal Arc instance,
        // since it's all handled internally.
        unsafe {
            Arc::increment_strong_count(waker.shared.as_ptr());
        }
        InternalWakerRef(NonNull::from(waker))
    }

    fn internals(&self) -> &InternalWaker {
        // Safety: InternalWakerRef points to an InternalWaker by construction
        unsafe { self.0.as_ref() }
    }

    const VTABLE: &'static RawWakerVTable = &RawWakerVTable::new(
        Self::clone_unchecked,
        Self::wake_unchecked,
        Self::wake_by_ref_unchecked,
        Self::drop_unchecked,
    );

    // RawWaker "clone": reinterpret the data pointer as a borrowed
    // InternalWakerRef (ManuallyDrop: we must not consume the caller's
    // reference) and mint a new counted reference from the shared table.
    unsafe fn clone_unchecked(ptr: *const ()) -> RawWaker {
        let this: ManuallyDrop<InternalWakerRef> = mem::transmute(ptr);
        InternalWakerRef::get_shared_waker(
            &*(this.internals().shared.as_ptr()),
            this.internals().index,
        )
        .into()
    }

    // RawWaker "wake_by_ref": mark the task index in the wake set, then nudge
    // the executor's waker; does not consume the reference.
    unsafe fn wake_by_ref_unchecked(this: *const ()) {
        let this: ManuallyDrop<InternalWakerRef> = mem::transmute(this);
        let internals = this.internals();
        internals.shared().wake_set.wake(internals.index);
        internals.shared().waker.wake_by_ref();
    }

    // RawWaker "wake": same as wake_by_ref, but `this` is reconstituted
    // without ManuallyDrop so the reference (and its strong count) is consumed.
    unsafe fn wake_unchecked(this: *const ()) {
        let this: InternalWakerRef = mem::transmute(this);
        let internals = this.internals();
        internals.shared().wake_set.wake(internals.index);
        internals.shared().waker.wake_by_ref();
    }

    // RawWaker "drop": reconstitute and let Drop decrement the strong count.
    unsafe fn drop_unchecked(this: *const ()) {
        let _this: InternalWakerRef = mem::transmute(this);
    }
}

impl From<InternalWakerRef> for RawWaker {
    fn from(val: InternalWakerRef) -> Self {
        let waker = RawWaker::new(val.0.as_ptr() as *const _, InternalWakerRef::VTABLE);
        mem::forget(val); // val will be dropped from RawWaker's vtable
        waker
    }
}

impl Drop for InternalWakerRef {
    fn drop(&mut self) {
        // Safety: internals.shared is actually an Arc<Shared>. The ref count was incremented
        // when self was constructed, so now we clean it up
        unsafe { Arc::decrement_strong_count(self.internals().shared.as_ptr()) }
    }
}

// The executor-level waker, swappable from poll and readable from any task
// waker; guarded by a custom try-lock so wakes never block.
pub(crate) struct SharedWaker {
    lock: RwLock,
    waker: UnsafeCell<Waker>,
}

impl SharedWaker {
    /// Construct a new shared waker.
    pub(crate) fn new() -> Self {
        Self {
            lock: RwLock::new(),
            waker: UnsafeCell::new(noop_waker()),
        }
    }

    /// Wake the shared waker by ref.
    pub(crate) fn wake_by_ref(&self) {
        // If the shared lock is unavailable a swap is in progress; that path
        // calls wake_by_ref itself, so skipping here is safe.
        if let Some(_guard) = self.lock.try_lock_shared() {
            // Safety: We can access the unsafe cell because we are holding the lock
            let waker = unsafe { &*self.waker.get() };
            waker.wake_by_ref();
        }
    }

    /// Swap out the current waker, dropping the one that was previously in
    /// place.
    ///
    /// Returns `true` if the waker was successfully swapped, or swapping is not
    /// necessary.
    ///
    /// Otherwise returns `false` and calling `wake_by_ref`, indicating that we
    /// want to try again.
    ///
    /// # Safety
    ///
    /// Caller must ensure that they are the only one who will attempt to lock
    /// the waker exclusively.
    pub(crate) unsafe fn swap(&self, waker: &Waker) -> bool {
        let shared_waker = self.waker.get();

        // Safety: No need to lock the shared waker exclusively to access an
        // immutable reference since the caller is assured to be the only one
        // trying to swap.
        if (*shared_waker).will_wake(waker) {
            return true;
        }

        if let Some(_guard) = self.lock.try_lock_exclusive_guard() {
            *self.waker.get() = waker.clone();
            return true;
        }

        // A concurrent reader holds the lock: self-wake so the caller polls
        // (and retries the swap) again.
        waker.wake_by_ref();
        false
    }
}

/// Create a waker which does nothing.
fn noop_waker() -> Waker {
    // Safety: noop_raw_waker() returns an object that upholds the right RawWaker contract.
    unsafe { Waker::from_raw(noop_raw_waker()) }
}

fn noop_raw_waker() -> RawWaker {
    return RawWaker::new(
        ptr::null(),
        &RawWakerVTable::new(noop_clone, noop_wake, noop_wake_by_ref, noop_drop),
    );

    fn noop_clone(_: *const ()) -> RawWaker {
        noop_raw_waker()
    }
    fn noop_wake(_: *const ()) {}
    fn noop_wake_by_ref(_: *const ()) {}
    fn noop_drop(_: *const ()) {}
}

#[cfg(test)]
mod test {
    use super::poll_with_ref;
    use crate::FuturesUnordered;
    use crate::Shared;
    use futures::future::poll_fn;
    use futures::Future;
    use std::cell::RefCell;
    use std::mem;
    use std::pin::Pin;
    use std::rc::Rc;
    use std::sync::Arc;
    use std::task::Context;
    use std::task::{Poll, Waker};

    // Smoke test: the stack waker can be constructed and handed to a closure.
    #[test]
    fn basic_waker() {
        let shared = Arc::new(Shared::new());
        let index = 0;
        poll_with_ref(&shared, index, |_| ())
    }

    // Cloning the context waker inside a task must not crash or leak.
    #[test]
    fn clone_waker() {
        struct GetWaker;
        impl Future for GetWaker {
            type Output = Waker;
            fn poll(
                self: std::pin::Pin<&mut Self>,
                cx: &mut std::task::Context<'_>,
            ) -> std::task::Poll<Self::Output> {
                Poll::Ready(cx.waker().clone())
            }
        }

        block_on::block_on(async {
            let mut futures = FuturesUnordered::new();
            futures.push(GetWaker);
            futures.next().await.unwrap();
        });
    }

    // A cloned waker must remain usable after the FuturesUnordered is gone.
    #[test]
    fn long_lived_waker() {
        struct GetWaker;
        impl Future for GetWaker {
            type Output = Waker;
            fn poll(
                self: std::pin::Pin<&mut Self>,
                cx: &mut std::task::Context<'_>,
            ) -> std::task::Poll<Self::Output> {
                Poll::Ready(cx.waker().clone())
            }
        }

        let waker = block_on::block_on(async {
            let mut futures = FuturesUnordered::new();
            futures.push(GetWaker);
            futures.next().await.unwrap()
        });
        waker.wake();
    }

    #[test]
    fn many_wakers() {
        // This test primarily makes sure that the array of Wakers stays pinned
        // as more wakers are added.
        block_on::block_on(async {
            let mut futures = FuturesUnordered::new();

            let wake1 = Rc::new(RefCell::new(None));
            let wake2: Rc<RefCell<Option<Waker>>> = Rc::new(RefCell::new(None));
            let woken = Rc::new(RefCell::new(false));

            {
                let woken = woken.clone();
                let wake1 = wake1.clone();
                let wake2 = wake2.clone();
                futures.push(FakeDynFuture::new(poll_fn(move |cx| {
                    if *woken.borrow() {
                        // We've been awoken, so complete
                        Poll::Ready(())
                    } else {
                        if wake1.borrow().is_none() {
                            *wake1.borrow_mut() = Some(cx.waker().clone());
                        }
                        // Wake the other future if somehow it ran before us.
                        if let Some(waker) = wake2.borrow().as_ref() {
                            waker.wake_by_ref()
                        }
                        Poll::Pending
                    }
                })));
            }

            // Poll our future once to be sure to clone the waker
            poll_fn(|cx| {
                assert_eq!(
                    crate::PollNext::poll_next(Pin::new(&mut futures), cx),
                    Poll::Pending
                );
                Poll::Ready(())
            })
            .await;

            // Push a bunch of do-nothing futures to force the underlying waker table to resize.
            //
            // We do 127 of them because that the smallest number that fails in miri and segfaults
            // natively.
            for _ in 0..127 {
                futures.push(FakeDynFuture::new(poll_fn(|cx| {
                    // clone the waker to force the table to fill in.
                    let _ = cx.waker().clone();
                    Poll::Ready(())
                })));
            }

            // Now push a future that wakes the first one
            futures.push(FakeDynFuture::new(poll_fn(move |cx| {
                match &*wake1.borrow() {
                    Some(waker) => {
                        *woken.borrow_mut() = true;
                        waker.wake_by_ref();
                        Poll::Ready(())
                    }
                    None => {
                        // the other future hasn't run yet, so add our waker and then return pending.
                        *wake2.borrow_mut() = Some(cx.waker().clone());
                        Poll::Pending
                    }
                }
            })));

            while futures.next().await.is_some() {}
        })
    }

    /// Miri can't handle the vtables we ended up with originally, so this is a manual version
    /// that should work better with miri.
    struct FakeDynFuture<T> {
        future: *const (),
        poll_fn: fn(this: *const (), cx: &mut Context) -> Poll<T>,
        drop_fn: fn(this: *const ()),
    }

    impl<T> FakeDynFuture<T> {
        fn new<F: Future<Output = T>>(fut: F) -> Self {
            Self {
                future: Box::into_raw(Box::new(fut)) as *const _,
                poll_fn: |this, cx| {
                    // Safety: this is self.future (see the poll implementation below), which
                    // is a Box<F>. Boxes never move, so we can treat it as a pinned reference.
                    let this = unsafe { mem::transmute::<_, Pin<&mut F>>(this) };
                    this.poll(cx)
                },
                drop_fn: |this| {
                    // Safety: this is self.future (see the drop implementation below), which is a
                    // Box<F>.
                    unsafe {
                        mem::transmute::<_, Pin<Box<F>>>(this);
                    }
                },
            }
        }
    }

    impl<T> Future for FakeDynFuture<T> {
        type Output = T;
        fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
            (self.poll_fn)(self.future, cx)
        }
    }

    impl<T> Drop for FakeDynFuture<T> {
        fn drop(&mut self) {
            (self.drop_fn)(self.future)
        }
    }

    mod block_on {
        //! A way to run a future to completion on the current thread.
        //!
        //! Taken from https://doc.rust-lang.org/std/task/trait.Wake.html#examples

        use futures::Future;
        use std::{
            sync::Arc,
            task::{Context, Poll, Wake},
            thread::{self, Thread},
        };

        /// A waker that wakes up the current thread when called.
        struct ThreadWaker(Thread);

        impl Wake for ThreadWaker {
            fn wake(self: Arc<Self>) {
                self.0.unpark();
            }
        }

        /// Run a future to completion on the current thread.
        pub(super) fn block_on<T>(fut: impl Future<Output = T>) -> T {
            // Pin the future so it can be polled.
            let mut fut = Box::pin(fut);

            // Create a new context to be passed to the future.
            let t = thread::current();
            let waker = Arc::new(ThreadWaker(t)).into();
            let mut cx = Context::from_waker(&waker);

            // Run the future to completion.
            loop {
                match fut.as_mut().poll(&mut cx) {
                    Poll::Ready(res) => return res,
                    Poll::Pending => thread::park(),
                }
            }
        }
    }
}
#![recursion_limit = "192"]

#[macro_use]
extern crate quote;
extern crate proc_macro2;
extern crate codemap;
extern crate codemap_diagnostic;

use std::io;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::convert::AsRef;
use std::fs::File;
use std::process::exit;
use std::env;

use codemap::{ CodeMap, Span };
use codemap_diagnostic::{ Diagnostic, Level, SpanLabel, SpanStyle, Emitter, ColorConfig };

mod translate;
mod grammar;
#[cfg(test)]
mod test;

/// State for one compilation: the source map (for span -> file/line
/// resolution) plus every diagnostic accumulated so far.
struct PegCompiler {
    codemap: CodeMap,
    diagnostics: Vec<codemap_diagnostic::Diagnostic>
}

impl PegCompiler {
    /// Creates an empty compiler with no sources and no diagnostics.
    fn new() -> PegCompiler {
        PegCompiler {
            codemap: CodeMap::new(),
            diagnostics: vec![],
        }
    }

    /// True if any accumulated diagnostic is fatal (Error or Bug);
    /// warnings alone do not fail the compilation.
    fn has_error(&self) -> bool {
        self.diagnostics.iter().any(|d| d.level == Level::Error || d.level == Level::Bug)
    }

    /// Queues an error diagnostic pointing at `span`, with an optional label
    /// rendered inline at the primary span.
    fn span_error(&mut self, error: String, span: Span, label: Option<String>) {
        self.diagnostics.push(Diagnostic {
            level: Level::Error,
            message: error,
            code: None,
            spans: vec![SpanLabel { span, label, style: SpanStyle::Primary }]
        });
    }

    /// Queues a warning diagnostic pointing at `span`; does not fail the build.
    fn span_warning(&mut self, error: String, span: Span, label: Option<String>) {
        self.diagnostics.push(Diagnostic {
            level: Level::Warning,
            message: error,
            code: None,
            spans: vec![SpanLabel { span, label, style: SpanStyle::Primary }]
        });
    }

    /// Emits all queued diagnostics to stderr (color auto-detected) and
    /// clears the queue so they are not printed twice.
    fn print_diagnostics(&mut self) {
        if !self.diagnostics.is_empty() {
            let mut emitter = Emitter::stderr(ColorConfig::Auto, Some(&self.codemap));
            emitter.emit(&self.diagnostics[..]);
            self.diagnostics.clear();
        }
    }

    /// Compiles one grammar source to Rust code.
    ///
    /// Pipeline: parse (`grammar::items`) -> semantic translation
    /// (`translate::Grammar::from_ast`) -> code generation
    /// (`translate::compile_grammar`). On failure, diagnostics are queued on
    /// `self` and `Err(())` is returned; callers surface them via
    /// `print_diagnostics`.
    fn compile(&mut self, filename: String, input: String) -> Result<String, ()> {
        let file = self.codemap.add_file(filename, input);

        let ast_items = match grammar::items(&file.source(), file.span) {
            Ok(g) => g,
            Err(e) => {
                // Zero-width span at the parse failure offset.
                self.span_error(
                    "Error parsing language specification".to_owned(),
                    file.span.subspan(e.offset as u64, e.offset as u64),
                    Some(format!("{}", e))
                );
                return Err(())
            }
        };

        let grammar_def = translate::Grammar::from_ast(self, ast_items)?;
        let output_tokens = translate::compile_grammar(self, &grammar_def);

        // Translation may queue errors without returning Err, so re-check here
        // before accepting the generated tokens.
        if self.has_error() {
            Err(())
        } else {
            Ok(output_tokens?.to_string())
        }
    }
}

/// Compile a peg grammar to Rust source, printing errors to stderr
pub fn compile(filename: String, input: String) -> Result<String, ()> {
    let mut compiler = PegCompiler::new();
    let result = compiler.compile(filename, input);
    compiler.print_diagnostics();
    result
}

/// Compile the PEG grammar in the specified filename to cargo's OUT_DIR.
/// Errors are emitted to stderr and terminate the process.
pub fn cargo_build<T: AsRef<Path> + ?Sized>(input_path: &T) {
    let mut stderr = io::stderr();
    let input_path = input_path.as_ref();

    let mut peg_source = String::new();
    if let Err(e) = File::open(input_path).and_then(|mut x| x.read_to_string(&mut peg_source)) {
        writeln!(stderr, "Could not read PEG input file `{}`: {}", input_path.display(), e).unwrap();
        exit(1);
    }

    // Make cargo re-run the build script when the grammar file changes.
    println!("cargo:rerun-if-changed={}", input_path.display());

    let mut compiler = PegCompiler::new();
    let result = compiler.compile(input_path.to_string_lossy().into_owned(), peg_source);
    compiler.print_diagnostics();

    let rust_source = match result {
        Ok(s) => s,
        Err(()) => {
            writeln!(stderr, "Error compiling PEG grammar").unwrap();
            exit(1);
        }
    };

    // Mirror the input filename into OUT_DIR with a .rs extension.
    let out_dir: PathBuf = env::var_os("OUT_DIR").unwrap().into();
    let rust_path = out_dir.join(input_path.file_name().unwrap()).with_extension("rs");
    let mut output_file = File::create(&rust_path).unwrap();
    output_file.write_all(rust_source.as_bytes()).unwrap();
}
// NOTE(review): this matches the shape of svd2rust-generated register access
// code (read/write proxies for a USB RXFIFOSZ register) — presumably
// auto-generated; prefer regenerating from the SVD over hand-editing. TODO confirm.

#[doc = r"Value read from the register"]
pub struct R {
    bits: u8,
}
#[doc = r"Value to write to the register"]
pub struct W {
    bits: u8,
}
impl super::RXFIFOSZ {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: W is seeded with the current value so fields the
        // closure does not touch are preserved.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Plain write: W starts from the reset value, not the current contents.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u8 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = "Possible values of the field `USB_RXFIFOSZ_SIZE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USB_RXFIFOSZ_SIZER {
    #[doc = "8"]
    USB_RXFIFOSZ_SIZE_8,
    #[doc = "16"]
    USB_RXFIFOSZ_SIZE_16,
    #[doc = "32"]
    USB_RXFIFOSZ_SIZE_32,
    #[doc = "64"]
    USB_RXFIFOSZ_SIZE_64,
    #[doc = "128"]
    USB_RXFIFOSZ_SIZE_128,
    #[doc = "256"]
    USB_RXFIFOSZ_SIZE_256,
    #[doc = "512"]
    USB_RXFIFOSZ_SIZE_512,
    #[doc = "1024"]
    USB_RXFIFOSZ_SIZE_1024,
    #[doc = "2048"]
    USB_RXFIFOSZ_SIZE_2048,
    #[doc = r"Reserved"]
    _Reserved(u8),
}
impl USB_RXFIFOSZ_SIZER {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        // Encoded value n selects a FIFO size of 8 << n bytes.
        match *self {
            USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_8 => 0,
            USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_16 => 1,
            USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_32 => 2,
            USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_64 => 3,
            USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_128 => 4,
            USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_256 => 5,
            USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_512 => 6,
            USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_1024 => 7,
            USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_2048 => 8,
            USB_RXFIFOSZ_SIZER::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> USB_RXFIFOSZ_SIZER {
        // Inverse of `bits`; unknown encodings are preserved in `_Reserved`.
        match value {
            0 => USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_8,
            1 => USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_16,
            2 => USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_32,
            3 => USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_64,
            4 => USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_128,
            5 => USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_256,
            6 => USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_512,
            7 => USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_1024,
            8 => USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_2048,
            i => USB_RXFIFOSZ_SIZER::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `USB_RXFIFOSZ_SIZE_8`"]
    #[inline(always)]
    pub fn is_usb_rxfifosz_size_8(&self) -> bool { *self == USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_8 }
    #[doc = "Checks if the value of the field is `USB_RXFIFOSZ_SIZE_16`"]
    #[inline(always)]
    pub fn is_usb_rxfifosz_size_16(&self) -> bool { *self == USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_16 }
    #[doc = "Checks if the value of the field is `USB_RXFIFOSZ_SIZE_32`"]
    #[inline(always)]
    pub fn is_usb_rxfifosz_size_32(&self) -> bool { *self == USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_32 }
    #[doc = "Checks if the value of the field is `USB_RXFIFOSZ_SIZE_64`"]
    #[inline(always)]
    pub fn is_usb_rxfifosz_size_64(&self) -> bool { *self == USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_64 }
    #[doc = "Checks if the value of the field is `USB_RXFIFOSZ_SIZE_128`"]
    #[inline(always)]
    pub fn is_usb_rxfifosz_size_128(&self) -> bool { *self == USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_128 }
    #[doc = "Checks if the value of the field is `USB_RXFIFOSZ_SIZE_256`"]
    #[inline(always)]
    pub fn is_usb_rxfifosz_size_256(&self) -> bool { *self == USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_256 }
    #[doc = "Checks if the value of the field is `USB_RXFIFOSZ_SIZE_512`"]
    #[inline(always)]
    pub fn is_usb_rxfifosz_size_512(&self) -> bool { *self == USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_512 }
    #[doc = "Checks if the value of the field is `USB_RXFIFOSZ_SIZE_1024`"]
    #[inline(always)]
    pub fn is_usb_rxfifosz_size_1024(&self) -> bool { *self == USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_1024 }
    #[doc = "Checks if the value of the field is `USB_RXFIFOSZ_SIZE_2048`"]
    #[inline(always)]
    pub fn is_usb_rxfifosz_size_2048(&self) -> bool { *self == USB_RXFIFOSZ_SIZER::USB_RXFIFOSZ_SIZE_2048 }
}
#[doc = "Values that can be written to the field `USB_RXFIFOSZ_SIZE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USB_RXFIFOSZ_SIZEW {
    #[doc = "8"]
    USB_RXFIFOSZ_SIZE_8,
    #[doc = "16"]
    USB_RXFIFOSZ_SIZE_16,
    #[doc = "32"]
    USB_RXFIFOSZ_SIZE_32,
    #[doc = "64"]
    USB_RXFIFOSZ_SIZE_64,
    #[doc = "128"]
    USB_RXFIFOSZ_SIZE_128,
    #[doc = "256"]
    USB_RXFIFOSZ_SIZE_256,
    #[doc = "512"]
    USB_RXFIFOSZ_SIZE_512,
    #[doc = "1024"]
    USB_RXFIFOSZ_SIZE_1024,
    #[doc = "2048"]
    USB_RXFIFOSZ_SIZE_2048,
}
impl USB_RXFIFOSZ_SIZEW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        // No `_Reserved` here: only valid encodings can be written safely.
        match *self {
            USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_8 => 0,
            USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_16 => 1,
            USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_32 => 2,
            USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_64 => 3,
            USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_128 => 4,
            USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_256 => 5,
            USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_512 => 6,
            USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_1024 => 7,
            USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_2048 => 8,
        }
    }
}
#[doc = r"Proxy"]
pub struct _USB_RXFIFOSZ_SIZEW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXFIFOSZ_SIZEW<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: USB_RXFIFOSZ_SIZEW) -> &'a mut W {
        // SAFETY: enum variants only produce valid encodings for this field.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "8"]
    #[inline(always)]
    pub fn usb_rxfifosz_size_8(self) -> &'a mut W { self.variant(USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_8) }
    #[doc = "16"]
    #[inline(always)]
    pub fn usb_rxfifosz_size_16(self) -> &'a mut W { self.variant(USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_16) }
    #[doc = "32"]
    #[inline(always)]
    pub fn usb_rxfifosz_size_32(self) -> &'a mut W { self.variant(USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_32) }
    #[doc = "64"]
    #[inline(always)]
    pub fn usb_rxfifosz_size_64(self) -> &'a mut W { self.variant(USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_64) }
    #[doc = "128"]
    #[inline(always)]
    pub fn usb_rxfifosz_size_128(self) -> &'a mut W { self.variant(USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_128) }
    #[doc = "256"]
    #[inline(always)]
    pub fn usb_rxfifosz_size_256(self) -> &'a mut W { self.variant(USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_256) }
    #[doc = "512"]
    #[inline(always)]
    pub fn usb_rxfifosz_size_512(self) -> &'a mut W { self.variant(USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_512) }
    #[doc = "1024"]
    #[inline(always)]
    pub fn usb_rxfifosz_size_1024(self) -> &'a mut W { self.variant(USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_1024) }
    #[doc = "2048"]
    #[inline(always)]
    pub fn usb_rxfifosz_size_2048(self) -> &'a mut W { self.variant(USB_RXFIFOSZ_SIZEW::USB_RXFIFOSZ_SIZE_2048) }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0:3 (mask 0b1111).
        self.w.bits &= !(15 << 0);
        self.w.bits |= ((value as u8) & 15) << 0;
        self.w
    }
}
#[doc = r"Value of the field"]
pub struct USB_RXFIFOSZ_DPBR {
    bits: bool,
}
impl USB_RXFIFOSZ_DPBR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _USB_RXFIFOSZ_DPBW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_RXFIFOSZ_DPBW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 4.
        self.w.bits &= !(1 << 4);
        self.w.bits |= ((value as u8) & 1) << 4;
        self.w
    }
}
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
    #[doc = "Bits 0:3 - Max Packet Size"]
    #[inline(always)]
    pub fn usb_rxfifosz_size(&self) -> USB_RXFIFOSZ_SIZER {
        USB_RXFIFOSZ_SIZER::_from(((self.bits >> 0) & 15) as u8)
    }
    #[doc = "Bit 4 - Double Packet Buffer Support"]
    #[inline(always)]
    pub fn usb_rxfifosz_dpb(&self) -> USB_RXFIFOSZ_DPBR {
        let bits = ((self.bits >> 4) & 1) != 0;
        USB_RXFIFOSZ_DPBR { bits }
    }
}
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:3 - Max Packet Size"]
    #[inline(always)]
    pub fn usb_rxfifosz_size(&mut self) -> _USB_RXFIFOSZ_SIZEW {
        _USB_RXFIFOSZ_SIZEW { w: self }
    }
    #[doc = "Bit 4 - Double Packet Buffer Support"]
    #[inline(always)]
    pub fn usb_rxfifosz_dpb(&mut self) -> _USB_RXFIFOSZ_DPBW {
        _USB_RXFIFOSZ_DPBW { w: self }
    }
}
#![deny(rust_2018_idioms)]

use std::path::Path;
use std::time::Duration;

use clap::Parser;
use futures::StreamExt;
use libp2p::core::upgrade;
use libp2p::identify::{Event as IdentifyEvent, Info as IdentifyInfo};
use libp2p::identity::Keypair;
use libp2p::swarm::{SwarmBuilder, SwarmEvent};
use libp2p::Transport;
use libp2p::{dns, noise, Multiaddr};
use serde::Deserialize;
use zeroize::Zeroizing;

mod behaviour;

/// CLI arguments; each flag can also come from the environment.
#[derive(Parser, Debug)]
struct Args {
    // Optional JSON file holding the node's long-lived identity key; when
    // absent, a fresh ed25519 key is generated at startup.
    #[clap(long, value_parser, env = "IDENTITY_CONFIG_FILE")]
    identity_config_file: Option<std::path::PathBuf>,
    // Multiaddr the swarm listens on.
    #[clap(long, value_parser, env = "LISTEN_ON")]
    listen_on: Multiaddr,
}

/// On-disk identity: a base64-encoded protobuf-serialized private key.
#[derive(Clone, Deserialize)]
struct IdentityConfig {
    pub private_key: String,
}

impl IdentityConfig {
    /// Loads and parses the JSON identity file at `path`.
    pub fn from_file(path: &Path) -> anyhow::Result<Self> {
        Ok(serde_json::from_str(&std::fs::read_to_string(path)?)?)
    }
}

impl zeroize::Zeroize for IdentityConfig {
    // Wipe key material from memory (used via the Zeroizing wrapper below).
    fn zeroize(&mut self) {
        self.private_key.zeroize()
    }
}

/// Swarm executor that spawns connection tasks onto the tokio runtime.
pub struct TokioExecutor();

impl libp2p::swarm::Executor for TokioExecutor {
    fn exec(
        &self,
        future: std::pin::Pin<Box<dyn std::future::Future<Output = ()> + 'static + Send>>,
    ) {
        tokio::task::spawn(future);
    }
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Default to info-level logging unless RUST_LOG is set by the caller.
    if std::env::var_os("RUST_LOG").is_none() {
        std::env::set_var("RUST_LOG", "info");
    }

    setup_tracing();

    let args = Args::parse();

    // Load the configured identity, or generate an ephemeral one.
    let keypair = match &args.identity_config_file {
        Some(path) => {
            // Zeroizing wrappers wipe the decoded key bytes when dropped.
            let config = Zeroizing::new(IdentityConfig::from_file(path.as_path())?);
            let private_key = Zeroizing::new(base64::decode(config.private_key.as_bytes())?);
            Keypair::from_protobuf_encoding(&private_key)?
        }
        None => {
            tracing::info!("No private key configured, generating a new one");
            Keypair::generate_ed25519()
        }
    };

    let peer_id = keypair.public().to_peer_id();
    tracing::info!(%peer_id, "Starting up");

    // Transport stack: TCP -> system DNS resolution -> Noise XX handshake
    // (authentication) -> yamux stream multiplexing.
    let transport = libp2p::tcp::tokio::Transport::new(libp2p::tcp::Config::new());
    let transport = dns::TokioDnsConfig::system(transport).unwrap();

    let noise_keys = noise::Keypair::<noise::X25519Spec>::new()
        .into_authentic(&keypair)
        .expect("Signing libp2p-noise static DH keypair failed.");

    let transport = transport
        .upgrade(upgrade::Version::V1)
        .authenticate(noise::NoiseConfig::xx(noise_keys).into_authenticated())
        .multiplex(libp2p::yamux::YamuxConfig::default())
        .boxed();

    let mut swarm = SwarmBuilder::with_executor(
        transport,
        behaviour::BootstrapBehaviour::new(keypair.public()),
        keypair.public().to_peer_id(),
        TokioExecutor(),
    )
    .build();

    swarm.listen_on(args.listen_on)?;

    // Periodically re-bootstrap Kademlia to keep the routing table fresh.
    const BOOTSTRAP_INTERVAL: Duration = Duration::from_secs(5 * 60);
    let mut bootstrap_interval = tokio::time::interval(BOOTSTRAP_INTERVAL);

    loop {
        let bootstrap_interval_tick = bootstrap_interval.tick();
        tokio::pin!(bootstrap_interval_tick);

        tokio::select! {
            _ = bootstrap_interval_tick => {
                tracing::debug!("Doing periodical bootstrap");
                // Bootstrap errors (e.g. no known peers yet) are deliberately ignored.
                _ = swarm.behaviour_mut().kademlia.bootstrap();

                let network_info = swarm.network_info();
                let num_peers = network_info.num_peers();
                let connection_counters = network_info.connection_counters();
                let num_connections = connection_counters.num_connections();

                tracing::info!(%num_peers, %num_connections, "Peer-to-peer status")
            }
            Some(event) = swarm.next() => {
                match event {
                    SwarmEvent::Behaviour(behaviour::BootstrapEvent::Identify(e)) => {
                        // When a peer advertises Kademlia support via identify,
                        // add all of its listen addresses to our DHT routing table.
                        if let IdentifyEvent::Received {
                            peer_id,
                            info: IdentifyInfo {
                                listen_addrs,
                                protocols,
                                ..
                            },
                        } = *e
                        {
                            if protocols
                                .iter()
                                .any(|p| p.as_bytes() == behaviour::KADEMLIA_PROTOCOL_NAME)
                            {
                                for addr in listen_addrs {
                                    swarm.behaviour_mut().kademlia.add_address(&peer_id, addr);
                                }
                                tracing::debug!(%peer_id, "Added peer to DHT");
                            }
                        }
                    }
                    e => {
                        tracing::debug!(?e, "Swarm Event");
                    }
                }
            }
        }
    }
}

/// Installs a compact stderr tracing subscriber filtered by RUST_LOG.
fn setup_tracing() {
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .with_target(false)
        .compact()
        .init();
}
mod libs;

use crate::libs::x::*;

/// Entry point: initializes the X backend, connects to the X server, then
/// runs its event loop until it exits.
///
/// Init/connect failures abort via `panic!` (matching the original behavior);
/// an error from `run()` is propagated as the process's error exit.
fn main() -> Result<(), String> {
    let mut xbackend = crate::libs::x::get_xbackend();

    if let Err(error) = xbackend.init() {
        panic!("error initializing xbackend: {}", error);
    }

    // (fixed typo: was `conncet_result`)
    if let Err(error) = xbackend.connect(None) {
        panic!("failed to connect to x server: {}", error);
    }

    // run() already yields Result<(), String>; return it directly instead of
    // rebuilding it via match-on-err().
    xbackend.run()
}
//! Crate-wide errors. //! //! The error handling in `ipfs` is subject to change in the future. /// Just re-export anyhow for now. /// /// # Stability /// /// Very likely to change in the future. pub use anyhow::Error; /// A try conversion failed. /// /// # Stability /// /// Very likely to change in the future. pub struct TryError;
mod api; pub mod data; mod db; pub use self::api::create_transaction; pub use self::api::delete_transaction; pub use self::api::get_transaction; pub use self::api::list_transactions; pub use self::api::update_transaction;
//! Polygon mesh. use glium::Display; use glium::index::{NoIndices, IndicesSource, PrimitiveType}; use glium::vertex::{IntoVerticesSource, VerticesSource}; pub use glium::{VertexBuffer, Vertex}; pub use glium::index::BufferCreationError as IndexCreationError; pub use glium::vertex::BufferCreationError as VertexCreationError; /// Vertex index pub type Index = u16; pub type IndexBuffer = ::glium::IndexBuffer<Index>; pub struct Mesh<T: Vertex> { /// Vertex buffer. vertices: VertexBuffer<T>, /// Index buffer or none. indices: Indices, } impl<T: Vertex> Mesh<T> { /// Creates a simple mesh object. /// Primitive type is triangles list, no indices need. pub fn new(display: &Display, primitive_type: PrimitiveType, vertices: &[T]) -> Result<Mesh<T>, VertexCreationError> { Ok(Mesh { vertices: VertexBuffer::new(display, vertices)?, indices: Indices::None(NoIndices(primitive_type)), }) } pub fn with_indices(display: &Display, primitive_type: PrimitiveType, vertices: &[T], indices: &[Index]) -> Result<Mesh<T>, CreationError> { let vertices = VertexBuffer::new(display, vertices)?; let indices = IndexBuffer::new(display, primitive_type, indices)?; Ok(Mesh { vertices: vertices, indices: Indices::Buffer(indices), }) } /// Create a mesh with the given buffers. pub fn from_buffers(vertices: VertexBuffer<T>, indices: IndexBuffer) -> Mesh<T> { Mesh { vertices: vertices, indices: Indices::Buffer(indices), } } } enum Indices { None(NoIndices), Buffer(IndexBuffer), } impl<'a, T: Vertex> Into<IndicesSource<'a>> for &'a Mesh<T> { fn into(self) -> IndicesSource<'a> { match self.indices { Indices::None(ref x) => x.into(), Indices::Buffer(ref x) => x.into(), } } } impl<'a, T: Vertex> IntoVerticesSource<'a> for &'a Mesh<T> { fn into_vertices_source(self) -> VerticesSource<'a> { self.vertices.into_vertices_source() } } /// Errors which can occur when attempting to create a mesh. #[derive(Debug)] pub enum CreationError { /// Vertex buffer create failure. 
Vertex(VertexCreationError), /// Index buffer create failure. Index(IndexCreationError), } impl From<IndexCreationError> for CreationError { fn from(err: IndexCreationError) -> CreationError { CreationError::Index(err) } } impl From<VertexCreationError> for CreationError { fn from(err: VertexCreationError) -> CreationError { CreationError::Vertex(err) } }
use bevy::prelude::*;
use tracing::trace;

/// Resource holding the egui `Ui` for the current frame (if one has been
/// produced yet this run).
pub struct EguiUi {
    pub ui: Option<egui::Ui>,
}

impl FromResources for EguiUi {
    fn from_resources(_resources: &Resources) -> Self {
        // Starts empty; populated by the first EguiFrameStartEvent.
        Self {
            ui: None,
        }
    }
}

/// Event emitted at the start of a frame carrying the fresh `Ui` for that frame.
pub(crate) struct EguiFrameStartEvent {
    pub(crate) new_ui: egui::Ui,
}

// TODO(#56): move egui context updates to the start of the frame along with utilizing a context map, to remove the singleton restriction
/// System that replaces the stored `Ui` with the one delivered this frame.
///
/// Invariant: once a UI has been received at least once, one must arrive every
/// frame — a missing event after that point panics via the `expect` below.
pub(crate) fn egui_state_update(
    mut frame_start_events: ResMut<Events<EguiFrameStartEvent>>,
    mut egui: ResMut<EguiUi>,
) {
    // Grab the newest ui provided by an event
    // (drain() consumes all queued events; next() keeps only the first).
    let new_ui = frame_start_events.drain().next();

    let EguiFrameStartEvent { new_ui } = if egui.ui.is_none() && new_ui.is_none() {
        // Nothing stored and nothing delivered: egui hasn't started yet.
        trace!("Did not find a UI this tick!");
        return;
    } else {
        // This should never occur. If it does, something has broke bevy and the [`EguiSystemNode`] that sends this event.
        new_ui.expect("Previously an egui ui has been received, but one hasn't been received this frame!")
    };

    // Replace the old ui with the new one from the event
    egui.ui = Some(new_ui);
}
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from ../gir-files
// DO NOT EDIT
// NOTE(review): generated libsoup binding; regenerate with gir rather than
// hand-editing.

use crate::HSTSPolicy;
use crate::Message;
use crate::SessionFeature;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;

glib::wrapper! {
    #[doc(alias = "SoupHSTSEnforcer")]
    pub struct HSTSEnforcer(Object<ffi::SoupHSTSEnforcer, ffi::SoupHSTSEnforcerClass>) @implements SessionFeature;

    match fn {
        type_ => || ffi::soup_hsts_enforcer_get_type(),
    }
}

impl HSTSEnforcer {
    #[doc(alias = "soup_hsts_enforcer_new")]
    pub fn new() -> HSTSEnforcer {
        assert_initialized_main_thread!();
        // Takes full ownership of the newly created C object.
        unsafe { from_glib_full(ffi::soup_hsts_enforcer_new()) }
    }
}

#[cfg(any(feature = "v2_68", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_68")))]
impl Default for HSTSEnforcer {
    fn default() -> Self {
        Self::new()
    }
}

pub const NONE_HSTS_ENFORCER: Option<&HSTSEnforcer> = None;

/// Trait containing all `HSTSEnforcer` methods; blanket-implemented below for
/// every `IsA<HSTSEnforcer>` so subclasses get them too.
pub trait HSTSEnforcerExt: 'static {
    #[doc(alias = "soup_hsts_enforcer_get_domains")]
    #[doc(alias = "get_domains")]
    fn domains(&self, session_policies: bool) -> Vec<glib::GString>;

    #[doc(alias = "soup_hsts_enforcer_get_policies")]
    #[doc(alias = "get_policies")]
    fn policies(&self, session_policies: bool) -> Vec<HSTSPolicy>;

    #[doc(alias = "soup_hsts_enforcer_has_valid_policy")]
    fn has_valid_policy(&self, domain: &str) -> bool;

    #[doc(alias = "soup_hsts_enforcer_is_persistent")]
    fn is_persistent(&self) -> bool;

    #[doc(alias = "soup_hsts_enforcer_set_policy")]
    fn set_policy(&self, policy: &mut HSTSPolicy);

    #[doc(alias = "soup_hsts_enforcer_set_session_policy")]
    fn set_session_policy(&self, domain: &str, include_subdomains: bool);

    #[doc(alias = "changed")]
    fn connect_changed<F: Fn(&Self, &HSTSPolicy, &HSTSPolicy) + 'static>(&self, f: F) -> SignalHandlerId;

    #[doc(alias = "hsts-enforced")]
    fn connect_hsts_enforced<F: Fn(&Self, &Message) + 'static>(&self, f: F) -> SignalHandlerId;
}

impl<O: IsA<HSTSEnforcer>> HSTSEnforcerExt for O {
    fn domains(&self, session_policies: bool) -> Vec<glib::GString> {
        unsafe { FromGlibPtrContainer::from_glib_full(ffi::soup_hsts_enforcer_get_domains(self.as_ref().to_glib_none().0, session_policies.into_glib())) }
    }

    fn policies(&self, session_policies: bool) -> Vec<HSTSPolicy> {
        unsafe { FromGlibPtrContainer::from_glib_full(ffi::soup_hsts_enforcer_get_policies(self.as_ref().to_glib_none().0, session_policies.into_glib())) }
    }

    fn has_valid_policy(&self, domain: &str) -> bool {
        unsafe { from_glib(ffi::soup_hsts_enforcer_has_valid_policy(self.as_ref().to_glib_none().0, domain.to_glib_none().0)) }
    }

    fn is_persistent(&self) -> bool {
        unsafe { from_glib(ffi::soup_hsts_enforcer_is_persistent(self.as_ref().to_glib_none().0)) }
    }

    fn set_policy(&self, policy: &mut HSTSPolicy) {
        unsafe {
            ffi::soup_hsts_enforcer_set_policy(self.as_ref().to_glib_none().0, policy.to_glib_none_mut().0);
        }
    }

    fn set_session_policy(&self, domain: &str, include_subdomains: bool) {
        unsafe {
            ffi::soup_hsts_enforcer_set_session_policy(self.as_ref().to_glib_none().0, domain.to_glib_none().0, include_subdomains.into_glib());
        }
    }

    fn connect_changed<F: Fn(&Self, &HSTSPolicy, &HSTSPolicy) + 'static>(&self, f: F) -> SignalHandlerId {
        // C-ABI trampoline: recovers the boxed Rust closure from the gpointer
        // and borrows the GObject arguments for the callback's duration.
        unsafe extern "C" fn changed_trampoline<P: IsA<HSTSEnforcer>, F: Fn(&P, &HSTSPolicy, &HSTSPolicy) + 'static>(this: *mut ffi::SoupHSTSEnforcer, old_policy: *mut ffi::SoupHSTSPolicy, new_policy: *mut ffi::SoupHSTSPolicy, f: glib::ffi::gpointer) {
            let f: &F = &*(f as *const F);
            f(HSTSEnforcer::from_glib_borrow(this).unsafe_cast_ref(), &from_glib_borrow(old_policy), &from_glib_borrow(new_policy))
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"changed\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(changed_trampoline::<Self, F> as *const ())), Box_::into_raw(f))
        }
    }

    fn connect_hsts_enforced<F: Fn(&Self, &Message) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn hsts_enforced_trampoline<P: IsA<HSTSEnforcer>, F: Fn(&P, &Message) + 'static>(this: *mut ffi::SoupHSTSEnforcer, message: *mut ffi::SoupMessage, f: glib::ffi::gpointer) {
            let f: &F = &*(f as *const F);
            f(HSTSEnforcer::from_glib_borrow(this).unsafe_cast_ref(), &from_glib_borrow(message))
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"hsts-enforced\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(hsts_enforced_trampoline::<Self, F> as *const ())), Box_::into_raw(f))
        }
    }
}

impl fmt::Display for HSTSEnforcer {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("HSTSEnforcer")
    }
}
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use core::fmt; use alloc::collections::linked_list::LinkedList; use alloc::boxed::Box; use super::qlib::common::*; use super::qlib::linux_def::*; use super::kernel::posixtimer::*; use super::task::*; #[repr(C)] #[derive(Debug, Copy, Clone, Default)] //copy from https://elixir.bootlin.com/linux/latest/source/arch/x86/include/uapi/asm/ptrace.h#L18 pub struct PtRegs { /* * C ABI says these regs are callee-preserved. They aren't saved on kernel entry * unless syscall needs a complete, fully filled "struct pt_regs". */ pub r15: u64, pub r14: u64, pub r13: u64, pub r12: u64, pub rbp: u64, pub rbx: u64, /* These regs are callee-clobbered. Always saved on kernel entry. */ pub r11: u64, pub r10: u64, pub r9: u64, pub r8: u64, pub rax: u64, pub rcx: u64, pub rdx: u64, pub rsi: u64, pub rdi: u64, /* * On syscall entry, this is syscall#. On CPU exception, this is error code. 
* On hw interrupt, it's IRQ number: */ pub orig_rax: u64, /* Return frame for iretq */ pub rip: u64, pub cs: u64, pub eflags: u64, pub rsp: u64, pub ss: u64, /* top of stack page */ } impl PtRegs { pub fn Set(&mut self, ctx: &SigContext) { self.r15 = ctx.r15; self.r14 = ctx.r14; self.r13 = ctx.r13; self.r12 = ctx.r12; self.rbp = ctx.rbp; self.rbx = ctx.rbx; self.r11 = ctx.r11; self.r10 = ctx.r10; self.r9 = ctx.r9; self.r8 = ctx.r8; self.rax = ctx.rax; self.rcx = ctx.rcx; self.rdx = ctx.rdx; self.rsi = ctx.rsi; self.rdi = ctx.rdi; self.orig_rax = ctx.rax; self.rip = ctx.rip; self.cs = ctx.cs as u64; self.eflags = ctx.eflags; self.rsp = ctx.rsp; self.ss = ctx.ss as u64; } } #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct SigRetInfo { pub sigInfoAddr: u64, pub sigCtxAddr: u64, pub ret: u64, } #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct Kill { pub pid: i32, pub uid: i32, } #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct SigTimer { pub tid: i32, pub overrun: i32, pub sigval: u64, pub sysPrivate: i32, } #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct SigRt { pub pid: i32, pub uid: u32, pub sigval: u64, } #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct SigChld { pub pid: i32, //child pub uid: u32, //sender's uid pub status: i32, //Exit code pub uTime: i32, pub sTime: i32, } #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct SigFault { pub addr: u64, pub lsb: u16, } #[repr(C)] #[derive(Copy, Clone)] pub struct SignalInfo { pub Signo: i32, // Signal number pub Errno: i32, // Errno value pub Code: i32, // Signal code pub _r: u32, pub fields: [u8; 128 - 16], } impl<'a> Default for SignalInfo { fn default() -> Self { return Self { Signo: 0, Errno: 0, Code: 0, _r: 0, fields: [0; 128 - 16] } } } impl core::fmt::Debug for SignalInfo { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("SignalInfo") .field("Signo", &self.Signo) .field("Errno", &self.Errno) .field("Code", 
&self.Code) .finish() } } impl SignalInfo { pub fn SignalInfoPriv(sig: Signal) -> Self { return Self { Signo: sig.0, Code: Self::SIGNAL_INFO_KERNEL, ..Default::default() } } // FixSignalCodeForUser fixes up si_code. // // The si_code we get from Linux may contain the kernel-specific code in the // top 16 bits if it's positive (e.g., from ptrace). Linux's // copy_siginfo_to_user does // err |= __put_user((short)from->si_code, &to->si_code); // to mask out those bits and we need to do the same. pub fn FixSignalCodeForUser(&mut self) { if self.Code > 0 { self.Code &= 0xffff; } } pub fn Kill(&self) -> &mut Kill { let addr = &self.fields[0] as *const _ as u64; return unsafe { &mut *(addr as *mut Kill) } } pub fn SigTimer(&mut self) -> &mut SigTimer { let addr = &self.fields[0] as *const _ as u64; return unsafe { &mut *(addr as *mut SigTimer) } } pub fn SigRt(&mut self) -> &mut SigRt { let addr = &self.fields[0] as *const _ as u64; return unsafe { &mut *(addr as *mut SigRt) } } pub fn SigChld(&mut self) -> &mut SigChld { let addr = &self.fields[0] as *const _ as u64; return unsafe { &mut *(addr as *mut SigChld) } } pub fn SigFault(&self) -> &mut SigFault { let addr = &self.fields[0] as *const _ as u64; return unsafe { &mut *(addr as *mut SigFault) } } // SignalInfoUser (properly SI_USER) indicates that a signal was sent from // a kill() or raise() syscall. pub const SIGNAL_INFO_USER: i32 = 0; // SignalInfoKernel (properly SI_KERNEL) indicates that the signal was sent // by the kernel. pub const SIGNAL_INFO_KERNEL: i32 = 0x80; // SignalInfoTimer (properly SI_TIMER) indicates that the signal was sent // by an expired timer. pub const SIGNAL_INFO_TIMER: i32 = -2; // SignalInfoTkill (properly SI_TKILL) indicates that the signal was sent // from a tkill() or tgkill() syscall. pub const SIGNAL_INFO_TKILL: i32 = -6; // CLD_* codes are only meaningful for SIGCHLD. // CLD_EXITED indicates that a task exited. 
pub const CLD_EXITED: i32 = 1; // CLD_KILLED indicates that a task was killed by a signal. pub const CLD_KILLED: i32 = 2; // CLD_DUMPED indicates that a task was killed by a signal and then dumped // core. pub const CLD_DUMPED: i32 = 3; // CLD_TRAPPED indicates that a task was stopped by ptrace. pub const CLD_TRAPPED: i32 = 4; // CLD_STOPPED indicates that a thread group completed a group stop. pub const CLD_STOPPED: i32 = 5; // CLD_CONTINUED indicates that a group-stopped thread group was continued. pub const CLD_CONTINUED: i32 = 6; // SYS_* codes are only meaningful for SIGSYS. // SYS_SECCOMP indicates that a signal originates from seccomp. pub const SYS_SECCOMP: i32 = 1; // TRAP_* codes are only meaningful for SIGTRAP. // TRAP_BRKPT indicates a breakpoint trap. pub const TRAP_BRKPT: i32 = 1; } pub const UC_FP_XSTATE: u64 = 1; pub const UC_SIGCONTEXT_SS: u64 = 2; pub const UC_STRICT_RESTORE_SS: u64 = 4; // https://elixir.bootlin.com/linux/latest/source/include/uapi/asm-generic/ucontext.h#L5 #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct UContext { pub Flags: u64, pub Link: u64, pub Stack: SignalStack, pub MContext: SigContext, pub Sigset: u64, } impl UContext { pub fn New(ptRegs: &PtRegs, oldMask: u64, cr2: u64, fpstate: u64, alt: &SignalStack) -> Self { return Self { Flags: 2, Link: 0, Stack: alt.clone(), MContext: SigContext::New(ptRegs, oldMask, cr2, fpstate), Sigset: 0, } } } // https://elixir.bootlin.com/linux/latest/source/arch/x86/include/uapi/asm/sigcontext.h#L284 #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct SigContext { pub r8: u64, pub r9: u64, pub r10: u64, pub r11: u64, pub r12: u64, pub r13: u64, pub r14: u64, pub r15: u64, pub rdi: u64, pub rsi: u64, pub rbp: u64, pub rbx: u64, pub rdx: u64, pub rax: u64, pub rcx: u64, pub rsp: u64, pub rip: u64, pub eflags: u64, pub cs: u16, pub gs: u16, // always 0 on amd64. pub fs: u16, // always 0 on amd64. pub ss: u16, // only restored if _UC_STRICT_RESTORE_SS (unsupported). 
pub err: u64, pub trapno: u64, pub oldmask: u64, pub cr2: u64, // Pointer to a struct _fpstate. pub fpstate: u64, pub reserved: [u64; 8], } impl SigContext { pub fn New(ptRegs: &PtRegs, oldMask: u64, cr2: u64, fpstate: u64) -> Self { return Self { r8: ptRegs.r8, r9: ptRegs.r9, r10: ptRegs.r10, r11: ptRegs.r11, r12: ptRegs.r12, r13: ptRegs.r13, r14: ptRegs.r14, r15: ptRegs.r15, rdi: ptRegs.rdi, rsi: ptRegs.rsi, rbp: ptRegs.rbp, rbx: ptRegs.rbx, rdx: ptRegs.rdx, rax: ptRegs.rax, rcx: ptRegs.rcx, rsp: ptRegs.rsp, rip: ptRegs.rip, eflags: ptRegs.eflags, cs: ptRegs.cs as u16, gs: 0, fs: 0, ss: ptRegs.ss as u16, err: 0, trapno: 0, oldmask: oldMask, cr2: cr2, fpstate: fpstate, ..Default::default() } } } #[repr(C)] #[derive(Debug, Copy, Clone, Default)] pub struct SigFlag(pub u64); impl SigFlag { pub const SIGNAL_FLAG_NO_CLD_STOP: u64 = 0x00000001; pub const SIGNAL_FLAG_NO_CLD_WAIT: u64 = 0x00000002; pub const SIGNAL_FLAG_SIG_INFO: u64 = 0x00000004; pub const SIGNAL_FLAG_RESTORER: u64 = 0x04000000; pub const SIGNAL_FLAG_ON_STACK: u64 = 0x08000000; pub const SIGNAL_FLAG_RESTART: u64 = 0x10000000; pub const SIGNAL_FLAG_INTERRUPT: u64 = 0x20000000; pub const SIGNAL_FLAG_NO_DEFER: u64 = 0x40000000; pub const SIGNAL_FLAG_RESET_HANDLER: u64 = 0x80000000; pub fn IsNoCldStop(&self) -> bool { return self.0 & Self::SIGNAL_FLAG_NO_CLD_STOP != 0; } pub fn IsNoCldWait(&self) -> bool { return self.0 & Self::SIGNAL_FLAG_NO_CLD_WAIT != 0; } pub fn IsSigInfo(&self) -> bool { return self.0 & Self::SIGNAL_FLAG_SIG_INFO != 0; } pub fn IsNoDefer(&self) -> bool { return self.0 & Self::SIGNAL_FLAG_NO_DEFER != 0; } pub fn IsRestart(&self) -> bool { return self.0 & Self::SIGNAL_FLAG_RESTART != 0; } pub fn IsResetHandler(&self) -> bool { return self.0 & Self::SIGNAL_FLAG_RESET_HANDLER != 0; } pub fn IsOnStack(&self) -> bool { return self.0 & Self::SIGNAL_FLAG_ON_STACK != 0; } pub fn HasRestorer(&self) -> bool { return self.0 & Self::SIGNAL_FLAG_RESTORER != 0; } pub fn IsNoChildStop(&self) -> bool { 
return self.0 & Self::SIGNAL_FLAG_NO_CLD_STOP != 0 } } // https://github.com/lattera/glibc/blob/master/sysdeps/unix/sysv/linux/kernel_sigaction.h #[derive(Copy, Clone, Default)] #[repr(C)] pub struct SigAct { pub handler: u64, pub flags: SigFlag, pub restorer: u64, pub mask: u64, } impl fmt::Debug for SigAct { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "SigAction {{ \n\ handler: {:x}, \n\ flag : {:x}, \n \ flags::HasRestorer: {}, \n \ flags::IsOnStack: {}, \n \ flags::IsRestart: {}, \n \ flags::IsResetHandler: {}, \n \ flags::IsNoDefer: {}, \n \ flags::IsSigInfo: {}, \n \ restorer : {:x}, \n\ mask: {:x}, \n}}", self.handler, self.flags.0, self.flags.HasRestorer(), self.flags.IsOnStack(), self.flags.IsRestart(), self.flags.IsResetHandler(), self.flags.IsNoDefer(), self.flags.IsSigInfo(), self.restorer, self.mask ) } } impl SigAct { // SignalActDefault is SIG_DFL and specifies that the default behavior for // a signal should be taken. pub const SIGNAL_ACT_DEFAULT: u64 = 0; // SignalActIgnore is SIG_IGN and specifies that a signal should be // ignored. 
pub const SIGNAL_ACT_IGNORE: u64 = 1; } pub const UNMASKABLE_MASK : u64 = 1 << (Signal::SIGKILL - 1) | 1 << (Signal::SIGSTOP - 1); #[derive(Clone, Copy, Debug)] pub struct SignalSet(pub u64); impl Default for SignalSet { fn default() -> Self { return Self(0) } } impl SignalSet { pub fn New(sig: Signal) -> Self { return SignalSet(1 << sig.Index()) } pub fn Add(&mut self, sig: Signal) { self.0 |= 1 << sig.Index() } pub fn Remove(&mut self, sig: Signal) { self.0 &= !(1 << sig.0) } pub fn TailingZero(&self) -> usize { for i in 0..64 { let idx = 64 - i - 1; if self.0 & (1 << idx) != 0 { return idx } } return 64 } pub fn MakeSignalSet(sigs: &[Signal]) -> Self { let mut res = Self::default(); for sig in sigs { res.Add(*sig) } return res; } pub fn ForEachSignal(&self, mut f: impl FnMut(Signal)) { for i in 0..64 { if self.0 & (1 << i) != 0 { f(Signal(i as i32 + 1)) } } } } #[derive(Debug, Clone, Default)] pub struct SignalQueue { signals: LinkedList<PendingSignal>, } impl SignalQueue { pub const RT_SIG_CAP: usize = 32; pub fn Len(&mut self) -> u64 { return self.signals.len() as u64; } pub fn Enque(&mut self, info: Box<SignalInfo>, timer: Option<IntervalTimer>) -> bool { if self.signals.len() == Self::RT_SIG_CAP { return false } self.signals.push_back(PendingSignal { sigInfo: info, timer: timer, }); return true } pub fn Deque(&mut self) -> Option<PendingSignal> { return self.signals.pop_front(); } pub fn Clear(&mut self) { self.signals.clear(); } } pub const SIGNAL_COUNT: usize = 64; pub const STD_SIGNAL_COUNT: usize = 31; // 1 ~ 31 pub const RT_SIGNAL_COUNT: usize = 33; // 32 ~ 64 pub const RT_SIGNAL_START: usize = 32; // 32 ~ 64 #[derive(Debug, Clone, Default)] pub struct PendingSignal { pub sigInfo: Box<SignalInfo>, pub timer: Option<IntervalTimer>, } pub struct PendingSignals { pub stdSignals: [Option<PendingSignal>; STD_SIGNAL_COUNT], pub rtSignals: [SignalQueue; RT_SIGNAL_COUNT], pub pendingSet: SignalSet, } impl fmt::Debug for PendingSignals { fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("PendingSignals") .field("stdSignals", &self.stdSignals) .field("rtSignals0", &self.rtSignals[0]) .field("rtSignals2", &self.rtSignals[32]) .field("pendingSet", &self.pendingSet) .finish() } } impl Default for PendingSignals { fn default() -> Self { return Self { stdSignals : Default::default(), rtSignals : [ SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), SignalQueue::default(), ], pendingSet: Default::default(), } } } impl PendingSignals { pub fn Enque(&mut self, info: Box<SignalInfo>, timer: Option<IntervalTimer>) -> Result<bool> { let sig = Signal(info.Signo); if sig.IsStandard() { match &self.stdSignals[sig.Index()] { None => (), _ => return Ok(false), } self.stdSignals[sig.Index()] = Some(PendingSignal { sigInfo: info, timer: timer, }); self.pendingSet.Add(sig); return Ok(true); } else if sig.IsRealtime() { let q = &mut self.rtSignals[sig.Index() - 31]; self.pendingSet.Add(sig); return Ok(q.Enque(info, timer)); } else { return Err(Error::InvalidInput) } } pub fn HasSignal(&self, mask: SignalSet) -> bool { let set = SignalSet(self.pendingSet.0 & !(mask.0)); if set.0 == 0 { return false } return true; } pub fn Deque(&mut self, mask: SignalSet) -> Option<Box<SignalInfo>> { let set = 
SignalSet(self.pendingSet.0 & !(mask.0)); if set.0 == 0 { return None } let lastOne = set.TailingZero(); if lastOne < STD_SIGNAL_COUNT { self.pendingSet.0 &= !(1 << lastOne); let ps = self.stdSignals[lastOne].take(); if let Some(ps) = ps { let mut sigInfo = ps.sigInfo; match ps.timer { None => (), Some(timer) => { timer.lock().updateDequeuedSignalLocked(&mut sigInfo) } } return Some(sigInfo); } else { return None; } } if self.rtSignals[lastOne + 1 - RT_SIGNAL_START].Len() == 1 { self.pendingSet.0 &= !(1 << lastOne); } let ps = self.rtSignals[lastOne + 1 - RT_SIGNAL_START].Deque(); if let Some(ps) = ps { let mut sigInfo = ps.sigInfo; match ps.timer { None => (), Some(timer) => { timer.lock().updateDequeuedSignalLocked(&mut sigInfo) } } return Some(sigInfo); } else { return None; } } pub fn Discard(&mut self, sig: Signal) { self.pendingSet.0 &= !(1 << sig.Index()); if sig.0 <= STD_SIGNAL_COUNT as i32 { self.stdSignals[sig.Index()] = None; return } self.rtSignals[sig.0 as usize - RT_SIGNAL_START].Clear() } } #[derive(Default, Debug)] pub struct SignalStruct { pendingSignals: PendingSignals, signalMask: SignalSet, realSignalMask: SignalSet, //sigtimedwait groupStopPending: bool, groupStopAck: bool, trapStopPending: bool, } // https://elixir.bootlin.com/linux/latest/source/arch/x86/include/uapi/asm/signal.h#L132 #[derive(Debug, Clone, Copy)] #[repr(C)] pub struct SignalStack { pub addr: u64, pub flags: u32, pub size: u64, } impl Default for SignalStack { fn default() -> Self { return Self { addr: 0, flags: Self::FLAG_DISABLE, size: 0, } } } impl SignalStack { pub const FLAG_ON_STACK: u32 = 1; pub const FLAG_DISABLE: u32 = 2; pub fn Contains(&self, sp: u64) -> bool { return self.addr < sp && sp <= self.addr + self.size } pub fn SetOnStack(&mut self) { self.flags |= Self::FLAG_ON_STACK; } pub fn IsEnable(&self) -> bool { return self.flags & Self::FLAG_DISABLE == 0 } pub fn Top(&self) -> u64 { return self.addr + self.size } } pub struct SigHow {} impl SigHow { pub const 
SIG_BLOCK: u64 = 0; pub const SIG_UNBLOCK: u64 = 1; pub const SIG_SETMASK: u64 = 2; } pub fn SignalInfoPriv(sig: i32) -> SignalInfo { return SignalInfo { Signo: sig, Code: SignalInfo::SIGNAL_INFO_KERNEL, ..Default::default() } } // Sigevent represents struct sigevent. #[repr(C)] #[derive(Default, Copy, Clone)] pub struct Sigevent { pub Value: u64, pub Signo: i32, pub Notify: i32, pub Tid: i32, // struct sigevent here contains 48-byte union _sigev_un. However, only // member _tid is significant to the kernel. pub UnRemainder1: [u8; 32], pub UnRemainder: [u8; 12], } pub const SIGEV_SIGNAL: i32 = 0; pub const SIGEV_NONE: i32 = 1; pub const SIGEV_THREAD: i32 = 2; pub const SIGEV_THREAD_ID: i32 = 4; // copyInSigSetWithSize copies in a structure as below // // struct { // const sigset_t *ss; /* Pointer to signal set */ // size_t ss_len; /* Size (in bytes) of object pointed to by 'ss' */ // }; // // and returns sigset_addr and size. #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct SigMask { pub addr: u64, pub len: usize, } pub fn CopyInSigSetWithSize(task: &Task, addr: u64) -> Result<(u64, usize)> { let mask : SigMask = task.CopyInObj(addr)?; return Ok((mask.addr, mask.len)) } pub const SIGNAL_SET_SIZE: usize = 8; pub fn UnblockableSignals() -> SignalSet { return SignalSet::MakeSignalSet(&[Signal(Signal::SIGKILL), Signal(Signal::SIGSTOP)]); } pub fn CopyInSigSet(task: &Task, sigSetAddr: u64, size: usize) -> Result<SignalSet> { if size != SIGNAL_SET_SIZE { return Err(Error::SysError(SysErr::EINVAL)) } let mask : u64 = task.CopyInObj(sigSetAddr)?; return Ok(SignalSet(mask & !UnblockableSignals().0)) }
use std::{ collections::{HashMap, HashSet}, convert::TryInto, fmt, fs::File, io::{prelude::*, BufReader}, num::ParseIntError, ops::Not, str::FromStr, }; use fehler::throws; use rand::prelude::*; use crate::{ space::Space, value::{Value, ALL_VALUES}, }; mod styled; #[derive(Clone, Copy)] pub struct Grid { spaces: [Space; 81], } fn is_solved(spaces: Vec<Space>) -> bool { let mut values = vec![]; for space in spaces { match space { Space::Empty => return false, Space::Occupied(value) => values.push(value), } } values.sort(); let mut all_values = ALL_VALUES.iter().cloned().collect::<Vec<Value>>(); all_values.sort(); values == all_values } impl Grid { pub fn empty() -> Self { Self { spaces: [Space::Empty; 81], } } pub fn new_solved() -> Self { let grid = Self { spaces: [Space::Empty; 81], }; grid.solve().unwrap() } fn get(&self, x: u8, y: u8) -> Space { let i: usize = (x + (y * 9)).try_into().unwrap(); self.spaces[i] } fn rows(&self) -> Vec<Vec<Space>> { let mut rows = vec![]; for y in 0..=8 { let mut row = vec![]; for x in 0..=8 { row.push(self.get(x, y)); } rows.push(row); } rows } fn columns(&self) -> Vec<Vec<Space>> { let mut columns = vec![]; for x in 0..=8 { let mut column = vec![]; for y in 0..=8 { column.push(self.get(x, y)); } columns.push(column); } columns } fn squares(&self) -> Vec<Vec<Space>> { let mut squares = vec![]; for square_y in 0..=2 { for square_x in 0..=2 { let mut square = vec![]; for local_y in 0..=2 { let grid_y = local_y + (square_y * 3); for local_x in 0..=2 { let grid_x = local_x + (square_x * 3); let val = self.get(grid_x, grid_y); square.push(val); } } squares.push(square) } } squares } fn set(&mut self, x: u8, y: u8, v: Value) { let i: usize = (x + (y * 9)).try_into().unwrap(); self.spaces[i] = Space::Occupied(v); } fn set_empty(&mut self, x: u8, y: u8) { let i: usize = (x + (y * 9)).try_into().unwrap(); self.spaces[i] = Space::Empty; } pub fn is_complete(&self) -> bool { self.spaces.iter().all(|&s| s.is_occupied()) } pub fn 
is_solved(&self) -> bool { self.rows().iter().all(|r| is_solved(r.to_vec())) && self.columns().iter().all(|c| is_solved(c.to_vec())) && self.squares().iter().all(|s| is_solved(s.to_vec())) } fn column_constraints(&self, x: u8) -> Vec<Value> { let mut c = vec![]; for y in 0..=8 { let val = match self.get(x, y) { Space::Occupied(v) => v, _ => continue, }; c.push(val); } c } fn row_constraints(&self, y: u8) -> Vec<Value> { let mut r = vec![]; for x in 0..=8 { let val = match self.get(x, y) { Space::Occupied(v) => v, _ => continue, }; r.push(val); } r } fn square_constraints(&self, x: u8, y: u8) -> Vec<Value> { let square_x = x / 3; let square_y = y / 3; let mut s = vec![]; for local_y in 0..=2 { let grid_y = local_y + (square_y * 3); for local_x in 0..=2 { let grid_x = local_x + (square_x * 3); let val = match self.get(grid_x, grid_y) { Space::Occupied(v) => v, _ => continue, }; s.push(val); } } s } pub fn backtracking_solve(mut self) -> Option<Self> { for y in 0..=8 { for x in 0..=8 { let space = self.get(x, y); if space.is_occupied() { continue; } let mut constraints = HashSet::new(); constraints.extend(self.column_constraints(x)); constraints.extend(self.row_constraints(y)); constraints.extend(self.square_constraints(x, y)); constraints = constraints; for value in ALL_VALUES.difference(&constraints) { self.set(x, y, *value); match self.solve() { Some(solution) => return Some(solution), None => self.set_empty(x, y), } } return None; } } Some(self) } pub fn stochastic_solve(mut self) -> Option<Self> { // 1) Find out if it's solvable, return early if not // TODO // 2) Determine which numbers are missing from the grid let mut counts: HashMap<Value, u8> = HashMap::new(); for space in self.spaces.iter() { match space { Space::Empty => continue, Space::Occupied(value) => *counts.entry(*value).or_insert(0) += 1, } } let mut values = vec![]; for value in ALL_VALUES.iter() { let missing = match counts.get(value) { Some(count) => 9 - count, None => 9, }; for _ in 0..missing { 
values.push(value); } } // 3) Shuffle the numbers and insert into the empty spaces let self_clone = self; loop { let mut values_clone = values.clone(); values_clone.shuffle(&mut rand::thread_rng()); for (i, space) in self_clone.spaces.iter().enumerate() { match space { Space::Empty => self.spaces[i] = Space::Occupied(*values_clone.pop().unwrap()), Space::Occupied(_) => continue, } } // 4) Check for solved state, return if true, loop if not if self.is_solved() { return Some(self); } println!("{}", self); } } pub fn solve(self) -> Option<Self> { self.backtracking_solve() } } impl PartialEq for Grid { fn eq(&self, other: &Self) -> bool { self.spaces.iter().eq(other.spaces.iter()) } } impl Eq for Grid {} impl fmt::Debug for Grid { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}", self.spaces.to_vec()) } } impl fmt::Display for Grid { #[throws(fmt::Error)] fn fmt(&self, f: &mut fmt::Formatter<'_>) { for val in self.to_styled() { val.content().fmt(f)?; } } } impl From<Vec<u8>> for Grid { fn from(numbers: Vec<u8>) -> Self { let mut spaces = [Space::Empty; 81]; for (i, number) in numbers.iter().enumerate() { spaces[i] = Space::from(*number); } Self { spaces } } } impl FromStr for Grid { type Err = ParseIntError; fn from_str(s: &str) -> Result<Self, Self::Err> { let mut numbers = vec![]; for c in s.chars() { let c = if c == '.' { '0' } else { c }; if c.is_ascii_digit().not() { continue; } numbers.push(c.to_string().parse()?); } Ok(Self::from(numbers)) } } impl Grid { #[throws(Box<dyn std::error::Error>)] pub fn from_file(path: &str) -> Self { let file = File::open(path)?; let mut buf_reader = BufReader::new(file); let mut contents = String::new(); buf_reader.read_to_string(&mut contents)?; Self::from_str(&contents)? 
} } #[cfg(test)] mod tests { use test_case::test_case; use super::*; #[test_case(Grid::empty(), false)] #[test_case(Grid::new_solved(), true)] fn grid_is_complete(grid: Grid, expected: bool) { assert_eq!(grid.is_complete(), expected); } #[test_case(Grid::empty(), false)] #[test_case(Grid::new_solved(), true)] fn grid_is_solved(grid: Grid, expected: bool) { assert_eq!(grid.is_solved(), expected); } #[test] #[throws(ParseIntError)] fn round_trip() { #[rustfmt::skip] let expected = r" ┌─────────────────────────────┐ │ 3 7 5 ┆ 4 1 6 ┆ 8 9 2 │ │ 4 1 6 ┆ 8 9 2 ┆ 3 7 5 │ │ 8 9 2 ┆ 3 7 5 ┆ 4 1 6 │ │┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄│ │ 7 3 4 ┆ 5 6 1 ┆ 9 2 8 │ │ 5 6 1 ┆ 9 2 8 ┆ 7 3 4 │ │ 9 2 8 ┆ 7 3 4 ┆ 5 6 1 │ │┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄│ │ 1 5 3 ┆ 6 4 7 ┆ 2 8 9 │ │ 6 4 7 ┆ 2 8 9 ┆ 1 5 3 │ │ 2 8 9 ┆ 1 5 3 ┆ 6 4 7 │ └─────────────────────────────┘ "; let expected = expected.trim_start(); let grid: Grid = expected.parse()?; assert_eq!(format!("{}", grid), expected); } #[test] #[throws(ParseIntError)] fn round_trip_empty() { #[rustfmt::skip] let expected = r" ┌─────────────────────────────┐ │ . . . ┆ . . . ┆ . . . │ │ . . . ┆ . . . ┆ . . . │ │ . . . ┆ . . . ┆ . . . │ │┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄│ │ . . . ┆ . . . ┆ . . . │ │ . . . ┆ . . . ┆ . . . │ │ . . . ┆ . . . ┆ . . . │ │┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄│ │ . . . ┆ . . . ┆ . . . │ │ . . . ┆ . . . ┆ . . . │ │ . . . ┆ . . . ┆ . . . 
│ └─────────────────────────────┘ "; let expected = expected.trim_start(); let grid: Grid = expected.parse()?; assert_eq!(grid, Grid::empty()); assert_eq!(format!("{}", grid), expected); } #[test] #[throws(ParseIntError)] fn zeros_as_empty() { #[rustfmt::skip] let expected = r" ┌─────────────────────────────┐ │ 0 0 0 ┆ 0 0 0 ┆ 0 0 0 │ │ 0 0 0 ┆ 0 0 0 ┆ 0 0 0 │ │ 0 0 0 ┆ 0 0 0 ┆ 0 0 0 │ │┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄│ │ 0 0 0 ┆ 0 0 0 ┆ 0 0 0 │ │ 0 0 0 ┆ 0 0 0 ┆ 0 0 0 │ │ 0 0 0 ┆ 0 0 0 ┆ 0 0 0 │ │┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄┼┄┄┄┄┄┄┄┄┄│ │ 0 0 0 ┆ 0 0 0 ┆ 0 0 0 │ │ 0 0 0 ┆ 0 0 0 ┆ 0 0 0 │ │ 0 0 0 ┆ 0 0 0 ┆ 0 0 0 │ └─────────────────────────────┘ "; let expected = expected.trim_start(); let grid: Grid = expected.parse()?; assert_eq!(grid, Grid::empty()); } }
use crate::accounts::AccountLockType;
use crate::bank::Bank;
use morgan_interface::transaction::{Result, Transaction};
use std::borrow::Borrow;

// Represents the results of trying to lock a set of accounts
//
// RAII guard: holds the per-transaction lock outcomes and releases the locks
// via `Bank::unlock_accounts` when dropped (unless `needs_unlock` has been
// cleared by the bank first).
pub struct LockedAccountsResults<'a, 'b, I: Borrow<Transaction>> {
    // One Result per transaction: Ok(()) if its accounts were locked.
    locked_accounts_results: Vec<Result<()>>,
    // The bank that granted the locks; used to release them on drop.
    bank: &'a Bank,
    // The transactions whose accounts were (attempted to be) locked.
    transactions: &'b [I],
    lock_type: AccountLockType,
    // Set to false by the bank once it has unlocked, so Drop won't double-unlock.
    pub(crate) needs_unlock: bool,
}

impl<'a, 'b, I: Borrow<Transaction>> LockedAccountsResults<'a, 'b, I> {
    // Wraps freshly-acquired lock results; the guard starts owning the locks.
    pub fn new(
        locked_accounts_results: Vec<Result<()>>,
        bank: &'a Bank,
        transactions: &'b [I],
        lock_type: AccountLockType,
    ) -> Self {
        Self {
            locked_accounts_results,
            bank,
            transactions,
            needs_unlock: true,
            lock_type,
        }
    }

    pub fn lock_type(&self) -> AccountLockType {
        self.lock_type.clone()
    }

    // Per-transaction lock outcomes, in the same order as `transactions()`.
    pub fn locked_accounts_results(&self) -> &Vec<Result<()>> {
        &self.locked_accounts_results
    }

    pub fn transactions(&self) -> &[I] {
        self.transactions
    }
}

// Unlock all locked accounts in destructor.
impl<'a, 'b, I: Borrow<Transaction>> Drop for LockedAccountsResults<'a, 'b, I> {
    fn drop(&mut self) {
        if self.needs_unlock {
            self.bank.unlock_accounts(self)
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::genesis_utils::{create_genesis_block_with_leader, GenesisBlockInfo};
    use morgan_interface::pubkey::Pubkey;
    use morgan_interface::signature::{Keypair, KeypairUtil};
    use morgan_interface::system_transaction;

    #[test]
    fn test_account_locks() {
        let (bank, txs) = setup();

        // Test getting locked accounts
        let lock_results = bank.lock_accounts(&txs);

        // Grab locks
        assert!(lock_results
            .locked_accounts_results()
            .iter()
            .all(|x| x.is_ok()));

        // Trying to grab locks again should fail
        let lock_results2 = bank.lock_accounts(&txs);
        assert!(lock_results2
            .locked_accounts_results()
            .iter()
            .all(|x| x.is_err()));

        // Drop the first set of locks
        drop(lock_results);

        // Now grabbing locks should work again
        let lock_results2 = bank.lock_accounts(&txs);
        assert!(lock_results2
            .locked_accounts_results()
            .iter()
            .all(|x| x.is_ok()));
    }

    #[test]
    fn test_record_locks() {
        let (bank, txs) = setup();

        // Test getting record locks
        let lock_results = bank.lock_record_accounts(&txs);

        // Grabbing record locks doesn't return any results, must succeed or panic.
        assert!(lock_results.locked_accounts_results().is_empty());

        drop(lock_results);

        // Now grabbing record locks should work again
        let lock_results2 = bank.lock_record_accounts(&txs);
        assert!(lock_results2.locked_accounts_results().is_empty());
    }

    // Builds a bank and two independent transfer transactions for lock tests.
    fn setup() -> (Bank, Vec<Transaction>) {
        let dummy_leader_pubkey = Pubkey::new_rand();
        let GenesisBlockInfo {
            genesis_block,
            mint_keypair,
            ..
        } = create_genesis_block_with_leader(500, &dummy_leader_pubkey, 100);
        let bank = Bank::new(&genesis_block);

        let pubkey = Pubkey::new_rand();
        let keypair2 = Keypair::new();
        let pubkey2 = Pubkey::new_rand();

        let txs = vec![
            system_transaction::transfer(&mint_keypair, &pubkey, 1, genesis_block.hash()),
            system_transaction::transfer(&keypair2, &pubkey2, 1, genesis_block.hash()),
        ];

        (bank, txs)
    }
}
// Compile-fail (UI) test: this file is EXPECTED not to compile.
// The `//~ ERROR` annotation below marks the diagnostic the compiletest
// harness requires; do not "fix" the move error.
extern crate amiwo;

use amiwo::ResponseJSON;
use amiwo::IsResponseJSON;

// Verifies that `into_string()` consumes the ResponseJSON, so any later use
// of `x` is rejected by the borrow checker.
fn test_moved_value() {
    let x = ResponseJSON::ok();
    assert_eq!(x.to_string(), "{\"http_code\":200,\"success\":true}".to_string());

    // `into_string` takes `x` by value, moving it out.
    let moved_string = x.into_string();
    assert_eq!(moved_string, "{\"http_code\":200,\"success\":true}".to_string());

    // Using `x` after the move must be a compile error.
    assert_eq!(x.is_valid_json(), false, "Shouldn't be able to execute this"); //~ ERROR use of moved value
}

fn main() {
    test_moved_value();
}
use crate::types::*;
use ggez::graphics;
use ncollide2d as nc;
use euclid;
use specs::*;
use specs_derive::*;

// ///////////////////////////////////////////////////////////////////////
// Components
// ///////////////////////////////////////////////////////////////////////

/// A position in the game world.
#[derive(Clone, Debug, Component)]
#[storage(VecStorage)]
pub struct Position {
    pub point: Point2,
    // Rotation in radians.
    pub rotation: f32
}

/// A sprite
#[derive(Clone, Debug, Component)]
#[storage(VecStorage)]
pub struct Sprite {
    // Sub-rectangle of the sprite sheet to draw.
    pub clip: graphics::Rect,
    pub scale: graphics::mint::Vector2<f32>
}

/// Motion in the game world.
#[derive(Clone, Debug, Component)]
#[storage(VecStorage)]
pub struct Motion {
    pub is_blocked: bool,
    pub velocity: Vector2,
    pub acceleration: Vector2,
    // Facing angle in radians; see `Motion::update`.
    pub orientation: f32
}

/// Links an entity to its collision object in the ncollide world.
#[derive(Clone, Debug, Component)]
#[storage(VecStorage)]
pub struct Collider {
    pub handle: nc::pipeline::object::CollisionObjectSlabHandle
}

/// Just a marker that a particular entity is the player.
#[derive(Clone, Debug, Default, Component)]
#[storage(NullStorage)]
pub struct Player;

/// A projectile fired by some entity.
#[derive(Clone, Debug, Default, Component)]
#[storage(VecStorage)]
pub struct Shot {
    pub damage: u32,
}

impl Motion {
    // Recomputes velocity by rotating the acceleration vector by the negated
    // orientation angle. NOTE(review): velocity is overwritten (not
    // integrated) each call — confirm this is the intended motion model.
    pub fn update(&mut self) {
        let rotation = euclid::Rotation2D::radians(self.orientation*-1.0);
        self.velocity = rotation.transform_vector(&self.acceleration);
    }
}

/// Registers every component type with the specs world; must run before any
/// entity using these components is created.
pub fn register_components(specs_world: &mut World) {
    specs_world.register::<Position>();
    specs_world.register::<Motion>();
    specs_world.register::<Collider>();
    // specs_world.register::<Shot>();
    specs_world.register::<Player>();
    specs_world.register::<Sprite>();
}
use std::{ cell::RefCell, cmp::{max, min}, rc::Rc, str::pattern::Pattern, }; use url::Url; use winit::window::Window; use crate::{ buffer::{Buffer, BufferMode}, editor::{FileFinder, Workspace, MAX_SHOWN_FILE_FINDER_ITEMS}, graphics_context::GraphicsContext, language_server::LanguageServer, language_server_types::ParameterLabelType, text_utils::search_highlights, theme::{Theme, THEMES}, view::View, }; #[derive(Clone, Copy, Debug)] pub enum TextEffectKind { ForegroundColor(Color), } #[derive(Clone, Copy, Debug)] pub struct TextEffect { pub kind: TextEffectKind, pub start: usize, pub length: usize, } #[derive(Clone, Copy, Debug, PartialEq)] pub struct Color { pub r: f32, pub g: f32, pub b: f32, pub r_u8: u8, pub g_u8: u8, pub b_u8: u8, } #[derive(Clone, Copy, Debug, Default)] pub struct RenderLayout { pub row_offset: usize, pub col_offset: usize, pub num_rows: usize, pub num_cols: usize, } pub struct Renderer { context: GraphicsContext, pub theme: Theme, } impl Renderer { pub fn new(window: &Window) -> Self { let context = GraphicsContext::new(window); Self { context, theme: THEMES[0], } } pub fn ensure_size(&mut self, window: &Window) { self.context.ensure_size(window); } pub fn cycle_theme(&mut self) { let i = THEMES .iter() .position(|theme| *theme == self.theme) .unwrap(); self.theme = THEMES[(i + 1) % THEMES.len()]; } pub fn get_font_size(&self) -> (f64, f64) { ( self.context.font_size.0 as f64, self.context.font_size.1 as f64, ) } pub fn start_draw(&self) { self.context.begin_draw(); self.context.clear(self.theme.background_color); } pub fn end_draw(&self) { self.context.end_draw(); } pub fn draw_file_finder( &mut self, layout: &mut RenderLayout, workspace_path: &str, file_finder: &FileFinder, ) { if file_finder.files.is_empty() { return; } let selected_item = file_finder.selection_index - file_finder.selection_view_offset; let mut longest_string = file_finder .files .iter() .max_by(|x, y| x.name.len().cmp(&y.name.len())) .map(|x| x.name.len() + 1) 
.unwrap_or(0); longest_string = max(longest_string, file_finder.search_string.len()); layout.col_offset = layout.col_offset.saturating_sub(longest_string / 2); let num_shown_file_finder_items = min(file_finder.files.len(), MAX_SHOWN_FILE_FINDER_ITEMS); let mut selected_item_start_position = 0; let mut completion_string = String::default(); for (i, item) in file_finder .files .iter() .enumerate() .skip(file_finder.selection_view_offset) .take(num_shown_file_finder_items) { if i - file_finder.selection_view_offset == selected_item { selected_item_start_position = completion_string.len(); } completion_string.push_str(item.name.as_os_str().to_str().unwrap()); completion_string.push('\n'); } let effects = [ TextEffect { kind: TextEffectKind::ForegroundColor(self.theme.foreground_color), start: 0, length: completion_string.len(), }, TextEffect { kind: TextEffectKind::ForegroundColor(self.theme.background_color), start: selected_item_start_position, length: file_finder.files[file_finder.selection_index].name.len(), }, ]; self.context.draw_completion_popup( 0, 0, layout, &file_finder.search_string, file_finder.selection_index - file_finder.selection_view_offset, completion_string.as_bytes(), self.theme.selection_background_color, self.theme.background_color, Some(&effects), &self.theme, ); } pub fn draw_status_line( &mut self, workspace: &Option<Workspace>, opened_file: Option<Url>, layout: &RenderLayout, active: bool, ) { self.context.fill_cells( 0, 0, layout, (layout.num_cols, 2), self.theme.status_line_background_color, ); let color = if active { self.theme.palette.fg0 } else { self.theme.palette.bg2 }; let (status_line, mut effects) = if let Some(opened_file) = opened_file { let file_path = opened_file.to_file_path().unwrap(); let mut effects = vec![]; if let Some(workspace) = workspace { if workspace.path.is_prefix_of(file_path.to_str().unwrap()) { effects.push(TextEffect { kind: TextEffectKind::ForegroundColor(color), start: 1, length: workspace.path.len(), }); } } 
(format!(" {}", file_path.to_str().unwrap()), effects) } else { ( format!( " {}", if workspace.is_some() { &workspace.as_ref().unwrap().path } else { "No workspace open" } ), vec![], ) }; effects.insert( 0, TextEffect { kind: TextEffectKind::ForegroundColor(color), start: 0, length: status_line.len(), }, ); self.context.draw_text( 0, 0, layout, status_line.as_bytes(), &effects, &self.theme, false, ); } pub fn draw_buffer( &mut self, buffer: &Buffer, layout: &RenderLayout, view: &View, language_server: &Option<Rc<RefCell<LanguageServer>>>, active: bool, ) { use TextEffectKind::*; let text = view.visible_text(buffer, layout); let text_offset = view.visible_text_offset(buffer); let mut effects = vec![TextEffect { kind: ForegroundColor(self.theme.foreground_color), start: 0, length: text.len(), }]; if let Some(syntect) = &buffer.syntect { effects.extend(syntect.highlight_lines( &buffer.piece_table, view.line_offset, view.line_offset + layout.num_rows, )) } if buffer.input.as_bytes().first() == Some(&b'/') { let mut first_result_found = false; for (start, length) in search_highlights(&text, &buffer.input[1..]) { let (row, col) = ( view.absolute_to_view_row(buffer.piece_table.line_index(text_offset + start)), view.absolute_to_view_col(buffer.piece_table.col_index(text_offset + start)), ); let (mut foreground_color, mut background_color) = ( self.theme.search_foreground_color, self.theme.search_background_color, ); if !first_result_found && buffer .cursors .last() .is_some_and(|cursor| text_offset + start >= cursor.position) { foreground_color = self.theme.active_search_foreground_color; background_color = self.theme.active_search_background_color; first_result_found = true; } self.context .fill_cells(row, col, layout, (length, 1), background_color); self.context .fill_cells(row, col, layout, (1, 1), self.theme.cursor_color); effects.push(TextEffect { kind: ForegroundColor(foreground_color), start, length, }); } } else if active { if buffer.mode != BufferMode::Insert { 
view.visible_cursors_iter(layout, buffer, |row, col, num| { self.context.fill_cells( row, col, layout, (num, 1), self.theme.selection_background_color, ); }); } view.visible_cursor_leads_iter(buffer, layout, |row, col, pos| { if buffer.mode == BufferMode::Insert { self.context .fill_cell_slim_line(row, col, layout, self.theme.cursor_color); } else { self.context .fill_cells(row, col, layout, (1, 1), self.theme.cursor_color); effects.push(TextEffect { kind: ForegroundColor(self.theme.background_color), start: pos - text_offset, length: 1, }) } }); } self.context .draw_text_fit_view(view, layout, &text, &effects, &self.theme); if let Some(server) = language_server { if let Some(diagnostics) = server .borrow() .saved_diagnostics .get(&buffer.uri.to_lowercase()) { view.visible_diagnostic_lines_iter( buffer, layout, diagnostics, |row, col, count| { self.context.underline_cells( row, col, layout, count, self.theme.diagnostic_color, ); }, ); } } view.visible_completions(buffer, layout, |completions, completion_view, request| { if completions.is_empty() { return; } let selected_item = request.selection_index - request.selection_view_offset; self.context.fill_cells( completion_view.row, completion_view.col.saturating_sub(1), layout, (completion_view.width + 1, completion_view.height), self.theme.selection_background_color, ); self.context.fill_cells( completion_view.row + selected_item, completion_view.col.saturating_sub(1), layout, (completion_view.width + 1, 1), self.theme.cursor_color, ); let mut selected_item_start_position = 0; let mut completion_string = String::default(); for (i, item) in completions .iter() .skip(request.selection_view_offset) .enumerate() .take(completion_view.height) { if i == selected_item { selected_item_start_position = completion_string.len(); } completion_string.push_str(item.insert_text.as_ref().unwrap_or(&item.label)); completion_string.push('\n'); } let effects = [ TextEffect { kind: ForegroundColor(self.theme.foreground_color), start: 0, 
length: completion_string.len(), }, TextEffect { kind: ForegroundColor(self.theme.background_color), start: selected_item_start_position, length: completions[request.selection_index] .insert_text .as_ref() .unwrap_or(&completions[request.selection_index].label) .len() + 1, }, ]; let detail_string = completions[request.selection_index] .detail .clone() .unwrap_or_default(); let label_string = if completions[request.selection_index] .insert_text .as_ref() .is_some_and(|text| { text.trim() != completions[request.selection_index].label.trim() }) { completions[request.selection_index].label.clone() } else { String::default() }; let mut label_detail_combined = String::default(); let longest_detail_string = detail_string .split('\n') .max_by(|x, y| x.len().cmp(&y.len())) .unwrap_or("") .len(); if detail_string .as_bytes() .iter() .filter(|&c| *c == b'\n') .count() == label_string .as_bytes() .iter() .filter(|&c| *c == b'\n') .count() { for (detail, label) in detail_string.split('\n').zip(label_string.split('\n')) { label_detail_combined.push_str(detail); for _ in 0..longest_detail_string - detail.len() { label_detail_combined.push(' '); } label_detail_combined.push_str(label); label_detail_combined.push('\n'); } } if !label_detail_combined.trim().is_empty() { let mut bytes = vec![]; for c in label_detail_combined.as_bytes() { if c.is_ascii() { bytes.push(*c) } else if bytes.last().is_some_and(|c| *c != b' ') { bytes.push(b' '); } } self.context.draw_popup_below( completion_view.row, completion_view.col + completion_view.width, layout, bytes.trim_ascii_end(), self.theme.selection_background_color, self.theme.background_color, None, &self.theme, false, ); } self.context.draw_text( completion_view.row, completion_view.col, layout, completion_string.as_bytes(), &effects, &self.theme, false, ); }); view.visible_signature_helps(buffer, layout, |signature_help, signature_help_view| { if let Some(active_signature) = signature_help .signatures 
.get(signature_help.active_signature.unwrap_or(0) as usize) { let active_parameter = active_signature .active_parameter .or(signature_help.active_parameter); let mut effects = vec![]; if let Some(parameters) = &active_signature.parameters { if let Some(active_parameter) = active_parameter.and_then(|i| parameters.get(i as usize)) { match &active_parameter.label { ParameterLabelType::String(label) => { for (start, _) in active_signature.label.match_indices(label.as_str()) { if !active_signature.label.as_bytes()[start + label.len()] .is_ascii_alphanumeric() { effects.push(TextEffect { kind: ForegroundColor( self.theme.active_parameter_color, ), start, length: label.len(), }); } } } ParameterLabelType::Offsets(start, end) => { effects.push(TextEffect { kind: ForegroundColor(self.theme.foreground_color), start: *start as usize, length: *end as usize - *start as usize + 1, }); } } } } self.context.draw_popup_above( signature_help_view.row, signature_help_view.col, layout, active_signature.label.as_bytes(), self.theme.selection_background_color, self.theme.background_color, Some(&effects), &self.theme, false, ); } }); if buffer .input .as_bytes() .first() .is_some_and(|c| *c == b':' || *c == b'/') { self.context.draw_popup_above( layout.num_rows, 0, layout, buffer.input.as_bytes(), self.theme.selection_background_color, self.theme.background_color, None, &self.theme, false, ); } } pub fn draw_buffer_hovers( &mut self, buffer: &Buffer, layout: &RenderLayout, view: &View, language_server: &Option<Rc<RefCell<LanguageServer>>>, ) { if let Some(server) = language_server { if let Some(diagnostics) = server .borrow() .saved_diagnostics .get(&buffer.uri.to_lowercase()) { if let Some((line, col)) = view.hover { if let Some(diagnostic) = diagnostics.iter().find(|diagnostic| { let (start_line, start_col) = ( diagnostic.range.start.line as usize, diagnostic.range.start.character as usize, ); let (end_line, end_col) = ( diagnostic.range.end.line as usize, 
diagnostic.range.end.character as usize, ); let diagnostic_on_cursor_line = buffer.mode == BufferMode::Insert && buffer.cursors.iter().any(|cursor| { (start_line..=end_line) .contains(&buffer.piece_table.line_index(cursor.position)) }); !diagnostic_on_cursor_line && ((start_line == line && (start_col..=end_col).contains(&col)) || (end_line == line && (start_col..=end_col).contains(&col)) || (diagnostic.range.start.line as usize ..diagnostic.range.end.line as usize) .contains(&line)) }) { let (row, col) = ( view.absolute_to_view_row(line) + 1, view.absolute_to_view_col(col) + 1, ); self.context.draw_popup_below( row, col, layout, diagnostic.message.as_bytes(), self.theme.selection_background_color, self.theme.background_color, None, &self.theme, true, ); } else if let Some(hover_message) = &view.hover_message { // TODO: Rendering the hover message this way is pretty inefficient. // However, most hovers are not many thousands characters long.. let (row, col) = ( view.absolute_to_view_row(line) + 1, view.absolute_to_view_col(col) + 1, ); let mut leading_lines = 0; let mut line_limit = 0; let mut offset = 0; let truncated_message: Vec<u8> = hover_message .message .as_bytes() .iter() .skip_while(|&x| { let skip = leading_lines < hover_message.line_offset; if *x == b'\n' { leading_lines += 1; } offset += 1; skip }) .take_while(|&x| { let limit_reached = line_limit < (layout.num_rows / 2); if *x == b'\n' { line_limit += 1; } limit_reached }) .copied() .collect(); let mut offset_ranges = vec![]; for range in &hover_message.code_block_ranges { offset_ranges.push(( range.0.saturating_sub(offset), range.1.saturating_sub(offset), )); } let mut effects = vec![]; if let Some(syntect) = &buffer.syntect { effects = syntect.highlight_code_blocks(&truncated_message, &offset_ranges); } self.context.draw_popup_below( row, col, layout, &truncated_message, self.theme.selection_background_color, self.theme.background_color, Some(&effects), &self.theme, true, ); } } } } } pub fn 
draw_numbers(&mut self, buffer: &Buffer, layout: &RenderLayout, view: &View) { let mut numbers = String::default(); let num_lines = buffer.piece_table.num_lines(); for line in view.line_offset + 1..=min(view.line_offset + 1 + layout.num_rows, num_lines) { numbers.push_str(line.to_string().as_str()); numbers.push(b'\n' as char); } self.context.fill_cells( 0, 0, layout, (layout.num_cols + 2, layout.num_rows), self.theme.background_color, ); self.context.draw_text( 0, 1, layout, numbers.as_bytes(), &[TextEffect { kind: TextEffectKind::ForegroundColor(self.theme.numbers_color), start: 0, length: numbers.len(), }], &self.theme, true, ); } pub fn draw_split(&mut self, window: &Window) { let window_size = ( window.inner_size().width as f64 / window.scale_factor(), window.inner_size().height as f64 / window.scale_factor(), ); let font_size = self.get_font_size(); let num_rows = ((window_size.1 / font_size.1).ceil() as usize).saturating_sub(1); let layout = RenderLayout { row_offset: 0, col_offset: (window_size.0 / font_size.0 / 2.0).ceil() as usize, num_rows, num_cols: 2, }; for i in 0..num_rows { self.context .fill_cell_slim_line(i, 0, &layout, self.theme.numbers_color); } } } impl Color { pub const fn from_rgb(r: u8, g: u8, b: u8) -> Self { Self { r: r as f32 / 255.0, g: g as f32 / 255.0, b: b as f32 / 255.0, r_u8: r, g_u8: g, b_u8: b, } } }
#[macro_export] macro_rules! forward { ($(fn $method:ident(self $(, $arg:ident: $ty:ty)*) -> $rty:ty;)*) => { $( #[inline] fn $method(self $(, $arg: $ty )*) -> $rty { Self::$method(self $(, $arg)*) } )* }; (to_inner $($inner:ident => fn $method:ident(&self $(, $arg:ident: $ty:ty)*) -> $rty:ty;)*) => { $( #[inline] pub fn $method(&self $(, $arg: $ty )*) -> $rty { self.inner.$method($(, $arg)*) } )* }; (to_inner_type $($inner:ident => fn $method:ident() -> $outer:ident;)*) => { $( #[inline] pub fn $method() -> $outer<T> { $outer::new($inner::$method().data) } )* }; (to_inner_mut $($inner:ident => fn $method:ident(&mut self $(, $arg:ident: $ty:ty)*) -> $rty:ty;)*) => { $( #[inline] pub fn $method(&mut self $(, $arg: $ty )*) -> $rty { self.inner.$method($(, $arg)*) } )* }; (to_const_inner $($inner:ident => fn $method:ident(&self $(, $arg:ident: $ty:ty)*) -> $rty:ty;)*) => { $( #[inline] pub const fn $method(&self $(, $arg: $ty )*) -> $rty { self.inner.$method($(, $arg)*) } )* } }
#[macro_use] pub mod macros; pub mod core; pub mod x;
#![deny(unsafe_code)] #![deny(missing_docs)] #![deny(warnings)] //! Core crates for loading and communicating with inversion api implementations in the Rust programming language. pub mod inv_any; pub mod inv_api_impl; pub mod inv_api_spec; pub mod inv_broker; pub mod inv_codec; pub mod inv_error; pub mod inv_id; pub mod inv_loader; pub mod inv_share; pub mod inv_uniq; pub use inv_error::*;
pub use crate as pallet_genesis_history; use super::*; use sp_core::H256; use frame_support::parameter_types; use sp_runtime::{ traits::{BlakeTwo256, IdentityLookup}, testing::Header, }; use frame_system as system; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>; type Block = frame_system::mocking::MockBlock<Test>; frame_support::construct_runtime!( pub enum Test where Block = Block, NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Module, Call, Config, Storage, Event<T>}, GenesisHistory: pallet_genesis_history::{Module, Storage, Config}, } ); parameter_types! { pub const BlockHashCount: u64 = 250; pub const SS58Prefix: u8 = 42; } impl system::Config for Test { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); type DbWeight = (); type Origin = Origin; type Call = Call; type Index = u64; type BlockNumber = u64; type Hash = H256; type Hashing = BlakeTwo256; type AccountId = u64; type Lookup = IdentityLookup<Self::AccountId>; type Header = Header; type Event = Event; type BlockHashCount = BlockHashCount; type Version = (); type PalletInfo = PalletInfo; type AccountData = (); type OnNewAccount = (); type OnKilledAccount = (); type SystemWeightInfo = (); type SS58Prefix = SS58Prefix; } impl pallet_genesis_history::Config for Test {} pub struct ExtBuilder { pub chain: Chain } impl ExtBuilder { // builds genesis config pub fn build(self) -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::<Test>().unwrap(); let build = pallet_genesis_history::GenesisConfig { previous_chain: self.chain.clone() }; build.assimilate_storage::<Test>(&mut t).unwrap(); t.into() } } impl Default for ExtBuilder { fn default() -> Self { Self { chain: Default::default() } } }
use crate::memory::paging::PageEntry; #[derive(Clone,Debug)] pub struct MemoryAttr { user : bool, // 用户态是否可访问 readonly : bool, // 是否只读 execute : bool, // 是否可执行 } impl MemoryAttr { // 默认 用户态不可访问;可写;不可执行; pub fn new() -> Self{ MemoryAttr { user : false, readonly : false, execute : false, } } // 根据要求修改所需权限 pub fn set_user(mut self) -> Self { self.user = true; self } pub fn set_readonly(mut self) -> Self { self.readonly = true; self } pub fn set_execute(mut self) -> Self { self.execute = true; self } // 根据设置的权限要求修改页表项 pub fn apply(&self, entry : &mut PageEntry) { entry.set_present(true); // 设置页表项存在 entry.set_user(self.user); // 设置用户态访问权限 entry.set_writable(!self.readonly); //设置写权限 entry.set_execute(self.execute); //设置可执行权限 } }
use crate::ppu::Ppu; use gif::{Encoder, Frame, Parameter, Repeat}; use std::fs::File; use std::io::Write; use std::time::{SystemTime, UNIX_EPOCH}; pub type GifEncoder = Encoder<std::fs::File>; pub fn new_gif_encoder(width: u16, height: u16) -> GifEncoder { let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); let name = format!("{:?}.gif", now.as_millis()); let color_map = &[0xFF, 0xFF, 0xFF, 0, 0, 0]; let image = File::create(name).unwrap();; let mut encoder = Encoder::new(image, width, height, color_map).unwrap(); Repeat::Infinite.set_param(&mut encoder).unwrap(); encoder } pub fn write_gif_frame<W: Write>(encoder: &mut gif::Encoder<W>, ppu: &Ppu) { // FIXME Enlarge GIF let mut pixels = Vec::with_capacity(ppu.canvas.len()); for p in ppu.canvas.iter() { pixels.push(p.r); pixels.push(p.g); pixels.push(p.b); } let mut frame = Frame::from_rgb_speed(256, 240, &pixels, 30); frame.delay = 6; encoder.write_frame(&frame).unwrap(); }
extern crate libc; use api::ErrorCode; use self::libc::{c_char, c_uchar}; /// Creates a new local pool ledger that can be used later to connect pool nodes. /// /// #Params /// name: Name of the pool /// config (optional): Pool configuration json. if NULL, then default config will be used. Example: /// { /// "genesis_txn": string (optional), A path to genesis transaction file. If NULL, then a default one will be used. /// If file doesn't exists default one will be created. /// } /// /// #Returns /// Error code /// /// #Errors /// Common* /// Ledger* #[no_mangle] pub extern fn sovrin_create_pool_ledger(command_handle: i32, name: *const c_char, config: *const c_char, cb: extern fn(xcommand_handle: i32, err: ErrorCode)) -> ErrorCode { unimplemented!(); } /// Opens pool ledger and performs connecting to pool nodes. /// /// Pool with corresponded name must be previously created with sovrin_create_pool method. /// It is impossible to open pool with the same name more than once. /// /// name: Name of the pool. /// config (optional): Runtime pool configuration json. /// if NULL, then default config will be used. Example: /// { /// "refreshOnOpen": bool (optional), Forces pool ledger to be refreshed immediately after opening. /// Defaults to true. /// "autoRefreshTime": int (optional), After this time in minutes pool ledger will be automatically refreshed. /// Use 0 to disable automatic refresh. Defaults to 24*60. /// "networkTimeout": int (optional), Network timeout for communication with nodes in milliseconds. /// Defaults to 20000. /// } /// /// #Returns /// Handle to opened pool to use in methods that require pool connection. /// /// #Errors /// Common* /// Ledger* #[no_mangle] pub extern fn sovrin_open_pool_ledger(command_handle: i32, name: *const c_char, config: *const c_char, cb: extern fn(xcommand_handle: i32, err: ErrorCode, pool_handle: i32)) -> ErrorCode { unimplemented!(); } /// Refreshes a local copy of a pool ledger and updates pool nodes connections. 
/// /// #Params /// handle: pool handle returned by sovrin_open_pool_ledger /// /// #Returns /// Error code /// /// #Errors /// Common* /// Ledger* #[no_mangle] pub extern fn sovrin_refresh_pool_ledger(command_handle: i32, handle: i32, cb: extern fn(xcommand_handle: i32, err: ErrorCode)) -> ErrorCode { unimplemented!(); } /// Closes opened pool ledger, opened nodes connections and frees allocated resources. /// /// #Params /// handle: pool handle returned by sovrin_open_pool_ledger. /// /// #Returns /// Error code /// /// #Errors /// Common* /// Ledger* #[no_mangle] pub extern fn sovrin_close_pool_ledger(command_handle: i32, handle: i32, cb: extern fn(xcommand_handle: i32, err: ErrorCode)) -> ErrorCode { unimplemented!(); } /// Deletes created pool ledger. /// /// #Params /// name: Name of the pool ledger to delete. /// /// #Returns /// Error code /// /// #Errors /// Common* /// Ledger* #[no_mangle] pub extern fn sovrin_delete_pool_ledger(command_handle: i32, name: *const c_char, cb: extern fn(xcommand_handle: i32, err: ErrorCode)) -> ErrorCode { unimplemented!(); }
use crate::prelude::*; use std::os::raw::c_void; use std::ptr; #[repr(C)] #[derive(Debug)] pub struct VkFenceCreateInfo { pub sType: VkStructureType, pub pNext: *const c_void, pub flags: VkFenceCreateFlagBits, } impl VkFenceCreateInfo { pub fn new<T>(flags: T) -> Self where T: Into<VkFenceCreateFlagBits>, { VkFenceCreateInfo { sType: VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, pNext: ptr::null(), flags: flags.into(), } } }
use std::f64::consts::PI; // untested fn zero_to_2pi( angle: f64 ) -> f64 { if angle >= 0 && angle > PI { angle } else { var a = angle % ( 2 * PI ); if a >= 0 { a } else { a + 2 * PI } } }
use std::fmt::Display; use std::io; use crate::kb::Key; use crate::term::Term; pub use crate::common_term::*; pub const DEFAULT_WIDTH: u16 = 80; #[inline] pub fn is_a_terminal(_out: &Term) -> bool { #[cfg(target = "wasm32-wasi")] { unsafe { libc::isatty(out.as_raw_fd()) != 0 } } #[cfg(not(target = "wasm32-wasi"))] { false } } #[inline] pub fn is_a_color_terminal(_out: &Term) -> bool { // We currently never report color terminals. For discussion see // the issue in the WASI repo: https://github.com/WebAssembly/WASI/issues/162 false } #[inline] pub fn terminal_size(_out: &Term) -> Option<(u16, u16)> { None } pub fn read_secure() -> io::Result<String> { Err(io::Error::new( io::ErrorKind::Other, "unsupported operation", )) } pub fn read_single_key() -> io::Result<Key> { Err(io::Error::new( io::ErrorKind::Other, "unsupported operation", )) } #[inline] pub fn wants_emoji() -> bool { false } pub fn set_title<T: Display>(_title: T) {}
use crate::buf::{Buf, Channels, ChannelsMut, ExactSizeBuf}; use crate::channel::{Channel, ChannelMut}; use crate::io::ReadBuf; /// A buffer that has been limited. /// /// See [Buf::limit]. pub struct Limit<B> { buf: B, limit: usize, } impl<B> Limit<B> { /// Construct a new limited buffer. pub(crate) fn new(buf: B, limit: usize) -> Self { Self { buf, limit } } } /// [Limit] adjusts various implementations to report sensible values, such /// as [Buf]. /// /// ```rust /// use audio::Buf; /// /// let buf = audio::interleaved![[0; 4]; 2]; /// /// assert_eq!((&buf).limit(0).channels(), 2); /// assert_eq!((&buf).limit(0).frames_hint(), Some(0)); /// /// assert_eq!((&buf).limit(1).channels(), 2); /// assert_eq!((&buf).limit(1).frames_hint(), Some(1)); /// /// assert_eq!((&buf).limit(5).channels(), 2); /// assert_eq!((&buf).limit(5).frames_hint(), Some(4)); /// ``` impl<B> Buf for Limit<B> where B: Buf, { fn frames_hint(&self) -> Option<usize> { let frames = self.buf.frames_hint()?; Some(usize::min(frames, self.limit)) } fn channels(&self) -> usize { self.buf.channels() } } /// [Limit] adjusts the implementation of [ExactSizeBuf] to take the frame /// limiting into account. 
/// /// ```rust /// use audio::{Buf, ExactSizeBuf}; /// /// let buf = audio::interleaved![[0; 4]; 2]; /// /// assert_eq!((&buf).limit(0).frames(), 0); /// assert_eq!((&buf).limit(1).frames(), 1); /// assert_eq!((&buf).limit(5).frames(), 4); /// ``` impl<B> ExactSizeBuf for Limit<B> where B: ExactSizeBuf, { fn frames(&self) -> usize { usize::min(self.buf.frames(), self.limit) } } impl<B, T> Channels<T> for Limit<B> where B: Channels<T>, { fn channel(&self, channel: usize) -> Channel<'_, T> { self.buf.channel(channel).limit(self.limit) } } impl<B, T> ChannelsMut<T> for Limit<B> where B: ChannelsMut<T>, { fn channel_mut(&mut self, channel: usize) -> ChannelMut<'_, T> { self.buf.channel_mut(channel).limit(self.limit) } fn copy_channels(&mut self, from: usize, to: usize) where T: Copy, { self.buf.copy_channels(from, to); } } impl<B> ReadBuf for Limit<B> where B: ReadBuf, { fn remaining(&self) -> usize { usize::min(self.buf.remaining(), self.limit) } fn advance(&mut self, n: usize) { self.buf.advance(usize::min(n, self.limit)); } }
pub mod abilities; pub mod ai_controlled; pub mod attacking; pub mod collider; pub mod energy; pub mod health; pub mod moving; pub mod owned; pub mod player_controlled; pub mod position; pub mod provides_vision; pub mod replicated; pub mod respawns; pub mod rotation; pub mod search_hostile; pub mod status_effects; pub mod team; pub mod velocity; pub mod visible; pub mod waypoints; pub mod all { pub use super::abilities::*; pub use super::ai_controlled::*; pub use super::attacking::*; pub use super::collider::*; pub use super::energy::*; pub use super::health::*; pub use super::moving::*; pub use super::owned::*; pub use super::player_controlled::*; pub use super::position::*; pub use super::provides_vision::*; pub use super::replicated::*; pub use super::respawns::*; pub use super::rotation::*; pub use super::search_hostile::*; pub use super::status_effects::*; pub use super::team::*; pub use super::velocity::*; pub use super::visible::*; pub use super::waypoints::*; }
use cipher::{ctr::AES_128_CTR, Cipher}; use encoding::base64::*; fn main() { println!("🔓 Challenge 18"); let ctr_cipher = AES_128_CTR::new_with_nonce(0 as u64); let ct_base64 = Base64::from_str( "L77na/nrFsKvynd6HzOoG7GHTLXsTVu9qvY/2syLXzhPweyyMTJULu/6/kXX0KSvoOLSFQ==", ) .unwrap(); let ct = ct_base64.as_bytes(); let pt = ctr_cipher.decrypt(&b"YELLOW SUBMARINE"[..], &ct); println!("Decrypted: {:?}", String::from_utf8(pt).unwrap()); }
#![allow(unused)] use criterion::{black_box, criterion_group, criterion_main, Criterion}; use ndarray::Array2; use optimization::{LpOptimizer, Optimizer}; use std::path::PathBuf; fn load_input() -> Array2<f32> { let input_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join("benches") .join("weights.json"); let input_file = std::fs::File::open(input_path).expect("unable to open input file"); let weights: Vec<Vec<f32>> = serde_json::from_reader(input_file).expect("failed to parse input"); Array2::from_shape_vec( (weights.len(), weights[0].len()), weights.iter().flatten().copied().collect::<Vec<_>>(), ) .expect("invavlid nodes config") } pub fn lp_optimizer_benchmark(c: &mut Criterion) { let optimizer = LpOptimizer; let weights = load_input(); c.bench_function("LP CW09B", |b| { b.iter(|| optimizer.optimize(black_box(weights.view()))) }); } criterion_group!(benches, lp_optimizer_benchmark); criterion_main!(benches);
// Copyright 2019 The vault713 Developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. extern crate bitcoin; extern crate bitcoin_hashes; extern crate blake2_rfc as blake2; extern crate byteorder; extern crate failure; extern crate grin_core; extern crate grin_keychain; extern crate grin_util; extern crate grin_wallet_libwallet as libwallet; extern crate hex; #[macro_use] extern crate lazy_static; extern crate rand; extern crate serde; #[macro_use] extern crate serde_derive; extern crate serde_json; extern crate sha2; extern crate uuid; pub use swap::api::SwapApi; pub use swap::bitcoin::{BtcNodeClient, BtcSwapApi, ElectrumNodeClient, TestBtcNodeClient}; pub use swap::message::{Message, OfferUpdate, SecondaryUpdate, Update}; pub use swap::multisig::Builder; pub use swap::types::{Action, BuyerContext, Context, Currency, Role, SellerContext, Status}; pub use swap::{is_test_mode, set_test_mode, ErrorKind, Keychain, Swap}; mod swap;
use std::collections::BinaryHeap; use std::iter::Iterator; // use std::boxed::Box; enum Primes { Two, Three, Sieve{q: BinaryHeap<(i32, i32)>, i: i32}, } fn advance_top(q: &mut BinaryHeap<(i32, i32)>, v: i32) -> bool { match q.peek() { None => unreachable!(), Some(&(v2, p)) => { if v == v2 { q.pop(); q.push((v2 - 2 * p, p)); true } else { false } } } } fn try_next(q: &mut BinaryHeap<(i32, i32)>, i: i32) -> bool { match q.peek() { None => unreachable!(), Some(&(v, _)) => { if i < -v { q.push((-i*i, i)); true } else { while advance_top(q, v) {} false } } } } impl Iterator for Primes { type Item = i32; fn next(&mut self) -> Option<i32> { match *self { Primes::Two => { *self = Primes::Three; Some(2) } Primes::Three => { let mut q = BinaryHeap::new(); q.push((-9, 3)); *self = Primes::Sieve {q: q, i: 3}; Some(3) } Primes::Sieve {ref mut q, ref mut i} => { while !try_next(q, {*i += 2; *i}) {} Some(*i) } } } } fn primes() -> Primes { Primes::Two } fn main() { for p in primes() { println!("{}", p); if p > 100 { break } } println!("Hello, world!"); }
// Copyright 2021 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write; use common_expression::types::*; use common_expression::FromData; use goldenfile::Mint; use super::run_ast; #[test] fn test_comparison() { let mut mint = Mint::new("tests/it/scalars/testdata"); let file = &mut mint.new_goldenfile("comparison.txt").unwrap(); test_eq(file); test_noteq(file); test_lt(file); test_lte(file); test_gt(file); test_gte(file); test_like(file); test_regexp(file); } fn test_eq(file: &mut impl Write) { run_ast(file, "'1'='2'", &[]); run_ast(file, "null=null", &[]); run_ast(file, "1=2", &[]); run_ast(file, "1.0=1", &[]); run_ast(file, "true=null", &[]); run_ast(file, "true=false", &[]); run_ast(file, "false=false", &[]); run_ast(file, "true=true", &[]); run_ast(file, "[]=[]", &[]); run_ast(file, "[1, 2]=[1, 2]", &[]); run_ast(file, "[true]=[]", &[]); run_ast(file, "(1, 'a') = (1,)", &[]); run_ast(file, "(1, 'a') = (1, 'a')", &[]); run_ast(file, "(1, 'a') = (1, 'b')", &[]); run_ast(file, "today()='2020-01-01'", &[]); run_ast( file, "to_timestamp(-315360000000000)=to_timestamp(-100)", &[], ); run_ast(file, "lhs = rhs", &[ ( "lhs", UInt8Type::from_data(vec![0u8, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), ), ( "rhs", Int64Type::from_data(vec![0i64, -1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), ), ]); run_ast(file, "1.1=1.1", &[]); run_ast( file, r#"parse_json('[1,2,3,["a","b","c"]]') = parse_json('[1,2,3,["a","b","c"]]')"#, &[], ); let table = [ ( "lhs", 
StringType::from_data(vec![ r#"null"#, r#"true"#, r#"9223372036854775807"#, r#"-32768"#, r#"1234.5678"#, r#"{"k":"v","a":"b"}"#, r#"[1,2,3,["a","b","c"]]"#, ]), ), ( "rhs", StringType::from_data(vec![ r#"null"#, r#"true"#, r#"9223372036854775807"#, r#"-32768"#, r#"1234.5678"#, r#"{"k":"v","a":"d"}"#, r#"[1,2,3,["a","b","c"]]"#, ]), ), ]; run_ast(file, "parse_json(lhs) = parse_json(rhs)", &table); run_ast(file, "lhs = rhs", &table); } fn test_noteq(file: &mut impl Write) { run_ast(file, "'1'!='2'", &[]); run_ast(file, "1!=2", &[]); run_ast(file, "1.1!=1.1", &[]); run_ast(file, "true != true", &[]); run_ast(file, "true != null", &[]); run_ast(file, "true != false", &[]); run_ast(file, "[] != []", &[]); run_ast(file, "['a'] != ['a']", &[]); run_ast(file, "['a'] != ['b']", &[]); run_ast(file, "(1, 'a') != (1,)", &[]); run_ast(file, "(1, 'a') != (1, 'a')", &[]); run_ast(file, "(1, 'a') != (1, 'b')", &[]); run_ast( file, "to_timestamp(-315360000000000)!=to_timestamp(-100)", &[], ); run_ast( file, r#"parse_json('"databend"') != parse_json('"databend"')"#, &[], ); let table = [ ( "lhs", StringType::from_data(vec![ r#"null"#, r#"true"#, r#"9223372036854775807"#, r#"[1,2,3,["a","b","c"]]"#, ]), ), ( "rhs", StringType::from_data(vec![ r#"null"#, r#"true"#, r#"9223372036854775807"#, r#"[1,2,3,["a","b","c"]]"#, ]), ), ]; run_ast(file, "parse_json(lhs) != parse_json(rhs)", &table); run_ast(file, "lhs != rhs", &table); } fn test_lt(file: &mut impl Write) { run_ast(file, "'1'<'2'", &[]); run_ast(file, "3<2", &[]); run_ast(file, "1.1<1.1", &[]); run_ast(file, "true < true", &[]); run_ast(file, "true < null", &[]); run_ast(file, "true < false", &[]); run_ast(file, "[] < []", &[]); run_ast(file, "[1, 2] < [2, 3]", &[]); run_ast(file, "(1, 'b') < (1, 'a')", &[]); run_ast(file, "(1, 'a') < (1, 'b')", &[]); run_ast(file, "(1, 'a') < (2, 'a')", &[]); run_ast( file, "to_timestamp(-315360000000000)<to_timestamp(-100)", &[], ); run_ast(file, r#"parse_json('"true"') < 
parse_json('"false"')"#, &[]); let table = [ ( "lhs", StringType::from_data(vec![ r#"null"#, r#"true"#, r#"9223372036854775807"#, r#"-32768"#, r#"1234.5678"#, r#"1.912e2"#, r#"[1,2,3,["a","b","c"]]"#, ]), ), ( "rhs", StringType::from_data(vec![ r#"null"#, r#"true"#, r#"9223372036854775800"#, r#"-33768"#, r#"1234.5678"#, r#"1.912e2"#, r#"[0,2,3,["a","b","c"]]"#, ]), ), ]; run_ast(file, "parse_json(lhs) >= parse_json(rhs)", &table); run_ast(file, "lhs < rhs", &table); } fn test_lte(file: &mut impl Write) { run_ast(file, "'5'<='2'", &[]); run_ast(file, "1<=2", &[]); run_ast(file, "1.1<=2.1", &[]); run_ast(file, "true <= true", &[]); run_ast(file, "true <= null", &[]); run_ast(file, "true <= false", &[]); run_ast(file, "[] <= []", &[]); run_ast(file, "[1, 2] <= [2, 3]", &[]); run_ast(file, "(1, 'b') <= (1, 'a')", &[]); run_ast(file, "(1, 'a') <= (1, 'b')", &[]); run_ast(file, "(1, 'a') <= (2, 'a')", &[]); run_ast(file, "parse_json('null') <= parse_json('null')", &[]); run_ast( file, "to_timestamp(-315360000000000)<=to_timestamp(-100)", &[], ); run_ast( file, "to_timestamp(-315360000000000)<=to_timestamp(-315360000000000)", &[], ); let table = [ ( "lhs", StringType::from_data(vec![ r#""databend""#, r#"{"k":"v","a":"b"}"#, r#"[1,2,3,["a","b","c"]]"#, ]), ), ( "rhs", StringType::from_data(vec![ r#""databend""#, r#"{"k":"a","a":"d"}"#, r#"[0,2,3,["a","b","c"]]"#, ]), ), ]; run_ast(file, "parse_json(lhs) <= parse_json(rhs)", &table); run_ast(file, "lhs <= rhs", &table); } fn test_gt(file: &mut impl Write) { run_ast(file, "'3'>'2'", &[]); run_ast(file, "1>2", &[]); run_ast(file, "1.2>1.1", &[]); run_ast(file, "true > true", &[]); run_ast(file, "true > null", &[]); run_ast(file, "true > false", &[]); run_ast(file, "[] > []", &[]); run_ast(file, "[1, 2] > [2, 3]", &[]); run_ast(file, "(1, 'b') > (1, 'a')", &[]); run_ast(file, "(1, 'a') > (1, 'b')", &[]); run_ast(file, "(1, 'a') > (2, 'a')", &[]); run_ast( file, "to_timestamp(-315360000000000)>to_timestamp(-100)", &[], ); 
run_ast( file, "to_timestamp(-315360000000000)>to_timestamp(-315360000000000)", &[], ); run_ast( file, r#"parse_json('{"k":"v","a":"b"}') > parse_json('{"k":"v","a":"d"}')"#, &[], ); let table = [ ( "lhs", StringType::from_data(vec![ r#"null"#, r#"true"#, r#"9223372036854775807"#, r#"-32768"#, r#"1234.5678"#, ]), ), ( "rhs", StringType::from_data(vec![ r#"null"#, r#"true"#, r#"9223372036854775806"#, r#"-32768"#, r#"1234.5678"#, ]), ), ]; run_ast(file, "parse_json(lhs) > parse_json(rhs)", &table); run_ast(file, "lhs > rhs", &table); } fn test_gte(file: &mut impl Write) { run_ast(file, "'2'>='1'", &[]); run_ast(file, "1>=2", &[]); run_ast(file, "1.1>=1.1", &[]); run_ast(file, "true >= true", &[]); run_ast(file, "true >= null", &[]); run_ast(file, "true >= false", &[]); run_ast(file, "[] >= []", &[]); run_ast(file, "[1, 2] >= [2, 3]", &[]); run_ast(file, "(1, 'b') >= (1, 'a')", &[]); run_ast(file, "(1, 'a') >= (1, 'b')", &[]); run_ast(file, "(1, 'a') >= (2, 'a')", &[]); run_ast( file, "to_timestamp(-315360000000000)>=to_timestamp(-100)", &[], ); run_ast( file, "to_timestamp(-315360000000000)>=to_timestamp(-315360000000000)", &[], ); run_ast(file, "parse_json('1.912e2') >= parse_json('1.912e2')", &[]); let table = [ ( "lhs", StringType::from_data(vec![ r#"9223372036854775807"#, r#"-32768"#, r#"1234.5678"#, r#"1.912e2"#, r#""\\\"abc\\\"""#, r#"{"k":"v","a":"b"}"#, r#"[1,2,3,["a","b","d"]]"#, ]), ), ( "rhs", StringType::from_data(vec![ r#"9223372036854775806"#, r#"-32768"#, r#"1234.5678"#, r#"1.912e2"#, r#""\\\"abc\\\"""#, r#"{"k":"v","a":"d"}"#, r#"[1,2,3,["a","b","c"]]"#, ]), ), ]; run_ast(file, "parse_json(lhs) >= parse_json(rhs)", &table); run_ast(file, "lhs >= rhs", &table); } fn test_like(file: &mut impl Write) { run_ast(file, "'1' like '2'", &[]); run_ast(file, "'hello\n' like 'h%'", &[]); run_ast(file, "'h\n' like 'h_'", &[]); run_ast(file, r#"'%' like '\%'"#, &[]); run_ast(file, r#"'v%xx' like '_\%%'"#, &[]); let columns = [( "lhs", 
StringType::from_data(vec!["abc", "abd", "abe", "abf"]), )]; run_ast(file, "lhs like 'a%'", &columns); let columns = [ ( "lhs", StringType::from_data(vec!["abc", "abd", "abe", "abf"]), ), ("rhs", StringType::from_data(vec!["a%", "_b_", "abe", "a"])), ]; run_ast(file, "lhs like rhs", &columns); } fn test_regexp(file: &mut impl Write) { let columns = [ ( "lhs", StringType::from_data(vec!["abc", "abd", "abe", "abf", "abc", ""]), ), ( "rhs", StringType::from_data(vec!["^a", "^b", "abe", "a", "", ""]), ), ]; run_ast(file, "lhs regexp rhs", &columns); run_ast(file, "lhs rlike rhs", &columns); }
use crate::{
    format::{format_cst, FormattingInfo},
    text_edits::TextEdits,
    width::{SinglelineWidth, Width},
    Indentation,
};
use candy_frontend::{
    cst::{Cst, CstError, CstKind},
    position::Offset,
};
use derive_more::From;
use itertools::Itertools;
use std::{borrow::Cow, num::NonZeroUsize};

/// What should follow a formatted CST node: nothing, a single space, or a
/// linebreak plus the given indentation.
#[derive(Clone, Debug, From)]
pub enum TrailingWhitespace {
    None,
    Space,
    Indentation(Indentation),
}

/// Configuration for `ExistingWhitespace::into_trailing_with_indentation`,
/// selecting between body-internal whitespace and whitespace trailing a node.
pub enum TrailingWithIndentationConfig {
    /// Whitespace between items of a body; `position` records whether it sits
    /// at the start, middle, or end of that body.
    Body {
        position: WhitespacePositionInBody,
        indentation: Indentation,
    },
    /// Whitespace trailing a single node; `previous_width` is the width of the
    /// content already emitted on the line (used to decide whether an inline
    /// comment still fits).
    Trailing {
        previous_width: Width,
        indentation: Indentation,
    },
}

/// Where a piece of whitespace sits inside a body.
#[derive(Clone, Copy, Debug)]
pub enum WhitespacePositionInBody {
    Start,
    Middle,
    End,
}

/// The maximum number of empty lines (i.e., containing no expression or comment) that may come
/// consecutively.
const MAX_CONSECUTIVE_EMPTY_LINES: usize = 2;

pub const SPACE: &str = " ";
pub const NEWLINE: &str = "\n";

/// Captures the existing trailing whitespace of CST nodes for later formatting.
///
/// The CST node ends at [start_offset], which is also where [whitespace] begins.
///
/// The three whitespace fields can contain singleline whitespace, linebreaks, and comments.
///
/// This struct also supports adoption: Whitespace can be “cut” from one place and “pasted” to
/// another. There are two use-cases for this:
///
/// - Move comments from an inner CST node to the parent CST node, where the actual whitespace stays
///   in the same place. E.g., the comma of a list item could contain trailing whitespace, which is
///   moved up and merged with potential trailing whitespace around the list item as a whole.
/// - Move comments to the other side of punctuation. E.g., a list item containing a comment between
///   value and comma (forcing the comma to be on a separate line) would move the trailing
///   whitespace of the value into trailing whitespace around the list item as a whole.
#[must_use]
#[derive(Clone, Debug)]
pub struct ExistingWhitespace<'a> {
    // Offset at which the owning CST node ends and `whitespace` begins.
    start_offset: Offset,
    // Whitespace adopted from elsewhere that must end up *before* `whitespace`.
    adopted_whitespace_before: Cow<'a, [Cst]>,
    // The whitespace/comment CSTs that physically follow the node.
    whitespace: Cow<'a, [Cst]>,
    // Whitespace adopted from elsewhere that must end up *after* `whitespace`.
    adopted_whitespace_after: Cow<'a, [Cst]>,
}

impl<'a> ExistingWhitespace<'a> {
    /// Creates an instance containing no whitespace at all.
    pub fn empty(start_offset: Offset) -> Self {
        Self {
            start_offset,
            adopted_whitespace_before: Cow::Borrowed(&[]),
            whitespace: Cow::Borrowed(&[]),
            adopted_whitespace_after: Cow::Borrowed(&[]),
        }
    }
    /// Wraps the given trailing whitespace CSTs, normalizing an empty input to
    /// [`Self::empty`].
    pub fn new(start_offset: Offset, whitespace: impl Into<Cow<'a, [Cst]>>) -> Self {
        let whitespace = whitespace.into();
        if whitespace.is_empty() {
            return Self::empty(start_offset);
        }

        Self {
            start_offset,
            adopted_whitespace_before: Cow::Borrowed(&[]),
            whitespace,
            adopted_whitespace_after: Cow::Borrowed(&[]),
        }
    }

    /// Offset just past the last owned (non-adopted) whitespace CST, or
    /// `start_offset` if there is none.
    pub fn end_offset(&self) -> Offset {
        self.whitespace
            .as_ref()
            .last()
            .map(|it| it.data.span.end)
            .unwrap_or(self.start_offset)
    }
    pub fn is_empty(&self) -> bool {
        self.adopted_whitespace_before.is_empty()
            && self.whitespace.is_empty()
            && self.adopted_whitespace_after.is_empty()
    }
    pub fn whitespace_ref(&self) -> &[Cst] {
        self.whitespace.as_ref()
    }

    /// Merges this whitespace into `outer`, which must start exactly where this
    /// one ends. Only valid while neither side has adopted whitespace.
    pub fn move_into_outer(self, outer: &mut ExistingWhitespace<'a>) {
        assert!(self.adopted_whitespace_before.is_empty());
        assert!(self.adopted_whitespace_after.is_empty());
        assert!(outer.adopted_whitespace_before.is_empty());
        assert!(outer.adopted_whitespace_after.is_empty());
        assert_eq!(self.end_offset(), outer.start_offset);

        outer.start_offset = self.start_offset;
        prepend(self.whitespace, &mut outer.whitespace);
    }
    /// Replaces this whitespace with a single space (reusing the first
    /// whitespace CST's span if possible) and hands any comments to `other`.
    pub fn into_space_and_move_comments_to(
        mut self,
        edits: &mut TextEdits,
        other: &mut ExistingWhitespace<'a>,
    ) {
        if let Some(whitespace) = self.whitespace.first()
            && whitespace.kind.is_whitespace()
        {
            // Reuse the span of the leading whitespace CST for the space.
            let span = match &mut self.whitespace {
                Cow::Borrowed(whitespace) => {
                    let (first, remaining) = whitespace.split_first().unwrap();
                    *whitespace = remaining;
                    first.data.span.to_owned()
                },
                Cow::Owned(whitespace) => whitespace.remove(0).data.span,
            };
            self.start_offset = span.end;
            edits.change(span, SPACE);
        } else {
            edits.insert(self.start_offset, SPACE);
        }

        self.into_empty_and_move_comments_to(edits, other);
    }
    /// Deletes this whitespace from the output and transfers its contents
    /// (notably comments) into `other` as adopted whitespace, choosing the
    /// before/after side based on the relative text offsets.
    pub fn into_empty_and_move_comments_to(
        self,
        edits: &mut TextEdits,
        other: &mut ExistingWhitespace<'a>,
    ) {
        if self.is_empty() {
            return;
        }

        let self_end_offset = self.end_offset();
        if self_end_offset <= other.start_offset {
            if self_end_offset == other.start_offset
                && self.adopted_whitespace_before.is_empty()
                && self.adopted_whitespace_after.is_empty()
                && other.adopted_whitespace_before.is_empty()
                && !edits.has_edit_at(self_end_offset)
            {
                // Simple case: The whitespace is adopted by directly following whitespace.
                other.start_offset = self.start_offset;
                prepend(self.whitespace, &mut other.whitespace);
                prepend(self.adopted_whitespace_before, &mut other.whitespace);
                return;
            }

            // Default case: We have to delete the whitespace here and re-insert the relevant parts
            // (comments) later.
            if let Some(other_adopted_first) = &other.adopted_whitespace_before.first() {
                let other_adopted_start_offset = other_adopted_first.data.span.start;
                // Adopted whitespace must keep source order.
                assert!(self_end_offset <= other_adopted_start_offset);
            }

            prepend(
                self.adopted_whitespace_after,
                &mut other.adopted_whitespace_before,
            );
            prepend(self.whitespace, &mut other.adopted_whitespace_before);
            prepend(
                self.adopted_whitespace_before,
                &mut other.adopted_whitespace_before,
            );
        } else {
            let other_end_offset = other
                .whitespace
                .last()
                .map(|it| it.data.span.end)
                .unwrap_or_else(|| other.start_offset);
            if self.start_offset == other_end_offset
                && other.adopted_whitespace_after.is_empty()
                && self.adopted_whitespace_before.is_empty()
                && self.adopted_whitespace_after.is_empty()
                && !edits.has_edit_at(self.start_offset)
            {
                // Simple case: The whitespace is adopted by directly preceding whitespace.
                append(self.whitespace, &mut other.whitespace);
                append(self.adopted_whitespace_after, &mut other.whitespace);
                return;
            }

            // Default case (see above)
            if let Some(other_adopted_last) = &other.adopted_whitespace_after.last() {
                let other_adopted_end_offset = other_adopted_last.data.span.end;
                assert!(other_adopted_end_offset <= self.start_offset);
            }

            append(
                self.adopted_whitespace_before,
                &mut other.adopted_whitespace_after,
            );
            append(self.whitespace, &mut other.adopted_whitespace_after);
            append(
                self.adopted_whitespace_after,
                &mut other.adopted_whitespace_after,
            );
        }
        edits.delete(self.start_offset..self_end_offset);
    }

    /// `true` if any of the three whitespace lists contains a comment CST.
    pub fn has_comments(&self) -> bool {
        fn check(whitespace: &[Cst]) -> bool {
            whitespace.iter().any(|it| it.kind.is_comment())
        }

        check(&self.adopted_whitespace_before)
            || check(&self.whitespace)
            || check(&self.adopted_whitespace_after)
    }

    /// Deletes all owned whitespace from the output; panics if comments would
    /// be lost.
    pub fn into_empty_trailing(self, edits: &mut TextEdits) -> SinglelineWidth {
        assert!(!self.has_comments());

        for whitespace in self.whitespace_ref() {
            edits.delete(whitespace.data.span.to_owned());
        }
        SinglelineWidth::default()
    }
    /// Collapses all owned whitespace to exactly one space; panics if comments
    /// would be lost.
    #[must_use]
    pub fn into_trailing_with_space(self, edits: &mut TextEdits) -> SinglelineWidth {
        assert!(!self.has_comments());

        if let Some((first, last)) = first_and_last(self.whitespace.as_ref()) {
            edits.change(first.data.span.start..last.data.span.end, SPACE);
        } else {
            edits.insert(self.start_offset, SPACE);
        }
        SinglelineWidth::SPACE
    }
    /// Formats this whitespace (including any comments, owned or adopted) so it
    /// ends with newline(s) plus `config`'s indentation; returns the resulting
    /// width contribution.
    #[must_use]
    pub fn into_trailing_with_indentation(
        self,
        edits: &mut TextEdits,
        config: &TrailingWithIndentationConfig,
    ) -> Width {
        fn iter_whitespace(
            whitespace: &[Cst],
            offset_override: impl Into<Option<Offset>>,
        ) -> impl Iterator<Item = (&Cst, Option<Offset>)> {
            let offset_override = offset_override.into();
            whitespace.iter().map(move |it| (it, offset_override))
        }

        // For adopted items, we need an offset override: The position where adopted comments will
        // be inserted.
        let whitespace = iter_whitespace(&self.adopted_whitespace_before, self.start_offset)
            .chain(iter_whitespace(&self.whitespace, None))
            .chain(iter_whitespace(
                &self.adopted_whitespace_after,
                self.end_offset(),
            ))
            .collect_vec();
        // `.chain(…)` doesn't produce an `ExactSizeIterator`, so it's easier to collect everything
        // into a `Vec` first.
        let last_comment_index = whitespace.iter().rposition(|(it, _)| it.kind.is_comment());
        let split_index = last_comment_index.map(|it| it + 1).unwrap_or_default();
        let (comments_and_whitespace, final_whitespace) = whitespace.split_at(split_index);
        let comments_width = Self::format_trailing_comments(edits, comments_and_whitespace, config);

        // Only owned (non-adopted) whitespace occupies a real span we can edit.
        let owned_final_whitespace = final_whitespace
            .iter()
            .filter(|(_, offset_override)| offset_override.is_none())
            .map(|(it, _)| it);
        let trailing_range = if let Some((first, last)) = first_and_last(owned_final_whitespace) {
            first.data.span.start..last.data.span.end
        } else {
            let offset = self.end_offset();
            offset..offset
        };

        let (indentation, trailing_newline_count) = match config {
            // At the very start of a body without comments, or at the end of an
            // indented body, the trailing whitespace is simply removed.
            TrailingWithIndentationConfig::Body {
                position: WhitespacePositionInBody::Start,
                ..
            } if comments_width.is_empty() => {
                edits.delete(trailing_range);
                return comments_width;
            }
            TrailingWithIndentationConfig::Body {
                position: WhitespacePositionInBody::End,
                indentation,
            } if indentation.is_indented() => {
                edits.delete(trailing_range);
                return comments_width;
            }
            TrailingWithIndentationConfig::Body {
                position: WhitespacePositionInBody::Start | WhitespacePositionInBody::Middle,
                indentation,
            } => {
                // Keep existing blank lines, capped by MAX_CONSECUTIVE_EMPTY_LINES.
                let trailing_newline_count = final_whitespace
                    .iter()
                    .filter(|(it, _)| it.kind.is_newline())
                    .count()
                    .clamp(1, 1 + MAX_CONSECUTIVE_EMPTY_LINES);
                (indentation, trailing_newline_count)
            }
            TrailingWithIndentationConfig::Trailing { indentation, .. }
            | TrailingWithIndentationConfig::Body { indentation, .. } => (indentation, 1),
        };
        edits.change(
            trailing_range,
            format!("{}{indentation}", NEWLINE.repeat(trailing_newline_count)),
        );
        comments_width + Width::NEWLINE + indentation.width()
    }
    /// Formats all comments (and the whitespace between them) that precede the
    /// final run of pure whitespace. Items with an `offset_override` are
    /// adopted: their text is re-inserted at that offset instead of edited in
    /// place.
    fn format_trailing_comments(
        edits: &mut TextEdits,
        comments_and_whitespace: &[(&Cst, Option<Offset>)],
        config: &TrailingWithIndentationConfig,
    ) -> Width {
        let (previous_width, indentation, ensure_space_before_first_comment, inner_newline_limit) =
            match config {
                TrailingWithIndentationConfig::Body {
                    indentation,
                    position,
                } => (
                    Width::Singleline(indentation.width()),
                    *indentation,
                    matches!(
                        position,
                        WhitespacePositionInBody::Middle | WhitespacePositionInBody::End,
                    ),
                    MAX_CONSECUTIVE_EMPTY_LINES,
                ),
                TrailingWithIndentationConfig::Trailing {
                    previous_width,
                    indentation,
                } => (previous_width.to_owned(), *indentation, true, 1),
            };

        let mut width = Width::default();
        // Tracks whether the newline(s) seen so far before the next comment are
        // owned (editable in place) or only adopted/nonexistent.
        enum NewlineCount {
            NoneOrAdopted,
            Owned(NonZeroUsize),
        }
        enum CommentPosition {
            FirstLine,
            NextLine(NewlineCount),
        }
        let mut comment_position = CommentPosition::FirstLine;
        // Span of owned whitespace that may be recycled for the separator
        // preceding the next comment.
        let mut last_reusable_whitespace_range = None;
        for (item, offset_override) in comments_and_whitespace {
            let is_adopted = offset_override.is_some();
            match &item.kind {
                CstKind::Whitespace(_)
                | CstKind::Error {
                    error: CstError::TooMuchWhitespace,
                    ..
                } => {
                    if !is_adopted {
                        if let Some(range) = last_reusable_whitespace_range {
                            edits.delete(range);
                        }
                        last_reusable_whitespace_range = Some(item.data.span.to_owned());
                    }
                }
                CstKind::Newline(_) => match &mut comment_position {
                    CommentPosition::FirstLine => {
                        if let Some(range) = last_reusable_whitespace_range {
                            // Delete trailing spaces in the previous line.
                            edits.delete(range);
                            last_reusable_whitespace_range = None;
                        }

                        let newline_count = if is_adopted {
                            NewlineCount::NoneOrAdopted
                        } else {
                            edits.change(item.data.span.to_owned(), NEWLINE);
                            NewlineCount::Owned(NonZeroUsize::new(1).unwrap())
                        };
                        comment_position = CommentPosition::NextLine(newline_count);
                        width += Width::NEWLINE;
                    }
                    CommentPosition::NextLine(_) if is_adopted => {
                        // We already encountered a newline (owned or adopted) and the new
                        // one is adopted. Hence, we can't reuse it and there's nothing to
                        // do for us.
                    }
                    CommentPosition::NextLine(NewlineCount::NoneOrAdopted) => {
                        // The old newline was adopted or we didn't have one yet, but we now
                        // have one to reuse.
                        if let Some(range) = last_reusable_whitespace_range {
                            // Delete old reusable whitespace since the new one has to come
                            // after this newline.
                            edits.delete(range);
                            last_reusable_whitespace_range = None;
                        }

                        comment_position = CommentPosition::NextLine(NewlineCount::Owned(
                            NonZeroUsize::new(1).unwrap(),
                        ));
                    }
                    CommentPosition::NextLine(NewlineCount::Owned(count)) => {
                        // We already encountered and kept at least one newline.
                        if count.get() >= inner_newline_limit {
                            edits.delete(item.data.span.to_owned());
                        } else {
                            *count = count.checked_add(1).unwrap();
                            width += Width::NEWLINE;
                        }
                    }
                },
                CstKind::Comment { comment, .. } => {
                    let (comment_width, comment_whitespace) = format_cst(
                        edits,
                        previous_width,
                        item,
                        &FormattingInfo {
                            indentation,
                            trailing_comma_condition: None,
                            is_single_expression_in_assignment_body: false,
                        },
                    )
                    .split();
                    assert!(comment_whitespace.is_empty());
                    _ = comment_whitespace;

                    // Decide what separator goes before this comment: a space
                    // (or nothing) if it stays on the current line, otherwise a
                    // newline plus indentation.
                    let space = match comment_position {
                        CommentPosition::FirstLine => {
                            let (space, space_width) = if ensure_space_before_first_comment {
                                (Cow::Borrowed(SPACE), SinglelineWidth::SPACE)
                            } else {
                                (Cow::Borrowed(""), SinglelineWidth::default())
                            };
                            if previous_width
                                .last_line_fits(indentation, space_width + comment_width)
                                || matches!(
                                    config,
                                    TrailingWithIndentationConfig::Body {
                                        position: WhitespacePositionInBody::Start,
                                        ..
                                    },
                                )
                            {
                                width += Width::from(space_width);
                                space
                            } else {
                                width += Width::NEWLINE + indentation.width();
                                Cow::Owned(format!("{NEWLINE}{indentation}"))
                            }
                        }
                        CommentPosition::NextLine(newline_count) => {
                            match newline_count {
                                NewlineCount::NoneOrAdopted => {
                                    edits.insert(
                                        last_reusable_whitespace_range
                                            .as_ref()
                                            .map(|it| it.start)
                                            .or(*offset_override)
                                            .unwrap_or(item.data.span.start),
                                        NEWLINE,
                                    );
                                    width += Width::NEWLINE + indentation.width();
                                }
                                NewlineCount::Owned(_) => width += indentation.width(),
                            }
                            Cow::Owned(indentation.to_string())
                        }
                    };
                    if let Some(range) = last_reusable_whitespace_range {
                        edits.change(range, space);
                    } else {
                        edits.insert(offset_override.unwrap_or(item.data.span.start), space);
                    }
                    if let Some(offset_override) = offset_override {
                        // Adopted comments don't exist at the target location
                        // yet, so their text is inserted explicitly.
                        edits.insert(*offset_override, format!("#{comment}"));
                    }

                    width += comment_width;
                    comment_position = CommentPosition::NextLine(NewlineCount::NoneOrAdopted);
                    last_reusable_whitespace_range = None;
                    // TODO: Handle multiple comments on the same line.
                }
                _ => unreachable!(),
            }
        }
        assert!(
            last_reusable_whitespace_range.is_none(),
            "The last CST must be a comment, so we should have consumed all whitespace.",
        );
        width
    }
}

// Appends `source` to `target`, avoiding clones when `target` is empty.
fn append<'a>(source: Cow<'a, [Cst]>, target: &mut Cow<'a, [Cst]>) {
    if source.is_empty() {
        return;
    }

    if target.is_empty() {
        *target = source;
    } else {
        match source {
            Cow::Borrowed(source) => target.to_mut().extend_from_slice(source),
            Cow::Owned(mut source) => target.to_mut().append(&mut source),
        }
    }
}
// Prepends `source` before the existing contents of `target`.
fn prepend<'a>(source: Cow<'a, [Cst]>, target: &mut Cow<'a, [Cst]>) {
    if source.is_empty() {
        return;
    }

    if target.is_empty() {
        *target = source;
    } else {
        target
            .to_mut()
            .splice(0..0, source.as_ref().iter().cloned());
    }
}

// Returns the first and last item of `iterator` (equal for a single item), or
// `None` for an empty iterator.
fn first_and_last<I: IntoIterator>(
    iterator: I,
) -> Option<(<I as IntoIterator>::Item, <I as IntoIterator>::Item)>
where
    <I as IntoIterator>::Item: Copy,
{
    let mut result = None;
    for item in iterator {
        let first = result.map(|(first, _)| first).unwrap_or(item);
        result = Some((first, item));
    }
    result
}

#[cfg(test)]
mod test {
    use super::TrailingWhitespace;
    use crate::{
        existing_whitespace::TrailingWithIndentationConfig,
        format::{format_cst, FormattingInfo},
        text_edits::TextEdits,
        width::{Indentation, Width},
    };
    use candy_frontend::{cst::CstKind, rcst_to_cst::RcstsToCstsExt, string_to_rcst::parse_rcst};

    #[test]
    fn test_empty_trailing() {
        test("foo End", TrailingWhitespace::None, "foo");
        test("foo End", TrailingWhitespace::None, "foo");
    }
    #[test]
    fn test_trailing_with_space() {
        test("foo End", TrailingWhitespace::Space, "foo ");
        test("foo End", TrailingWhitespace::Space, "foo ");
    }
    #[test]
    fn test_trailing_with_indentation() {
        test("foo\n End", Indentation::from(1), "foo\n ");
        test("foo \n End", Indentation::from(1), "foo\n ");
        test("foo End", Indentation::from(2), "foo\n ");
        test("foo \n End", Indentation::from(2), "foo\n ");

        // Comments
        test("foo# abc\n End", Indentation::from(1), "foo # abc\n ");
        test("foo # abc\n End", Indentation::from(1), "foo # abc\n ");
        test("foo # abc\n End", Indentation::from(1), "foo # abc\n ");
        test(
            "foo\n # abc\n End",
            Indentation::from(1),
            "foo\n # abc\n ",
        );
        test("foo # abc \n End", Indentation::from(1), "foo # abc\n ");
    }

    /// Parses `source`, formats the call receiver, applies `trailing` to its
    /// trailing whitespace, and checks the edited text equals `expected`.
    #[track_caller]
    fn test(source: &str, trailing: impl Into<TrailingWhitespace>, expected: &str) {
        let mut csts = parse_rcst(source).to_csts();
        assert_eq!(csts.len(), 1);

        let cst = match csts.pop().unwrap().kind {
            CstKind::Call { receiver, .. } => receiver,
            _ => panic!("Expected a call"),
        };
        let reduced_source = cst.to_string();

        let mut edits = TextEdits::new(reduced_source);
        let (child_width, whitespace) = format_cst(
            &mut edits,
            Width::default(),
            &cst,
            &FormattingInfo::default(),
        )
        .split();
        match trailing.into() {
            TrailingWhitespace::None => _ = whitespace.into_empty_trailing(&mut edits),
            TrailingWhitespace::Space => _ = whitespace.into_trailing_with_space(&mut edits),
            TrailingWhitespace::Indentation(indentation) => {
                _ = whitespace.into_trailing_with_indentation(
                    &mut edits,
                    &TrailingWithIndentationConfig::Trailing {
                        previous_width: child_width,
                        indentation,
                    },
                )
            }
        };
        assert_eq!(edits.apply(), expected);
    }
}
#![cfg_attr(feature = "use_std", allow(dead_code))]

use core::marker;
use core::mem;
use core::sync::atomic::AtomicUsize;
#[allow(deprecated)]
use core::sync::atomic::ATOMIC_USIZE_INIT;
use core::sync::atomic::Ordering::{SeqCst, Relaxed};

use super::{BorrowedTask, NotifyHandle};

// `no_std` stand-ins for the std task-local map; they carry no data.
pub struct LocalKey;
pub struct LocalMap;
pub fn local_map() -> LocalMap { LocalMap }

#[derive(Copy, Clone)]
pub struct BorrowedEvents<'a>(marker::PhantomData<&'a ()>);

// A borrowed notifier: a factory closure producing a `NotifyHandle` plus the
// id to notify with. `to_owned` materializes it into a `TaskUnpark`.
#[derive(Copy, Clone)]
pub struct BorrowedUnpark<'a> {
    f: &'a Fn() -> NotifyHandle,
    id: usize,
}

// An owned notifier: clones/drops its id through the handle (see `Clone` and
// `Drop` impls below).
pub struct TaskUnpark {
    handle: NotifyHandle,
    id: usize,
}

#[derive(Clone)]
pub struct UnparkEvents;

impl<'a> BorrowedEvents<'a> {
    pub fn new() -> BorrowedEvents<'a> {
        BorrowedEvents(marker::PhantomData)
    }

    pub fn to_owned(&self) -> UnparkEvents {
        UnparkEvents
    }
}

impl<'a> BorrowedUnpark<'a> {
    #[inline]
    pub fn new(f: &'a Fn() -> NotifyHandle, id: usize) -> BorrowedUnpark<'a> {
        BorrowedUnpark { f: f, id: id }
    }

    #[inline]
    pub fn to_owned(&self) -> TaskUnpark {
        // Materialize the handle and register a fresh reference to the id.
        let handle = (self.f)();
        let id = handle.clone_id(self.id);
        TaskUnpark { handle: handle, id: id }
    }
}

impl UnparkEvents {
    pub fn notify(&self) {}

    pub fn will_notify(&self, _other: &BorrowedEvents) -> bool {
        true
    }
}

impl TaskUnpark {
    pub fn notify(&self) {
        self.handle.notify(self.id);
    }

    // Two unparks are equivalent when both the id and the underlying handle
    // match; avoids redundant wakeups.
    pub fn will_notify(&self, other: &BorrowedUnpark) -> bool {
        self.id == other.id && self.handle.inner == (other.f)().inner
    }
}

impl Clone for TaskUnpark {
    fn clone(&self) -> TaskUnpark {
        let handle = self.handle.clone();
        let id = handle.clone_id(self.id);
        TaskUnpark { handle: handle, id: id }
    }
}

impl Drop for TaskUnpark {
    fn drop(&mut self) {
        // Release the id reference acquired via `clone_id`.
        self.handle.drop_id(self.id);
    }
}

// Function pointers for the "thread-local" getter/setter, stored as usize.
// 0 means "not yet initialized" (see `init` below).
#[allow(deprecated)]
static GET: AtomicUsize = ATOMIC_USIZE_INIT;
#[allow(deprecated)]
static SET: AtomicUsize = ATOMIC_USIZE_INIT;

/// Initialize the `futures` task system.
///
/// This function is an unsafe low-level implementation detail typically only
/// used by crates using `futures` in `no_std` context. Users of this crate
/// who also use the standard library never need to invoke this function.
///
/// The task system in the `futures` crate relies on some notion of "local
/// storage" for the running thread and/or context. The `task::current` function
/// can get invoked in any context, for example, and needs to be able to return
/// a `Task`. Typically with the standard library this is supported with
/// thread-local-storage, but this is not available in `no_std` contexts!
///
/// This function is provided to allow `no_std` contexts to continue to be able
/// to use the standard task system in this crate. The functions provided here
/// will be used as-if they were thread-local-storage getters/setters. The `get`
/// function provided is used to retrieve the current thread-local value of the
/// task system's pointer, returning null if not initialized. The `set` function
/// updates the value of the pointer.
///
/// # Return value
///
/// This function will return whether initialization succeeded or not. This
/// function can be called concurrently and only the first invocation will
/// succeed. If `false` is returned then the `get` and `set` pointers provided
/// were *not* registered for use with the task system, but if `true` was
/// provided then they will be called when the task system is used.
///
/// Note that while safe to call concurrently it's recommended to still perform
/// external synchronization when calling this function. This task system is
/// not guaranteed to be ready to go until a call to this function returns
/// `true`. In other words, if you call this function and see `false`, the
/// task system may not be ready to go as another thread may still be calling
/// `init`.
///
/// # Unsafety
///
/// This function is unsafe due to the requirements on the behavior of the
/// `get` and `set` functions. The pointers returned from these functions must
/// reflect the semantics specified above and must also be thread-local,
/// depending on the definition of a "thread" in the calling context.
pub unsafe fn init(get: fn() -> *mut u8, set: fn(*mut u8)) -> bool {
    if GET.compare_exchange(0, get as usize, SeqCst, SeqCst).is_ok() {
        // Only the winner of the CAS publishes the setter; losers return false.
        SET.store(set as usize, SeqCst);
        true
    } else {
        false
    }
}

/// Return whether the caller is running in a task (and so can use task_local!).
pub fn is_in_task() -> bool {
    if let Some(ptr) = get_ptr() {
        !ptr.is_null()
    } else {
        false
    }
}

/// Calls the registered getter, or returns `None` if `init` hasn't run.
#[inline]
pub fn get_ptr() -> Option<*mut u8> {
    match GET.load(Relaxed) {
        0 => None,
        // Reconstruct the fn pointer stored as usize by `init` and call it.
        n => Some(unsafe { mem::transmute::<usize, fn() -> *mut u8>(n)() }),
    }
}

#[cfg(feature = "use_std")]
#[inline]
pub fn is_get_ptr(f: usize) -> bool {
    GET.load(Relaxed) == f
}

/// Installs `task` as the current task pointer for the duration of `f`,
/// restoring the previous pointer afterwards (even on panic, via the `Reset`
/// drop guard). Panics if `init` has not been called.
pub fn set<'a, F, R>(task: &BorrowedTask<'a>, f: F) -> R
    where F: FnOnce() -> R
{
    let set = match SET.load(Relaxed) {
        0 => panic!("not initialized"),
        n => unsafe {
            mem::transmute::<usize, fn(*mut u8)>(n)
        },
    };

    // Drop guard that restores the saved pointer when it goes out of scope.
    struct Reset(fn(*mut u8), *mut u8);

    impl Drop for Reset {
        #[inline]
        fn drop(&mut self) {
            (self.0)(self.1);
        }
    }

    let _reset = Reset(set, get_ptr().unwrap());
    set(task as *const _ as *mut u8);
    f()
}
#[doc = r"Value read from the register"] pub struct R { bits: u8, } #[doc = r"Value to write to the register"] pub struct W { bits: u8, } impl super::RXCSRH1 { #[doc = r"Modifies the contents of the register"] #[inline(always)] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); self.register.set(f(&R { bits }, &mut W { bits }).bits); } #[doc = r"Reads the contents of the register"] #[inline(always)] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r"Writes to the register"] #[inline(always)] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { self.register.set( f(&mut W { bits: Self::reset_value(), }) .bits, ); } #[doc = r"Reset value of the register"] #[inline(always)] pub const fn reset_value() -> u8 { 0 } #[doc = r"Writes the reset value to the register"] #[inline(always)] pub fn reset(&self) { self.register.set(Self::reset_value()) } } #[doc = r"Value of the field"] pub struct USB_RXCSRH1_INCOMPRXR { bits: bool, } impl USB_RXCSRH1_INCOMPRXR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _USB_RXCSRH1_INCOMPRXW<'a> { w: &'a mut W, } impl<'a> _USB_RXCSRH1_INCOMPRXW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 0); self.w.bits |= ((value as u8) & 1) << 0; self.w } } #[doc = r"Value of the field"] pub struct USB_RXCSRH1_DTR { bits: bool, } impl USB_RXCSRH1_DTR { 
#[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _USB_RXCSRH1_DTW<'a> { w: &'a mut W, } impl<'a> _USB_RXCSRH1_DTW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 1); self.w.bits |= ((value as u8) & 1) << 1; self.w } } #[doc = r"Value of the field"] pub struct USB_RXCSRH1_DTWER { bits: bool, } impl USB_RXCSRH1_DTWER { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _USB_RXCSRH1_DTWEW<'a> { w: &'a mut W, } impl<'a> _USB_RXCSRH1_DTWEW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 2); self.w.bits |= ((value as u8) & 1) << 2; self.w } } #[doc = r"Value of the field"] pub struct USB_RXCSRH1_DMAMODR { bits: bool, } impl USB_RXCSRH1_DMAMODR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if 
the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _USB_RXCSRH1_DMAMODW<'a> { w: &'a mut W, } impl<'a> _USB_RXCSRH1_DMAMODW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 3); self.w.bits |= ((value as u8) & 1) << 3; self.w } } #[doc = r"Value of the field"] pub struct USB_RXCSRH1_PIDERRR { bits: bool, } impl USB_RXCSRH1_PIDERRR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _USB_RXCSRH1_PIDERRW<'a> { w: &'a mut W, } impl<'a> _USB_RXCSRH1_PIDERRW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 4); self.w.bits |= ((value as u8) & 1) << 4; self.w } } #[doc = r"Value of the field"] pub struct USB_RXCSRH1_DMAENR { bits: bool, } impl USB_RXCSRH1_DMAENR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` 
if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _USB_RXCSRH1_DMAENW<'a> { w: &'a mut W, } impl<'a> _USB_RXCSRH1_DMAENW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 5); self.w.bits |= ((value as u8) & 1) << 5; self.w } } #[doc = r"Value of the field"] pub struct USB_RXCSRH1_AUTORQR { bits: bool, } impl USB_RXCSRH1_AUTORQR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _USB_RXCSRH1_AUTORQW<'a> { w: &'a mut W, } impl<'a> _USB_RXCSRH1_AUTORQW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 6); self.w.bits |= ((value as u8) & 1) << 6; self.w } } #[doc = r"Value of the field"] pub struct USB_RXCSRH1_AUTOCLR { bits: bool, } impl USB_RXCSRH1_AUTOCLR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub 
struct _USB_RXCSRH1_AUTOCLW<'a> { w: &'a mut W, } impl<'a> _USB_RXCSRH1_AUTOCLW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 7); self.w.bits |= ((value as u8) & 1) << 7; self.w } } #[doc = r"Value of the field"] pub struct USB_RXCSRH1_DISNYETR { bits: bool, } impl USB_RXCSRH1_DISNYETR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _USB_RXCSRH1_DISNYETW<'a> { w: &'a mut W, } impl<'a> _USB_RXCSRH1_DISNYETW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 4); self.w.bits |= ((value as u8) & 1) << 4; self.w } } #[doc = r"Value of the field"] pub struct USB_RXCSRH1_ISOR { bits: bool, } impl USB_RXCSRH1_ISOR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _USB_RXCSRH1_ISOW<'a> { w: &'a mut W, } impl<'a> _USB_RXCSRH1_ISOW<'a> { #[doc = r"Sets the field bit"] 
#[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 6); self.w.bits |= ((value as u8) & 1) << 6; self.w } } impl R { #[doc = r"Value of the register as raw bits"] #[inline(always)] pub fn bits(&self) -> u8 { self.bits } #[doc = "Bit 0 - Incomplete RX Transmission Status"] #[inline(always)] pub fn usb_rxcsrh1_incomprx(&self) -> USB_RXCSRH1_INCOMPRXR { let bits = ((self.bits >> 0) & 1) != 0; USB_RXCSRH1_INCOMPRXR { bits } } #[doc = "Bit 1 - Data Toggle"] #[inline(always)] pub fn usb_rxcsrh1_dt(&self) -> USB_RXCSRH1_DTR { let bits = ((self.bits >> 1) & 1) != 0; USB_RXCSRH1_DTR { bits } } #[doc = "Bit 2 - Data Toggle Write Enable"] #[inline(always)] pub fn usb_rxcsrh1_dtwe(&self) -> USB_RXCSRH1_DTWER { let bits = ((self.bits >> 2) & 1) != 0; USB_RXCSRH1_DTWER { bits } } #[doc = "Bit 3 - DMA Request Mode"] #[inline(always)] pub fn usb_rxcsrh1_dmamod(&self) -> USB_RXCSRH1_DMAMODR { let bits = ((self.bits >> 3) & 1) != 0; USB_RXCSRH1_DMAMODR { bits } } #[doc = "Bit 4 - PID Error"] #[inline(always)] pub fn usb_rxcsrh1_piderr(&self) -> USB_RXCSRH1_PIDERRR { let bits = ((self.bits >> 4) & 1) != 0; USB_RXCSRH1_PIDERRR { bits } } #[doc = "Bit 5 - DMA Request Enable"] #[inline(always)] pub fn usb_rxcsrh1_dmaen(&self) -> USB_RXCSRH1_DMAENR { let bits = ((self.bits >> 5) & 1) != 0; USB_RXCSRH1_DMAENR { bits } } #[doc = "Bit 6 - Auto Request"] #[inline(always)] pub fn usb_rxcsrh1_autorq(&self) -> USB_RXCSRH1_AUTORQR { let bits = ((self.bits >> 6) & 1) != 0; USB_RXCSRH1_AUTORQR { bits } } #[doc = "Bit 7 - Auto Clear"] #[inline(always)] pub fn usb_rxcsrh1_autocl(&self) -> USB_RXCSRH1_AUTOCLR { let bits = ((self.bits >> 7) & 1) != 0; USB_RXCSRH1_AUTOCLR { bits } } #[doc = "Bit 4 - Disable NYET"] #[inline(always)] pub fn 
usb_rxcsrh1_disnyet(&self) -> USB_RXCSRH1_DISNYETR { let bits = ((self.bits >> 4) & 1) != 0; USB_RXCSRH1_DISNYETR { bits } } #[doc = "Bit 6 - Isochronous Transfers"] #[inline(always)] pub fn usb_rxcsrh1_iso(&self) -> USB_RXCSRH1_ISOR { let bits = ((self.bits >> 6) & 1) != 0; USB_RXCSRH1_ISOR { bits } } } impl W { #[doc = r"Writes raw bits to the register"] #[inline(always)] pub unsafe fn bits(&mut self, bits: u8) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - Incomplete RX Transmission Status"] #[inline(always)] pub fn usb_rxcsrh1_incomprx(&mut self) -> _USB_RXCSRH1_INCOMPRXW { _USB_RXCSRH1_INCOMPRXW { w: self } } #[doc = "Bit 1 - Data Toggle"] #[inline(always)] pub fn usb_rxcsrh1_dt(&mut self) -> _USB_RXCSRH1_DTW { _USB_RXCSRH1_DTW { w: self } } #[doc = "Bit 2 - Data Toggle Write Enable"] #[inline(always)] pub fn usb_rxcsrh1_dtwe(&mut self) -> _USB_RXCSRH1_DTWEW { _USB_RXCSRH1_DTWEW { w: self } } #[doc = "Bit 3 - DMA Request Mode"] #[inline(always)] pub fn usb_rxcsrh1_dmamod(&mut self) -> _USB_RXCSRH1_DMAMODW { _USB_RXCSRH1_DMAMODW { w: self } } #[doc = "Bit 4 - PID Error"] #[inline(always)] pub fn usb_rxcsrh1_piderr(&mut self) -> _USB_RXCSRH1_PIDERRW { _USB_RXCSRH1_PIDERRW { w: self } } #[doc = "Bit 5 - DMA Request Enable"] #[inline(always)] pub fn usb_rxcsrh1_dmaen(&mut self) -> _USB_RXCSRH1_DMAENW { _USB_RXCSRH1_DMAENW { w: self } } #[doc = "Bit 6 - Auto Request"] #[inline(always)] pub fn usb_rxcsrh1_autorq(&mut self) -> _USB_RXCSRH1_AUTORQW { _USB_RXCSRH1_AUTORQW { w: self } } #[doc = "Bit 7 - Auto Clear"] #[inline(always)] pub fn usb_rxcsrh1_autocl(&mut self) -> _USB_RXCSRH1_AUTOCLW { _USB_RXCSRH1_AUTOCLW { w: self } } #[doc = "Bit 4 - Disable NYET"] #[inline(always)] pub fn usb_rxcsrh1_disnyet(&mut self) -> _USB_RXCSRH1_DISNYETW { _USB_RXCSRH1_DISNYETW { w: self } } #[doc = "Bit 6 - Isochronous Transfers"] #[inline(always)] pub fn usb_rxcsrh1_iso(&mut self) -> _USB_RXCSRH1_ISOW { _USB_RXCSRH1_ISOW { w: self } } }
use aoc::array_2d::Array2D; struct Instruction { x1: usize, y1: usize, x2: usize, y2: usize, kind: InstrKind, } enum InstrKind { On, Off, Toggle, } fn parse_instruction(input: &str) -> Instruction { use InstrKind::*; let mut words = input.split(' '); let w1 = words.next().unwrap(); let kind; match w1 { "turn" => { let w2 = words.next().unwrap(); match w2 { "on" => { kind = On; } "off" => { kind = Off; } etc => panic!("Unexpected: {etc}"), } } "toggle" => { kind = Toggle; } etc => panic!("Unexpected: {etc}"), } let mut pair1 = words.next().unwrap().split(','); let x1 = pair1.next().unwrap().parse().unwrap(); let y1 = pair1.next().unwrap().parse().unwrap(); // Skip "through" words.next().unwrap(); let mut pair2 = words.next().unwrap().split(','); let x2 = pair2.next().unwrap().parse().unwrap(); let y2 = pair2.next().unwrap().parse().unwrap(); Instruction { x1, y1, x2, y2, kind, } } fn part1(input: &str) -> i32 { let mut grid = Array2D::new_filled(1000, 1000, false); for line in input.lines() { use InstrKind::*; let instr = parse_instruction(line); match instr.kind { On => { for y in instr.y1..=instr.y2 { for x in instr.x1..=instr.x2 { *grid.get_mut(x, y) = true; } } } Off => { for y in instr.y1..=instr.y2 { for x in instr.x1..=instr.x2 { *grid.get_mut(x, y) = false; } } } Toggle => { for y in instr.y1..=instr.y2 { for x in instr.x1..=instr.x2 { let state = *grid.get(x, y); *grid.get_mut(x, y) = !state; } } } } } grid.flat_iter().filter(|l| **l).count() as i32 } fn part2(input: &str) -> i32 { let mut grid = Array2D::new_filled(1000, 1000, 0); for line in input.lines() { use InstrKind::*; let instr = parse_instruction(line); match instr.kind { On => { for y in instr.y1..=instr.y2 { for x in instr.x1..=instr.x2 { *grid.get_mut(x, y) += 1; } } } Off => { for y in instr.y1..=instr.y2 { for x in instr.x1..=instr.x2 { let value = *grid.get(x, y); if value > 0 { *grid.get_mut(x, y) -= 1; } } } } Toggle => { for y in instr.y1..=instr.y2 { for x in instr.x1..=instr.x2 { 
*grid.get_mut(x, y) += 2; } } } } } grid.flat_iter().sum() } aoc::tests! { fn part1: in => 377891; fn part2: in => 14110788; } aoc::main!(part1, part2);
/**
 * author: <Erfan Derakhshani>
 * author_email: <techerfan@gmail.com>
 */
use crate::syntax_kinds::SyntaxKind;

/// A single lexical token: its location (line, column), raw text and kind.
#[derive(PartialEq, PartialOrd, Debug)]
pub struct SyntaxToken {
    pub line: i32,
    pub position: i32,
    pub text: String,
    pub kind: SyntaxKind,
}

/// Pairs a keyword/operator spelling with the token kind it produces.
struct SyntaxDefiner {
    text: String,
    kind: SyntaxKind,
}

/// Public entry point: tokenizes the whole source text.
pub fn get_tokens(text: &str) -> Vec<SyntaxToken> {
    lexer(text)
}

/// Maps a single punctuation character to its token kind, if it has one.
/// Replaces the original ten-branch copy-pasted if/else chain.
fn single_char_kind(c: char) -> Option<SyntaxKind> {
    match c {
        ' ' => Some(SyntaxKind::WhitespaceToken),
        '{' => Some(SyntaxKind::OpenBracketToken),
        '}' => Some(SyntaxKind::CloseBracketToken),
        '=' => Some(SyntaxKind::AssignToken),
        '(' => Some(SyntaxKind::ParenthesesOpenToken),
        ')' => Some(SyntaxKind::ParenthesesCloseToken),
        ',' => Some(SyntaxKind::CommaToken),
        '[' => Some(SyntaxKind::OpenSquareBracketToken),
        ']' => Some(SyntaxKind::CloseSquareBracketToken),
        '^' => Some(SyntaxKind::CaretToken),
        _ => None,
    }
}

/// Scans the text line by line and returns the full token stream.
fn lexer(text: &str) -> Vec<SyntaxToken> {
    let mut tokens: Vec<SyntaxToken> = vec![];
    /* split the code line by line */
    for (line_number, line) in text.split('\n').enumerate() {
        let chars_vec: Vec<char> = line.chars().collect();
        let mut position = 0;
        while position < chars_vec.len() {
            let c = chars_vec[position];
            if let Some(kind) = single_char_kind(c) {
                tokens.push(SyntaxToken {
                    text: c.to_string(),
                    position: position as i32,
                    kind,
                    line: line_number as i32,
                });
            } else if c == '\r' {
                // Carriage returns (Windows line endings) are silently skipped.
            } else if c == '\'' {
                tokens.push(SyntaxToken {
                    text: c.to_string(),
                    position: position as i32,
                    kind: SyntaxKind::SingleQouteToken,
                    line: line_number as i32,
                });
                /* the existence of a character depends on the existence of the
                 * second ' mark. The bounds check fixes a panic that occurred
                 * when a line ended right after the opening quote. */
                if position + 2 < chars_vec.len() && chars_vec[position + 2] == '\'' {
                    tokens.push(SyntaxToken {
                        text: chars_vec[position + 1].to_string(),
                        position: (position + 1) as i32,
                        kind: SyntaxKind::CharToken,
                        line: line_number as i32,
                    });
                    tokens.push(SyntaxToken {
                        text: chars_vec[position + 2].to_string(),
                        position: (position + 2) as i32,
                        kind: SyntaxKind::SingleQouteToken,
                        line: line_number as i32,
                    });
                    position += 2;
                }
            } else if c == '"' {
                // Look for the matching closing quote on the same line.
                let mut closing = None;
                for j in position + 1..chars_vec.len() {
                    if chars_vec[j] == '"' {
                        closing = Some(j);
                        break;
                    }
                }
                tokens.push(SyntaxToken {
                    text: c.to_string(),
                    position: position as i32,
                    kind: SyntaxKind::QuotationToken,
                    line: line_number as i32,
                });
                if let Some(j) = closing {
                    // Tokenize the literal's contents (text plus %d/%c/%f).
                    let string_word: String = chars_vec[position + 1..j].iter().collect();
                    tokens.extend(get_string_tokens(string_word, position + 1, line_number));
                    tokens.push(SyntaxToken {
                        text: chars_vec[j].to_string(),
                        position: j as i32,
                        kind: SyntaxKind::QuotationToken,
                        line: line_number as i32,
                    });
                    position = j;
                }
                // An unterminated string emits only the opening quote token,
                // matching the original behavior.
            } else {
                tokens.extend(word_detector(&chars_vec, &mut position, line_number as i32));
            }
            position += 1;
        }
    }
    tokens
}

/// Consumes a maximal run of non-delimiter characters starting at `*position`
/// and classifies it as a number, keyword(s), or plain word.
/// On return, `*position` points at the last character consumed.
fn word_detector(
    chars_vec: &Vec<char>,
    position: &mut usize,
    line_number: i32,
) -> Vec<SyntaxToken> {
    let mut word = String::new();
    /* we know the current position has a wordly character because the caller
    already checked it. */
    word.push(chars_vec[*position]);
    for i in *position + 1..chars_vec.len() {
        let c = chars_vec[i];
        // Any punctuation or quote character terminates the word.
        if single_char_kind(c).is_some() || c == '\'' || c == '"' {
            *position = i - 1;
            break;
        }
        word.push(c);
        /* a word that runs to the end of the line is consumed completely. */
        if i == chars_vec.len() - 1 {
            *position = i;
        }
    }
    if is_number(word.chars().collect()) {
        vec![SyntaxToken {
            text: word,
            position: *position as i32,
            kind: SyntaxKind::NumberToken,
            line: line_number,
        }]
    } else {
        get_syntax(word, *position, line_number as usize)
    }
}

/// Returns `true` when the characters form an unsigned decimal number:
/// digits with at most one interior '.' (so "1.", ".5" and "" are rejected).
fn is_number(chars_vec: Vec<char>) -> bool {
    if chars_vec.is_empty() {
        // Defensive: callers never pass an empty word; the original accepted "".
        return false;
    }
    let last = chars_vec.len() - 1;
    let mut dots = 0;
    for (i, &c) in chars_vec.iter().enumerate() {
        if c.is_ascii_digit() {
            continue;
        }
        // Only interior positions may hold the (single) decimal point.
        if i == 0 || i == last || c != '.' {
            return false;
        }
        dots += 1;
        if dots > 1 {
            return false;
        }
    }
    true
}

/// Classifies a sub-segment of a word: number, nested syntax, or plain word.
/// The original duplicated this prefix/suffix logic four times and carried a
/// dead "empty recursion result" fallback (the recursion never returns empty).
fn classify_segment(segment: String, position: usize, line: usize) -> Vec<SyntaxToken> {
    if is_number(segment.chars().collect()) {
        vec![SyntaxToken {
            line: line as i32,
            position: position as i32,
            text: segment,
            kind: SyntaxKind::NumberToken,
        }]
    } else {
        get_syntax(segment, position, line)
    }
}

/// Splits `phrase` around the first known keyword/operator it contains and
/// recursively classifies what is left on either side. Falls back to a single
/// WordlyToken when no keyword matches. The definer order matters: longer
/// spellings ("&BM") must be tried before their prefixes ("&B").
fn get_syntax(phrase: String, position: usize, line: usize) -> Vec<SyntaxToken> {
    let syntax_arr = [
        SyntaxDefiner {
            text: "&BM".to_string(),
            kind: SyntaxKind::GTOEToken,
        },
        SyntaxDefiner {
            text: "&B".to_string(),
            kind: SyntaxKind::GTToken,
        },
        SyntaxDefiner {
            text: "&KM".to_string(),
            kind: SyntaxKind::LTOEToken,
        },
        SyntaxDefiner {
            text: "&K".to_string(),
            kind: SyntaxKind::LTToken,
        },
        SyntaxDefiner {
            text: "&MM".to_string(),
            kind: SyntaxKind::EqualToken,
        },
        SyntaxDefiner {
            text: "Jam".to_string(),
            kind: SyntaxKind::AdditionToken,
        },
        SyntaxDefiner {
            text: "YekiBala".to_string(),
            kind: SyntaxKind::IncrementToken,
        },
        SyntaxDefiner {
            text: "Kam".to_string(),
            kind: SyntaxKind::SubstractionToken,
        },
        SyntaxDefiner {
            text: "YekiPain".to_string(),
            kind: SyntaxKind::DecrementToken,
        },
        SyntaxDefiner {
            text: "Zarb".to_string(),
            kind: SyntaxKind::MultiplicationToken,
        },
        SyntaxDefiner {
            text: "Tagsim".to_string(),
            kind: SyntaxKind::DivisionToken,
        },
        SyntaxDefiner {
            text: "Bagimonde".to_string(),
            kind: SyntaxKind::ModulusToken,
        },
        SyntaxDefiner {
            text: "Benevis".to_string(),
            kind: SyntaxKind::PrintToken,
        },
        SyntaxDefiner {
            text: "Begir".to_string(),
            kind: SyntaxKind::ScanToken,
        },
        SyntaxDefiner {
            text: "agar".to_string(),
            kind: SyntaxKind::ConditionToken,
        },
        SyntaxDefiner {
            text: "ta".to_string(),
            kind: SyntaxKind::LoopToken,
        },
        SyntaxDefiner {
            text: "Sahih".to_string(),
            kind: SyntaxKind::IntegerDefToken,
        },
        SyntaxDefiner {
            text: "Ashari".to_string(),
            kind: SyntaxKind::FloatDefToken,
        },
        SyntaxDefiner {
            text: "Harf".to_string(),
            kind: SyntaxKind::CharacterDefToken,
        },
    ];
    for syntax in syntax_arr.iter() {
        if let Some(index) = phrase.find(&syntax.text) {
            let mut tokens: Vec<SyntaxToken> = vec![];
            if index > 0 {
                tokens.extend(classify_segment(phrase[0..index].to_string(), position, line));
            }
            tokens.push(SyntaxToken {
                line: line as i32,
                position: (position + index) as i32,
                text: syntax.text.clone(),
                kind: syntax.kind.copy(),
            });
            let tail_start = index + syntax.text.len();
            // FIX: the original compared against `phrase.len() - 1`, which
            // silently dropped one-character suffixes (e.g. the "5" in "Jam5").
            if tail_start < phrase.len() {
                tokens.extend(classify_segment(
                    phrase[tail_start..].to_string(),
                    position + tail_start,
                    line,
                ));
            }
            return tokens;
        }
    }
    // No keyword found: the whole phrase is an ordinary identifier.
    vec![SyntaxToken {
        line: line as i32,
        position: position as i32,
        text: phrase,
        kind: SyntaxKind::WordlyToken,
    }]
}

/// Tokenizes the inside of a string literal, extracting %d/%c/%f format
/// placeholders; everything between placeholders becomes a StringToken.
fn get_string_tokens(phrase: String, position: usize, line: usize) -> Vec<SyntaxToken> {
    let syntax_arr = [
        SyntaxDefiner {
            text: "%d".to_string(),
            kind: SyntaxKind::StringNumToken,
        },
        SyntaxDefiner {
            text: "%c".to_string(),
            kind: SyntaxKind::StringCharToken,
        },
        SyntaxDefiner {
            text: "%f".to_string(),
            kind: SyntaxKind::StringFloatToken,
        },
    ];
    for syntax in syntax_arr.iter() {
        if let Some(index) = phrase.find(&syntax.text) {
            let mut tokens: Vec<SyntaxToken> = vec![];
            if index > 0 {
                tokens.extend(get_string_tokens(
                    phrase[0..index].to_string(),
                    position,
                    line,
                ));
            }
            tokens.push(SyntaxToken {
                line: line as i32,
                position: (position + index) as i32,
                text: syntax.text.clone(),
                kind: syntax.kind.copy(),
            });
            let tail_start = index + syntax.text.len();
            // Same off-by-one fix as get_syntax: keep one-character tails.
            if tail_start < phrase.len() {
                tokens.extend(get_string_tokens(
                    phrase[tail_start..].to_string(),
                    position + tail_start,
                    line,
                ));
            }
            return tokens;
        }
    }
    // No placeholder: the whole phrase is literal string text.
    vec![SyntaxToken {
        line: line as i32,
        position: position as i32,
        text: phrase,
        kind: SyntaxKind::StringToken,
    }]
}
use futures::prelude::*;
use futures::compat::Compat01As03;
use futures01;

use std::io;
use std::net::SocketAddr;
use std::pin::Pin;
use std::task::{Context, Poll};

/// Adapter exposing a tokio (futures 0.1) TCP stream through the
/// `runtime` crate's std-futures (0.3) interfaces.
#[derive(Debug)]
pub(crate) struct TcpStream {
    pub tokio_stream: tokio::net::tcp::TcpStream,
}

/// Adapter exposing a tokio (futures 0.1) TCP listener through the
/// `runtime` crate's std-futures (0.3) interfaces.
#[derive(Debug)]
pub(crate) struct TcpListener {
    pub tokio_listener: tokio::net::tcp::TcpListener,
}

impl runtime_raw::TcpStream for TcpStream {
    // Translates tokio's 0.1-style readiness (`Async`) into a 0.3 `Poll`.
    // NOTE(review): `_cx` is unused — the 0.3 waker is never handed to tokio's
    // reactor here; presumably wakeups come from tokio's own 0.1 task
    // registration done by `poll_write_ready` — confirm.
    fn poll_write_ready(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        match self.tokio_stream.poll_write_ready()? {
            futures01::Async::Ready(_) => Poll::Ready(Ok(())),
            futures01::Async::NotReady => Poll::Pending,
        }
    }

    // Same translation for read readiness; only the readable interest is
    // polled (not HUP/error), via an explicit mio readiness mask.
    fn poll_read_ready(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        let mask = mio::Ready::readable();
        match self.tokio_stream.poll_read_ready(mask)? {
            futures01::Async::Ready(_) => Poll::Ready(Ok(())),
            futures01::Async::NotReady => Poll::Pending,
        }
    }

    // Always reports "no pending error"; the underlying socket error queue is
    // not consulted.
    fn take_error(&self) -> io::Result<Option<io::Error>> {
        Ok(None)
    }

    fn local_addr(&self) -> io::Result<SocketAddr> {
        self.tokio_stream.local_addr()
    }

    fn peer_addr(&self) -> io::Result<SocketAddr> {
        self.tokio_stream.peer_addr()
    }

    fn shutdown(&self, how: std::net::Shutdown) -> std::io::Result<()> {
        self.tokio_stream.shutdown(how)
    }

    #[cfg(unix)]
    fn as_raw_fd(&self) -> std::os::unix::io::RawFd {
        use std::os::unix::io::AsRawFd;
        self.tokio_stream.as_raw_fd()
    }
}

impl AsyncRead for TcpStream {
    // Bridges the read through `Compat01As03`. A fresh compat wrapper is built
    // on every poll; it wraps `&tokio_stream` (tokio streams support I/O
    // through a shared reference), so no state is lost between polls.
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        mut buf: &mut [u8],
    ) -> Poll<io::Result<usize>> {
        let mut stream = Compat01As03::new(&self.tokio_stream);
        Pin::new(&mut stream).poll_read(cx, &mut buf)
    }
}

impl AsyncWrite for TcpStream {
    // Write path mirrors the read path: per-poll compat wrapper over a shared
    // reference to the tokio stream.
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<io::Result<usize>> {
        let mut stream = Compat01As03::new(&self.tokio_stream);
        Pin::new(&mut stream).poll_write(cx, &buf)
    }

    fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        let mut stream = Compat01As03::new(&self.tokio_stream);
        Pin::new(&mut stream).poll_flush(cx)
    }

    fn poll_close(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        let mut stream = Compat01As03::new(&self.tokio_stream);
        Pin::new(&mut stream).poll_close(cx)
    }
}

impl runtime_raw::TcpListener for TcpListener {
    fn local_addr(&self) -> io::Result<SocketAddr> {
        self.tokio_listener.local_addr()
    }

    // Accepts one pending connection, boxing the accepted stream behind the
    // `runtime_raw::TcpStream` trait object. The peer address returned by
    // tokio's accept is discarded.
    fn poll_accept(
        self: Pin<&mut Self>,
        _cx: &mut Context<'_>,
    ) -> Poll<io::Result<Pin<Box<dyn runtime_raw::TcpStream>>>> {
        let listener = &mut self.get_mut().tokio_listener;
        match listener.poll_accept()? {
            futures01::Async::Ready((tokio_stream, _)) => {
                let stream = Box::pin(TcpStream { tokio_stream });
                Poll::Ready(Ok(stream))
            }
            futures01::Async::NotReady => Poll::Pending,
        }
    }

    #[cfg(unix)]
    fn as_raw_fd(&self) -> std::os::unix::io::RawFd {
        use std::os::unix::io::AsRawFd;
        self.tokio_listener.as_raw_fd()
    }
}
// Copyright (c) The Starcoin Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{get_available_port_multi, BaseConfig, ChainNetwork, ConfigModule, StarcoinOpt}; use anyhow::Result; use serde::{Deserialize, Serialize}; use std::net::SocketAddr; use std::path::{Path, PathBuf}; const DEFAULT_MAX_REQUEST_BODY_SIZE: usize = 10 * 1024 * 1024; //10M #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(default, deny_unknown_fields)] pub struct RpcConfig { /// The ipc file name. ipc_file: PathBuf, /// The address for http rpc. pub http_address: Option<SocketAddr>, /// The address for tcp rpc notification. pub tcp_address: Option<SocketAddr>, /// The address for websocket rpc notification. pub ws_address: Option<SocketAddr>, pub max_request_body_size: usize, pub threads: Option<usize>, #[serde(skip)] ipc_file_path: Option<PathBuf>, } impl Default for RpcConfig { fn default() -> Self { Self::default_with_net(ChainNetwork::default()) } } impl RpcConfig { pub fn get_ipc_file(&self) -> &Path { self.ipc_file_path .as_ref() .expect("config should init first.") } } impl ConfigModule for RpcConfig { fn default_with_net(_net: ChainNetwork) -> Self { Self { ipc_file: "starcoin.ipc".into(), http_address: None, ws_address: None, tcp_address: None, max_request_body_size: DEFAULT_MAX_REQUEST_BODY_SIZE, threads: None, ipc_file_path: None, } } fn random(&mut self, base: &BaseConfig) { let ports = get_available_port_multi(3); self.http_address = Some( format!("127.0.0.1:{}", ports[0]) .parse::<SocketAddr>() .unwrap(), ); self.tcp_address = Some( format!("127.0.0.1:{}", ports[1]) .parse::<SocketAddr>() .unwrap(), ); self.ws_address = Some( format!("127.0.0.1:{}", ports[2]) .parse::<SocketAddr>() .unwrap(), ); self.ipc_file_path = Some(base.data_dir().join(self.ipc_file.as_path())) } fn load(&mut self, base: &BaseConfig, _opt: &StarcoinOpt) -> Result<()> { self.ipc_file_path = Some(base.data_dir().join(self.ipc_file.as_path())); Ok(()) } }
use std::thread; // static NTHREADS: i32 = 10; fn main() { let mut children = vec![]; // for i in 0..NTHREADS { // children.push(thread::spawn(move || { // println!("This is thread number {}", i); // })); // } // for child in children { // let _ = child.join(); // } let data = "86967897737416471853297327050364959 11861322575564723963297542624962850 70856234701860851907960690014725639 38397966707106094172783238747669219 52380795257888236525459303330302837 58495327135744041048897885734297812 69920216438980873548808413720956532 16278424637452589860345374828574668"; // let mut children: Vec<u32> = Vec::new(); let chunked_data = data.split_whitespace(); for (i, data_chunk) in chunked_data.enumerate() { println!("chunk {} is: {}", i, data_chunk); children.push(thread::spawn(move || -> u32 { let result = data_chunk .chars() .map(|c| c.to_digit(10).expect("should be a digit")) .sum(); println!("processed segment {}, result={}", i, result); result })); } let mut intermediate_sums = vec![]; for child in children { let int_sum = child.join().unwrap(); intermediate_sums.push(int_sum) } let final_result = intermediate_sums.iter().sum::<u32>(); println!("Final result is {}!", final_result) }
use std::fmt; use std::error; use std::str; use std::io::Write; use std::mem; use xmlparser::{ self, TextPos, Reference, Stream, StrSpan, }; use { NS_XMLNS_URI, NS_XML_URI, Attribute, Document, ExpandedNameOwned, Namespaces, Node, NodeData, NodeId, NodeKind, PI, }; /// A list of all possible errors. #[derive(Debug)] pub enum Error { /// The `xmlns:xml` attribute must have an <http://www.w3.org/XML/1998/namespace> URI. InvalidXmlPrefixUri(TextPos), /// Only the `xmlns:xml` attribute can have the <http://www.w3.org/XML/1998/namespace> URI. UnexpectedXmlUri(TextPos), /// The <http://www.w3.org/2000/xmlns/> URI must not be declared. UnexpectedXmlnsUri(TextPos), /// `xmlns` can't be used as an element prefix. InvalidElementNamePrefix(TextPos), /// A namespace was already defined on this element. DuplicatedNamespace(String, TextPos), /// Incorrect tree structure. #[allow(missing_docs)] UnexpectedCloseTag { expected: String, actual: String, pos: TextPos }, /// Entity value starts with a close tag. /// /// Example: /// ```xml /// <!DOCTYPE test [ <!ENTITY p '</p>'> ]> /// <root>&p;</root> /// ``` UnexpectedEntityCloseTag(TextPos), /// A reference to an entity that was not defined in the DTD. UnknownEntityReference(String, TextPos), /// Nested entity references are not supported. /// /// Example: /// ```xml /// <!DOCTYPE test [ /// <!ENTITY a '&b;'> /// <!ENTITY b 'text'> /// ]> /// <e a='&a;'/> /// ``` NestedEntityReference(TextPos), /// An element has a duplicated attributes. /// /// This also includes namespaces resolving. /// So an element like this will lead to an error. /// ```xml /// <e xmlns:n1='http://www.w3.org' xmlns:n2='http://www.w3.org' n1:a='b1' n2:a='b2'/> /// ``` DuplicatedAttribute(String, TextPos), /// The XML document must have at least one element. NoRootNode, /// Errors detected by the `xmlparser` crate. 
ParserError(xmlparser::Error), } impl From<xmlparser::Error> for Error { fn from(e: xmlparser::Error) -> Self { Error::ParserError(e) } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Error::InvalidXmlPrefixUri(pos) => { write!(f, "'xml' namespace prefix mapped to wrong URI at {}", pos) } Error::UnexpectedXmlUri(pos) => { write!(f, "the 'xml' namespace URI is used for not 'xml' prefix at {}", pos) } Error::UnexpectedXmlnsUri(pos) => { write!(f, "the 'xmlns' URI is used at {}, but it must not be declared", pos) } Error::InvalidElementNamePrefix(pos) => { write!(f, "the 'xmlns' prefix is used at {}, but it must not be", pos) } Error::DuplicatedNamespace(ref name, pos) => { write!(f, "namespace '{}' at {} is already defined", name, pos) } Error::UnexpectedCloseTag { ref expected, ref actual, pos } => { write!(f, "expected '{}' tag, not '{}' at {}", expected, actual, pos) } Error::UnexpectedEntityCloseTag(pos) => { write!(f, "unexpected close tag at {}", pos) } Error::UnknownEntityReference(ref name, pos) => { write!(f, "unknown entity reference '{}' at {}", name, pos) } Error::NestedEntityReference(pos) => { write!(f, "nested entity reference detected at {}, it's not supported", pos) } Error::DuplicatedAttribute(ref name, pos) => { write!(f, "attribute '{}' at {} is already defined", name, pos) } Error::NoRootNode => { write!(f, "the document does not have a root node") } Error::ParserError(ref err) => { write!(f, "{}", err) } } } } impl error::Error for Error { fn description(&self) -> &str { "an XML parsing error" } } struct AttributeData<'d> { prefix: StrSpan<'d>, prefix_str: &'d str, local: StrSpan<'d>, local_str: &'d str, value_pos: usize, value: String, } impl<'d> Document<'d> { /// Parses the input XML string. /// /// We do not support `&[u8]` or `Reader` because the input must be an already allocated /// UTF-8 string. 
/// /// # Examples /// /// ``` /// let doc = roxmltree::Document::parse("<e/>").unwrap(); /// assert_eq!(doc.descendants().count(), 2); // root node + `e` element node /// ``` pub fn parse(text: &str) -> Result<Document, Error> { parse(text) } fn append(&mut self, parent_id: NodeId, kind: NodeKind<'d>, orig_pos: usize) -> NodeId { let new_child_id = NodeId(self.nodes.len()); self.nodes.push(NodeData { parent: Some(parent_id), prev_sibling: None, next_sibling: None, children: None, kind, orig_pos, }); let last_child_id = self.nodes[parent_id.0].children.map(|(_, id)| id); self.nodes[new_child_id.0].prev_sibling = last_child_id; if let Some(id) = last_child_id { self.nodes[id.0].next_sibling = Some(new_child_id); } self.nodes[parent_id.0].children = Some( if let Some((first_child_id, _)) = self.nodes[parent_id.0].children { (first_child_id, new_child_id) } else { (new_child_id, new_child_id) } ); new_child_id } fn get(&self, id: NodeId) -> Node { Node { id, d: &self.nodes[id.0], doc: self } } } struct ParserData<'d> { attrs_start_idx: usize, ns_start_idx: usize, tmp_attrs: Vec<AttributeData<'d>>, entities: Vec<(&'d str, StrSpan<'d>)>, u_buffer: Vec<u8>, prev_node_type: Option<xmlparser::Token<'d>>, } #[derive(Clone, Copy)] struct TagNameSpan<'d> { prefix: StrSpan<'d>, name: StrSpan<'d>, } impl<'d> TagNameSpan<'d> { fn new(prefix: StrSpan<'d>, name: StrSpan<'d>) -> Self { Self { prefix, name } } } fn parse(text: &str) -> Result<Document, Error> { let mut pd = ParserData { attrs_start_idx: 0, ns_start_idx: 2, tmp_attrs: Vec::new(), entities: Vec::new(), u_buffer: Vec::with_capacity(32), prev_node_type: None, }; let nodes_capacity = text.bytes().filter(|c| *c == b'<').count(); let attributes_capacity = text.bytes().filter(|c| *c == b'=').count(); let mut doc = Document { text, nodes: Vec::with_capacity(nodes_capacity), attrs: Vec::with_capacity(attributes_capacity), namespaces: Namespaces(Vec::new()), }; doc.nodes.push(NodeData { parent: None, prev_sibling: None, 
next_sibling: None, children: None, kind: NodeKind::Root, orig_pos: 0, }); doc.namespaces.push_ns("", String::new()); doc.namespaces.push_ns("xml", NS_XML_URI.to_string()); let parser = xmlparser::Tokenizer::from(text); let parent_id = doc.root().id; let mut tag_name = TagNameSpan::new(StrSpan::from(""), StrSpan::from("")); process_tokens(parser, false, parent_id, &mut tag_name, &mut pd, &mut doc)?; if !doc.root().children().any(|n| n.is_element()) { return Err(Error::NoRootNode); } Ok(doc) } fn process_tokens<'d>( parser: xmlparser::Tokenizer<'d>, nested: bool, mut parent_id: NodeId, tag_name: &mut TagNameSpan<'d>, pd: &mut ParserData<'d>, doc: &mut Document<'d>, ) -> Result<(), Error> { for token in parser { let token = token?; match token { xmlparser::Token::ProcessingInstruction(target, content) => { doc.append(parent_id, NodeKind::PI(PI { target: target.to_str(), value: content.map(|v| v.to_str()), }), target.start() - 2, // jump before '<?' ); } xmlparser::Token::Comment(text) => { let orig_pos = text.start() - 4; // jump before '<!--' doc.append(parent_id, NodeKind::Comment(text.to_str()), orig_pos); } xmlparser::Token::Text(text) | xmlparser::Token::Whitespaces(text) => { process_text(text, parent_id, nested, pd, doc)?; } xmlparser::Token::Cdata(text) => { process_cdata(text, parent_id, pd, doc); } xmlparser::Token::ElementStart(prefix, local) => { let prefix_str = prefix.to_str(); if prefix_str == "xmlns" { let pos = err_pos_from_span(prefix); return Err(Error::InvalidElementNamePrefix(pos)); } *tag_name = TagNameSpan::new(prefix, local); } xmlparser::Token::Attribute((prefix, local), value) => { process_attribute(prefix, local, value, pd, doc)?; } xmlparser::Token::ElementEnd(end) => { process_element(*tag_name, end, &mut parent_id, pd, doc)?; } xmlparser::Token::EntityDeclaration(name, value) => { if let xmlparser::EntityDefinition::EntityValue(value) = value { pd.entities.push((name.to_str(), value)); } } _ => {} } pd.prev_node_type = Some(token); } 
Ok(()) } fn process_attribute<'d>( prefix: StrSpan<'d>, local: StrSpan<'d>, value: StrSpan<'d>, pd: &mut ParserData<'d>, doc: &mut Document<'d>, ) -> Result<(), Error> { let prefix_str = prefix.to_str(); let local_str = local.to_str(); let orig_pos = value.start(); let value = normalize_attribute(value, false, &pd.entities, &mut pd.u_buffer)?; if prefix_str == "xmlns" { // The xmlns namespace MUST NOT be declared as the default namespace. if value == NS_XMLNS_URI { let pos = err_pos_from_qname(prefix, local); return Err(Error::UnexpectedXmlnsUri(pos)); } let is_xml_ns_uri = value == NS_XML_URI; // The prefix 'xml' is by definition bound to the namespace name // http://www.w3.org/XML/1998/namespace. // It MUST NOT be bound to any other namespace name. if local_str == "xml" { if !is_xml_ns_uri { let pos = err_pos_from_span(prefix); return Err(Error::InvalidXmlPrefixUri(pos)); } } else { // The xml namespace MUST NOT be bound to a non-xml prefix. if is_xml_ns_uri { let pos = err_pos_from_span(prefix); return Err(Error::UnexpectedXmlUri(pos)); } } // Check for duplicated namespaces. if doc.namespaces[pd.ns_start_idx..].iter().any(|attr| attr.name == local_str) { let pos = err_pos_from_qname(prefix, local); return Err(Error::DuplicatedNamespace(local_str.to_string(), pos)); } // Xml namespace should not be added to the namespaces. if !is_xml_ns_uri { doc.namespaces.push_ns(local_str, value); } } else if local_str == "xmlns" { // The xml namespace MUST NOT be declared as the default namespace. if value == NS_XML_URI { let pos = err_pos_from_span(local); return Err(Error::UnexpectedXmlUri(pos)); } // The xmlns namespace MUST NOT be declared as the default namespace. 
if value == NS_XMLNS_URI { let pos = err_pos_from_span(local); return Err(Error::UnexpectedXmlnsUri(pos)); } doc.namespaces.push_ns("", value); } else { pd.tmp_attrs.push(AttributeData { prefix, prefix_str, local, local_str, value_pos: orig_pos, value }); } Ok(()) } fn process_element<'d>( tag_name: TagNameSpan<'d>, end_token: xmlparser::ElementEnd<'d>, parent_id: &mut NodeId, pd: &mut ParserData<'d>, doc: &mut Document<'d>, ) -> Result<(), Error> { if tag_name.name.is_empty() { // May occur in XML like this: // <!DOCTYPE test [ <!ENTITY p '</p>'> ]> // <root>&p;</root> if let xmlparser::ElementEnd::Close(prefix, local) = end_token { return Err(Error::UnexpectedEntityCloseTag(err_pos_from_tag_name(prefix, local, true))); } else { unreachable!("should be already checked by the xmlparser"); } } // Resolve namespaces. let mut tmp_parent_id = parent_id.0; while tmp_parent_id != 0 { let curr_id = tmp_parent_id; tmp_parent_id = match doc.nodes[tmp_parent_id].parent { Some(id) => id.0, None => 0, }; let ns_range = match doc.nodes[curr_id].kind { NodeKind::Element { ref namespaces, .. } => namespaces.clone(), _ => continue, }; for i in ns_range { if !doc.namespaces.exists(pd.ns_start_idx, doc.namespaces[i].name) { let v = doc.namespaces[i].clone(); doc.namespaces.0.push(v); } } } let mut namespaces = 0..0; if pd.ns_start_idx != doc.namespaces.len() { namespaces = pd.ns_start_idx..doc.namespaces.len(); pd.ns_start_idx = doc.namespaces.len(); } // Resolve attributes. let mut attributes = 0..0; if !pd.tmp_attrs.is_empty() { for attr in &mut pd.tmp_attrs { let ns = if attr.prefix_str == "xml" { // The prefix 'xml' is by definition bound to the namespace name // http://www.w3.org/XML/1998/namespace. doc.namespaces.xml_uri() } else if attr.prefix_str.is_empty() { // 'The namespace name for an unprefixed attribute name // always has no value.' 
doc.namespaces.null_uri() } else { doc.namespaces.get_by_prefix(namespaces.clone(), attr.prefix_str) }; let attr_name = ExpandedNameOwned { ns, name: attr.local_str }; // Check for duplicated attributes. if doc.attrs[pd.attrs_start_idx..].iter().any(|attr| attr.name == attr_name) { let pos = err_pos_from_qname(attr.prefix, attr.local); return Err(Error::DuplicatedAttribute(attr.local_str.to_string(), pos)); } let attr_pos = if attr.prefix.is_empty() { attr.local.start() } else { attr.prefix.start() }; doc.attrs.push(Attribute { name: attr_name, value: mem::replace(&mut attr.value, String::new()), attr_pos, value_pos: attr.value_pos, }); } attributes = pd.attrs_start_idx..doc.attrs.len(); pd.attrs_start_idx = doc.attrs.len(); } pd.tmp_attrs.clear(); let tag_ns_uri = doc.namespaces.get_by_prefix(namespaces.clone(), tag_name.prefix.to_str()); match end_token { xmlparser::ElementEnd::Empty => { doc.append(*parent_id, NodeKind::Element { tag_name: ExpandedNameOwned { ns: tag_ns_uri, name: tag_name.name.to_str(), }, attributes, namespaces, }, orig_pos_from_tag_name(&tag_name) ); } xmlparser::ElementEnd::Close(prefix, local) => { let prefix_str = prefix.to_str(); let local_str = local.to_str(); if let NodeKind::Element { ref tag_name, .. 
} = doc.nodes[parent_id.0].kind { let parent_node = doc.get(*parent_id); let parent_prefix = parent_node.resolve_tag_name_prefix(); if prefix_str != parent_prefix || local_str != tag_name.name { return Err(Error::UnexpectedCloseTag { expected: gen_qname_string(parent_prefix, tag_name.name), actual: gen_qname_string(prefix_str, local_str), pos: err_pos_from_tag_name(prefix, local, true), }); } } if let Some(id) = doc.nodes[parent_id.0].parent { *parent_id = id; } else { unreachable!("should be already checked by the xmlparser"); } } xmlparser::ElementEnd::Open => { *parent_id = doc.append(*parent_id, NodeKind::Element { tag_name: ExpandedNameOwned { ns: tag_ns_uri, name: tag_name.name.to_str(), }, attributes, namespaces, }, orig_pos_from_tag_name(&tag_name) ); } } Ok(()) } fn process_text<'d>( text: StrSpan<'d>, parent_id: NodeId, nested: bool, pd: &mut ParserData<'d>, doc: &mut Document<'d>, ) -> Result<(), Error> { pd.u_buffer.clear(); let mut s = Stream::from(text); while !s.at_end() { match parse_next_chunk(&mut s, &pd.entities)? { NextChunk::Byte(c) => { pd.u_buffer.push(c); } NextChunk::Char(c) => { let mut buf = [0xFF; 4]; // `unwrap` is safe, `char` is 4 bytes long. 
write!(&mut buf[..], "{}", c).unwrap(); for b in &buf { if *b == 0xFF { break; } pd.u_buffer.push(*b); } } NextChunk::Text(fragment) => { if nested { let pos = s.gen_error_pos(); return Err(Error::NestedEntityReference(pos)); } if !pd.u_buffer.is_empty() { let s = trim_new_lines(&pd.u_buffer); append_text(s, text.start(), parent_id, pd, doc); pd.u_buffer.clear(); } let mut parser = xmlparser::Tokenizer::from(fragment); parser.enable_fragment_mode(); let mut tag_name = TagNameSpan::new(StrSpan::from(""), StrSpan::from("")); process_tokens(parser, true, parent_id, &mut tag_name, pd, doc)?; pd.u_buffer.clear(); } } } if !pd.u_buffer.is_empty() { let s = trim_new_lines(&pd.u_buffer); append_text(s, text.start(), parent_id, pd, doc); pd.u_buffer.clear(); } Ok(()) } fn append_text( text: String, orig_pos: usize, parent_id: NodeId, pd: &mut ParserData, doc: &mut Document, ) { if let Some(xmlparser::Token::Cdata(_)) = pd.prev_node_type { if let Some(node) = doc.nodes.iter_mut().last() { if let NodeKind::Text(ref mut last_text) = node.kind { last_text.push_str(&text); } } } else { doc.append(parent_id, NodeKind::Text(text), orig_pos); } } // Translate \r\n and any \r that is not followed by \n to a single \n character. // // https://www.w3.org/TR/xml/#sec-line-ends fn trim_new_lines(text: &[u8]) -> String { let mut text = text.to_vec(); let mut i = 1; while i < text.len() { let prev_byte = text[i - 1]; let curr_byte = text[i]; if prev_byte == b'\r' && curr_byte == b'\n' { text.remove(i - 1); } else if prev_byte == b'\r' && curr_byte != b'\n' { text[i - 1] = b'\n'; } else if curr_byte == b'\r' && i == text.len() - 1 { text[i] = b'\n'; } else { i += 1; } } // `unwrap` is safe, because the input text was already a valid UTF-8 string. 
String::from_utf8(text).unwrap() } enum NextChunk<'a> { Byte(u8), Char(char), Text(StrSpan<'a>), } fn parse_next_chunk<'a>( s: &mut Stream<'a>, entities: &[(&str, StrSpan<'a>)], ) -> Result<NextChunk<'a>, Error> { debug_assert!(!s.at_end()); // `unwrap` is safe, because we already checked that stream is not at end. // But we have an additional `debug_assert` above just in case. let c = s.curr_byte().unwrap(); // Check for character/entity references. if c == b'&' { match s.try_consume_reference() { Some(Reference::CharRef(ch)) => { Ok(NextChunk::Char(ch)) } Some(Reference::EntityRef(name)) => { match entities.iter().find(|v| v.0 == name) { Some(v) => { Ok(NextChunk::Text(v.1)) } None => { let pos = s.gen_error_pos(); Err(Error::UnknownEntityReference(name.into(), pos)) } } } None => { s.advance(1); Ok(NextChunk::Byte(c)) } } } else { s.advance(1); Ok(NextChunk::Byte(c)) } } fn process_cdata<'d>( cdata: StrSpan<'d>, parent_id: NodeId, pd: &mut ParserData, doc: &mut Document<'d>, ) { match pd.prev_node_type { Some(xmlparser::Token::Text(_)) | Some(xmlparser::Token::Whitespaces(_)) => { if let Some(node) = doc.nodes.iter_mut().last() { if let NodeKind::Text(ref mut text) = node.kind { text.push_str(cdata.to_str()); } } } _ => { doc.append(parent_id, NodeKind::Text(cdata.to_str().to_owned()), cdata.start()); } } } // https://www.w3.org/TR/REC-xml/#AVNormalize fn normalize_attribute( text: StrSpan, trim_spaces: bool, entities: &[(&str, StrSpan)], buffer: &mut Vec<u8>, ) -> Result<String, Error> { buffer.clear(); _normalize_attribute(text, trim_spaces, entities, false, buffer)?; // `unwrap` is safe, because buffer must contain a valid UTF-8 string. Ok(str::from_utf8(buffer).unwrap().to_owned()) } fn _normalize_attribute( text: StrSpan, trim_spaces: bool, entities: &[(&str, StrSpan)], nested: bool, buf: &mut Vec<u8>, ) -> Result<(), Error> { let mut s = Stream::from(text); while !s.at_end() { // `unwrap` is safe, because we already checked that stream is not at end. 
let c = s.curr_byte().unwrap(); // Check for character/entity references. if c == b'&' { match s.try_consume_reference() { Some(Reference::CharRef(ch)) => { let mut char_buf = [0xFF; 4]; // `unwrap` is safe, `char` is 4 bytes long. write!(&mut char_buf[..], "{}", ch).unwrap(); for b in &char_buf { if *b != 0xFF { if nested { push_byte(*b, None, buf); } else { buf.push(*b); } } } continue; } Some(Reference::EntityRef(name)) => { if nested { let pos = s.gen_error_pos(); return Err(Error::NestedEntityReference(pos)); } match entities.iter().find(|v| v.0 == name) { Some(v) => { _normalize_attribute(v.1, trim_spaces, entities, true, buf)?; } None => { let pos = s.gen_error_pos(); return Err(Error::UnknownEntityReference(name.into(), pos)); } } continue; } None => { s.advance(1); } } } else { s.advance(1); } push_byte(c, s.get_curr_byte(), buf); } Ok(()) } fn push_byte(mut c: u8, c2: Option<u8>, buf: &mut Vec<u8>) { // \r in \r\n should be ignored. if c == b'\r' && c2 == Some(b'\n') { return; } // \n, \r and \t should be converted into spaces. c = match c { b'\n' | b'\r' | b'\t' => b' ', _ => c, }; buf.push(c); } fn gen_qname_string(prefix: &str, local: &str) -> String { if prefix.is_empty() { local.to_string() } else { format!("{}:{}", prefix, local) } } fn err_pos_from_span(text: StrSpan) -> TextPos { Stream::from(text).gen_error_pos() } fn err_pos_from_qname(prefix: StrSpan, local: StrSpan) -> TextPos { let err_span = if prefix.is_empty() { local } else { prefix }; err_pos_from_span(err_span) } fn err_pos_from_tag_name(prefix: StrSpan, local: StrSpan, close_tag: bool) -> TextPos { let mut pos = err_pos_from_qname(prefix, local); if close_tag { pos.col -= 2; // jump before '</' } else { pos.col -= 1; // jump before '<' } pos } fn orig_pos_from_tag_name(tag_name: &TagNameSpan) -> usize { let span = if tag_name.prefix.is_empty() { tag_name.name } else { tag_name.prefix }; span.start() - 1 // jump before '<' }
use super::types::M; use ndarray::*; pub fn outer(va: &Array<f64, Ix1>, vb: &Array<f64, Ix1>) -> Array<f64, Ix2> { let na = va.len(); let nb = vb.len(); let mut res = Array::zeros((na, nb)); for a in 0..na { for b in 0..nb { res[(a, b)] += va[a] * vb[b]; } } res } /// matrix bracket $[A]_B = B^T A B$ pub fn bracket(a: &M, b: &M) -> M { a.t().dot(b).t().dot(b) }
// Nightly-only language/library features this crate relies on.
#![feature(exact_size_is_empty)]
#![feature(box_patterns)]

// Crate module tree; the definitions live in the sibling module files.
pub mod interpreter;
pub mod value;
pub mod state;
pub mod function;
pub mod class;
// Copyright 2023 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::sync::Arc;

use common_exception::Result;
use common_expression::DataBlock;
use common_hashtable::HashtableEntryRefLike;
use common_hashtable::HashtableLike;
use common_pipeline_core::processors::port::InputPort;
use common_pipeline_core::processors::port::OutputPort;
use common_pipeline_core::processors::processor::ProcessorPtr;
use common_pipeline_transforms::processors::transforms::BlockMetaTransform;
use common_pipeline_transforms::processors::transforms::BlockMetaTransformer;

use crate::pipelines::processors::transforms::aggregator::aggregate_meta::AggregateMeta;
use crate::pipelines::processors::transforms::aggregator::aggregate_meta::HashTablePayload;
use crate::pipelines::processors::transforms::aggregator::estimated_key_size;
use crate::pipelines::processors::transforms::aggregator::serde::AggregateSerdeMeta;
use crate::pipelines::processors::transforms::group_by::HashMethodBounds;
use crate::pipelines::processors::transforms::group_by::KeysColumnBuilder;

/// Pipeline transform that serializes GROUP BY hash tables (value type `()`)
/// into `DataBlock`s carrying `AggregateSerdeMeta`.
pub struct TransformGroupBySerializer<Method: HashMethodBounds> {
    // Hash method used to build the keys column from hashtable entries.
    method: Method,
}

impl<Method: HashMethodBounds> TransformGroupBySerializer<Method> {
    /// Wraps this transform in a `BlockMetaTransformer` processor bound to
    /// the given input/output ports.
    pub fn try_create(
        input: Arc<InputPort>,
        output: Arc<OutputPort>,
        method: Method,
    ) -> Result<ProcessorPtr> {
        Ok(ProcessorPtr::create(BlockMetaTransformer::create(
            input,
            output,
            TransformGroupBySerializer { method },
        )))
    }
}

impl<Method> BlockMetaTransform<AggregateMeta<Method, ()>> for TransformGroupBySerializer<Method>
where Method: HashMethodBounds
{
    const NAME: &'static str = "TransformGroupBySerializer";

    /// Converts one `AggregateMeta` into a serialized `DataBlock`.
    ///
    /// Only `Spilled` and `HashTable` variants are expected at this stage;
    /// the other variants are invariant violations (`unreachable!`).
    fn transform(&mut self, meta: AggregateMeta<Method, ()>) -> Result<DataBlock> {
        match meta {
            AggregateMeta::Spilling(_) => unreachable!(),
            AggregateMeta::Partitioned { .. } => unreachable!(),
            AggregateMeta::Serialized(_) => unreachable!(),
            // Spilled payloads become an empty block whose meta records
            // bucket, spill location and column layout.
            AggregateMeta::Spilled(payload) => Ok(DataBlock::empty_with_meta(
                AggregateSerdeMeta::create_spilled(
                    payload.bucket,
                    payload.location,
                    payload.columns_layout,
                ),
            )),
            // In-memory hash tables are serialized into a keys column and
            // tagged with their bucket id.
            AggregateMeta::HashTable(payload) => {
                let bucket = payload.bucket;
                let data_block = serialize_group_by(&self.method, payload)?;
                data_block.add_meta(Some(AggregateSerdeMeta::create(bucket)))
            }
        }
    }
}

/// Builds a single-column `DataBlock` containing every group key held in the
/// payload's hash table, sized up-front from the table's len / estimated key size.
pub fn serialize_group_by<Method: HashMethodBounds>(
    method: &Method,
    payload: HashTablePayload<Method, ()>,
) -> Result<DataBlock> {
    let keys_len = payload.cell.hashtable.len();
    let value_size = estimated_key_size(&payload.cell.hashtable);
    let mut group_key_builder = method.keys_column_builder(keys_len, value_size);
    for group_entity in payload.cell.hashtable.iter() {
        group_key_builder.append_value(group_entity.key());
    }

    Ok(DataBlock::new_from_columns(vec![
        group_key_builder.finish(),
    ]))
}
// Field accessor API for the PTPTSCR register (bits 0..=5: TSE, TSFCU,
// TSSTI, TSSTU, TSITE, TSARU).
// NOTE(review): this follows the svd2rust-generated accessor pattern
// (crate::R / crate::W wrappers, per-field write proxies) — presumably
// machine-generated; prefer regenerating over hand-editing.
#[doc = "Reader of register PTPTSCR"]
pub type R = crate::R<u32, super::PTPTSCR>;
#[doc = "Writer for register PTPTSCR"]
pub type W = crate::W<u32, super::PTPTSCR>;
#[doc = "Register PTPTSCR `reset()`'s with value 0"]
impl crate::ResetValue for super::PTPTSCR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `TSE`"]
pub type TSE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TSE`"]
pub struct TSE_W<'a> {
    w: &'a mut W,
}
impl<'a> TSE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: clear then set.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `TSFCU`"]
pub type TSFCU_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TSFCU`"]
pub struct TSFCU_W<'a> {
    w: &'a mut W,
}
impl<'a> TSFCU_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1: clear then set.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `TSSTI`"]
pub type TSSTI_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TSSTI`"]
pub struct TSSTI_W<'a> {
    w: &'a mut W,
}
impl<'a> TSSTI_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 2: clear then set.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `TSSTU`"]
pub type TSSTU_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TSSTU`"]
pub struct TSSTU_W<'a> {
    w: &'a mut W,
}
impl<'a> TSSTU_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 3: clear then set.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `TSITE`"]
pub type TSITE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TSITE`"]
pub struct TSITE_W<'a> {
    w: &'a mut W,
}
impl<'a> TSITE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 4: clear then set.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `TSARU`"]
pub type TSARU_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TSARU`"]
pub struct TSARU_W<'a> {
    w: &'a mut W,
}
impl<'a> TSARU_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 5: clear then set.
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
// Read-side accessors: extract each single-bit field from the raw bits.
impl R {
    #[doc = "Bit 0 - Time stamp enable"]
    #[inline(always)]
    pub fn tse(&self) -> TSE_R {
        TSE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Time stamp fine or coarse update"]
    #[inline(always)]
    pub fn tsfcu(&self) -> TSFCU_R {
        TSFCU_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Time stamp system time initialize"]
    #[inline(always)]
    pub fn tssti(&self) -> TSSTI_R {
        TSSTI_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Time stamp system time update"]
    #[inline(always)]
    pub fn tsstu(&self) -> TSSTU_R {
        TSSTU_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Time stamp interrupt trigger enable"]
    #[inline(always)]
    pub fn tsite(&self) -> TSITE_R {
        TSITE_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Time stamp addend register update"]
    #[inline(always)]
    pub fn tsaru(&self) -> TSARU_R {
        TSARU_R::new(((self.bits >> 5) & 0x01) != 0)
    }
}
// Write-side accessors: each returns a proxy that mutates the staged bits.
impl W {
    #[doc = "Bit 0 - Time stamp enable"]
    #[inline(always)]
    pub fn tse(&mut self) -> TSE_W {
        TSE_W { w: self }
    }
    #[doc = "Bit 1 - Time stamp fine or coarse update"]
    #[inline(always)]
    pub fn tsfcu(&mut self) -> TSFCU_W {
        TSFCU_W { w: self }
    }
    #[doc = "Bit 2 - Time stamp system time initialize"]
    #[inline(always)]
    pub fn tssti(&mut self) -> TSSTI_W {
        TSSTI_W { w: self }
    }
    #[doc = "Bit 3 - Time stamp system time update"]
    #[inline(always)]
    pub fn tsstu(&mut self) -> TSSTU_W {
        TSSTU_W { w: self }
    }
    #[doc = "Bit 4 - Time stamp interrupt trigger enable"]
    #[inline(always)]
    pub fn tsite(&mut self) -> TSITE_W {
        TSITE_W { w: self }
    }
    #[doc = "Bit 5 - Time stamp addend register update"]
    #[inline(always)]
    pub fn tsaru(&mut self) -> TSARU_W {
        TSARU_W { w: self }
    }
}
use std::ops::{Add, Mul, Sub};

/// A 2-D texture-space (UV) coordinate.
#[derive(Debug, Copy, Clone)]
pub struct TextureCoordinate {
    pub u: f64,
    pub v: f64,
}

impl TextureCoordinate {
    /// Builds a coordinate from its `u` and `v` components.
    pub fn new(u: f64, v: f64) -> Self {
        Self { u, v }
    }
}

/// Component-wise addition.
impl Add for TextureCoordinate {
    type Output = TextureCoordinate;

    fn add(self, other: Self) -> Self::Output {
        Self::new(self.u + other.u, self.v + other.v)
    }
}

/// Component-wise subtraction.
impl Sub for TextureCoordinate {
    type Output = TextureCoordinate;

    fn sub(self, other: Self) -> Self::Output {
        Self::new(self.u - other.u, self.v - other.v)
    }
}

/// Component-wise subtraction on borrows, yielding an owned result.
impl Sub for &TextureCoordinate {
    type Output = TextureCoordinate;

    fn sub(self, other: &TextureCoordinate) -> Self::Output {
        TextureCoordinate::new(self.u - other.u, self.v - other.v)
    }
}

/// Component-wise (Hadamard) product.
impl Mul for TextureCoordinate {
    type Output = TextureCoordinate;

    fn mul(self, other: Self) -> Self::Output {
        Self::new(self.u * other.u, self.v * other.v)
    }
}

/// Uniform scaling by a scalar.
impl Mul<f64> for TextureCoordinate {
    type Output = TextureCoordinate;

    fn mul(self, scale: f64) -> Self::Output {
        Self::new(self.u * scale, self.v * scale)
    }
}
// Redis-backed store of proxy addresses kept in a sorted set keyed by a
// health score in [0, 100]; higher score = healthier proxy.
use crate::proxy_addr::ProxyAddr;
use std::time::Duration;
use std::ops::DerefMut;
use r2d2::Pool;
use once_cell::sync::Lazy;
use std::panic::resume_unwind; // NOTE(review): unused import
use std::str::FromStr;
use std::net::IpAddr; // NOTE(review): unused import

/// Redis sorted-set key holding all proxies.
const REDIS_KEY: &str = "proxies";
/// r2d2 pool sizing: max open connections.
const CACHE_POOL_MAX_OPEN: u32 = 16;
/// r2d2 pool sizing: minimum idle connections kept warm.
const CACHE_POOL_MIN_IDLE: u32 = 8;
/// Maximum lifetime of a pooled connection, in seconds.
const CACHE_POOL_EXPIRE_SECONDS: u64 = 60;

// Lazily-initialized global connection pool to a local Redis.
// NOTE(review): `unwrap()` here aborts the process if Redis is unreachable
// at first use — confirm this fail-fast behavior is intended.
static REDIS_POOL: Lazy<Pool<redis::Client>> = Lazy::new(
    || {
        let client = redis::Client::open("redis://127.0.0.1/").unwrap();
        r2d2::Pool::builder()
            .max_size(CACHE_POOL_MAX_OPEN)
            .max_lifetime(Some(Duration::from_secs(CACHE_POOL_EXPIRE_SECONDS)))
            .min_idle(Some(CACHE_POOL_MIN_IDLE))
            .build(client)
            .unwrap()
    }
);

/// Registers a proxy with full score (100).
/// `NX` means an existing member keeps its current score.
pub fn add_proxy(proxy: ProxyAddr) {
    let pool = REDIS_POOL.clone();
    let mut conn = pool.get().unwrap();
    redis::cmd("zadd")
        .arg(REDIS_KEY)
        .arg("NX")
        .arg(100)
        .arg(proxy.to_string())
        .query::<i32>(conn.deref_mut()).unwrap();
}

/// Picks a proxy from the high-score end of the set, or `None` if empty
/// or unparsable.
///
/// NOTE(review): despite the name, this is deterministic — it always takes
/// the first element of `zrevrange 1 100`. Also, starting the range at rank
/// 1 skips the single highest-scored member; confirm that is intentional
/// (otherwise the start index should be 0).
pub fn random() -> Option<ProxyAddr> {
    let pool = REDIS_POOL.clone();
    let mut conn = pool.get().unwrap();
    let result: Vec<String> = redis::cmd("zrevrange")
        .arg(REDIS_KEY)
        .arg(1)
        .arg(100)
        .query(conn.deref_mut()).unwrap();
    // NOTE(review): `result.is_empty()` would be the idiomatic check.
    if result.len() == 0 {
        None
    } else {
        match ProxyAddr::from_str(&result[0]) {
            Ok(proxy) => Some(proxy),
            Err(_) => None
        }
    }
}

/// Decrements a proxy's score by 1 and evicts it once the score hits 0.
///
/// NOTE(review): the `zscore` query unwraps — it panics if the member was
/// removed concurrently between the two commands; confirm single-writer use.
pub fn decrease_score(proxy: ProxyAddr) {
    let pool = REDIS_POOL.clone();
    let mut conn = pool.get().unwrap();
    redis::cmd("zincrby")
        .arg(REDIS_KEY)
        .arg(-1)
        .arg(proxy.to_string())
        .execute(conn.deref_mut());
    let score: i32 = redis::cmd("zscore")
        .arg(REDIS_KEY)
        .arg(proxy.to_string())
        .query(conn.deref_mut()).unwrap();
    if score == 0 {
        redis::cmd("zrem")
            .arg(REDIS_KEY)
            .arg(proxy.to_string())
            .execute(conn.deref_mut());
    }
}

/// Resets a proxy's score to the maximum (100), inserting it if absent.
pub fn max_score(proxy: ProxyAddr) {
    let pool = REDIS_POOL.clone();
    let mut conn = pool.get().unwrap();
    redis::cmd("zadd")
        .arg(REDIS_KEY)
        .arg(100)
        .arg(proxy.to_string())
        .execute(conn.deref_mut());
}

/// Number of proxies currently stored (ZCARD).
pub fn count() -> u32 {
    let pool = REDIS_POOL.clone();
    let mut conn = pool.get().unwrap();
    redis::cmd("zcard").arg(REDIS_KEY).query(conn.deref_mut()).unwrap()
}

/// Incrementally scans the set (ZSCAN): returns the next cursor plus the
/// proxies from this page, dropping the scores.
///
/// NOTE(review): `ProxyAddr::from_str(...).unwrap()` panics on any malformed
/// stored member — consider whether bad entries should be skipped instead.
pub fn batch(cursor: u32, count: u32) -> (u32, Vec<ProxyAddr>) {
    let pool = REDIS_POOL.clone();
    let mut conn = pool.get().unwrap();
    let (cursor, proxies) = redis::cmd("zscan").arg(REDIS_KEY).arg(cursor).arg("count").arg(count).query::<(u32, Vec<(String, String)>)>(conn.deref_mut()).unwrap();
    let proxies :Vec<ProxyAddr> = proxies.into_iter().map(|s| ProxyAddr::from_str(&s.0).unwrap()).collect();
    (cursor, proxies)
}
use itertools::{Itertools, Zip}; use num_traits::{FromPrimitive, ToPrimitive}; use super::{ bullet_from_param, Board, BulletType, ByteString, CardinalDirection, CharId, Charset, Coordinate, Explosion, MessageBoxLineType, OverlayMode, Palette, Robot, Sensor, Size, Thing, WorldState, }; pub trait Renderer { fn put_pixel(&mut self, x: usize, y: usize, r: u8, g: u8, b: u8); fn clear(&mut self); } pub fn render<R: Renderer>( w: &WorldState, display: (Coordinate<u8>, Size<u8>), viewport: Coordinate<u16>, board: &Board, robots: &[Robot], renderer: &mut R, is_title_screen: bool, ) { let charset = &w.charset; let palette = &w.palette; let num_colors = palette.colors.len() as u8; renderer.clear(); let mut empty_overlay = vec![]; let base_overlay = match board.overlay { Some((OverlayMode::Static, ref data)) | Some((OverlayMode::Normal, ref data)) => data, _ => { empty_overlay.reserve(board.width * board.height); for _ in 0..(board.width * board.height) { empty_overlay.push((32, 0x07)); } &empty_overlay } }; //assert_ne!(board.width, 0); if board.width == 0 { return; } let level = Itertools::flatten( board .level .chunks(board.width) // per-row .skip(viewport.1 as usize) // ignore rows outside of viewport .take((display.1).1 as usize) // ignore rows outside of viewport .map(|row| { row.iter() .skip(viewport.0 as usize) .take((display.1).0 as usize) }), ); let under = Itertools::flatten( board .under .chunks(board.width) // per-row .skip(viewport.1 as usize) // ignore rows outside of viewport .take((display.1).1 as usize) // ignore rows outside of viewport .map(|row| { row.iter() .skip(viewport.0 as usize) .take((display.1).0 as usize) }), ); let is_static = board .overlay .as_ref() .map_or(false, |(o, _)| *o == OverlayMode::Static); let overlay_viewport = if is_static { Coordinate(0, 0) } else { viewport }; let overlay = Itertools::flatten( base_overlay .chunks(board.width) // per-row .skip(overlay_viewport.1 as usize) // ignore pre-viewport .take((display.1).1 as usize) // 
only iterate rows in viewport .map(|row| { row.iter() .skip(overlay_viewport.0 as usize) .take((display.1).0 as usize) }), ); for (pos, (level, under, overlay)) in Zip::new((level, under, overlay)).enumerate() { let &(id, color, param) = level; let &(_under_id, under_color, _under_param) = under; let &(overlay_char, overlay_color) = overlay; let xpos = pos as u16 % (display.1).0 as u16; let ypos = pos as u16 / (display.1).0 as u16; let mut color = match Thing::from_u8(id).unwrap() { Thing::Player => { if is_title_screen { 0 } else { w.char_id(CharId::PlayerColor) } } Thing::Fire => w.char_id_offset(CharId::FireColor1, param), Thing::Missile => w.char_id(CharId::MissileColor), Thing::Explosion => { w.char_id_offset(CharId::ExplosionStage1, Explosion::from_param(param).stage) } Thing::Bullet => match bullet_from_param(param).0 { BulletType::Player => w.char_id(CharId::PlayerBulletColor), BulletType::Enemy => w.char_id(CharId::EnemyBulletColor), BulletType::Neutral => w.char_id(CharId::NeutralBulletColor), }, Thing::Scroll => w.message_color, _ => color, }; let overlay_visible = overlay_char != b' '; let overlay_see_through = overlay_color / num_colors == 0; let ch = if !overlay_visible { let board_x = viewport.0 + xpos; let board_y = viewport.1 + ypos; char_from_id( id, param, &robots, &board.sensors, &w.idchars, board_x .checked_sub(1) .and_then(|x| board.thing_at(&Coordinate(x, board_y))), board.thing_at(&Coordinate(board_x + 1, board_y)), board_y .checked_sub(1) .and_then(|y| board.thing_at(&Coordinate(board_x, y))), board.thing_at(&Coordinate(board_x, board_y + 1)), w.player_face_dir, ) } else { overlay_char }; if color / num_colors == 0 { color = under_color / num_colors * num_colors + color; } if overlay_visible { if overlay_see_through { color = color / num_colors * num_colors + overlay_color; } else { color = overlay_color; } } draw_char( ch, color % num_colors, color / num_colors, ((display.0).0 as u16 + xpos) as usize, ((display.0).1 as u16 + ypos) as 
usize, charset, palette, renderer, ); } // Draw sprites over board content, skipping tiles where there is overlay content // if the sprite should be drawn below the overlay. for sprite in &w.sprites { if !sprite.enabled { continue; } let x_start = if sprite.pos.0 < 0 { -sprite.pos.0 } else { 0 } as usize; let y_start = if sprite.pos.1 < 0 { -sprite.pos.1 } else { 0 } as usize; let width = (sprite.size.0 - x_start as i32).max(0) as usize; let height = (sprite.size.1 - y_start as i32).max(0) as usize; for y_off in y_start..y_start+height { for x_off in x_start..x_start+width { let dst_x = (sprite.pos.0 + x_off as i32) as usize; let dst_y = (sprite.pos.1 + y_off as i32) as usize; // Ignore sprite components that are outside of the current viewport. if sprite.is_static == 0 && (dst_x < viewport.0 as usize || dst_x >= (viewport.0 + display.1.0 as u16) as usize || dst_y < viewport.1 as usize || dst_y >= (viewport.1 + display.1.1 as u16) as usize) { continue; } let dst_offset = dst_y * board.width + dst_x; if sprite.is_overlaid == 0 && base_overlay[dst_offset].0 != b' ' { continue; } let y = sprite.reference.1 as usize + y_off; let x = sprite.reference.0 as usize + x_off; let offset = y * board.width + x; let (ch, color) = { // FIXME: under? 
let (id, color, param) = board.level[offset]; // FIXME: deduplicate with above let ch = char_from_id( id, param, &robots, &board.sensors, &w.idchars, x .checked_sub(1) .and_then(|x| board.thing_at(&Coordinate(x as u16, y as u16))), board.thing_at(&Coordinate(x as u16 + 1, y as u16)), y .checked_sub(1) .and_then(|y| board.thing_at(&Coordinate(x as u16, y as u16))), board.thing_at(&Coordinate(x as u16, y as u16 + 1)), w.player_face_dir, ); (ch, color) }; if ch == b' ' { continue; } let (draw_x, draw_y) = if sprite.is_static == 1 { (dst_x + display.0.0 as usize, dst_y + display.0.1 as usize) } else { ( dst_x - viewport.0 as usize + display.0.0 as usize, dst_y - viewport.1 as usize + display.0.1 as usize, ) }; draw_char( ch, color % num_colors, color / num_colors, draw_x, draw_y, charset, palette, renderer, ) } } } const WINDOW_SIZE: usize = 80; const WINDOW_HEIGHT: u8 = 25; if board.remaining_message_cycles > 0 && board.message_row < WINDOW_HEIGHT { let message_len = board.message_line.text_len() + if w.message_edge { 2 } else { 0 }; let orig_x = board.message_col.unwrap_or_else(|| if message_len < WINDOW_SIZE { (WINDOW_SIZE - message_len) / 2 } else { 0 } as u8); let mut msg_x = orig_x; if w.message_edge && msg_x > 0 { draw_char( b' ', 0x00, 0x00, msg_x as usize - 1, board.message_row as usize, charset, palette, renderer, ); } let mut msg_y = board.message_row as usize; for (chars, bg, fg) in board.message_line.color_text() { for &c in chars { if c == b'\n' { msg_x = orig_x; msg_y += 1 } else { draw_char( c, fg.unwrap_or(w.message_color), bg.unwrap_or(0x00), msg_x as usize, msg_y, charset, palette, renderer, ); msg_x += 1; if msg_x >= WINDOW_SIZE as u8 { break; } } } } if w.message_edge && msg_x < WINDOW_SIZE as u8 { draw_char( b' ', 0x00, 0x00, msg_x as usize, msg_y, charset, palette, renderer, ); } } } pub enum MessageBoxLine { Text(ByteString, MessageBoxLineType), Option { label: ByteString, text: ByteString }, } pub fn draw_messagebox<R: Renderer>( w: 
&WorldState, title: &ByteString, lines: &[MessageBoxLine], pos: usize, renderer: &mut R, ) { let empty_title = ByteString("Interaction".to_owned().into_bytes()); let title = if title.is_empty() { &empty_title } else { title }; const DIALOG_Y: usize = 3; const DIALOG_X: usize = 5; const DIALOG_W: usize = 79 - DIALOG_X * 2; const DIALOG_H: usize = 25 - DIALOG_Y * 2; const CONTENT_H: usize = DIALOG_H - 6; let mut y = DIALOG_Y; draw_char( 0xDA, 0x00, 0x08, DIALOG_X, y, &w.charset, &w.palette, renderer, ); // .- for x in 1..DIALOG_W { draw_char( 0xC4, 0x00, 0x08, DIALOG_X + x, y, &w.charset, &w.palette, renderer, ); // - } draw_char( 0xBF, 0x07, 0x08, DIALOG_X + DIALOG_W, y, &w.charset, &w.palette, renderer, ); // -. y += 1; draw_char( 0xB3, 0x00, 0x08, DIALOG_X, y, &w.charset, &w.palette, renderer, ); // | for x in 1..DIALOG_W { draw_char( 0x20, 0x00, 0x08, DIALOG_X + x, y, &w.charset, &w.palette, renderer, ); // ' ' } draw_char( 0xB3, 0x0F, 0x08, DIALOG_X + DIALOG_W, y, &w.charset, &w.palette, renderer, ); // | for (x, c) in title.iter().enumerate() { draw_char( *c, 0x0F, 0x08, DIALOG_X + (DIALOG_W - title.len()) / 2 + x, y, &w.charset, &w.palette, renderer, ); } y += 1; draw_char( 0xDA, 0x0F, 0x08, DIALOG_X, y, &w.charset, &w.palette, renderer, ); // .- for x in 1..DIALOG_W { draw_char( 0xC4, 0x0F, 0x08, DIALOG_X + x, y, &w.charset, &w.palette, renderer, ); // - } draw_char( 0xD9, 0x0F, 0x08, DIALOG_X + DIALOG_W, y, &w.charset, &w.palette, renderer, ); // -/ y += 1; for y_off in 0..CONTENT_H { draw_char( 0xB3, 0x0F, 0x08, DIALOG_X, y + y_off, &w.charset, &w.palette, renderer, ); // | for x in 1..DIALOG_W { draw_char( 0x20, 0x00, 0x08, DIALOG_X + x, y + y_off, &w.charset, &w.palette, renderer, ); } if y_off == CONTENT_H / 2 { draw_char( 0x10, 0x00, 0x08, DIALOG_X + 1, y + y_off, &w.charset, &w.palette, renderer, ); // > draw_char( 0x11, 0x00, 0x08, DIALOG_X + DIALOG_W - 1, y + y_off, &w.charset, &w.palette, renderer, ); // < } draw_char( 0xB3, 0x00, 0x08, DIALOG_X + 
DIALOG_W, y + y_off, &w.charset, &w.palette, renderer, ); // | } y += CONTENT_H; draw_char( 0xDA, 0x00, 0x08, DIALOG_X, y, &w.charset, &w.palette, renderer, ); // .- for x in 1..DIALOG_W { draw_char( 0xC4, 0x00, 0x08, DIALOG_X + x, y, &w.charset, &w.palette, renderer, ); // - } draw_char( 0xD9, 0x00, 0x08, DIALOG_X + DIALOG_W, y, &w.charset, &w.palette, renderer, ); // -/ y += 1; draw_char( 0xB3, 0x00, 0x08, DIALOG_X, y, &w.charset, &w.palette, renderer, ); // | for x in 1..DIALOG_W { draw_char( 0x20, 0x00, 0x08, DIALOG_X + x, y, &w.charset, &w.palette, renderer, ); // - } draw_char( 0xB3, 0x0F, 0x08, DIALOG_X + DIALOG_W, y, &w.charset, &w.palette, renderer, ); // | y += 1; draw_char( 0xC0, 0x07, 0x08, DIALOG_X, y, &w.charset, &w.palette, renderer, ); // \- for x in 1..DIALOG_W { draw_char( 0xC4, 0x0F, 0x08, DIALOG_X + x, y, &w.charset, &w.palette, renderer, ); // - } draw_char( 0xD9, 0x0F, 0x08, DIALOG_X + DIALOG_W, y, &w.charset, &w.palette, renderer, ); // -/ let start = (pos as isize - CONTENT_H as isize / 2).max(0) as usize; let end = (start + (CONTENT_H / 2 + pos + 1).min(CONTENT_H)).min(lines.len()); let y_start = DIALOG_Y + 3 + CONTENT_H / 2 - pos.min(CONTENT_H / 2); for (y_off, line) in lines[start..end].iter().enumerate() { const START_X: usize = DIALOG_X + 3; const LIMIT: usize = DIALOG_W - 4; match line { MessageBoxLine::Text(ref s, MessageBoxLineType::Plain) => { let s = &s[0..s.len().min(LIMIT)]; for (x, ch) in s.iter().enumerate() { draw_char( *ch, 0x0F, 0x08, START_X + x, y_start + y_off, &w.charset, &w.palette, renderer, ); } } MessageBoxLine::Text(ref s, MessageBoxLineType::Color) => draw_fancy_message_box_line( s, false, false, START_X, y_start + y_off, LIMIT, &w.charset, &w.palette, renderer, ), MessageBoxLine::Text(ref s, MessageBoxLineType::Center) => draw_fancy_message_box_line( s, true, false, START_X, y_start + y_off, LIMIT, &w.charset, &w.palette, renderer, ), MessageBoxLine::Option { ref text, .. 
} => draw_fancy_message_box_line( text, false, true, START_X, y_start + y_off, LIMIT, &w.charset, &w.palette, renderer, ), }; } } fn draw_fancy_message_box_line<R: Renderer>( text: &ByteString, center: bool, option: bool, x: usize, y: usize, limit: usize, charset: &Charset, palette: &Palette, renderer: &mut R, ) { let mut x_off = if option { 2 } else { 0 }; if option { draw_char(0x10, 0x0E, 0x08, x, y, charset, palette, renderer); } let total_len = text .color_text() .fold(0, |acc, (chars, _bg, _fg)| acc + chars.len()); let mut remaining = total_len.min(limit); if center { x_off += (limit - total_len) / 2; } for (chars, bg, fg) in text.color_text() { for &c in chars { if remaining == 0 { return } draw_char( c, fg.unwrap_or(0x0F), bg.unwrap_or(0x08), x + x_off, y, charset, palette, renderer, ); x_off += 1; remaining -= 1; } } } fn draw_char<R: Renderer>( ch: u8, fg_color: u8, bg_color: u8, x: usize, y: usize, charset: &Charset, palette: &Palette, renderer: &mut R, ) { let char_bytes = charset.nth(ch); for (y_off, byte) in char_bytes.iter().enumerate() { for bit in 1..9 { let &(ref color, ref intensity) = if byte & (1 << (bit - 1)) != 0 { &palette.colors[fg_color as usize] } else { &palette.colors[bg_color as usize] }; renderer.put_pixel( (x + 1) * 8 - bit, y * 14 + y_off, ((color.r * 4) as f32 * intensity) as u8, ((color.g * 4) as f32 * intensity) as u8, ((color.b * 4) as f32 * intensity) as u8, ); } } } pub(crate) fn char_from_id( id: u8, param: u8, robots: &[Robot], sensors: &[Sensor], idchars: &[u8], left: Option<Thing>, right: Option<Thing>, top: Option<Thing>, bottom: Option<Thing>, player_face_dir: i32, ) -> u8 { let thing = Thing::from_u8(id).expect("invalid thing"); let char_id = match thing { Thing::Space => CharId::Space, Thing::Normal => CharId::Normal, Thing::Solid => CharId::Solid, Thing::Tree => CharId::Tree, Thing::Line | Thing::ThickWeb => { let (has_left, has_right, has_top, has_bottom) = if thing == Thing::ThickWeb { let check = |t| t != 
Thing::Space; ( left.map_or(true, check), right.map_or(true, check), top.map_or(true, check), bottom.map_or(true, check), ) } else { let check = |t| t == Thing::Line; ( left.map_or(true, check), right.map_or(true, check), top.map_or(true, check), bottom.map_or(true, check), ) }; return match (has_left, has_right, has_top, has_bottom) { (false, false, false, false) => 249, (_, _, false, false) => 205, (false, false, _, _) => 186, (true, false, true, false) => 188, (false, true, true, false) => 200, (true, false, false, true) => 187, (false, true, false, true) => 201, (true, false, true, true) => 185, (false, true, true, true) => 204, (true, true, true, false) => 202, (true, true, false, true) => 203, (true, true, true, true) => 206, }; } Thing::Web => { let check = |t| t != Thing::Space; let has_left = left.map_or(true, check); let has_right = right.map_or(true, check); let has_top = top.map_or(true, check); let has_bottom = bottom.map_or(true, check); return match (has_left, has_right, has_top, has_bottom) { (false, false, false, false) => 249, (_, _, false, false) => 196, (false, false, _, _) => 179, (true, false, true, false) => 217, (false, true, true, false) => 192, (true, false, false, true) => 191, (false, true, false, true) => 218, (true, false, true, true) => 180, (false, true, true, true) => 195, (true, true, true, false) => 193, (true, true, false, true) => 194, (true, true, true, true) => 197, }; } Thing::CustomBlock => return param, Thing::Breakaway => CharId::Breakaway, Thing::CustomBreak => return param, Thing::Boulder => CharId::Boulder, Thing::Crate => CharId::Crate, Thing::CustomPush => return param, Thing::Box => CharId::Box, Thing::CustomBox => return param, Thing::Fake => CharId::Fake, Thing::Carpet => CharId::Carpet, Thing::Floor => CharId::Floor, Thing::Tiles => CharId::Tiles, Thing::CustomFloor => return param, Thing::StillWater => CharId::StillWater, Thing::NWater => CharId::NorthWater, Thing::SWater => CharId::SouthWater, Thing::EWater => 
CharId::EastWater, Thing::WWater => CharId::WestWater, Thing::Ice => return idchars[CharId::BlankIce.to_usize().unwrap() + param as usize], Thing::Lava => CharId::LavaAnim1, //TODO: lava animation, Thing::Chest => CharId::Chest, Thing::Gem => CharId::Gem, Thing::MagicGem => CharId::MagicGem, Thing::Health => CharId::Health, Thing::Ring => CharId::Ring, Thing::Potion => CharId::Potion, Thing::Energizer => CharId::Energizer, Thing::Goop => CharId::Goop, Thing::Ammo => { if param < 10 { CharId::SmallAmmo } else { CharId::LargeAmmo } } Thing::Bomb => CharId::Bomb, Thing::LitBomb => { return idchars[CharId::LitBombAnim1.to_usize().unwrap() + (param & 0x7F) as usize] } Thing::Explosion => CharId::Explosion, Thing::Key => CharId::Key, Thing::Lock => CharId::Lock, Thing::Door => { if param & 0x1 == 0 { CharId::HorizontalDoor } else { CharId::VerticalDoor } } Thing::OpenDoor => { return idchars[CharId::OpenDoorStart.to_usize().unwrap() + (param & 0x1F) as usize]; } Thing::Stairs => CharId::Stairs, Thing::Cave => CharId::Cave, Thing::CWRotate => CharId::CwAnim1, //TODO animate Thing::CCWRotate => CharId::CcwAnim1, //TODO animate Thing::Gate => CharId::Gate, Thing::OpenGate => CharId::OpenGate, Thing::Transport => match param { //TODO animate 0 => CharId::NTransportAnim1, 1 => CharId::STransportAnim1, 2 => CharId::ETransportAnim1, 3 => CharId::WTransportAnim1, 4 => CharId::AnyTransportAnim1, _ => unreachable!("unexpected transport param: {}", param), }, Thing::Coin => CharId::Coin, Thing::NMovingWall => return param, Thing::SMovingWall => return param, Thing::EMovingWall => return param, Thing::WMovingWall => return param, Thing::Pouch => CharId::Pouch, Thing::Pusher | Thing::Missile | Thing::Spike => match param { 0 => CharId::NThickArrow, 1 => CharId::SThickArrow, 2 => CharId::EThickArrow, 3 => CharId::WThickArrow, _ => unreachable!("unexpected param for {:?}: {}", thing, param), }, Thing::SliderNS => CharId::SliderNS, Thing::SliderEW => CharId::SliderEW, Thing::Lazer => 
CharId::HorizontalLazerAnim1, //TODO: differentiate horizontal/vertical Thing::LazerGun => CharId::LazerGun, Thing::Bullet => match bullet_from_param(param) { (BulletType::Player, CardinalDirection::North) => CharId::NPlayerBullet, (BulletType::Player, CardinalDirection::South) => CharId::SPlayerBullet, (BulletType::Player, CardinalDirection::East) => CharId::EPlayerBullet, (BulletType::Player, CardinalDirection::West) => CharId::WPlayerBullet, (BulletType::Enemy, CardinalDirection::North) => CharId::NEnemyBullet, (BulletType::Enemy, CardinalDirection::South) => CharId::SEnemyBullet, (BulletType::Enemy, CardinalDirection::East) => CharId::EEnemyBullet, (BulletType::Enemy, CardinalDirection::West) => CharId::WEnemyBullet, (BulletType::Neutral, CardinalDirection::North) => CharId::NNeutralBullet, (BulletType::Neutral, CardinalDirection::South) => CharId::SNeutralBullet, (BulletType::Neutral, CardinalDirection::East) => CharId::ENeutralBullet, (BulletType::Neutral, CardinalDirection::West) => CharId::WNeutralBullet, }, Thing::Fire => return idchars[CharId::FireAnim1.to_usize().unwrap() + param as usize], Thing::Forest => CharId::Forest, Thing::Life => CharId::LifeAnim1, //TODO animate Thing::Whirlpool1 => CharId::Whirlpool1, Thing::Whirlpool2 => CharId::Whirlpool2, Thing::Whirlpool3 => CharId::Whirlpool3, Thing::Whirlpool4 => CharId::Whirlpool4, Thing::InvisibleWall => match param { 0 => CharId::Space, _ => CharId::InvisibleWall, }, Thing::RicochetPanel => match param { 0 => CharId::RicochetPanel1, 1 => CharId::RicochetPanel2, _ => unreachable!("unexpected ricochet panel param: {}", param), }, Thing::Ricochet => CharId::Ricochet, Thing::Mine => CharId::MineAnim1, //TODO animate Thing::CustomHurt => return param, Thing::Text => return param, Thing::ShootingFire => CharId::SpitFireAnim1, //TODO animate Thing::Seeker => CharId::SeekerAnim1, //TODO animate Thing::Snake => CharId::Snake, Thing::Eye => CharId::Eye, Thing::Thief => CharId::Thief, Thing::SlimeBlob => 
CharId::SlimeBlob, Thing::Runner => CharId::Runner, Thing::Ghost => CharId::Ghost, Thing::Dragon => CharId::Dragon, Thing::Fish => CharId::Fish, Thing::Shark => CharId::Shark, Thing::Spider => CharId::Spider, Thing::Goblin => CharId::Goblin, Thing::SpittingTiger => CharId::SpittingTiger, Thing::BulletGun => CharId::NThinArrow, //TODO differentiate Thing::SpinningGun => CharId::NThinArrow, //TODO differentiate Thing::Bear => CharId::Bear, Thing::BearCub => CharId::BearCub, Thing::MissileGun => CharId::NThickArrow, //TODO differentiate Thing::Sensor => return sensors[param as usize - 1].ch, Thing::RobotPushable | Thing::Robot => //return robots[param as usize - 1].ch, return robots.get(param as usize - 1).map_or(0, |r| r.ch), //HACK Thing::Sign => CharId::Sign, Thing::Scroll => CharId::Scroll, Thing::Sprite => { warn!("no physical spites should exist"); CharId::Space } Thing::SpriteCollision => { warn!("no physical sprite collisions should exist"); CharId::Space } Thing::ImageFile => { warn!("no physical image files should exist"); CharId::Space } Thing::NoId => { warn!("no physical noid objects should exist"); CharId::Space } Thing::Player => match player_face_dir { 0 => CharId::PlayerNorth, 1 => CharId::PlayerSouth, 2 => CharId::PlayerEast, _ => CharId::PlayerWest, }, }; idchars[char_id.to_usize().unwrap()] }
#![allow(dead_code)] #![allow(unused_mut)] #![allow(unused_imports)] #![allow(unused_variables)] #[macro_use] extern crate log; extern crate wasm_bindgen; extern crate wasm_logger; extern crate specs; extern crate shrev; extern crate cgmath; pub mod client; mod app; mod event; mod input; mod render;
//! MemberExpression use serde::{Deserialize, Serialize}; /// Represents accessing a property of an object #[derive(Clone, Debug, PartialEq, Eq, Default, Serialize, Deserialize)] pub struct MemberExpression { /// Type of AST node #[serde(rename = "type", skip_serializing_if = "Option::is_none")] pub r#type: Option<String>, /// Member object #[serde(skip_serializing_if = "Option::is_none")] pub object: Option<crate::models::ast::Expression>, /// Member Property #[serde(skip_serializing_if = "Option::is_none")] pub property: Option<crate::models::ast::PropertyKey>, } impl MemberExpression { /// Represents accessing a property of an object pub fn new() -> Self { Self::default() } }
pub mod archive; pub mod entry; pub mod read; pub use crate::archive::*; pub use crate::entry::*;
use crate::{Block, Constant, Error, Value};
use hashbrown::HashMap;
use std::cell::{Cell, Ref, RefCell, RefMut};
use std::collections::BTreeMap;
use std::fmt;
use std::rc::Rc;

/// The identifier of a constant.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ConstId(usize);

impl fmt::Display for ConstId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Constants print as `C<n>` in IR dumps.
        write!(f, "C{}", self.0)
    }
}

/// A variable that can be used as block entries or temporaries.
/// Instructions typically produce and use vars.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Var(usize);

impl fmt::Display for Var {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Vars print as their bare number, no prefix.
        write!(f, "{}", self.0)
    }
}

/// The identifier for the static assignment.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct StaticId(usize);

impl fmt::Display for StaticId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Static assignments print as `v<n>`.
        write!(f, "v{}", self.0)
    }
}

/// The mutable payload shared between all clones of an `Assign`.
#[derive(Debug, Clone, Copy)]
struct SharedAssign {
    // The static assignment currently referenced.
    id: StaticId,
    // The block that owns the assignment.
    block: BlockId,
}

/// The descriptor of a single assignment.
///
/// This has a shared interior, because the exact value being assigned might be
/// re-assigned during construction. Like when an existing assignment is being
/// replaced.
#[derive(Debug, Clone)]
pub struct Assign {
    // `Rc<Cell<..>>` so that every clone observes a `replace` immediately.
    shared: Rc<Cell<SharedAssign>>,
}

impl Assign {
    /// Construct a new reference to a variable in a different block.
    #[inline]
    pub(crate) fn new(id: StaticId, block: BlockId) -> Self {
        Self {
            shared: Rc::new(Cell::new(SharedAssign { id, block })),
        }
    }

    /// Replace this assignment with another.
    pub(crate) fn replace(&self, other: &Self) {
        // Copies the other side's payload; all clones of `self` now see it.
        self.shared.set(other.shared.get());
    }

    /// Get the assigned id.
    pub(crate) fn id(&self) -> StaticId {
        self.shared.get().id
    }
}

impl fmt::Display for Assign {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let shared = self.shared.get();
        // Only the static id is displayed; the owning block is omitted.
        write!(f, "{}", shared.id)
    }
}

/// Identifier to a block.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct BlockId(usize);

impl fmt::Display for BlockId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Blocks print as `$<n>`.
        write!(f, "${}", self.0)
    }
}

/// Global construction state of the state machine.
///
/// Cheap to clone: all state lives behind a shared `Rc`.
#[derive(Clone, Default)]
pub(crate) struct Global {
    inner: Rc<GlobalInner>,
}

impl Global {
    /// Get the inner values.
    pub(crate) fn values(&self) -> Ref<'_, Values> {
        self.inner.values.borrow()
    }

    /// Get the inner values mutably.
    pub(crate) fn values_mut(&self) -> RefMut<'_, Values> {
        self.inner.values.borrow_mut()
    }

    /// Mark that the given block returns from the procedure.
    pub(crate) fn mark_return(&self, block_id: BlockId) {
        self.inner.returns.borrow_mut().push(block_id);
    }

    /// Allocate a global variable.
    pub(crate) fn var(&self) -> Var {
        // Simple bump allocator: ids are handed out sequentially.
        let id = self.inner.value.get();
        self.inner.value.set(id + 1);
        Var(id)
    }

    /// Allocate a static variable.
    pub(crate) fn static_id(&self) -> StaticId {
        let id = self.inner.statics.get();
        self.inner.statics.set(id + 1);
        StaticId(id)
    }

    /// Get accessor to underlying blocks.
    #[inline]
    pub(crate) fn blocks(&self) -> Blocks<'_> {
        Blocks {
            blocks: self.inner.blocks.borrow(),
        }
    }

    /// Allocate a block.
    pub(crate) fn block(&self, name: Option<Box<str>>) -> Block {
        // The next id is simply the current number of blocks.
        let id = BlockId(self.inner.blocks.borrow().len());
        let block = Block::new(id, self.clone(), name);
        self.inner.blocks.borrow_mut().push(block.clone());
        block
    }

    /// Allocate a constant.
    ///
    /// Constants are interned: allocating an equal constant twice yields the
    /// same `ConstId`.
    pub(crate) fn constant(&self, constant: Constant) -> ConstId {
        let mut constants = self.inner.constants.borrow_mut();

        return match constant {
            // `Constant::Unit` is pre-allocated at slot 0 by
            // `GlobalInner::default`, so it always maps there.
            Constant::Unit => ConstId(0),
            c => {
                // De-duplicate through the reverse map before pushing.
                let mut rev = self.inner.constants_rev.borrow_mut();

                if let Some(const_id) = rev.get(&c) {
                    return *const_id;
                }

                let const_id = ConstId(constants.len());
                rev.insert(c.clone(), const_id);
                constants.push(c);
                const_id
            }
        };
    }

    /// Access the collection of available constants.
    pub(crate) fn constants(&self) -> Ref<'_, [Constant]> {
        Ref::map(self.inner.constants.borrow(), |c| c.as_slice())
    }
}

/// Values keyed by their static assignment id.
#[derive(Default)]
pub(crate) struct Values {
    values: BTreeMap<StaticId, Value>,
}

impl Values {
    /// Remove the given value.
    pub(crate) fn remove(&mut self, id: StaticId) -> Option<Value> {
        self.values.remove(&id)
    }

    /// Insert the given value.
    pub(crate) fn insert(&mut self, id: StaticId, value: Value) {
        self.values.insert(id, value);
    }

    /// Get the value associated with the given id.
    pub(crate) fn get(&self, id: StaticId) -> Option<&Value> {
        self.values.get(&id)
    }

    /// Get a mutable reference to the value associated with the given id.
    pub(crate) fn get_mut(&mut self, id: StaticId) -> Option<&mut Value> {
        self.values.get_mut(&id)
    }
}

/// Inner state of the global.
struct GlobalInner {
    /// Variable allocator.
    value: Cell<usize>,
    /// Static assignment id allocator.
    statics: Cell<usize>,
    /// Block allocator.
    blocks: RefCell<Vec<Block>>,
    /// The values of constants.
    constants: RefCell<Vec<Constant>>,
    /// Constant strings that have already been allocated.
    constants_rev: RefCell<HashMap<Constant, ConstId>>,
    /// The ID of blocks that return.
    returns: RefCell<Vec<BlockId>>,
    /// Values associated with the block.
    pub(crate) values: RefCell<Values>,
}

impl Default for GlobalInner {
    fn default() -> Self {
        Self {
            value: Default::default(),
            statics: Default::default(),
            blocks: Default::default(),
            // Slot 0 is reserved for `Constant::Unit`; `Global::constant`
            // relies on this invariant.
            constants: RefCell::new(vec![Constant::Unit]),
            constants_rev: Default::default(),
            returns: RefCell::new(Vec::new()),
            values: RefCell::new(Values::default()),
        }
    }
}

/// Borrowed accessor over the allocated blocks.
pub(crate) struct Blocks<'a> {
    blocks: Ref<'a, Vec<Block>>,
}

impl Blocks<'_> {
    /// Get the block corresponding to the given id.
    pub(crate) fn get(&self, id: BlockId) -> Result<&Block, Error> {
        match self.blocks.get(id.0) {
            Some(block) => Ok(block),
            None => Err(Error::MissingBlock(id)),
        }
    }
}
use std::io::{Read}; use pest::{Parser}; use template::parse::{self, TemplateParser, Rule}; use template::{ComponentTemplate}; /// A style template, used to define default values and style classes for use in templates. #[derive(Debug)] pub struct Style { pub components: Vec<ComponentTemplate>, } impl Style { /// Parses a style from a reader, such as a `File`. pub fn from_reader<R: Read>(mut reader: R) -> Result<Self, String> { let mut text = String::new(); reader.read_to_string(&mut text).unwrap(); Self::from_str(&text) } /// Parses a style from a string. pub fn from_str(text: &str) -> Result<Self, String> { // Parse and extract the template pair let pairs = TemplateParser::parse(Rule::template, text) // This gives a pretty error to our caller .map_err(|e| format!("{}", e))?; let template_pair = pairs.into_iter().next().unwrap(); let document = parse::parse_document(template_pair)?; Ok(Style { components: document, }) } } #[cfg(test)] mod test { use template::{Style}; use {Value}; #[test] fn it_parses_multiple_roots() { let result = Style::from_str("root1\nroot2\n"); println!("Result: {:?}", result); assert!(result.is_ok()); let style = result.unwrap(); assert_eq!(style.components.len(), 2); assert_eq!(style.components[0].class, "root1"); assert_eq!(style.components[1].class, "root2"); } }
use crate::compiling::CompileVisitor; use crate::parsing::{Resolve, ResolveError}; use crate::query::Query; use crate::shared::Consts; use crate::{Diagnostics, Options, Storage, UnitBuilder}; use runestick::{Context, Location, Source, Span}; use std::rc::Rc; use std::sync::Arc; pub(crate) mod assemble; pub(crate) mod branches; pub(crate) mod scope; pub(crate) use self::assemble::{Assemble, AssembleFn}; pub(crate) use self::branches::Branches; #[allow(unused)] pub(crate) struct Compiler<'a> { /// Program being compiled. pub(crate) program: &'a mut rune_ssa::Program, /// The source id of the source. pub(crate) location: Location, /// The source we are compiling for. pub(crate) source: &'a Arc<Source>, /// The current scope stack. pub(crate) scope: scope::Stack, /// The current macro context. pub(crate) storage: &'a Storage, /// The context we are compiling for. pub(crate) context: &'a Context, /// Constants storage. pub(crate) consts: &'a Consts, /// Query system to compile required items. pub(crate) query: &'a Query, /// The compilation unit we are compiling for. pub(crate) unit: UnitBuilder, /// Context for which to emit warnings. pub(crate) contexts: Vec<Span>, /// Enabled optimizations. pub(crate) options: &'a Options, /// Compilation diagnostics. pub(crate) diagnostics: &'a mut Diagnostics, /// Compiler visitor. pub(crate) visitor: Rc<dyn CompileVisitor>, } impl<'a> Compiler<'a> { /// Resolve the given value. pub(crate) fn resolve<T>(&self, value: &T) -> Result<T::Output, ResolveError> where T: Resolve<'a>, { value.resolve(&*self.storage, self.source) } }
pub mod target; pub mod triangles;
/// Behaviour shared by anything that can report how long its phase lasts.
trait LightTime {
    /// Duration of the light phase, in seconds.
    fn time(&self) -> u8;
}

/// The three phases of a standard traffic light.
enum TrafficLight {
    Red,
    Green,
    Yellow,
}

impl LightTime for TrafficLight {
    fn time(&self) -> u8 {
        // Return the match result directly; no temporary binding needed.
        match self {
            TrafficLight::Red => 60,
            TrafficLight::Green => 75,
            TrafficLight::Yellow => 3,
        }
    }
}

fn main() {
    let yellow = TrafficLight::Yellow;
    let red = TrafficLight::Red;
    let green = TrafficLight::Green;

    println!("The Yellow time is {} !", yellow.time());
    println!("The Red time is {} !", red.time());
    println!("The Green time is {} !", green.time());
}
pub mod mesos;
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use error::Error as StdError; use ffi::{CString, CStr, OsString, OsStr}; use fmt; use io; use iter; use libc::{self, c_int, c_char}; use path::{self, PathBuf}; use slice; use str; use sys::{unsupported, Void}; use sys::horizon::ext::ffi::{OsStrExt, OsStringExt}; const TMPBUF_SZ: usize = 128; extern "C" { fn __errno() -> *mut c_int; } /// Returns the platform-specific value of errno pub fn errno() -> i32 { unsafe { (*__errno()) as i32 } } pub fn set_errno(e: i32) { unsafe { (*__errno()) = e; } } /// Gets a detailed string description for the given error number. 
pub fn error_string(errno: i32) -> String {
    extern {
        // glibc and newlib name the POSIX-conformant (int-returning) variant
        // `__xpg_strerror_r`; bind to it explicitly on those targets.
        #[cfg_attr(any(target_os = "linux", target_env = "newlib"),
                   link_name = "__xpg_strerror_r")]
        fn strerror_r(errnum: c_int, buf: *mut c_char,
                      buflen: libc::size_t) -> c_int;
    }

    let mut buf = [0 as c_char; TMPBUF_SZ];

    let p = buf.as_mut_ptr();
    unsafe {
        if strerror_r(errno as c_int, p, buf.len() as usize) < 0 {
            panic!("strerror_r failure");
        }

        let p = p as *const _;
        // strerror_r NUL-terminates on success; messages are assumed to be
        // valid UTF-8, hence the unwrap.
        str::from_utf8(CStr::from_ptr(p).to_bytes()).unwrap().to_owned()
    }
}

/// Returns the current working directory, growing the buffer until
/// `getcwd(3)` stops failing with ERANGE.
pub fn getcwd() -> io::Result<PathBuf> {
    // Byte buffer handed to libc; starts at 512 and doubles via `reserve`.
    let mut buf = Vec::with_capacity(512);
    loop {
        unsafe {
            let ptr = buf.as_mut_ptr() as *mut libc::c_char;
            if !libc::getcwd(ptr, buf.capacity()).is_null() {
                // Success: trim the Vec to the NUL-terminated length written
                // by libc before converting to an OsString.
                let len = CStr::from_ptr(buf.as_ptr()).to_bytes().len();
                buf.set_len(len);
                buf.shrink_to_fit();
                return Ok(PathBuf::from(OsString::from_vec(buf)));
            } else {
                let error = io::Error::last_os_error();
                if error.raw_os_error() != Some(libc::ERANGE) {
                    return Err(error);
                }
            }
            // Trigger the internal buffer resizing logic of `Vec` by requiring
            // more space than the current capacity.
            let cap = buf.capacity();
            buf.set_len(cap);
            buf.reserve(1);
        }
    }
}

/// Changes the current working directory to `p`.
pub fn chdir(p: &path::Path) -> io::Result<()> {
    let p: &OsStr = p.as_ref();
    // NUL-terminate the path; interior NULs surface as an InvalidInput error.
    let p = CString::new(p.as_bytes())?;
    unsafe {
        // NOTE(review): cast to `*const u8` assumes c_char == u8 on this
        // (newlib) target — confirm against the libc binding.
        match libc::chdir(p.as_ptr() as *const u8) == (0 as c_int) {
            true => Ok(()),
            false => Err(io::Error::last_os_error()),
        }
    }
}

/// Iterator over the `:`-separated entries of a PATH-style variable.
pub struct SplitPaths<'a> {
    iter: iter::Map<slice::Split<'a, u8, fn(&u8) -> bool>,
                    fn(&'a [u8]) -> PathBuf>,
}

/// Splits `unparsed` on `:` into `PathBuf`s (Unix PATH convention).
pub fn split_paths(unparsed: &OsStr) -> SplitPaths {
    fn bytes_to_path(b: &[u8]) -> PathBuf {
        PathBuf::from(<OsStr as OsStrExt>::from_bytes(b))
    }
    fn is_colon(b: &u8) -> bool { *b == b':' }
    let unparsed = unparsed.as_bytes();
    SplitPaths {
        // Plain fn pointers so the field type above can be named.
        iter: unparsed.split(is_colon as fn(&u8) -> bool)
                      .map(bytes_to_path as fn(&[u8]) -> PathBuf)
    }
}

impl<'a> Iterator for SplitPaths<'a> {
    type Item = PathBuf;
    fn next(&mut self) -> Option<PathBuf> { self.iter.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}

/// Error returned by `join_paths` when a segment contains the separator.
#[derive(Debug)]
pub struct JoinPathsError;

/// Joins paths with `:`; fails if any segment itself contains a `:`.
pub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>
    where I: Iterator<Item=T>, T: AsRef<OsStr>
{
    let mut joined = Vec::new();
    let sep = b':';

    for (i, path) in paths.enumerate() {
        let path = path.as_ref().as_bytes();
        if i > 0 { joined.push(sep) }
        if path.contains(&sep) {
            return Err(JoinPathsError)
        }
        joined.extend_from_slice(path);
    }
    Ok(OsStringExt::from_vec(joined))
}

impl fmt::Display for JoinPathsError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        "path segment contains separator `:`".fmt(f)
    }
}

impl StdError for JoinPathsError {
    fn description(&self) -> &str { "failed to join paths" }
}

/// Not implemented on this target.
pub fn current_exe() -> io::Result<PathBuf> {
    unsupported()
}

// Uninhabited: there is no environment iteration on this target, so this
// iterator can never be constructed (`Void` has no values).
pub struct Env(Void);

impl Iterator for Env {
    type Item = (OsString, OsString);
    // Matching on the uninhabited field proves this is unreachable.
    fn next(&mut self) -> Option<(OsString, OsString)> { match self.0 {} }
}

/// Environment enumeration is unsupported; always panics.
pub fn env() -> Env {
    panic!("not supported on 3DS yet")
}

/// Environment lookup is unsupported; every variable reads as unset.
pub fn getenv(_k: &OsStr) -> io::Result<Option<OsString>> {
    return Ok(None)
}

/// Setting environment variables is unsupported on this target.
pub fn setenv(_k: &OsStr, _v: &OsStr) -> io::Result<()> {
    unsupported()
}

/// Unsetting environment variables is unsupported on this target.
pub fn unsetenv(_n: &OsStr) -> io::Result<()> {
    unsupported()
}

/// Fixed temp-dir location for this platform.
pub fn temp_dir() -> PathBuf {
    PathBuf::from("/tmp")
}

/// No notion of a home directory on this target.
pub fn home_dir() -> Option<PathBuf> {
    None
}

/// Terminates the process via libc with the given status code.
pub fn exit(code: i32) -> ! {
    unsafe { libc::exit(code as c_int) }
}

/// Process IDs do not exist on this target; always panics.
pub fn getpid() -> u32 {
    panic!("no pids on 3DS")
}
// Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use failure::{format_err, Error}; use fidl::endpoints::{DiscoverableService, Proxy, ServiceMarker}; use fuchsia_async as fasync; use fuchsia_component::client::connect_to_service; use fuchsia_zircon as zx; pub type GenerateService = Box<dyn Fn(&str, zx::Channel) -> Result<(), Error> + Send + Sync>; /// A wrapper around service operations, allowing redirection to a nested /// environment. pub struct ServiceContext { generate_service: Option<GenerateService>, } impl ServiceContext { pub fn new(generate_service: Option<GenerateService>) -> Self { Self { generate_service: generate_service } } pub fn connect<S: DiscoverableService>(&self) -> Result<S::Proxy, Error> { if let Some(generate_service) = &self.generate_service { let (client, server) = zx::Channel::create()?; (generate_service)(S::SERVICE_NAME, server)?; return Ok(S::Proxy::from_channel(fasync::Channel::from_channel(client)?)); } else { return connect_to_service::<S>(); } } pub fn connect_named<S: ServiceMarker>(&self, service_name: &str) -> Result<S::Proxy, Error> { if let Some(generate_service) = &self.generate_service { let (client, server) = zx::Channel::create()?; (generate_service)(service_name, server)?; Ok(S::Proxy::from_channel(fasync::Channel::from_channel(client)?)) } else { Err(format_err!("No service generator")) } } }
use objects::Player;
use std::rc::Rc;
use traits::*;
use na::{Point2,Vector2};

/// A tank game object: an owning player plus movement and collision state.
#[derive(Clone,PartialEq,Debug)]
pub struct Tank {
    // Shared (single-threaded, refcounted) handle to the owning player.
    owner : Rc<Player>,
    // Movement state consumed by the `make_moveable!` expansion below.
    mov : MoveableInternal,
    // Extent used by the `make_collidable!` expansion below.
    size: Vector2<f32>,
}

// NOTE(review): these macros presumably implement the corresponding traits
// from `traits` in terms of the named fields — confirm at their definitions.
make_moveable!(Tank,mov);
make_collidable!(Tank,size);
// My first Hello World — single-line comment.
fn main() {
    // Initial greeting.
    println!("Ya estoy aqui perros!");

    /*
       Multi-line commentary: bind the value that the original computed
       inline as `5 + 5`.
    */
    let addend = 5;
    let x = addend + 5;

    println!("Hi, bitches i'm here!");
    println!("What's the number x? x = {}", x);
}
// svd2rust-generated read/write accessors for the ADC analog-watchdog
// threshold register TR1. Do not hand-edit field masks; they mirror the SVD.
#[doc = "Reader of register TR1"]
pub type R = crate::R<u32, super::TR1>;
#[doc = "Writer for register TR1"]
pub type W = crate::W<u32, super::TR1>;
#[doc = "Register TR1 `reset()`'s with value 0x0fff_0000"]
impl crate::ResetValue for super::TR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // HT1 resets to all-ones (0x0fff in bits 16:27), LT1 to zero.
        0x0fff_0000
    }
}
#[doc = "Reader of field `HT1`"]
pub type HT1_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `HT1`"]
pub struct HT1_W<'a> {
    w: &'a mut W,
}
impl<'a> HT1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u16) -> &'a mut W {
        // 12-bit field at bit offset 16; excess value bits are masked off.
        self.w.bits = (self.w.bits & !(0x0fff << 16)) | (((value as u32) & 0x0fff) << 16);
        self.w
    }
}
#[doc = "Reader of field `AWDFILT`"]
pub type AWDFILT_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `AWDFILT`"]
pub struct AWDFILT_W<'a> {
    w: &'a mut W,
}
impl<'a> AWDFILT_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // NOTE(review): only this proxy's `bits` is `unsafe` while HT1/LT1 are
    // safe — generated asymmetry; confirm against the SVD's enumerated values.
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at bit offset 12.
        self.w.bits = (self.w.bits & !(0x07 << 12)) | (((value as u32) & 0x07) << 12);
        self.w
    }
}
#[doc = "Reader of field `LT1`"]
pub type LT1_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `LT1`"]
pub struct LT1_W<'a> {
    w: &'a mut W,
}
impl<'a> LT1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u16) -> &'a mut W {
        // 12-bit field at bit offset 0.
        self.w.bits = (self.w.bits & !0x0fff) | ((value as u32) & 0x0fff);
        self.w
    }
}
impl R {
    #[doc = "Bits 16:27 - Analog watchdog 1 higher threshold"]
    #[inline(always)]
    pub fn ht1(&self) -> HT1_R {
        HT1_R::new(((self.bits >> 16) & 0x0fff) as u16)
    }
    #[doc = "Bits 12:14 - Analog watchdog filtering parameter"]
    #[inline(always)]
    pub fn awdfilt(&self) -> AWDFILT_R {
        AWDFILT_R::new(((self.bits >> 12) & 0x07) as u8)
    }
    #[doc = "Bits 0:11 - Analog watchdog 1 lower threshold"]
    #[inline(always)]
    pub fn lt1(&self) -> LT1_R {
        LT1_R::new((self.bits & 0x0fff) as u16)
    }
}
impl W {
    #[doc = "Bits 16:27 - Analog watchdog 1 higher threshold"]
    #[inline(always)]
    pub fn ht1(&mut self) -> HT1_W {
        HT1_W { w: self }
    }
    #[doc = "Bits 12:14 - Analog watchdog filtering parameter"]
    #[inline(always)]
    pub fn awdfilt(&mut self) -> AWDFILT_W {
        AWDFILT_W { w: self }
    }
    #[doc = "Bits 0:11 - Analog watchdog 1 lower threshold"]
    #[inline(always)]
    pub fn lt1(&mut self) -> LT1_W {
        LT1_W { w: self }
    }
}
use std::collections::HashMap;

use actix::Addr;
use actix_identity::Identity;
use actix_web::{
    web::{block, Data, Json},
    HttpResponse,
};
use serde::{Deserialize, Serialize};

use auth::{get_claim_from_identity, PrivateClaim, Role};
use db::{
    get_conn,
    models::{Round, User, UserQuestion},
    Connection, PgPool,
};
use errors::Error;

use crate::websocket::{client_messages, Server};

/// One submitted answer, matched against a stored `UserQuestion` answer.
#[derive(Deserialize, Serialize)]
pub struct Answer {
    answer: String,
    question_id: i32,
}

/// Request body for scoring a round.
#[derive(Deserialize, Serialize)]
pub struct Params {
    pub answers: Vec<Answer>,
}

/// Scores the game's current unfinished round: +1 per exactly-matching
/// answer per user, persists the scores, marks the round finished, then
/// broadcasts game- and round-status over the websocket server.
///
/// # Errors
/// `Error::Forbidden` unless the caller's claim has the `Owner` role;
/// otherwise propagates database/blocking errors.
pub async fn score_round(
    id: Identity,
    websocket_srv: Data<Addr<Server>>,
    pool: Data<PgPool>,
    params: Json<Params>,
) -> Result<HttpResponse, Error> {
    let (claim, _) = get_claim_from_identity(id)?;
    if claim.role != Role::Owner {
        return Err(Error::Forbidden);
    }
    let conn = get_conn(&pool)?;
    // Diesel is blocking, so the DB work runs on actix's blocking pool.
    let res: Result<(Connection, PrivateClaim), Error> = block(move || {
        let round = Round::get_unfinished_round_by_game_id(&conn, claim.game_id)?;
        let user_questions = UserQuestion::find_by_round(&conn, round.id)?;

        // user_id -> number of correct answers this round.
        let mut scores: HashMap<i32, i32> = HashMap::new();
        for uq in &user_questions {
            for answer in &params.answers {
                if answer.question_id == uq.question_id && answer.answer == uq.answer {
                    // Single lookup via the entry API (was a separate
                    // `get().unwrap_or(&0)` followed by `insert`).
                    *scores.entry(uq.user_id).or_insert(0) += 1;
                }
            }
        }

        for (user_id, amount) in &scores {
            User::add_score(&conn, *user_id, *amount)?;
        }

        Round::finish(&conn, round.id)?;

        Ok((conn, claim))
    })
    .await?;

    let (conn, claim) = res?;

    client_messages::send_game_status(&websocket_srv, conn, claim.game_id).await;
    // `conn` was consumed by the previous broadcast; get a fresh one.
    let conn = get_conn(&pool)?;
    client_messages::send_round_status(&websocket_srv, conn, claim.role, claim.id, claim.game_id)
        .await;

    Ok(HttpResponse::Ok().json(()))
}

#[cfg(test)]
mod tests {
    use actix_web_actors::ws;
    use awc::Client;
    use diesel::{self, ExpressionMethods, PgConnection, QueryDsl, RunQueryDsl};
    use futures::{SinkExt, StreamExt};

    use db::{
        get_conn,
        models::{Game, NewUserQuestion,
Question, Round, User},
        new_pool,
        schema::{games, questions as questions_dsl, rounds, user_questions, users},
    };

    use auth::{create_jwt, PrivateClaim, Role};
    use errors::ErrorResponse;

    use crate::handlers::{RoundStatusRepsonse, StatusResponse};
    use crate::tests::helpers::tests::{get_test_server, get_websocket_frame_data, test_post};

    use super::{Answer, Params};

    // Minimal insertable rows for fixture data (only the columns the
    // scoring path reads).
    #[derive(Insertable)]
    #[table_name = "games"]
    struct NewGame {
        slug: Option<String>,
    }

    #[derive(Insertable)]
    #[table_name = "rounds"]
    pub struct NewRoundWithFlags {
        pub player_one: String,
        pub player_two: String,
        pub game_id: i32,
        pub locked: bool,
        pub finished: bool,
    }

    #[derive(Insertable)]
    #[table_name = "users"]
    pub struct NewUser {
        pub user_name: String,
        pub game_id: i32,
        pub score: i32,
    }

    // Seeds one game, two questions, a locked-but-unfinished round, one user
    // (score 4), and one matching answer ("one") per question for that user.
    fn create_data(conn: &PgConnection) -> (Game, Vec<Question>, Round, User) {
        let game: Game = diesel::insert_into(games::table)
            .values(NewGame {
                slug: Some("abc123".to_string()),
            })
            .get_result(conn)
            .unwrap();
        let questions: Vec<Question> = diesel::insert_into(questions_dsl::table)
            .values(&vec![
                questions_dsl::body.eq("One question".to_string()),
                questions_dsl::body.eq("Second question".to_string()),
            ])
            .get_results(conn)
            .unwrap();
        let round: Round = diesel::insert_into(rounds::table)
            .values(NewRoundWithFlags {
                player_one: "one".to_string(),
                player_two: "two".to_string(),
                game_id: game.id,
                locked: true,
                finished: false,
            })
            .get_result(conn)
            .unwrap();
        let user: User = diesel::insert_into(users::table)
            .values(NewUser {
                user_name: "agmcleod".to_string(),
                game_id: game.id,
                score: 4,
            })
            .get_result(conn)
            .unwrap();

        diesel::insert_into(user_questions::table)
            .values(vec![
                NewUserQuestion {
                    question_id: questions[0].id,
                    round_id: round.id,
                    answer: "one".to_string(),
                    user_id: user.id,
                },
                NewUserQuestion {
                    question_id: questions[1].id,
                    round_id: round.id,
                    answer: "one".to_string(),
                    user_id: user.id,
                },
            ])
            .execute(conn)
            .unwrap();

        (game, questions, round, user)
    }

    // Removes all fixture rows; ordered children-first for FK constraints.
    fn delete_data(conn: &PgConnection) {
        diesel::delete(user_questions::table).execute(conn).unwrap();
        diesel::delete(rounds::table).execute(conn).unwrap();
        diesel::delete(users::table).execute(conn).unwrap();
        diesel::delete(games::table).execute(conn).unwrap();
        diesel::delete(questions_dsl::table).execute(conn).unwrap();
    }

    // Happy path: one of the two submitted answers matches, so the user's
    // score goes 4 -> 5, the round finishes, and both websocket broadcasts
    // arrive after authenticating the socket.
    #[actix_rt::test]
    async fn test_scoring_round_sums_amounts() {
        let pool = new_pool();
        let conn = get_conn(&pool).unwrap();
        let (game, questions, round, user) = create_data(&conn);
        let claim = PrivateClaim::new(game.id, game.slug.unwrap().clone(), game.id, Role::Owner);

        let srv = get_test_server();

        let client = Client::default();
        let mut ws_conn = client.ws(srv.url("/ws/")).connect().await.unwrap();
        let token = create_jwt(claim).unwrap();

        // Authenticate the websocket so it receives the status broadcasts.
        ws_conn
            .1
            .send(ws::Message::Text(
                format!("/auth {{\"token\":\"{}\"}}", token).into(),
            ))
            .await
            .unwrap();

        let res = srv
            .post("/api/rounds/score")
            .append_header(("Authorization", token))
            .send_json(&Params {
                answers: vec![
                    Answer {
                        answer: "one".to_string(),
                        question_id: questions[0].id,
                    },
                    Answer {
                        answer: "two".to_string(),
                        question_id: questions[1].id,
                    },
                ],
            })
            .await
            .unwrap();

        assert_eq!(res.status().as_u16(), 200);

        let mut stream = ws_conn.1.take(3);

        // skip the first one, as it's a heartbeat
        stream.next().await;

        let msg = stream.next().await;
        let data = get_websocket_frame_data(msg.unwrap().unwrap());
        if data.is_some() {
            let msg = data.unwrap();
            assert_eq!(msg.path, "/game-status");
            assert_eq!(msg.game_id, game.id);
            let game_status: StatusResponse = serde_json::from_value(msg.data).unwrap();
            // round is locked and is now finished
            assert_eq!(game_status.open_round, false);
            assert_eq!(game_status.unfinished_round, false);
            assert_eq!(game_status.slug, "abc123");
        } else {
            assert!(false, "Message was not a string");
        }

        let msg = stream.next().await;
        let data = get_websocket_frame_data(msg.unwrap().unwrap());
        if data.is_some() {
            let msg = data.unwrap();
            assert_eq!(msg.path, "/round-status");
            assert_eq!(msg.game_id, game.id);
            let round_status: RoundStatusRepsonse = serde_json::from_value(msg.data).unwrap();
            assert_eq!(round_status.locked, true);
            assert_eq!(round_status.finished, true);
            assert_eq!(round_status.picks_chosen, false);
        } else {
            assert!(false, "Message was not a string");
        }

        drop(stream);

        srv.stop().await;

        // Persisted side effects: score bumped by one, round finished.
        let updated_user: User = users::dsl::users.find(user.id).first(&conn).unwrap();
        assert_eq!(updated_user.score, 5);
        let updated_round: Round = rounds::dsl::rounds.find(round.id).first(&conn).unwrap();
        assert_eq!(updated_round.finished, true);

        delete_data(&conn);
    }

    // Scoring a game whose only round is already finished should 404
    // (no unfinished round to look up).
    #[actix_rt::test]
    async fn test_scoring_finished_rounds_returns_404() {
        let pool = new_pool();
        let conn = get_conn(&pool).unwrap();
        let (game, questions, round, _) = create_data(&conn);

        diesel::update(rounds::dsl::rounds.find(round.id))
            .set(rounds::dsl::finished.eq(true))
            .execute(&conn)
            .unwrap();

        let claim = PrivateClaim::new(game.id, game.slug.unwrap().clone(), game.id, Role::Owner);

        let (status, _): (u16, ErrorResponse) = test_post(
            "/api/rounds/score",
            Params {
                answers: vec![
                    Answer {
                        answer: "one".to_string(),
                        question_id: questions[0].id,
                    },
                    Answer {
                        answer: "two".to_string(),
                        question_id: questions[1].id,
                    },
                ],
            },
            Some(create_jwt(claim).unwrap()),
        )
        .await;

        assert_eq!(status, 404);

        delete_data(&conn);
    }
}
// Advent of Code 2016 day 8: simulate a 50x6 pixel display driven by
// rect/rotate instructions, then count lit pixels.
#![feature(field_init_shorthand)]
use std::io::{BufReader, BufRead};
use std::fs::File;
use std::collections::HashSet;
use std::vec::Vec;
use std::fmt::Debug;

// The display is modelled as the set of lit (x, y) coordinates.
type Screen = HashSet<(u8, u8)>;

enum Operation {
    // Light the a-by-b rectangle in the top-left corner.
    Fill {a: u8, b: u8},
    // Shift a row right by `shift`, wrapping at width 50.
    RowRotate {row: u8, shift: u8},
    // Shift a column down by `shift`, wrapping at height 6.
    ColumnRotate {column: u8, shift: u8}
}

// Renders the screen to stdout as '#'/'.' characters.
// Coordinates with y > 5 would index out of bounds; they are printed for
// debugging before the (panicking) array write.
fn print_display(display: &Screen) {
    let mut output = [['.';50]; 6];
    for &(x, y) in display.iter() {
        if y > 5 {
            println!("{:?}", (x,y));
        }
        output[y as usize][x as usize] = '#';
    }
    for line in output.iter() {
        for pixel in line.iter() {
            print!("{}", pixel);
        }
        println!("");
    }
}

// Reads instructions from the file `input`, silently skipping unparsable
// lines, applies them to an empty screen, and prints the result plus the
// lit-pixel count (the puzzle answer).
fn main() {
    let input = File::open("input").expect("oeuaeouoeauoeuaeo");
    let mut lined_input = BufReader::new(input).lines().peekable();
    let mut operations : Vec<Operation> = vec![];
    while let Some(line) = lined_input.next() {
        if let Ok(operation) = parse_instruction(&line.expect("instruction")) {
            operations.push(operation);
        }
    }
    let display : Screen = HashSet::new();
    let message = execute_instructions(&operations, &display);
    print_display(&message);
    println!("{}", message.len());
    //let lit_pixels = count_lit_pixels(&message);
}

// Dispatches on the instruction keyword; slice offsets strip the keyword
// plus trailing space. NOTE(review): panics (via slicing) on short inputs.
fn parse_instruction(instruction: &str) -> Result<Operation, &str> {
    if instruction.starts_with("rotate") {
        // "rotate column x=3 by 2" => "column x=3 by 2"
        parse_rotate(&instruction[7..])
    } else if instruction.starts_with("rect") {
        // "rect 3x3" => "3x3"
        parse_rect(&instruction[5..])
    } else {
        Err("not a proper instruction")
    }
}

// Parses "column x=N by M" / "row y=N by M" into a rotate operation.
// NOTE(review): malformed numbers panic via `expect` rather than returning
// the Err this function's signature suggests.
fn parse_rotate(instruction: &str) -> Result<Operation, &str> {
    if instruction.starts_with("column") {
        let column;
        let shift;
        // "column x=5 by 3" => "5 by 3"
        let details = &instruction[9..];
        let mut split_details = details.split(" by ");
        if let Some(column_str) = split_details.next() {
            column = column_str.parse().expect("column");
        } else {
            return Err("not a proper instruction")
        }
        if let Some(shift_str) = split_details.next() {
            shift = shift_str.parse().expect("shift");
        } else {
            return Err("not a proper instruction")
        }
        Ok(Operation::ColumnRotate{column, shift})
    } else if instruction.starts_with("row") {
        let row;
        let shift;
        // "row y=5 by 3" => "5 by 3"
        let details = &instruction[6..];
        let mut split_details = details.split(" by ");
        if let Some(row_str) = split_details.next() {
            row = row_str.parse().expect("row");
        } else {
            return Err("not a proper instruction")
        }
        if let Some(shift_str) = split_details.next() {
            shift = shift_str.parse().expect("shift");
        } else {
            return Err("not a proper instruction")
        }
        Ok(Operation::RowRotate{row, shift})
    } else {
        Err("not a proper instruction")
    }
}

// Parses "AxB" into a Fill operation; numbers panic via `expect` when
// malformed (see note on parse_rotate).
fn parse_rect(instruction: &str) -> Result<Operation, &str> {
    let a;
    let b;
    let mut split = instruction.split("x");
    if let Some(a_str) = split.next() {
        a = a_str.parse().expect("A");
    } else {
        return Err("not a proper instruction")
    }
    if let Some(b_str) = split.next() {
        b = b_str.parse().expect("B");
    } else {
        return Err("not a proper instruction")
    }
    Ok(Operation::Fill{a, b})
}

// Folds the instruction list over an (immutably borrowed) starting screen,
// producing a fresh Screen per step. Fill additionally logs its arguments.
fn execute_instructions(instructions: &Vec<Operation>, display: &Screen) -> Screen {
    let mut display : Screen = display.clone();
    for instruction in instructions {
        display = match instruction {
            &Operation::Fill{a, b} => {println!("{}, {}",a,b); get_rect(&display, a, b) },
            &Operation::ColumnRotate{column, shift} => rotate_column(&display, column, shift),
            &Operation::RowRotate{row, shift} => rotate_row(&display, row, shift),
        };
    }
    display
}

// Returns a copy of the screen with the a-wide, b-tall top-left rectangle lit.
fn get_rect(prev_display: &Screen, a: u8, b: u8) -> Screen {
    let mut display = prev_display.clone();
    for x in 0..a {
        for y in 0..b {
            display.insert((x,y));
        }
    }
    display
}

// Returns a new screen with column `column` shifted down by `shift`,
// wrapping at the display height of 6.
// NOTE(review): `y + shift` is u8 arithmetic — overflows (panics in debug)
// for shift values near 255; puzzle inputs stay small.
fn rotate_column(prev_display: &Screen, column: u8, shift: u8) -> Screen {
    let mut display = Screen::new();
    for prev_pixel_pos in prev_display.iter() {
        let pixel_pos = match prev_pixel_pos {
            &(x, y) if x == column => (x, (y+shift)%6),
            _ => *prev_pixel_pos,
        };
        display.insert(pixel_pos);
    }
    display
}

// Returns a new screen with row `row` shifted right by `shift`,
// wrapping at the display width of 50 (same overflow caveat as above).
fn rotate_row(prev_display: &Screen, row: u8, shift: u8) -> Screen {
    let mut display = Screen::new();
    for prev_pixel_pos in
prev_display.iter() {
        let pixel_pos = match prev_pixel_pos {
            &(x, y) if y == row => ((x+shift)%50, y),
            _ => *prev_pixel_pos,
        };
        display.insert(pixel_pos);
    }
    display
}

#[cfg(test)]
mod tests {
    use super::Screen;
    use std::collections::HashSet;
    use super::print_display;

    // Table-driven: (width, height, expected lit pixels). Screens are
    // compared via symmetric_difference so extra/missing pixels both fail.
    #[test]
    fn get_rect() {
        let testcases = [
            (2,2, vec![(0,0), (0,1),(1,0), (1,1)]),
            (0,0, vec![]),
        ];
        for case in testcases.iter() {
            println!("Testing {}x{}:", case.0, case.1);
            let mut expected_display = Screen::new();
            for pixel in case.2.iter() {
                expected_display.insert(*pixel);
            }
            let display = super::get_rect(&Screen::new(), case.0, case.1);
            println!("expected:");
            print_display(&expected_display);
            println!("got display:");
            print_display(&display);
            assert_eq!(display.symmetric_difference(&expected_display).count(), 0);
        }
    }

    // (column, shift, start pixels, expected pixels): only column 0 moves,
    // wrapping modulo the height of 6.
    #[test]
    fn rotate_column() {
        let testcases = [
            (0,2, vec![(0,0), (0,1),(1,0), (1,1)],vec![(0,2), (0,3),(1,0), (1,1)]),
        ];
        for case in testcases.iter() {
            println!("Testing {}x{}:", case.0, case.1);
            let mut start_display = Screen::new();
            for pixel in case.2.iter() {
                start_display.insert(*pixel);
            }
            let mut expected_display = Screen::new();
            for pixel in case.3.iter() {
                expected_display.insert(*pixel);
            }
            let display = super::rotate_column(&start_display, case.0, case.1);
            println!("expected:");
            print_display(&expected_display);
            println!("got display:");
            print_display(&display);
            assert_eq!(display.symmetric_difference(&expected_display).count(), 0);
        }
    }

    // (row, shift, start pixels, expected pixels): only row 0 moves,
    // wrapping modulo the width of 50.
    #[test]
    fn rotate_row() {
        let testcases = [
            (0,2, vec![(0,0), (0,1),(1,0), (1,1)],vec![(2,0), (0,1),(3,0), (1,1)]),
        ];
        for case in testcases.iter() {
            println!("Testing {}x{}:", case.0, case.1);
            let mut start_display = Screen::new();
            for pixel in case.2.iter() {
                start_display.insert(*pixel);
            }
            let mut expected_display = Screen::new();
            for pixel in case.3.iter() {
                expected_display.insert(*pixel);
            }
            let display = super::rotate_row(&start_display, case.0, case.1);
            println!("expected:");
            print_display(&expected_display);
            println!("got display:");
            print_display(&display);
            assert_eq!(display.symmetric_difference(&expected_display).count(), 0);
        }
    }
}
// Public submodule declarations.
// NOTE(review): submodule contents are not visible from this file.
pub mod peer;
pub mod server;
pub mod streaming;
/* Rust core lib에 들어가 있는 clone 코드임. #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> Clone for Rc<T> { /// Makes a clone of the `Rc` pointer. /// /// This creates another pointer to the same allocation, increasing the /// strong reference count. /// /// # Examples /// /// ``` /// use std::rc::Rc; /// /// let five = Rc::new(5); /// /// let _ = Rc::clone(&five); /// ``` #[inline] fn clone(&self) -> Rc<T> { self.inner().inc_strong(); Self::from_inner(self.ptr) } } */ enum List { Cons(i32, Rc<List>), Nil, } use List::{Cons, Nil}; use std::rc::Rc; fn main() { let mut a = Rc::new(Cons(5, Rc::new(Cons(10, Rc::new(Nil))))); println!("count after creating a = {}", Rc::strong_count(&a)); let b = Cons(3, Rc::clone(&a)); println!("count after creating b = {}", Rc::strong_count(&a)); drop(b); { let c = Cons(4, Rc::clone(&a)); println!("count after creating c = {}", Rc::strong_count(&a)); } println!("count after c goes out of scope = {}", Rc::strong_count(&a)); }