text stringlengths 8 4.13M |
|---|
// Copyright (c) The diem-devtools Contributors
// SPDX-License-Identifier: MIT OR Apache-2.0
//! Metadata management.
use crate::{
reporter::TestEvent,
runner::{RunDescribe, TestRunStatus, TestStatus},
test_list::TestInstance,
};
use anyhow::{Context, Result};
use camino::{Utf8Path, Utf8PathBuf};
use chrono::{DateTime, FixedOffset, Utc};
use nextest_config::{MetadataConfig, NextestProfile};
use quick_junit::{NonSuccessKind, Report, TestRerun, Testcase, TestcaseStatus, Testsuite};
use std::{collections::HashMap, fs::File, time::SystemTime};
/// Accumulates per-binary test results during a run and writes them out
/// (currently as JUnit XML) when the run finishes.
#[derive(Clone, Debug)]
pub(crate) struct MetadataReporter<'a> {
    // Workspace root; output paths in the config are resolved against it.
    workspace_root: &'a Utf8Path,
    // Profile name, used as the name of the JUnit report.
    name: &'a str,
    config: &'a MetadataConfig,
    // One Testsuite per test binary, keyed by the binary's id.
    testsuites: HashMap<&'a str, Testsuite>,
}
impl<'a> MetadataReporter<'a> {
    /// Creates a reporter rooted at `workspace_root`, taking its name and
    /// metadata configuration from the given profile.
    pub(crate) fn new(workspace_root: &'a Utf8Path, profile: NextestProfile<'a>) -> Self {
        Self {
            workspace_root,
            name: profile.name(),
            config: profile.metadata_config(),
            testsuites: HashMap::new(),
        }
    }

    /// Records a single test event.
    ///
    /// Finished tests are accumulated into per-binary testsuites; when the
    /// `RunFinished` event arrives, the collected suites are drained and
    /// serialized to the configured JUnit file (if any).
    pub(crate) fn write_event(&mut self, event: TestEvent<'a>) -> Result<()> {
        match event {
            TestEvent::RunStarted { .. } => {}
            TestEvent::TestStarted { .. } => {}
            TestEvent::TestRetry { .. } => {
                // Retries are recorded in TestFinished.
            }
            TestEvent::TestFinished {
                test_instance,
                run_statuses,
            } => {
                // Maps a failed run status to the JUnit non-success kind and
                // a human-readable "type" string. Must only be called for
                // failure statuses.
                fn kind_ty(run_status: &TestRunStatus) -> (NonSuccessKind, &'static str) {
                    match run_status.status {
                        TestStatus::Fail => (NonSuccessKind::Failure, "test failure"),
                        TestStatus::ExecFail => (NonSuccessKind::Error, "execution failure"),
                        TestStatus::Pass => unreachable!("this is a failure status"),
                    }
                }

                let testsuite = self.testsuite_for(test_instance);

                // Decompose the (possibly retried) run into the testcase
                // status, the "main" attempt to report, and any reruns.
                let (mut testcase_status, main_status, reruns) = match run_statuses.describe() {
                    RunDescribe::Success { run_status } => {
                        (TestcaseStatus::success(), run_status, &[][..])
                    }
                    RunDescribe::Flaky {
                        last_status,
                        prior_statuses,
                    } => (TestcaseStatus::success(), last_status, prior_statuses),
                    RunDescribe::Failure {
                        first_status,
                        retries,
                        ..
                    } => {
                        let (kind, ty) = kind_ty(first_status);
                        let mut testcase_status = TestcaseStatus::non_success(kind);
                        testcase_status.set_type(ty);
                        (testcase_status, first_status, retries)
                    }
                };

                // Attach each rerun (flaky priors or failed retries) with its
                // own timing and captured output.
                for rerun in reruns {
                    let (kind, ty) = kind_ty(rerun);
                    let mut test_rerun = TestRerun::new(kind);
                    test_rerun
                        .set_timestamp(to_datetime(rerun.start_time))
                        .set_time(rerun.time_taken)
                        .set_type(ty)
                        .set_system_out_lossy(rerun.stdout())
                        .set_system_err_lossy(rerun.stderr());
                    // TODO: also publish time? it won't be standard JUnit (but maybe that's ok?)
                    testcase_status.add_rerun(test_rerun);
                }

                // TODO: set message/description on testcase_status?
                let mut testcase = Testcase::new(test_instance.name, testcase_status);
                testcase
                    .set_classname(test_instance.binary_id)
                    .set_timestamp(to_datetime(main_status.start_time))
                    .set_time(main_status.time_taken);

                // TODO: also provide stdout and stderr for passing tests?
                // TODO: allure seems to want the output to be in a format where text files are
                // written out to disk:
                // https://github.com/allure-framework/allure2/blob/master/plugins/junit-xml-plugin/src/main/java/io/qameta/allure/junitxml/JunitXmlPlugin.java#L192-L196
                // we may have to update this format to handle that.
                if !main_status.status.is_success() {
                    // Output is only attached for failures.
                    // TODO: use the Arc wrapper, don't clone the system out and system err bytes
                    testcase
                        .set_system_out_lossy(main_status.stdout())
                        .set_system_err_lossy(main_status.stderr());
                }

                testsuite.add_testcase(testcase);
            }
            TestEvent::TestSkipped { .. } => {
                // TODO: report skipped tests? causes issues if we want to aggregate runs across
                // skipped and non-skipped tests. Probably needs to be made configurable.
                // let testsuite = self.testsuite_for(test_instance);
                //
                // let mut testcase_status = TestcaseStatus::skipped();
                // testcase_status.set_message(format!("Skipped: {}", reason));
                // let testcase = Testcase::new(test_instance.name, testcase_status);
                //
                // testsuite.add_testcase(testcase);
            }
            TestEvent::RunBeginCancel { .. } => {}
            TestEvent::RunFinished {
                start_time,
                elapsed,
                ..
            } => {
                // Write out the report to the given file.
                let mut report = Report::new(self.name);
                report
                    .set_timestamp(to_datetime(start_time))
                    .set_time(elapsed)
                    .add_testsuites(self.testsuites.drain().map(|(_, testsuite)| testsuite));
                if let Some(junit) = &self.config.junit {
                    // Output path: workspace_root / metadata dir / junit file.
                    let junit_path: Utf8PathBuf = [
                        self.workspace_root,
                        self.config.dir.as_ref(),
                        junit.as_ref(),
                    ]
                    .iter()
                    .collect();
                    let junit_dir = junit_path.parent().expect("junit path must have a parent");
                    std::fs::create_dir_all(junit_dir).with_context(|| {
                        format!("failed to create junit output directory '{}'", junit_dir)
                    })?;
                    let f = File::create(&junit_path).with_context(|| {
                        format!("failed to open junit file '{}' for writing", junit_path)
                    })?;
                    report
                        .serialize(f)
                        .with_context(|| format!("failed to serialize junit to {}", junit_path))?;
                }
            }
        }
        Ok(())
    }

    /// Returns the testsuite for this test's binary, creating it on first use.
    fn testsuite_for(&mut self, test_instance: TestInstance<'a>) -> &mut Testsuite {
        self.testsuites
            .entry(test_instance.binary_id)
            .or_insert_with(|| Testsuite::new(test_instance.binary_id))
    }
}
/// Converts a `SystemTime` into a fixed-offset datetime, serializing in UTC.
fn to_datetime(system_time: SystemTime) -> DateTime<FixedOffset> {
    DateTime::<Utc>::from(system_time).into()
}
|
use api_client::ApiClient;
use serde_json;
use serde_json::Value;
use std::collections::HashMap;
use std::io;
use std::io::{Cursor, Read};
use std::io::ErrorKind as IoErrorKind;
use utils::decode_list;
use errors::*;
chef_json_type!(NodeJsonClass, "Chef::Node");
chef_json_type!(NodeChefType, "node");
/// A Chef node object, serializable to and from the Chef server's JSON
/// representation.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct Node {
    /// Node name; `None` until assigned.
    #[serde(default)]
    pub name: Option<String>,
    // Fixed Chef type tags ("node" / "Chef::Node"), provided by the
    // chef_json_type! macros above.
    #[serde(default)]
    chef_type: NodeChefType,
    #[serde(default)]
    json_class: NodeJsonClass,
    /// Environment this node belongs to.
    #[serde(default)]
    pub chef_environment: String,
    /// Ordered run-list entries.
    #[serde(default)]
    pub run_list: Vec<String>,
    /// Normal attributes.
    #[serde(default)]
    pub normal: HashMap<String, Value>,
    /// Automatic attributes.
    #[serde(default)]
    pub automatic: HashMap<String, Value>,
    /// Default attributes.
    #[serde(default)]
    pub default: HashMap<String, Value>,
    /// Override attributes; renamed because `override` is a Rust keyword.
    #[serde(default, rename = "override")]
    pub overrides: HashMap<String, Value>,
}
impl Read for Node {
    /// Serializes the node to JSON and copies those bytes into `buf`.
    ///
    /// Each call re-serializes from the start (a fresh cursor is created
    /// every time), so this is only suitable for a single-shot read into a
    /// sufficiently large buffer.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        match serde_json::to_vec(self) {
            Ok(bytes) => Cursor::new(bytes).read(buf),
            Err(_) => Err(io::Error::new(
                IoErrorKind::InvalidData,
                "Failed to convert node to JSON",
            )),
        }
    }
}
impl Node {
    /// Creates a new node with the given name; all other fields default.
    pub fn new<S>(name: S) -> Self
    where
        S: Into<String>,
    {
        Node {
            name: Some(name.into()),
            ..Default::default()
        }
    }

    /// Fetches the node `name` from the Chef server.
    pub fn fetch(client: &ApiClient, name: String) -> Result<Node> {
        let org = client.config.organization_path();
        let path = format!("{}/nodes/{}", org, name);
        client.get::<Node>(path.as_ref())
    }

    /// Saves (updates) this node on the Chef server via PUT.
    ///
    /// # Panics
    /// Panics if `self.name` is `None`.
    pub fn save(&self, client: &ApiClient) -> Result<Node> {
        // Borrow the name instead of cloning the whole Option<String>.
        let name = self
            .name
            .as_ref()
            .expect("node must have a name to be saved");
        let org = client.config.organization_path();
        let path = format!("{}/nodes/{}", org, name);
        client.put::<&Node, Node>(path.as_ref(), &self)
    }

    /// Creates this node on the Chef server via POST.
    pub fn create(&self, client: &ApiClient) -> Result<Node> {
        let org = client.config.organization_path();
        let path = format!("{}/nodes", org);
        client.post::<&Node, Node>(path.as_ref(), &self)
    }

    /// Deletes this node from the Chef server.
    ///
    /// # Panics
    /// Panics if `self.name` is `None`.
    pub fn delete(&self, client: &ApiClient) -> Result<Node> {
        let name = self
            .name
            .as_ref()
            .expect("node must have a name to be deleted");
        let org = client.config.organization_path();
        let path = format!("{}/nodes/{}", org, name);
        client.delete::<Node>(path.as_ref())
    }
}
/// Deletes the node called `name` from the Chef server, returning the
/// deleted node's representation.
pub fn delete_node(client: &ApiClient, name: &str) -> Result<Node> {
    let path = format!("{}/nodes/{}", client.config.organization_path(), name);
    client.delete::<Node>(path.as_ref())
}
/// A list of node names fetched from the server, iterable as full `Node`s.
#[derive(Debug)]
pub struct NodeList {
    // NOTE(review): this field is written once (to 0) and never read — the
    // iterator's `count` override uses `nodes.len()` instead. Candidate for
    // removal; confirm no external users.
    count: usize,
    /// Remaining node names; consumed from the front as the list is iterated.
    pub nodes: Vec<String>,
    client: ApiClient,
}
impl NodeList {
    /// Fetches the list of node names for the client's organization.
    ///
    /// # Panics
    /// Panics if the HTTP request or response decoding fails; the
    /// constructor's signature (returning `Self`) leaves no way to
    /// surface the error to the caller.
    pub fn new(client: &ApiClient) -> Self {
        let org = client.config.organization_path();
        let path = format!("{}/nodes", org);
        client
            .get(path.as_ref())
            .and_then(decode_list)
            // `.map` instead of `.and_then(|..| Ok(..))`: the closure is
            // infallible.
            .map(|list| NodeList {
                nodes: list,
                count: 0,
                client: client.clone(),
            })
            .expect("failed to fetch node list from the Chef server")
    }
}
impl Iterator for NodeList {
    type Item = Result<Node>;

    /// Overridden so `count` reports the number of remaining names without
    /// fetching every node over the network (the default implementation
    /// would drive `next` to exhaustion).
    fn count(self) -> usize {
        self.nodes.len()
    }

    /// Fetches the next node by name; each step performs a network request.
    fn next(&mut self) -> Option<Self::Item> {
        if self.nodes.is_empty() {
            None
        } else {
            // remove(0) is O(n); acceptable for small lists, but the public
            // `nodes: Vec<String>` field prevents switching to VecDeque.
            Some(Node::fetch(&self.client, self.nodes.remove(0)))
        }
    }
}
// #[cfg(test)]
// mod tests {
// use super::Node;
// use std::fs::File;
// #[test]
// fn test_node_from_file() {
// let fh = File::open("fixtures/node.json").unwrap();
// let node = Node::from_json(fh).unwrap();
// assert_eq!(node.name.unwrap(), "test")
// }
// }
|
use std::ops::Index;
use super::View;
use crate::{dim, dim::Dim, Matrix};
/// A view whose row dimension is statically 1 — i.e. a single matrix row.
pub trait RowView<'a>: View<'a, M = dim!(1)> {}

/// Blanket impl: any single-row `View` is a `RowView`.
impl<'a, T: 'a, V> RowView<'a> for V where V: View<'a, M = dim!(1), Entry = T> {}
/// A borrowed view of row `i` of a matrix. Cheap to copy: it is just a
/// matrix reference plus a row index.
#[derive(Debug)]
pub struct RowSlice<'a, T, M, N> {
    mat: &'a Matrix<T, M, N>,
    // Row index; validated to be in-bounds by `new`.
    i: usize,
}
impl<'a, T, M: Dim, N: Dim> RowSlice<'a, T, M, N> {
    /// Creates a view of row `i` of `mat`.
    ///
    /// # Panics
    /// Panics if `i` is out of bounds for the matrix's row dimension.
    pub(crate) fn new(mat: &'a Matrix<T, M, N>, i: usize) -> Self {
        assert!(i < mat.m.dim());
        Self { mat, i }
    }
}
// Manual impls (rather than derives) so that no `T: Copy`/`T: Clone`
// bounds are required: the view only copies a reference and an index.
impl<T, M, N> Copy for RowSlice<'_, T, M, N> {}

impl<T, M, N> Clone for RowSlice<'_, T, M, N> {
    #[inline]
    fn clone(&self) -> Self {
        // The type is Copy, so clone is just a copy (clippy:
        // non_canonical_clone_impl).
        *self
    }
}
impl<'a, T, M: Dim, N: Dim> View<'a> for RowSlice<'a, T, M, N> {
    // A row view is statically 1 x N.
    type M = dim!(1);
    type N = N;
    type Entry = T;

    #[inline]
    fn m(&self) -> dim!(1) {
        dim!(1)
    }

    #[inline]
    fn n(&self) -> N {
        self.mat.n
    }

    /// Returns the entry at (i, j) of this 1-row view, or `None` when the
    /// indices fall outside it (row must be 0, column in range).
    #[inline]
    fn get(&self, i: usize, j: usize) -> Option<&'a T> {
        if i == 0 && j < self.n().dim() {
            // Row-major indexing into the backing matrix's items.
            Some(&self.mat.items[self.i * self.n().dim() + j])
        } else {
            None
        }
    }

    type Iter = std::slice::Iter<'a, T>;

    /// Iterates over the entries of this row as a contiguous slice.
    fn iter(&self) -> Self::Iter {
        let a = self.i * self.mat.n.dim();
        let b = a + self.mat.n.dim();
        self.mat.items[a..b].iter()
    }
}
impl<T, M: Dim, N: Dim> Index<usize> for RowSlice<'_, T, M, N> {
    type Output = T;

    /// Indexes column `j` of this row; panics (via the matrix index) when
    /// `j` is out of bounds.
    #[inline]
    fn index(&self, j: usize) -> &T {
        &self.mat[[self.i, j]]
    }
}
impl<T, M: Dim, N: Dim> Index<[usize; 2]> for RowSlice<'_, T, M, N> {
    type Output = T;

    /// Two-dimensional indexing; the row coordinate must be 0 since the
    /// view has exactly one row.
    #[inline]
    fn index(&self, [i, j]: [usize; 2]) -> &T {
        assert_eq!(i, 0);
        &self.mat[[self.i, j]]
    }
}
/// Entry-wise equality against any other view with compatible dimensions.
impl<'a, 'b, T, M: Dim, N: Dim, Rhs> PartialEq<Rhs> for RowSlice<'a, T, M, N>
where
    Rhs: View<'b>,
    T: PartialEq<Rhs::Entry>,
    dim!(1): PartialEq<Rhs::M>,
    N: PartialEq<Rhs::N>,
{
    #[inline]
    fn eq(&self, rhs: &Rhs) -> bool {
        // NOTE(review): `*rhs` moves out of the borrow, which only compiles
        // if `View::equal` takes it by value and `Rhs` is `Copy` (or `equal`
        // accepts a reference) — confirm against the `View` trait definition.
        self.equal(*rhs)
    }
}
|
use super::position::{Dir, Position};
use crate::util::clip;
/// A creature's health points (newtype over `f32`).
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug)]
pub struct Health(pub f32);

/// Damage is measured in the same units as health.
pub(super) type Damage = Health;
impl Health {
    /// Applies an attack to this health and returns the damage dealt.
    ///
    /// NOTE(review): the full attack value is reported as damage even when
    /// it exceeds the remaining health (health can go negative) — confirm
    /// this overkill behaviour is intended.
    pub fn suffer(&mut self, attack: Attack) -> Damage {
        self.0 -= attack.0;
        Health(attack.0)
    }
}
/// Scaling health by a factor; NaN factors are rejected in debug builds.
impl std::ops::Mul<f32> for Health {
    type Output = Health;
    fn mul(self, rhs: f32) -> Self::Output {
        debug_assert!(!rhs.is_nan());
        Health(self.0 * rhs)
    }
}

/// Commutative counterpart of `Health * f32`.
impl std::ops::Mul<Health> for f32 {
    type Output = Health;
    fn mul(self, rhs: Health) -> Self::Output {
        debug_assert!(!self.is_nan());
        Health(self * rhs.0)
    }
}
/// Meat yielded by a dead creature (newtype over `f32`).
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug)]
pub struct Meat(pub f32);

/// Attack strength, in health units.
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug)]
pub struct Attack(pub f32);

/// How well-fed a creature is.
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug)]
pub struct Satiation(pub f32);

/// Tiredness level; clamped to [0, 1] by its `AddAssign` impl below.
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug)]
pub struct Fatigue(pub f32);

/// Movement speed.
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug)]
pub struct Speed(pub f32);
/// Scaling speed by a factor.
impl std::ops::Mul<f32> for Speed {
    type Output = Speed;
    fn mul(self, rhs: f32) -> Self::Output {
        // Reject NaN factors in debug builds, consistent with the `Mul`
        // impls for `Health` above.
        debug_assert!(!rhs.is_nan());
        Speed(self.0 * rhs)
    }
}
/// Accumulated progress toward completing a move; advanced by adding speed.
#[derive(PartialOrd, PartialEq, Copy, Clone, Debug, Default)]
pub struct MoveProgress(pub f32);

/// Each tick of movement adds the (scaled) speed to the progress.
impl std::ops::AddAssign<Speed> for MoveProgress {
    fn add_assign(&mut self, rhs: Speed) {
        self.0 += rhs.0
    }
}
/// The complete physical condition of a creature.
#[derive(Clone, Debug)]
pub struct PhysicalState {
    pub health: Health,
    pub max_health: Health,
    /// Meat available when the creature is dead.
    pub meat: Meat,
    pub speed: Speed,
    /// Progress toward completing the current move.
    pub move_progress: MoveProgress,
    /// Direction currently being moved in, if any.
    pub move_target: Option<Dir>,
    /// `None` for creatures that cannot attack.
    pub attack: Option<Attack>,
    pub satiation: Satiation,
    pub fatigue: Fatigue,
}
impl std::fmt::Display for PhysicalState {
    /// Renders the state for the UI: remaining meat for corpses, otherwise
    /// the live stats.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        if self.is_dead() {
            writeln!(f, "{:.2} meat remaining", self.meat.0)?;
        } else {
            writeln!(f, "Health: {:.2}/{:.2}", self.health.0, self.max_health.0)?;
            // NOTE(review): `{:.}` (precision with no count) differs from the
            // `{:.2}` used elsewhere — confirm it is intentional.
            writeln!(f, "Speed : {:.}", self.speed.0)?;
            if let Some(att) = self.attack {
                writeln!(f, "Attack: {:.}", att.0)?;
            }
            writeln!(f, "Satiation: {:.2}", self.satiation.0)?;
            writeln!(f, "Fatigue: {:.2}", self.fatigue.0)?;
        }
        Ok(())
    }
}
impl PhysicalState {
    /// Default satiation decrement step.
    pub const SATIATION_DECR: Satiation = Satiation(0.1);

    /// A creature is dead once its health is depleted.
    pub fn is_dead(&self) -> bool {
        self.health.0 <= 0.0
    }

    /// Advances movement toward `dir`, resetting progress whenever the
    /// direction changes. Effective speed is scaled down by missing health
    /// and by fatigue (quadratically, up to a 75% penalty at full fatigue).
    pub fn partial_move(&mut self, dir: Dir) -> MoveProgress {
        if Some(dir) != self.move_target {
            self.move_target = Some(dir);
            self.move_progress = MoveProgress(0.0);
        }
        self.move_progress += self.speed
            * (self.health.0 / self.max_health.0)
            * (1.0 - 0.75 * self.fatigue.0 * self.fatigue.0);
        self.move_progress
    }

    /// Creates a state at full health: meat is half of max health,
    /// satiation starts at 10 and fatigue at zero.
    pub fn new(max_health: Health, speed: Speed, attack: Option<Attack>) -> Self {
        Self {
            health: max_health,
            max_health,
            meat: Meat(max_health.0 * 0.5),
            speed,
            move_progress: MoveProgress::default(),
            move_target: None,
            attack,
            satiation: Satiation(10.0),
            fatigue: Fatigue(0.0),
        }
    }
}
impl std::ops::AddAssign<Satiation> for PhysicalState {
    /// Adjusts satiation; a deficit (negative result) eats into health at
    /// 10% of the shortfall before satiation is clamped to zero.
    fn add_assign(&mut self, rhs: Satiation) {
        debug_assert!(!rhs.0.is_nan());
        let mut val = self.satiation.0 + rhs.0;
        if val < 0.0 {
            self.health.0 += 0.1 * val;
            val = 0.0;
        }
        // NOTE(review): the upper clamp is max_health.0 rather than a
        // dedicated satiation maximum — confirm this is intended.
        self.satiation.0 = clip(val, 0.0, self.max_health.0);
    }
}

impl std::ops::SubAssign<Satiation> for PhysicalState {
    /// Subtraction delegates to adding the negated amount.
    fn sub_assign(&mut self, rhs: Satiation) {
        *self += Satiation(-rhs.0)
    }
}

impl std::ops::AddAssign<Health> for PhysicalState {
    /// Heals (or damages, for negative amounts), clamped to [0, max].
    fn add_assign(&mut self, rhs: Health) {
        debug_assert!(!rhs.0.is_nan());
        self.health.0 = clip(self.health.0 + rhs.0, 0.0, self.max_health.0);
    }
}

impl std::ops::SubAssign<Health> for PhysicalState {
    /// Subtraction delegates to adding the negated amount.
    fn sub_assign(&mut self, rhs: Health) {
        *self += Health(-rhs.0)
    }
}

impl std::ops::AddAssign<Fatigue> for Fatigue {
    /// Fatigue accumulates but is clamped to [0, 1].
    fn add_assign(&mut self, rhs: Fatigue) {
        debug_assert!(!rhs.0.is_nan());
        self.0 = clip(self.0 + rhs.0, 0.0, 1.0);
    }
}
|
fn main() {
proconio::input! {
n: usize,
ab: [(usize, usize); n],
}
let mut ans = 0;
for i in 0..n {
for j in ab[i].0..=ab[i].1 {
ans += j;
}
}
println!("{}", ans);
} |
use crate::{
bson::doc,
bson_util,
cmap::StreamDescription,
coll::Namespace,
concern::ReadConcern,
operation::{
test::{self, handle_response_test},
Operation,
},
options::{CountOptions, Hint},
};
use super::CountDocuments;
/// `CountDocuments` should build an `aggregate` command whose pipeline is
/// a `$match` stage (from the filter) followed by a counting `$group`.
#[test]
fn build() {
    let ns = Namespace {
        db: "test_db".to_string(),
        coll: "test_coll".to_string(),
    };
    let mut count_op = CountDocuments::new(ns, Some(doc! { "x": 1 }), None).unwrap();
    let mut count_command = count_op
        .build(&StreamDescription::new_testing())
        .expect("error on build");
    let mut expected_body = doc! {
        "aggregate": "test_coll",
        "pipeline": [
            { "$match": { "x": 1 } },
            { "$group": { "_id": 1, "n": { "$sum": 1 } } },
        ],
        "cursor": { }
    };
    // Key order is not significant; sort both documents before comparing.
    bson_util::sort_document(&mut expected_body);
    bson_util::sort_document(&mut count_command.body);
    assert_eq!(count_command.body, expected_body);
    assert_eq!(count_command.target_db, "test_db");
}
/// Skip, limit, hint and read concern options should all be reflected in
/// the serialized aggregate command (skip/limit become pipeline stages).
#[test]
fn build_with_options() {
    let skip = 2;
    let limit = 5;
    let options = CountOptions::builder()
        .skip(skip)
        .limit(limit)
        .hint(Hint::Name("_id_1".to_string()))
        .read_concern(ReadConcern::available())
        .build();
    let ns = Namespace {
        db: "test_db".to_string(),
        coll: "test_coll".to_string(),
    };
    let mut count_op = CountDocuments::new(ns, None, Some(options)).unwrap();
    let count_command = count_op
        .build(&StreamDescription::new_testing())
        .expect("error on build");
    assert_eq!(count_command.target_db, "test_db");
    let mut expected_body = doc! {
        "aggregate": "test_coll",
        "$db": "test_db",
        "pipeline": [
            { "$match": {} },
            { "$skip": skip as i64 },
            { "$limit": limit as i64 },
            { "$group": { "_id": 1, "n": { "$sum": 1 } } },
        ],
        "hint": "_id_1",
        "cursor": { },
        "readConcern": { "level": "available" },
    };
    bson_util::sort_document(&mut expected_body);
    // Round-trip through serialization so `$db` and the read concern are
    // included, then compare sorted documents.
    let serialized_command = count_op.serialize_command(count_command).unwrap();
    let mut cmd_doc = bson::from_slice(&serialized_command).unwrap();
    bson_util::sort_document(&mut cmd_doc);
    assert_eq!(cmd_doc, expected_body);
}
/// Delegates to the shared harness verifying that the operation honours
/// the selection criteria passed through its options.
#[test]
fn op_selection_criteria() {
    test::op_selection_criteria(|selection_criteria| {
        let options = CountOptions {
            selection_criteria,
            ..Default::default()
        };
        CountDocuments::new(Namespace::empty(), None, Some(options)).unwrap()
    });
}
/// A successful server reply (single-batch cursor containing the count in
/// field "n") should be decoded into the plain integer count.
#[test]
fn handle_success() {
    let ns = Namespace {
        db: "test_db".to_string(),
        coll: "test_coll".to_string(),
    };
    let count_op = CountDocuments::new(ns, None, None).unwrap();
    let n = 26;
    let response = doc! {
        "ok": 1.0,
        "cursor": {
            "id": 0,
            "ns": "test_db.test_coll",
            "firstBatch": [ { "_id": 1, "n": n as i32 } ],
        }
    };
    let actual_values = handle_response_test(&count_op, response).unwrap();
    assert_eq!(actual_values, n);
}
|
use crate::grammar::ast::{BooleanLit, CharLit, Identifier, NumLit, ScopedName, StringLit};
use std::fmt::Debug;
/// A Pattern used in pattern matching.
#[derive(Clone, Debug)]
pub enum Pattern<SourceCodeReference: Clone + Debug> {
    /// A (possibly negated) number literal pattern.
    NumLit(NumLitPattern<SourceCodeReference>),
    /// A character literal pattern.
    CharLit(CharLit<SourceCodeReference>),
    /// A string literal pattern.
    StringLit(StringLit<SourceCodeReference>),
    /// A boolean literal pattern.
    BooleanLit(BooleanLit<SourceCodeReference>),
    /// A bare identifier, binding the matched value to a name.
    Identifier(Identifier<SourceCodeReference>),
    /// A scoped (path-qualified) name pattern.
    ScopedName(ScopedName<SourceCodeReference>),
    /// The wildcard `_` pattern, matching anything without binding.
    Underscore(Underscore<SourceCodeReference>),
}
/// An underscore (`_`) pattern in source code; matches any value without
/// binding it.
#[derive(Clone, Debug)]
pub struct Underscore<SourceCodeReference: Clone + Debug> {
    /// Associated source code.
    pub source: SourceCodeReference,
}
/// Number literal pattern, e.g. `42` or `-42`. The sign is stored
/// separately from the (unsigned) literal.
#[derive(Clone, Debug)]
pub struct NumLitPattern<SourceCodeReference: Clone + Debug> {
    /// Associated source code reference.
    pub source: SourceCodeReference,
    /// Whether the number literal pattern has '-' in front
    pub negative: bool,
    /// Inner number literal value
    pub inner: NumLit<SourceCodeReference>,
}
|
#[cfg(test)]
mod tests {
    extern crate rust_sike;
    use self::rust_sike::KEM;

    /// End-to-end KEM round trip on the p751 parameter set: keygen,
    /// encapsulate, decapsulate, and check both sides derive the same key.
    #[test]
    fn basic_sike_test() {
        let params = rust_sike::sike_p751_params(None, None).unwrap();
        let kem = KEM::setup(params);
        // Alice runs keygen, publishes pk3. Values s and sk3 are secret
        let (s, sk3, pk3) = kem.keygen().unwrap();
        // Bob uses pk3 to derive a key k and encapsulation c
        let (c, k) = kem.encaps(&pk3).unwrap();
        // Bob sends c to Alice
        // Alice uses s, c, sk3 and pk3 to recover k
        let k_recovered = kem.decaps(&s, &sk3, &pk3, c).unwrap();
        assert_eq!(k, k_recovered);
    }
}
|
// Copyright (C) 2017 1aim GmbH
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::{
metadata::{Descriptor, Format},
phone_number::Type,
};
use regex_cache::CachedRegex;
/// Phone number metadata.
///
/// Holds everything known about numbering in one region: its calling code,
/// prefixes, formatting rules and per-type descriptors. Fields are crate
/// private; see the accessor methods on `Metadata` for their semantics.
#[derive(Clone, Debug)]
pub struct Metadata {
    pub(crate) descriptors: Descriptors,
    pub(crate) id: String,
    pub(crate) country_code: u16,
    pub(crate) international_prefix: Option<CachedRegex>,
    pub(crate) preferred_international_prefix: Option<String>,
    pub(crate) national_prefix: Option<String>,
    pub(crate) preferred_extension_prefix: Option<String>,
    pub(crate) national_prefix_for_parsing: Option<CachedRegex>,
    pub(crate) national_prefix_transform_rule: Option<String>,
    pub(crate) formats: Vec<Format>,
    pub(crate) international_formats: Vec<Format>,
    pub(crate) main_country_for_code: bool,
    pub(crate) leading_digits: Option<CachedRegex>,
    pub(crate) mobile_number_portable: bool,
}
/// Descriptors for various types of phone number.
///
/// Only `general` is mandatory; every specific number type is optional
/// since not all regions define all types.
#[derive(Clone, Debug)]
pub struct Descriptors {
    pub(crate) general: Descriptor,
    pub(crate) fixed_line: Option<Descriptor>,
    pub(crate) mobile: Option<Descriptor>,
    pub(crate) toll_free: Option<Descriptor>,
    pub(crate) premium_rate: Option<Descriptor>,
    pub(crate) shared_cost: Option<Descriptor>,
    pub(crate) personal_number: Option<Descriptor>,
    pub(crate) voip: Option<Descriptor>,
    pub(crate) pager: Option<Descriptor>,
    pub(crate) uan: Option<Descriptor>,
    pub(crate) emergency: Option<Descriptor>,
    pub(crate) voicemail: Option<Descriptor>,
    pub(crate) short_code: Option<Descriptor>,
    pub(crate) standard_rate: Option<Descriptor>,
    pub(crate) carrier: Option<Descriptor>,
    pub(crate) no_international: Option<Descriptor>,
}
impl Metadata {
    /// Descriptors for the various types of phone number.
    pub fn descriptors(&self) -> &Descriptors {
        &self.descriptors
    }

    /// The CLDR 2-letter representation of a country/region, with the exception
    /// of "country calling codes" used for non-geographical entities, such as
    /// Universal International Toll Free Number (+800). These are all given the
    /// ID "001", since this is the numeric region code for the world according
    /// to UN M.49: http://en.wikipedia.org/wiki/UN_M.49
    pub fn id(&self) -> &str {
        &self.id
    }

    /// The country calling code that one would dial from overseas when trying to
    /// dial a phone number in this country. For example, this would be "64" for
    /// New Zealand.
    pub fn country_code(&self) -> u16 {
        self.country_code
    }

    /// The international prefix of country A is the number that needs to be
    /// dialled from country A to another country (country B). This is followed
    /// by the country code for country B. Note that some countries may have more
    /// than one international prefix, and for those cases, a regular expression
    /// matching the international prefixes will be stored in this field.
    pub fn international_prefix(&self) -> Option<&CachedRegex> {
        self.international_prefix.as_ref()
    }

    /// If more than one international prefix is present, a preferred prefix can
    /// be specified here for out-of-country formatting purposes. If this field
    /// is not present, and multiple international prefixes are present, then "+"
    /// will be used instead.
    pub fn preferred_international_prefix(&self) -> Option<&str> {
        // `as_deref` replaces the former `.as_ref().map(AsRef::as_ref)`.
        self.preferred_international_prefix.as_deref()
    }

    /// The national prefix of country A is the number that needs to be dialled
    /// before the national significant number when dialling internally. This
    /// would not be dialled when dialling internationally. For example, in New
    /// Zealand, the number that would be locally dialled as 09 345 3456 would be
    /// dialled from overseas as +64 9 345 3456. In this case, 0 is the national
    /// prefix.
    pub fn national_prefix(&self) -> Option<&str> {
        self.national_prefix.as_deref()
    }

    /// The preferred prefix when specifying an extension in this country. This
    /// is used for formatting only, and if this is not specified, a suitable
    /// default should be used instead. For example, if you wanted extensions to
    /// be formatted in the following way:
    ///
    /// 1 (365) 345 445 ext. 2345
    /// " ext. " should be the preferred extension prefix.
    pub fn preferred_extension_prefix(&self) -> Option<&str> {
        self.preferred_extension_prefix.as_deref()
    }

    /// This field is used for cases where the national prefix of a country
    /// contains a carrier selection code, and is written in the form of a
    /// regular expression. For example, to dial the number 2222-2222 in
    /// Fortaleza, Brazil (area code 85) using the long distance carrier Oi
    /// (selection code 31), one would dial 0 31 85 2222 2222. Assuming the only
    /// other possible carrier selection code is 32, the field will contain
    /// "03[12]".
    ///
    /// When it is missing from the XML file, this field inherits the value of
    /// national prefix, if that is present.
    pub fn national_prefix_for_parsing(&self) -> Option<&CachedRegex> {
        self.national_prefix_for_parsing.as_ref()
    }

    /// This field is only populated and used under very rare situations. For
    /// example, mobile numbers in Argentina are written in two completely
    /// different ways when dialed in-country and out-of-country (e.g. 0343 15
    /// 555 1212 is exactly the same number as +54 9 343 555 1212).
    ///
    /// This field is used together with `national_prefix_for_parsing` to transform
    /// the number into a particular representation for storing in the
    /// phonenumber proto buffer in those rare cases.
    pub fn national_prefix_transform_rule(&self) -> Option<&str> {
        self.national_prefix_transform_rule.as_deref()
    }

    /// Note that the number format here is used for formatting only, not
    /// parsing. Hence all the varied ways a user *may* write a number need not
    /// be recorded - just the ideal way we would like to format it for them.
    ///
    /// When this element is absent, the national significant number will be
    /// formatted as a whole without any formatting applied.
    pub fn formats(&self) -> &[Format] {
        &self.formats
    }

    /// This field is populated only when the national significant number is
    /// formatted differently when it forms part of the INTERNATIONAL format and
    /// NATIONAL format. A case in point is mobile numbers in Argentina: The
    /// number, which would be written in INTERNATIONAL format as +54 9 343 555
    /// 1212, will be written as 0343 15 555 1212 for NATIONAL format. In this
    /// case, the prefix 9 is inserted when dialling from overseas, but otherwise
    /// the prefix 0 and the carrier selection code
    /// 15 (inserted after the area code of 343) is used.
    ///
    /// Note: this field is populated by setting a value for <intlFormat> inside
    /// the <numberFormat> tag in the XML file. If <intlFormat> is not set then
    /// it defaults to the same value as the <format> tag.
    ///
    /// Examples:
    /// To set the <intlFormat> to a different value than the <format>:
    /// <numberFormat pattern=....>
    /// <format>$1 $2 $3</format>
    /// <intlFormat>$1-$2-$3</intlFormat>
    /// </numberFormat>
    ///
    /// To have a format only used for national formatting, set <intlFormat> to
    /// "NA":
    /// <numberFormat pattern=....>
    /// <format>$1 $2 $3</format>
    /// <intlFormat>NA</intlFormat>
    /// </numberFormat>
    pub fn international_formats(&self) -> &[Format] {
        &self.international_formats
    }

    /// This field is set when this country is considered to be the main country
    /// for a calling code. It may not be set by more than one country with the
    /// same calling code, and it should not be set by countries with a unique
    /// calling code. This can be used to indicate that "GB" is the main country
    /// for the calling code "44" for example, rather than Jersey or the Isle of
    /// Man.
    pub fn is_main_country_for_code(&self) -> bool {
        self.main_country_for_code
    }

    /// This field is populated only for countries or regions that share a
    /// country calling code. If a number matches this pattern, it could belong
    /// to this region. This is not intended as a replacement for
    /// IsValidForRegion since a matching prefix is insufficient for a number to
    /// be valid. Furthermore, it does not contain all the prefixes valid for a
    /// region - for example, 800 numbers are valid for all NANPA countries and
    /// are hence not listed here.
    ///
    /// This field should be a regular expression of the expected prefix match.
    ///
    /// It is used merely as a short-cut for working out which region a number
    /// comes from in the case that there is only one, so leading digit prefixes
    /// should not overlap.
    pub fn leading_digits(&self) -> Option<&CachedRegex> {
        self.leading_digits.as_ref()
    }

    /// This field is set when this country has implemented mobile number
    /// portability. This means that transferring mobile numbers between carriers
    /// is allowed. A consequence of this is that phone prefix to carrier mapping
    /// is less reliable.
    pub fn is_mobile_number_portable(&self) -> bool {
        self.mobile_number_portable
    }
}
impl Descriptors {
    /// Get the proper descriptor for the given phone number type, if any.
    pub fn get(&self, kind: Type) -> Option<&Descriptor> {
        match kind {
            // Unknown numbers fall back to the general descriptor.
            Type::Unknown => Some(&self.general),
            // FixedLineOrMobile is resolved against the fixed-line descriptor.
            Type::FixedLine | Type::FixedLineOrMobile => self.fixed_line.as_ref(),
            Type::Mobile => self.mobile.as_ref(),
            Type::TollFree => self.toll_free.as_ref(),
            Type::PremiumRate => self.premium_rate.as_ref(),
            Type::SharedCost => self.shared_cost.as_ref(),
            Type::PersonalNumber => self.personal_number.as_ref(),
            Type::Voip => self.voip.as_ref(),
            Type::Pager => self.pager.as_ref(),
            Type::Uan => self.uan.as_ref(),
            Type::Emergency => self.emergency.as_ref(),
            Type::Voicemail => self.voicemail.as_ref(),
            Type::ShortCode => self.short_code.as_ref(),
            Type::StandardRate => self.standard_rate.as_ref(),
            Type::Carrier => self.carrier.as_ref(),
            Type::NoInternational => self.no_international.as_ref(),
        }
    }

    /// The general descriptor, applicable to all numbers in the region.
    pub fn general(&self) -> &Descriptor {
        &self.general
    }

    /// Descriptor for fixed-line numbers, if defined.
    pub fn fixed_line(&self) -> Option<&Descriptor> {
        self.fixed_line.as_ref()
    }

    /// Descriptor for mobile numbers, if defined.
    pub fn mobile(&self) -> Option<&Descriptor> {
        self.mobile.as_ref()
    }

    /// Descriptor for toll-free numbers, if defined.
    pub fn toll_free(&self) -> Option<&Descriptor> {
        self.toll_free.as_ref()
    }

    /// Descriptor for premium-rate numbers, if defined.
    pub fn premium_rate(&self) -> Option<&Descriptor> {
        self.premium_rate.as_ref()
    }

    /// Descriptor for shared-cost numbers, if defined.
    pub fn shared_cost(&self) -> Option<&Descriptor> {
        self.shared_cost.as_ref()
    }

    /// Descriptor for personal numbers, if defined.
    pub fn personal_number(&self) -> Option<&Descriptor> {
        self.personal_number.as_ref()
    }

    /// Descriptor for VoIP numbers, if defined.
    pub fn voip(&self) -> Option<&Descriptor> {
        self.voip.as_ref()
    }

    /// Descriptor for pager numbers, if defined.
    pub fn pager(&self) -> Option<&Descriptor> {
        self.pager.as_ref()
    }

    /// Descriptor for UAN numbers, if defined.
    pub fn uan(&self) -> Option<&Descriptor> {
        self.uan.as_ref()
    }

    /// Descriptor for emergency numbers, if defined.
    pub fn emergency(&self) -> Option<&Descriptor> {
        self.emergency.as_ref()
    }

    /// Descriptor for voicemail numbers, if defined.
    pub fn voicemail(&self) -> Option<&Descriptor> {
        self.voicemail.as_ref()
    }

    /// Descriptor for short codes, if defined.
    pub fn short_code(&self) -> Option<&Descriptor> {
        self.short_code.as_ref()
    }

    /// Descriptor for standard-rate numbers, if defined.
    pub fn standard_rate(&self) -> Option<&Descriptor> {
        self.standard_rate.as_ref()
    }

    /// Descriptor for carrier-specific numbers, if defined.
    pub fn carrier(&self) -> Option<&Descriptor> {
        self.carrier.as_ref()
    }

    /// Descriptor for numbers that cannot be dialled internationally, if defined.
    pub fn no_international(&self) -> Option<&Descriptor> {
        self.no_international.as_ref()
    }
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use crate::server::Service;
use failure_ext::Result;
use serde::{Deserialize, Serialize};
/// Implementation of the control service.
///
/// Stateless marker type; all behaviour lives in the `Service` impl below.
#[derive(Debug, Default)]
pub struct ControlService;
impl Service for ControlService {
    type Request = ControlRequest;
    type Response = ControlResponse;

    /// Dispatches a single control request to its handler.
    fn serve(&self, request: &Self::Request) -> Result<Self::Response> {
        match request {
            // Status currently always reports "initializing".
            ControlRequest::Status => Ok(ControlResponse::Status {
                message: "initializing".to_string(),
            }),
            // Not implemented yet; panics if called.
            ControlRequest::UpdateNamingTable { .. } => unimplemented!(),
        }
    }
}
/// Requests accepted by the control service; the wire format is tagged by
/// a "type" field.
#[derive(Debug, Deserialize)]
#[serde(tag = "type")]
pub enum ControlRequest {
    /// Query the service's status.
    Status,
    /// Update the naming table with the given files.
    UpdateNamingTable { files: Vec<String> },
}
/// Responses produced by the control service, mirroring `ControlRequest`;
/// the wire format is tagged by a "type" field.
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
pub enum ControlResponse {
    /// Current status message.
    Status { message: String },
    /// Acknowledgement of a naming table update.
    UpdateNamingTable,
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use std::io;
use reverie::Pid;
use safeptrace::Error as TraceError;
use thiserror::Error;
use super::commands::CommandParseError;
use super::hex::GdbHexError;
use super::packet::PacketParseError;
/// Errors that can occur in the gdb server integration.
///
/// `PacketError` and `TraceError` are `transparent` but deliberately do not
/// use `#[from]`; the conversions are provided by the manual `From` impls
/// below (which also funnel `CommandParseError` and `GdbHexError` through
/// `PacketError`).
#[derive(Error, Debug)]
#[allow(clippy::enum_variant_names)]
pub enum Error {
    #[error("gdb server not started yet")]
    GdbServerNotStarted,
    #[error("Failed waiting for gdb client to connect")]
    WaitForGdbConnect {
        #[source]
        source: io::Error,
    },
    #[error("Connection reset")]
    ConnReset,
    #[error("gdb session not started")]
    SessionNotStarted,
    #[error(transparent)]
    PacketError(PacketParseError),
    #[error("No inferior attached")]
    Detached,
    #[error(transparent)]
    TraceError(TraceError),
    #[error("Failed to send gdb request over tx channel")]
    GdbRequestSendError,
    #[error("Failed to receive reply from gdb request")]
    GdbRequestRecvError,
    #[error("gdbserver failed to resume/step")]
    GdbResumeError,
    #[error("gdbserver failed to forward gdb packet")]
    GdbServerSendPacketError,
    #[error("No threadid is being specified")]
    ThreadIdNotSpecified,
    #[error("Unknown thread {0}")]
    UnknownThread(Pid),
    #[error("gdbserver failed to receive stop event")]
    GdbServerStopEventRecvError,
}
impl From<CommandParseError> for PacketParseError {
fn from(err: CommandParseError) -> Self {
PacketParseError::CommandError(err)
}
}
impl From<PacketParseError> for Error {
fn from(err: PacketParseError) -> Self {
Error::PacketError(err)
}
}
impl From<CommandParseError> for Error {
fn from(err: CommandParseError) -> Self {
Error::PacketError(err.into())
}
}
impl From<GdbHexError> for Error {
fn from(err: GdbHexError) -> Self {
Error::PacketError(err.into())
}
}
impl From<TraceError> for Error {
fn from(err: TraceError) -> Self {
Error::TraceError(err)
}
}
|
/// Area of the circle inscribed in a square of area `size`:
/// side² = size and radius = side / 2, so the result is pi * size / 4.
fn square_area_to_circle(size: f64) -> f64 {
    let quarter = size / 4.0;
    quarter * std::f64::consts::PI
}
// Spot-check square_area_to_circle against precomputed values (tolerance 1e-8).
#[test]
fn test0() {
    assert_close(square_area_to_circle(9.0), 7.0685834705770345, 1e-8);
}
#[test]
fn test1() {
    assert_close(square_area_to_circle(20.0), 15.70796326794897, 1e-8);
}
#[test]
fn test2() {
    assert_close(square_area_to_circle(16.0), 12.56637061435917, 1e-8);
}
#[test]
fn test3() {
    assert_close(square_area_to_circle(64.0), 50.26548245743669, 1e-8);
}
#[test]
fn test4() {
    assert_close(square_area_to_circle(256.0), 201.0619298297467, 1e-8);
}
//! Handles finding a hooking library, and provides types and macros for using the library
//! to hook game code.
use cached::proc_macro::cached;
use dlopen::symbor::Library;
use eyre::Context;
use log::error;
/// Open the library at `path` and return a copy of the symbol `sym_name`,
/// reinterpreted as `T`.
fn get_single_symbol<T: Copy>(path: &str, sym_name: &str) -> eyre::Result<T> {
    let library =
        Library::open(path).wrap_err_with(|| format!("failed to open library {}", path))?;
    let symbol = unsafe { library.symbol::<T>(sym_name) }
        .wrap_err_with(|| format!("unable to find {} in {}", sym_name, path))?;
    Ok(*symbol)
}
// Resolve Substrate's MSHookFunction as a raw address (cached after first success).
#[cached(result = true)]
fn get_raw_hook_fn() -> eyre::Result<usize> {
    get_single_symbol("libsubstrate.dylib", "MSHookFunction")
}
// Resolve libhooker's LHHookFunctions as a raw address (cached after first success).
#[cached(result = true)]
fn get_shit_raw_hook_fn() -> eyre::Result<usize> {
    get_single_symbol("libhooker.dylib", "LHHookFunctions")
}
// Resolve dyld's slide-lookup function, already typed as fn(u32) -> usize.
#[cached(result = true)]
fn get_aslr_offset_fn() -> eyre::Result<fn(u32) -> usize> {
    get_single_symbol::<fn(image: u32) -> usize>(
        "/usr/lib/system/libdyld.dylib",
        "_dyld_get_image_vmaddr_slide",
    )
}
/// ASLR slide of `image`, cached per image index. Panics if dyld's
/// lookup function cannot be resolved.
#[cached]
pub fn get_image_aslr_offset(image: u32) -> usize {
    let function = get_aslr_offset_fn().expect("Failed to get ASLR offset function!");
    function(image)
}
// Represents libhooker's struct LHFunctionHook.
#[repr(C)]
struct ShitFunctionHook<FuncType> {
    // Function to be hooked.
    function: FuncType,
    // Replacement implementation.
    replacement: FuncType,
    // Address libhooker writes the original through — presumably a pointer to
    // an Option<FuncType> slot (see gen_shit_hook_fn); TODO confirm layout
    // against libhooker's header.
    old_ptr: usize,
    options: usize,
}
// Build a hook-installing fn backed by libhooker's LHHookFunctions for a
// particular function signature. The returned fn installs one hook and logs
// on failure.
fn gen_shit_hook_fn<FuncType>() -> fn(FuncType, FuncType, &mut Option<FuncType>) {
    |function, replacement, original| {
        let hook_struct = ShitFunctionHook {
            function,
            replacement,
            // Reinterpret the &mut Option<FuncType> as a raw address for
            // libhooker to write the original through — assumes libhooker's
            // write matches Option<FuncType>'s layout; TODO confirm.
            old_ptr: unsafe { std::mem::transmute(original) },
            options: 0,
        };
        unsafe {
            // Reinterpret the cached symbol address as LHHookFunctions'
            // signature: (hooks array, count) -> count applied (presumably —
            // the call passes 1 hook and expects 1 back).
            let hook_fn: fn(*const ShitFunctionHook<FuncType>, i32) -> i32 =
                std::mem::transmute(get_shit_raw_hook_fn().expect("need a hook function"));
            let struct_ptr: *const ShitFunctionHook<FuncType> = &hook_struct;
            if hook_fn(struct_ptr, 1) != 1 {
                error!("Hook failed!");
            }
        }
    }
}
// Pick the active hooking backend and return it as a typed hook-install fn.
fn get_hook_fn<FuncType>() -> fn(FuncType, FuncType, &mut Option<FuncType>) {
    // Use libhooker if found.
    if get_shit_raw_hook_fn().is_ok() {
        return gen_shit_hook_fn();
    }
    let raw = get_raw_hook_fn().expect("get_hook_fn: get_raw_hook_fn failed");
    // Reinterpret cast the address to get a function pointer.
    // We get the address as a usize so that it can be cached once and then reused
    // to get different signatures.
    unsafe {
        let addr_ptr: *const usize = &raw;
        *(addr_ptr as *const fn(FuncType, FuncType, &mut Option<FuncType>))
    }
}
/// Something that can be hooked, resolved to an absolute address on use.
pub enum Target<FuncType> {
    /// A function pointer.
    _Function(FuncType),
    /// A raw address, to which the ASLR offset for the current image will be applied.
    Address(usize),
    /// A raw address, to which the ASLR offset for the image given as the second parameter will be applied.
    _ForeignAddress(usize, u32),
}
impl<FuncType> Target<FuncType> {
    // Resolve this target to an absolute (slid) address.
    fn get_absolute(&self) -> usize {
        match self {
            // Read the pointer's address bits out of the function value.
            Target::_Function(func) => unsafe { std::mem::transmute_copy(func) },
            Target::Address(addr) => {
                let aslr_offset = get_image_aslr_offset(0);
                addr + aslr_offset
            }
            Target::_ForeignAddress(addr, image) => {
                let aslr_offset = get_image_aslr_offset(*image);
                addr + aslr_offset
            }
        }
    }
    // Reinterpret the absolute address as a value of the hooked function's type.
    fn get_as_fn(&self) -> FuncType {
        unsafe { std::mem::transmute_copy(&self.get_absolute()) }
    }
    /// Install `replacement` without keeping a way to call the original.
    pub fn hook_hard(&self, replacement: FuncType) {
        get_hook_fn::<FuncType>()(self.get_as_fn(), replacement, &mut None);
    }
    /// Install `replacement`, storing a callable original in `original_out`.
    pub fn hook_soft(&self, replacement: FuncType, original_out: &mut Option<FuncType>) {
        get_hook_fn::<FuncType>()(self.get_as_fn(), replacement, original_out);
    }
}
/// Declare a module `$name` wrapping a hard hook (no original kept) of the
/// function with signature `$sig` at slid address `$addr`.
#[macro_export]
macro_rules! create_hard_target {
    ($name:ident, $addr:literal, $sig:ty) => {
        #[allow(dead_code)]
        pub mod $name {
            #[allow(unused_imports)]
            use super::*;
            const TARGET: crate::hook::Target<$sig> = crate::hook::Target::Address($addr);
            pub fn install(replacement: $sig) {
                TARGET.hook_hard(replacement);
            }
        }
    };
}
/// Declare a module `$name` wrapping a soft hook of the function with
/// signature `$sig` at slid address `$addr`; the original is kept in the
/// module's `ORIGINAL` static for use with `call_original!`.
#[macro_export]
macro_rules! create_soft_target {
    ($name:ident, $addr:literal, $sig:ty) => {
        #[allow(dead_code)]
        pub mod $name {
            #[allow(unused_imports)]
            use super::*;
            const TARGET: crate::hook::Target<$sig> = crate::hook::Target::Address($addr);
            pub static mut ORIGINAL: Option<$sig> = None;
            pub fn install(replacement: $sig) {
                TARGET.hook_soft(replacement, unsafe { &mut ORIGINAL });
            }
        }
    };
}
/// Unwrap a soft hook's stored original; panics if the hook was never installed.
#[macro_export]
macro_rules! deref_original {
    ($orig_name:expr) => {
        unsafe { $orig_name.unwrap() }
    };
}
/// Call a soft hook's stored original, with or without arguments.
/// Panics if the hook was never installed.
#[macro_export]
macro_rules! call_original {
    ($hook_module:path) => {{
        use $hook_module as base;
        #[allow(unused_unsafe)]
        unsafe { base::ORIGINAL }.unwrap()()
    }};
    ($hook_module:path, $($args:expr),+) => {{
        // Workaround for $hook_module::x not working - see #48067.
        use $hook_module as base;
        #[allow(unused_unsafe)]
        unsafe { base::ORIGINAL }.unwrap()($($args),+)
    }}
}
/// Apply image 0's ASLR slide to `address` and reinterpret the resulting
/// address value itself as a `T` (e.g. a pointer type). Note this does NOT
/// dereference the slid address — it returns its bits as `T`.
pub fn slide<T: Copy>(address: usize) -> T {
    unsafe {
        let addr_ptr: *const usize = &(address + crate::hook::get_image_aslr_offset(0));
        *(addr_ptr as *const T)
    }
}
/// Read the value of type `T` stored at the slid `address`.
pub fn get_global<T: Copy>(address: usize) -> T {
    let slid: *const T = slide(address);
    unsafe { *slid }
}
/// True when the current executable's path ends in "ger" — per the function
/// name, this identifies the German build of the game.
pub fn is_german_game() -> bool {
    let exe = std::env::current_exe().unwrap();
    exe.to_string_lossy().ends_with("ger")
}
/// Produce a human-readable resolved backtrace with un-slid addresses.
///
/// Every frame is un-slid with image 0's ASLR offset, which is only correct
/// for frames that actually belong to image 0 — the output carries a warning
/// line saying so.
pub fn generate_backtrace() -> String {
    // Generate a resolved backtrace. The symbol names aren't always correct, but we
    // should still display them because they are helpful for Rust functions.
    let resolved = backtrace::Backtrace::new();
    let slide = get_image_aslr_offset(0) as u64;
    let mut lines = vec![
        format!("ASLR offset for image 0 is {:#x}.", slide),
        "Warning: All addresses will be assumed to be from image 0.".to_string(),
    ];
    for (i, frame) in resolved.frames().iter().enumerate() {
        let address = frame.symbol_address() as u64;
        // wrapping_sub: frames from other images can have address < slide,
        // which would otherwise panic on subtraction overflow in debug builds.
        let string = format!(
            "{}: {:#x} - {:#x} = {:#x}\n symbols: {:?}",
            i,
            address,
            slide,
            address.wrapping_sub(slide),
            frame.symbols()
        );
        lines.push(string);
    }
    lines.join("\n\n")
}
|
use crate::common::{check_installed, create_virtualenv, maybe_mock_cargo, test_python_path};
use anyhow::{bail, Context, Result};
#[cfg(feature = "zig")]
use cargo_zigbuild::Zig;
use clap::Parser;
use maturin::{BuildOptions, PlatformTag, PythonInterpreter};
use std::env;
use std::path::Path;
use std::process::Command;
use std::str;
/// For each installed python version, this builds a wheel, creates a virtualenv if it
/// doesn't exist, installs the package and runs check_installed.py
pub fn test_integration(
    package: impl AsRef<Path>,
    bindings: Option<String>,
    unique_name: &str,
    zig: bool,
    target: Option<&str>,
) -> Result<()> {
    maybe_mock_cargo();
    // Pass CARGO_BIN_EXE_maturin for testing purpose
    env::set_var(
        "CARGO_BIN_EXE_cargo-zigbuild",
        env!("CARGO_BIN_EXE_maturin"),
    );
    let package_string = package.as_ref().join("Cargo.toml").display().to_string();
    // The first argument is ignored by clap
    let shed = format!("test-crates/wheels/{unique_name}");
    let target_dir = format!("test-crates/targets/{unique_name}");
    let python_interp = test_python_path();
    // Assemble the `maturin build` CLI invocation.
    let mut cli = vec![
        "build",
        "--quiet",
        "--manifest-path",
        &package_string,
        "--target-dir",
        &target_dir,
        "--out",
        &shed,
    ];
    if let Some(ref bindings) = bindings {
        cli.push("--bindings");
        cli.push(bindings);
    }
    if let Some(target) = target {
        cli.push("--target");
        cli.push(target)
    }
    #[cfg(feature = "zig")]
    let zig_found = Zig::find_zig().is_ok();
    #[cfg(not(feature = "zig"))]
    let zig_found = false;
    // Only exercise --zig on CI or when a zig binary is available; otherwise
    // fall back to plain "linux" compatibility.
    let test_zig = if zig && (env::var("GITHUB_ACTIONS").is_ok() || zig_found) {
        cli.push("--zig");
        true
    } else {
        cli.push("--compatibility");
        cli.push("linux");
        false
    };
    if let Some(interp) = python_interp.as_ref() {
        cli.push("--interpreter");
        cli.push(interp);
    }
    let options: BuildOptions = BuildOptions::try_parse_from(cli)?;
    let build_context = options.into_build_context(false, cfg!(feature = "faster-tests"), false)?;
    let wheels = build_context.build_wheels()?;
    // For abi3 on unix, we didn't use a python interpreter, but we need one here
    let interpreter = if build_context.interpreter.is_empty() {
        let error_message = "python3 should be a python interpreter";
        let venv_interpreter = PythonInterpreter::check_executable(
            python_interp.as_deref().unwrap_or("python3"),
            &build_context.target,
            build_context.bridge(),
        )
        .context(error_message)?
        .context(error_message)?;
        vec![venv_interpreter]
    } else {
        build_context.interpreter
    };
    // We can do this since we know that wheels are built and returned in the
    // order they are in the build context
    for ((filename, supported_version), python_interpreter) in wheels.iter().zip(interpreter) {
        if test_zig
            && build_context.target.is_linux()
            && !build_context.target.is_musl_libc()
            && build_context.target.get_minimum_manylinux_tag() != PlatformTag::Linux
        {
            // The manylinux tag zig produces depends on the rustc version in use.
            let rustc_ver = rustc_version::version()?;
            let python_arch = build_context.target.get_python_arch();
            let file_suffix = if rustc_ver >= semver::Version::new(1, 64, 0) {
                format!("manylinux_2_17_{python_arch}.manylinux2014_{python_arch}.whl")
            } else {
                format!("manylinux_2_12_{python_arch}.manylinux2010_{python_arch}.whl")
            };
            assert!(filename.to_string_lossy().ends_with(&file_suffix))
        }
        // One virtualenv per (package, python version, target) combination.
        let mut venv_name = if supported_version == "py3" {
            format!("{unique_name}-py3")
        } else {
            format!(
                "{}-py{}.{}",
                unique_name, python_interpreter.major, python_interpreter.minor,
            )
        };
        if let Some(target) = target {
            venv_name = format!("{venv_name}-{target}");
        }
        let (venv_dir, python) =
            create_virtualenv(&venv_name, Some(python_interpreter.executable.clone()))?;
        let command = [
            "-m",
            "pip",
            "--disable-pip-version-check",
            "--no-cache-dir",
            "install",
            "--force-reinstall",
        ];
        let output = Command::new(&python)
            .args(command)
            .arg(dunce::simplified(filename))
            .output()
            .context(format!("pip install failed with {python:?}"))?;
        if !output.status.success() {
            let full_command = format!("{} {}", python.display(), command.join(" "));
            bail!(
                "pip install in {} failed running {:?}: {}\n--- Stdout:\n{}\n--- Stderr:\n{}\n---\n",
                venv_dir.display(),
                full_command,
                output.status,
                str::from_utf8(&output.stdout)?.trim(),
                str::from_utf8(&output.stderr)?.trim(),
            );
        }
        // Any stderr output at all (even warnings) is treated as a failure.
        if !output.stderr.is_empty() {
            bail!(
                "pip raised a warning running {:?}: {}\n--- Stdout:\n{}\n--- Stderr:\n{}\n---\n",
                &command,
                output.status,
                str::from_utf8(&output.stdout)?.trim(),
                str::from_utf8(&output.stderr)?.trim(),
            );
        }
        check_installed(package.as_ref(), &python)?;
    }
    Ok(())
}
/// Build wheels against freshly created conda environments (python 3.7–3.10)
/// and pip-install + verify each wheel inside its matching environment.
pub fn test_integration_conda(package: impl AsRef<Path>, bindings: Option<String>) -> Result<()> {
    use crate::common::create_conda_env;
    use std::path::PathBuf;
    use std::process::Stdio;
    let package_string = package.as_ref().join("Cargo.toml").display().to_string();
    // Create environments to build against, prepended with "A" to ensure that integration
    // tests are executed with these environments
    let mut interpreters = Vec::new();
    for minor in 7..=10 {
        let (_, venv_python) = create_conda_env(&format!("A-maturin-env-3{minor}"), 3, minor)?;
        interpreters.push(venv_python);
    }
    // The first argument is ignored by clap
    let mut cli = vec![
        "build",
        "--manifest-path",
        &package_string,
        "--quiet",
        "--interpreter",
    ];
    for interp in &interpreters {
        cli.push(interp.to_str().unwrap());
    }
    if let Some(ref bindings) = bindings {
        cli.push("--bindings");
        cli.push(bindings);
    }
    let options = BuildOptions::try_parse_from(cli)?;
    let build_context = options.into_build_context(false, cfg!(feature = "faster-tests"), false)?;
    let wheels = build_context.build_wheels()?;
    // Keep only the wheels built against our conda interpreters.
    let mut conda_wheels: Vec<(PathBuf, PathBuf)> = vec![];
    for ((filename, _), python_interpreter) in wheels.iter().zip(build_context.interpreter) {
        let executable = python_interpreter.executable;
        if executable.to_str().unwrap().contains("maturin-env-") {
            conda_wheels.push((filename.clone(), executable))
        }
    }
    assert_eq!(
        interpreters.len(),
        conda_wheels.len(),
        "Error creating or detecting conda environments."
    );
    for (wheel_file, executable) in conda_wheels {
        let output = Command::new(&executable)
            .args([
                "-m",
                "pip",
                "--disable-pip-version-check",
                "install",
                "--force-reinstall",
            ])
            .arg(dunce::simplified(&wheel_file))
            .stderr(Stdio::inherit())
            .output()?;
        if !output.status.success() {
            // Previously a bare panic!() with no diagnostic at all.
            panic!(
                "pip install of {} with {:?} failed: {}",
                wheel_file.display(),
                executable,
                output.status
            );
        }
        check_installed(package.as_ref(), &executable)?;
    }
    Ok(())
}
|
use error_chain::{bail, error_chain};
// Generate this module's Error/Result types, with automatic conversions
// from the listed foreign error types.
error_chain! {
    foreign_links {
        IO(std::io::Error);
        String(std::string::FromUtf8Error);
        Regex(regex::Error);
        Env(std::env::VarError);
    }
}
/// Run `git log --oneline` and print the five most recent commits as
/// parsed `Commit` values.
pub fn run_cmd() -> Result<()> {
    use regex::Regex;
    use std::process::Command;

    // A single parsed `git log --oneline` entry.
    #[derive(PartialEq, Default, Clone, Debug)]
    struct Commit {
        hash: String,
        message: String,
    }

    let output = Command::new("git").args(["log", "--oneline"]).output()?;
    if !output.status.success() {
        bail!("command executed with failing code error");
    }
    let pattern = Regex::new(
        r"(?x)
                               ([0-9a-fA-F]+) # commit hash
                               (.*) # The commit message",
    )?;
    let stdout = String::from_utf8(output.stdout)?;
    let commits = stdout
        .lines()
        .filter_map(|line| pattern.captures(line))
        .map(|cap| Commit {
            hash: cap[1].to_string(),
            message: cap[2].trim().to_string(),
        })
        .take(5);
    for commit in commits {
        println!("{:?}", commit);
    }
    Ok(())
}
/// Spawn `python`, feed it a short script via stdin, and print the set of
/// unique lowercased words from its stdout.
fn run_python() -> Result<()> {
    use std::collections::HashSet;
    use std::io::Write;
    use std::process::{Command, Stdio};
    let mut child = Command::new("python")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()?;
    child
        .stdin
        .as_mut()
        // Fixed message: this pipe is the child's stdin, not stdout.
        .ok_or("open stdin failed!")?
        .write_all(b"import this; copyright(); credits(); exit()")?;
    let output = child.wait_with_output()?;
    if output.status.success() {
        let raw_output = String::from_utf8(output.stdout)?;
        let words = raw_output
            .split_whitespace()
            .map(|s| s.to_lowercase())
            .collect::<HashSet<_>>();
        println!("Found {} unique words:", words.len());
        println!("{:#?}", words);
    }
    Ok(())
}
pub fn like_grep() -> Result<()> {
use std::process::{Command, Stdio};
let current_dir = std::env::current_dir()?;
let mut command1 = Command::new("du")
.arg("-ah")
.arg(¤t_dir)
.stdout(Stdio::piped())
.spawn()?;
if let Some(output1) = command1.stdout.take() {
let mut command2 = Command::new("sort")
.arg("-hr")
.stdout(Stdio::piped())
.stdin(output1)
.spawn()?;
command1.wait()?;
if let Some(output2) = command2.stdout.take() {
let command3 = Command::new("head")
.args(&["-n", "10"])
.stdin(output2)
.stdout(Stdio::piped())
.spawn()?;
let output3 = command3.wait_with_output()?;
command2.wait();
println!(
"Top 10 biggest files and directories in '{}':\n{}",
current_dir.display(),
String::from_utf8(output3.stdout).unwrap()
);
}
}
Ok(())
}
/// Run `ls . oops`, sending both its stdout and stderr to `xxx.log`.
pub fn eprint_to_file() -> Result<()> {
    use std::process::{Command, Stdio};
    let log = std::fs::File::create("xxx.log")?;
    // Clone the handle so both streams append to the same file.
    let log_for_stderr = log.try_clone()?;
    let child = Command::new("ls")
        .args([".", "oops"])
        .stdout(Stdio::from(log))
        .stderr(Stdio::from(log_for_stderr))
        .spawn()?;
    child.wait_with_output()?;
    Ok(())
}
/// Read the config file whose path comes from the `CONFIG` environment
/// variable, falling back to the default path "CONFIG.env" when it is unset,
/// and print its contents.
pub fn read_env() -> Result<()> {
    // Fixed: the old code looked up an env var literally named "CONFIG.env",
    // which contradicts the documented intent (env var `CONFIG` holding the
    // config path). unwrap_or_else keeps the fallback allocation lazy.
    let config_path = std::env::var("CONFIG").unwrap_or_else(|_| "CONFIG.env".to_string());
    let config: String = std::fs::read_to_string(config_path)?;
    println!(
        "----------------------------------------------------Config: {}",
        config
    );
    Ok(())
}
#[test]
fn test() {
    // run_cmd();
    // if let Err(errors) = run_python() {
    //     errors
    //         .iter()
    //         .enumerate()
    //         .for_each(|(index, error)| println!("└> {} - {}", index, error));
    // }
    // like_grep();
    // NOTE(review): both Results are discarded, so this test cannot fail on
    // errors from read_env/eprint_to_file — confirm that is intentional.
    read_env();
    eprint_to_file();
}
|
use serde::{Deserialize};
use serde_json;
use std::convert::TryFrom;
use std::fmt::Display;
use serde_repr::*;
use cached::proc_macro::cached;
/// A single move record deserialized from the bundled moves JSON.
#[derive(Deserialize, Debug)]
pub struct Move {
    pub name: String,
    pub move_id: MoveId,
    pub available: bool,
    pub effects: String,
    // JSON field is "type"; renamed because `type` is a Rust keyword.
    #[serde(rename = "type")]
    pub ty: Ty,
    pub tr: Tr,
    pub tm: Tm,
    // NOTE(review): numeric in the data — presumably a damage-category code;
    // confirm the mapping before relying on it.
    pub category: usize,
    pub power: u32,
    pub pp: u32,
    pub priority: i32,
    pub target: MoveTargets
}
// Newtype wrappers keeping the raw JSON integers type-distinct.
#[derive(Deserialize, Debug)]
pub struct Level(u32);
#[derive(Deserialize, Debug, PartialEq)]
pub struct MoveId(usize);
// TR assignment for a move, when it has one.
#[derive(Deserialize, Debug, PartialEq)]
pub struct Tr(Option<TrNo>);
#[derive(Deserialize, Debug, PartialEq)]
pub struct TrNo(usize);
// TM assignment for a move, when it has one.
#[derive(Deserialize, Debug, PartialEq)]
pub struct Tm(Option<TmNo>);
#[derive(Deserialize, Debug, PartialEq)]
pub struct TmNo(usize);
/// Elemental type, (de)serialized as its numeric index via serde_repr.
#[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug)]
#[repr(usize)]
pub enum Ty {
    // Discriminants of a #[repr(usize)] enum are already usize; the previous
    // `N as usize` casts on every variant were redundant.
    Normal = 0,
    Fighting = 1,
    Flying = 2,
    Poison = 3,
    Ground = 4,
    Rock = 5,
    Bug = 6,
    Ghost = 7,
    Psychic = 8,
    Steel = 9,
    Fire = 10,
    Water = 11,
    Grass = 12,
    Electric = 13,
    Ice = 14,
    Dragon = 15,
    Dark = 16,
    Fairy = 17,
}
impl Display for Ty {
    /// Write the type's English name.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        let name = match self {
            Ty::Normal => "Normal",
            Ty::Fighting => "Fighting",
            Ty::Flying => "Flying",
            Ty::Poison => "Poison",
            Ty::Ground => "Ground",
            Ty::Rock => "Rock",
            Ty::Bug => "Bug",
            Ty::Ghost => "Ghost",
            Ty::Psychic => "Psychic",
            Ty::Steel => "Steel",
            Ty::Fire => "Fire",
            Ty::Water => "Water",
            Ty::Grass => "Grass",
            Ty::Electric => "Electric",
            Ty::Ice => "Ice",
            Ty::Dragon => "Dragon",
            Ty::Dark => "Dark",
            Ty::Fairy => "Fairy",
        };
        write!(f, "{}", name)
    }
}
impl TryFrom<usize> for Ty {
    type Error = &'static str;

    /// Map a raw type index (0..=17) back to its enum variant.
    fn try_from(ty_id: usize) -> Result<Ty, Self::Error> {
        let ty = match ty_id {
            0 => Ty::Normal,
            1 => Ty::Fighting,
            2 => Ty::Flying,
            3 => Ty::Poison,
            4 => Ty::Ground,
            5 => Ty::Rock,
            6 => Ty::Bug,
            7 => Ty::Ghost,
            8 => Ty::Psychic,
            9 => Ty::Steel,
            10 => Ty::Fire,
            11 => Ty::Water,
            12 => Ty::Grass,
            13 => Ty::Electric,
            14 => Ty::Ice,
            15 => Ty::Dragon,
            16 => Ty::Dark,
            17 => Ty::Fairy,
            _ => return Err("A type mapping does not exist for the supplied value."),
        };
        Ok(ty)
    }
}
/// Targeting mode of a move, deserialized from its string name in the JSON.
#[derive(Deserialize, Debug)]
pub enum MoveTargets {
    All , // = "All",
    AllAdjacent , // = "AllAdjacent",
    AllAdjacentOpponents, // = "AllAdjacentOpponents",
    AllAllies , // = "AllAllies",
    Ally , // = "Ally",
    AllyOrSelf , // = "AllyOrSelf",
    AnyExceptSelf , // = "AnyExceptSelf",
    Counter , // = "Counter",
    Opponent , // = "Opponent",
    RandomOpponent , // = "RandomOpponent",
    // "Self" is a Rust keyword, so the variant is renamed for serde.
    #[serde(rename = "Self")]
    Self_ , // = "Self",
    SideAll , // = "SideAll",
    SideOpponent, // = "SideOpponent",
    SideSelf , // = "SideSelf"
}
/// One pokedex record deserialized from the bundled dex JSON.
#[derive(Debug, Deserialize)]
pub struct PokedexEntry {
    pub id: u32,
    pub name: String,
    pub stage: u32,
    pub galar_dex: Option<String>,
    // NOTE(review): six-element arrays — presumably HP/Atk/Def/SpA/SpD/Spe
    // order; confirm against the data source.
    pub base_stats: [u32; 6],
    pub ev_yield: [u32; 6],
    pub abilities: Vec<String>,
    pub types: Vec<String>,
    // Left as raw JSON: the shape varies per entry.
    pub items: serde_json::Value,
    pub exp_group: String,
    pub egg_groups: Vec<String>,
    pub hatch_cycles: Option<u32>,
    pub height: f32,
    pub weight: f32,
    pub color: String,
    pub level_up_moves: Vec<(Level, MoveId)>,
    pub egg_moves: Vec<MoveId>,
    pub tms: Vec<TmNo>,
    pub trs: Vec<TrNo>,
    pub evolutions: Vec<serde_json::Map<String, serde_json::Value>>,
    pub description: Option<String>,
    pub catch_rate: Option<u32>
}
impl PokedexEntry {
    /// Parse every pokedex entry out of the JSON bundled into the binary.
    pub fn load_all() -> Vec<PokedexEntry> {
        static DEX_JSON: &'static str = include_str!("../pokemon-dex-updated.json");
        serde_json::from_str(DEX_JSON).expect("This will never not work.")
    }
}
impl Move {
    /// Parse every move record out of the JSON bundled into the binary.
    pub fn load_all() -> Vec<Self> {
        static MV_DATA: &'static str = include_str!("../new-moves.json");
        serde_json::from_str(MV_DATA).expect("This should never fail")
    }
}
/*
pub enum MoveForeignKey {
MoveId,
TmNo,
TrNo
}*/
/// Look up the full [`Move`] record behind a move-identifying key.
pub trait IntoMove {
    // NOTE(review): despite the `into_` name this borrows self, and every
    // implementation re-parses the whole moves JSON per call via load_all().
    fn into_move(&self) -> Move;
}
impl IntoMove for MoveId {
    // Assumes move_id equals the move's position in the JSON array — TODO
    // confirm. Panics when the index is out of range.
    fn into_move(&self) -> Move {
        Move::load_all()
            .into_iter()
            .nth(self.0)
            .unwrap()
    }
}
impl IntoMove for TmNo {
    // Panics when no move carries this TM number.
    fn into_move(&self) -> Move {
        Move::load_all()
            .into_iter()
            .find(|mv| mv.tm == Tm(Some(TmNo(self.0))))
            .unwrap()
    }
}
impl IntoMove for TrNo {
    // Panics when no move carries this TR number.
    fn into_move(&self) -> Move {
        Move::load_all()
            .into_iter()
            .find(|mv| mv.tr == Tr(Some(TrNo(self.0))))
            .unwrap()
    }
}
impl IntoMove for Tm {
    /// Find the move taught by this TM.
    ///
    /// # Panics
    /// Panics only when no move matches this TM number.
    fn into_move(&self) -> Move {
        Move::load_all()
            .into_iter()
            // Previously both sides were unwrap()ed inside the predicate,
            // which panicked as soon as the scan hit any move without a TM
            // (or when self is Tm(None)). Non-matching cases now just skip.
            .find(|mv| match (&mv.tm.0, &self.0) {
                (Some(found), Some(wanted)) => found.0 == wanted.0,
                _ => false,
            })
            .unwrap()
    }
}
impl IntoMove for Tr {
    /// Find the move taught by this TR.
    ///
    /// # Panics
    /// Panics only when no move matches this TR number.
    fn into_move(&self) -> Move {
        Move::load_all()
            .into_iter()
            // Previously both sides were unwrap()ed inside the predicate,
            // which panicked as soon as the scan hit any move without a TR
            // (or when self is Tr(None)). Non-matching cases now just skip.
            .find(|mv| match (&mv.tr.0, &self.0) {
                (Some(found), Some(wanted)) => found.0 == wanted.0,
                _ => false,
            })
            .unwrap()
    }
}
|
use yew::prelude::*;
/// Page banner component that renders its title in an <h1>.
pub struct Header {
    pub title: String,
}
/// Properties for [`Header`] — just the title text to display.
#[derive(Clone, Default, PartialEq)]
pub struct Props {
    pub title: String,
}
impl Component for Header {
    type Message = ();
    type Properties = Props;
    // Copy the title out of the initial props.
    fn create(props: Self::Properties, _: ComponentLink<Self>) -> Self {
        Header {
            title: props.title
        }
    }
    // No messages exist for this component, so never re-render from update().
    fn update(&mut self, _msg: Self::Message) -> ShouldRender {
        false
    }
    // Adopt the new title and request a re-render whenever props change.
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        self.title = props.title;
        true
    }
}
impl Renderable<Header> for Header {
    // Render the banner: an <h1> with the current title.
    fn view(&self) -> Html<Self> {
        html!{
            <header class="Banner",>
                <h1 class="PageTitle",>
                    { &self.title }
                </h1>
            </header>
        }
    }
}
|
use iced::{Align, button, Button, Checkbox, Element, Row, Text, text_input, TextInput, VerticalAlignment};
use crate::central_ui;
use crate::puzzle_backend;
/// Which sub-menu the control row is currently showing.
#[derive(Debug, Clone)]
pub enum State {
    Main,
    New,
    Save,
    Open,
    // Result message displayed after a save/open attempt.
    OperationResult(String),
}
/// Widget state for the bottom control row (New / Save / Open flows).
pub struct ControlsRow {
    new_but: button::State,
    mini_but: button::State,
    weekday_but: button::State,
    weekday_asym_but: button::State,
    sunday_but: button::State,
    back_but: button::State,
    // Active sub-menu.
    state: State,
    save_but: button::State,
    save_field: text_input::State,
    pub save_path_string: String,
    pub save_empty_grid: bool,
    open_but: button::State,
    open_field: text_input::State,
    pub open_path_string: String,
}
impl ControlsRow {
    /// Fresh control row showing the main menu; both path fields default to
    /// the current working directory.
    pub fn new() -> Self {
        ControlsRow {
            new_but: Default::default(),
            mini_but: Default::default(),
            weekday_but: Default::default(),
            weekday_asym_but: Default::default(),
            sunday_but: Default::default(),
            back_but: Default::default(),
            state: State::Main,
            save_but: Default::default(),
            save_field: Default::default(),
            save_path_string: std::env::current_dir().unwrap().to_str().unwrap().to_string(),
            save_empty_grid: false,
            open_but: Default::default(),
            open_field: Default::default(),
            open_path_string: std::env::current_dir().unwrap().to_str().unwrap().to_string(),
        }
    }
    /// Switch to sub-menu `s`.
    pub fn set_state(&mut self, s: State) {
        self.state = s;
    }
    /// Build the row's widget tree for the active sub-menu.
    pub fn view(&mut self) -> Element<central_ui::Message> {
        match &self.state {
            // Main menu: entry points into the three flows.
            State::Main => {
                Row::new()
                    .spacing(10)
                    .push(
                        Button::new(&mut self.new_but, Text::new("New ...")).on_press(central_ui::Message::ControlSetState(State::New))
                    )
                    .push(
                        Button::new(&mut self.save_but, Text::new("Save ...")).on_press(central_ui::Message::ControlSetState(State::Save))
                    )
                    .push(
                        Button::new(&mut self.open_but, Text::new("Open ...")).on_press(central_ui::Message::ControlSetState(State::Open))
                    )
                    .into()
            }
            // New-puzzle menu: one button per puzzle type.
            State::New => {
                Row::new()
                    .spacing(10)
                    .push(
                        Button::new(&mut self.back_but, Text::new("Back")).on_press(central_ui::Message::ControlSetState(State::Main))
                    )
                    .push(
                        Button::new(&mut self.mini_but, Text::new("New Mini")).on_press(central_ui::Message::NewPuzzle(puzzle_backend::PuzzleType::Mini))
                    )
                    .push(
                        Button::new(&mut self.weekday_but, Text::new("New Weekday")).on_press(central_ui::Message::NewPuzzle(puzzle_backend::PuzzleType::Weekday))
                    )
                    .push(
                        Button::new(&mut self.weekday_asym_but, Text::new("New Weekday (Asymmetric)")).on_press(central_ui::Message::NewPuzzle(puzzle_backend::PuzzleType::WeekdayAsymmetric))
                    )
                    .push(
                        Button::new(&mut self.sunday_but, Text::new("New Sunday")).on_press(central_ui::Message::NewPuzzle(puzzle_backend::PuzzleType::Sunday))
                    )
                    .into()
            }
            // Save flow: path input, empty-grid checkbox, save button.
            State::Save => {
                Row::new()
                    .spacing(10)
                    .align_items(Align::Center)
                    .push(
                        Button::new(&mut self.back_but, Text::new("Back")).on_press(central_ui::Message::ControlSetState(State::Main))
                    )
                    .push(
                        Text::new("Save to file (*.cro): ").vertical_alignment(VerticalAlignment::Center)
                    )
                    .push(
                        TextInput::new(&mut self.save_field, "Save file path..." , &self.save_path_string, central_ui::Message::SavePathModified)
                            .on_submit(central_ui::Message::AttemptSave)
                    )
                    .push(
                        Checkbox::new(
                            self.save_empty_grid,
                            "Save final solvable grid?".to_string(),
                            central_ui::Message::SaveEmptyGrid
                        )
                    )
                    .push(
                        Button::new(&mut self.save_but, Text::new("Save")).on_press(central_ui::Message::AttemptSave)
                    )
                    .into()
            }
            // Open flow: path input plus open button.
            State::Open => {
                Row::new()
                    .spacing(10)
                    .align_items(Align::Center)
                    .push(
                        Button::new(&mut self.back_but, Text::new("Back")).on_press(central_ui::Message::ControlSetState(State::Main))
                    )
                    .push(
                        Text::new("Open file (*.cro): ").vertical_alignment(VerticalAlignment::Center)
                    )
                    .push(
                        TextInput::new(&mut self.open_field, "Open file path..." , &self.open_path_string, central_ui::Message::OpenPathModified)
                            .on_submit(central_ui::Message::AttemptOpen)
                    )
                    .push(
                        Button::new(&mut self.open_but, Text::new("Open")).on_press(central_ui::Message::AttemptOpen)
                    )
                    .into()
            }
            // Post-operation banner: message plus an OK button back to Main.
            State::OperationResult(s) => {
                Row::new()
                    .spacing(10)
                    .align_items(Align::Center)
                    .push(
                        Button::new(&mut self.back_but, Text::new("OK")).on_press(central_ui::Message::ControlSetState(State::Main))
                    )
                    .push(
                        Text::new(s)
                    )
                    .into()
            }
        }
    }
}
// use std::cmp::max;
/// Count pairs (a, b) with l <= a <= b <= r and a + b == n:
/// a ranges over max(l, n - r) ..= n / 2, clamped to zero when empty.
fn countSumOfTwoRepresentations3(n: i32, l: i32, r: i32) -> i32 {
    let lowest_a = l.max(n - r);
    let highest_a = n / 2;
    if highest_a >= lowest_a {
        highest_a - lowest_a + 1
    } else {
        0
    }
}
|
use rusty8::cpu::CPU;
use std::env;
use std::fs::File;
use std::time::Duration;
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
use sdl2::pixels::{Color, PixelFormatEnum};
use sdl2::render::TextureAccess;
// Initial window size in pixels; the canvas logical size is set to the
// same 800x600 after creation.
const WINDOW_WIDTH: u32 = 800;
const WINDOW_HEIGHT: u32 = 600;
/// Entry point: load the CHIP-8 ROM given as argv[1], then run the
/// interpreter loop with an SDL2 window, hex-keypad key mapping, and a
/// streamed 64x32 ARGB texture for the display.
fn main() -> std::io::Result<()> {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        println!("No rom passed in.");
        return Ok(());
    }
    let rom_path = &args[1];
    let sdl_context = sdl2::init().unwrap();
    let video = sdl_context.video().unwrap();
    let window = video
        .window("Rusty 8", WINDOW_WIDTH, WINDOW_HEIGHT)
        .position_centered()
        .build()
        .unwrap();
    let mut canvas = window.into_canvas().build().unwrap();
    let texture_creator = canvas.texture_creator();
    canvas.set_draw_color(Color::RGB(0, 0, 0));
    canvas.set_logical_size(800, 600).unwrap();
    // One ARGB8888 texel per CHIP-8 pixel (64x32).
    let mut texture = texture_creator
        .create_texture(PixelFormatEnum::ARGB8888, TextureAccess::Streaming, 64, 32)
        .unwrap();
    let mut file = File::open(rom_path)?;
    let mut chip8 = CPU::init();
    chip8.load_rom(&mut file)?;
    let mut event_pump = sdl_context.event_pump().unwrap();
    let mut buf = [0_u32; 2048]; // temp texture buffer
    'running: loop {
        chip8.cycle();
        for event in event_pump.poll_iter() {
            match event {
                Event::Quit { .. }
                | Event::KeyDown {
                    keycode: Some(Keycode::Escape),
                    ..
                } => break 'running,
                // Map the 1234/QWER/ASDF/ZXCV grid to the CHIP-8 hex keypad.
                Event::KeyDown { keycode, .. } => match keycode {
                    Some(Keycode::Num1) => chip8.keyboard[0x1] = true,
                    Some(Keycode::Num2) => chip8.keyboard[0x2] = true,
                    Some(Keycode::Num3) => chip8.keyboard[0x3] = true,
                    Some(Keycode::Num4) => chip8.keyboard[0xC] = true,
                    Some(Keycode::Q) => chip8.keyboard[0x4] = true,
                    Some(Keycode::W) => chip8.keyboard[0x5] = true,
                    Some(Keycode::E) => chip8.keyboard[0x6] = true,
                    Some(Keycode::R) => chip8.keyboard[0xD] = true,
                    Some(Keycode::A) => chip8.keyboard[0x7] = true,
                    Some(Keycode::S) => chip8.keyboard[0x8] = true,
                    Some(Keycode::D) => chip8.keyboard[0x9] = true,
                    Some(Keycode::F) => chip8.keyboard[0xE] = true,
                    Some(Keycode::Z) => chip8.keyboard[0xA] = true,
                    Some(Keycode::X) => chip8.keyboard[0x0] = true,
                    Some(Keycode::C) => chip8.keyboard[0xB] = true,
                    Some(Keycode::V) => chip8.keyboard[0xF] = true,
                    _ => {}
                },
                Event::KeyUp { keycode, .. } => match keycode {
                    Some(Keycode::Num1) => chip8.keyboard[0x1] = false,
                    Some(Keycode::Num2) => chip8.keyboard[0x2] = false,
                    Some(Keycode::Num3) => chip8.keyboard[0x3] = false,
                    Some(Keycode::Num4) => chip8.keyboard[0xC] = false,
                    Some(Keycode::Q) => chip8.keyboard[0x4] = false,
                    Some(Keycode::W) => chip8.keyboard[0x5] = false,
                    Some(Keycode::E) => chip8.keyboard[0x6] = false,
                    Some(Keycode::R) => chip8.keyboard[0xD] = false,
                    Some(Keycode::A) => chip8.keyboard[0x7] = false,
                    Some(Keycode::S) => chip8.keyboard[0x8] = false,
                    Some(Keycode::D) => chip8.keyboard[0x9] = false,
                    Some(Keycode::F) => chip8.keyboard[0xE] = false,
                    Some(Keycode::Z) => chip8.keyboard[0xA] = false,
                    Some(Keycode::X) => chip8.keyboard[0x0] = false,
                    Some(Keycode::C) => chip8.keyboard[0xB] = false,
                    Some(Keycode::V) => chip8.keyboard[0xF] = false,
                    _ => {}
                },
                _ => {}
            }
        }
        if chip8.draw_flag {
            canvas.clear();
            for (i, p) in buf.iter_mut().enumerate() {
                // Expand each monochrome pixel to opaque white/black ARGB.
                *p = (0x00FF_FFFF * chip8.gfx[i] as u32) | 0xFF00_0000;
            }
            // View the u32 pixel buffer as raw bytes for Texture::update.
            // BUG FIX: the byte slice must cover the whole buffer
            // (buf.len() * 4 bytes = pitch 256 * 32 rows); previously only
            // buf.len() bytes were passed, covering just the first 8 rows.
            let ptr = buf.as_ptr().cast::<u8>();
            let byte_len = buf.len() * std::mem::size_of::<u32>();
            let nbuf = unsafe { std::slice::from_raw_parts(ptr, byte_len) };
            texture
                .update(None, &nbuf, 64 * std::mem::size_of::<u32>())
                .unwrap();
            canvas.copy(&texture, None, None).unwrap();
            canvas.present();
            chip8.draw_flag = false;
        }
        std::thread::sleep(Duration::from_micros(1200));
    }
    Ok(())
}
|
extern crate wasm_bindgen;
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
/// Return `value` incremented by one.
pub fn sums(value: i32) -> i32 {
    let incremented = 1 + value;
    incremented
}
#[wasm_bindgen]
/// nth Fibonacci number with fib(0) = fib(1) = 1.
///
/// Iterative rewrite of the previous naive double recursion, which was
/// exponential in `n` and overflowed the stack for negative `n` (negative
/// inputs now return 1). Results are unchanged for n >= 0.
pub fn fibonacci(n: i32) -> i32 {
    let (mut prev, mut curr) = (1, 1);
    for _ in 1..n {
        let next = prev + curr;
        prev = curr;
        curr = next;
    }
    curr
}
#[cfg(test)]
mod tests {
    // Sanity check that the test harness itself runs.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
|
//! `Monotonic` implementation based on RTC peripheral
//!
//! The RTC provides TICK events to the TIMER task via ppi in
//! addition to handling the COMPARE events for the RTIC timer queue.
// TODO - revisit this, probably just use the RTC for ticks, 24 bits of ticks
// is probably fine
// use absolute val in set_compare no need to use rel duration
// https://rtic.rs/dev/book/en/by-example/tips_monotonic_impl.html
use crate::hal::{
clocks::LFCLK_FREQ,
pac,
ppi::{ConfigurablePpi, Ppi, Ppi3},
rtc::{self, Rtc, RtcCompareReg, RtcInterrupt},
timer,
};
use rtic::rtic_monotonic::{
embedded_time::{clock::Error, fraction::Fraction},
Clock, Instant, Monotonic,
};
// Monotonic tick rate; the RTC prescaler is derived from LFCLK_FREQ / this.
pub const TICK_RATE_HZ: u32 = 1024;
/// Using TIMERx to count ticks, 32 bit
pub const MAX_TICKS: u32 = 0xFFFF_FFFF;
// Convenience alias for the common RTC1 + TIMER1 pairing.
pub type Rtc1Monotonic = RtcMonotonic<pac::RTC1, pac::TIMER1>;
/// Example:
/// ```rust
/// #[monotonic(binds = TIMERx, default = true)]
/// type RtcMono = RtcMonotonic<RTCx, TIMERx>;
/// ```
pub struct RtcMonotonic<RTC: rtc::Instance, TIM: timer::Instance> {
    // Generates TICK events (routed to the TIMER via PPI) and COMPARE events.
    rtc: Rtc<RTC>,
    // 32-bit counter incremented by RTC TICK events; read for `try_now`.
    timer: TIM,
    // Held so the PPI channel stays configured for the monotonic's lifetime.
    _ppi: Ppi3,
}
impl<RTC, TIM> RtcMonotonic<RTC, TIM>
where
    RTC: rtc::Instance,
    TIM: timer::Instance,
{
    /// NOTE: LFCLK must be started before using the RTC peripheral
    pub fn new(rtc: RTC, timer: TIM, mut ppi: Ppi3) -> Result<Self, rtc::Error> {
        // Stop the RTC while everything is (re)configured.
        unsafe { rtc.tasks_stop.write(|w| w.bits(1)) };
        // Put the TIMER into 32-bit low-power counter mode with no shortcuts,
        // cleared and with its compare-0 event/interrupt quiesced.
        timer.timer_cancel();
        timer.disable_interrupt();
        timer.as_timer0().events_compare[0].reset();
        timer
            .as_timer0()
            .shorts
            .write(|w| w.compare0_clear().disabled().compare0_stop().disabled());
        timer
            .as_timer0()
            .prescaler
            .write(|w| unsafe { w.prescaler().bits(0) });
        timer.as_timer0().bitmode.write(|w| w.bitmode()._32bit());
        timer
            .as_timer0()
            .mode
            .write(|w| w.mode().low_power_counter());
        timer
            .as_timer0()
            .tasks_clear
            .write(|w| unsafe { w.bits(1) });
        // Route RTC TICK event to the TIMER counter task
        ppi.set_task_endpoint(&timer.as_timer0().tasks_count);
        ppi.set_event_endpoint(&rtc.events_tick);
        ppi.enable();
        // LFCLK_FREQ = 32768 Hz
        // fRTC = 32_768 / (prescaler + 1 )
        let prescaler = (LFCLK_FREQ / TICK_RATE_HZ) - 1;
        let mut rtc = Rtc::new(rtc, prescaler)?;
        // NOTE: the counter is started in the `reset` method
        rtc.disable_counter();
        rtc.disable_interrupt(RtcInterrupt::Compare0, None);
        rtc.disable_interrupt(RtcInterrupt::Tick, None);
        rtc.disable_event(RtcInterrupt::Compare0);
        rtc.disable_event(RtcInterrupt::Tick);
        rtc.clear_counter();
        Ok(RtcMonotonic {
            rtc,
            timer,
            _ppi: ppi,
        })
    }
}
impl<RTC, TIM> Clock for RtcMonotonic<RTC, TIM>
where
    RTC: rtc::Instance,
    TIM: timer::Instance,
{
    type T = u32;
    // Each tick is 1/TICK_RATE_HZ seconds.
    const SCALING_FACTOR: Fraction = Fraction::new(1, TICK_RATE_HZ);
    #[inline(always)]
    fn try_now(&self) -> Result<Instant<Self>, Error> {
        // The TIMER holds the 32-bit count of RTC TICK events (via PPI),
        // so reading its counter yields the current monotonic instant.
        Ok(Instant::new(self.timer.read_counter()))
    }
}
impl<RTC, TIM> Monotonic for RtcMonotonic<RTC, TIM>
where
    RTC: rtc::Instance,
    TIM: timer::Instance,
{
    // Let RTIC call disable_timer() when its timer queue is empty.
    const DISABLE_INTERRUPT_ON_EMPTY_QUEUE: bool = true;
    /// Called once by RTIC at startup: enables the TICK event (feeding the
    /// TIMER via PPI), arms COMPARE0, and starts the RTC counter.
    unsafe fn reset(&mut self) {
        // TICK event routed to TIMER COUNTER task
        self.rtc.enable_event(RtcInterrupt::Tick);
        // The TICK event is consumed by PPI; no CPU interrupt needed for it.
        self.rtc.disable_interrupt(RtcInterrupt::Tick, None);
        self.rtc.set_compare(RtcCompareReg::Compare0, 0).unwrap();
        self.rtc.clear_counter();
        self.rtc.enable_event(RtcInterrupt::Compare0);
        self.rtc.enable_interrupt(RtcInterrupt::Compare0, None);
        self.rtc.enable_counter();
    }
    /// Schedules the next wakeup. The RTC counter is cleared below, so the
    /// compare value written is the *relative* tick count until `val`.
    fn set_compare(&mut self, val: &Instant<Self>) {
        let now: Instant<Self> = Instant::new(self.timer.read_counter());
        // RTC compare registers are 24 bits wide, hence the clamp
        // (cf. the module-level TODO about 24-bit ticks).
        let max = 0x00FF_FFFF;
        let dur = match val.checked_duration_since(&now) {
            None => {
                1 // In the past: fire as soon as possible
            }
            // Clamp into [1, max].
            Some(x) => max.min(x.integer()).max(1),
        };
        self.rtc.set_compare(RtcCompareReg::Compare0, dur).unwrap();
        self.rtc.clear_counter();
    }
    /// Acknowledges a pending COMPARE0 event, if any.
    fn clear_compare_flag(&mut self) {
        if self.rtc.is_event_triggered(RtcInterrupt::Compare0) {
            self.rtc.reset_event(RtcInterrupt::Compare0);
        }
    }
    /// Acknowledges a pending TICK event so it does not re-fire.
    fn on_interrupt(&mut self) {
        if self.rtc.is_event_triggered(RtcInterrupt::Tick) {
            self.rtc.reset_event(RtcInterrupt::Tick);
        }
    }
    /// Re-arms the COMPARE0 event/interrupt (queue became non-empty).
    fn enable_timer(&mut self) {
        self.rtc.enable_event(RtcInterrupt::Compare0);
        self.rtc.enable_interrupt(RtcInterrupt::Compare0, None);
    }
    /// Disarms COMPARE0 while RTIC's timer queue is empty.
    fn disable_timer(&mut self) {
        self.rtc.disable_interrupt(RtcInterrupt::Compare0, None);
        self.rtc.disable_event(RtcInterrupt::Compare0);
    }
}
|
#![allow(non_snake_case)]
#![doc(html_logo_url = "https://s3-us-west-1.amazonaws.com/passivetotal-website/public/core-pt-logo-sm.png",
html_favicon_url = "https://passivetotal.org/static/img/favicon/png",
html_root_url = "https://passivetotal.org/")]
// Disable warnings that JSON struct fields are camelCase.
// This is just to reflect the true field names of the JSON document.
//! This is the Rust implementation of a client for the PassiveTotal API.
//! Usage is provided through `passivetotal::client::PTClient`
//!
//! Before running anything, you will need to create a JSON configuration. See the [config
//! module][1]
//! documentation for details.
//!
//! Please see the [`passivetotal::client::PTClient`][2] documentation for the available methods.
//!
//! See the [response module documentation][3] for the specification for the response structs.
//!
//! See the [API documentation][4] for more resources.
//!
//! # Examples
//! ```
//! use passivetotal::config::read_config;
//! use passivetotal::client::PTClient;
//!
//! let conf = match read_config() {
//! Ok(conf) => conf,
//! _ => panic!("Please create your config at ~/.config/api_config.json"),
//! };
//! let client = PTClient::new(conf);
//! let response = match client.get_pdns("passivetotal.org") {
//! Ok(response) => response,
//! _ => panic!("Something bad happened in the JSON response"),
//! };
//! ```
//!
//! [1]: ./config/index.html
//! [2]: ./client/struct.PTClient.html
//! [3]: ./response/index.html
//! [4]: https://api.passivetotal.org/api/docs/
extern crate rustc_serialize;
extern crate hyper;
mod macros;
pub mod client;
pub mod constants;
pub mod config;
pub mod response;
|
use crate::player::{PlayerPlugin, Player};
use bevy::window::WindowMode;
use bevy::prelude::*;
mod screens;
mod gamedata;
mod gamestate;
mod ground;
mod player;
use crate::ground::GroundPlugin;
use crate::gamestate::*;
use crate::screens::ScreensPlugin;
use crate::gamedata::GameData;
const SCREEN_WIDTH: f32 = 1280.0;
const SCREEN_HEIGHT: f32 = 720.0;
/// Application entry point: registers plugins, the startup system, and the
/// initial resources, then hands control to Bevy's runner.
fn main() {
    // Window configuration: fixed-size, borderless fullscreen, vsync on.
    let window = WindowDescriptor {
        vsync: true,
        resizable: false,
        mode: WindowMode::BorderlessFullscreen,
        title: "Nimagix".to_string(),
        width: SCREEN_WIDTH,
        height: SCREEN_HEIGHT,
        ..Default::default()
    };
    // Initial game data: start in the menu with a zeroed score.
    let game_data = GameData {
        state: GameState::Menu,
        score: 0,
    };
    App::build()
        // plugins
        .add_plugins(DefaultPlugins)
        .add_plugin(GroundPlugin)
        .add_plugin(PlayerPlugin)
        .add_plugin(ScreensPlugin)
        .add_plugin(GameStatePlugin)
        .add_startup_system(setup.system())
        // resources
        .insert_resource(window)
        .insert_resource(ClearColor(Color::rgb(0.51, 0.62, 0.86)))
        .insert_resource(game_data)
        .insert_resource(Player::new())
        .run();
}
/// Startup system: spawns the single 2D orthographic camera the game renders with.
fn setup(
    mut commands: Commands,
){
    commands.spawn_bundle(OrthographicCameraBundle::new_2d());
}
|
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{stdin, Write};
use tokenizers::tokenizer::Tokenizer;
use vsm::indexing::build_index;
use vsm::posting_list::{DocId, Frequency};
/// Prompts on stdout, reads one line from stdin, and returns it trimmed and
/// lower-cased.
fn get_input() -> String {
    println!("Enter your query:");
    let mut line = String::new();
    stdin()
        .read_line(&mut line)
        .unwrap();
    line.trim().to_lowercase()
}
/// Builds the query vector: lower-cases the raw tokens, drops stop words and
/// punctuation tokens, and counts occurrences of what remains.
///
/// The per-term tuple is (term-frequency, idf, weight); idf and weight are
/// initialized to 0.0 and filled in later by the caller.
fn get_query_map(
    raw_tokens: Vec<String>,
    stop_words: &HashSet<String>,
    punctuations: &HashSet<String>,
) -> HashMap<String, (Frequency, f64, f64)> {
    let mut query_map = HashMap::<String, (Frequency, f64, f64)>::new();
    for raw in raw_tokens.iter() {
        let token = raw.to_lowercase();
        if stop_words.contains(&token) || punctuations.contains(&token) {
            continue;
        }
        // Single hash lookup: insert a zero row on first sight, then bump tf.
        let entry = query_map.entry(token).or_insert((0, 0.0, 0.0));
        entry.0 += 1;
    }
    query_map
}
/// Interactive VSM search loop: builds the index once, then repeatedly reads
/// a query, weights its terms by tf-idf, ranks documents by dot product with
/// the normalized index, and prints at most the 10 most relevant documents.
fn main() {
    let index_data = build_index();
    let normalized_index = index_data.normalized_index;
    // Dump the normalized index to disk for inspection/debugging.
    let mut f = File::create("data/indexes/normalized.txt").unwrap();
    f.write_fmt(format_args!("{:#?}", normalized_index))
        .unwrap();
    let stop_words = index_data.stop_words;
    let punctuations = index_data.punctuations;
    let total_tokens_count = index_data.total_tokens_count;
    let tokenizer = Tokenizer::from_pretrained("bert-base-cased", None).unwrap();
    println!("Welcome to VSM based search engine");
    loop {
        let raw_tokens = tokenizer
            .encode(get_input(), false)
            .unwrap()
            .get_tokens()
            .to_vec();
        let mut query_map = get_query_map(raw_tokens, &stop_words, &punctuations);
        // calculate inverse document frequencies and term weights
        query_map.iter_mut().for_each(|(key, value)| {
            // document frequency; 0 for unseen terms (idf becomes +inf, but
            // such terms have no posting list and never reach the dot product)
            let df: usize = match normalized_index.get(key) {
                Some(v) => v.document_frequency,
                None => 0,
            };
            // idf
            value.1 = (total_tokens_count as f64 / df as f64).log10();
            // weight = tf * idf
            value.2 = (value.0 as f64) * value.1;
        });
        // accumulate the query/document dot products over all posting lists
        let mut dot_product_map = HashMap::<DocId, f64>::new();
        for (key, (_, _, weight)) in query_map {
            // 1. get posting-list from normalized-index
            match normalized_index.get(&key) {
                Some(list) => {
                    // 2. for each doc-id in the posting-list, multiply the
                    // query-term weight with the document-term component and
                    // add it into that document's running sum
                    list.dimension_map.iter().for_each(|(k, v)| {
                        dot_product_map
                            .entry(*k)
                            .and_modify(|sum| *sum += v * weight)
                            .or_insert(v * weight);
                    });
                }
                None => continue,
            }
        }
        // rank by descending relevance; ties broken by descending doc id
        let mut results = dot_product_map.into_iter().collect::<Vec<_>>();
        results.sort_by(|(a_id, va), (b_id, vb)| {
            if va < vb {
                Ordering::Greater
            } else if va > vb {
                Ordering::Less
            } else if a_id < b_id {
                Ordering::Greater
            } else {
                Ordering::Less
            }
        });
        // Print at most the top 10 results. (The previous code used
        // `results.get(0..=10)`, an inclusive range that printed 11 entries.)
        for v in results.iter().take(10) {
            println!("Doc {:2} - Relevance {:.4}", v.0, v.1);
        }
    }
}
|
pub mod file_select;
pub mod artifact;
|
mod builder;
mod css;
mod menu;
mod stack;
mod state;
mod statusbar;
mod toolbar;
pub mod prelude;
pub use state::{Message, SortOrder, State, ViewStyle};
use glib::clone;
use std::path::Path;
use std::thread;
use super::nix_query_tree::exec_nix_store::NixStoreErr;
use prelude::*;
fn render_nix_store_err(
state: &State,
nix_store_path: &Path,
nix_store_err: &NixStoreErr,
) {
statusbar::show_msg(
state,
&format!(
"Error running `nix-store --query --tree {}`",
nix_store_path.to_string_lossy()
),
);
let error_dialog: gtk::MessageDialog = state.get_error_dialog();
let error_msg = &format!(
"Error running `nix-store --query --tree {}`:\n\n{}",
nix_store_path.to_string_lossy(),
nix_store_err
);
error_dialog.set_property_secondary_text(Some(error_msg));
error_dialog.run();
error_dialog.hide();
}
/// Starts an asynchronous `nix-store --query --tree` lookup for
/// `nix_store_path`: disables the UI, shows progress in the statusbar, and
/// runs the query on a worker thread. The result comes back to the main
/// loop as a `Message::Display` via `state.sender`.
fn search_for(state: &State, nix_store_path: &Path) {
    // nix-store --query --tree /nix/store/jymg0kanmlgbcv35wxd8d660rw0fawhv-hello-2.10.drv
    // nix-store --query --tree /nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10
    disable(state);
    statusbar::show_msg(
        state,
        &format!("Searching for {}...", nix_store_path.display()),
    );
    // Owned copy so the spawned thread doesn't borrow from the caller.
    let nix_store_path_buf = nix_store_path.to_path_buf();
    thread::spawn(clone!(@strong state.sender as sender => move || {
        let exec_nix_store_res =
            super::nix_query_tree::exec_nix_store::run(&nix_store_path_buf);
        sender
            .send(Message::Display(exec_nix_store_res))
            .expect("sender is already closed. This should never happen");
    }));
}
/// Stores the new sort order in shared state and re-sorts the stack's view.
fn set_sort_order(state: &State, new_sort_order: SortOrder) {
    state.write_sort_order(new_sort_order);
    stack::change_sort_order(state);
}
/// Stores the new view style in shared state and switches the stack to it.
pub fn set_view_style(state: &State, new_view_style: ViewStyle) {
    state.write_view_style(new_view_style);
    stack::change_view_style(state);
}
/// Clears the statusbar and redraws the stack from the current state.
fn redisplay_data(state: &State) {
    statusbar::clear(state);
    stack::redisplay_data(state);
}
/// Disables the stack and toolbar widgets (used while a search is in flight).
fn disable(state: &State) {
    stack::disable(state);
    toolbar::disable(state);
}
/// Re-enables the stack and toolbar widgets once a result has arrived.
fn enable(state: &State) {
    stack::enable(state);
    toolbar::enable(state);
}
/// Handles a message from the worker thread on the GTK main loop:
/// re-enables the UI, then either renders the `nix-store` error or stores
/// the successful result and redisplays it.
fn handle_msg_recv(state: &State, msg: Message) {
    enable(state);
    match msg {
        Message::Display(exec_nix_store_res) => match exec_nix_store_res.res {
            Err(nix_store_err) => {
                render_nix_store_err(
                    state,
                    &exec_nix_store_res.nix_store_path,
                    &nix_store_err,
                );
            }
            Ok(nix_store_res) => {
                state.write_nix_store_res(nix_store_res);
                redisplay_data(state);
            }
        },
    }
}
/// GTK `activate` handler: builds the UI, attaches the worker-to-main-loop
/// channel, shows the window, and kicks off the initial search taken from
/// the command-line options.
fn app_activate(app: gtk::Application) {
    let (sender, receiver) =
        glib::MainContext::channel(glib::source::PRIORITY_DEFAULT);
    let state = State::new(app, sender);
    let window: gtk::ApplicationWindow = state.get_app_win();
    window.set_application(Some(&state.app));
    css::setup(window.upcast_ref());
    menu::setup(&state);
    toolbar::setup(&state);
    stack::setup(&state);
    window.show_all();
    // Deliver worker-thread messages on the GTK main loop; Continue(true)
    // keeps the watch alive for subsequent searches.
    receiver.attach(
        None,
        clone!(@strong state => move |msg| {
            handle_msg_recv(&state, msg);
            glib::source::Continue(true)
        }),
    );
    // Do the initial search and display the results.
    let opts = crate::opts::Opts::parse_from_args();
    search_for(&state, &opts.nix_store_path);
}
/// Entry point: creates the GTK application and runs its main loop.
/// CLI arguments are parsed separately (see `app_activate`), so GTK itself
/// is given an empty argument list.
pub fn run() {
    let uiapp = gtk::Application::new(
        Some("com.github.cdepillabout.nix-query-tree-viewer"),
        gio::ApplicationFlags::FLAGS_NONE,
    )
    .expect("Application::new failed");
    uiapp.connect_activate(|app| app_activate(app.clone()));
    // uiapp.run(&env::args().collect::<Vec<_>>());
    uiapp.run(&[]);
}
|
use std::time::{Duration, Instant};
use toykio_runtime::{Delay, Toykio};
/// Demo for the toy runtime: spawn a task that awaits a 1.5 s `Delay`,
/// then drive the executor to completion.
fn main() {
    let mut runtime = Toykio::new();
    runtime.spawn(async {
        println!("Spawned");
        // Delay future that completes roughly 1.5 s from now.
        let delay = Delay {
            when: Instant::now() + Duration::from_millis(1500),
        };
        println!("Wait 1.5sec...");
        let result = delay.await;
        println!("Done");
        assert_eq!(result, "done");
    });
    runtime.run();
}
|
/*
Copyright ⓒ 2016 rust-custom-derive contributors.
Licensed under the MIT license (see LICENSE or <http://opensource.org
/licenses/MIT>) or the Apache License, Version 2.0 (see LICENSE or
<http://www.apache.org/licenses/LICENSE-2.0>), at your option. All
files in the project carrying such notice may not be copied, modified,
or distributed except according to those terms.
*/
// Internal plumbing macro shared by this crate's public parsing macros.
//
// `@call` invokes a stored callback `$cb` (captured together with the
// arguments accumulated so far) with extra `$output` tokens appended; there
// is one rule each for the `!()` and `!{}` invocation styles.
//
// `@inc_ord_ident` maps a counter `N` to `N + 1` plus a fresh ordinal
// identifier `_ord_NN`. macro_rules! cannot do arithmetic, so every step up
// to the supported maximum of 32 is spelled out as its own rule.
#[doc(hidden)]
#[macro_export]
macro_rules! parse_macros_util {
    (
        @call
        ($cb:ident!($($cb_arg:tt)*)),
        $($output:tt)*
    ) => {
        $cb!(
            $($cb_arg)*
            $($output)*
        )
    };
    (
        @call
        ($cb:ident!{$($cb_arg:tt)*}),
        $($output:tt)*
    ) => {
        $cb! {
            $($cb_arg)*
            $($output)*
        }
    };
    (@inc_ord_ident $cb:tt, 0) => { parse_macros_util!{ @call $cb, 1, _ord_01 } };
    (@inc_ord_ident $cb:tt, 1) => { parse_macros_util!{ @call $cb, 2, _ord_02 } };
    (@inc_ord_ident $cb:tt, 2) => { parse_macros_util!{ @call $cb, 3, _ord_03 } };
    (@inc_ord_ident $cb:tt, 3) => { parse_macros_util!{ @call $cb, 4, _ord_04 } };
    (@inc_ord_ident $cb:tt, 4) => { parse_macros_util!{ @call $cb, 5, _ord_05 } };
    (@inc_ord_ident $cb:tt, 5) => { parse_macros_util!{ @call $cb, 6, _ord_06 } };
    (@inc_ord_ident $cb:tt, 6) => { parse_macros_util!{ @call $cb, 7, _ord_07 } };
    (@inc_ord_ident $cb:tt, 7) => { parse_macros_util!{ @call $cb, 8, _ord_08 } };
    (@inc_ord_ident $cb:tt, 8) => { parse_macros_util!{ @call $cb, 9, _ord_09 } };
    (@inc_ord_ident $cb:tt, 9) => { parse_macros_util!{ @call $cb, 10, _ord_10 } };
    (@inc_ord_ident $cb:tt, 10) => { parse_macros_util!{ @call $cb, 11, _ord_11 } };
    (@inc_ord_ident $cb:tt, 11) => { parse_macros_util!{ @call $cb, 12, _ord_12 } };
    (@inc_ord_ident $cb:tt, 12) => { parse_macros_util!{ @call $cb, 13, _ord_13 } };
    (@inc_ord_ident $cb:tt, 13) => { parse_macros_util!{ @call $cb, 14, _ord_14 } };
    (@inc_ord_ident $cb:tt, 14) => { parse_macros_util!{ @call $cb, 15, _ord_15 } };
    (@inc_ord_ident $cb:tt, 15) => { parse_macros_util!{ @call $cb, 16, _ord_16 } };
    (@inc_ord_ident $cb:tt, 16) => { parse_macros_util!{ @call $cb, 17, _ord_17 } };
    (@inc_ord_ident $cb:tt, 17) => { parse_macros_util!{ @call $cb, 18, _ord_18 } };
    (@inc_ord_ident $cb:tt, 18) => { parse_macros_util!{ @call $cb, 19, _ord_19 } };
    (@inc_ord_ident $cb:tt, 19) => { parse_macros_util!{ @call $cb, 20, _ord_20 } };
    (@inc_ord_ident $cb:tt, 20) => { parse_macros_util!{ @call $cb, 21, _ord_21 } };
    (@inc_ord_ident $cb:tt, 21) => { parse_macros_util!{ @call $cb, 22, _ord_22 } };
    (@inc_ord_ident $cb:tt, 22) => { parse_macros_util!{ @call $cb, 23, _ord_23 } };
    (@inc_ord_ident $cb:tt, 23) => { parse_macros_util!{ @call $cb, 24, _ord_24 } };
    (@inc_ord_ident $cb:tt, 24) => { parse_macros_util!{ @call $cb, 25, _ord_25 } };
    (@inc_ord_ident $cb:tt, 25) => { parse_macros_util!{ @call $cb, 26, _ord_26 } };
    (@inc_ord_ident $cb:tt, 26) => { parse_macros_util!{ @call $cb, 27, _ord_27 } };
    (@inc_ord_ident $cb:tt, 27) => { parse_macros_util!{ @call $cb, 28, _ord_28 } };
    (@inc_ord_ident $cb:tt, 28) => { parse_macros_util!{ @call $cb, 29, _ord_29 } };
    (@inc_ord_ident $cb:tt, 29) => { parse_macros_util!{ @call $cb, 30, _ord_30 } };
    (@inc_ord_ident $cb:tt, 30) => { parse_macros_util!{ @call $cb, 31, _ord_31 } };
    (@inc_ord_ident $cb:tt, 31) => { parse_macros_util!{ @call $cb, 32, _ord_32 } };
}
|
#[doc = "Reader of register LLH_FEATURE_CONFIG"]
pub type R = crate::R<u32, super::LLH_FEATURE_CONFIG>;
#[doc = "Writer for register LLH_FEATURE_CONFIG"]
pub type W = crate::W<u32, super::LLH_FEATURE_CONFIG>;
#[doc = "Register LLH_FEATURE_CONFIG `reset()`'s with value 0x06"]
impl crate::ResetValue for super::LLH_FEATURE_CONFIG {
    type Type = u32;
    // 0x06 = SL_DSM_EN (bit 1) and US_COUNTER_OFFSET_ADJ (bit 2) set,
    // QUICK_TRANSMIT (bit 0) clear.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x06
    }
}
#[doc = "Reader of field `QUICK_TRANSMIT`"]
pub type QUICK_TRANSMIT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `QUICK_TRANSMIT`"]
pub struct QUICK_TRANSMIT_W<'a> {
    // Borrowed register writer; `bit()` mutates it and returns it for chaining.
    w: &'a mut W,
}
impl<'a> QUICK_TRANSMIT_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 0: mask it out, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `SL_DSM_EN`"]
pub type SL_DSM_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SL_DSM_EN`"]
pub struct SL_DSM_EN_W<'a> {
    // Borrowed register writer; `bit()` mutates it and returns it for chaining.
    w: &'a mut W,
}
impl<'a> SL_DSM_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 1: mask it out, then OR in the shifted value.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `US_COUNTER_OFFSET_ADJ`"]
pub type US_COUNTER_OFFSET_ADJ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `US_COUNTER_OFFSET_ADJ`"]
pub struct US_COUNTER_OFFSET_ADJ_W<'a> {
    // Borrowed register writer; `bit()` mutates it and returns it for chaining.
    w: &'a mut W,
}
impl<'a> US_COUNTER_OFFSET_ADJ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 2: mask it out, then OR in the shifted value.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
// Read accessors for the individual LLH_FEATURE_CONFIG fields.
impl R {
    #[doc = "Bit 0 - Quick transmit feature in slave latency is enabled by setting this bit. When slave latency is enabled, this feature enables the slave to transmit in the immediate connection interval, in case required, instead of waiting till the end of slave latency"]
    #[inline(always)]
    pub fn quick_transmit(&self) -> QUICK_TRANSMIT_R {
        QUICK_TRANSMIT_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Enable/Disable Slave Latency Period DSM."]
    #[inline(always)]
    pub fn sl_dsm_en(&self) -> SL_DSM_EN_R {
        SL_DSM_EN_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Enable/Disable the connection US counter offset adjust. For non-MMMS mode, this bit must be tied to 1."]
    #[inline(always)]
    pub fn us_counter_offset_adj(&self) -> US_COUNTER_OFFSET_ADJ_R {
        US_COUNTER_OFFSET_ADJ_R::new(((self.bits >> 2) & 0x01) != 0)
    }
}
// Write proxies for the individual LLH_FEATURE_CONFIG fields.
impl W {
    #[doc = "Bit 0 - Quick transmit feature in slave latency is enabled by setting this bit. When slave latency is enabled, this feature enables the slave to transmit in the immediate connection interval, in case required, instead of waiting till the end of slave latency"]
    #[inline(always)]
    pub fn quick_transmit(&mut self) -> QUICK_TRANSMIT_W {
        QUICK_TRANSMIT_W { w: self }
    }
    #[doc = "Bit 1 - Enable/Disable Slave Latency Period DSM."]
    #[inline(always)]
    pub fn sl_dsm_en(&mut self) -> SL_DSM_EN_W {
        SL_DSM_EN_W { w: self }
    }
    #[doc = "Bit 2 - Enable/Disable the connection US counter offset adjust. For non-MMMS mode, this bit must be tied to 1."]
    #[inline(always)]
    pub fn us_counter_offset_adj(&mut self) -> US_COUNTER_OFFSET_ADJ_W {
        US_COUNTER_OFFSET_ADJ_W { w: self }
    }
}
|
use serde::*;
use std::fs::File;
use std::io::BufReader;
const UNIXEPOCH_U8_SIZE: usize = 10;
const GEOHASH_U8_SIZE: usize = 10;
const QUERY_U8_SIZE: usize = UNIXEPOCH_U8_SIZE + GEOHASH_U8_SIZE;
// At most ~10,000 queries are ever buffered, so 64 bits is more than enough.
pub type QueryId = u64;
/// A batch of client queries loaded from a JSON file.
#[derive(Serialize, Deserialize, Debug)]
pub struct QueryData {
    // One entry per client query.
    pub data: Vec<QueryDataDetail>,
    // Declared number of clients; must equal `data.len()` (validated on load).
    pub client_size: usize,
}
impl QueryData {
    /// Reads query data from a JSON file.
    ///
    /// # Panics
    /// Panics if the file cannot be opened or parsed, or if `client_size`
    /// does not match the number of entries in `data`. (The message is now
    /// carried in the panic payload instead of a separate `println!` followed
    /// by an argument-less `panic!()`.)
    pub fn read_raw_from_file(filename: &str) -> Self {
        let file = File::open(filename).unwrap();
        let reader = BufReader::new(file);
        let query_data: QueryData = serde_json::from_reader(reader).unwrap();
        if query_data.client_size != query_data.data.len() {
            panic!("[Error] Invalid data format from {}!", filename);
        }
        query_data
    }
    /// Total byte size of all queries (each query occupies QUERY_U8_SIZE bytes).
    pub fn total_size(&self) -> usize {
        self.data
            .iter()
            .map(|data| data.query_size * QUERY_U8_SIZE)
            .sum()
    }
    /// Concatenates every entry's hex-encoded geodata and decodes it to bytes.
    pub fn total_data_to_u8(&self) -> Vec<u8> {
        // Collect &str slices directly into one String: no per-entry clones.
        let joined: String = self.data.iter().map(|d| d.geodata.as_str()).collect();
        hex_string_to_u8(&joined)
    }
    /// Per-entry query sizes, in `data` order.
    pub fn size_list(&self) -> Vec<usize> {
        self.data.iter().map(|d| d.query_size).collect()
    }
    /// Per-entry query ids, in `data` order.
    pub fn query_id_list(&self) -> Vec<u64> {
        self.data.iter().map(|d| d.query_id).collect()
    }
}
/// One client query as stored in the JSON input.
#[derive(Serialize, Deserialize, Debug)]
pub struct QueryDataDetail {
    // Unique id for the query (see the `QueryId` comment above).
    query_id: QueryId,
    // Hex-encoded payload; decoded in bulk by `QueryData::total_data_to_u8`.
    geodata: String,
    // Number of (unixepoch, geohash) records in this query.
    query_size: usize,
}
/// Decodes a hex string (e.g. `"0aff"`) into its raw bytes.
///
/// Accepts `&str` instead of `&String` (callers passing `&String` still work
/// via deref coercion) and uses only the standard library instead of the
/// `hex` crate.
///
/// # Panics
/// Panics if the string has odd length or contains a non-hex character.
fn hex_string_to_u8(hex_string: &str) -> Vec<u8> {
    // An odd-length string cannot be a byte sequence; fail loudly like the
    // old `hex::decode(...).expect(...)` did.
    assert!(
        hex_string.len() % 2 == 0,
        "Decoding failed: Expect hex string!"
    );
    (0..hex_string.len())
        .step_by(2)
        .map(|i| {
            u8::from_str_radix(&hex_string[i..i + 2], 16)
                .expect("Decoding failed: Expect hex string!")
        })
        .collect()
}
use nom::branch::alt;
use nom::bytes::complete::escaped_transform;
use nom::bytes::complete::is_not;
use nom::bytes::complete::tag;
use nom::character::complete::one_of;
use nom::character::complete::space1;
use nom::multi::many0;
use nom::multi::separated_list0;
use nom::sequence::delimited;
use nom::sequence::pair;
use nom::sequence::tuple;
use nom::IResult;
/// Marker that may precede a sub-LU's ling form in the Apertium stream.
#[derive(Debug, PartialEq)]
pub enum Flag {
    // No marker present.
    Nothing,
    // `*` marker.
    Unanalyzed,
    // `@` marker.
    Untranslated,
    // `#` marker.
    UnableToGenerateOrStartOfInvariablePart,
}
/// One analysis of a lexical unit: ling (surface/lemma) form, optional flag
/// marker, and its `<tag>` list.
#[derive(Debug, PartialEq)]
pub struct SubLU {
    ling_form: String,
    flag: Flag,
    tags: Vec<String>,
}
/// A single unit of the Apertium stream format.
#[derive(Debug, PartialEq)]
pub enum StreamUnit {
    // `^a/b$` — a lexical unit with one or more analyses.
    LexicalUnit(Vec<SubLU>),
    // Whitespace between units.
    Space(String),
    // `[...]` — formatting/superblank data, passed through verbatim.
    Format(String),
    // `^a+b/c+d$` — analyses of `+`-joined sub-units.
    JoinedLexicalUnit(Vec<Vec<SubLU>>),
    // A head sub-LU followed by `{ ... }` children.
    Chunk(SubLU, Vec<StreamUnit>),
}
/// Parses a single tag like `<n>`, returning its inner text (`"n"`).
pub fn parse_tag(input: &str) -> IResult<&str, &str> {
    delimited(tag("<"), is_not(r#"<>"#), tag(">"))(input)
}
/// Parses a plain lexical unit `^a/b/...$` into `StreamUnit::LexicalUnit`,
/// with `/`-separated analyses.
pub fn parse_basic_lu(input: &str) -> IResult<&str, StreamUnit> {
    let mut parse = delimited(
        tag("^"),
        separated_list0(tag("/"), parse_sub_lu),
        tag("$"),
    );
    parse(input).map(|(rest, analyses)| (rest, StreamUnit::LexicalUnit(analyses)))
}
/// Maps a flag marker string to its `Flag` variant; anything else
/// (including the empty string) means no flag.
pub fn make_flag(s: &str) -> Flag {
    match s {
        "#" => Flag::UnableToGenerateOrStartOfInvariablePart,
        "@" => Flag::Untranslated,
        "*" => Flag::Unanalyzed,
        _ => Flag::Nothing,
    }
}
/// Parses one analysis of an LU: an optional flag marker (`*`, `#`, `@`),
/// a backslash-escaped ling form, then zero or more `<tag>`s.
pub fn parse_sub_lu_basic(input: &str) -> IResult<&str, SubLU> {
    // Characters with stream-format meaning must be backslash-escaped
    // inside a ling form; escaped_transform unescapes them.
    let ling_form_inner_parse = is_not(r#"^$@*/<>{}\[]"#);
    let ling_form_escape_parse =
        escaped_transform(ling_form_inner_parse, '\\', one_of(r#"^$@*/<>{}\[]"#));
    let mut parse = tuple((
        // tag("") always succeeds, which makes the flag marker optional.
        alt((tag("*"), tag("#"), tag("@"), tag(""))),
        ling_form_escape_parse,
        many0(parse_tag),
    ));
    parse(input).map(|(i, (flag, ling_form, tags))| {
        (
            i,
            SubLU {
                ling_form: ling_form.to_string(),
                tags: tags.iter().map(|tag| String::from(*tag)).collect(),
                flag: make_flag(flag),
            },
        )
    })
}
/// Parses a sub-LU whose ling form is empty (e.g. `*<det>`): an optional
/// flag marker followed directly by tags.
pub fn parse_sub_lu_without_ling_form(input: &str) -> IResult<&str, SubLU> {
    let mut parse = tuple((
        // tag("") always succeeds, which makes the flag marker optional.
        alt((tag("*"), tag("#"), tag("@"), tag(""))),
        many0(parse_tag),
    ));
    parse(input).map(|(i, (flag, tags))| {
        (
            i,
            SubLU {
                ling_form: String::from(""),
                tags: tags.iter().map(|tag| String::from(*tag)).collect(),
                flag: make_flag(flag),
            },
        )
    })
}
/// Parses a sub-LU, falling back to the empty-ling-form variant when the
/// ling form parser fails.
pub fn parse_sub_lu(input: &str) -> IResult<&str, SubLU> {
    alt((parse_sub_lu_basic, parse_sub_lu_without_ling_form))(input)
}
/// Parses a joined lexical unit `^a+b/c+d$` into
/// `StreamUnit::JoinedLexicalUnit`: `/`-separated analyses, each being a
/// `+`-separated list of sub-LUs.
pub fn parse_joined_lu(input: &str) -> IResult<&str, StreamUnit> {
    let analyses =
        separated_list0(tag("/"), separated_list0(tag("+"), parse_sub_lu));
    let mut parse = delimited(tag("^"), analyses, tag("$"));
    parse(input).map(|(rest, lus)| (rest, StreamUnit::JoinedLexicalUnit(lus)))
}
/// Parses a chunk child. NOTE: `alt` order matters — `parse_basic_lu` is
/// tried before `parse_joined_lu` and only falls through when it fails.
pub fn parse_lu_or_space_or_format(input: &str) -> IResult<&str, StreamUnit> {
    alt((parse_format, parse_basic_lu, parse_joined_lu, parse_space))(input)
}
/// Parses a chunk: a head sub-LU immediately followed by `{ ... }` children.
pub fn parse_chunk(input: &str) -> IResult<&str, StreamUnit> {
    let parse_children = delimited(tag("{"), many0(parse_lu_or_space_or_format), tag("}"));
    let mut parse = pair(parse_sub_lu, parse_children);
    let res = parse(input);
    res.map(|(i, (head, children))| (i, StreamUnit::Chunk(head, children)))
}
/// Parses formatting/superblank data `[...]` into `StreamUnit::Format`.
pub fn parse_format(input: &str) -> IResult<&str, StreamUnit> {
    delimited(tag("["), is_not(r#"[]"#), tag("]"))(input)
        .map(|(rest, fmt)| (rest, StreamUnit::Format(String::from(fmt))))
}
/// Parses inter-unit whitespace: one or more spaces, or a single newline.
pub fn parse_space(input: &str) -> IResult<&str, StreamUnit> {
    alt((space1, tag("\n")))(input).map(|(i, o)| (i, StreamUnit::Space(String::from(o))))
}
/// Parses any single stream unit. NOTE: `alt` order matters —
/// `parse_basic_lu` runs before `parse_joined_lu` and `parse_chunk`, and
/// only fails through to them.
pub fn parse_stream_unit(input: &str) -> IResult<&str, StreamUnit> {
    alt((
        parse_space,
        parse_format,
        parse_basic_lu,
        parse_joined_lu,
        parse_chunk,
    ))(input)
}
/// Parses a whole Apertium stream into its sequence of units.
pub fn parse_stream(input: &str) -> IResult<&str, Vec<StreamUnit>> {
    many0(parse_stream_unit)(input)
}
#[cfg(test)]
mod tests {
    //! Unit tests for the stream parsers: basic/ambiguous/joined LUs,
    //! escaping, tags, formats, chunks, and a larger file-based fixture.
    use super::*;
    use slurp;
    #[test]
    fn basic_lu() {
        assert_eq!(
            parse_stream_unit("^กา$"),
            Ok((
                "",
                StreamUnit::LexicalUnit(vec![SubLU {
                    ling_form: String::from("กา"),
                    tags: vec![],
                    flag: Flag::Nothing,
                }])
            ))
        );
    }
    // Escaped `^` and `$` inside a ling form must be unescaped in the result.
    #[test]
    fn lu_surface_escape() {
        assert_eq!(
            parse_stream_unit("^\\^ab\\$$"),
            Ok((
                "",
                StreamUnit::LexicalUnit(vec![SubLU {
                    ling_form: String::from("^ab$"),
                    tags: vec![],
                    flag: Flag::Nothing,
                }])
            ))
        );
    }
    #[test]
    fn ambiguous_lu() {
        assert_eq!(
            parse_stream_unit("^ab/xy$"),
            Ok((
                "",
                StreamUnit::LexicalUnit(vec![
                    SubLU {
                        ling_form: String::from("ab"),
                        tags: vec![],
                        flag: Flag::Nothing,
                    },
                    SubLU {
                        ling_form: String::from("xy"),
                        tags: vec![],
                        flag: Flag::Nothing,
                    }
                ])
            ))
        );
    }
    #[test]
    fn basic_lus() {
        assert_eq!(
            parse_stream("^ab$"),
            Ok((
                "",
                vec![StreamUnit::LexicalUnit(vec![SubLU {
                    ling_form: String::from("ab"),
                    tags: vec![],
                    flag: Flag::Nothing,
                }])]
            ))
        );
        assert_eq!(
            parse_stream("^ab$ ^cd$"),
            Ok((
                "",
                vec![
                    StreamUnit::LexicalUnit(vec![SubLU {
                        ling_form: String::from("ab"),
                        tags: vec![],
                        flag: Flag::Nothing,
                    }]),
                    StreamUnit::Space(String::from(" ")),
                    StreamUnit::LexicalUnit(vec![SubLU {
                        ling_form: String::from("cd"),
                        tags: vec![],
                        flag: Flag::Nothing,
                    }])
                ]
            ))
        );
    }
    #[test]
    fn parse_basic_tag() {
        assert_eq!(parse_tag("<n>"), Ok(("", "n")));
    }
    #[test]
    fn parse_basic_stream_with_tags() {
        assert_eq!(
            parse_stream("^ab/xy<n>$ ^cd$"),
            Ok((
                "",
                vec![
                    StreamUnit::LexicalUnit(vec![
                        SubLU {
                            ling_form: String::from("ab"),
                            tags: vec![],
                            flag: Flag::Nothing,
                        },
                        SubLU {
                            ling_form: String::from("xy"),
                            tags: vec![String::from("n")],
                            flag: Flag::Nothing,
                        }
                    ]),
                    StreamUnit::Space(String::from(" ")),
                    StreamUnit::LexicalUnit(vec![SubLU {
                        ling_form: String::from("cd"),
                        tags: vec![],
                        flag: Flag::Nothing,
                    }])
                ]
            ))
        );
    }
    // Adjacent LUs with no separating whitespace must still split correctly.
    #[test]
    fn parse_basic_stream_with_tags_sans_space() {
        assert_eq!(
            parse_stream("^ab/xy<n>$^cd$"),
            Ok((
                "",
                vec![
                    StreamUnit::LexicalUnit(vec![
                        SubLU {
                            ling_form: String::from("ab"),
                            tags: vec![],
                            flag: Flag::Nothing,
                        },
                        SubLU {
                            ling_form: String::from("xy"),
                            tags: vec![String::from("n")],
                            flag: Flag::Nothing,
                        }
                    ]),
                    StreamUnit::LexicalUnit(vec![SubLU {
                        ling_form: String::from("cd"),
                        tags: vec![],
                        flag: Flag::Nothing,
                    }])
                ]
            ))
        );
    }
    #[test]
    fn parse_basic_stream_with_tags_sans_space_with_format() {
        assert_eq!(
            parse_stream("[<j>]^ab/xy<n>$[</j>]^cd$"),
            Ok((
                "",
                vec![
                    StreamUnit::Format(String::from("<j>")),
                    StreamUnit::LexicalUnit(vec![
                        SubLU {
                            ling_form: String::from("ab"),
                            tags: vec![],
                            flag: Flag::Nothing,
                        },
                        SubLU {
                            ling_form: String::from("xy"),
                            tags: vec![String::from("n")],
                            flag: Flag::Nothing,
                        }
                    ]),
                    StreamUnit::Format(String::from("</j>")),
                    StreamUnit::LexicalUnit(vec![SubLU {
                        ling_form: String::from("cd"),
                        tags: vec![],
                        flag: Flag::Nothing,
                    }])
                ]
            ))
        );
    }
    // A `+` in any analysis turns the whole unit into a JoinedLexicalUnit.
    #[test]
    fn parse_joined_lu_basic() {
        assert_eq!(
            parse_stream_unit("^ab/xy<n>+tx<a>$"),
            Ok((
                "",
                StreamUnit::JoinedLexicalUnit(vec![
                    vec![SubLU {
                        ling_form: String::from("ab"),
                        tags: vec![],
                        flag: Flag::Nothing,
                    }],
                    vec![
                        SubLU {
                            ling_form: String::from("xy"),
                            tags: vec![String::from("n")],
                            flag: Flag::Nothing,
                        },
                        SubLU {
                            ling_form: String::from("tx"),
                            tags: vec![String::from("a")],
                            flag: Flag::Nothing,
                        }
                    ],
                ]),
            ))
        );
    }
    #[test]
    fn parse_chunk_with_format() {
        assert_eq!(
            parse_stream_unit("N1<SN><a>{^i$ [<o>]^j$[</o>]^k$}"),
            Ok((
                "",
                StreamUnit::Chunk(
                    SubLU {
                        ling_form: String::from("N1"),
                        tags: vec![String::from("SN"), String::from("a")],
                        flag: Flag::Nothing,
                    },
                    vec![
                        StreamUnit::LexicalUnit(vec![SubLU {
                            ling_form: String::from("i"),
                            tags: vec![],
                            flag: Flag::Nothing,
                        }]),
                        StreamUnit::Space(String::from(" ")),
                        StreamUnit::Format(String::from("<o>")),
                        StreamUnit::LexicalUnit(vec![SubLU {
                            ling_form: String::from("j"),
                            tags: vec![],
                            flag: Flag::Nothing,
                        }]),
                        StreamUnit::Format(String::from("</o>")),
                        StreamUnit::LexicalUnit(vec![SubLU {
                            ling_form: String::from("k"),
                            tags: vec![],
                            flag: Flag::Nothing,
                        }]),
                    ],
                ),
            ))
        );
    }
    #[test]
    fn parse_escape_bracket() {
        let raw = "^\\]<vblex><pres>$";
        let (i, _) = parse_stream_unit(raw).unwrap();
        assert_eq!(i.len(), 0);
    }
    // An escaped `@` is part of the ling form, not an Untranslated flag.
    #[test]
    fn parse_at_lu() {
        let raw = "^\\@<det><ind><sg>$";
        let (i, _) = parse_stream_unit(raw).unwrap();
        assert_eq!(i.len(), 0);
    }
    #[test]
    fn parse_special_lemma() {
        let raw = "^*t<det><ind><sg>$";
        let (i, su) = parse_stream_unit(&raw).unwrap();
        assert_eq!(i.len(), 0);
        assert_eq!(
            su,
            StreamUnit::LexicalUnit(vec![SubLU {
                ling_form: String::from("t"),
                tags: vec![String::from("det"), String::from("ind"), String::from("sg")],
                flag: Flag::Unanalyzed,
            }])
        )
    }
    // Flag marker with no ling form at all (parse_sub_lu_without_ling_form).
    #[test]
    fn parse_special_lemma_only() {
        let raw = "^*<det><ind><sg>$";
        let (i, su) = parse_stream_unit(&raw).unwrap();
        assert_eq!(i.len(), 0);
        assert_eq!(
            su,
            StreamUnit::LexicalUnit(vec![SubLU {
                ling_form: String::from(""),
                tags: vec![String::from("det"), String::from("ind"), String::from("sg")],
                flag: Flag::Unanalyzed,
            }])
        )
    }
    // End-to-end: parse a real stream fixture and count its non-space units.
    #[test]
    fn parse_large_thai_data() {
        let raw = slurp::read_all_to_string("test_data/i_like_a_dog_sent.apertium_stream").unwrap();
        let (i, stream) = parse_stream(&raw).unwrap();
        assert_eq!(i.len(), 0);
        assert_eq!(
            stream
                .into_iter()
                .filter_map(|su| match su {
                    StreamUnit::Space(_) => None,
                    StreamUnit::LexicalUnit(analyses) => Some(analyses[0].ling_form.clone()),
                    _ => Some(String::from("_")),
                })
                .count(),
            5
        );
    }
}
|
extern crate shred;
use shred::{DispatcherBuilder, Read, ResourceId, System, SystemData, World, Write};
/// Marker resource read by the example systems.
#[derive(Debug, Default)]
struct ResA;
/// Marker resource written by the example systems.
#[derive(Debug, Default)]
struct ResB;
/// Derived system-data bundle: shared read access to `ResA` and exclusive
/// write access to `ResB`.
#[derive(SystemData)]
struct Data<'a> {
    a: Read<'a, ResA>,
    b: Write<'a, ResB>,
}
struct EmptySystem(*mut i8); // System is not thread-safe
// Runs only on the dispatching thread (registered via with_thread_local).
impl<'a> System<'a> for EmptySystem {
    type SystemData = Data<'a>;
    fn run(&mut self, bundle: Data<'a>) {
        println!("thread local: {:?}", &*bundle.a);
        println!("thread local: {:?}", &*bundle.b);
    }
}
struct PrintSystem;
impl<'a> System<'a> for PrintSystem {
    // Tuple form of SystemData: shared read of ResA, exclusive write of ResB.
    type SystemData = (Read<'a, ResA>, Write<'a, ResB>);
    fn run(&mut self, data: Self::SystemData) {
        let (a, mut b) = data;
        println!("{:?}", &*a);
        println!("{:?}", &*b);
        // We can mutate ResB here because it's `Write`.
        *b = ResB;
    }
}
fn main() {
    // Local the thread-local system's raw pointer refers to; must stay alive
    // for as long as the dispatcher does.
    let mut x = 5;
    let mut resources = World::empty();
    resources.insert(ResA);
    resources.insert(ResB);
    let mut dispatcher = DispatcherBuilder::new()
        .with(PrintSystem, "print", &[]) // Adds a system "print" without dependencies
        .with_thread_local(EmptySystem(&mut x))
        .build_async(resources);
    // Async dispatcher: `dispatch` starts a run, `wait` blocks until it ends.
    dispatcher.dispatch();
    dispatcher.wait();
    dispatcher.dispatch();
    dispatcher.wait();
}
|
#![allow(non_snake_case)]
#[macro_use]
use std::fs;
use regex::Regex;
use std::collections::HashMap;
pub fn solve() {
println!("\n************* Day - 3 ******************");
let txtPath = [env!("CARGO_MANIFEST_DIR"), "/inputs/day3.input1.txt"].join("");
let contents = fs::read_to_string(txtPath).expect("err reading file");
let arr: Vec<&str> = contents.split("\n").collect();
let mut fabric: [[i32; 1000]; 1000] = [[0; 1000]; 1000];
let mut fabricOverlap: [[bool; 1000]; 1000] = [[false; 1000]; 1000];
let mut totalSquareInches = 0;
let mut overlapMap = HashMap::new();
let re = Regex::new(r"#(\d+) @ (\d+),(\d+): (\d+)x(\d+)").unwrap();
for line in &arr {
let mut id: i32 = -1;
let mut x: i32 = 0;
let mut y: i32 = 0;
let mut width: i32 = 0;
let mut height: i32 = 0;
for cap in re.captures_iter(&line) {
id = cap[1].to_string().parse::<i32>().unwrap();
x = cap[2].to_string().parse::<i32>().unwrap();
y = cap[3].to_string().parse::<i32>().unwrap();
width = cap[4].to_string().parse::<i32>().unwrap();
height = cap[5].to_string().parse::<i32>().unwrap();
// println!("Res: {:?}, {:?}", cap, height);
}
for i in x..(x+width) {
for j in y..(y+height) {
let val = fabric[i as usize][j as usize];
if val > 0 {
overlapMap.entry(val).or_insert(true);
overlapMap.entry(id).or_insert(true);
if !fabricOverlap[i as usize][j as usize] {
totalSquareInches += 1;
fabricOverlap[i as usize][j as usize] = true;
}
}
fabric[i as usize][j as usize] = id;
}
}
}
// Part 1
println!("Total Square Inches: {}", totalSquareInches);
// Part 2
for i in 1..arr.len() {
if !overlapMap.contains_key(&(i as i32)) {
println!("Claim without any overlap: {}", i);
}
}
} |
use cgmath;
use crate::model::model_utils::SpriteAnimationMetaData;
use crate::model::animation_trait::SpriteAnimation;
/// A fox sprite: world position plus a table of named sprite animations
/// and the playback state of the currently selected one.
pub struct Fox
{
    /// World-space position of the sprite.
    pub pos: cgmath::Vector2<f32>,
    // Animation name -> frame range/timing; keys are the `MOVE_*` constants.
    sprite_animations: std::collections::HashMap<&'static str, SpriteAnimationMetaData>,
    // Key into `sprite_animations` for the animation currently playing.
    current_animation: &'static str,
    // Seconds accumulated within the current animation.
    // NOTE(review): units presumed seconds from the 0.1 timeouts — confirm.
    animation_time: f32,
}
impl Fox
{
    /// Animation table keys, one per movement direction.
    pub const MOVE_LEFT: &'static str = "MoveLeft";
    pub const MOVE_RIGHT: &'static str = "MoveRight";
    pub const MOVE_DOWN: &'static str = "MoveDown";
    pub const MOVE_UP: &'static str = "MoveUp";

    /// Creates a fox at `pos` with its four directional animations
    /// registered, starting out on the "move left" animation at time zero.
    pub fn new(pos: cgmath::Vector2<f32>) -> Fox
    {
        let mut table = std::collections::HashMap::new();
        // Sprite sheet layout: 3 frames per direction, 0.1s per frame.
        table.insert(Fox::MOVE_DOWN, SpriteAnimationMetaData { from_index: 0, to_index: 3, timeout: 0.1 });
        table.insert(Fox::MOVE_LEFT, SpriteAnimationMetaData { from_index: 3, to_index: 6, timeout: 0.1 });
        table.insert(Fox::MOVE_RIGHT, SpriteAnimationMetaData { from_index: 6, to_index: 9, timeout: 0.1 });
        table.insert(Fox::MOVE_UP, SpriteAnimationMetaData { from_index: 9, to_index: 12, timeout: 0.1 });
        Fox {
            pos,
            sprite_animations: table,
            current_animation: Fox::MOVE_LEFT,
            animation_time: 0.0,
        }
    }
}
/// Plain accessor plumbing exposing the fox's animation state to the
/// `SpriteAnimation` trait machinery.
impl SpriteAnimation for Fox
{
    /// The full animation table (name -> metadata).
    fn get_sprite_animations(&self) -> &std::collections::HashMap<&'static str, SpriteAnimationMetaData>
    {
        &self.sprite_animations
    }
    /// Key of the animation currently playing.
    fn get_current_animation(&self) -> &'static str
    {
        self.current_animation
    }
    /// Switches the current animation (does not reset the timer here).
    fn set_current_animation(&mut self, new_current_animation: &'static str)
    {
        self.current_animation = new_current_animation;
    }
    /// Elapsed time within the current animation.
    fn get_animation_time(&self) -> f32
    {
        self.animation_time
    }
    /// Overwrites the elapsed animation time.
    fn set_animation_time(&mut self, new_animation_time: f32)
    {
        self.animation_time = new_animation_time;
    }
}
/// Iterator adapter that inflates the `size_hint` of an inner iterator by a
/// fixed amount.
///
/// This can be used in `Vec::from_iter` to create a vector that has more capacity than
/// the `inner` iterator needs.
pub struct SizeHintExtendingIter<TInner> {
    inner: TInner,
    extend_size_by: usize,
}

impl<TInner> SizeHintExtendingIter<TInner> {
    /// Wraps `inner` so that its reported size hint is `extend_size_by`
    /// elements larger than it really is.
    pub fn new(inner: TInner, extend_size_by: usize) -> Self {
        Self { inner, extend_size_by }
    }
}

impl<TInner> Iterator for SizeHintExtendingIter<TInner>
where
    TInner: Iterator,
{
    type Item = TInner::Item;

    /// Pass-through: the yielded elements are exactly those of `inner`.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    /// Deliberately over-reports by `extend_size_by` (saturating).
    ///
    /// Manipulating the hint is acceptable for capacity tuning: per the
    /// `Iterator::size_hint` contract, the hint "must not be trusted to e.g.,
    /// omit bounds checks in unsafe code", so an inflated value may waste
    /// memory but can never cause memory-safety violations (even if the
    /// `Vec::from_iter` implementation changes).
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let (inner_lower, inner_upper) = self.inner.size_hint();
        (
            inner_lower.saturating_add(self.extend_size_by),
            inner_upper.map(|u| u.saturating_add(self.extend_size_by)),
        )
    }
}

impl<TInner> ExactSizeIterator for SizeHintExtendingIter<TInner>
where
    TInner: ExactSizeIterator,
{
    /// Returns the true element count of the underlying iterator (not the
    /// inflated hint), so `len()` stays honest.
    #[inline]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
|
use tokio::io::AsyncRead;
use crate::error::TychoResult;
use crate::read::async_::func::read_byte_async;
/// Reads a variable-length (LEB128-style) length prefix from `reader`.
///
/// Each byte contributes its low 7 bits, least-significant group first; the
/// high bit (0x80) marks a continuation byte. Decoding stops at the first
/// byte whose high bit is clear. I/O errors from `read_byte_async` propagate.
pub(crate) async fn read_length_async<R: AsyncRead + Unpin>(reader: &mut R) -> TychoResult<usize> {
    let mut number: u64 = 0;
    let mut count = 0_i32;
    loop {
        let byte = read_byte_async(reader).await?;
        // Merge the next 7-bit group into position.
        // NOTE(review): `count` is unbounded, so ten or more continuation
        // bytes push `7 * count` to >= 64 and the shift overflows (panics in
        // debug builds). Consider rejecting over-long encodings — confirm the
        // intended maximum with the wire format.
        number |= ((byte & 0x7F) as u64) << (7 * count);
        if byte & 0x80 == 0 {
            // Final byte reached: high bit clear, value complete.
            return Ok(number as usize);
        }
        count += 1;
    }
}
use std::thread::Thread;
use std::sync::{Arc,Mutex};
/* simple example
fn main() {
for _ in range(0u64, 10u64) {
/*
* Thread::spawn() get one argument: closure ({} of statements)
* move keyword + || + closure, means moving closure.
*/
Thread::spawn(move || {
println!("Hello, world!");
});
}
}
*/
/*
fn main() {
let mut numbers = vec![1i, 2i, 3i];
for i in range(0u, 3u) {
Thread::spawn(move || {
for j in range(0, 3) { numbers[j] += 1 }
});
}
}
*/
/// Spawns three threads; each increments and prints one slot of a shared
/// vector guarded by an `Arc<Mutex<_>>`.
///
/// Modernized: the pre-1.0 `range(..)` and `Thread::spawn` APIs no longer
/// exist, and the original detached its threads, so the process could exit
/// before any of them ran. We use `0..3`, `std::thread::spawn`, and join
/// every handle before returning.
fn main() {
    let numbers = Arc::new(Mutex::new(vec![1i64, 2i64, 3i64]));
    let handles: Vec<_> = (0..3usize)
        .map(|i| {
            // Each thread gets its own clone of the shared vector handle.
            let number = Arc::clone(&numbers);
            std::thread::spawn(move || {
                let mut array = number.lock().unwrap();
                array[i] += 1;
                println!("numbers[{}] is {}", i, array[i]);
            })
        })
        .collect();
    // Wait for all threads so their output is guaranteed to appear.
    for handle in handles {
        handle.join().unwrap();
    }
}
|
use crate::Result;
use flume::{Receiver, Sender};
use std::task::Poll;
use tracing::trace;
/// Tracks the last-known readiness of a socket plus the event channel the
/// reactor uses to update it.
pub(crate) struct SocketState {
    // Cleared when a read poll returns `Pending`; set by `SocketEvent::Readable`.
    readable: bool,
    // Cleared when a write poll returns `Pending`; set by `SocketEvent::Writable`.
    writable: bool,
    // Receiving side of the event channel fed by `SocketStateHandle`s.
    events: Receiver<SocketEvent>,
    // Prototype handle cloned out via `handle()`.
    handle: SocketStateHandle,
}
impl Default for SocketState {
    /// Builds a state that optimistically assumes the socket is both readable
    /// and writable, wired to a fresh unbounded event channel whose sending
    /// side lives inside the stored handle.
    fn default() -> Self {
        let (sender, events) = flume::unbounded();
        let handle = SocketStateHandle { sender };
        Self {
            readable: true,
            writable: true,
            events,
            handle,
        }
    }
}
/// Cloneable sending side used to push [`SocketEvent`]s into a `SocketState`.
#[derive(Clone)]
pub struct SocketStateHandle {
    sender: Sender<SocketEvent>,
}
/// Events delivered to a `SocketState` through its handle.
#[derive(Debug)]
pub enum SocketEvent {
    /// The socket became readable; sets the `readable` flag.
    Readable,
    /// The socket became writable; sets the `writable` flag.
    Writable,
    /// No state change; merely unblocks a pending `SocketState::wait`.
    Wake,
}
impl SocketState {
    /// Whether the socket was readable at the last poll.
    pub(crate) fn readable(&mut self) -> bool {
        self.readable
    }

    /// Whether the socket was writable at the last poll.
    pub(crate) fn writable(&mut self) -> bool {
        self.writable
    }

    /// Blocks until the next event arrives and applies it to the flags.
    ///
    /// # Panics
    /// Panics if the sending side of the event channel is disconnected.
    pub(crate) fn wait(&mut self) {
        self.handle_event(self.events.recv().expect("waiting for socket event failed"))
    }

    /// Returns a cloneable handle that can push events into this state.
    pub(crate) fn handle(&self) -> SocketStateHandle {
        self.handle.clone()
    }

    /// Converts a read poll result; `Pending` clears the readable flag.
    ///
    /// Now generic over the ready value for consistency with
    /// `handle_write_poll`; existing callers passing `Poll<usize>` are
    /// unaffected (T is inferred).
    pub(crate) fn handle_read_poll<T>(&mut self, poll: Poll<T>) -> Option<T> {
        match poll {
            Poll::Ready(value) => Some(value),
            Poll::Pending => {
                self.readable = false;
                None
            }
        }
    }

    /// Converts a write poll result; `Pending` clears the writable flag.
    pub(crate) fn handle_write_poll<T>(&mut self, poll: Poll<T>) -> Option<T> {
        match poll {
            Poll::Ready(value) => Some(value),
            Poll::Pending => {
                self.writable = false;
                None
            }
        }
    }

    /// Filters an I/O result: interruptions schedule a wake-up, would-block
    /// is left for the reactor to resolve, anything else propagates.
    pub(crate) fn handle_io_result(&self, result: Result<()>) -> Result<()> {
        if let Err(err) = result {
            if err.interrupted() {
                self.handle.wake();
            } else if err.wouldblock() {
                // ignore, let the reactor handle that
            } else {
                return Err(err);
            }
        }
        Ok(())
    }

    /// Drains and applies all currently queued events without blocking.
    pub(crate) fn poll_events(&mut self) {
        while let Ok(event) = self.events.try_recv() {
            self.handle_event(event);
        }
    }

    /// Applies a single event to the readiness flags.
    fn handle_event(&mut self, event: SocketEvent) {
        trace!(?event, "Got event for socket");
        match event {
            SocketEvent::Readable => self.readable = true,
            SocketEvent::Writable => self.writable = true,
            SocketEvent::Wake => {}
        }
    }
}
impl SocketStateHandle {
    /// Sends `event` to the owning state; a closed channel is deliberately
    /// ignored (the state no longer exists, so there is nothing to notify).
    pub fn send(&self, event: SocketEvent) {
        let _ = self.sender.send(event);
    }
    /// Unblocks a pending `SocketState::wait` without changing readiness.
    pub fn wake(&self) {
        self.send(SocketEvent::Wake);
    }
}
|
#![forbid(unknown_lints)]
use binds::{BindCount, BindsInternal, CollectBinds};
use join::CastVecJoin;
use row_locking::RowLocking;
use std::fmt;
use std::fmt::Write;
use std::marker::PhantomData;
use write_sql::WriteSql;
mod macros;
#[cfg(test)]
mod test;
mod binds;
mod cte;
mod distinct;
mod expr;
mod filter;
mod from;
mod group;
mod join;
mod limit;
mod offset;
mod order;
mod query_dsl;
mod row_locking;
mod select;
mod write_sql;
pub mod sql_types;
pub use binds::{Bind, Binds};
pub use cte::Ctes;
pub use distinct::Distinct;
pub use expr::{BinOp, Expr, ExprDsl, IntoExpr, UnOp};
pub use filter::Filter;
pub use from::{from, FromClause, IntoSubQuery, SubQuery};
pub use group::Group;
pub use join::{Join, JoinKind, JoinOn, JoinOnDsl};
pub use limit::Limit;
pub use offset::Offset;
pub use order::{NullsPosition, NullsPositionDsl, Order, OrderDsl};
pub use query_dsl::QueryDsl;
pub use select::{count, star, Select, Selection};
/// A named database table, rendered as a double-quoted SQL identifier.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Table {
    name: &'static str,
}

impl WriteSql for &Table {
    /// Writes `"name"`; consumes no bind parameters.
    fn write_sql<W: Write>(self, f: &mut W, _: &mut BindCount) -> fmt::Result {
        write!(f, "\"{}\"", self.name)
    }
}

impl Table {
    /// Creates a table reference with the given static name.
    pub fn new(name: &'static str) -> Self {
        Table { name }
    }

    /// Returns the table's name.
    pub fn name(&self) -> &'static str {
        // `name` already is a `&'static str`; return it directly instead of
        // taking a needless extra reference (`&self.name`).
        self.name
    }
}
/// A table-qualified column, rendered as `"table"."name"`.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Column {
    table: &'static str,
    name: &'static str,
}

impl Column {
    /// Creates a column reference qualified by its table's name.
    pub fn new(table: &'static str, name: &'static str) -> Self {
        Self { table, name }
    }
}

impl WriteSql for &Column {
    /// Writes `"table"."name"`; consumes no bind parameters.
    fn write_sql<W: Write>(self, f: &mut W, _: &mut BindCount) -> fmt::Result {
        write!(f, "\"{}\".\"{}\"", self.table, self.name)
    }
}
/// An SQL `SELECT` statement under construction, minus the select list
/// (pairing with a selection yields [`QueryWithSelect`]). `T` is the
/// table/schema marker type.
#[derive(Debug, Clone)]
pub struct Query<T> {
    ctes: Ctes<T>,
    from: FromClause<T>,
    joins: Vec<Join<T>>,
    // WHERE clause.
    filter: Option<Filter>,
    group: Option<Group>,
    // HAVING clause (also a `Filter`).
    having: Option<Filter>,
    order: Option<Order>,
    limit: Option<Limit>,
    offset: Option<Offset>,
    row_locking: RowLocking,
    distinct: Option<Distinct>,
    explain: Option<Explain>,
    // NOTE(review): looks redundant since `ctes`/`from` already mention `T`;
    // presumably kept for explicitness — confirm before removing.
    _marker: PhantomData<T>,
}
impl<T> Query<T> {
    /// Re-types the query from marker `T` to marker `K`, casting each
    /// `T`-parameterized clause and carrying the rest over unchanged.
    pub fn cast_to<K>(self) -> Query<K> {
        let Query {
            from,
            ctes,
            joins,
            filter,
            group,
            having,
            order,
            limit,
            offset,
            row_locking,
            distinct,
            explain,
            _marker,
        } = self;
        Query {
            from: from.cast_to::<K>(),
            ctes: ctes.cast_to::<K>(),
            joins: joins.cast_to::<K>(),
            filter,
            group,
            having,
            order,
            limit,
            offset,
            row_locking,
            distinct,
            explain,
            _marker: PhantomData,
        }
    }
    /// Clears all joins.
    pub fn remove_joins(mut self) -> Self {
        self.joins.clear();
        self
    }
    /// Clears the `WHERE` clause.
    pub fn remove_filters(mut self) -> Self {
        self.filter = None;
        self
    }
    /// Clears the `GROUP BY` clause.
    pub fn remove_group_by(mut self) -> Self {
        self.group = None;
        self
    }
    /// Clears the `HAVING` clause.
    pub fn remove_having(mut self) -> Self {
        self.having = None;
        self
    }
    /// Clears the `ORDER BY` clause.
    pub fn remove_order_by(mut self) -> Self {
        self.order = None;
        self
    }
    /// Clears the `LIMIT` clause.
    pub fn remove_limit(mut self) -> Self {
        self.limit = None;
        self
    }
    /// Clears the `OFFSET` clause.
    pub fn remove_offset(mut self) -> Self {
        self.offset = None;
        self
    }
    /// Disables `FOR UPDATE` row locking.
    pub fn remove_for_update(mut self) -> Self {
        self.row_locking.for_update = false;
        self
    }
    /// Disables `SKIP LOCKED`.
    pub fn remove_skip_locked(mut self) -> Self {
        self.row_locking.skip_locked = false;
        self
    }
    /// Disables `FOR KEY SHARE`.
    pub fn remove_for_key_share(mut self) -> Self {
        self.row_locking.for_key_share = false;
        self
    }
    /// Disables `FOR NO KEY UPDATE`.
    pub fn remove_for_no_key_update(mut self) -> Self {
        self.row_locking.for_no_key_update = false;
        self
    }
    /// Disables `FOR SHARE`.
    pub fn remove_for_share(mut self) -> Self {
        self.row_locking.for_share = false;
        self
    }
    /// Disables `NOWAIT`.
    pub fn remove_no_wait(mut self) -> Self {
        self.row_locking.no_wait = false;
        self
    }
    /// Clears the `DISTINCT` clause.
    pub fn remove_distinct(mut self) -> Self {
        self.distinct = None;
        self
    }
    /// Clears all common table expressions (`WITH ...`).
    pub fn remove_ctes(mut self) -> Self {
        self.ctes = Ctes::default();
        self
    }
    /// Clears the `EXPLAIN` prefix.
    pub fn remove_explain(mut self) -> Self {
        self.explain = None;
        self
    }
    /// Appends a join of the given `kind`.
    fn add_join(&mut self, join: JoinOn<T>, kind: JoinKind) {
        match join {
            JoinOn::Known { from, filter } => {
                self.joins.push(Join::Known { kind, from, filter });
            }
            JoinOn::Raw(sql) => {
                // NOTE(review): `kind` is dropped for raw joins even though the
                // variant is named `RawWithKind` — presumably the raw SQL is
                // expected to carry its own join keyword; confirm.
                self.joins.push(Join::RawWithKind(sql));
            }
        }
    }
}
impl<T, K> From<K> for Query<T>
where
    K: Into<FromClause<T>>,
{
    /// Builds an otherwise-empty query selecting from `from`: every optional
    /// clause starts unset, no joins, and no row-locking options enabled.
    fn from(from: K) -> Self {
        Self {
            ctes: Ctes::default(),
            from: from.into(),
            joins: Vec::new(),
            filter: None,
            group: None,
            having: None,
            order: None,
            limit: None,
            offset: None,
            row_locking: RowLocking::new(),
            distinct: None,
            explain: None,
            _marker: PhantomData,
        }
    }
}
/// A [`Query`] paired with its select list — a complete, renderable
/// `SELECT` statement.
#[derive(Debug, Clone)]
pub struct QueryWithSelect<T> {
    query: Query<T>,
    selection: Select,
}
impl<T> QueryWithSelect<T> {
    /// Renders the statement to SQL plus its ordered bind parameters.
    ///
    /// Two passes: first write the SQL while counting placeholders, then
    /// collect the bind values into a buffer sized from that count.
    pub fn to_sql(self) -> (String, Binds) {
        let mut bind_count = BindCount::new();
        let mut sql = String::new();
        self.to_sql_string(&mut sql, &mut bind_count);
        let mut binds = BindsInternal::with_capacity(bind_count.count());
        self.collect_binds(&mut binds);
        (sql, Binds::from(binds))
    }
    /// Writes the full `SELECT` statement, clause by clause, in SQL order:
    /// EXPLAIN, CTEs, select list, FROM, joins, WHERE, GROUP BY, HAVING,
    /// ORDER BY, LIMIT, OFFSET, row locking.
    fn to_sql_string<W: Write>(&self, f: &mut W, bind_count: &mut BindCount) {
        let result = (|| -> fmt::Result {
            if let Some(explain) = self.query.explain {
                explain.write_sql(f, bind_count)?;
            }
            self.query.ctes.write_sql(f, bind_count)?;
            write!(f, "SELECT ")?;
            if let Some(distinct) = &self.query.distinct {
                distinct.write_sql(f, bind_count)?;
            }
            self.selection.write_sql(f, bind_count)?;
            write!(f, " FROM ")?;
            self.query.from.write_sql(f, bind_count)?;
            for join in &self.query.joins {
                write!(f, " ")?;
                join.write_sql(f, bind_count)?;
            }
            if let Some(filter) = &self.query.filter {
                write!(f, " WHERE ")?;
                filter.write_sql(f, bind_count)?;
            }
            if let Some(group) = &self.query.group {
                write!(f, " GROUP BY ")?;
                group.write_sql(f, bind_count)?;
            }
            if let Some(having) = &self.query.having {
                write!(f, " HAVING ")?;
                having.write_sql(f, bind_count)?;
            }
            if let Some(order) = &self.query.order {
                write!(f, " ORDER BY ")?;
                order.write_sql(f, bind_count)?;
            }
            if let Some(limit) = &self.query.limit {
                write!(f, " LIMIT ")?;
                limit.0.write_sql(f, bind_count)?;
            }
            if let Some(offset) = &self.query.offset {
                write!(f, " OFFSET ")?;
                offset.0.write_sql(f, bind_count)?;
            }
            self.query.row_locking.write_sql(f, bind_count)?;
            Ok(())
        })();
        // `fmt::Result` only fails if the underlying writer fails.
        result.expect("WriteSql should never fail");
    }
    /// Collects bind values in the same order `to_sql_string` emits
    /// placeholders.
    fn collect_binds(&self, binds: &mut BindsInternal) {
        self.query.collect_binds(binds);
    }
    /// Re-types the statement from marker `T` to marker `K`.
    pub fn cast_to<K>(self) -> QueryWithSelect<K> {
        let QueryWithSelect { query, selection } = self;
        QueryWithSelect {
            query: query.cast_to::<K>(),
            selection,
        }
    }
    /// Combines two selects with `UNION`.
    pub fn union<K>(self, other: QueryWithSelect<K>) -> Union<T> {
        Union::Pair(UnionKind::Default, self, other.cast_to())
    }
    /// Combines two selects with `UNION ALL`.
    pub fn union_all<K>(self, other: QueryWithSelect<K>) -> Union<T> {
        Union::Pair(UnionKind::All, self, other.cast_to())
    }
    /// Combines two selects with `UNION DISTINCT`.
    pub fn union_distinct<K>(self, other: QueryWithSelect<K>) -> Union<T> {
        Union::Pair(UnionKind::Distinct, self, other.cast_to())
    }
}
impl<T> WriteSql for &QueryWithSelect<T> {
    /// Writes the full statement, e.g. as a subquery or union operand.
    fn write_sql<W: Write>(self, f: &mut W, bind_count: &mut BindCount) -> fmt::Result {
        self.to_sql_string(f, bind_count);
        Ok(())
    }
}
impl<T> CollectBinds for Box<QueryWithSelect<T>> {
    /// Delegates to the boxed statement's inner query.
    fn collect_binds(&self, binds: &mut BindsInternal) {
        self.query.collect_binds(binds)
    }
}
impl<T> CollectBinds for Query<T> {
    /// Pushes bind values in the exact order the SQL writer emits
    /// placeholders: CTEs, FROM, joins, WHERE, GROUP BY, HAVING, ORDER BY,
    /// LIMIT, OFFSET, row locking.
    fn collect_binds(&self, binds: &mut BindsInternal) {
        self.ctes.collect_binds(binds);
        self.from.collect_binds(binds);
        self.joins.collect_binds(binds);
        if let Some(filter) = &self.filter {
            filter.collect_binds(binds);
        }
        if let Some(group) = &self.group {
            group.collect_binds(binds);
        }
        if let Some(having) = &self.having {
            having.collect_binds(binds);
        }
        if let Some(order) = &self.order {
            order.collect_binds(binds);
        }
        if let Some(limit) = &self.limit {
            limit.0.collect_binds(binds);
        }
        if let Some(offset) = &self.offset {
            offset.0.collect_binds(binds);
        }
        self.row_locking.collect_binds(binds);
    }
}
impl CollectBinds for Table {
    /// A bare table name carries no bind parameters.
    fn collect_binds(&self, _: &mut BindsInternal) {}
}
/// Conversion of a single value, or a tuple of values, into a column list.
pub trait IntoColumns {
    fn into_columns(self) -> Vec<Column>;
}
/// A single column-convertible value becomes a one-element list.
impl<T> IntoColumns for T
where
    T: Into<Column>,
{
    fn into_columns(self) -> Vec<Column> {
        vec![self.into()]
    }
}
/// A one-element tuple behaves exactly like its single element.
impl<T> IntoColumns for (T,)
where
    T: Into<Column>,
{
    fn into_columns(self) -> Vec<Column> {
        vec![self.0.into()]
    }
}
// Implements `IntoColumns` for tuples by recursing on the head of the type
// list: each expansion of the second rule emits the impl for the full tuple,
// then recurses with the first type dropped, bottoming out at the
// two-element base case.
macro_rules! impl_into_columns {
    // Base case: two-element tuple.
    (
        $first:ident, $second:ident,
    ) => {
        #[allow(warnings)]
        impl<$first, $second> IntoColumns for ($first, $second)
        where
            $first: Into<Column>,
            $second: Into<Column>,
        {
            fn into_columns(self) -> Vec<Column> {
                // Type parameter names are reused as binding names here.
                let ($first, $second) = self;
                vec![$first.into(), $second.into()]
            }
        }
    };
    // Recursive case: emit the impl for the whole tuple, then recurse.
    (
        $head:ident, $($tail:ident),*,
    ) => {
        #[allow(warnings)]
        impl<$head, $($tail),*> IntoColumns for ($head, $($tail),*)
        where
            $head: Into<Column>,
            $( $tail: Into<Column> ),*
        {
            fn into_columns(self) -> Vec<Column> {
                let ($head, $($tail),*) = self;
                vec![
                    $head.into(),
                    $( $tail.into(), )*
                ]
            }
        }
        impl_into_columns!($($tail),*,);
    };
}
// Generate impls for tuple arities 2 through 32.
impl_into_columns!(
    T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21,
    T22, T23, T24, T25, T26, T27, T28, T29, T30, T31, T32,
);
/// Which SQL `UNION` variant to emit between two selects.
#[derive(Debug)]
pub enum UnionKind {
    /// `UNION`
    Default,
    /// `UNION ALL`
    All,
    /// `UNION DISTINCT`
    Distinct,
}
/// A left-associated chain of unioned selects: either a single pair, or a
/// previously built chain plus one more select.
#[derive(Debug)]
pub enum Union<T> {
    Pair(UnionKind, QueryWithSelect<T>, QueryWithSelect<T>),
    And(UnionKind, Box<Union<T>>, QueryWithSelect<T>),
}
impl WriteSql for &UnionKind {
    /// Writes the union keyword(s), padded with spaces on both sides; no
    /// bind parameters are consumed.
    fn write_sql<W: Write>(self, f: &mut W, _: &mut BindCount) -> fmt::Result {
        let keyword = match self {
            UnionKind::Default => " UNION ",
            UnionKind::All => " UNION ALL ",
            UnionKind::Distinct => " UNION DISTINCT ",
        };
        f.write_str(keyword)
    }
}
impl<T> Union<T> {
    /// Renders the union chain to SQL plus its ordered bind parameters.
    ///
    /// Mirrors `QueryWithSelect::to_sql`: write SQL first (counting binds),
    /// then collect the bind values.
    pub fn to_sql(self) -> (String, Binds) {
        let mut bind_count = BindCount::new();
        let mut sql = String::new();
        self.to_sql_recurs(&mut sql, &mut bind_count);
        let mut binds = BindsInternal::with_capacity(bind_count.count());
        self.collect_binds_recurs(&mut binds);
        (sql, Binds::from(binds))
    }
    /// Writes the chain left-to-right, separated by the union keywords.
    fn to_sql_recurs(&self, sql: &mut String, bind_count: &mut BindCount) {
        match self {
            Union::Pair(kind, lhs, rhs) => {
                lhs.to_sql_string(sql, bind_count);
                kind.write_sql(sql, bind_count).unwrap();
                rhs.to_sql_string(sql, bind_count);
            }
            Union::And(kind, head, tail) => {
                head.to_sql_recurs(sql, bind_count);
                kind.write_sql(sql, bind_count).unwrap();
                tail.to_sql_string(sql, bind_count);
            }
        }
    }
    /// Collects binds in the same left-to-right order as `to_sql_recurs`.
    fn collect_binds_recurs(&self, binds: &mut BindsInternal) {
        match self {
            Union::Pair(_, lhs, rhs) => {
                lhs.collect_binds(binds);
                rhs.collect_binds(binds);
            }
            Union::And(_, head, tail) => {
                head.collect_binds_recurs(binds);
                tail.collect_binds(binds);
            }
        }
    }
    /// Appends one more select with `UNION`.
    pub fn union<K>(self, other: QueryWithSelect<K>) -> Union<T> {
        Union::And(UnionKind::Default, Box::new(self), other.cast_to())
    }
    /// Appends one more select with `UNION ALL`.
    pub fn union_all<K>(self, other: QueryWithSelect<K>) -> Union<T> {
        Union::And(UnionKind::All, Box::new(self), other.cast_to())
    }
    /// Appends one more select with `UNION DISTINCT`.
    pub fn union_distinct<K>(self, other: QueryWithSelect<K>) -> Union<T> {
        Union::And(UnionKind::Distinct, Box::new(self), other.cast_to())
    }
}
/// `EXPLAIN` prefix options for a query.
#[derive(Debug, Clone, Copy)]
pub enum Explain {
    /// Plain `EXPLAIN`.
    Default,
    /// `EXPLAIN ANALYZE`.
    Analyze,
}

impl WriteSql for Explain {
    /// Writes the explain keyword(s) including the trailing space; no bind
    /// parameters are consumed.
    fn write_sql<W: Write>(self, f: &mut W, _: &mut BindCount) -> fmt::Result {
        let keyword = match self {
            Explain::Default => "EXPLAIN ",
            Explain::Analyze => "EXPLAIN ANALYZE ",
        };
        f.write_str(keyword)
    }
}
|
// コマンドライン引数
// Command-line arguments
pub mod a {
    use std::env;
    /// Number of command-line arguments, excluding the program name.
    pub fn args_count() -> i32 {
        (env::args().count() - 1) as i32
    }
    /// The command-line arguments as a list, excluding the program name.
    pub fn args() -> Vec<String> {
        env::args().skip(1).collect()
    }
}
// 変換
pub mod b {
// 文字列 &str と String
// &str から String へ
pub fn to_String(s: &str) -> String {
String::from(s)
}
// 文字列 &str と数
pub fn str_to_i32(s:&str) -> i32 {
s.parse().unwrap()
}
// 文字列 String と数
pub fn string_to_i32(s:String) -> i32 {
s.parse().unwrap()
}
pub fn i32_to_string(n:i32) -> String {
n.to_string()
}
// 数値どうし
pub fn f64_to_i32(f:f64) -> i32 {
f as i32
}
pub fn i32_to_f64(n:i32) -> f64 {
n as f64
}
}
// 文字列 String
pub mod c {
pub fn length(s:String) -> i32 {
s.len() as i32
}
pub fn toupper(s:String) -> String {
s.to_uppercase()
}
pub fn tolower(s:String) -> String {
s.to_lowercase()
}
pub fn strip(s:String) -> String {
let s1: &str = &s;
let s2: &str = s1.trim();
String::from(s2)
}
} |
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::account_state::AccountState;
use libra_types::account_address::AccountAddress;
use std::collections::BTreeMap;
use std::ops::{Deref, DerefMut};
//TODO (jole) need maintain network state?
/// Lifecycle stage of a payment channel.
#[derive(Clone, Debug, Copy, Ord, PartialOrd, Eq, PartialEq)]
pub enum ChannelStage {
    /// Channel is waiting to open operator finish.
    Opening,
    /// Channel is idle, can execute new txn.
    Idle,
    /// Channel is pending for some tx not finished.
    Pending,
    /// Channel is(or will start) applying some txn to local db.
    Syncing,
    /// Channel is closed.
    Closed,
    /// Channel is locked. NOTE(review): undocumented upstream; presumably
    /// part of a dispute flow together with `Challenged`/`Resolved` — confirm.
    Locked,
    /// Channel is being challenged (see note on `Locked`).
    Challenged,
    /// Channel dispute has been resolved (see note on `Locked`).
    Resolved,
}
/// A participant's view of a channel: the participant's address plus the
/// associated account state.
#[derive(Clone, Debug)]
pub struct ChannelState {
    address: AccountAddress,
    state: AccountState,
}
impl ChannelState {
    /// Creates a channel state for `address` with a fresh, empty account state.
    pub fn empty(address: AccountAddress) -> Self {
        Self {
            address,
            state: AccountState::new(),
        }
    }
    /// Creates a channel state from an existing account state.
    pub fn new(address: AccountAddress, state: AccountState) -> Self {
        Self { address, state }
    }
    // pub fn paths(&self) -> Result<HashSet<DataPath>> {
    //     let paths =
    //         self.state
    //             .keys()
    //             .map(|k| DataPath::from(k))
    //             .try_fold(HashSet::new(), |mut s, e| {
    //                 e.map(|dp| {
    //                     s.insert(dp);
    //                     s
    //                 })
    //             });
    //
    //     paths
    // }
    /// The channel participant's account address.
    pub fn address(&self) -> &AccountAddress {
        &self.address
    }
    /// The version of the underlying account state.
    pub fn version(&self) -> u64 {
        self.state.version()
    }
}
// Deref/AsRef plumbing exposing the inner key/value map directly.
impl Deref for ChannelState {
    type Target = BTreeMap<Vec<u8>, Vec<u8>>;
    // NOTE(review): `&self.state` is an `&AccountState`; this compiles via
    // `AccountState`'s own deref/coercion to the map type — confirm.
    fn deref(&self) -> &Self::Target {
        &self.state
    }
}
impl DerefMut for ChannelState {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.state
    }
}
impl AsRef<BTreeMap<Vec<u8>, Vec<u8>>> for ChannelState {
    fn as_ref(&self) -> &BTreeMap<Vec<u8>, Vec<u8>> {
        &self.state
    }
}
impl AsMut<BTreeMap<Vec<u8>, Vec<u8>>> for ChannelState {
    fn as_mut(&mut self) -> &mut BTreeMap<Vec<u8>, Vec<u8>> {
        &mut self.state
    }
}
|
use anyhow::Result;
use log::info;
use proger_backend::{DynamoDbDriver, Server};
use rusoto_core::Region;
use rusoto_dynamodb::DynamoDbClient;
use std::env;
use std::str::FromStr;
/// Configures logging, then creates and runs the backend server on
/// `localhost:8080` against DynamoDB in us-east-1.
fn main() -> Result<()> {
    // Set the logging verbosity — but only when the caller has not already
    // configured it, so an operator-supplied RUST_LOG is respected instead of
    // being clobbered.
    if env::var("RUST_LOG").is_err() {
        env::set_var(
            "RUST_LOG",
            format!(
                "actix_web={},core={},backend={},proger_backend={}",
                "debug", "debug", "debug", "debug",
            ),
        );
    }
    // Initialize the logger
    env_logger::init();
    info!("Starting server");
    // Create the server, then start it.
    let server = Server::new(
        "localhost:8080".to_string(),
        DynamoDbDriver(DynamoDbClient::new(Region::UsEast1)),
    )?;
    server.start()?;
    Ok(())
}
|
extern crate structopt;
use assembly_maps::raw::reader::*;
use byteorder::{ReadBytesExt, LE};
use std::fs::File;
use std::io::BufReader;
use std::io::Write;
use std::path::PathBuf;
use structopt::StructOpt;
/// Command-line options.
#[derive(Debug, StructOpt)]
#[structopt(name = "read-raw", about = "Analyze a LU Terrain File.")]
struct Opt {
    /// Input file (`*raw`)
    #[structopt(parse(from_os_str))]
    input: PathBuf,
}
/// Top-level CLI errors.
#[derive(Debug)]
pub enum Error {
    // NOTE(review): never constructed in this file — confirm it is still needed.
    NotImplemented,
    FileNotFound,
}
/// Reads and dumps the sections of an LU terrain (`*.raw`) file.
///
/// Prints the header and first chunk, logs the padding bytes, and writes the
/// two embedded files to `out.dds` / `out2.dds` in the working directory.
pub fn main() -> Result<(), Error> {
    let opt = Opt::from_args();
    if !opt.input.exists() || !opt.input.is_file() {
        return Err(Error::FileNotFound);
    }
    let file = File::open(opt.input.as_path()).unwrap();
    let mut buf = BufReader::new(file);
    // Sections are read strictly in file order; each read advances the cursor.
    let header = buf.read_terrain_header().unwrap();
    let chunk1 = buf.read_terrain_chunk().unwrap();
    let hmh = buf.read_height_map_header().unwrap();
    let _hm_data = buf.read_height_map_data(hmh.width, hmh.height).unwrap();
    let _cm_data = buf.read_color_map_data().unwrap();
    let lm_data = buf.read_embedded_file().unwrap();
    let _cm2_data = buf.read_color_map_data().unwrap();
    // NOTE(review): one padding byte here but an i32 of padding later —
    // presumably quirks of the RAW layout; confirm against the format spec.
    let _padding1 = buf.read_u8().unwrap();
    let lm2_data = buf.read_embedded_file().unwrap();
    let _padding2 = buf.read_i32::<LE>().unwrap();
    println!("{:?}", header);
    println!("{:?}", chunk1);
    dbg!(_padding1);
    dbg!(_padding2);
    // Dump the embedded files (presumably DDS textures, per the extension).
    let mut out = File::create("out.dds").unwrap();
    out.write_all(lm_data.as_slice()).unwrap();
    let mut out = File::create("out2.dds").unwrap();
    out.write_all(lm2_data.as_slice()).unwrap();
    Ok(())
}
|
//! Rust API wrapper for the Tendermint JSONRPC/HTTP, with support for querying
//! state from a running full node.
#![deny(warnings, missing_docs, unused_import_braces, unused_qualifications)]
#![forbid(unsafe_code)]
#![doc(html_root_url = "https://docs.rs/tendermint-rpc/0.0.0")]
#[macro_use]
extern crate serde_derive;
pub mod channel;
pub mod endpoints;
pub mod jsonrpc;
mod node_info;
pub use crate::node_info::NodeInfo;
pub use tendermint::Address;
|
//! Crate `inspector` extends popular data structures (such as `Option` and `Result`)
//! with additional methods for inspecting their payload. It is inspired by the `Iterator::inspect`.
//! Since no such methods are available by default on `Option` and `Result` types, this crate
//! implements new traits for these types, which augment the respective types with various
//! inspection capabilities.
//!
//! Implementation and availability of each trait is guarded by the dedicated feature, so that
//! you can choose which one is available. Sometimes you want these only for debug purposes, but
//! prefer to always leave the code in place. Feature `debug-only` helps in this case.
//! If enabled and compiled in `release` mode the combinators become effectively NOP.
//! This feature does nothing in `debug` mode.
//!
//! # Features
//! - `debug-only` - turns the combinators into NOP in release mode
//! - `option` - enables trait `OptionInspector`
//! - `result` - enables trait `ResultInspector`
//! - `futures` - enables trait `FuturesInspector`
#![cfg_attr(feature = "pedantic", warn(clippy::pedantic))]
#![warn(clippy::use_self)]
#![warn(deprecated_in_future)]
#![warn(future_incompatible)]
#![warn(unreachable_pub)]
#![warn(missing_debug_implementations)]
#![warn(rust_2018_compatibility)]
#![warn(rust_2018_idioms)]
#![warn(unused)]
#![deny(warnings)]
#[cfg(feature = "futures")]
mod future;
#[cfg(feature = "iter")]
mod iter;
#[cfg(feature = "option")]
mod option;
#[cfg(feature = "result")]
mod result;
#[cfg(feature = "futures")]
pub use crate::future::FutureInspector;
#[cfg(feature = "iter")]
pub use crate::iter::IterInspector;
#[cfg(feature = "option")]
pub use crate::option::OptionInspector;
#[cfg(feature = "result")]
pub use crate::result::ResultInspector;
|
use crate::db::Db;
use crate::error::Result;
use crate::query::Query;
/// Renders the results of `query` against `db` as an HTML table: one header
/// cell per selected field, then one row per result row.
pub fn render_query(query: &Query, db: &Db) -> Result<String> {
    let mut html = String::with_capacity(2048);
    let model = db.get_model(&query.model)?;
    // Header row; remember the resolved fields for rendering the body.
    html.push_str("<table><tr>");
    let mut fields = Vec::new();
    for name in &query.fields {
        let field = model.get_field(name)?;
        html.push_str("<th>");
        html.push_str(&field.label());
        html.push_str("</th>");
        fields.push(field);
    }
    html.push_str("</tr>");
    // Body: one <td> per field per row, rendered by the field itself.
    db.select_query(query, |row| {
        html.push_str("<tr>");
        let mut col = 0;
        for field in &fields {
            html.push_str("<td>");
            field.render_html(&mut html, row, col)?;
            html.push_str("</td>");
            col += 1;
        }
        html.push_str("</tr>");
        Ok(())
    })?;
    html.push_str("</table>");
    Ok(html)
}
|
pub mod component;
pub mod entity;
pub mod system;
|
use serenity::framework::standard::{macros::command, Args, CommandError, CommandResult};
use serenity::model::prelude::{Member, Message, UserId};
use serenity::prelude::Context;
use serenity::utils::{parse_username, Colour};
use crate::core::consts::MAIN_COLOUR;
/// Resolves the target user id from command arguments: no arguments means the
/// message author; otherwise the first argument is tried as a mention
/// (`<@id>`) and then as a raw numeric id.
fn user_id_from_message(message: &Message, mut args: Args) -> Result<UserId, CommandError> {
    if args.is_empty() {
        return Ok(message.author.id);
    }
    let arg = args.single::<String>()?;
    // Mention form first…
    if let Some(user_id) = parse_username(&arg).map(UserId) {
        return Ok(user_id);
    }
    // …then a plain numeric id.
    if let Ok(user_id) = arg.parse::<UserId>() {
        return Ok(user_id);
    }
    Err(CommandError::from("Error parsing mention"))
}
// `who` command: replies with an embed showing a member's nickname, colour,
// avatar, roles, and guild join date. Target selection is handled by
// `user_id_from_message`. (Deliberately a `//` comment: serenity's
// `#[command]` macro consumes `///` doc attributes as command metadata.)
#[command]
async fn who(context: &Context, message: &Message, args: Args) -> CommandResult {
    let user_id = user_id_from_message(&message, args)?;
    // Only meaningful inside a guild; DMs have no members.
    let member: Member = match message.guild_id {
        Some(guild_id) => guild_id
            .member(&context, user_id)
            .await
            .map_err(|_| CommandError::from("The user is not a member of this guild."))?,
        None => return Err(CommandError::from("Unexpected Error! Seek help!")),
    };
    // Fall back to the default embed colour when the member has no role colour.
    let colour = member
        .colour(&context)
        .unwrap_or_else(|| Colour::new(MAIN_COLOUR));
    // Prefer the guild nickname; fall back to the display name.
    let nick = member
        .nick
        .clone()
        .unwrap_or_else(|| member.display_name().to_string());
    let joined_date = match member.joined_at {
        Some(date) => date.format("%a, %B %e, %Y at %H:%M:%S").to_string(),
        None => "N/A".to_string(),
    };
    let avatar_url = member
        .user
        .avatar_url()
        .unwrap_or_else(|| member.user.default_avatar_url());
    // Role mentions, plus the implicit @everyone role.
    let mut roles = member
        .roles
        .into_iter()
        .map(|role_id| format!("<@&{}>", role_id))
        .collect::<Vec<String>>();
    roles.push("@everyone".to_string());
    // Send failures are deliberately ignored (`let _`).
    let _ = message
        .channel_id
        .send_message(&context, |m| {
            m.embed(|e| {
                e.title(nick)
                    .colour(colour)
                    .thumbnail(avatar_url)
                    .field("Roles", roles.join(" "), false)
                    .field("Joined", joined_date, false)
            })
        })
        .await;
    Ok(())
}
|
//! `loggest` provides a high performance logging facility for Rust's [log](https://docs.rs/log) crate.
//!
//! Instead of writing logs to a file, `loggest` writes them to a pipe. The other end of the pipe is
//! opened by a daemon which is responsible for writing the logs (and possibly compressing them).
//! # Multithreading
//!
//! Each thread maintains its connection to the log daemon to avoid locking for each log line.
mod ignore;
mod output;
mod session;
use derive_more::From;
use log::{set_logger, set_max_level, LevelFilter, Log, Metadata, Record};
use std::ffi::OsString;
use std::io;
use thiserror::Error;
pub use output::flush;
/// The singleton logger registered with the `log` crate by [`init`].
static LOGGER: Loggest = Loggest;
/// Global configuration, written once by [`init`].
// NOTE(review): `static mut` is unsynchronized; this is sound only if `init`
// completes before any other thread logs — confirm, or migrate to `OnceLock`.
static mut CONFIG: Option<Config> = None;
/// Zero-sized logger type; all state lives in `CONFIG`.
struct Loggest;
/// Runtime configuration captured at `init` time.
struct Config {
    // Maximum level to emit.
    level: LevelFilter,
    // Base name for per-thread log pipes.
    base_filename: OsString,
}
/// Error initializing `loggest`
#[derive(Debug, Error, From)]
pub enum LoggestError {
    /// Underlying I/O failure.
    #[error("I/O error: `{0}`")]
    IoError(#[source] io::Error),
    /// A logger was already registered with the `log` crate.
    #[error("Set logger error: `{0}`")]
    SetLoggerError(#[source] log::SetLoggerError),
    /// The supplied file name was not valid UTF-8.
    #[error("File name must be a valid utf-8")]
    BadFileName,
}
/// Initialize `loggest`. Must only be called once.
///
/// The `base_filename` argument is used as the name for the main thread. Other threads append `.<thread_id>`.
///
/// # Example
/// ```no_run
/// loggest::init(log::LevelFilter::max(), env!("CARGO_PKG_NAME")).unwrap();
/// ```
pub fn init<P>(level: LevelFilter, base_filename: P) -> Result<FlushGuard, LoggestError>
where
    P: Into<OsString>,
{
    let base_filename: OsString = base_filename.into();
    // Register our global logger first; `?` surfaces `SetLoggerError` if a
    // logger was already installed.
    set_logger(&LOGGER)?;
    set_max_level(level);
    // SAFETY(review): writing the `static mut` is race-free only because
    // `set_logger` succeeds at most once, so this block runs once — confirm.
    unsafe {
        debug_assert!(CONFIG.is_none());
        CONFIG = Some(Config { level, base_filename });
    }
    Ok(FlushGuard)
}
impl Log for Loggest {
    /// A record is enabled when its level passes the configured filter.
    fn enabled(&self, metadata: &Metadata) -> bool {
        // SAFETY(review): reads the `static mut` written in `init`; sound
        // only if `init` completed first — confirm. Panics if `init` was
        // never called.
        metadata.level() <= unsafe { CONFIG.as_ref().unwrap().level }
    }
    /// Filters by level, then forwards the record to the output pipe.
    fn log(&self, record: &Record) {
        if !self.enabled(record.metadata()) {
            return;
        }
        output::log(record);
    }
    /// No-op: flushing happens explicitly via [`flush`] / `FlushGuard`.
    fn flush(&self) {}
}
/// Guard returned by [`init`]; flushes buffered log output when dropped.
pub struct FlushGuard;
impl Drop for FlushGuard {
    fn drop(&mut self) {
        flush();
    }
}
|
/*
Copyright 2019-2023 Didier Plaindoux
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use crate::parser::response::Response;
use crate::parser::response::Response::{Reject, Success};
use crate::stream::stream::Stream;
/// Marker trait for combinators producing values of type `A`.
pub trait Combine<A> {}
/// A parser over stream type `S` producing values of type `A`.
pub trait Parse<A, S>
where
    S: Stream,
{
    /// Runs the parser on `s`, yielding `Success` or `Reject`.
    fn parse(&self, s: S) -> Response<A, S>;
    /// Like `parse`, but discards the produced value, keeping the remaining
    /// stream and the third response component (presumably a consumed flag —
    /// confirm against `Response`'s definition).
    fn check(&self, s: S) -> Response<(), S> {
        match self.parse(s) {
            Success(_, s, c) => Success((), s, c),
            Reject(s, c) => Reject(s, c),
        }
    }
}
|
use myexponent::pow;
/// Demo: prints 8 squared using `myexponent::pow`.
fn main() {
    // NOTE(review): `print!` emits no trailing newline — presumably intended.
    print!("8 raised to 2 is {}", pow(8, 2));
}
use crate::App;
use bincode::deserialize;
use dominator::Dom;
use onitama_lib::ServerMsg;
use wasm_bindgen::{prelude::Closure, JsCast, JsValue};
use web_sys::{window, MessageEvent};
/// Opens a WebSocket to `url` and builds the game's DOM wired to it.
///
/// Incoming binary frames are bincode-decoded into `ServerMsg` and pushed
/// into the app's state; the socket closing marks the game as done.
pub fn game_dom(url: &str) -> Dom {
    let socket = web_sys::WebSocket::new(url).unwrap();
    // Server messages arrive as binary ArrayBuffer frames.
    socket.set_binary_type(web_sys::BinaryType::Arraybuffer);
    let app = App::new();
    let game_clone = app.game.clone();
    let timestamp_clone = app.timestamp.clone();
    // On each message: decode the ServerMsg and record its arrival time.
    let onmessage = Closure::wrap(Box::new(move |e: MessageEvent| {
        let buf = e.data().dyn_into::<js_sys::ArrayBuffer>().unwrap();
        let array = js_sys::Uint8Array::new(&buf).to_vec();
        let msg: ServerMsg = deserialize(&array[..]).unwrap();
        game_clone.set(msg);
        timestamp_clone.set(window().unwrap().performance().unwrap().now())
    }) as Box<dyn FnMut(MessageEvent)>);
    socket.set_onmessage(Some(onmessage.as_ref().unchecked_ref()));
    // Leak the closure so it outlives this call (standard wasm-bindgen pattern).
    onmessage.forget();
    let done_clone = app.done.clone();
    // Mark the game finished when the connection closes.
    let onclose = Closure::wrap(Box::new(move |_| {
        done_clone.set(true);
    }) as Box<dyn FnMut(JsValue)>);
    socket.set_onclose(Some(onclose.as_ref().unchecked_ref()));
    onclose.forget();
    app.render(&socket)
}
|
// Copyright 2018 Jeffery Xiao, 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![no_main]
#[macro_use]
extern crate libfuzzer_sys;
#[macro_use]
extern crate log;
extern crate arbitrary;
extern crate counted_skiplist;
use arbitrary::*;
use counted_skiplist::TspList;
use counted_skiplist::Finger;
/// Differential fuzz test: drives a `TspList` and a plain `Vec<u8>` model
/// through the same randomly chosen operations and asserts they agree, also
/// mirroring up to two `Finger`s as model indices to check finger validity.
///
/// Returns `Err` only when the fuzz input buffer runs out of bytes.
fn fuzztest(data: &[u8]) -> Result<(), <arbitrary::FiniteBuffer<'_> as Unstructured>::Error> {
    let _ = env_logger::try_init();
    let mut buff = FiniteBuffer::new(data, 4048).unwrap();
    // Group function: an element's group value is itself widened to i32, so
    // `finger_difference` sums are easy to mirror on the model.
    let closure = |&x: &_, &_y: &_| x as i32;
    let mut list = TspList::new_with_group(closure);
    let mut list2 = TspList::new_with_group(closure);
    // `model`/`model2` mirror `list`/`list2`.
    let mut model = Vec::<u8>::new();
    let mut model2 = Vec::<u8>::new();
    // Fingers into `list`, plus the index each one should point at.
    let mut finger1: Option<Finger<u8>> = None;
    let mut finger2: Option<Finger<u8>> = None;
    let mut modelfinger1: Option<usize> = None;
    let mut modelfinger2: Option<usize> = None;
    let iterations = buff.container_size()?;
    let mut val = 0;
    for _ in 0..iterations {
        // BUGFIX: index arithmetic was previously done in `u8`, so
        // `size + 1` overflowed (debug-build panic) once the list reached 255
        // elements, and `model.len() as u8` silently truncated beyond that.
        // All index math is now done in `usize`.
        let size = model.len();
        let op: u8 = Arbitrary::arbitrary(&mut buff)?;
        match op % 8 {
            0 => {
                // Insert `val` at a random position, optionally keeping a
                // finger to the new element.
                let ind8: u8 = Arbitrary::arbitrary(&mut buff)?;
                let keepfinger: u8 = Arbitrary::arbitrary(&mut buff)?;
                let ind = ind8 as usize % (size + 1);
                debug!("insert {} {} (kept finger {})", ind, val, keepfinger != 0);
                let fngr = list.insert(ind, val);
                model.insert(ind, val);
                val = val + 1;
                if keepfinger != 0 {
                    finger2 = finger1;
                    finger1 = Some(fngr);
                    modelfinger2 = modelfinger1;
                    modelfinger1 = Some(ind);
                } else {
                    // Inserting at or before a remembered index shifts it right.
                    if let Some(mf) = modelfinger1 {
                        if mf >= ind {
                            modelfinger1 = Some(mf + 1);
                        }
                    }
                    if let Some(mf) = modelfinger2 {
                        if mf >= ind {
                            modelfinger2 = Some(mf + 1);
                        }
                    }
                }
            },
            1 => {
                // Random read must match the model.
                if size > 0 {
                    let ind8: u8 = Arbitrary::arbitrary(&mut buff)?;
                    let ind = ind8 as usize % size;
                    debug!("get {}", ind);
                    assert_eq!(
                        *list.get(ind).unwrap(),
                        model[ind]
                    );
                }
            },
            2 => {
                // Random removal; fingers at the removed index are dropped,
                // fingers past it shift left.
                if size > 0 {
                    let ind8: u8 = Arbitrary::arbitrary(&mut buff)?;
                    let ind = ind8 as usize % size;
                    debug!("remove {}", ind);
                    assert_eq!(list.remove(ind), model.remove(ind));
                    if let Some(mf) = modelfinger1 {
                        if ind == mf {
                            finger1 = None;
                            modelfinger1 = None;
                        } else if mf > ind {
                            modelfinger1 = Some(mf - 1);
                        }
                    }
                    if let Some(mf) = modelfinger2 {
                        if ind == mf {
                            finger2 = None;
                            modelfinger2 = None;
                        } else if mf > ind {
                            modelfinger2 = Some(mf - 1);
                        }
                    }
                }
            }
            3 => {
                // Swap the two list/model pairs; fingers into the old list
                // are no longer tracked, so forget them.
                debug!("swap");
                std::mem::swap(&mut list, &mut list2);
                std::mem::swap(&mut model, &mut model2);
                finger1 = None;
                finger2 = None;
                modelfinger1 = None;
                modelfinger2 = None;
            }
            4 => {
                // Append list2 onto list, leaving list2 empty.
                debug!("append");
                list = list + std::mem::replace(&mut list2, TspList::new_with_group(closure));
                model.append(&mut model2);
            }
            5 => {
                debug!("clear");
                list.clear();
                model.clear();
                finger1 = None;
                finger2 = None;
                modelfinger1 = None;
                modelfinger2 = None;
            }
            6 => {
                // Split off everything after the first remembered finger.
                if let Some(mf) = modelfinger1 {
                    debug!("split");
                    list2 = unsafe { list.split_at_finger(finger1.unwrap()) };
                    model2 = model.split_off(mf + 1);
                    finger1 = None;
                    finger2 = None;
                    modelfinger1 = None;
                    modelfinger2 = None;
                }
            }
            7 => {
                // Check finger_difference: signed distance plus the group sum
                // over the range between the two positions.
                if size > 0 {
                    let ind18: u8 = Arbitrary::arbitrary(&mut buff)?;
                    let ind1 = ind18 as usize % size;
                    let ind28: u8 = Arbitrary::arbitrary(&mut buff)?;
                    let ind2 = ind28 as usize % size;
                    debug!("Compare finger {} {}", ind1, ind2);
                    let finger1 = list.get_finger(ind1).unwrap();
                    let finger2 = list.get_finger(ind2).unwrap();
                    let (distance, sum) = unsafe { list.finger_difference(finger1, finger2) };
                    assert_eq!(distance, ind2 as i64 - ind1 as i64);
                    let mut modelsum: i32 = model[ind1.min(ind2)..ind1.max(ind2)]
                        .iter()
                        .map(|&n| n as i32)
                        .sum();
                    // Sum is negated when the fingers are given in reverse order.
                    if ind1 > ind2 {
                        modelsum *= -1;
                    };
                    assert_eq!(sum, modelsum);
                }
            }
            _ => panic!("Invalid op!"),
        }
        // Structural invariants of the skiplist must hold after every op.
        counted_skiplist::check_valid(&list);
    }
    Ok(())
}
// libFuzzer entry point: run one differential test per input. Errors from
// `fuzztest` only mean the fuzz input ran out of bytes, so they are ignored.
fuzz_target!(|data: &[u8]| {
    fuzztest(data).ok();
});
|
//#![allow(dead_code, unused_imports)]
extern crate amethyst;
extern crate tiled;
mod tiled_map;
use tiled_map::load_tmx_map;
use amethyst::prelude::*;
use amethyst::input::{InputBundle, InputHandler};
use amethyst::ecs::{Component, Entity, Join, NullStorage, Read, ReadStorage, System, WriteStorage};
use amethyst::core::transform::{Parent, Transform, GlobalTransform, TransformBundle};
use amethyst::core::nalgebra::{MatrixArray, Matrix4, Vector3};
use amethyst::assets::{AssetStorage, Loader};
use amethyst::renderer::{
Camera, ColorMask, DepthMode, DisplayConfig, DrawFlat2D, Event, Projection, Pipeline, PngFormat,
Texture, Transparent, TextureHandle, TextureMetadata, RenderBundle,
Sprite, SpriteRender, ScreenDimensions, SpriteSheet, SpriteSheetFormat,
SpriteSheetHandle, Stage, VirtualKeyCode, ALPHA
};
// Solid black clear color (RGBA), used as the render stage background.
pub const BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0];
// Player and...
// Marker component identifying the player entity. It carries no data, so
// `NullStorage` is sufficient.
#[derive(Default)]
struct Player;
impl Component for Player {
    type Storage = NullStorage<Self>;
}
// ...his movement.
// ...his movement.
struct MovementSystem;
impl<'s> System<'s> for MovementSystem {
    type SystemData = (
        ReadStorage<'s, Player>,
        WriteStorage<'s, Transform>,
        Read<'s, InputHandler<String, String>>
    );
    /// Moves every `Player` entity according to the `entity_x`/`entity_y`
    /// input axes, 5 units per axis unit each frame.
    ///
    /// BUGFIX: `axis_value` returns an `Option`, and the previous `unwrap()`
    /// panicked whenever the axes were not bound in the input config.
    /// Missing axes now simply mean no movement.
    fn run(&mut self, (players, mut transforms, input): Self::SystemData) {
        let x_move = input.axis_value("entity_x").unwrap_or(0.0);
        let y_move = input.axis_value("entity_y").unwrap_or(0.0);
        for (_, transform) in (&players, &mut transforms).join() {
            transform.translate_x(x_move as f32 * 5.0);
            transform.translate_y(y_move as f32 * 5.0);
        }
    }
}
/// Loads a sprite sheet: the texture from `png_path` plus the sprite layout
/// described by the RON file at `ron_path`. Returns a handle to the
/// (asynchronously loading) `SpriteSheet` asset.
fn load_sprite_sheet(world: &World, png_path: &str, ron_path: &str) -> SpriteSheetHandle {
    let texture_handle = {
        // Scoped block so the resource borrows are released before the
        // second `read_resource::<Loader>()` below.
        let loader = world.read_resource::<Loader>();
        let texture_storage = world.read_resource::<AssetStorage<Texture>>();
        loader.load(
            png_path,
            PngFormat,
            TextureMetadata::srgb_scale(),
            (),
            &texture_storage,
        )
    };
    let loader = world.read_resource::<Loader>();
    let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>();
    loader.load(
        ron_path,
        SpriteSheetFormat,
        texture_handle,
        (),
        &sprite_sheet_store,
    )
}
// Creating player.
/// Spawns the player entity at the origin, rendered with sprite #1 from the
/// given sheet, and returns the entity so a camera can be parented to it.
fn init_player(world: &mut World, sprite_sheet: &SpriteSheetHandle) -> Entity {
    // Player starts at the world origin.
    let mut transform = Transform::default();
    transform.set_x(0.0);
    transform.set_y(0.0);
    let sprite = SpriteRender {
        sprite_sheet: sprite_sheet.clone(),
        sprite_number: 1,
    };
    world
        .create_entity()
        .with(transform)
        .with(Player)
        .with(sprite)
        // Participates in sprite visibility sorting (see RenderBundle setup).
        .with(Transparent)
        .build()
}
// Camera.
/// Spawns an orthographic camera sized to the current screen and parented to
/// `parent` (the player), so it follows the player's transform.
pub fn initialize_camera(world: &mut World, parent: Entity) {
    let mut transform = Transform::default();
    // Offset up-left of the parent and in front of the sprites (z = 2.0).
    transform.set_xyz(-16.0, -16.0, 2.0);
    let (width, height) = {
        let dim = world.read_resource::<ScreenDimensions>();
        (dim.width(), dim.height())
    };
    world
        .create_entity()
        .with(Camera::from(Projection::orthographic(
            0.0, width, 0.0, height
        )))
        .with(Parent { entity: parent })
        .with(transform)
        .build();
}
// Actually the gameplay struct with data.
#[derive(Debug)]
pub struct TiledGame;
impl SimpleState for TiledGame {
    /// Sets up the scene: the tile map, the player sprite, and a camera
    /// parented to the player so it tracks movement.
    fn on_start(&mut self, data: StateData<GameData>) {
        let world = data.world;
        // Loading map.
        load_tmx_map(world,
            "resources/tiled/tilesheet.png",
            "resources/tiled/untitled.tmx",
        );
        // Loading character.
        let circle_sprite_sheet_handle = load_sprite_sheet(world,
            "resources/sprites/example_sprite.png",
            "resources/sprites/example_sprite.ron"
        );
        // Loading camera (parented to the player entity).
        let parent = init_player(world, &circle_sprite_sheet_handle);
        initialize_camera(world, parent);
    }
}
/// Builds the render pipeline, input/transform bundles, and runs the game.
fn main() -> amethyst::Result<()> {
    amethyst::start_logger(Default::default());
    use amethyst::utils::application_root_dir;
    // Display settings live in resources/display_config.ron.
    let conf_dir = format!(
        "{}/resources/display_config.ron",
        application_root_dir()
    );
    let config = DisplayConfig::load(&conf_dir);
    // Rendering code.
    let pipe = Pipeline::build()
        .with_stage(
            Stage::with_backbuffer()
            // Background.
            .clear_target(BLACK, -1.0)
            .with_pass(DrawFlat2D::new()
                .with_transparency(
                    ColorMask::all(),
                    ALPHA,
                    Some(DepthMode::LessEqualWrite))), // Tells the pipeline to respect sprite z-depth.
        );
    let game_data = GameDataBuilder::default()
        .with_bundle(
            TransformBundle::new())?
        .with_bundle(
            InputBundle::<String, String>::new()
                .with_bindings_from_file("resources/input.ron")?,
        )?
        .with(MovementSystem, "movement", &[])
        .with_bundle(
            RenderBundle::new(pipe, Some(config))
                .with_sprite_sheet_processor()
                .with_sprite_visibility_sorting(&[]), // Let's us use the `Transparent` component.
        )?;
    let mut game = Application::build("./", TiledGame)?
        .build(game_data)?;
    game.run();
    Ok(())
}
extern crate duktape_sys;
#[macro_use]
extern crate error_chain;
extern crate typemap;
#[macro_use]
extern crate bitflags;
#[cfg(feature = "value")]
extern crate value;
mod callable;
pub mod class;
mod context;
pub mod error;
mod macros;
mod privates;
pub mod types;
pub use self::callable::Callable;
pub use self::context::*;
pub use self::macros::*;
pub use self::typemap::Key;
/// Convenience re-exports: `use …::prelude::*;` pulls in the commonly used
/// types, with `Duk`-prefixed aliases for the error types to avoid clashes.
pub mod prelude {
    pub use super::callable::Callable;
    pub use super::class;
    pub use super::context::*;
    pub use super::error::Error as DukError;
    pub use super::error::ErrorKind as DukErrorKind;
    pub use super::error::Result as DukResult;
    pub use super::types::*;
    pub use super::macros::*;
}
#[cfg(test)]
mod tests {
    /// Smoke test: confirms the crate's test harness compiles and runs.
    #[test]
    fn it_works() {
        let total = 2 + 2;
        assert_eq!(total, 4);
    }
}
|
use yew::prelude::*;
/// Messages driving the todo app's update loop.
enum Msg {
    /// The text input's contents changed.
    Update(String),
    /// Commit the current input text as a new todo.
    AddTodo,
    /// Remove the todo at the given list index.
    DeleteTodo(usize),
}
/// A single todo entry.
struct Todo {
    // NOTE(review): `completed` is set to false on creation but never toggled
    // or rendered in the visible code — confirm whether it is still needed.
    completed: bool,
    text: String,
}
struct App {
    // `ComponentLink` is like a reference to a component.
    // It can be used to send messages to the component
    link: ComponentLink<Self>,
    // Current contents of the text input box.
    value: String,
    todos: Vec<Todo>,
}
impl Component for App {
    type Message = Msg;
    type Properties = ();
    fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self {
        // Start with an empty input box and no todos.
        Self {
            link,
            value: String::new(),
            todos: Vec::new(),
        }
    }
    /// Handles one message; returns `true` when the view must re-render.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::Update(val) => {
                self.value = val;
                // the value has changed so we need to
                // re-render for it to appear on the page
                true
            }
            Msg::AddTodo => {
                // New todos start uncompleted; the input box is then cleared.
                self.todos.push(Todo {
                    completed: false,
                    text: self.value.to_string(),
                });
                self.value = String::new();
                true
            }
            Msg::DeleteTodo(id) => {
                // `id` is the enumeration index captured at render time.
                self.todos.remove(id);
                true
            }
        }
    }
    fn change(&mut self, _props: Self::Properties) -> ShouldRender {
        // Should only return "true" if new properties are different to
        // previously received properties.
        // This component has no properties so we will always return "false".
        false
    }
    /// Renders the input row plus one table row per todo.
    fn view(&self) -> Html {
        html! {
            <div>
                <div>
                    <h1>{"Rustodo"}</h1>
                    <input
                        type="text"
                        placeholder="Galvanize rust."
                        value=self.value.to_string()
                        oninput=self.link.callback(|e: InputData|{
                            Msg::Update(e.value)
                        })
                        onkeypress=self.link.batch_callback(|e: KeyboardEvent|{
                            if e.key() == "Enter" {
                                Some(Msg::AddTodo)
                            } else {None}
                        })
                    />
                    <button onclick=self.link.callback(|_| Msg::AddTodo)>{"Add Todo"}</button>
                </div>
                <div>
                    <table>
                        <thead>
                            <tr>
                                <th width="20"><input type="checkbox" /></th>
                                <th>{"Todo"}</th>
                                <th>{"Option"}</th>
                            </tr>
                        </thead>
                        <tbody>
                            {
                                for self.todos.iter().enumerate().map(|todo| self.view_todo(todo))
                            }
                        </tbody>
                    </table>
                </div>
            </div>
        }
    }
}
impl App {
    /// Renders one `<tr>` for the todo at index `id`; the Delete button's
    /// closure captures `id` so clicking it removes exactly this row.
    fn view_todo(&self, (id, todo): (usize, &Todo)) -> Html {
        html! {
            <tr>
                <td><input type="checkbox" /></td>
                <td>{todo.text.as_str()}</td>
                <td>
                    <button onclick=self.link.callback(move |_| Msg::DeleteTodo(id))>{"Delete"}</button>
                </td>
            </tr>
        }
    }
}
// Mount the `App` component onto the document body.
fn main() {
    yew::start_app::<App>();
}
|
/// Returns `true` when every `(`, `[` and `{` in `string` is closed by its
/// matching bracket in the correct (LIFO) order; all other characters are
/// ignored.
pub fn brackets_are_balanced(string: &str) -> bool {
    let mut stack = Vec::new();
    for c in string.chars() {
        // For closers, determine which opener must be on top of the stack.
        let expected_open = match c {
            '(' | '[' | '{' => {
                stack.push(c);
                continue;
            }
            ')' => '(',
            ']' => '[',
            '}' => '{',
            _ => continue,
        };
        if stack.pop() != Some(expected_open) {
            return false;
        }
    }
    // Any leftover opener means an unclosed bracket.
    stack.is_empty()
}
|
#[doc = "Reader of register CR1"]
pub type R = crate::R<u32, super::CR1>;
#[doc = "Writer for register CR1"]
pub type W = crate::W<u32, super::CR1>;
// svd2rust-style reset: every CR1 field defaults to 0 after reset.
#[doc = "Register CR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::CR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `REG_ENABLE`"]
pub type REG_ENABLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `REG_ENABLE`"]
pub struct REG_ENABLE_W<'a> {
    w: &'a mut W,
}
// Generated write proxy for CR1 bit 0 (mask 0x01, no shift).
impl<'a> REG_ENABLE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then OR in the masked new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `ENABLE_LOCK`"]
pub type ENABLE_LOCK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ENABLE_LOCK`"]
pub struct ENABLE_LOCK_W<'a> {
    w: &'a mut W,
}
// Generated write proxy for CR1 bit 1 (mask 0x01 << 1).
impl<'a> ENABLE_LOCK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 1, then OR in the masked new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `BUS_ACTIVITY`"]
pub type BUS_ACTIVITY_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BUS_ACTIVITY`"]
pub struct BUS_ACTIVITY_W<'a> {
    w: &'a mut W,
}
// Generated write proxy for CR1 bit 2 (mask 0x01 << 2).
impl<'a> BUS_ACTIVITY_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 2, then OR in the masked new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `RSVD_3`"]
pub type RSVD_3_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RSVD_3`"]
pub struct RSVD_3_W<'a> {
    w: &'a mut W,
}
// Generated write proxy for CR1 bit 3 (reserved; mask 0x01 << 3).
impl<'a> RSVD_3_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 3, then OR in the masked new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
// Field accessors for reads of CR1: each extracts its bit into a typed reader.
impl R {
    #[doc = "Bit 0 - This bit controls the operation of the internal USB regulator. For applications with supply voltages in the 5V range this bit is set high to enable the internal regulator. For device supply voltage in the 3.3V range this bit is cleared to connect the transceiver directly to the supply."]
    #[inline(always)]
    pub fn reg_enable(&self) -> REG_ENABLE_R {
        REG_ENABLE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - This bit is set to turn on the automatic frequency locking of the internal oscillator to USB traffic. Unless an external clock is being provided this bit should remain set for proper USB operation."]
    #[inline(always)]
    pub fn enable_lock(&self) -> ENABLE_LOCK_R {
        ENABLE_LOCK_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - The Bus Activity bit is a stickybit that detects any non-idle USB event that has occurred on the USB bus. Once set to High by the SIE to indicate the bus activity this bit retains its logical High value until firmware clears it."]
    #[inline(always)]
    pub fn bus_activity(&self) -> BUS_ACTIVITY_R {
        BUS_ACTIVITY_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - N/A"]
    #[inline(always)]
    pub fn rsvd_3(&self) -> RSVD_3_R {
        RSVD_3_R::new(((self.bits >> 3) & 0x01) != 0)
    }
}
// Field accessors for writes to CR1: each returns the field's write proxy.
impl W {
    #[doc = "Bit 0 - This bit controls the operation of the internal USB regulator. For applications with supply voltages in the 5V range this bit is set high to enable the internal regulator. For device supply voltage in the 3.3V range this bit is cleared to connect the transceiver directly to the supply."]
    #[inline(always)]
    pub fn reg_enable(&mut self) -> REG_ENABLE_W {
        REG_ENABLE_W { w: self }
    }
    #[doc = "Bit 1 - This bit is set to turn on the automatic frequency locking of the internal oscillator to USB traffic. Unless an external clock is being provided this bit should remain set for proper USB operation."]
    #[inline(always)]
    pub fn enable_lock(&mut self) -> ENABLE_LOCK_W {
        ENABLE_LOCK_W { w: self }
    }
    #[doc = "Bit 2 - The Bus Activity bit is a stickybit that detects any non-idle USB event that has occurred on the USB bus. Once set to High by the SIE to indicate the bus activity this bit retains its logical High value until firmware clears it."]
    #[inline(always)]
    pub fn bus_activity(&mut self) -> BUS_ACTIVITY_W {
        BUS_ACTIVITY_W { w: self }
    }
    #[doc = "Bit 3 - N/A"]
    #[inline(always)]
    pub fn rsvd_3(&mut self) -> RSVD_3_W {
        RSVD_3_W { w: self }
    }
}
|
use super::ffi::LIBC;
/// mmap API business logic.
use std::os::raw::{c_int, c_void};
/// Need to use pattern here: https://stackoverflow.com/a/37608197/6214034
pub trait MmapAPI {
    /// Call if we're not reentrant.
    fn call_if_tracking<F: FnMut()>(&self, f: F);
    /// Implement removal of tracking metadata for the given mapping.
    fn remove_mmap(&self, addr: usize, len: usize);
    /// Return whether C module is initialized.
    fn is_initialized(&self) -> bool;
}
/// # Safety
/// Only call with pointers from mmap()!
pub unsafe fn munmap_wrapper<A: MmapAPI>(addr: *mut c_void, len: usize, api: &A) -> c_int {
    if !api.is_initialized() {
        // Tracking is not set up yet: bypass it and go straight to the OS.
        #[cfg(target_os = "macos")]
        {
            return unsafe { libc::munmap(addr, len) };
        };
        #[cfg(target_os = "linux")]
        {
            // Raw syscall rather than the libc wrapper on Linux.
            return unsafe { libc::syscall(libc::SYS_munmap, addr, len) } as c_int;
        }
    }
    api.call_if_tracking(|| {
        api.remove_mmap(addr as usize, len);
    });
    // If munmap() fails the above removal is wrong, but that's highly unlikely
    // to happen, and we want to prevent a threading race condition so need to
    // remove tracking metadata first.
    unsafe { (LIBC.munmap)(addr, len) }
}
#[cfg(test)]
mod tests {
    use super::{munmap_wrapper, MmapAPI};
    use crate::ffi::LIBC;
    /// Fake API that records the (addr, len) handed to `remove_mmap` and
    /// asserts the mapping is still live when tracking removal happens.
    struct TrackMunmap {
        tracking_removed: std::cell::Cell<(usize, usize)>,
    }
    impl MmapAPI for TrackMunmap {
        fn is_initialized(&self) -> bool {
            true
        }
        fn call_if_tracking<F: FnMut()>(&self, mut f: F) {
            f()
        }
        fn remove_mmap(&self, addr: usize, len: usize) {
            // The map should still exist at this point!
            assert!(exists_in_maps(addr, len));
            self.tracking_removed.set((addr, len));
        }
    }
    // Return whether given mmap() exists for this process.
    fn exists_in_maps(addr: usize, len: usize) -> bool {
        for map in proc_maps::get_process_maps(std::process::id() as proc_maps::Pid).unwrap() {
            if map.start() == addr && map.size() >= len {
                return true;
            }
        }
        false
    }
    // Removing tracking metadata after munmap() can lead to race conditions if
    // another thread mmap()s the same address.
    #[test]
    fn munmap_happens_after_metadata_removal() {
        let size = 3072;
        // Create a real anonymous read-only mapping to unmap.
        let addr = unsafe {
            (LIBC.mmap)(
                std::ptr::null_mut(),
                size,
                libc::PROT_READ,
                libc::MAP_PRIVATE | libc::MAP_ANONYMOUS,
                -1,
                0,
            )
        };
        assert!(exists_in_maps(addr as usize, size));
        let fake_api = TrackMunmap {
            tracking_removed: std::cell::Cell::new((0, 0)),
        };
        unsafe { munmap_wrapper(addr, size, &fake_api) };
        // remove_mmap ran (and saw the live mapping) before the real munmap.
        assert_eq!(fake_api.tracking_removed.get(), (addr as usize, size));
        assert!(!exists_in_maps(addr as usize, size));
    }
}
|
use git2::{Branch, Commit};
/// A set of extension methods for the `Branch` type in git2.
pub trait BranchExt {
    /// Gets the `Commit` at the tip of this `Branch`.
    /// Returns `None` when the branch reference cannot be peeled to a commit.
    fn tip_commit(&self) -> Option<Commit>;
}
impl<'repo> BranchExt for Branch<'repo> {
    fn tip_commit(&self) -> Option<Commit> {
        // Peel the branch's underlying reference down to its commit,
        // discarding the error detail.
        self.get().peel_to_commit().ok()
    }
}
use crate::{Call, Event, Runtime};
// Wire the sudo pallet into the runtime: it dispatches the runtime's `Call`
// type and emits into the shared `Event` type.
impl pallet_sudo::Trait for Runtime {
    type Event = Event;
    type Call = Call;
}
|
use serde_derive::Serialize;
use std::fmt;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use rayon::prelude::*;
/// Structure representing the results of a file analysis.
/// A field is `None` when that count was not requested in `AnalysisOptions`.
#[derive(Serialize)]
pub struct FileStats {
    /// Number of lines in the file. Based on \n and \r\n
    pub lines: Option<usize>,
    /// Number of words in the file. Based on the Unicode Derived Core Property White_Space
    pub words: Option<usize>,
    /// Number of characters in the file. Based on the Unicode Scalar Value
    pub chars: Option<usize>,
}
impl fmt::Display for FileStats {
    /// Writes each requested count followed by a single space, skipping
    /// counts that were not computed.
    ///
    /// NOTE: the trailing space is intentional — `NamedOutput`'s Display
    /// appends the filename directly after the stats.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Report in the conventional wc order: lines, words, chars.
        for count in [self.lines, self.words, self.chars].iter() {
            if let Some(n) = count {
                write!(f, "{} ", n)?;
            }
        }
        Ok(())
    }
}
/// Which counts to compute; counts that are disabled here come back as
/// `None` in `FileStats`.
#[derive(Copy, Clone, Debug)]
pub struct AnalysisOptions {
    pub lines: bool,
    pub words: bool,
    pub chars: bool,
}
/// Per-file outcome: either the computed stats or the error that prevented
/// them. Serialized untagged, so JSON output is just the variant's fields.
#[derive(Serialize)]
#[serde(untagged)]
pub enum NamedOutput {
    Success { filename: String, stats: FileStats },
    Error { filename: String, error: String },
}
/// Display as String for CLI use
impl fmt::Display for NamedOutput {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // NOTE: intentional spacing here, as stats leave a trailing space
            NamedOutput::Success { filename, stats } => write!(f, "{}{}", stats, filename),
            NamedOutput::Error { filename, error } => write!(f, "{} {}", error, filename),
        }
    }
}
/// Analyse a single string, returning a `FileStats` whose fields are `Some`
/// only for the counts enabled in `options`.
pub fn analyse_string(contents: &str, options: AnalysisOptions) -> FileStats {
    // Run `count` only when the corresponding option is enabled.
    fn count_if(enabled: bool, count: impl FnOnce() -> usize) -> Option<usize> {
        if enabled {
            Some(count())
        } else {
            None
        }
    }
    FileStats {
        lines: count_if(options.lines, || contents.lines().count()),
        words: count_if(options.words, || contents.split_whitespace().count()),
        chars: count_if(options.chars, || contents.chars().count()),
    }
}
/// Runs a file analysis on the given filename path.
/// Returns a NamedOutput structure, with the filename and
/// either results or error
pub fn analyse_file(filename: &str, options: AnalysisOptions) -> NamedOutput {
    // Open failures (missing file, permissions) become an Error output
    // instead of aborting the whole run.
    let file = match File::open(filename) {
        Err(e) => {
            return NamedOutput::Error {
                filename: filename.to_string(),
                error: e.to_string(),
            }
        }
        Ok(f) => f,
    };
    let mut buf_reader = BufReader::new(file);
    let mut contents = String::new();
    // Read failures (e.g. non-UTF-8 content) are likewise reported per-file.
    match buf_reader.read_to_string(&mut contents) {
        Ok(_bytes) => NamedOutput::Success {
            filename: filename.to_string(),
            stats: analyse_string(&contents, options),
        },
        Err(e) => NamedOutput::Error {
            filename: filename.to_string(),
            error: e.to_string(),
        },
    }
}
/// Runs a file analysis on each of the given file paths, in parallel via
/// rayon's `par_iter`.
/// Returns one `NamedOutput` per input path, with the filename and
/// either results or error.
pub fn analyse_files(filenames: &[&str], options: AnalysisOptions) -> Vec<NamedOutput> {
    filenames
        .par_iter()
        .map(|filename| analyse_file(filename, options))
        .collect()
}
|
#![feature(test)]
#[macro_use]
extern crate maplit;
extern crate test;
extern crate kite;
use test::Bencher;
use kite::term::Term;
use kite::token::Token;
use kite::schema::{FieldType, FIELD_INDEXED};
use kite::document::Document;
use kite::store::{IndexStore, IndexReader};
use kite::store::memory::{MemoryIndexStore, MemoryIndexStoreReader};
/// Benchmarks inserting documents with 5000 tokens each into an in-memory
/// index store.
#[bench]
fn bench_insert_document(b: &mut Bencher) {
    let mut store = MemoryIndexStore::new();
    // Single indexed text field that every document populates.
    let body_field = store.add_field("body".to_string(), FieldType::Text, FIELD_INDEXED).unwrap();
    // Pre-build the token list once; each iteration clones it.
    let mut tokens = Vec::new();
    for t in 0..5000 {
        tokens.push(Token {
            term: Term::from_string(t),
            position: t
        });
    }
    // Distinct key per iteration so every call inserts a fresh document.
    let mut i = 0;
    b.iter(|| {
        i += 1;
        store.insert_or_update_document(Document {
            key: i.to_string(),
            indexed_fields: hashmap! {
                body_field => tokens.clone()
            },
            stored_fields: hashmap! {},
        });
    });
}
|
use parser::{Expression};
use instructions::Instructions;
use memory::MemoryLayout;
use super::errors::Error;
/// Emits the instructions that output the value of `expr`.
///
/// String literals are written from a temporary cell; identifiers are
/// written from their allocated cells. In both cases the data pointer is
/// moved back to where it started.
pub fn output_expr(
    instructions: &mut Instructions,
    mem: &mut MemoryLayout,
    expr: Expression
) -> Result<(), Error> {
    match expr {
        Expression::StringLiteral(text) => {
            // Use the next free cell as scratch space for the write.
            let cell = mem.next_available_cell();
            instructions.move_right_by(cell);
            write_string_literal(instructions, text.as_bytes());
            instructions.move_left_by(cell);
            Ok(())
        },
        Expression::Identifier(ident) => {
            // Outputting an undeclared identifier is a compile error.
            let (position, size) = mem.get_cell_contents(&ident).ok_or_else(|| {
                Error::UndeclaredIdentifier {name: ident}
            })?;
            instructions.move_right_by(position);
            instructions.write_consecutive(size);
            instructions.move_left_by(position);
            Ok(())
        },
    }
}
/// Emits instructions that print `bytes` one character at a time from a
/// single scratch cell, adjusting the cell relative to the previous
/// character and resetting it to zero afterwards.
fn write_string_literal(instructions: &mut Instructions, bytes: &[u8]) {
    // Writing string literals are special because you don't necessarily
    // need to store the string literal in any location outside of what is necessary
    // for the write. The memory is to be allocated briefly, then freed.
    // Because of this, we don't split allocation and writing into separate steps.
    // We keep this special routine specifically designed to write string literals
    let mut last_char: u8 = 0;
    for ch in bytes {
        let ch = *ch;
        // Only emit the delta from the previous character's value.
        instructions.increment_relative(last_char, ch);
        instructions.write();
        last_char = ch;
    }
    // always reset this cell because we don't need it anymore
    instructions.increment_relative(last_char, 0);
}
|
use crate::common::Type;
/// Extracts the innermost type name from a (possibly wrapped) type.
pub trait Name<'a> {
    /// Like [`name`](Name::name) but unwraps the result.
    ///
    /// Panics when `name` returns `None` — note the default `name`
    /// implementation below always returns `None`.
    fn as_name(&self) -> &'a str {
        self.name().unwrap()
    }
    /// The innermost name, if any; defaults to `None`.
    fn name(&self) -> Option<&'a str> {
        None
    }
}
impl<'a> Name<'a> for Type<'a> {
    fn name(&self) -> Option<&'a str> {
        // Recurse through list/non-null wrappers down to the named type.
        match self {
            Type::NamedType(t) => Some(*t),
            Type::ListType(t) => t.name(),
            Type::NonNullType(t) => t.name(),
        }
    }
}
|
use crate::problem::{AccessTokenProblemCategory::*, Problem};
use crate::models::UserId;
use astroplant_auth::token;
use warp::{Filter, Rejection};
/// A filter to authenticate a user through a normal token in the Authorization header.
/// If there is no Authorization header, returns None.
///
/// Rejects the request if the Authorization header is malformed.
pub fn option_by_token() -> impl Filter<Extract = (Option<UserId>,), Error = Rejection> + Copy {
    warp::header("Authorization")
        .map(|a| Some(a))
        // A missing header is not an error for this filter: recover to None.
        .or_else(|_| futures::future::ok::<_, Rejection>((None,)))
        .and_then(|authorization: Option<String>| {
            async {
                if let Some(authorization) = authorization {
                    // Expect exactly "Bearer <token>".
                    // NOTE(review): split(" ") only handles a single space;
                    // extra whitespace makes the header count as malformed —
                    // confirm that is intended.
                    let parts: Vec<_> = authorization.split(" ").collect();
                    if parts.len() != 2 {
                        return Err(warp::reject::custom(Problem::AuthorizationHeader {
                            category: Malformed,
                        }));
                    }
                    if parts[0] != "Bearer" {
                        return Err(warp::reject::custom(Problem::AuthorizationHeader {
                            category: Malformed,
                        }));
                    }
                    let token_signer: &token::TokenSigner = crate::TOKEN_SIGNER.get().unwrap();
                    let access_token = parts[1];
                    // Expired tokens are reported distinctly from malformed ones.
                    let authentication_state: token::AuthenticationState =
                        match token_signer.decode_access_token(&access_token) {
                            Ok(authentication_state) => authentication_state,
                            Err(token::Error::Expired) => {
                                return Err(warp::reject::custom(Problem::AuthorizationHeader {
                                    category: Expired,
                                }))
                            }
                            Err(_) => {
                                return Err(warp::reject::custom(Problem::AuthorizationHeader {
                                    category: Malformed,
                                }))
                            }
                        };
                    trace!("User authenticated with state {:?}", authentication_state);
                    Ok(Some(UserId(authentication_state.user_id)))
                } else {
                    Ok(None)
                }
            }
        })
}
/// A filter to authenticate a user through a normal token in the
/// Authorization header (the previous doc comment incorrectly said
/// "Accept header").
/// Rejects the request if the Authorization header is missing or malformed.
pub fn by_token() -> impl Filter<Extract = (UserId,), Error = Rejection> + Copy {
    option_by_token().and_then(|user: Option<UserId>| {
        // `ok_or_else` so the rejection value is only constructed on the
        // missing-header path instead of on every request.
        futures::future::ready(user.ok_or_else(|| {
            warp::reject::custom(Problem::AuthorizationHeader { category: Missing })
        }))
    })
}
|
use quick_renderer::egui;
/// Types that can render themselves into an egui `Ui`.
pub trait DrawUI {
    /// Extra context the type needs while drawing.
    type ExtraData;
    /// Draws a read-only view of `self`.
    fn draw_ui(&self, extra_data: &Self::ExtraData, ui: &mut egui::Ui);
    /// Draws an editable view that may mutate `self`.
    fn draw_ui_edit(&mut self, extra_data: &Self::ExtraData, ui: &mut egui::Ui);
}
|
use crate::numeric::Numeric;
/// Numeric types that additionally support taking a square root.
pub trait Float: Numeric {
    /// Returns the square root of `self`.
    fn f_sqrt(self) -> Self;
}
impl Float for f32 {
    fn f_sqrt(self) -> f32 {
        // Delegates to the inherent std sqrt.
        self.sqrt()
    }
}
impl Float for f64 {
    fn f_sqrt(self) -> f64 {
        // Delegates to the inherent std sqrt.
        self.sqrt()
    }
}
|
// Entry point: run both tutorial modules in turn.
fn main() {
    messages::main();
    ip::main()
}
mod ip {
    // Demonstrates modelling an IP address first as a struct holding an enum
    // discriminant, then more concisely as an enum carrying payload data.
    pub fn main() {
        let home = IpAddr {
            kind: IpAddrKind::V4,
            address: String::from("127.0.0.1"),
        };
        let loopback = IpAddr {
            kind: IpAddrKind::V6,
            address: String::from("::1"),
        };
    }
    enum IpAddrKind {
        V4,
        V6,
    }
    // Either variant of IpAddrKind can be passed to the same function.
    fn route(ip_kind: IpAddrKind) {}
    fn values() {
        let four = IpAddrKind::V4;
        let six = IpAddrKind::V6;
    }
    struct IpAddr {
        kind: IpAddrKind,
        address: String,
    }
    // We can represent the same concept in a more concise way
    // using just an enum, rather than an enum inside a struct,
    // by putting data directly into each enum variant
    enum IpAddr2 {
        V4(u8, u8, u8, u8),
        V6(String),
    }
    fn consice() {
        let home = IpAddr2::V4(127, 0, 0, 1);
        let loopback = IpAddr2::V6(String::from("::1"));
    }
}
mod messages {
    // Demonstrates enums whose variants carry differently shaped payloads.
    pub fn main() {
        let x: Message = Message::Move { x: 3, y: 4 };
        let y: BoardGameTurn = BoardGameTurn::Move { squares: 1 };
    }
    #[derive(Debug)]
    enum Message {
        Quit,
        ChangeColor(i32, i32, i32),
        Move { x: i32, y: i32 },
        Write(String),
    }
    // Variant names are namespaced per enum, so two enums may both have `Move`.
    enum BoardGameTurn {
        Move { squares: i32 },
        Pass,
    }
    fn constructor() {
        let m = Message::Write("Hello, world".to_string());
        // is the same as
        fn make_message(x: String) -> Message {
            Message::Write(x)
        }
        let x = make_message("Hello, world".to_string());
    }
    fn map_to() {
        // convert a vector of Strings into a vector of Message::Writes:
        let v = vec![
            "Hello".to_string(),
            "World".to_string()
        ];
        // A tuple-variant constructor is a plain function, so it can be
        // passed directly to `map`.
        let v1: Vec<Message> = v
            .into_iter()
            .map(Message::Write)
            .collect();
        let val = &v1[0];
        println!("{:#?}", v1);
        // [
        //     Write("Hello"),
        //     Write("World"),
        // ]
    }
}
|
use crate::{
bytecode::{ByteCode, OpCode},
{OpArgBuf, WORD_SIZE},
};
/// Stack-machine state: the operand `stack`, a base pointer, and the parser
/// that feeds instructions from the bytecode.
#[derive(Debug)]
pub struct Vm<'a> {
    stack: Vec<usize>,
    // NOTE(review): `bp` is initialized to 0 but never read in the visible
    // code — confirm whether it is still needed.
    bp: usize,
    parser: VmParser<'a>,
}
#[derive(Debug)]
struct VmParser<'a> {
    // invariant: self.bytecode.data[self.next_code_offset] always contains the u8 repr of an opcode
    next_code_offset: usize,
    bytecode: ByteCode<'a>, // immutable. from ByteCodeBuf
    res_buf: VmParserResult, // contents reflect most recent parse
}
/// Result slot for the most recent `parse_next`: `code` is `None` once the
/// bytecode is exhausted; `args` holds the opcode's word arguments.
#[derive(Debug, Default)]
struct VmParserResult {
    code: Option<OpCode>,
    args: OpArgBuf,
}
///////////////////////////////////////////
impl<'a> VmParser<'a> {
    pub fn new(bytecode: ByteCode<'a>) -> Self {
        Self { res_buf: Default::default(), bytecode, next_code_offset: 0 }
    }
    // successfully reads if res.code.is_some() afterwards
    // mutates res.args regardless of success
    pub fn parse_next(&mut self) {
        self.res_buf.code = if self.next_code_offset < self.bytecode.bytes_len() {
            let op = unsafe {
                // safe! relies on my invariant
                self.bytecode.read_op_code_at(self.next_code_offset)
            };
            // Step past the one-byte opcode before reading its arguments.
            self.next_code_offset += 1;
            println!("got op {:?} with {} words", op, op.word_args());
            let num_words = op.word_args();
            unsafe {
                // safe! relies on my invariant
                self.bytecode
                    .read_words_into(self.next_code_offset, &mut self.res_buf.args[0..num_words])
            };
            // Skip over the argument words to land on the next opcode.
            self.next_code_offset += WORD_SIZE * num_words;
            Some(op)
        } else {
            // End of bytecode: None signals exhaustion to the caller.
            None
        };
    }
}
impl<'a> Vm<'a> {
    /// Creates a VM over `bytecode` and runs it until the parser is exhausted.
    pub fn new_run(bytecode: ByteCode<'a>) {
        let mut vm = Self { stack: Default::default(), bp: 0, parser: VmParser::new(bytecode) };
        loop {
            vm.parser.parse_next();
            if !vm.take_do_parsed() {
                break;
            }
        }
    }
    /// Executes the most recently parsed instruction, if any.
    ///
    /// Returns `false` when the parser had no instruction (end of bytecode),
    /// `true` otherwise. Stack underflow or an out-of-range Load/Store index
    /// panics (these indicate malformed bytecode).
    pub fn take_do_parsed(&mut self) -> bool {
        if let Some(op_code) = self.parser.res_buf.code.take() {
            use OpCode as Oc;
            println!("VM handling {:?} with args {:?}", op_code, &self.parser.res_buf.args);
            let args = &self.parser.res_buf.args;
            match op_code {
                Oc::PushConst => self.stack.push(args[0]),
                Oc::TosDown => drop(self.stack.pop().unwrap()),
                Oc::Load => self.stack.push(*self.stack.get(args[0]).unwrap()),
                Oc::Store => *self.stack.get_mut(args[0]).unwrap() = self.stack.pop().unwrap(),
                // Jumps redirect the parser's cursor to an absolute offset.
                Oc::JmpTo => self.parser.next_code_offset = args[0],
                Oc::DecStack => *self.stack.last_mut().unwrap() -= 1,
                Oc::IfNzJmp => {
                    if self.stack.pop().unwrap() != 0 {
                        self.parser.next_code_offset = args[0]
                    }
                }
                Oc::WrapAddStack => {
                    // BUGFIX: the opcode promises wrapping addition, but
                    // `a + b` panics on overflow in debug builds — use
                    // `wrapping_add` so overflow wraps on all build profiles.
                    let [a, b] = [self.stack.pop().unwrap(), self.stack.pop().unwrap()];
                    self.stack.push(a.wrapping_add(b))
                }
                Oc::SysOut => {
                    print!(" >> `{}`\n", self.stack.pop().unwrap() as u8 as char);
                }
            }
            true
        } else {
            false
        }
    }
}
|
use std::env;
use serenity::{
model::{channel::Message, gateway::Ready},
prelude::*,
};
mod lib;
use lib::*;
/// Reads the bot token from the `DISCORD_TOKEN` environment variable, builds
/// the client with our `Handler`, and runs it until it stops or errors.
fn main() {
    let token = env::var("DISCORD_TOKEN")
        .expect("Expected a token in the environment");
    let handler = Handler::new();
    let mut client = Client::new(&token, handler).expect("Error creating client");
    // `start` blocks for the life of the connection; report why it ended.
    if let Err(why) = client.start() {
        eprintln!("Client error: {:?}", why);
    }
}
|
use crate::maze::maze_genotype::PathGene;
use crate::maze::maze_phenotype::MazeCell;
use crate::maze::{Orientation, PathDirection};
/// Re-plays a maze's path genes onto a cell grid to check the encoded path
/// is realisable.
#[derive(Debug, Clone)]
pub struct MazeValidator {
    pub width: u32,
    pub height: u32,
    /// Orientation used for the first segment drawn between waypoints.
    pub first_direction: Orientation,
    /// Indexed `grid[x][y]`: outer vec has `width` columns of `height` cells.
    pub grid: Vec<Vec<MazeCell>>,
}
impl MazeValidator {
    /// Builds a validator of the given dimensions and traces `path_genes`
    /// into the grid. The success/failure of the tracing is discarded here;
    /// use [`MazeValidator::validate_new_path`] when only validity matters.
    ///
    /// Accepts `&[PathGene]` (a `&Vec<PathGene>` still coerces, so existing
    /// callers are unaffected).
    pub fn new(
        width: u32,
        height: u32,
        first_direction: Orientation,
        path_genes: &[PathGene],
    ) -> MazeValidator {
        let mut validator = MazeValidator {
            width,
            height,
            first_direction,
            grid: vec![vec![MazeCell::new(); height as usize]; width as usize],
        };
        validator.add_path(path_genes);
        validator
    }
    /// Returns whether `path_genes` describes a valid (non-self-crossing)
    /// path on a fresh grid of the given dimensions.
    pub fn validate_new_path(
        width: u32,
        height: u32,
        first_direction: Orientation,
        path_genes: &[PathGene],
    ) -> bool {
        let mut validator = MazeValidator {
            width,
            height,
            first_direction,
            grid: vec![vec![MazeCell::new(); height as usize]; width as usize],
        };
        validator.add_path(path_genes)
    }
    /// Borrows the cell at (`x`, `y`). Panics when out of bounds.
    pub fn get_cell_at(&self, x: u32, y: u32) -> &MazeCell {
        &self.grid[x as usize][y as usize]
    }
    /// Marks/unmarks the cell at (`x`, `y`) as a juncture (a turn in the path).
    pub fn update_cell_is_juncture(&mut self, x: u32, y: u32, is_juncture: bool) {
        self.grid[x as usize][y as usize].is_juncture = is_juncture;
    }
    /// Marks/unmarks the cell at (`x`, `y`) as a waypoint.
    pub fn update_cell_is_waypoint(&mut self, x: u32, y: u32, is_waypoint: bool) {
        self.grid[x as usize][y as usize].is_waypoint = is_waypoint;
    }
    /// Records the path direction on the cell at (`x`, `y`).
    pub fn update_cell_path_direction(&mut self, x: u32, y: u32, path_direction: PathDirection) {
        self.grid[x as usize][y as usize].path_direction = path_direction;
    }
    /// Traces the whole path — fixed start (bottom-left) through every gene
    /// to the fixed end (top-right). Returns `false` as soon as any segment
    /// would cross an already-used cell, or when `path_genes` is empty.
    pub fn add_path(&mut self, path_genes: &[PathGene]) -> bool {
        // Guard: the original indexed `path_genes[0]` unconditionally and
        // panicked on an empty genome; an empty genome is simply invalid.
        if path_genes.is_empty() {
            return false;
        }
        let start_position = PathGene::new(0, self.height - 1);
        if !self.add_waypoint(&start_position, &path_genes[0]) {
            return false;
        }
        // Connect each consecutive pair of genes.
        for pair in path_genes.windows(2) {
            if !self.add_waypoint(&pair[0], &pair[1]) {
                return false;
            }
        }
        let end_position = PathGene::new(self.width - 1, 0);
        if !self.add_waypoint(&path_genes[path_genes.len() - 1], &end_position) {
            return false;
        }
        self.update_cell_path_direction(self.width - 1, 0, PathDirection::South);
        true
    }
    /// Connects `current_point` to `target_point` with an L-shaped segment,
    /// ordering the vertical/horizontal legs by `first_direction`, and marks
    /// the corner cell as a juncture when the path actually turns.
    pub fn add_waypoint(&mut self, current_point: &PathGene, target_point: &PathGene) -> bool {
        self.update_cell_is_waypoint(target_point.x, target_point.y, true);
        if self.first_direction == Orientation::Vertical {
            if !self.add_vertical_path_segment(current_point.x, current_point.y, target_point.y) {
                return false;
            }
            if !self.add_horizontal_path_segment(target_point.y, current_point.x, target_point.x) {
                return false;
            }
            if current_point.x != target_point.x && current_point.y != target_point.y {
                self.update_cell_is_juncture(current_point.x, target_point.y, true);
            }
        } else if self.first_direction == Orientation::Horizontal {
            if !self.add_horizontal_path_segment(current_point.y, current_point.x, target_point.x) {
                return false;
            }
            if !self.add_vertical_path_segment(target_point.x, current_point.y, target_point.y) {
                return false;
            }
            if current_point.x != target_point.x && current_point.y != target_point.y {
                self.update_cell_is_juncture(target_point.x, current_point.y, true);
            }
        }
        true
    }
    /// Claims the cells of a vertical segment at column `from_x`, excluding
    /// the destination cell `to_y` (it is handled by the follow-up leg or
    /// waypoint). Fails when any cell already carries a direction.
    pub fn add_vertical_path_segment(&mut self, from_x: u32, from_y: u32, to_y: u32) -> bool {
        if from_y <= to_y {
            // Equivalent to the original `from_y..to_y + 1` with `to_y` skipped.
            for y in from_y..to_y {
                if self.get_cell_at(from_x, y).path_direction != PathDirection::None {
                    return false;
                }
                self.update_cell_path_direction(from_x, y, PathDirection::North);
            }
        } else {
            // Equivalent to the original `to_y..from_y + 1` with `to_y` skipped.
            for y in (to_y + 1)..=from_y {
                if self.get_cell_at(from_x, y).path_direction != PathDirection::None {
                    return false;
                }
                self.update_cell_path_direction(from_x, y, PathDirection::South);
            }
        }
        true
    }
    /// Claims the cells of a horizontal segment at row `from_y`, excluding
    /// the destination cell `to_x`. Fails when any cell already carries a
    /// direction.
    pub fn add_horizontal_path_segment(&mut self, from_y: u32, from_x: u32, to_x: u32) -> bool {
        if from_x <= to_x {
            for x in from_x..to_x {
                if self.get_cell_at(x, from_y).path_direction != PathDirection::None {
                    return false;
                }
                self.update_cell_path_direction(x, from_y, PathDirection::East);
            }
        } else {
            for x in (to_x + 1)..=from_x {
                if self.get_cell_at(x, from_y).path_direction != PathDirection::None {
                    return false;
                }
                self.update_cell_path_direction(x, from_y, PathDirection::West);
            }
        }
        true
    }
}
|
use std::vec::Vec;
use utils::vec3::Vec3;
use utils::ray::Ray;
use utils::material::Material;
use utils::aabb::{AABB, surrounding_box};
/// Records the details of a ray/object intersection.
#[allow(dead_code)]
#[derive(Clone)]
pub struct HitRecord {
    /// Ray parameter `t` at the hit point.
    pub t: f32,
    /// Surface texture coordinate `u`.
    pub u: f32,
    /// Surface texture coordinate `v`.
    pub v: f32,
    /// Hit point in world space.
    pub p: Vec3,
    /// Surface normal at the hit point.
    pub normal: Vec3,
    /// Material of the surface that was hit.
    pub mat: Box<Material>,
}
#[allow(dead_code)]
impl HitRecord {
    /// Builds a zeroed hit record carrying the given material; the geometric
    /// fields are filled in later by `Hitable::hit`.
    pub fn new(m: Box<Material>) -> Self {
        let origin = Vec3::new(0., 0., 0.);
        Self {
            t: 0.,
            u: 0.,
            v: 0.,
            p: origin.clone(),
            normal: origin,
            mat: m,
        }
    }
}
/// Anything a ray can intersect.
pub trait Hitable {
    /// Tests `r` against this object for `t` within `(t_min, t_max)`;
    /// on a hit, fills `rec` and returns `true`.
    fn hit(&self, r: &Ray, t_min: f32, t_max: f32, rec: &mut HitRecord) -> bool;
    /// Writes a bounding box for the time interval `[t0, t1]` into `vox`;
    /// returns `false` when no box is available.
    fn bounding_box(&self, t0: f32, t1: f32, vox: &mut AABB) -> bool;
    /// Clones this object behind a fresh `Box` (object-safe `Clone` stand-in).
    fn box_clone(&self) -> Box<Hitable>;
}
// `Clone` for boxed trait objects, delegating to the object-safe `box_clone`.
impl Clone for Box<Hitable> {
    fn clone(&self) -> Box<Hitable> {
        self.box_clone()
    }
}
/// A collection of hitables that is itself hitable (closest hit wins).
#[allow(dead_code)]
#[derive(Clone)]
pub struct HitableList {
    /// The owned scene objects.
    pub list: Vec<Box<Hitable>>,
}
// SAFETY: declares the list shareable/sendable across threads even though
// `Box<Hitable>` carries no `Send`/`Sync` bound. This is only sound if every
// concrete `Hitable` implementation is itself thread-safe — TODO(review):
// confirm, or add `Send + Sync` bounds to the trait instead.
unsafe impl Sync for HitableList {}
unsafe impl Send for HitableList {}
#[allow(dead_code)]
impl HitableList {
pub fn new(hitable: Vec<Box<Hitable>>) -> Self {
Self { list: hitable }
}
}
impl Hitable for HitableList {
    fn box_clone(&self) -> Box<Hitable> {
        Box::new((*self).clone())
    }
    /// Tests the ray against every member, keeping the closest hit by
    /// shrinking the upper bound `closest_so_far` as hits are found.
    fn hit(&self, r: &Ray, t_min: f32, t_max: f32, rec: &mut HitRecord) -> bool {
        let mut temp_rec = HitRecord::new(rec.mat.clone());
        let mut hit_anything = false;
        let mut closest_so_far = t_max;
        for h in self.list.iter() {
            if h.hit(r, t_min, closest_so_far, &mut temp_rec) {
                hit_anything = true;
                closest_so_far = temp_rec.t;
                *rec = temp_rec.clone();
            }
        }
        hit_anything
    }
    /// Unions the bounding boxes of all members into `vox`; fails if the
    /// list is empty or any member has no bounding box.
    fn bounding_box(&self, t0: f32, t1: f32, vox: &mut AABB) -> bool {
        if self.list.is_empty() {
            return false;
        }
        let mut temp_box = AABB::new(Vec3::new(0., 0., 0.), Vec3::new(0., 0., 0.));
        if !self.list[0].bounding_box(t0, t1, &mut temp_box) {
            return false;
        }
        *vox = temp_box.clone();
        // BUG FIX: the original loop queried `self.list[0]` on every
        // iteration, so the boxes of elements 1.. were never included.
        for h in self.list.iter().skip(1) {
            if h.bounding_box(t0, t1, &mut temp_box) {
                *vox = surrounding_box(vox.clone(), temp_box.clone());
            } else {
                return false;
            }
        }
        true
    }
}
|
// Copyright (c) 2016 Anatoly Ikorsky
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
//! ## mysql-async
//! Tokio based asynchronous MySql client library for rust programming language.
//!
//! ### Installation
//! Library hosted on [crates.io](https://crates.io/crates/mysql_async/).
//!
//! ```toml
//! [dependencies]
//! mysql_async = "<desired version>"
//! ```
//!
//! ### Example
//!
//! ```rust
//! use mysql_async::prelude::*;
//! # use std::env;
//!
//! #[derive(Debug, PartialEq, Eq, Clone)]
//! struct Payment {
//! customer_id: i32,
//! amount: i32,
//! account_name: Option<String>,
//! }
//!
//! #[tokio::main]
//! async fn main() -> Result<(), mysql_async::error::Error> {
//! let payments = vec![
//! Payment { customer_id: 1, amount: 2, account_name: None },
//! Payment { customer_id: 3, amount: 4, account_name: Some("foo".into()) },
//! Payment { customer_id: 5, amount: 6, account_name: None },
//! Payment { customer_id: 7, amount: 8, account_name: None },
//! Payment { customer_id: 9, amount: 10, account_name: Some("bar".into()) },
//! ];
//! let payments_clone = payments.clone();
//!
//! let database_url = /* ... */
//! # if let Ok(url) = env::var("DATABASE_URL") {
//! # let opts = mysql_async::Opts::from_url(&url).expect("DATABASE_URL invalid");
//! # if opts.get_db_name().expect("a database name is required").is_empty() {
//! # panic!("database name is empty");
//! # }
//! # url
//! # } else {
//! # "mysql://root:password@127.0.0.1:3307/mysql".to_string()
//! # };
//!
//! let pool = mysql_async::Pool::new(database_url);
//! let conn = pool.get_conn().await?;
//!
//! // Create temporary table
//! let conn = conn.drop_query(
//! r"CREATE TEMPORARY TABLE payment (
//! customer_id int not null,
//! amount int not null,
//! account_name text
//! )"
//! ).await?;
//!
//! // Save payments
//! let params = payments_clone.into_iter().map(|payment| {
//! params! {
//! "customer_id" => payment.customer_id,
//! "amount" => payment.amount,
//! "account_name" => payment.account_name.clone(),
//! }
//! });
//!
//! let conn = conn.batch_exec(r"INSERT INTO payment (customer_id, amount, account_name)
//! VALUES (:customer_id, :amount, :account_name)", params).await?;
//!
//! // Load payments from database.
//! let result = conn.prep_exec("SELECT customer_id, amount, account_name FROM payment", ()).await?;
//!
//! // Collect payments
//! let (_ /* conn */, loaded_payments) = result.map_and_drop(|row| {
//! let (customer_id, amount, account_name) = mysql_async::from_row(row);
//! Payment {
//! customer_id: customer_id,
//! amount: amount,
//! account_name: account_name,
//! }
//! }).await?;
//!
//! // The destructor of a connection will return it to the pool,
//! // but pool should be disconnected explicitly because it's
//! // an asynchronous procedure.
//! pool.disconnect().await?;
//!
//! assert_eq!(loaded_payments, payments);
//!
//! // the async fn returns Result, so
//! Ok(())
//! }
//! ```
#![recursion_limit = "1024"]
#![cfg_attr(feature = "nightly", feature(test, const_fn))]
#[cfg(feature = "nightly")]
extern crate test;
pub use mysql_common::{chrono, constants as consts, params, time, uuid};
#[macro_use]
mod macros;
mod conn;
mod connection_like;
/// Errors used in this crate
pub mod error;
mod io;
mod local_infile_handler;
mod opts;
mod queryable;
/// Boxed, pinned, sendable future resolving to `Result<T, error::Error>` —
/// the return type used throughout this crate's async API.
pub type BoxFuture<T> = ::std::pin::Pin<
    Box<dyn ::std::future::Future<Output = Result<T, error::Error>> + Send + 'static>,
>;
#[doc(inline)]
pub use self::conn::Conn;
#[doc(inline)]
pub use self::conn::pool::Pool;
#[doc(inline)]
pub use self::queryable::transaction::IsolationLevel;
#[doc(inline)]
pub use self::opts::{
Opts, OptsBuilder, PoolConstraints, PoolOptions, SslOpts, DEFAULT_INACTIVE_CONNECTION_TTL,
DEFAULT_TTL_CHECK_INTERVAL,
};
#[doc(inline)]
pub use self::local_infile_handler::{builtin::WhiteListFsLocalInfileHandler, InfileHandlerFuture};
#[doc(inline)]
pub use mysql_common::packets::Column;
#[doc(inline)]
pub use mysql_common::proto::codec::Compression;
#[doc(inline)]
pub use mysql_common::row::Row;
#[doc(inline)]
pub use mysql_common::params::Params;
#[doc(inline)]
pub use mysql_common::value::Value;
#[doc(inline)]
pub use mysql_common::row::convert::{from_row, from_row_opt, FromRowError};
#[doc(inline)]
pub use mysql_common::value::convert::{from_value, from_value_opt, FromValueError};
#[doc(inline)]
pub use mysql_common::value::json::{Deserialized, Serialized};
#[doc(inline)]
pub use self::queryable::query_result::QueryResult;
#[doc(inline)]
pub use self::queryable::transaction::{Transaction, TransactionOptions};
#[doc(inline)]
pub use self::queryable::{BinaryProtocol, TextProtocol};
#[doc(inline)]
pub use self::queryable::stmt::Stmt;
/// Futures used in this crate (re-exports of the pool's connection futures)
pub mod futures {
    pub use crate::conn::pool::futures::{DisconnectPool, GetConn};
}
/// Traits used in this crate
pub mod prelude {
    #[doc(inline)]
    pub use crate::local_infile_handler::LocalInfileHandler;
    #[doc(inline)]
    pub use crate::queryable::Queryable;
    #[doc(inline)]
    pub use mysql_common::row::convert::FromRow;
    #[doc(inline)]
    pub use mysql_common::value::convert::{ConvIr, FromValue, ToValue};
    /// Everything that behaves like a connection.
    pub trait ConnectionLike: crate::connection_like::ConnectionLike {}
    impl<T: crate::connection_like::ConnectionLike> ConnectionLike for T {}
    /// Trait for protocol markers [`crate::TextProtocol`] and [`crate::BinaryProtocol`].
    pub trait Protocol: crate::queryable::Protocol {}
    impl<T: crate::queryable::Protocol> Protocol for T {}
    pub use mysql_common::params;
}
#[cfg(test)]
mod test_misc {
    use lazy_static::lazy_static;
    use std::env;
    use crate::opts::{Opts, OptsBuilder, SslOpts};
    /// Compile-time check that `Error` stays `Send + Sync`.
    #[allow(dead_code)]
    fn error_should_implement_send_and_sync() {
        fn _dummy<T: Send + Sync>(_: T) {}
        _dummy(crate::error::Error::from("foo"));
    }
    lazy_static! {
        // Connection URL for the test database; falls back to a local default
        // when `DATABASE_URL` is not set. Panics on an invalid URL or a
        // missing/empty database name.
        pub static ref DATABASE_URL: String = {
            if let Ok(url) = env::var("DATABASE_URL") {
                let opts = Opts::from_url(&url).expect("DATABASE_URL invalid");
                if opts
                    .get_db_name()
                    .expect("a database name is required")
                    .is_empty()
                {
                    panic!("database name is empty");
                }
                url
            } else {
                "mysql://root:password@127.0.0.1:3307/mysql".into()
            }
        };
    }
    /// Builds test connection options, honoring the `SSL` and `COMPRESS`
    /// environment toggles.
    pub fn get_opts() -> OptsBuilder {
        let mut builder = OptsBuilder::from_opts(&**DATABASE_URL);
        // to suppress warning on unused mut
        builder.stmt_cache_size(None);
        if test_ssl() {
            builder.prefer_socket(false);
            let mut ssl_opts = SslOpts::default();
            ssl_opts.set_danger_skip_domain_validation(true);
            ssl_opts.set_danger_accept_invalid_certs(true);
            builder.ssl_opts(ssl_opts);
        }
        if test_compression() {
            builder.compression(crate::Compression::default());
        }
        builder
    }
    /// True when the environment variable `name` is set to "true" or "1".
    fn env_flag(name: &str) -> bool {
        // `unwrap_or_default` avoids allocating a String when the variable is
        // unset (the original `unwrap_or("".into())` built one eagerly on
        // every call, even when the variable existed).
        ["true", "1"].contains(&&*env::var(name).unwrap_or_default())
    }
    pub fn test_compression() -> bool {
        env_flag("COMPRESS")
    }
    pub fn test_ssl() -> bool {
        env_flag("SSL")
    }
}
|
use super::super::types::PunterId;
use super::super::proto::{Move, Setup};
use super::super::game::{GameState, GameStateBuilder};
/// Builder producing game states that pass on every turn.
pub struct AlwaysPassGameStateBuilder;
impl GameStateBuilder for AlwaysPassGameStateBuilder {
    type GameState = AlwaysPassGameState;
    /// Captures only our punter id from the setup message.
    fn build(self, setup: Setup) -> Self::GameState {
        AlwaysPassGameState {
            punter: setup.punter,
        }
    }
}
/// Trivial strategy state: remembers the punter id and always passes.
#[derive(Serialize, Deserialize)]
pub struct AlwaysPassGameState {
    // Our own punter id, echoed back in every pass move.
    punter: PunterId,
}
impl GameState for AlwaysPassGameState {
    type Error = ();
    /// Ignores the opponents' moves and replies with a pass for this punter.
    fn play(self, _moves: Vec<Move>) -> Result<(Move, Self), Self::Error> {
        let pass = Move::Pass { punter: self.punter };
        Ok((pass, self))
    }
    /// Nothing to clean up when the game stops.
    fn stop(self, _moves: Vec<Move>) -> Result<Self, Self::Error> {
        Ok(self)
    }
    /// Our punter id.
    fn get_punter(&self) -> PunterId {
        self.punter
    }
}
|
use anyhow::Result;
use serde::de::{Unexpected, Visitor};
use serde::export::Formatter;
use serde::{Deserialize, Deserializer, Serialize};
use std::path::PathBuf;
use std::result::Result as stdResult;
use crate::cfg::{CfgError, LocalSetupCfg, SetupCfg};
pub type SetupName = String;
/// Newtype around the private env directory path; deserialization rejects
/// relative paths via `deserialize_private_env_dir`.
#[derive(Debug, Serialize, Deserialize)]
struct PrivateEnvDir(#[serde(deserialize_with = "deserialize_private_env_dir")] PathBuf);
impl AsRef<PathBuf> for PrivateEnvDir {
    /// Borrows the wrapped path.
    fn as_ref(&self) -> &PathBuf {
        &self.0
    }
}
/// Per-setup section of the global project configuration.
#[derive(Debug, Serialize, Deserialize)]
pub struct GlobalProjectSetupCfg {
    // Never (de)serialized here; presumably assigned by the owner after
    // load via `set_name` — TODO(review): confirm against the loader.
    #[serde(skip)]
    name: SetupName,
    // Absolute path of the private env directory, when configured.
    #[serde(skip_serializing_if = "Option::is_none")]
    private_env_dir: Option<PrivateEnvDir>,
}
impl GlobalProjectSetupCfg {
    /// Creates a setup config with no private env dir configured.
    pub fn new(name: SetupName) -> Self {
        Self {
            name,
            private_env_dir: None,
        }
    }
    /// Returns the configured private env dir.
    ///
    /// # Errors
    /// `CfgError::PrivateEnvDirNotFound` when none is configured.
    pub fn private_env_dir(&self) -> Result<&PathBuf> {
        self.private_env_dir
            .as_ref()
            .map(|private_env_dir| private_env_dir.as_ref())
            // `ok_or_else` so the error (and the name clone it needs) is only
            // built on the failure path, not eagerly on every call.
            .ok_or_else(|| CfgError::PrivateEnvDirNotFound(self.name.clone()).into())
    }
    /// Sets the private env dir.
    ///
    /// # Errors
    /// `CfgError::PrivateEnvDirRelativePath` when `dir` is not absolute.
    pub fn set_private_env_dir(&mut self, dir: PathBuf) -> Result<()> {
        if dir.is_relative() {
            bail!(CfgError::PrivateEnvDirRelativePath(dir, self.name.clone()))
        }
        self.private_env_dir = Some(PrivateEnvDir(dir));
        Ok(())
    }
    /// Clears the private env dir.
    ///
    /// # Errors
    /// `CfgError::PrivateEnvAlreadyUnset` when nothing was configured.
    pub fn unset_private_env_dir(&mut self) -> Result<()> {
        // `is_none()` reads clearer than the original `if let None = …`.
        if self.private_env_dir.is_none() {
            bail!(CfgError::PrivateEnvAlreadyUnset(self.name.clone()))
        }
        self.private_env_dir = None;
        Ok(())
    }
    /// The setup name this config belongs to.
    pub fn name(&self) -> &SetupName {
        &self.name
    }
    /// Renames the setup.
    pub fn set_name(&mut self, name: SetupName) {
        self.name = name;
    }
}
impl From<&LocalSetupCfg> for GlobalProjectSetupCfg {
fn from(local_setup: &LocalSetupCfg) -> Self {
Self {
name: local_setup.name().clone(),
private_env_dir: None,
}
}
}
impl SetupCfg for GlobalProjectSetupCfg {
    /// Setup name accessor required by the `SetupCfg` trait.
    fn name(&self) -> &String {
        &self.name
    }
    /// Renames this setup.
    fn set_name(&mut self, name: String) {
        self.name = name;
    }
}
/// Serde helper: deserializes a path string, rejecting relative paths.
fn deserialize_private_env_dir<'de, D>(deserializer: D) -> stdResult<PathBuf, D::Error>
where
    D: Deserializer<'de>,
{
    struct InnerVisitor;
    impl<'de> Visitor<'de> for InnerVisitor {
        type Value = PathBuf;
        fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
            formatter.write_str("incorrect private_env_dir")
        }
        // Accepts only absolute paths; anything relative is a data error.
        fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            let dir: PathBuf = v.into();
            if dir.is_relative() {
                Err(E::invalid_value(
                    Unexpected::Str(v),
                    &"private_env_dir must be an absolute path",
                ))
            } else {
                Ok(dir)
            }
        }
    }
    deserializer.deserialize_str(InnerVisitor)
}
#[cfg(test)]
mod tests {
    use std::path::PathBuf;
    use predicates::prelude::Predicate;
    use predicates::str::contains;
    use serde_yaml;
    use crate::cfg::global::GlobalProjectSetupCfg;
    // Covers the three deserialization cases: dir absent, relative
    // (rejected with a descriptive error), and absolute (accepted).
    #[test]
    fn deserialize_private_env_dir() {
        let content = r#"
name: setup_1
        "#;
        let setup_cfg = serde_yaml::from_str::<GlobalProjectSetupCfg>(content).unwrap();
        assert!(setup_cfg.private_env_dir().is_err());
        let content = r#"
name: setup_1
private_env_dir: ./rel_path
        "#;
        let error = serde_yaml::from_str::<GlobalProjectSetupCfg>(content).unwrap_err();
        assert!(contains("private_env_dir must be an absolute path").eval(&error.to_string()));
        let content = r#"
name: setup_1
private_env_dir: /rel_path
        "#;
        let setup_cfg = serde_yaml::from_str::<GlobalProjectSetupCfg>(content).unwrap();
        assert_eq!(
            &PathBuf::from("/rel_path"),
            setup_cfg.private_env_dir().unwrap()
        );
    }
}
|
use std::io::{self, Write};
/// Echoes stdin to stdout line by line until "exit" is read or EOF is hit.
fn main() {
    loop {
        let mut buf = String::new();
        let bytes_read = io::stdin()
            .read_line(&mut buf)
            .expect("could not read line");
        // BUG FIX: at EOF `read_line` returns Ok(0); without this check the
        // loop spun forever echoing nothing.
        if bytes_read == 0 {
            break;
        }
        if buf.starts_with("exit") {
            std::process::exit(0);
        }
        // `write_all` instead of `write`: `write` may perform a partial
        // write and its result was otherwise unchecked for that.
        io::stdout()
            .write_all(buf.as_bytes())
            .expect("could not write line");
    }
}
|
// Demonstrates Rust mutability rules: immutable bindings, shadowing with
// `let mut`, and mutable references.
fn main() {
    let x = 5;
    // x = 6; // error! `x` is immutable
    let mut x = 5; // shadowing the earlier binding with a mutable one
    x = 6; // no problem!
    // Mutable reference
    let mut x = 5;
    let y = &mut x;
}
|
//! Responsible for dealing with all input.
use piston::input::{Button, GenericEvent, Key, MouseButton};
use serde_json::json;
use std::error::Error;
use std::fs::File;
use std::path::Path;
use crate::common::{ButtonInteraction, Cell, Directions, DIMENSIONS_CHOICES};
use crate::nonogram_board::NonogramBoard;
/// Handles nonogram keybindings (see the `Default` impl for the default
/// mouse/keyboard bindings).
pub struct NonogramControls {
    /// Keybinding for filling the cell that the mouse cursor is hovering over.
    pub mouse_fill: Button,
    /// Keybinding for marking the cell that the mouse cursor is hovering over.
    pub mouse_mark: Button,
    /// Keybinding for moving the selected cell focus up.
    pub move_up: Button,
    /// Keybinding for moving the selected cell focus down.
    pub move_down: Button,
    /// Keybinding for moving the selected cell focus left.
    pub move_left: Button,
    /// Keybinding for moving the selected cell focus right.
    pub move_right: Button,
    /// Keybinding for filling the currently selected cell.
    pub key_fill: Button,
    /// Keybinding for marking the currently selected cell.
    pub key_mark: Button,
    /// Keybinding for generating a new board and starting from scratch.
    pub restart: Button,
    /// Keybinding for increasing the dimensions of the next board to be generated.
    pub dim_up: Button,
    /// Keybinding for decreasing the dimensions of the next board to be generated.
    pub dim_down: Button,
}
/// Default keybindings: left/right mouse for fill/mark, WASD for movement,
/// J/K for fill/mark, R to restart, arrow up/down for board dimensions.
impl Default for NonogramControls {
    fn default() -> Self {
        Self {
            mouse_fill: Button::Mouse(MouseButton::Left),
            mouse_mark: Button::Mouse(MouseButton::Right),
            move_up: Button::Keyboard(Key::W),
            move_down: Button::Keyboard(Key::S),
            move_left: Button::Keyboard(Key::A),
            move_right: Button::Keyboard(Key::D),
            key_fill: Button::Keyboard(Key::J),
            key_mark: Button::Keyboard(Key::K),
            restart: Button::Keyboard(Key::R),
            dim_up: Button::Keyboard(Key::Up),
            dim_down: Button::Keyboard(Key::Down),
        }
    }
}
/// Handles events for nonogram game.
pub struct NonogramController {
    /// Stores the keybindings.
    pub controls: NonogramControls,
    /// Stores the nonogram state.
    pub nonogram: NonogramBoard,
    /// Stores last mouse cursor position.
    cursor_pos: [f64; 2],
    /// Stores whether a left mouse button or a right mouse button are being held down.
    /// Index 0 tracks the fill binding, index 1 the mark binding.
    mouse_d: [bool; 2],
    /// Stores whether a fill keybinding or mark keybinding are being held down.
    /// Index 0 tracks the fill binding, index 1 the mark binding.
    key_d: [bool; 2],
    /// Stores whether or not then next keyboard move will travel to the other side of the board.
    loop_back: bool,
    /// Whether or not mouse was original clicked on board.
    board_d: bool,
    /// Stores current cell type being manipulated (empty, filled, marked).
    current_action: Cell,
    /// Current status of dimensions dropdown menu.
    pub dimensions_dropdown_menu: ButtonInteraction,
    /// Index of dropdown menu selected, and interaction type.
    pub dimensions_dropdown_options: (usize, ButtonInteraction),
    /// Current status of restart button.
    pub restart_button: ButtonInteraction,
    /// Current status of new game button.
    pub new_game_button: ButtonInteraction,
}
/// Implementation for NonogramController.
impl NonogramController {
    /// Creates a new nonogram controller.
    ///
    /// All interaction state starts cleared; keybindings come from
    /// `NonogramControls::default()`.
    pub fn new(nonogram: NonogramBoard) -> NonogramController {
        NonogramController {
            controls: Default::default(),
            nonogram,
            cursor_pos: [0.0; 2],
            mouse_d: [false; 2],
            key_d: [false; 2],
            loop_back: false,
            board_d: false,
            current_action: Cell::Empty,
            dimensions_dropdown_menu: ButtonInteraction::None,
            dimensions_dropdown_options: (0, ButtonInteraction::None),
            restart_button: ButtonInteraction::None,
            new_game_button: ButtonInteraction::None,
        }
    }
    /// Handles events.
    ///
    /// `board_pos`/`size` give the board's screen rectangle; the `*_box`
    /// parameters are `[x, y, w, h]` rectangles of the UI buttons. Behavior
    /// splits on `end_game_screen`: only the new-game button is interactive
    /// on the end screen, otherwise the full board/menu handling runs.
    //
    // Refer to this documentation for event traits: https://docs.rs/piston/0.49.0/piston/index.html#traits
    //
    // This triggers a Clippy warning for cognitive complexity. There's nothing that can be done about this, because
    // it's caused by the `GenericEvent` trait.
    #[allow(clippy::cognitive_complexity)]
    pub fn event<E: GenericEvent>(
        &mut self,
        board_pos: [f64; 2],
        size: [f64; 2],
        dimensions_dropdown_menu_box: [f64; 4],
        restart_box: [f64; 4],
        new_game_box: [f64; 4],
        e: &E,
    ) {
        // Debug code for figuring out the ID of a particular event.
        //println!("{:?}", e.event_id());
        if self.nonogram.end_game_screen {
            //if true {
            if let Some(pos) = e.mouse_cursor_args() {
                self.cursor_pos = [pos[0], pos[1]];
                // Check that coordinates are inside new game button.
                if self.cursor_pos[0] >= new_game_box[0]
                    && self.cursor_pos[0] <= (new_game_box[0] + new_game_box[2])
                    && self.cursor_pos[1] >= new_game_box[1]
                    && self.cursor_pos[1] <= (new_game_box[1] + new_game_box[3])
                {
                    if self.new_game_button == ButtonInteraction::None {
                        self.new_game_button = ButtonInteraction::Hover;
                    }
                } else if self.new_game_button == ButtonInteraction::Hover
                    || (self.new_game_button == ButtonInteraction::Select && self.mouse_d[0])
                {
                    self.new_game_button = ButtonInteraction::None;
                }
            }
            // Check if left mouse button has been pressed.
            if let Some(Button::Mouse(MouseButton::Left)) = e.press_args() {
                self.mouse_d[0] = true;
                match self.new_game_button {
                    ButtonInteraction::Select => {
                        self.new_game_button = ButtonInteraction::None;
                    }
                    ButtonInteraction::Hover => {
                        self.new_game_button = ButtonInteraction::Select;
                    }
                    _ => (),
                }
            }
            // Check if left mouse button has been released.
            if let Some(Button::Mouse(MouseButton::Left)) = e.release_args() {
                self.mouse_d[0] = false;
                self.board_d = false;
                // Check if left mouse button was released while interacting with new game button.
                if self.new_game_button == ButtonInteraction::Select {
                    self.nonogram.reset_board = true;
                    self.new_game_button = ButtonInteraction::None;
                }
            }
        } else {
            // Check if mouse button has been moved within window and save its location to pos: [f64; 2]
            if let Some(pos) = e.mouse_cursor_args() {
                self.cursor_pos = [pos[0], pos[1]];
                // Find coordinates relative to upper left corner.
                let x = self.cursor_pos[0] - board_pos[0];
                let y = self.cursor_pos[1] - board_pos[1];
                // Check that coordinates are inside dimensions dropdown menu button.
                if self.cursor_pos[0] >= dimensions_dropdown_menu_box[0]
                    && self.cursor_pos[0]
                        <= (dimensions_dropdown_menu_box[0] + dimensions_dropdown_menu_box[2])
                    && self.cursor_pos[1] >= dimensions_dropdown_menu_box[1]
                    && self.cursor_pos[1]
                        <= (dimensions_dropdown_menu_box[1] + dimensions_dropdown_menu_box[3])
                {
                    if self.dimensions_dropdown_menu == ButtonInteraction::None {
                        self.dimensions_dropdown_menu = ButtonInteraction::Hover;
                    }
                } else if self.dimensions_dropdown_menu == ButtonInteraction::Hover {
                    self.dimensions_dropdown_menu = ButtonInteraction::None;
                }
                // Check that coordinates are inside sub menu of dimensions dropdown menu.
                // NOTE(review): the `+ 2` entries of headroom and the `+ 5.0`
                // per-entry spacing below look like renderer-layout constants
                // — confirm against the drawing code before changing.
                let dropdown_sub_menu_y_min =
                    dimensions_dropdown_menu_box[1] + dimensions_dropdown_menu_box[3];
                let dropdown_sub_menu_y_max = dropdown_sub_menu_y_min
                    + (dimensions_dropdown_menu_box[3] * (DIMENSIONS_CHOICES.len() + 2) as f64);
                if self.dimensions_dropdown_menu == ButtonInteraction::Select
                    && self.cursor_pos[0] >= dimensions_dropdown_menu_box[0]
                    && self.cursor_pos[0]
                        <= (dimensions_dropdown_menu_box[0] + dimensions_dropdown_menu_box[2])
                    && self.cursor_pos[1] >= dropdown_sub_menu_y_min
                    && self.cursor_pos[1] <= dropdown_sub_menu_y_max
                {
                    let dimension_sub_index = (self.cursor_pos[1] - dropdown_sub_menu_y_min)
                        / (dimensions_dropdown_menu_box[3] + 5.0);
                    self.dimensions_dropdown_options =
                        (dimension_sub_index as usize, ButtonInteraction::Hover);
                    self.nonogram.selected_cell = None;
                } else {
                    self.dimensions_dropdown_options = (0, ButtonInteraction::None);
                    // Check that coordinates are inside board boundaries.
                    if x >= 0.0 && x < size[0] && y >= 0.0 && y < size[1] {
                        // Compute the cell position.
                        let cell_x = (x / size[0] * self.nonogram.dimensions[0] as f64) as usize;
                        let cell_y = (y / size[1] * self.nonogram.dimensions[1] as f64) as usize;
                        self.nonogram.selected_cell = Some([cell_x, cell_y]);
                        // Drag-painting: only continue the action started on
                        // the initial click (`board_d`), and only over cells
                        // matching the state the drag started on.
                        if self.nonogram.get([cell_x, cell_y]) == self.current_action
                            && self.board_d
                        {
                            if self.mouse_d[0] {
                                self.nonogram.set([cell_x, cell_y], Cell::Filled);
                            } else if self.mouse_d[1] {
                                self.nonogram.set([cell_x, cell_y], Cell::Marked);
                            }
                        }
                    }
                }
                // Check that coordinates are inside restart game button.
                if self.cursor_pos[0] >= restart_box[0]
                    && self.cursor_pos[0] <= (restart_box[0] + restart_box[2])
                    && self.cursor_pos[1] >= restart_box[1]
                    && self.cursor_pos[1] <= (restart_box[1] + restart_box[3])
                {
                    if self.restart_button == ButtonInteraction::None {
                        self.restart_button = ButtonInteraction::Hover;
                    }
                } else if self.restart_button == ButtonInteraction::Hover
                    || (self.restart_button == ButtonInteraction::Select && self.mouse_d[0])
                {
                    self.restart_button = ButtonInteraction::None;
                }
            }
            // Check if left mouse button has been pressed.
            if Some(self.controls.mouse_fill) == e.press_args() {
                self.mouse_d[0] = true;
                if let Some(ind) = self.nonogram.selected_cell {
                    self.board_d = true;
                    self.current_action = self.nonogram.get(ind);
                }
                match self.dimensions_dropdown_menu {
                    ButtonInteraction::Select => {
                        self.dimensions_dropdown_menu = ButtonInteraction::None;
                        if self.dimensions_dropdown_options.1 == ButtonInteraction::Hover {
                            self.nonogram.next_dimensions =
                                DIMENSIONS_CHOICES[self.dimensions_dropdown_options.0];
                            self.dimensions_dropdown_options = (0, ButtonInteraction::None);
                        }
                    }
                    ButtonInteraction::Hover => {
                        self.dimensions_dropdown_menu = ButtonInteraction::Select;
                    }
                    _ => (),
                }
                match self.restart_button {
                    ButtonInteraction::Select => {
                        self.restart_button = ButtonInteraction::None;
                    }
                    ButtonInteraction::Hover => {
                        self.restart_button = ButtonInteraction::Select;
                    }
                    _ => (),
                }
            }
            // Check if right mouse button has been pressed.
            if Some(self.controls.mouse_mark) == e.press_args() {
                self.mouse_d[1] = true;
                if let Some(ind) = self.nonogram.selected_cell {
                    self.board_d = true;
                    self.current_action = self.nonogram.get(ind);
                }
            }
            // Check if left mouse button has been released.
            if Some(self.controls.mouse_fill) == e.release_args() {
                self.mouse_d[0] = false;
                self.board_d = false;
                // Check if left mouse button was released while interacting with restart button.
                if self.restart_button == ButtonInteraction::Select {
                    self.nonogram.reset_board = true;
                    self.restart_button = ButtonInteraction::None;
                }
            }
            // Check if right mouse button has been released.
            if Some(self.controls.mouse_mark) == e.release_args() {
                self.mouse_d[1] = false;
                self.board_d = false;
            }
            // Check if ESC key has been released.
            //
            // Refer to this documentation for keyboard key names: http://docs.piston.rs/mush/piston/input/enum.Key.html
            if let Some(Button::Keyboard(Key::Escape)) = e.release_args() {
                println!("Escape key pressed");
            }
            // Check if key for increasing dimensions has been released.
            if Some(self.controls.dim_up) == e.release_args() {
                let dimensions_index = DIMENSIONS_CHOICES
                    .iter()
                    .position(|&r| r == self.nonogram.next_dimensions)
                    .unwrap();
                if dimensions_index < DIMENSIONS_CHOICES.len() - 1 {
                    self.nonogram.next_dimensions = DIMENSIONS_CHOICES[dimensions_index + 1];
                }
            }
            // Check if key for decreasing dimensions has been released.
            if Some(self.controls.dim_down) == e.release_args() {
                let dimensions_index = DIMENSIONS_CHOICES
                    .iter()
                    .position(|&r| r == self.nonogram.next_dimensions)
                    .unwrap();
                if dimensions_index > 0 {
                    self.nonogram.next_dimensions = DIMENSIONS_CHOICES[dimensions_index - 1];
                }
            }
            // Check if move up key has been pressed.
            if Some(self.controls.move_up) == e.press_args() {
                self.nonogram
                    .change_selected(Directions::Up, self.loop_back);
            }
            // Check if move down key has been pressed.
            if Some(self.controls.move_down) == e.press_args() {
                self.nonogram
                    .change_selected(Directions::Down, self.loop_back);
            }
            // Check if move left key has been pressed.
            if Some(self.controls.move_left) == e.press_args() {
                self.nonogram
                    .change_selected(Directions::Left, self.loop_back);
            }
            // Check if move right key has been pressed
            if Some(self.controls.move_right) == e.press_args() {
                self.nonogram
                    .change_selected(Directions::Right, self.loop_back);
            }
            // Detect keyboard movement keys
            // (continues a held fill/mark action onto the newly selected cell,
            // mirroring the mouse drag-painting above).
            if e.press_args() == Some(self.controls.move_up)
                || e.press_args() == Some(self.controls.move_down)
                || e.press_args() == Some(self.controls.move_left)
                || e.press_args() == Some(self.controls.move_right)
            {
                self.loop_back = false;
                if let Some(ind) = self.nonogram.selected_cell {
                    if self.nonogram.get(ind) == self.current_action {
                        if self.key_d[0] {
                            self.nonogram.set(ind, Cell::Filled);
                        } else if self.key_d[1] {
                            self.nonogram.set(ind, Cell::Marked);
                        }
                    }
                }
            }
            // Check if fill key has been pressed.
            if Some(self.controls.key_fill) == e.press_args() && !self.key_d[0] {
                self.key_d[0] = true;
                if let Some(ind) = self.nonogram.selected_cell {
                    self.current_action = self.nonogram.get(ind);
                    self.nonogram.set(ind, Cell::Filled);
                }
            }
            // Check if mark key has been pressed.
            if Some(self.controls.key_mark) == e.press_args() && !self.key_d[1] {
                self.key_d[1] = true;
                if let Some(ind) = self.nonogram.selected_cell {
                    self.current_action = self.nonogram.get(ind);
                    self.nonogram.set(ind, Cell::Marked);
                }
            }
        }
        // Check if window has been closed.
        //
        // This will check for window closure via clicking the "X" in the top right corner of the window,
        // ALT+F4, or killing the program with task manager. This won't check for closure through ESC with
        // the option ".exit_on_esc(true)" enabled in main.rs during the window's initial creation though, so
        // that option isn't enabled.
        //
        // This might be useful later if we intend to save any user progress. The program will run everything in
        // this block before it actually closes the program.
        if let Some(_window_closed) = e.close_args() {
            let path = Path::new("savedata.json");
            let display = path.display();
            let file = match File::create(&path) {
                // NOTE(review): `Error::description()` is deprecated since
                // Rust 1.42; `why` implements Display and could be formatted
                // with `{}` directly.
                Err(why) => panic!("couldn't create {}: {}", display, why.description()),
                Ok(file) => file,
            };
            // Serialize it to a JSON string.
            //let j = serde_json::to_string(&self.nonogram.goal_nums);
            //println!("{:?}", j);
            let save_data = json!({
                "dimensions": self.nonogram.dimensions,
                "next_dimensions": self.nonogram.next_dimensions,
                "data": self.nonogram.data,
                "goal_nums": self.nonogram.goal_nums,
                "count_black": self.nonogram.count_black,
                "goal_black": self.nonogram.goal_black,
                "duration": self.nonogram.duration,
                "end_game_screen": self.nonogram.end_game_screen,
            });
            match serde_json::to_writer_pretty(file, &save_data) {
                Err(why) => panic!("Couldn't write to {}: {}", display, why.description()),
                Ok(_) => println!("Successfully wrote to {}", display),
            }
            println!("Nonogram game closed.");
        }
        // Check if restart key has been released.
        if Some(self.controls.restart) == e.release_args() {
            self.nonogram.reset_board = true;
        }
        // Releasing a movement key on a border cell arms `loop_back`, so the
        // next keyboard move wraps around to the opposite side of the board.
        if e.release_args() == Some(self.controls.move_up)
            || e.release_args() == Some(self.controls.move_down)
            || e.release_args() == Some(self.controls.move_left)
            || e.release_args() == Some(self.controls.move_right)
        {
            if let Some(ind) = self.nonogram.selected_cell {
                if ind[1] == 0
                    || ind[1] == self.nonogram.dimensions[1] - 1
                    || ind[0] == 0
                    || ind[0] == self.nonogram.dimensions[0] - 1
                {
                    self.loop_back = true;
                }
            }
        }
        // Check if fill key has been released.
        if Some(self.controls.key_fill) == e.release_args() {
            self.key_d[0] = false;
        }
        // Check if mark key has been released.
        if Some(self.controls.key_mark) == e.release_args() {
            self.key_d[1] = false;
        }
    }
}
|
//! # Nom parsers
use std::string::FromUtf8Error;
use nom::{
combinator::{map, map_res},
error::{FromExternalError, ParseError},
multi::length_data,
number::complete::{le_i32, le_u32},
IResult, Parser,
};
use super::CRCTreeNode;
/// Parse a single CRC-tree node: a little-endian `crc` (u32), the `left`
/// and `right` child references (i32), followed by the node payload
/// produced by `parser`.
pub fn parse_crc_node<'r, D, P, E>(
    mut parser: P,
) -> impl FnMut(&'r [u8]) -> IResult<&'r [u8], CRCTreeNode<D>, E>
where
    P: Parser<&'r [u8], D, E>,
    E: ParseError<&'r [u8]>,
{
    move |bytes: &'r [u8]| -> IResult<&[u8], CRCTreeNode<D>, E> {
        // Fixed header first, then delegate the payload to the inner parser.
        let (rest, crc) = le_u32(bytes)?;
        let (rest, left) = le_i32(rest)?;
        let (rest, right) = le_i32(rest)?;
        let (rest, data) = parser.parse(rest)?;
        let node = CRCTreeNode { crc, left, right, data };
        Ok((rest, node))
    }
}
/// Parse a `String` preceded by a little-endian `u32` byte-length prefix.
///
/// Reads the length, takes exactly that many bytes, and validates them as
/// UTF-8; invalid UTF-8 is surfaced through the external-error conversion.
pub fn parse_u32_string<'a, E>(input: &'a [u8]) -> IResult<&'a [u8], String, E>
where
    E: ParseError<&'a [u8]> + FromExternalError<&'a [u8], FromUtf8Error>,
{
    map_res(length_data(le_u32), |bytes: &[u8]| {
        String::from_utf8(bytes.to_vec())
    })(input)
}
|
use async_trait::async_trait;
use messagebus::{
derive::{Error as MbError, Message},
error::{self, GenericError},
AsyncHandler, Bus, Message, TypeTagged,
};
use messagebus_remote::relays::QuicServerRelay;
use serde_derive::{Deserialize, Serialize};
use thiserror::Error;
/// Example-level error type: wraps any failure as an `anyhow::Error`.
#[derive(Debug, Error, MbError)]
enum Error {
    /// Catch-all variant; `thiserror` renders it as `Error(<cause>)`.
    #[error("Error({0})")]
    Error(anyhow::Error),
}
impl<M: Message> From<error::Error<M>> for Error {
fn from(err: error::Error<M>) -> Self {
Self::Error(err.into())
}
}
/// Request message carried over the bus (namespace "example"; shared and
/// cloneable per the `message` attribute).
#[derive(Serialize, Deserialize, Debug, Clone, Message)]
#[namespace("example")]
#[message(shared, clone)]
pub struct Req {
    /// Numeric payload; the handler responds with `data + 12`.
    data: i32,
    /// Text payload; the handler echoes it wrapped in `<< >>`.
    text: String,
}
/// Response message produced by `TmpReceiver` for each `Req`
/// (namespace "example"; shared and cloneable).
#[derive(Serialize, Deserialize, Debug, Clone, Message)]
#[namespace("example")]
#[message(shared, clone)]
pub struct Resp {
    /// Transformed numeric payload from the request.
    data: i32,
    /// Decorated text payload from the request.
    text: String,
}
/// Stateless example receiver that answers `Req` messages with `Resp`.
struct TmpReceiver;
#[async_trait]
impl AsyncHandler<Req> for TmpReceiver {
    type Error = Error;
    type Response = Resp;
    /// Handles a request: adds 12 to `data` and wraps `text` in `<< >>`.
    async fn handle(&self, msg: Req, _bus: &Bus) -> Result<Self::Response, Self::Error> {
        println!("TmpReceiver::handle {:?}", msg);
        Ok(Resp {
            data: msg.data + 12,
            text: format!("<< {} >>", msg.text),
        })
    }
    /// Flush hook; this receiver keeps no state, so it only logs.
    async fn sync(&self, _bus: &Bus) -> Result<(), Self::Error> {
        println!("TmpReceiver::sync");
        Ok(())
    }
}
#[tokio::main]
async fn main() {
    // QUIC relay listening on 0.0.0.0:8083 with the example key/cert pair.
    // The table declares that incoming `Req` messages may be answered with
    // `Resp` or a `GenericError`.
    let relay = QuicServerRelay::new(
        "./examples/cert.key",
        "./examples/cert.der",
        "0.0.0.0:8083".parse().unwrap(),
        (
            vec![],
            vec![(
                Req::type_tag_(),
                Some((Resp::type_tag_(), GenericError::type_tag_())),
            )],
        ),
    )
    .unwrap();
    // Wire the relay plus the local receiver into the bus.
    // NOTE(review): the `8` in subscribe_async is presumably the queue /
    // parallelism bound — confirm against the messagebus documentation.
    let (b, poller) = Bus::build()
        .register_relay(relay)
        .register(TmpReceiver)
        .subscribe_async::<Req>(8, Default::default())
        .done()
        .build();
    b.ready().await;
    println!("ready");
    // Drive the bus until it shuts down.
    poller.await;
}
|
use super::*;
/// Serializes a single log entry and returns the `FileLocation` of its
/// static entry block together with the ordered buffers to write.
///
/// On-disk layout: <data buffers><padding to 4 KiB><tail>, where the tail
/// carries per-tx/per-alloc records, the deletion id lists, and finally the
/// fixed-size static entry block (sizes come from `calculate_write_size`).
///
/// Side effects: stamps `last_entry_serial` on every tx and alloc and fills
/// in any missing `txd_location` / `data_locations` / `data_location`, so
/// already-persisted buffers are not rewritten by later entries.
pub(super) fn log_entry(
    entry_serial_number: LogEntrySerialNumber,
    earliest_entry_needed: LogEntrySerialNumber,
    last_entry_location: FileLocation,
    txs: &Vec<&RefCell<Tx>>,
    allocs: &Vec<&RefCell<Alloc>>,
    tx_deletions: &Vec<TxId>,
    alloc_deletions: &Vec<TxId>,
    stream: &Box<dyn FileStream>) -> (FileLocation, Vec<ArcDataSlice>) {
    let (data_sz, tail_sz, num_data_buffers) = calculate_write_size(txs, allocs,
        tx_deletions, alloc_deletions);
    // Determine where this write lands in the current file and how much
    // padding keeps the tail 4 KiB aligned; the asserts guarantee the whole
    // write fits and ends on a 4 KiB boundary.
    let (file_id, file_uuid, initial_offset, padding_sz) = {
        let max_size = stream.const_max_file_size();
        let (file_id, file_uuid, offset) = stream.status();
        let padding = pad_to_4k_alignment(offset as u64, data_sz, tail_sz);
        assert!(offset as u64 + data_sz + padding + tail_sz <= max_size as u64);
        assert!((offset as u64 + data_sz + padding + tail_sz) % 4096 == 0);
        (file_id, file_uuid, offset, padding)
    };
    let entry_offset = initial_offset as u64 + data_sz + padding_sz;
    let mut offset = initial_offset;
    // Tail buffer: zero padding followed by the entry content.
    let mut tail = DataMut::with_capacity((padding_sz + tail_sz) as usize);
    tail.zfill(padding_sz);
    // One slot per data buffer plus one for the tail itself.
    let mut buffers = Vec::<ArcDataSlice>::with_capacity(num_data_buffers + 1);
    // Queues a data buffer and returns the FileLocation it will occupy.
    let mut push_data_buffer = |b: ArcDataSlice| -> FileLocation {
        let length = b.len();
        let l = FileLocation{file_id : file_id, offset : offset as u64, length : length as u32};
        buffers.push(b);
        offset += length;
        l
    };
    for tx in txs.iter() {
        let mut mtx = tx.borrow_mut();
        mtx.last_entry_serial = entry_serial_number;
        // Only emit buffers that do not already have an on-disk location.
        if mtx.txd_location.is_none() {
            let s = mtx.state.serialized_transaction_description.clone();
            mtx.txd_location = Some(push_data_buffer(s));
        }
        if mtx.data_locations.is_none() && !mtx.state.object_updates.is_empty() {
            let mut v = Vec::new();
            for ou in &mtx.state.object_updates {
                v.push(push_data_buffer(ou.data.clone()));
            }
            mtx.data_locations = Some(v);
        }
        // Release the mutable borrow before re-borrowing immutably below.
        drop(mtx);
        encode_tx_state(&tx.borrow(), &mut tail);
    }
    for a in allocs.iter() {
        let mut ma = a.borrow_mut();
        ma.last_entry_serial = entry_serial_number;
        if ma.data_location.is_none() {
            ma.data_location = Some(push_data_buffer(ma.state.data.clone()));
        }
        drop(ma);
        encode_alloc_state(&a.borrow(), &mut tail);
    }
    for id in tx_deletions {
        id.encode_into(&mut tail);
    }
    for id in alloc_deletions {
        id.encode_into(&mut tail);
    }
    // ---------- Static Entry Block ----------
    // Exactly STATIC_ENTRY_SIZE bytes must remain: recovery relies on the
    // static block sitting at the very end of the tail.
    assert_eq!((tail.capacity() - tail.len()) as u64, STATIC_ENTRY_SIZE);
    let entry_block_offset = initial_offset as u64 + data_sz + tail.offset() as u64;
    // Entry block always ends with:
    //   entry_serial_number - 8
    //   entry_begin_offset - 8
    //   earliest_needed_entry_serial_number - 8
    //   num_transactions - 4
    //   num_allocations - 4
    //   num_tx_deletions - 4
    //   num_alloc_deletions - 4
    //   prev_entry_file_location - 14 (2 + 8 + 4)
    //   file_uuid - 16
    tail.put_u64_le(entry_serial_number.0);
    tail.put_u64_le(entry_offset);
    tail.put_u64_le(earliest_entry_needed.0);
    tail.put_u32_le(txs.len() as u32);
    tail.put_u32_le(allocs.len() as u32);
    tail.put_u32_le(tx_deletions.len() as u32);
    tail.put_u32_le(alloc_deletions.len() as u32);
    last_entry_location.encode_into(&mut tail);
    tail.put_slice(file_uuid.as_bytes());
    buffers.push(ArcDataSlice::from(tail.finalize()));
    let entry_location = FileLocation{
        file_id : file_id,
        offset : entry_block_offset,
        length : STATIC_ENTRY_SIZE as u32
    };
    (entry_location, buffers)
}
/// Decodes the variable-length portion of a recovering entry: `num_tx`
/// transaction records, `num_allocs` allocation records, then the two
/// deletion-id lists, appending each item to `entry` in decode order.
pub(super) fn load_entry_data(
    buf: &mut Data,
    entry: &mut RecoveringEntry,
    entry_serial: LogEntrySerialNumber) -> Result<(), DecodeError> {
    for _ in 0..entry.num_tx {
        entry.transactions.push(decode_tx_state(buf, entry_serial)?);
    }
    for _ in 0..entry.num_allocs {
        entry.allocations.push(decode_alloc_state(buf, entry_serial)?);
    }
    for _ in 0..entry.num_tx_del {
        entry.tx_deletions.push(TxId::decode_from(buf));
    }
    for _ in 0..entry.num_alloc_del {
        entry.alloc_deletions.push(TxId::decode_from(buf));
    }
    Ok(())
}
/// Decodes the fixed-size static entry block into a `RecoveringEntry` whose
/// per-item vectors are pre-sized but still empty (the variable-length part
/// is filled in later by `load_entry_data`).
pub(super) fn decode_entry(buf: &mut Data) -> Result<RecoveringEntry, DecodeError> {
    if buf.remaining() < STATIC_ENTRY_SIZE as usize {
        return Err(DecodeError{});
    }
    // Field order must mirror the writer in `log_entry`.
    let serial = buf.get_u64_le();
    let entry_offset = buf.get_u64_le();
    let earliest_needed = buf.get_u64_le();
    let num_tx = buf.get_u32_le();
    let num_allocs = buf.get_u32_le();
    let num_tx_del = buf.get_u32_le();
    let num_alloc_del = buf.get_u32_le();
    let previous_entry_location = FileLocation::decode_from(buf);
    Ok(RecoveringEntry {
        serial: LogEntrySerialNumber(serial),
        entry_offset,
        earliest_needed,
        num_tx,
        num_allocs,
        num_tx_del,
        num_alloc_del,
        previous_entry_location,
        transactions: Vec::with_capacity(num_tx as usize),
        allocations: Vec::with_capacity(num_allocs as usize),
        tx_deletions: Vec::with_capacity(num_tx_del as usize),
        alloc_deletions: Vec::with_capacity(num_alloc_del as usize),
    })
}
// pub struct TransactionRecoveryState {
// store_id: store::Id, 17
// transaction_id: 16
// serialized_transaction_description: Bytes, 14 (FileLocation)
// tx_disposition: transaction::Disposition, 1
// paxos_state: paxos::PersistentState, 11 (1:mask-byte + 5:proposalId + 5:proposalId)
// object_updates: Vec<transaction::ObjectUpdate>, 4:count + num_updates * (16:objuuid + FileLocation)
// }
/// Decodes one transaction record from an entry tail (the inverse of
/// `encode_tx_state`); `entry_serial` is recorded as the transaction's
/// `last_entry_serial`.
fn decode_tx_state(buf: &mut Data, entry_serial: LogEntrySerialNumber) -> Result<RecoveringTx, DecodeError> {
    if buf.remaining() < STATIC_TX_SIZE as usize {
        Err(DecodeError{})
    } else {
        let id = TxId::decode_from(buf);
        let txd_loc = FileLocation::decode_from(buf);
        let disposition = transaction::Disposition::from_u8(buf.get_u8())?;
        // Paxos state is packed as a mask byte plus (peer, number) pairs for
        // the promised and accepted proposals. Mask bits (set by encoder):
        //   bit 2 - promised proposal present
        //   bit 1 - accepted proposal present
        //   bit 0 - the "have accepted" boolean paired with the accepted id
        let mask = buf.get_u8();
        let promise_peer = buf.get_u8();
        let promise_proposal_id = buf.get_u32_le();
        let accepted_peer = buf.get_u8();
        let accepted_proposal_id = buf.get_u32_le();
        let promised = if mask & 1 << 2 == 0 {
            None
        } else {
            Some(paxos::ProposalId{
                number: promise_proposal_id,
                peer: promise_peer
            })
        };
        let accepted = if mask & 1 << 1 == 0 {
            None
        } else {
            let prop_id = paxos::ProposalId {
                number: accepted_proposal_id,
                peer: accepted_peer
            };
            let have_accepted = mask & 1 << 0 != 0;
            Some((prop_id, have_accepted))
        };
        let pax = paxos::PersistentState{
            promised,
            accepted
        };
        // Object updates: u32 count, then (16-byte uuid, FileLocation) pairs.
        let mut updates: Vec<(uuid::Uuid, FileLocation)> = Vec::new();
        let nupdates = buf.get_u32_le();
        if buf.remaining() < nupdates as usize * (16 + FILE_LOCATION_SIZE as usize) {
            return Err(DecodeError{});
        }
        for _ in 0 .. nupdates {
            let mut uuid_bytes: [u8; 16] = [0; 16];
            buf.copy_to_slice(&mut uuid_bytes);
            let location = FileLocation::decode_from(buf);
            updates.push((uuid::Uuid::from_bytes(uuid_bytes), location));
        }
        Ok(RecoveringTx{
            id,
            serialized_transaction_description: txd_loc,
            object_updates: updates,
            tx_disposition: disposition,
            paxos_state: pax,
            last_entry_serial: entry_serial
        })
    }
}
/// Appends one transaction record to the entry tail (the inverse of
/// `decode_tx_state`); a missing `txd_location` is written as the null
/// FileLocation.
fn encode_tx_state(tx: &Tx, buf: &mut DataMut) {
    let tx_id = TxId(tx.state.store_id, tx.id);
    tx_id.encode_into(buf);
    if let Some(loc) = &tx.txd_location {
        loc.encode_into(buf);
    } else {
        FileLocation::null().encode_into(buf);
    }
    buf.put_u8(tx.state.tx_disposition.to_u8());
    // Pack the paxos state. Mask bits: 2 = promised present, 1 = accepted
    // present, 0 = the accepted boolean. Absent proposals encode as zeros.
    let mut mask = 0u8;
    let mut promise_peer = 0u8;
    let mut promise_proposal_id = 0u32;
    let mut accepted_peer = 0u8;
    let mut accepted_proposal_id = 0u32;
    if let Some(promise) = tx.state.paxos_state.promised {
        mask |= 1 << 2;
        promise_peer = promise.peer;
        promise_proposal_id = promise.number;
    }
    if let Some((prop_id, accepted)) = tx.state.paxos_state.accepted {
        mask |= 1 << 1;
        if accepted {
            mask |= 1 << 0;
        }
        accepted_peer = prop_id.peer;
        accepted_proposal_id = prop_id.number;
    }
    buf.put_u8(mask);
    buf.put_u8(promise_peer);
    buf.put_u32_le(promise_proposal_id);
    buf.put_u8(accepted_peer);
    buf.put_u32_le(accepted_proposal_id);
    // Object updates: u32 count, then (uuid, location) pairs.
    // NOTE(review): when `data_locations` is Some, `zip` silently truncates
    // to the shorter of the two lists — this assumes exactly one location
    // per object update; confirm that invariant holds at all call sites.
    match &tx.data_locations {
        None => buf.put_u32_le(0u32),
        Some(dl) => {
            buf.put_u32_le(dl.len() as u32);
            for (ou, loc) in tx.state.object_updates.iter().zip(dl.iter()) {
                buf.put_slice(ou.object_id.0.as_bytes());
                loc.encode_into(buf);
            }
        }
    }
}
/// Decodes one allocation record from an entry tail (the inverse of
/// `encode_alloc_state`); `entry_serial` becomes the record's
/// `last_entry_serial`.
fn decode_alloc_state(buf: &mut Data, entry_serial: LogEntrySerialNumber) -> Result<RecoveringAlloc, DecodeError> {
    if buf.remaining() < STATIC_ARS_SIZE as usize {
        Err(DecodeError{})
    } else {
        let id = TxId::decode_from(buf);
        //let txd_loc = FileLocation::decode_from(buf);
        let object_id = object::Id(buf.get_uuid());
        let kind = object::Kind::from_u8(buf.get_u8())?;
        // A size of zero is the wire encoding for None (see encoder).
        let size = {
            let sz = buf.get_u32_le();
            if sz == 0 {
                None
            } else {
                Some(sz)
            }
        };
        let data = FileLocation::decode_from(buf);
        let refcount = {
            let update_serial = buf.get_u32_le();
            let count = buf.get_u32_le();
            object::Refcount {
                update_serial,
                count
            }
        };
        let timestamp = hlc::Timestamp::from(buf.get_u64_le());
        // Length-prefixed opaque revision-guard bytes.
        let serialized_revision_guard = {
            let nbytes = buf.get_u32_le() as usize;
            if buf.remaining() < nbytes {
                return Err(DecodeError{});
            }
            let mut v: Vec<u8> = Vec::with_capacity(nbytes);
            v.extend_from_slice(buf.get_slice(nbytes));
            ArcDataSlice::from(v)
        };
        // Store pointer: u32 content length + pool-index byte + content.
        // Variant chosen by length: 0 => None, <= 22 => Short (inline
        // array), otherwise Long (heap vector).
        let store_pointer = {
            if buf.remaining() < 4 {
                return Err(DecodeError{});
            }
            let nbytes = buf.get_u32_le() as usize;
            if buf.remaining() < nbytes + 1 {
                return Err(DecodeError{});
            }
            let pool_index = buf.get_u8();
            if nbytes == 0 {
                store::Pointer::None{pool_index}
            }
            else if nbytes <= 22 {
                let mut content: [u8; 22] = [0; 22];
                let s = buf.get_slice(nbytes);
                for (idx, byte) in s.iter().enumerate() {
                    content[idx] = *byte;
                }
                store::Pointer::Short {
                    pool_index,
                    nbytes: nbytes as u8,
                    content
                }
            }
            else {
                let mut content: Vec<u8> = Vec::with_capacity(nbytes);
                content.extend_from_slice(buf.get_slice(nbytes));
                store::Pointer::Long {
                    pool_index,
                    content
                }
            }
        };
        Ok(RecoveringAlloc{
            id,
            store_pointer,
            object_id,
            kind,
            size,
            data,
            refcount,
            timestamp,
            serialized_revision_guard,
            last_entry_serial: entry_serial
        })
    }
}
/// Appends one allocation record to the entry tail (the inverse of
/// `decode_alloc_state`). Panics if `data_location` has not been assigned,
/// since the record stores the data's FileLocation rather than its bytes.
fn encode_alloc_state(a: &Alloc, buf: &mut DataMut) {
    assert!(a.data_location.is_some(), "DataLocation field must be set!");
    let tx_id = TxId(a.state.store_id, a.state.allocation_transaction_id);
    tx_id.encode_into(buf);
    buf.put_uuid(a.state.id.0);
    buf.put_u8(a.state.kind.to_u8());
    // None is written as zero; the decoder maps zero back to None.
    buf.put_u32_le(match a.state.size {
        None => 0u32,
        Some(len) => len
    });
    a.data_location.unwrap().encode_into(buf);
    buf.put_u32_le(a.state.refcount.update_serial);
    buf.put_u32_le(a.state.refcount.count);
    buf.put_u64_le(a.state.timestamp.to_u64());
    // Length-prefixed revision-guard bytes.
    buf.put_u32_le(a.state.serialized_revision_guard.len() as u32);
    buf.put_slice(&a.state.serialized_revision_guard.as_bytes());
    // Store pointer: u32 content length + pool-index byte + content bytes.
    match &a.state.store_pointer {
        store::Pointer::None{pool_index} => {
            buf.put_u32_le(0);
            buf.put_u8(*pool_index);
        }
        store::Pointer::Short{pool_index, nbytes, content} => {
            buf.put_u32_le(*nbytes as u32);
            buf.put_u8(*pool_index);
            buf.put_slice(&content[0..*(nbytes) as usize]);
        },
        store::Pointer::Long{pool_index, content} => {
            buf.put_u32_le(content.len() as u32);
            buf.put_u8(*pool_index);
            buf.put_slice(content);
        }
    };
}
/// Calculates the size required for the write.
///
/// Format
///
/// <data-buffers><4-k alignment padding><tail buffer>
///
/// Tail Buffer Format
/// <entry content><static-entry-block>
///
/// Returns (size-of-pre-entry-data, size-of-entry-block, number-of-data-buffers)
fn calculate_write_size(
    txs: &Vec<&RefCell<Tx>>,
    allocs: &Vec<&RefCell<Alloc>>,
    tx_deletions: &Vec<TxId>,
    alloc_deletions: &Vec<TxId>) -> (u64, u64, usize) {
    let mut num_buffers: usize = 0;
    let mut data_sz: u64 = 0;
    let mut tail_sz: u64 = STATIC_ENTRY_SIZE;
    for cell in txs {
        let tx = cell.borrow();
        // The serialized description buffer is only written when it does
        // not already have an on-disk location.
        if tx.txd_location.is_none() {
            data_sz += tx.state.serialized_transaction_description.len() as u64;
            num_buffers += 1;
        }
        // Likewise, object-update payloads are only written when unlocated.
        if tx.data_locations.is_none() && !tx.state.object_updates.is_empty() {
            for update in &tx.state.object_updates {
                data_sz += update.data.len() as u64;
                num_buffers += 1;
            }
        }
        // The tail record charges for every update's (uuid, location) pair.
        tail_sz += STATIC_TX_SIZE
            + tx.state.object_updates.len() as u64 * OBJECT_UPDATE_STATIC_SIZE;
    }
    for cell in allocs {
        let alloc = cell.borrow();
        if alloc.data_location.is_none() {
            data_sz += alloc.state.data.len() as u64;
            num_buffers += 1;
        }
        tail_sz += STATIC_ARS_SIZE
            + alloc.state.store_pointer.encoded_len() as u64
            + alloc.state.serialized_revision_guard.len() as u64;
    }
    tail_sz += (tx_deletions.len() + alloc_deletions.len()) as u64 * TXID_SIZE;
    (data_sz, tail_sz, num_buffers)
}
/// Estimates how many bytes writing this transaction will add to the next
/// log entry (pending data buffers plus its tail record).
pub(super) fn tx_write_size(tx: &RefCell<Tx>) -> usize {
    let mut data = 0;
    let mut update_count = 0;
    let tx = tx.borrow();
    // The description buffer is only rewritten if not already on disk.
    if tx.txd_location.is_none() {
        data += tx.state.serialized_transaction_description.len();
    }
    if tx.data_locations.is_none() && ! tx.state.object_updates.is_empty() {
        for ou in &tx.state.object_updates {
            data += ou.data.len();
            update_count += 1;
        }
    }
    // NOTE(review): `calculate_write_size` charges the tail for every
    // object update regardless of `data_locations`, whereas this estimate
    // only counts updates when `data_locations` is None — confirm the
    // discrepancy is intentional for already-located updates.
    data + STATIC_TX_SIZE as usize + update_count * (16 + FILE_LOCATION_SIZE as usize)
}
/// Tail bytes consumed by one transaction-deletion record (a single TxId).
pub(super) fn tx_delete_size(_txid: &TxId) -> usize {
    TXID_SIZE as usize
}
/// Bytes this allocation will add to the next entry: its data buffer (only
/// if not already on disk) plus its tail record.
pub(super) fn alloc_write_size(alloc: &RefCell<Alloc>) -> usize {
    let a = alloc.borrow();
    let pending_data = if a.data_location.is_none() {
        a.state.data.len()
    } else {
        0
    };
    pending_data
        + a.state.store_pointer.encoded_len()
        + a.state.serialized_revision_guard.len()
        + STATIC_ARS_SIZE as usize
}
/// Tail bytes consumed by one allocation-deletion record (a single TxId).
pub(super) fn alloc_delete_size(_txid: &TxId) -> usize {
    TXID_SIZE as usize
}
/// Returns the number of padding bytes needed so that
/// `offset + data_size + tail_size + padding` lands on a 4 KiB boundary.
///
/// A completely empty write (total of zero) is padded out to a full 4 KiB
/// block rather than zero bytes.
pub(super) fn pad_to_4k_alignment(offset: u64, data_size: u64, tail_size: u64) -> u64 {
    let total = offset + data_size + tail_size;
    if total < 4096 {
        return 4096 - total;
    }
    match total % 4096 {
        0 => 0,
        partial => 4096 - partial,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::data::*;
    use crate::store;
    use crate::transaction;
    use crate::paxos;
    // Deterministic UUID fixtures shared by the round-trip tests.
    fn uuids() -> [uuid::Uuid; 4] {
        [uuid::Uuid::parse_str("d1cccd1b-e34e-4193-ad62-868a964eab9c").unwrap(),
        uuid::Uuid::parse_str("f308d237-a26e-4735-b001-6782fb2eac38").unwrap(),
        uuid::Uuid::parse_str("0e18b5ad-0717-4a5b-b0d1-e675dd55790a").unwrap(),
        uuid::Uuid::parse_str("7c27c2af-4d7a-4eab-867d-00691d6dfed8").unwrap()]
    }
    // Padding totals below, at, and above one 4 KiB block.
    #[test]
    fn padding() {
        assert_eq!(pad_to_4k_alignment(0, 0, 16), 4096-16);
        assert_eq!(pad_to_4k_alignment(0, 0, 17), 4096-17);
        assert_eq!(pad_to_4k_alignment(4096, 0, 16), 4096-16);
        assert_eq!(pad_to_4k_alignment(4096, 2048, 2048), 0);
        assert_eq!(pad_to_4k_alignment(4096, 4096, 4096), 0);
        assert_eq!(pad_to_4k_alignment(0, 4096, 4096), 0);
    }
    // Round-trips a transaction with no locations, no updates, and empty
    // paxos state; the decoded description location must be null.
    #[test]
    fn tx_minimal_encoding() {
        let ids = uuids();
        let txid = transaction::Id(ids[0]);
        let pool_uuid = ids[1];
        let store_id = store::Id { pool_uuid, pool_index: 1u8 };
        let std = ArcData::from(vec![1u8,2u8,3u8]);
        let tx_disposition = transaction::Disposition::VoteCommit;
        let paxos_state = paxos::PersistentState { promised: None, accepted: None };
        let tx1 = Tx {
            id: txid,
            txd_location: None,
            data_locations: None,
            state: TransactionRecoveryState {
                transaction_id: txid,
                store_id,
                serialized_transaction_description: std.into(),
                object_updates: Vec::new(),
                tx_disposition,
                paxos_state
            },
            last_entry_serial: LogEntrySerialNumber(0)
        };
        let mut m = DataMut::with_capacity(4096);
        encode_tx_state(&tx1, &mut m);
        // With no object updates only the static portion is written.
        assert_eq!(m.len() as u64, STATIC_TX_SIZE);
        m.set_offset(0);
        let mut r = Data::from(m);
        let ra = decode_tx_state(&mut r, LogEntrySerialNumber(0)).unwrap();
        let expected = RecoveringTx {
            id: TxId(store_id, txid),
            serialized_transaction_description: FileLocation::null(),
            object_updates: Vec::new(),
            tx_disposition,
            paxos_state,
            last_entry_serial: LogEntrySerialNumber(0)
        };
        assert_eq!(ra, expected);
    }
    // Round-trips a transaction with assigned locations, two object
    // updates, and fully-populated paxos state.
    #[test]
    fn tx_full_encoding() {
        let ids = uuids();
        let txid = transaction::Id(ids[0]);
        let txd_loc = FileLocation { file_id: FileId(5), offset: 2,length: 3 };
        let u1 = FileLocation { file_id: FileId(1), offset: 1, length: 1 };
        let u2 = FileLocation { file_id: FileId(2), offset: 2, length: 2 };
        let oid1 = object::Id(ids[2]);
        let oid2 = object::Id(ids[3]);
        let ads = ArcDataSlice::from(vec![0u8]);
        let ou1 = transaction::ObjectUpdate { object_id: oid1, data: ads.clone() };
        let ou2 = transaction::ObjectUpdate { object_id: oid2, data: ads.clone() };
        let pool_uuid = ids[1];
        let store_id = store::Id { pool_uuid, pool_index: 1u8 };
        let std = ArcData::from(vec![1u8,2u8,3u8]);
        let tx_disposition = transaction::Disposition::VoteCommit;
        let pid1 = paxos::ProposalId { number: 1, peer: 2 };
        let pid2 = paxos::ProposalId { number: 3, peer: 3 };
        let paxos_state = paxos::PersistentState { promised: Some(pid1), accepted: Some((pid2, true)) };
        let tx1 = Tx {
            id: txid,
            txd_location: Some(txd_loc),
            data_locations: Some(vec![u1, u2]),
            state: TransactionRecoveryState {
                transaction_id: txid,
                store_id,
                serialized_transaction_description: std.into(),
                object_updates: vec![ou1, ou2],
                tx_disposition,
                paxos_state
            },
            last_entry_serial: LogEntrySerialNumber(0)
        };
        let mut m = DataMut::with_capacity(4096);
        encode_tx_state(&tx1, &mut m);
        // Static portion plus one fixed-size record per object update.
        assert_eq!(m.len() as u64, STATIC_TX_SIZE + 2 * OBJECT_UPDATE_STATIC_SIZE);
        m.set_offset(0);
        let mut r = Data::from(m);
        let ra = decode_tx_state(&mut r, LogEntrySerialNumber(0)).unwrap();
        let expected = RecoveringTx {
            id: TxId(store_id, txid),
            serialized_transaction_description: txd_loc,
            object_updates: vec![(ids[2], u1), (ids[3], u2)],
            tx_disposition,
            paxos_state,
            last_entry_serial: LogEntrySerialNumber(0)
        };
        assert_eq!(ra, expected);
    }
    // Round-trips an allocation record, including a Short store pointer
    // and a non-empty revision guard.
    #[test]
    fn alloc_encoding() {
        let data_location = FileLocation { file_id: FileId(1), offset: 2,length: 3 };
        let ids = uuids();
        let txid = transaction::Id(ids[0]);
        let mut sp_content: [u8; 22] = [0u8; 22];
        sp_content[0] = 2;
        sp_content[1] = 2;
        sp_content[2] = 2;
        let sp_content = sp_content;
        let store_pointer = store::Pointer::Short{pool_index: 4, nbytes: 3, content: sp_content};
        let object_id = object::Id(ids[1]);
        let kind = object::Kind::KeyValue;
        let size = Some(10);
        let data = ArcDataSlice::from(vec![0u8]);
        let rc = object::Refcount { update_serial: 5, count: 2 };
        let timestamp = hlc::Timestamp::from(10u64);
        let allocation_transaction_id = txid;
        let srg = ArcDataSlice::from(vec![0u8, 1u8, 2u8]);
        let pool_uuid = ids[2];
        let store_id = store::Id { pool_uuid, pool_index: 1u8 };
        let alloc = Alloc {
            data_location: Some(data_location),
            state: AllocationRecoveryState {
                store_id,
                store_pointer: store_pointer.clone(),
                id: object_id,
                kind,
                size,
                data: data.clone(),
                refcount: rc,
                timestamp,
                allocation_transaction_id,
                serialized_revision_guard: srg.clone()
            },
            last_entry_serial: LogEntrySerialNumber(0)
        };
        let mut m = DataMut::with_capacity(4096);
        encode_alloc_state(&alloc, &mut m);
        // Static portion plus the variable store-pointer and guard bytes.
        assert_eq!(m.len() as u64, STATIC_ARS_SIZE + store_pointer.encoded_len() as u64 + srg.len() as u64);
        m.set_offset(0);
        let mut r = Data::from(m);
        let ra = decode_alloc_state(&mut r, LogEntrySerialNumber(0)).unwrap();
        let expected = RecoveringAlloc {
            id: TxId(store_id, txid),
            store_pointer,
            object_id,
            kind,
            size,
            data: data_location,
            refcount: rc,
            timestamp,
            serialized_revision_guard: srg.clone(),
            last_entry_serial: LogEntrySerialNumber(0)
        };
        assert_eq!(ra, expected);
    }
}
/// Walks through the common `println!` formatting features.
pub fn run() {
    // Plain string output.
    println!("Hello from printrs");
    // Basic formatting with a single substitution.
    println!("Number {}", 1);
    // Multiple substitutions, filled in argument order.
    println!("New {} is {}","Name", "Wolf");
    // Positional parameters select arguments by index.
    println!("New {0} is {1}","Name", "Wolf");
    // Named arguments.
    println!("New {name} is {name2}",name = "Name", name2 = "Wolf");
    // Placeholder traits: binary, hex, and octal renderings.
    println!("Binary: {:b} Hex {:x} Octal {:o}", 10, 10, 10);
    // Debug-trait placeholder prints tuples and other Debug types.
    println!("{:?}", (12, true, "hello"));
    // Expressions are evaluated before formatting.
    let sum = 10 + 10;
    println!("10 + 10 = {}", sum);
}
use std::iter::Iterator;
use super::ast::*;
use super::ast::BuilderKind::*;
use super::ast::ScalarKind::*;
use super::ast::Type::*;
use super::ast::ExprKind::*;
use super::partial_types::*;
// TODO: These methods could take a mutable string as an argument, or even a fmt::Format.
/// A trait for printing types.
pub trait PrintableType: Clone {
    /// Renders the type as its concise textual form (e.g. `i32`, `vec[f64]`).
    fn print(&self) -> String;
}
/// Print implementation for full Types
impl PrintableType for Type {
    fn print(&self) -> String {
        match *self {
            Scalar(Bool) => "bool".to_string(),
            Scalar(I32) => "i32".to_string(),
            Scalar(I64) => "i64".to_string(),
            Scalar(F32) => "f32".to_string(),
            Scalar(F64) => "f64".to_string(),
            Vector(ref elem) => format!("vec[{}]", elem.print()),
            // Struct types render as {T1,T2,...}.
            Struct(ref elems) => join("{", ",", "}", elems.iter().map(|e| e.print())),
            // Function types render as |P1,...|(R): params then return type.
            Function(ref params, ref ret) => {
                let mut res = join("|", ",", "|(", params.iter().map(|e| e.print()));
                res.push_str(&ret.print());
                res.push_str(")");
                res
            },
            Builder(Appender(ref t)) => format!("appender[{}]", t.print()),
            Builder(Merger(ref t, op)) => format!("merger[{},{}]", t.print(), op),
        }
    }
}
/// Print implementation for PartialTypes
impl PrintableType for PartialType {
    fn print(&self) -> String {
        use partial_types::PartialType::*;
        use partial_types::PartialBuilderKind::*;
        match *self {
            // Not-yet-inferred types render as a question mark.
            Unknown => "?".to_string(),
            Scalar(Bool) => "bool".to_string(),
            Scalar(I32) => "i32".to_string(),
            Scalar(I64) => "i64".to_string(),
            Scalar(F32) => "f32".to_string(),
            Scalar(F64) => "f64".to_string(),
            Vector(ref elem) => format!("vec[{}]", elem.print()),
            Struct(ref elems) => join("{", ",", "}", elems.iter().map(|e| e.print())),
            // NOTE: partial function types use (P,...)=>R syntax, unlike the
            // |P,...|(R) form used by the full `Type` implementation.
            Function(ref params, ref ret) => {
                let mut res = join("(", ",", ")=>", params.iter().map(|e| e.print()));
                res.push_str(&ret.print());
                res
            },
            Builder(Appender(ref elem)) => format!("appender[{}]", elem.print()),
            Builder(Merger(ref t, op)) => format!("merger[{},{}]", t.print(), op),
        }
    }
}
/// Print a type.
pub fn print_type<T: PrintableType>(ty: &T) -> String {
ty.print()
}
/// Print an expression concisely (without any type annotations).
pub fn print_expr<T: PrintableType>(expr: &Expr<T>) -> String {
    print_expr_impl(expr, false)
}
/// Print an expression with type annotations on all symbols.
pub fn print_typed_expr<T: PrintableType>(expr: &Expr<T>) -> String {
    print_expr_impl(expr, true)
}
/// Main work to print an expression.
///
/// `typed` controls whether symbols and let-bindings carry `:<type>`
/// annotations; all other syntax is identical in both modes.
fn print_expr_impl<T: PrintableType>(expr: &Expr<T>, typed: bool) -> String {
    match expr.kind {
        BoolLiteral(v) => format!("{}", v),
        I32Literal(v) => format!("{}", v),
        // i64 literals carry an `L` suffix to distinguish them from i32.
        I64Literal(v) => format!("{}L", v),
        F32Literal(v) => {
            let mut res = format!("{}", v);
            // Hack to disambiguate from integers.
            if !res.contains(".") {
                res.push_str(".0");
            }
            // f32 literals additionally carry an `F` suffix.
            res.push_str("F");
            res
        }
        F64Literal(v) => {
            let mut res = format!("{}", v);
            // Hack to disambiguate from integers.
            if !res.contains(".") {
                res.push_str(".0");
            }
            res
        }
        Ident(ref symbol) => {
            if typed {
                format!("{}:{}", symbol, expr.ty.print())
            } else {
                format!("{}", symbol)
            }
        }
        BinOp(kind, ref left, ref right) =>
            format!("({}{}{})",
                print_expr_impl(left, typed), kind, print_expr_impl(right, typed)),
        Let(ref symbol, ref value, ref body) => {
            if typed {
                format!("(let {}:{}=({});{})",
                    symbol,
                    value.ty.print(),
                    print_expr_impl(value, typed),
                    print_expr_impl(body, typed))
            } else {
                format!("(let {}=({});{})",
                    symbol,
                    print_expr_impl(value, typed),
                    print_expr_impl(body, typed))
            }
        }
        MakeStruct(ref exprs) =>
            join("{", ",", "}", exprs.iter().map(|e| print_expr_impl(e, typed))),
        MakeVector(ref exprs) =>
            join("[", ",", "]", exprs.iter().map(|e| print_expr_impl(e, typed))),
        // Struct field access uses the `$<index>` form.
        GetField(ref param, index) => format!("{}.${}", print_expr_impl(param, typed), index),
        Lambda(ref params, ref body) => {
            let mut res = join("|", ",", "|", params.iter().map(|e| print_parameter(e, typed)));
            res.push_str(&print_expr_impl(body, typed));
            res
        }
        // A new builder prints simply as its type.
        NewBuilder => expr.ty.print(),
        Res(ref builder) => format!("result({})", print_expr_impl(builder, typed)),
        Merge(ref builder, ref value) => {
            format!("merge({},{})", print_expr_impl(builder, typed), print_expr_impl(value, typed))
        }
        For(ref data, ref builder, ref func) => {
            format!("for({},{},{})",
                print_expr_impl(data, typed),
                print_expr_impl(builder, typed),
                print_expr_impl(func, typed))
        }
        If(ref cond, ref on_true, ref on_false) => {
            format!("if({},{},{})",
                print_expr_impl(cond, typed),
                print_expr_impl(on_true, typed),
                print_expr_impl(on_false, typed))
        }
        Apply(ref func, ref params) => {
            let mut res = format!("({})", print_expr_impl(func, typed));
            res.push_str(&join("(", ",", ")", params.iter().map(|e| print_expr_impl(e, typed))));
            res
        }
    }
}
/// Renders a lambda parameter, appending `:<type>` when `typed` is set.
fn print_parameter<T: PrintableType>(param: &Parameter<T>, typed: bool) -> String {
    if !typed {
        return format!("{}", param.name);
    }
    format!("{}:{}", param.name, param.ty.print())
}
/// Concatenates `start`, the strings separated by `sep`, and `end`.
fn join<T:Iterator<Item=String>>(start: &str, sep: &str, end: &str, strings: T) -> String {
    let body = strings.collect::<Vec<String>>().join(sep);
    let mut out = String::with_capacity(start.len() + body.len() + end.len());
    out.push_str(start);
    out.push_str(&body);
    out.push_str(end);
    out
}
|
use std::path::PathBuf;
use crate::Cargo;
/// Handle for locating and validating external (non-vendored) content.
pub struct External;

impl External {
    /// The external content directory: `<crate dir>/.external`.
    pub fn dir() -> PathBuf {
        Cargo::crate_dir().join(".external")
    }
    /// Checks that external content is present (the directory exists).
    pub fn is_valid() -> bool {
        Self::dir().exists()
    }
}
|
pub mod b16;
pub mod b36;
pub mod bitop;
#[cfg(test)]
mod tests {
    use crate::b36::B36;
    use crate::bitop::BitOp;
    use crate::bitop::BitOpBase;
    use rand::Rng;
    // Number of random samples used by the randomized tests.
    const COUNT : i32 = 100;
    // get_candidates on a known position must match the precomputed mask.
    #[test]
    fn test_candidates() {
        let inturn : u64 = 7913472;
        let opponent : u64 = 251666496;
        let candidates : u64 = 33286000768;
        let ccands : u64 = B36::get_candidates(inturn, opponent);
        assert_eq!(candidates, ccands);
    }
    // Reference popcount: counts set bits one at a time.
    fn bitcount_raw(q : u64) -> i32 {
        let mut c = 0;
        for m in 0..64 {
            if ((q >> m) & 1) == 1 {
                c += 1;
            }
        }
        c
    }
    // Reference least-significant-set-bit index; returns 64 for q == 0.
    fn lsb_raw(q : u64) -> i32 {
        let mut c = 0;
        for m in 0..64 {
            if ((q >> m) & 1) == 0 {
                c += 1;
            } else {
                break;
            }
        }
        c
    }
    // B36::bitcount / B36::lsb must agree with the bit-by-bit reference
    // implementations on random values restricted to the board mask.
    #[test]
    fn test_bitcount() {
        let mut rng = rand::thread_rng();
        for _ in 0..COUNT {
            let r : u64 = rng.gen();
            let n : u64 = r & B36::MASK;
            let nbits1 = B36::bitcount(n);
            let nbits2 = bitcount_raw(n);
            assert_eq!(nbits1, nbits2);
            let lsbits1 = B36::lsb(n);
            let lsbits2 = lsb_raw(n);
            assert_eq!(lsbits1, lsbits2);
        }
    }
    // Every candidate move from the known position must flip at least one
    // opponent piece when applied via B36::reverse.
    #[test]
    fn test_reverse() {
        let inturn : u64 = 7913472;
        let opponent : u64 = 251666496;
        let mut candidates : u64 = B36::get_candidates(inturn, opponent);
        while candidates != 0 {
            let mut ti = inturn;
            let mut to = opponent;
            let bc : i32 = B36::bitcount(candidates);
            println!("bc = {}", bc);
            let rmove : i32 = B36::lsb(candidates);
            println!("rmove = {}", rmove);
            let reversed : i32 = B36::reverse(rmove, &mut ti, &mut to);
            assert!(reversed > 0);
            println!("reversed = {}", reversed);
            // Clear this candidate bit and continue with the next one.
            let tmask : u64 = 1u64 << rmove;
            candidates ^= tmask;
            println!("----");
        }
    }
}
|
use std::cmp::{max, min};
mod line_bresenham;
pub mod lines;
pub mod point;
pub mod point3;
pub use {lines::line2d, point::Point, point3::Point3};
mod line_vector;
pub use line_bresenham::Bresenham;
pub use line_vector::VectorLine;
mod circle_bresenham;
pub use circle_bresenham::{BresenhamCircle, BresenhamCircleNoDiag};
mod rect;
pub use rect::Rect;
/// Enumeration of available distance algorithms (used by both the 2D and
/// 3D distance queries).
pub enum DistanceAlg {
    /// Straight-line (Euclidean) distance.
    Pythagoras,
    /// Euclidean distance without the square root (cheaper; fine for comparisons).
    PythagorasSquared,
    /// Sum of the per-axis deltas ("taxicab" distance).
    Manhattan,
    /// Greatest per-axis delta.
    Chebyshev,
}
impl DistanceAlg {
/// Provides a 2D distance between points, using the specified algorithm.
pub fn distance2d(self, start: Point, end: Point) -> f32 {
match self {
DistanceAlg::Pythagoras => distance2d_pythagoras(start, end),
DistanceAlg::PythagorasSquared => distance2d_pythagoras_squared(start, end),
DistanceAlg::Manhattan => distance2d_manhattan(start, end),
DistanceAlg::Chebyshev => distance2d_chebyshev(start, end),
}
}
/// Provides a 3D distance between points, using the specified algorithm.
pub fn distance3d(self, start: Point3, end: Point3) -> f32 {
match self {
DistanceAlg::Pythagoras => distance3d_pythagoras(start, end),
DistanceAlg::PythagorasSquared => distance3d_pythagoras_squared(start, end),
DistanceAlg::Manhattan => distance3d_manhattan(start, end),
DistanceAlg::Chebyshev => distance3d_pythagoras(start, end),
}
}
}
/// Enumeration of available line-drawing algorithms (the original comment
/// was copy-pasted from the distance enum).
pub enum LineAlg {
    /// Bresenham's integer line-drawing algorithm.
    Bresenham,
    /// Vector-stepping line algorithm.
    Vector,
}
/// Pythagorean distance between two points with the square root omitted
/// (cheaper; preserves ordering for comparisons).
fn distance2d_pythagoras_squared(start: Point, end: Point) -> f32 {
    // Absolute per-axis delta via max/min, avoiding sign juggling.
    let axis = |a, b| (max(a, b) - min(a, b)) as f32;
    let dx = axis(start.x, end.x);
    let dy = axis(start.y, end.y);
    dx * dx + dy * dy
}
/// Manhattan ("taxicab") distance: sum of the absolute axis deltas.
fn distance2d_manhattan(start: Point, end: Point) -> f32 {
    let axis = |a, b| (max(a, b) - min(a, b)) as f32;
    axis(start.x, end.x) + axis(start.y, end.y)
}
/// Manhattan ("taxicab") distance between two 3D points.
fn distance3d_manhattan(start: Point3, end: Point3) -> f32 {
    let axis = |a, b| (max(a, b) - min(a, b)) as f32;
    axis(start.x, end.x) + axis(start.y, end.y) + axis(start.z, end.z)
}
/// Chebyshev distance between two points.
/// See: http://theory.stanford.edu/~amitp/GameProgramming/Heuristics.html
fn distance2d_chebyshev(start: Point, end: Point) -> f32 {
    let axis = |a, b| (max(a, b) - min(a, b)) as f32;
    let dx = axis(start.x, end.x);
    let dy = axis(start.y, end.y);
    // Diagonal-cost form with a diagonal cost of 1.0; the original float
    // expression is kept verbatim to preserve exact results.
    if dx > dy {
        (dx - dy) + 1.0 * dy
    } else {
        (dy - dx) + 1.0 * dx
    }
}
/// Squared Pythagorean distance between two 3D points (no square root).
fn distance3d_pythagoras_squared(start: Point3, end: Point3) -> f32 {
    let axis = |a, b| (max(a, b) - min(a, b)) as f32;
    let dx = axis(start.x, end.x);
    let dy = axis(start.y, end.y);
    let dz = axis(start.z, end.z);
    dx * dx + dy * dy + dz * dz
}
/// Calculates a Pythagoras distance between two points.
fn distance2d_pythagoras(start: Point, end: Point) -> f32 {
    // Square root of the squared-distance helper.
    distance2d_pythagoras_squared(start, end).sqrt()
}
/// Calculates a Pythagoras distance between two 3D points.
fn distance3d_pythagoras(start: Point3, end: Point3) -> f32 {
    // Square root of the squared-distance helper.
    distance3d_pythagoras_squared(start, end).sqrt()
}
/// From a given start point, project forward radius units at an angle of
/// angle_radians (in radians, despite the historical "degrees" wording).
/// 0 Degrees is north (negative Y), 90 degrees is east (positive X)
pub fn project_angle(start: Point, radius: f32, angle_radians: f32) -> Point {
    // Offsetting the angle by PI (combined with negating the X term below)
    // rotates the standard math convention so that angle 0 points toward
    // negative Y ("north") and angles grow clockwise (PI/2 = positive X).
    let degrees_radians = angle_radians + std::f32::consts::PI;
    Point::new(
        // NOTE: `as i32` truncates toward zero, so coordinates are not
        // rounded to the nearest cell (e.g. 7.07 -> 7 and -7.07 -> -7).
        (0.0 - (start.x as f32 + radius * f32::sin(degrees_radians))) as i32,
        (start.y as f32 + radius * f32::cos(degrees_radians)) as i32,
    )
}
#[cfg(test)]
mod tests {
    use super::{project_angle, DistanceAlg, Point, Point3};

    /// Asserts that `actual` is within machine epsilon of `expected`.
    fn assert_close(actual: f32, expected: f32) {
        assert!(f32::abs(actual - expected) < std::f32::EPSILON);
    }

    #[test]
    fn test_pythagoras_distance() {
        // (end point, expected distance) pairs, all measured from the origin.
        let cases = [
            ((5, 0), 5.0),
            ((-5, 0), 5.0),
            ((0, 5), 5.0),
            ((0, -5), 5.0),
            ((5, 5), 7.071_068),
        ];
        for &((x, y), expected) in cases.iter() {
            let d = DistanceAlg::Pythagoras.distance2d(Point::new(0, 0), Point::new(x, y));
            assert_close(d, expected);
        }
    }

    #[test]
    fn test_pythagoras_distance3d() {
        let cases = [((5, 0, 0), 5.0), ((-5, 0, 0), 5.0), ((5, 5, 5), 8.660_254_5)];
        for &((x, y, z), expected) in cases.iter() {
            let d = DistanceAlg::Pythagoras.distance3d(Point3::new(0, 0, 0), Point3::new(x, y, z));
            assert_close(d, expected);
        }
    }

    #[test]
    fn test_pythagoras_squared_distance() {
        let cases = [
            ((5, 0), 25.0),
            ((-5, 0), 25.0),
            ((0, 5), 25.0),
            ((0, -5), 25.0),
            ((5, 5), 50.0),
        ];
        for &((x, y), expected) in cases.iter() {
            let d = DistanceAlg::PythagorasSquared.distance2d(Point::new(0, 0), Point::new(x, y));
            assert_close(d, expected);
        }
    }

    #[test]
    fn test_pythagoras_squared_distance3d() {
        let cases = [((5, 0, 0), 25.0), ((-5, 0, 0), 25.0), ((5, 5, 5), 75.0)];
        for &((x, y, z), expected) in cases.iter() {
            let d =
                DistanceAlg::PythagorasSquared.distance3d(Point3::new(0, 0, 0), Point3::new(x, y, z));
            assert_close(d, expected);
        }
    }

    #[test]
    fn test_manhattan_distance() {
        let cases = [
            ((5, 0), 5.0),
            ((-5, 0), 5.0),
            ((0, 5), 5.0),
            ((0, -5), 5.0),
            ((5, 5), 10.0),
        ];
        for &((x, y), expected) in cases.iter() {
            let d = DistanceAlg::Manhattan.distance2d(Point::new(0, 0), Point::new(x, y));
            assert_close(d, expected);
        }
    }

    #[test]
    fn test_manhattan_distance3d() {
        let cases = [((5, 0, 0), 5.0), ((-5, 0, 0), 5.0), ((5, 5, 5), 15.0)];
        for &((x, y, z), expected) in cases.iter() {
            let d = DistanceAlg::Manhattan.distance3d(Point3::new(0, 0, 0), Point3::new(x, y, z));
            assert_close(d, expected);
        }
    }

    #[test]
    fn test_chebyshev_distance() {
        let cases = [
            ((5, 0), 5.0),
            ((-5, 0), 5.0),
            ((0, 5), 5.0),
            ((0, -5), 5.0),
            ((5, 5), 5.0),
        ];
        for &((x, y), expected) in cases.iter() {
            let d = DistanceAlg::Chebyshev.distance2d(Point::new(0, 0), Point::new(x, y));
            assert_close(d, expected);
        }
    }

    #[test]
    fn test_project_angle() {
        let start = Point::new(0, 0);
        // (angle in radians, expected destination) at radius 10 from origin.
        let cases = [
            (0.0, (0, -10)),                                          // north
            (std::f32::consts::PI, (0, 10)),                          // 180 degrees
            (std::f32::consts::PI / 2.0, (10, 0)),                    // 90 degrees, east
            (std::f32::consts::PI + (std::f32::consts::PI / 2.0), (-10, 0)), // 270 degrees, west
            (std::f32::consts::FRAC_PI_4, (7, -7)),                   // 45 degrees, north-east
            (2.35619, (7, 7)),                                        // 135 degrees, south-east
            (3.92699, (-7, 7)),                                       // 225 degrees, south-west
            (5.49779, (-7, -7)),                                      // 315 degrees, north-west
        ];
        for &(angle, (x, y)) in cases.iter() {
            assert_eq!(project_angle(start, 10.0, angle), Point::new(x, y));
        }
    }
}
|
extern crate base;
extern crate parser;
extern crate check;
mod functions;
use functions::*;
#[test]
fn dont_stack_overflow_on_let_bindings() {
    // Regression test: a typechecker that recurses once per `let` binding
    // would blow the stack on a deeply nested `let _ = 1 in ...` chain.
    //
    // The previous version spelled the program out as a ~730-line raw string
    // literal, which had accidentally inconsistent runs (doubled `let _ = 1`
    // lines with no `in` between them). Generating the source keeps the
    // stress depth while making the shape explicit and uniform.
    let depth = 400;
    let text = format!("{}1", "let _ = 1\nin\n".repeat(depth));
    typecheck(&text).unwrap();
}
|
use apllodb_shared_components::{ApllodbError, ApllodbResult, NnSqlValue, SqlValue};
use apllodb_sql_parser::apllodb_ast;
use crate::ast_translator::AstTranslator;
impl AstTranslator {
    /// Translates an AST integer literal into the narrowest fitting SQL
    /// integer value (SMALLINT, then INTEGER, then BIGINT).
    ///
    /// # Failures
    ///
    /// - [DataExceptionNumericValueOutOfRange](apllodb_shared_components::SqlState::DataExceptionNumericValueOutOfRange) when:
    ///   - `ast_integer_constant` is out of range of `i64`.
    pub(crate) fn integer_constant(
        ast_integer_constant: apllodb_ast::IntegerConstant,
    ) -> ApllodbResult<SqlValue> {
        let s = ast_integer_constant.0;

        // Try progressively wider integer types; the first successful parse wins.
        let nn_value = if let Ok(v) = s.parse::<i16>() {
            NnSqlValue::SmallInt(v)
        } else if let Ok(v) = s.parse::<i32>() {
            NnSqlValue::Integer(v)
        } else if let Ok(v) = s.parse::<i64>() {
            NnSqlValue::BigInt(v)
        } else {
            return Err(ApllodbError::data_exception_numeric_value_out_of_range(
                format!(
                    "integer value `{}` could not be parsed as i64 (max supported size)",
                    s
                ),
            ));
        };

        Ok(SqlValue::NotNull(nn_value))
    }
}
#[cfg(test)]
mod test {
    use pretty_assertions::assert_eq;

    use apllodb_shared_components::{ApllodbResult, NnSqlValue, SqlState, SqlType, SqlValue};
    use apllodb_sql_parser::apllodb_ast;

    use super::AstTranslator;

    #[test]
    fn test_ok() -> ApllodbResult<()> {
        // (input literal, expected SQL type, expected numeric value)
        let cases: Vec<(&str, SqlType, i64)> = vec![
            // Fits in i16 -> SMALLINT.
            ("0", SqlType::small_int(), 0),
            ("-1", SqlType::small_int(), -1),
            ("32767", SqlType::small_int(), i16::MAX as i64),
            ("-32768", SqlType::small_int(), i16::MIN as i64),
            // Just past the i16 range -> INTEGER.
            ("32768", SqlType::integer(), (i16::MAX as i64) + 1),
            ("-32769", SqlType::integer(), (i16::MIN as i64) - 1),
            ("2147483647", SqlType::integer(), i32::MAX as i64),
            ("-2147483648", SqlType::integer(), i32::MIN as i64),
            // Just past the i32 range -> BIGINT.
            ("2147483648", SqlType::big_int(), (i32::MAX as i64) + 1),
            ("-2147483649", SqlType::big_int(), (i32::MIN as i64) - 1),
            ("9223372036854775807", SqlType::big_int(), i64::MAX),
            ("-9223372036854775808", SqlType::big_int(), i64::MIN),
        ];

        for (input, expected_type, expected_value) in cases {
            log::debug!("testing input `{:?}`...", input);

            let ast = apllodb_ast::IntegerConstant(input.to_string());
            match AstTranslator::integer_constant(ast)? {
                SqlValue::NotNull(v) => {
                    assert_eq!(v.sql_type(), expected_type);
                    assert_eq!(v, NnSqlValue::BigInt(expected_value));
                }
                _ => unreachable!(),
            }
        }
        Ok(())
    }

    #[test]
    fn test_err() {
        // Both literals are one past the i64 range, so translation must fail.
        for input in vec!["9223372036854775808", "-9223372036854775809"] {
            log::debug!("testing input `{:?}`...", input);
            let ast = apllodb_ast::IntegerConstant(input.to_string());
            assert_eq!(
                *AstTranslator::integer_constant(ast).unwrap_err().kind(),
                SqlState::DataExceptionNumericValueOutOfRange
            );
        }
    }
}
|
/// Replies as Bob, a lackadaisical teenager:
/// - silence (empty or whitespace-only input) -> "Fine. Be that way!"
/// - a question -> "Sure."
/// - shouting (contains letters, none of them lowercase) -> "Whoa, chill out!"
/// - anything else -> "Whatever."
pub fn reply(input: &str) -> &str {
    // Trim so that "   " counts as silence and trailing whitespace does not
    // hide a question mark (previously "   " was misclassified as shouting).
    let message = input.trim();
    if message.is_empty() {
        "Fine. Be that way!"
    } else if message.ends_with('?') {
        "Sure."
    } else if message.chars().any(|c| c.is_uppercase())
        && !message.chars().any(|c| c.is_lowercase())
    {
        // Shouting requires at least one uppercase letter; previously inputs
        // with no letters at all (e.g. "1, 2, 3") were treated as shouting
        // because they merely lacked lowercase characters.
        "Whoa, chill out!"
    } else {
        "Whatever."
    }
}
|
use std::cmp::Ordering;
use std::fs;
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
pub type Name = String;
pub type Title = String;
pub type Children = Vec<Node>;
fn main() {
    // Build the documentation tree rooted at ./example and print it as a
    // SUMMARY.md body.
    let example_root = PathBuf::from("example");
    let hierarchy = get_hierarchy(example_root).unwrap();
    print_hierarchy(hierarchy);
}
/// Renders the tree as markdown and writes it to stdout.
fn print_hierarchy(tree: Node) {
    // Seed the output with the SUMMARY header and a blank separator line.
    let mut lines = vec![String::from("# SUMMARY"), String::new()];
    hierarchy_to_md(&tree, "", 0, &mut lines);
    println!("{}", lines.join("\n"));
}
/// Recursively appends one markdown list line per node to `output`.
fn hierarchy_to_md(tree: &Node, path: &str, depth: usize, output: &mut Vec<String>) {
    // Depth 0 is the invisible root: it emits no line of its own.
    let indent = if depth == 0 {
        None
    } else {
        Some(" ".repeat((depth - 1) * 4))
    };

    match tree {
        Node::File(name, title) => {
            if let Some(indent) = indent {
                // Only regular markdown pages are listed; the summary itself
                // and folder landing pages are excluded.
                let is_listed =
                    name != "SUMMARY.md" && name != "landing.md" && name.ends_with(".md");
                if is_listed {
                    output.push(format!("{}- [{}]({}/{})", indent, title, path, name));
                }
            }
        }
        Node::Folder(name, title, children) => {
            if let Some(indent) = indent {
                // A folder's entry links to its landing page.
                output.push(format!("{}- [{}]({}/{}/landing.md)", indent, title, path, name));
            }
            let child_path = if depth == 0 {
                ".".to_string()
            } else {
                format!("{}/{}", path, name)
            };
            for child in children {
                hierarchy_to_md(child, &child_path, depth + 1, output);
            }
        }
    }
}
/// Recursively builds the documentation tree rooted at `parent`.
///
/// Directories become `Node::Folder` (titled from their `landing.md`, falling
/// back to the directory name); `.md` files become `Node::File` (titled from
/// their first usable line, falling back to the file name). Children are
/// sorted (by title, per `Ord for Node`). Returns `None` on any I/O or
/// name-decoding failure.
fn get_hierarchy(parent: PathBuf) -> Option<Node> {
    let parent_name = parent.file_name()?.to_str()?.to_string();
    let mut children = vec![];
    for entry in fs::read_dir(&parent).ok()? {
        let entry = entry.ok()?;
        let path = entry.path();
        let metadata = fs::metadata(&path).ok()?;
        if metadata.is_dir() {
            // BUG FIX: was `.unwrap()`, which panicked on failure even though
            // this function reports failure via `Option`. Propagate instead.
            children.push(get_hierarchy(path)?);
        } else {
            let file_name = path.file_name()?.to_str()?.to_string();
            if !file_name.ends_with(".md") {
                continue;
            }
            // Fall back to the raw file name when no title can be extracted.
            // (`file_name` is already owned; the old code re-derived it from
            // `path` and called a redundant `.to_string()` on a `String`.)
            let title = get_title(&path).unwrap_or_else(|| file_name.clone());
            children.push(Node::File(file_name, title));
        }
    }
    // Folder title comes from its landing page, defaulting to the directory name.
    let title = get_title(&parent.join("landing.md")).unwrap_or_else(|| parent_name.clone());
    children.sort();
    Some(Node::Folder(parent_name, title, children))
}
/// Extracts a human-readable title from the first non-empty line of `path`.
///
/// Underscores become spaces and any character outside the whitelist is
/// dropped. Returns `None` when the file cannot be opened or contains no
/// line that survives filtering.
fn get_title(path: &PathBuf) -> Option<String> {
    let file = fs::File::open(&path).ok()?;
    let mut buffer = BufReader::new(file);
    let mut line = String::new();
    // Characters allowed to remain in a title; everything else is stripped.
    let whitelist: Vec<char> =
        "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 .,!?-+"
            .chars()
            .collect();
    // Keep reading lines until one survives filtering, or EOF.
    while line.is_empty() {
        let result = buffer.read_line(&mut line);
        // `Ok(0)` means end of file: stop and report whatever we have.
        if let Ok(0) = result {
            break;
        }
        // Replace underscores with spaces, trim, and drop non-whitelisted
        // characters. If this empties the line, the loop reads the next one.
        line = line
            .replace("_", " ")
            .trim()
            .chars()
            .filter(|it| whitelist.contains(it))
            .collect();
        line = line.trim().to_string();
    }
    if line.trim().is_empty() {
        None
    } else {
        Some(line)
    }
}
// region Node
/// A node in the documentation hierarchy: either a folder with children or a
/// single file. Each variant carries the on-disk name and a display title.
#[derive(Debug, Eq)]
pub enum Node {
    /// A directory: (name, title, child nodes).
    Folder(Name, Title, Children),
    /// A file: (name, title).
    File(Name, Title),
}
impl Node {
    /// Returns the display title regardless of node kind.
    fn title(&self) -> &str {
        match self {
            Node::Folder(_, title, _) | Node::File(_, title) => title,
        }
    }
}
impl Ord for Node {
    // Nodes order by display title, so sibling entries sort alphabetically.
    fn cmp(&self, other: &Self) -> Ordering {
        self.title().cmp(other.title())
    }
}
impl PartialOrd for Node {
    // Delegates to `cmp`, as required for consistency with `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialEq for Node {
    // Equality considers only the title, matching the ordering above.
    // NOTE(review): two distinct files sharing a title compare equal here.
    fn eq(&self, other: &Self) -> bool {
        self.title().eq(other.title())
    }
}
// endregion
|
// Copyright (c) 2018-2020 Brendan Molloy <brendan@bbqsrc.net>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::ExampleValues;
use std::{fmt::Display, rc::Rc};
/// Output text captured while running a test or step.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CapturedOutput {
    /// Captured standard-output text.
    pub out: String,
    /// Captured standard-error text.
    pub err: String,
}
/// A source-code position (file, line, column).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Location {
    /// Source file path, or `"<unknown>"` when unavailable (see `Location::unknown`).
    pub file: String,
    pub line: u32,
    pub column: u32,
}
impl Location {
pub fn unknown() -> Self {
Location {
file: "<unknown>".into(),
line: 0,
column: 0,
}
}
}
impl Display for Location {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"\u{00a0}{}:{}:{}\u{00a0}",
&self.file, self.line, self.column
)
}
}
/// Details about a panic: where it happened and its payload message.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PanicInfo {
    /// Where the panic was raised.
    pub location: Location,
    /// The panic payload, rendered as a string.
    pub payload: String,
}
impl PanicInfo {
    /// Placeholder used when a failure produced no recoverable panic details.
    pub fn unknown() -> Self {
        Self {
            location: Location::unknown(),
            payload: String::from("(No panic info was found?)"),
        }
    }
}
/// Outcome of running a single test, generic over `W` (presumably the
/// cucumber "world" state — confirm against the runner).
pub enum TestEvent<W> {
    /// The test has no implementation.
    Unimplemented,
    /// The test was skipped.
    Skipped,
    /// The test passed; carries the final `W` value and captured output.
    Success(W, CapturedOutput),
    /// The test failed; carries panic details and captured output.
    Failure(PanicInfo, CapturedOutput),
}
/// Lifecycle events for a single scenario step.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum StepEvent {
    /// The step is about to run.
    Starting,
    /// No matching step implementation exists.
    Unimplemented,
    /// The step was skipped.
    Skipped,
    /// The step succeeded; carries captured output.
    Passed(CapturedOutput),
    /// The step failed; carries captured output and panic details.
    Failed(CapturedOutput, PanicInfo),
}
/// Lifecycle events for a single scenario.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ScenarioEvent {
    /// The scenario is starting, with any example-table values in effect.
    Starting(ExampleValues),
    /// A background step produced an event.
    Background(Rc<gherkin::Step>, StepEvent),
    /// A regular step produced an event.
    Step(Rc<gherkin::Step>, StepEvent),
    /// The whole scenario was skipped.
    Skipped,
    /// Every step passed.
    Passed,
    /// At least one step failed.
    Failed,
}
/// Lifecycle events for a gherkin rule.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RuleEvent {
    /// The rule is starting.
    Starting,
    /// One of the rule's scenarios produced an event.
    Scenario(Rc<gherkin::Scenario>, ScenarioEvent),
    /// The rule was skipped.
    Skipped,
    /// All scenarios in the rule passed.
    Passed,
    /// At least one scenario in the rule failed.
    Failed,
}
/// Lifecycle events for a feature.
#[derive(Debug, Clone)]
pub enum FeatureEvent {
    /// The feature is starting.
    Starting,
    /// A top-level scenario produced an event.
    Scenario(Rc<gherkin::Scenario>, ScenarioEvent),
    /// A rule within the feature produced an event.
    Rule(Rc<gherkin::Rule>, RuleEvent),
    /// The feature finished (no overall pass/fail carried here).
    Finished,
}
/// Top-level events for an entire cucumber run.
#[derive(Debug, Clone)]
pub enum CucumberEvent {
    /// The run is starting.
    Starting,
    /// A feature produced an event.
    Feature(Rc<gherkin::Feature>, FeatureEvent),
    /// The run finished.
    Finished,
}
|
use super::super::mem;
use crate::cpu::{interrupt, ioregister};
/// Emulated Game Boy timer state driving the DIV/TIMA I/O registers.
pub struct Timer {
    /// The timer overflow behavior is delayed.
    /// When true, the previous `update` incremented TIMA past 0xFF; the TMA
    /// reload and interrupt request happen on the *next* `update` call.
    timer_overflow: bool,
    /// Cycles accumulated toward the next TIMA increment; cleared when TIMA ticks.
    tima_cycles_counter: u32,
}
impl Default for Timer {
    /// A fresh timer: no pending overflow, zeroed TIMA cycle counter.
    fn default() -> Timer {
        let timer_overflow = false;
        let tima_cycles_counter = 0;
        Timer {
            timer_overflow,
            tima_cycles_counter,
        }
    }
}
impl Timer {
    /// Advances the timer by `cycles` CPU cycles: increments the 16-bit
    /// internal counter (exposed through the DIV and internal-counter I/O
    /// registers), steps TIMA at the rate selected by TAC, and requests a
    /// timer interrupt one update *after* TIMA overflows.
    pub fn update(&mut self, cycles: u32, memory: &mut mem::Memory) {
        // The 16-bit internal counter is split across two registers:
        // DIV holds the high byte, TIMER_INTERNAL_COUNTER the low byte.
        let internal_counter = memory.read_byte(ioregister::TIMER_INTERNAL_COUNTER_ADDR);
        let div = memory.read_byte(ioregister::DIV_REGISTER_ADDR);
        let old_internal_timer = ((div as u16) << 8) | internal_counter as u16;
        let internal_timer = old_internal_timer.wrapping_add(cycles as u16);
        memory.write_byte(
            ioregister::TIMER_INTERNAL_COUNTER_ADDR,
            internal_timer as u8,
        );
        memory.write_byte(ioregister::DIV_REGISTER_ADDR, (internal_timer >> 8) as u8);
        if self.timer_overflow {
            // An overflow was flagged on the previous update: reload TIMA
            // from TMA and raise the interrupt now, one step late — this is
            // the delayed overflow behavior noted on the struct.
            self.timer_overflow = false;
            let tima = memory.read_byte(ioregister::TMA_REGISTER_ADDR);
            memory.write_byte(ioregister::TIMA_REGISTER_ADDR, tima);
            interrupt::request(interrupt::Interrupt::TimerOverflow, memory);
        } else {
            let tac = memory.read_byte(ioregister::TAC_REGISTER_ADDR);
            // timer start bit is on
            if (tac >> 2) & 0b1 == 0b1 {
                // TODO: these numbers never change, move them out to some static const.
                // Converts a timer frequency (Hz) into CPU cycles per TIMA tick.
                let fc = |hz| -> u32 { ioregister::CPU_FREQUENCY_HZ / (1_000_000 / hz) };
                // TAC bits 0-1 select the TIMA tick frequency.
                let freq_cycles = match tac & 0b11 {
                    0 => fc(4096u32),
                    1 => fc(262144u32),
                    2 => fc(65536u32),
                    3 => fc(16384u32),
                    _ => unreachable!(),
                };
                //TODO implement glitch?
                self.tima_cycles_counter = self.tima_cycles_counter.wrapping_add(cycles);
                // Enough cycles accumulated: step TIMA once.
                // NOTE(review): uses `>` (not `>=`) and resets the counter to
                // 0 rather than subtracting `freq_cycles`, so leftover cycles
                // are discarded — confirm this matches intended timing.
                if self.tima_cycles_counter > freq_cycles {
                    let tima = memory
                        .read_byte(ioregister::TIMA_REGISTER_ADDR)
                        .wrapping_add(1);
                    if tima == 0 {
                        // overflows
                        self.timer_overflow = true;
                    }
                    memory.write_byte(ioregister::TIMA_REGISTER_ADDR, tima);
                    self.tima_cycles_counter = 0;
                }
            }
        }
    }
}
|
#![doc(
test(attr(deny(warnings))),
test(attr(allow(bare_trait_objects, unknown_lints)))
)]
#![warn(missing_docs)]
// Don't fail on links to things not enabled in features
#![allow(
unknown_lints,
renamed_and_removed_lints,
intra_doc_link_resolution_failure,
broken_intra_doc_links
)]
// These little nifty labels saying that something needs a feature to be enabled
#![cfg_attr(docsrs, feature(doc_cfg))]
//! Library for easier and safe Unix signal handling
//!
//! Unix signals are inherently hard to handle correctly, for several reasons:
//!
//! * They are a global resource. If a library wants to set its own signal handlers, it risks
//! disrupting some other library. It is possible to chain the previous signal handler, but then
//! it is impossible to remove the old signal handlers from the chains in any practical manner.
//! * They can be called from whatever thread, requiring synchronization. Also, because they can
//!   interrupt a thread at any time, most handling is race-prone.
//! * According to the POSIX standard, the set of functions one may call inside a signal handler is
//! limited to very few of them. To highlight, mutexes (or other locking mechanisms) and memory
//! allocation and deallocation is *not* allowed.
//!
//! # The goal of the library
//!
//! The aim is to make subscriptions to signals a „structured“ resource, in a similar way memory
//! allocation is ‒ parts of the program can independently subscribe and it's the same part of the
//! program that can give them up, independently of what the other parts do. Therefore, it is
//! possible to register multiple actions to the same signal.
//!
//! Another goal is to shield applications away from differences between platforms. Various Unix
//! systems have little quirks and differences that need to be worked around and that's not
//! something every application should be dealing with. We even try to provide some support for
//! Windows, but we lack the expertise in that area, so that one is not complete and is a bit rough
//! (if you know how it works there and are willing to either contribute the code or consult,
//! please get in touch).
//!
//! Furthermore, it provides implementation of certain common signal-handling patterns, usable from
//! safe Rust, without the application author needing to learn about *all* the traps.
//!
//! Note that despite everything, there are still some quirks around signal handling that are not
//! possible to paper over and need to be considered. Also, there are some signal use cases that
//! are inherently unsafe and they are not covered by this crate.
//!
//! # Anatomy of the crate
//!
//! The crate is split into several modules.
//!
//! The easiest way to handle signals is using the [`Signals`][crate::iterator::Signals] iterator
//! thing. It can register for a set of signals and produce them one by one, in a blocking manner.
//! You can reserve a thread for handling them as they come. If you want something asynchronous,
//! there are adaptor crates for the most common asynchronous runtimes. The module also contains
//! ways to build iterators that produce a bit more information than just the signal number.
//!
//! The [`flag`] module contains routines to set a flag based on incoming signals and to do
//! certain actions inside the signal handlers based on the flags (the flags can also be
//! manipulated by the rest of the application). This allows building things like checking if a
//! signal happened on each loop iteration or making sure application shuts down on the second
//! CTRL+C if it got stuck in graceful shutdown requested by the first.
//!
//! The [`consts`] module contains some constants, most importantly the signal numbers themselves
//! (these are just re-exports from [`libc`] and if your OS has some extra ones, you can use them
//! too, this is just for convenience).
//!
//! And last, there is the [`low_level`] module. It contains routines to directly register and
//! unregister arbitrary actions. Some of the patterns in the above modules return a [`SigId`],
//! which can be used with the [`low_level::unregister`] to remove the action. There are also some
//! other utilities that are more suited to build other abstractions with than to use directly.
//!
//! Certain parts of the library can be enabled or disabled with use flags:
//!
//! * `channel`: The [low_level::channel] module (on by default).
//! * `iterator`: The [iterator] module (on by default).
//! * `extended-sig-info`: Support for providing more information in the iterators or from the
//! async adaptor crates. This is off by default.
//!
//! # Limitations
//!
//! * OS limitations still apply. Certain signals are not possible to override or subscribe to ‒
//! `SIGKILL` or `SIGSTOP`.
//! * Overriding some others is probably a very stupid idea (or very unusual needs) ‒ handling eg.
//! `SIGSEGV` is not something done lightly. For that reason, the crate will panic in case
//! registering of these is attempted (see [`FORBIDDEN`][crate::consts::FORBIDDEN]. If you still
//! need to do so, you can find such APIs in the `signal-hook-registry` backend crate, but
//! additional care must be taken.
//! * Interaction with other signal-handling libraries is limited. If signal-hook finds an existing
//! handler present, it chain-calls it from the signal it installs and assumes other libraries
//! would do the same, but that's everything that can be done to make it work with libraries not
//! based on [`signal-hook-registry`](https://lib.rs/signal-hook-registry)
//! (the backend of this crate).
//! * The above chaining contains a race condition in multi-threaded programs, where the previous
//! handler might not get called if it is received during the registration process. This is
//! handled (at least on non-windows platforms) on the same thread where the registration
//! happens, therefore it is advised to register at least one action for each signal of interest
//! early, before any additional threads are started. Registering any additional (or removing and
//! registering again) action on the same signal is without the race condition.
//! * Once at least one action is registered for a signal, the default action is replaced (this is
//! how signals work in the OS). Even if all actions of that signal are removed, `signal-hook`
//! does not restore the default handler (such behaviour would be at times inconsistent with
//! making the actions independent and there's no reasonable way to do so in a race-free way in a
//! multi-threaded program while also dealing with signal handlers registered with other
//! libraries). It is, however, possible to *emulate* the default handler (see the
//! [`emulate_default_handler`][low_level::emulate_default_handler]) ‒ there are only 4
//! default handlers:
//! - Ignore. This is easy to emulate.
//! - Abort. Depending on if you call it from within a signal handler of from outside, the
//! [`low_level::abort`] or [`std::process::abort`] can be used.
//! - Terminate. This can be done with `exit` ([`low_level::exit`] or [`std::process::exit`]).
//! - Stop. It is possible to [`raise`][low_level::raise] the [`SIGSTOP`][consts::SIGSTOP] signal.
//! That one can't be replaced and always stops the application.
//! * Many of the patterns here can collate multiple instances of the same signal into fewer
//! instances, if the application doesn't consume them fast enough. This is consistent with what
//! the kernel does if the application doesn't keep up with them (at least for non-realtime
//! signals, see below), so it is something one needs to deal with anyway.
//! * (By design) the library mostly _postpones_ or helps the user postpone acting on the signals
//! until later. This, in combination with the above collating inside the library may make it
//! unsuitable for realtime signals. These usually want to be handled directly inside the signal
//! handler ‒ which still can be done with [signal_hook_registry::register], but using unsafe and
//! due care. Patterns for working safely with realtime signals are not unwanted in the library,
//! but nobody contributed them yet.
//!
//! # Signal masks
//!
//! As the library uses `sigaction` under the hood, signal masking works as expected (eg. with
//! `pthread_sigmask`). This means, signals will *not* be delivered if the signal is masked in all
//! program's threads.
//!
//! By the way, if you do want to modify the signal mask (or do other Unix-specific magic), the
//! [nix](https://lib.rs/crates/nix) crate offers safe interface to many low-level functions,
//! including
//! [`pthread_sigmask`](https://docs.rs/nix/0.11.0/nix/sys/signal/fn.pthread_sigmask.html).
//!
//! # Portability
//!
//! It should work on any POSIX.1-2001 system, which are all the major big OSes with the notable
//! exception of Windows.
//!
//! Non-standard signals are also supported. Pass the signal value directly from `libc` or use
//! the numeric value directly.
//!
//! ```rust
//! use std::sync::Arc;
//! use std::sync::atomic::{AtomicBool};
//! let term = Arc::new(AtomicBool::new(false));
//! let _ = signal_hook::flag::register(libc::SIGINT, Arc::clone(&term));
//! ```
//!
//! This crate includes a limited support for Windows, based on `signal`/`raise` in the CRT.
//! There are differences in both API and behavior:
//!
//! - Many parts of the library are not available there.
//! - We have only a few signals: `SIGABRT`, `SIGABRT_COMPAT`, `SIGBREAK`,
//! `SIGFPE`, `SIGILL`, `SIGINT`, `SIGSEGV` and `SIGTERM`.
//! - Due to lack of signal blocking, there's a race condition.
//! After the call to `signal`, there's a moment where we miss a signal.
//! That means when you register a handler, there may be a signal which invokes
//! neither the default handler nor the handler you register.
//! - Handlers registered by `signal` in Windows are cleared on first signal.
//! To match behavior in other platforms, we re-register the handler each time the handler is
//! called, but there's a moment where we miss a handler.
//! That means when you receive two signals in a row, there may be a signal which invokes
//! the default handler, nevertheless you certainly have registered the handler.
//!
//! Moreover, signals may not work as you expect: `SIGTERM` isn't actually used and
//! not all `Ctrl-C`s are turned into `SIGINT`.
//!
//! Patches to improve Windows support in this library are welcome.
//!
//! # Features
//!
//! There are several feature flags that control how much is available as part of the crate, some
//! enabled by default.
//!
//! * `channel`: (enabled by default) The [Channel][crate::low_level::channel] synchronization
//! primitive for exporting data out of signal handlers.
//! * `iterator`: (enabled by default) A [Signals iterator][crate::iterator::Signals] that
//! provides a convenient interface for receiving signals in a Rust-friendly way.
//! * `extended-siginfo` adds support for providing extra information as part of the iterator
//! interface.
//!
//! # Examples
//!
//! ## Using a flag to terminate a loop-based application
//!
//! ```rust
//! use std::io::Error;
//! use std::sync::Arc;
//! use std::sync::atomic::{AtomicBool, Ordering};
//!
//! fn main() -> Result<(), Error> {
//! let term = Arc::new(AtomicBool::new(false));
//! signal_hook::flag::register(signal_hook::consts::SIGTERM, Arc::clone(&term))?;
//! while !term.load(Ordering::Relaxed) {
//! // Do some time-limited stuff here
//! // (if this could block forever, then there's no guarantee the signal will have any
//! // effect).
//! #
//! # // Hack to terminate the example, not part of the real code.
//! # term.store(true, Ordering::Relaxed);
//! }
//! Ok(())
//! }
//! ```
//!
//! ## A complex signal handling with a background thread
//!
//! This also handles the double CTRL+C situation (eg. the second CTRL+C kills) and resetting the
//! terminal on `SIGTSTP` (CTRL+Z, curses-based applications should do something like this).
//!
//! ```rust
//! # #[cfg(feature = "extended-siginfo")] pub mod test {
//! use std::io::Error;
//! use std::sync::Arc;
//! use std::sync::atomic::AtomicBool;
//!
//! use signal_hook::consts::signal::*;
//! use signal_hook::consts::TERM_SIGNALS;
//! use signal_hook::flag;
//! // A friend of the Signals iterator, but can be customized by what we want yielded about each
//! // signal.
//! use signal_hook::iterator::SignalsInfo;
//! use signal_hook::iterator::exfiltrator::WithOrigin;
//! use signal_hook::low_level;
//!
//! # struct App;
//! # impl App {
//! # fn run_background() -> Self { Self }
//! # fn wait_for_stop(self) {}
//! # fn restore_term(&self) {}
//! # fn claim_term(&self) {}
//! # fn resize_term(&self) {}
//! # fn reload_config(&self) {}
//! # fn print_stats(&self) {}
//! # }
//! # pub
//! fn main() -> Result<(), Error> {
//! // Make sure double CTRL+C and similar kills
//! let term_now = Arc::new(AtomicBool::new(false));
//! for sig in TERM_SIGNALS {
//! // When terminated by a second term signal, exit with exit code 1.
//! // This will do nothing the first time (because term_now is false).
//! flag::register_conditional_shutdown(*sig, 1, Arc::clone(&term_now))?;
//! // But this will "arm" the above for the second time, by setting it to true.
//! // The order of registering these is important, if you put this one first, it will
//! // first arm and then terminate ‒ all in the first round.
//! flag::register(*sig, Arc::clone(&term_now))?;
//! }
//!
//! // Subscribe to all these signals with information about where they come from. We use the
//! // extra info only for logging in this example (it is not available on all the OSes or at
//! // all the occasions anyway, it may return `Unknown`).
//! let mut sigs = vec![
//! // Some terminal handling
//! SIGTSTP, SIGCONT, SIGWINCH,
//! // Reload of configuration for daemons ‒ um, is this example for a TUI app or a daemon
//! // O:-)? You choose...
//! SIGHUP,
//! // Application-specific action, to print some statistics.
//! SIGUSR1,
//! ];
//! sigs.extend(TERM_SIGNALS);
//! let mut signals = SignalsInfo::<WithOrigin>::new(&sigs)?;
//! # low_level::raise(SIGTERM)?; // Trick to terminate the example
//!
//! // This is the actual application that'll start in its own thread. We'll control it from
//! // this thread based on the signals, but it keeps running.
//! // This is called after all the signals got registered, to avoid the short race condition
//! // in the first registration of each signal in multi-threaded programs.
//! let app = App::run_background();
//!
//! // Consume all the incoming signals. This happens in "normal" Rust thread, not in the
//! // signal handlers. This means that we are allowed to do whatever we like in here, without
//! // restrictions, but it also means the kernel believes the signal already got delivered, we
//! // handle them in delayed manner. This is in contrast with eg the above
//! // `register_conditional_shutdown` where the shutdown happens *inside* the handler.
//! let mut has_terminal = true;
//! for info in &mut signals {
//! // Will print info about signal + where it comes from.
//! eprintln!("Received a signal {:?}", info);
//! match info.signal {
//! SIGTSTP => {
//! // Restore the terminal to non-TUI mode
//! if has_terminal {
//! app.restore_term();
//! has_terminal = false;
//! // And actually stop ourselves.
//! low_level::emulate_default_handler(SIGTSTP)?;
//! }
//! }
//! SIGCONT => {
//! if !has_terminal {
//! app.claim_term();
//! has_terminal = true;
//! }
//! }
//! SIGWINCH => app.resize_term(),
//! SIGHUP => app.reload_config(),
//! SIGUSR1 => app.print_stats(),
//! term_sig => { // These are all the ones left
//! eprintln!("Terminating");
//! assert!(TERM_SIGNALS.contains(&term_sig));
//! break;
//! }
//! }
//! }
//!
//! // If during this another termination signal comes, the trick at the top would kick in and
//! // terminate early. But if it doesn't, the application shuts down gracefully.
//! app.wait_for_stop();
//!
//! Ok(())
//! }
//! # }
//! # fn main() {
//! # #[cfg(feature = "extended-siginfo")] test::main().unwrap();
//! # }
//! ```
//!
//! # Asynchronous runtime support
//!
//! If you are looking for integration with an asynchronous runtime take a look at one of the
//! following adapter crates:
//!
//! * [`signal-hook-async-std`](https://docs.rs/signal-hook-async-std) for async-std support
//! * [`signal-hook-mio`](https://docs.rs/signal-hook-mio) for MIO support
//! * [`signal-hook-tokio`](https://docs.rs/signal-hook-tokio) for Tokio support
//!
//! Feel free to open a pull request if you want to add support for runtimes not mentioned above.
//!
//! # Porting from previous versions
//!
//! There were some noisy changes when going from 0.2 version to the 0.3 version. In particular:
//!
//! * A lot of things moved around to make the structure of the crate a bit more understandable.
//! Most of the time it should be possible to just search the documentation for the name that
//! can't be resolved to discover the new location.
//! - The signal constants (`SIGTERM`, for example) are in [`consts`] submodule (individual
//! imports) and in the [`consts::signal`] (for wildcard import of all of them).
//! - Some APIs that are considered more of a low-level building blocks than for casual day to
//! day use are now in the [`low_level`] submodule.
//! * The previous version contained the `cleanup` module that allowed for removal of the actions
//! in rather destructive way (nuking actions of arbitrary other parts of the program). This is
//! completely gone in this version. The use case of shutting down the application on second
//! CTRL+C is now supported by a pattern described in the [`flag`] submodule. For other similar
//! needs, refer above for emulating default handlers.
pub mod flag;
#[cfg(all(not(windows), feature = "iterator"))]
#[cfg_attr(docsrs, doc(cfg(all(not(windows), feature = "iterator"))))]
pub mod iterator;
pub mod low_level;
/// The low-level constants.
///
/// Like the signal numbers.
pub mod consts {
    use libc::c_int;
    /// The signal constants.
    ///
    /// Can be mass-imported by `use signal_hook::consts::signal::*`, without polluting the
    /// namespace with other names. Also available in the [`consts`][crate::consts] directly (but
    /// with more constants around).
    pub mod signal {
        // The POSIX signal set, re-exported from libc on every Unix-like target.
        #[cfg(not(windows))]
        pub use libc::{
            SIGABRT, SIGALRM, SIGBUS, SIGCHLD, SIGCONT, SIGFPE, SIGHUP, SIGILL, SIGINT, SIGKILL,
            SIGPIPE, SIGPROF, SIGQUIT, SIGSEGV, SIGSTOP, SIGSYS, SIGTERM, SIGTRAP, SIGTSTP,
            SIGTTIN, SIGTTOU, SIGURG, SIGUSR1, SIGUSR2, SIGVTALRM, SIGWINCH, SIGXCPU, SIGXFSZ,
        };
        // SIGIO does not exist on Haiku, hence the extra gate on top of `not(windows)`.
        #[cfg(not(any(windows, target_os = "haiku")))]
        pub use libc::SIGIO;
        // SIGINFO is a BSD-family extension (also present on macOS).
        #[cfg(any(
            target_os = "freebsd",
            target_os = "dragonfly",
            target_os = "netbsd",
            target_os = "openbsd",
            target_os = "macos"
        ))]
        pub use libc::SIGINFO;
        // The small subset of signals the Windows CRT supports.
        #[cfg(windows)]
        pub use libc::{SIGABRT, SIGFPE, SIGILL, SIGINT, SIGSEGV, SIGTERM};
        // NOTE: they perhaps deserve backport to libc.
        #[cfg(windows)]
        /// Same as `SIGABRT`, but the number is compatible to other platforms.
        pub const SIGABRT_COMPAT: libc::c_int = 6;
        #[cfg(windows)]
        /// Ctrl-Break is pressed for Windows Console processes.
        pub const SIGBREAK: libc::c_int = 21;
    }
    // Flatten `signal::*` into `consts` as well, alongside the extra items below.
    pub use self::signal::*;
    pub use signal_hook_registry::FORBIDDEN;
    /// Various signals commonly requesting shutdown of an application.
    #[cfg(not(windows))]
    pub const TERM_SIGNALS: &[c_int] = &[SIGTERM, SIGQUIT, SIGINT];
    /// Various signals commonly requesting shutdown of an application.
    // Windows variant omits SIGQUIT, which the CRT does not provide.
    #[cfg(windows)]
    pub const TERM_SIGNALS: &[c_int] = &[SIGTERM, SIGINT];
}
pub use signal_hook_registry::SigId;
|
// svd2rust-generated accessors for the LPTIM peripheral-ID register.
#[doc = "Register `LPTIM_PIDR` reader"]
pub type R = crate::R<LPTIM_PIDR_SPEC>;
#[doc = "Field `P_ID` reader - P_ID"]
pub type P_ID_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - P_ID"]
    #[inline(always)]
    pub fn p_id(&self) -> P_ID_R {
        // The field spans the full 32-bit register, so no shift/mask is needed.
        P_ID_R::new(self.bits)
    }
}
#[doc = "LPTIM peripheral type identification register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`lptim_pidr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct LPTIM_PIDR_SPEC;
impl crate::RegisterSpec for LPTIM_PIDR_SPEC {
    type Ux = u32;
}
// Read-only register: only `Readable` is implemented, no `Writable`.
#[doc = "`read()` method returns [`lptim_pidr::R`](R) reader structure"]
impl crate::Readable for LPTIM_PIDR_SPEC {}
#[doc = "`reset()` method sets LPTIM_PIDR to value 0x0012_0011"]
impl crate::Resettable for LPTIM_PIDR_SPEC {
    const RESET_VALUE: Self::Ux = 0x0012_0011;
}
|
use std::collections::BTreeSet;
use ggez::{Context};
use ggez::graphics::{MeshBuilder,DrawMode,Color,Image,draw};
use ggez::nalgebra::Point2;
use crate::shot::{Shot,ShotStatus};
use crate::enemy::Enemy;
use crate::enemy_grid::EnemyGrid;
use crate::common::Point;
// Ship sprite height in pixels; also reused as the margin the ship keeps
// from the playfield edges and as the vertical draw offset.
// NOTE(review): assumed to match /ship-center.png dimensions — confirm.
const PLAYERSIZE: f32 = 81.0;
// Ship sprite width in pixels, used to centre newly fired shots.
const PLAYERWIDTH: f32 = 63.0;
/// The player's ship: position, fire cooldown and the shots in flight.
pub struct Player {
    // Playfield width in pixels (clamps horizontal movement).
    w: f32,
    // Playfield height in pixels (the ship is drawn near the bottom edge).
    h: f32,
    // Current horizontal position of the ship.
    x: f32,
    // Frames remaining until the next shot may be fired (0 = ready).
    cooldown: i32,
    // Horizontal velocity applied on every update() tick.
    motion: f32,
    // Shots currently in flight.
    pub shots: Vec<Shot>,
    // Ship sprite loaded from /ship-center.png.
    image: Image,
}
impl Player {
    /// Creates a player centred horizontally in a `w` x `h` playfield.
    ///
    /// Panics if the ship sprite cannot be loaded.
    pub fn new(w: f32, h: f32, ctx: &mut Context) -> Self {
        let image =
            Image::new(ctx, "/ship-center.png").expect("failed to load /ship-center.png");
        Player {
            w,
            h,
            x: w / 2.0,
            cooldown: 0,
            motion: 0.0,
            shots: Vec::new(),
            image,
        }
    }

    /// Sets the horizontal velocity applied on every `update` tick.
    pub fn set_motion(&mut self, motion: f32) {
        self.motion = motion;
    }

    /// Fires a shot from the ship, unless the cooldown is still running.
    pub fn shoot(&mut self) {
        if self.cooldown == 0 {
            // Spawn level with the sprite's top edge, offset by half the
            // sprite width so the shot leaves from the middle of the ship.
            self.shots
                .push(Shot::new(self.x + PLAYERWIDTH / 2.0, self.h - PLAYERSIZE));
            self.cooldown = 10; // frames until the next shot is allowed
        }
    }

    /// Draws the ship sprite.
    ///
    /// The mesh builder is no longer used now that the old polygon rendering
    /// is gone; the parameter is kept so call sites stay uniform with
    /// `draw_shots`.
    pub fn draw(&mut self, ctx: &mut Context, _mb: &mut MeshBuilder) {
        let pos: Point2<f32> = Point2::new(self.x, self.h - PLAYERSIZE);
        let _ = draw(ctx, &self.image, (pos, 0.0, Color::new(1.0, 1.0, 1.0, 1.0)));
    }

    /// Draws every shot currently in flight.
    pub fn draw_shots(&mut self, ctx: &mut Context, mb: &mut MeshBuilder) {
        for shot in &mut self.shots {
            shot.draw(ctx, mb);
        }
    }

    /// Removes every (enemy, shot) pair that collides this frame.
    pub fn check_hits(&mut self, enemy_grid: &mut EnemyGrid) {
        // Collect indices first: the vectors cannot be mutated mid-iteration.
        // BTreeSet both dedupes and keeps the indices sorted, so the reverse
        // removal below never invalidates a still-pending index.
        let mut enemies_to_remove = BTreeSet::new();
        let mut shots_to_remove = BTreeSet::new();
        for (i, enemy) in enemy_grid.enemies.iter().enumerate() {
            for (j, shot) in self.shots.iter().enumerate() {
                if enemy.is_hit(shot, enemy_grid.position) {
                    enemies_to_remove.insert(i);
                    shots_to_remove.insert(j);
                }
            }
        }
        // Remove back-to-front so earlier indices stay valid.
        for &i in enemies_to_remove.iter().rev() {
            enemy_grid.enemies.remove(i);
        }
        for &j in shots_to_remove.iter().rev() {
            self.shots.remove(j);
        }
    }

    /// Advances the ship and its shots by one frame.
    pub fn update(&mut self) {
        self.x += self.motion;
        // Clamp the ship inside the playfield and stop it at the walls.
        if self.x <= PLAYERSIZE {
            self.x = PLAYERSIZE;
            self.motion = 0.0;
        } else if self.x >= self.w - PLAYERSIZE {
            self.x = self.w - PLAYERSIZE;
            self.motion = 0.0;
        }
        if self.cooldown > 0 {
            self.cooldown -= 1;
        }
        // Advance every shot, remembering which ones left the screen.
        let mut shots_to_remove = BTreeSet::new();
        for (i, shot) in self.shots.iter_mut().enumerate() {
            if let ShotStatus::Offscreen = shot.update() {
                shots_to_remove.insert(i);
            }
        }
        // Remove back-to-front so earlier indices stay valid.
        for &i in shots_to_remove.iter().rev() {
            self.shots.remove(i);
        }
    }
}
|
use aoc::*;
use itertools::Itertools;
use std::cmp::Ordering;
use std::collections::HashMap;
/// AoC 2019 day 14 part 2: binary-search the largest amount of FUEL that
/// one trillion ORE can produce.
fn main() -> Result<()> {
    let input = input("14.txt")?;
    // Index every reaction by the chemical it produces.
    let reactions: HashMap<&str, Reaction> = input
        .lines()
        .map(Reaction::parse)
        .map(|r| (r.output.name, r))
        .collect();
    let budget = 10u64.pow(12);
    // Invariant: `lo` is always producible within budget, `hi` never is.
    let mut lo = 0;
    let mut hi = budget;
    while lo + 1 < hi {
        let fuel = lo + (hi - lo) / 2;
        let mut demand = HashMap::new();
        cost(&reactions, &mut demand, &Chemical::new("FUEL", fuel));
        if demand["ORE"] > budget {
            hi = fuel;
        } else {
            lo = fuel;
        }
    }
    println!("{}", lo);
    Ok(())
}
/// Recursively accumulates into `state` the production needed to obtain
/// `chemical`, expanding reactions all the way down to raw ORE.
///
/// For every chemical except "ORE", `state[name]` holds the currently unused
/// surplus of `name`; for "ORE" it instead accumulates the total amount
/// consumed, which the caller reads out after the call.
fn cost(
    reactions: &HashMap<&str, Reaction>,
    state: &mut HashMap<String, u64>,
    chemical: &Chemical,
) {
    let count = state.entry(chemical.name.into()).or_insert(0);
    if chemical.name == "ORE" {
        // ORE is the raw input: just tally the total demanded.
        *count += chemical.units;
    } else if chemical.units <= *count {
        // Enough surplus from earlier reactions — consume it without running
        // anything. (`<=` rather than `<`: an exact match previously fell
        // into the branch below and recursed through the whole input tree
        // with zero quantities — same result, wasted work.)
        *count -= chemical.units;
    } else {
        // Run the reaction `n` times to cover the shortfall, rounding up;
        // whatever overshoots the demand stays in `state` as surplus.
        let min = reactions[chemical.name].output.units;
        let missing = chemical.units - *count;
        let n = missing / min + if missing % min == 0 { 0 } else { 1 };
        *count += n * min;
        *count -= chemical.units;
        for i in &reactions[chemical.name].inputs {
            cost(reactions, state, &Chemical::new(i.name, n * i.units));
        }
    }
}
/// One production rule: the `inputs` consumed per run to yield `output`.
#[derive(Debug)]
struct Reaction<'a> {
    // Chemicals consumed by one run of the reaction.
    inputs: Vec<Chemical<'a>>,
    // Chemical (and amount) produced by one run.
    output: Chemical<'a>,
}
impl<'a> Reaction<'a> {
    /// Parses one puzzle line such as `"7 A, 1 B => 1 C"`.
    ///
    /// Panics when the line does not have exactly one `" => "` separator.
    fn parse(s: &'a str) -> Self {
        let (lhs, rhs) = s.split(" => ").collect_tuple().unwrap();
        let inputs = lhs.split(", ").map(Chemical::parse).collect();
        let output = Chemical::parse(rhs);
        Reaction { inputs, output }
    }
}
/// An amount of a named chemical, borrowing its name from the input text.
#[derive(Debug)]
struct Chemical<'a> {
    // Chemical name, e.g. "ORE" or "FUEL".
    name: &'a str,
    // Number of units of that chemical.
    units: u64,
}
impl<'a> Chemical<'a> {
    /// Parses `"<units> <name>"`, e.g. `"7 FUEL"`.
    ///
    /// Panics when the string is not exactly two space-separated tokens or
    /// the first token is not an unsigned integer.
    fn parse(s: &'a str) -> Self {
        // Plain stdlib split instead of itertools' `collect_tuple`; the
        // three-way match still rejects anything but exactly two tokens.
        let mut parts = s.split(' ');
        match (parts.next(), parts.next(), parts.next()) {
            (Some(units), Some(name), None) => Self::new(name, units.parse().unwrap()),
            _ => panic!("malformed chemical spec: {:?}", s),
        }
    }
    /// Builds a chemical from already-parsed parts.
    fn new(name: &'a str, units: u64) -> Self {
        Chemical { name, units }
    }
}
|
extern crate alloc;
use super::{SqDir, SquareMaze};
use alloc::vec::Vec;
use bit_vec::BitVec;
use rand::prelude::*;
use rand::{distributions::Uniform, rngs::SmallRng};
impl SquareMaze {
    /// Carves the maze using Wilson's algorithm (loop-erased random walks).
    ///
    /// Cells are numbered row-major: `cell = row * width + col`. Starting
    /// from one random seed cell, the loop repeatedly picks a random cell
    /// not yet in the maze, random-walks until the walk touches the maze,
    /// then retraces the (implicitly loop-erased) walk, adding its cells and
    /// opening the walls along it.
    pub fn init_wilson(&mut self) {
        // Deterministic RNG: the same `self.seed` always yields the same maze.
        let mut rng = SmallRng::seed_from_u64(self.seed);
        // Count of cells not yet part of the maze; drives the outer loop.
        let mut ccount = self.width * self.height;
        let mut partof_mz = BitVec::from_elem(ccount, false);
        let cellgen = Uniform::from(0..ccount);
        let mut start_num = cellgen.sample(&mut rng);
        let mut cur_cell;
        let mut choice;
        // Scratch buffer for the (at most 4) in-bounds neighbours of a cell.
        let mut dir_buf = Vec::with_capacity(4);
        // dir_togo[c] = direction the walk most recently left cell c in.
        // Overwriting it on revisit is what performs the loop erasure:
        // replaying from the start follows only the final, loop-free path.
        let mut dir_togo = Vec::with_capacity(ccount);
        dir_togo.resize(ccount, SqDir::Up);
        // The first cell joins the maze unconditionally.
        partof_mz.set(start_num, true);
        ccount -= 1;
        while ccount != 0 {
            // Phase 1: random-walk from a fresh start cell until the walk
            // reaches any cell already in the maze. (If start_num is already
            // in the maze, both phases are skipped and we redraw.)
            start_num = cellgen.sample(&mut rng);
            cur_cell = start_num;
            while !partof_mz[cur_cell] {
                dir_buf.clear();
                // Gather in-bounds neighbours: row/col tests on the
                // row-major index guard the four grid edges.
                if cur_cell / self.width != 0 {
                    dir_buf.push((cur_cell - self.width, SqDir::Up));
                }
                if cur_cell % self.width != 0 {
                    dir_buf.push((cur_cell - 1, SqDir::Left));
                }
                if cur_cell / self.width != self.height - 1 {
                    dir_buf.push((cur_cell + self.width, SqDir::Down));
                }
                if cur_cell % self.width != self.width - 1 {
                    dir_buf.push((cur_cell + 1, SqDir::Right));
                }
                // dir_buf is never empty: every cell has >= 2 neighbours.
                choice = dir_buf.choose(&mut rng).unwrap();
                dir_togo[cur_cell] = choice.1;
                cur_cell = choice.0;
            }
            // Phase 2: replay the loop-erased walk from the start, adding
            // each cell to the maze and opening the wall it exits through.
            cur_cell = start_num;
            while !partof_mz[cur_cell] {
                partof_mz.set(cur_cell, true);
                ccount -= 1;
                // NOTE(review): `set(..., true)` presumably marks the wall
                // as open/carved — confirm against SquareMaze's encoding.
                self.walls
                    .set(self.get_wall(cur_cell, dir_togo[cur_cell]).unwrap(), true);
                // Step to the next cell of the recorded path.
                match dir_togo[cur_cell] {
                    SqDir::Up => cur_cell -= self.width,
                    SqDir::Left => cur_cell -= 1,
                    SqDir::Down => cur_cell += self.width,
                    SqDir::Right => cur_cell += 1,
                }
            }
        }
    }
}
|
use std::fmt::format;
use crate::jack_tokenizer::TokenData;
/// Collects Jack VM commands as lines of text, in emission order.
#[derive(Debug, Clone)]
pub struct VMWriter {
    // Emitted VM commands, one per element, without trailing newlines.
    vm: Vec<String>
}
impl VMWriter {
    /// Creates an empty writer.
    pub fn new() -> Self {
        VMWriter { vm: Vec::new() }
    }
    /// Appends one already-formatted VM command.
    fn emit<S: Into<String>>(&mut self, command: S) {
        self.vm.push(command.into());
    }
    /// Emits `push <segment> <n>`.
    pub fn write_push(&mut self, segment: &str, n: u16) {
        self.emit(format!("push {} {}", segment, n));
    }
    /// Emits `pop <segment> <n>`.
    pub fn write_pop(&mut self, segment: &str, n: usize) {
        self.emit(format!("pop {} {}", segment, n));
    }
    /// Emits `function <class>.<name> <n>` (`n` = number of locals).
    pub fn write_function(&mut self, class_name: &str, function_name: &str, n: usize) {
        self.emit(format!("function {}.{} {}", class_name, function_name, n));
    }
    /// Emits `call <name> <n>` (`n` = number of arguments).
    pub fn write_call(&mut self, name: &str, n: usize) {
        self.emit(format!("call {} {}", name, n));
    }
    /// Emits the command for a binary operator; unknown operators are
    /// silently ignored.
    pub fn write_arithmetic(&mut self, op: &str) {
        let command = match op {
            "+" => "add",
            "-" => "sub",
            // The VM has no multiply/divide opcodes; they are OS calls.
            "*" => "call Math.multiply 2",
            "/" => "call Math.divide 2",
            "&" => "and",
            "|" => "or",
            "<" => "lt",
            ">" => "gt",
            "=" => "eq",
            _ => return,
        };
        self.emit(command);
    }
    /// Emits the command for a unary operator; unknown operators are
    /// silently ignored.
    pub fn write_unary_op(&mut self, op: &str) {
        match op {
            "-" => self.emit("neg"),
            "~" => self.emit("not"),
            _ => {}
        }
    }
    /// Emits `label <label>`.
    pub fn write_label(&mut self, label: &str) {
        self.emit(format!("label {}", label));
    }
    /// Emits `goto <label>`.
    pub fn write_goto(&mut self, label: &str) {
        self.emit(format!("goto {}", label));
    }
    /// Emits `not` followed by `if-goto <label>`: branch taken when the
    /// condition on the stack is false.
    pub fn write_if(&mut self, label: &str) {
        self.emit("not");
        self.emit(format!("if-goto {}", label));
    }
    /// Emits `return`.
    pub fn write_return(&mut self) {
        self.emit("return");
    }
    /// Returns a copy of everything written so far.
    pub fn output(&self) -> Vec<String> {
        self.vm.clone()
    }
    /// Appends a raw, caller-formatted line verbatim.
    pub fn push(&mut self, str: &str) {
        self.emit(str);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.