text stringlengths 8 4.13M |
|---|
use ash::vk;
use sourcerenderer_core::graphics::Format;
/// Translates an engine `Format` into the matching Vulkan `vk::Format`.
///
/// `supports_d24` reports whether the device supports the packed 24-bit
/// depth format; when it does not, `Format::D24` falls back to
/// `vk::Format::D32_SFLOAT`.
/// NOTE(review): the D24 mapping includes a stencil aspect
/// (`D24_UNORM_S8_UINT`) but the D32 fallback does not — confirm callers
/// never rely on stencil when requesting `Format::D24`.
///
/// Any variant not listed below maps to `vk::Format::UNDEFINED`.
pub fn format_to_vk(format: Format, supports_d24: bool) -> vk::Format {
    match format {
        Format::RGBA8UNorm => vk::Format::R8G8B8A8_UNORM,
        Format::RGBA8Srgb => vk::Format::R8G8B8A8_SRGB,
        Format::R16UNorm => vk::Format::R16_UNORM,
        Format::R16Float => vk::Format::R16_SFLOAT,
        Format::R32Float => vk::Format::R32_SFLOAT,
        Format::R8Unorm => vk::Format::R8_UNORM,
        Format::RG32Float => vk::Format::R32G32_SFLOAT,
        Format::RGB32Float => vk::Format::R32G32B32_SFLOAT,
        Format::RGBA32Float => vk::Format::R32G32B32A32_SFLOAT,
        Format::BGR8UNorm => vk::Format::B8G8R8_UNORM,
        Format::BGRA8UNorm => vk::Format::B8G8R8A8_UNORM,
        Format::D16 => vk::Format::D16_UNORM,
        Format::D16S8 => vk::Format::D16_UNORM_S8_UINT,
        Format::D24 => {
            if supports_d24 {
                vk::Format::D24_UNORM_S8_UINT
            } else {
                vk::Format::D32_SFLOAT
            }
        }
        Format::D32 => vk::Format::D32_SFLOAT,
        Format::D32S8 => vk::Format::D32_SFLOAT_S8_UINT,
        // Legacy DXT names correspond to Vulkan's BC block-compressed formats.
        Format::DXT1 => vk::Format::BC1_RGB_UNORM_BLOCK,
        Format::DXT1Alpha => vk::Format::BC1_RGBA_UNORM_BLOCK,
        Format::DXT3 => vk::Format::BC2_UNORM_BLOCK,
        Format::DXT5 => vk::Format::BC3_UNORM_BLOCK,
        Format::RG16UNorm => vk::Format::R16G16_UNORM,
        Format::RG8UNorm => vk::Format::R8G8_UNORM,
        Format::R32UInt => vk::Format::R32_UINT,
        Format::RG16Float => vk::Format::R16G16_SFLOAT,
        Format::RGBA16Float => vk::Format::R16G16B16A16_SFLOAT,
        // Note the component-order swap: engine R11G11B10 is Vulkan's
        // B10G11R11 packed-32 format.
        Format::R11G11B10Float => vk::Format::B10G11R11_UFLOAT_PACK32,
        Format::RG16UInt => vk::Format::R16G16_UINT,
        Format::R16UInt => vk::Format::R16_UINT,
        Format::R16SNorm => vk::Format::R16_SNORM,
        _ => vk::Format::UNDEFINED,
    }
}
|
/// Writes formatted text to `$dst`, panicking on any formatting error.
/// `$dst` must provide a `write_fmt` method (e.g. via `std::fmt::Write`).
macro_rules! w {
    ($dst:expr, $($arg:tt)*) => ($dst.write_fmt(format_args!($($arg)*)).unwrap())
}
/// Writes formatted text to `$dst` and then increases its indentation
/// level — used to open a nested scope in generated output.
/// Expands to two statements, so it must be invoked in statement position.
macro_rules! w_scope {
    ($dst:expr, $($arg:tt)*) => {
        $dst.write_fmt(format_args!($($arg)*)).unwrap();
        $dst.indent();
    }
}
/// Decreases `$dst`'s indentation level and then writes formatted text —
/// the counterpart of `w_scope!`, used to close a nested scope.
macro_rules! w_end_scope {
    ($dst:expr, $($arg:tt)*) => {
        $dst.outdent();
        $dst.write_fmt(format_args!($($arg)*)).unwrap();
    };
}
pub use pecan_descriptor;
mod context;
mod descriptor;
mod generator;
pub use context::{Context, Output};
pub use generator::Generator;
|
use elfdb::{device::Device, tui, visuals::Visuals};
use failure::{format_err, Error, ResultExt};
use std::path::Path;
fn run<'a, V>(mut visuals: V, initial: Option<&Path>) -> Result<(), Error>
where
V: Visuals,
{
let mut device = Device::default();
if let Some(initial) = initial {
device.load_path(initial).with_context(|_| {
format_err!("failed to load program from path `{}`", initial.display())
})?;
}
visuals.setup()?;
loop {
if visuals.draw(&mut device)? {
break;
}
device.clear();
device.step()?;
}
visuals.done(&mut device)?;
Ok(())
}
/// Entry point: installs a panic hook that prints the panic message and
/// exits with code 101 (so TUI teardown cannot swallow it), then runs the
/// debugger on the optional program path given as the first CLI argument.
fn main() -> Result<(), Error> {
    use std::{env, panic, path::PathBuf, process};
    panic::set_hook(Box::new(|p| {
        eprintln!("{}", p);
        process::exit(101);
    }));
    let mut args = env::args();
    // Skip argv[0] (the executable name).
    args.next();
    // `Option::map` replaces the manual match-to-Option dance.
    let program = args.next().map(PathBuf::from);
    run(
        tui::Terminal::new().interactive(),
        program.as_ref().map(|p| p.as_path()),
    )?;
    Ok(())
}
|
//! ZIP compression backend.
//! It walks the specified folder and writes each file into the archive to
//! perform a backup.
use crate::backup::backup::Backup;
use crate::compressors::{Comprensable, CompressResult};
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use zip;
/// Compressor that writes backup entries into a single ZIP archive.
#[derive(Default)]
pub struct Zip {
    /// Per-entry options used when writing files into the archive.
    /// NOTE(review): `Default` leaves this as `None` while `compress`
    /// unwraps it — confirm callers always set it before compressing.
    pub options: Option<zip::write::FileOptions>,
    // Writer over the destination archive; `None` until `init` is called.
    destination: Option<zip::ZipWriter<File>>,
}
impl Comprensable for Zip {
    /// Creates the destination archive file and prepares the ZIP writer.
    /// Panics if the destination file cannot be created.
    fn init(&mut self, bkp: &Backup) {
        self.destination = Some(zip::ZipWriter::new(
            File::create(&bkp.destination).expect("Cannot create file"),
        ));
    }

    /// Adds `org` to the archive (entry name is the file's display path);
    /// directories are skipped since ZIP entries are created per file.
    ///
    /// Fixes vs. original: borrows the writer in place instead of the
    /// take/put-back dance, and falls back to default `FileOptions` instead
    /// of panicking when `options` was never configured.
    fn compress(&mut self, org: &Path, _dest: &Path) -> CompressResult {
        if org.is_dir() {
            return Ok(());
        }
        let dst = self
            .destination
            .as_mut()
            .expect("init must be called before compress");
        dst.start_file(org.display().to_string(), self.options.unwrap_or_default())
            .unwrap();
        let mut buffer = Vec::new();
        let mut f = File::open(org).expect("Error opening file");
        f.read_to_end(&mut buffer).expect("Error reading file");
        dst.write_all(&buffer).expect("Error writing file");
        Ok(())
    }

    /// Finalizes the archive. The writer is consumed, so no further
    /// `compress` calls are valid afterwards.
    fn finish(&mut self) {
        let mut dst = self.destination.take().unwrap();
        dst.finish().unwrap();
    }
}
|
// Parser unit tests. Each test carries both `#[test]` and
// `#[wasm_bindgen_test]` so the suite runs on native and wasm targets.
#[cfg(test)]
mod tests {
    use super::super::defaults::ENVIRONMENT;
    // `parse`, `Unit`, and `UnitSet` are also used by the generated spec
    // tests included at the bottom of this module.
    use super::super::parser::{parse, parse_single};
    use super::super::types::{Unit, UnitSet, Value};
    use pretty_assertions::assert_eq;
    use wasm_bindgen_test::*;

    // Parses one expression against a fresh clone of the default
    // environment so tests cannot leak state into each other.
    fn parse_helper(input: &'static str) -> Result<Value, ()> {
        parse_single(&mut ENVIRONMENT.clone(), input)
    }

    // Currency can be written as a prefix symbol or a suffix unit word.
    #[test]
    #[wasm_bindgen_test]
    fn basic_parse() {
        assert_eq!(parse_helper("$1234"), Ok(Value::simple(1234.0, "$")));
        assert_eq!(parse_helper("1234 usd"), Ok(Value::simple(1234.0, "usd")),);
    }

    #[test]
    #[wasm_bindgen_test]
    fn parse_negation() {
        assert_eq!(
            parse_helper("-1234 usd"),
            Ok(Value::simple(-1234.0, "usd")),
        );
    }

    // Addition/subtraction with units, operator precedence, and parens.
    #[test]
    #[wasm_bindgen_test]
    fn parse_addition() {
        assert_eq!(
            parse_helper("123 usd + 12 usd"),
            Ok(Value::simple(135.0, "usd")),
        );
        assert_eq!(
            parse_helper("123 usd - 12 usd"),
            Ok(Value::simple(111.0, "usd")),
        );
        assert_eq!(
            parse_helper("-123 usd - 12 usd * 14"),
            Ok(Value::simple(-291.0, "usd")),
        );
        assert_eq!(parse_helper("2 * 3 + 4"), Ok(Value::unitless(10.0)),);
        assert_eq!(parse_helper("2 * (3 + 4)"), Ok(Value::unitless(14.0)),);
    }

    // Exponentiation also raises the unit to the given power.
    #[test]
    #[wasm_bindgen_test]
    fn parse_power() {
        assert_eq!(parse_helper("2 ** 3"), Ok(Value::unitless(8.0)),);
        assert_eq!(
            parse_helper("($2) ** 3"),
            Ok(Value::new(8.0, units!("$" to 3))),
        );
        assert_eq!(
            parse_helper("($1 + $1) ** (1 + 4 - 2)"),
            Ok(Value::new(8.0, units!("$" to 3))),
        );
    }
    // these are the tests generated at build time from the ./spec folder
    include!(concat!(env!("OUT_DIR"), "/spec_tests.rs"));
}
|
// Machine-generated Windows API bindings. Each alias below is an opaque
// WinRT interface handle represented as a raw pointer; the empty `extern`
// block links the `windows` import library. Do not edit by hand.
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
pub type EmailDataProviderConnection = *mut ::core::ffi::c_void;
pub type EmailDataProviderTriggerDetails = *mut ::core::ffi::c_void;
pub type EmailMailboxCreateFolderRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxCreateFolderRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxDeleteFolderRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxDeleteFolderRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxDownloadAttachmentRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxDownloadAttachmentRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxDownloadMessageRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxDownloadMessageRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxEmptyFolderRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxEmptyFolderRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxForwardMeetingRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxForwardMeetingRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxGetAutoReplySettingsRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxGetAutoReplySettingsRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxMoveFolderRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxMoveFolderRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxProposeNewTimeForMeetingRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxProposeNewTimeForMeetingRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxResolveRecipientsRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxResolveRecipientsRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxServerSearchReadBatchRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxServerSearchReadBatchRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxSetAutoReplySettingsRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxSetAutoReplySettingsRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxSyncManagerSyncRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxSyncManagerSyncRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxUpdateMeetingResponseRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxUpdateMeetingResponseRequestEventArgs = *mut ::core::ffi::c_void;
pub type EmailMailboxValidateCertificatesRequest = *mut ::core::ffi::c_void;
pub type EmailMailboxValidateCertificatesRequestEventArgs = *mut ::core::ffi::c_void;
|
//! Threshold Filter
//!
//! This filter returns the `on_match` result if the level in the LogRecord is the same or more
//! specific than the configured level and the `on_mismatch` value otherwise. For example, if the
//! ThresholdFilter is configured with Level `Error` and the LogRecord contains Level `Debug` then
//! the `on_mismatch` value will be returned since `Error` events are more specific than `Debug`.
use Filter;
use config::filter::MatchAction;
use config::filter::MatchAction::*;
use log::{LogLevel, LogLevelFilter, LogMetadata, LogRecord};
#[cfg_attr(test, derive(PartialEq))]
#[derive(Clone, Debug)]
/// Threshold Filter: matches log events at or below a configured level.
pub struct ThresholdFilter {
    /// Maximum (most verbose) level that still counts as a match.
    level: LogLevelFilter,
    /// Action taken when the record matches; `None` means `Neutral`.
    on_match: Option<MatchAction>,
    /// Action taken when the record does not match; `None` means `Deny`.
    on_mismatch: Option<MatchAction>,
}
impl ThresholdFilter {
/// Create a new ThresholdFilter with the given minimum level.
pub fn new(level: LogLevelFilter) -> ThresholdFilter {
ThresholdFilter {
level: level,
on_match: None,
on_mismatch: None,
}
}
/// Set the on match MatchAction. Default is `Neutral`.
pub fn on_match(mut self, action: Option<MatchAction>) -> ThresholdFilter {
self.on_match = action;
self
}
/// Set the on mis-match MatchAction. Default is `Deny`.
pub fn on_mismatch(mut self, action: Option<MatchAction>) -> ThresholdFilter {
self.on_mismatch = action;
self
}
fn filter(&self, level: LogLevel) -> MatchAction {
if level <= self.level {
// Take match action here.
match self.on_match {
Some(ref a) => a.clone(),
None => Neutral,
}
} else {
// Take mismatch action here.
match self.on_mismatch {
Some(ref a) => a.clone(),
None => Deny,
}
}
}
}
// Adapts the inherent `filter` helper to the `Filter` trait for both
// record-based and metadata-based filtering.
impl Filter for ThresholdFilter {
    fn filter(&self, record: &LogRecord) -> MatchAction {
        self.filter(record.level())
    }
    fn filter_by_meta(&self, meta: &LogMetadata) -> MatchAction {
        self.filter(meta.level())
    }
}
// rustc-serialize support: decodes a ThresholdFilter from a config map with
// a mandatory `level` and optional `on_match` / `on_mismatch` fields.
#[cfg(feature = "rustc-serialize")]
mod rs {
    use config::rs::read_llf;
    use rustc_serialize::{Decodable, Decoder};
    use super::*;
    impl Decodable for ThresholdFilter {
        fn decode<D: Decoder>(d: &mut D) -> Result<ThresholdFilter, D::Error> {
            // The numeric arguments are field indices hints for the decoder.
            d.read_struct("ThresholdFilter", 3, |d| {
                let level = try!(d.read_struct_field("level", 1, read_llf));
                let on_match = try!(d.read_struct_field("on_match", 2, |d| Decodable::decode(d)));
                let on_mismatch = try!(d.read_struct_field("on_mismatch",
                                                           3,
                                                           |d| Decodable::decode(d)));
                // Build via the public builder so defaults stay consistent.
                let tf = ThresholdFilter::new(level)
                    .on_match(on_match)
                    .on_mismatch(on_mismatch);
                Ok(tf)
            })
        }
    }
}
// serde support (pre-1.0 serde visitor API): deserializes a ThresholdFilter
// from a map with a mandatory `level` and optional `on_match`/`on_mismatch`.
#[cfg(feature = "serde")]
mod serde {
    use config::serde::LogLevelFilterField;
    use config::filter::serde::MatchActionField;
    use super::*;
    use serde::{Deserialize, Deserializer};
    use serde::de::{MapVisitor, Visitor};

    // Internal tag for the three recognized map keys.
    enum ThresholdFilterField {
        Level,
        OnMatch,
        OnMismatch,
    }

    impl Deserialize for ThresholdFilterField {
        fn deserialize<D>(deserializer: &mut D) -> Result<ThresholdFilterField, D::Error>
            where D: Deserializer
        {
            struct ThresholdFilterFieldVisitor;
            impl Visitor for ThresholdFilterFieldVisitor {
                type Value = ThresholdFilterField;
                // Map a key string to its field tag; unknown keys are a
                // syntax error (this is what rejects misspelled fields).
                fn visit_str<E>(&mut self, value: &str) -> Result<ThresholdFilterField, E>
                    where E: ::serde::de::Error
                {
                    match value {
                        "level" => Ok(ThresholdFilterField::Level),
                        "on_match" => Ok(ThresholdFilterField::OnMatch),
                        "on_mismatch" => Ok(ThresholdFilterField::OnMismatch),
                        _ => Err(::serde::de::Error::syntax("Unexpected field!")),
                    }
                }
            }
            deserializer.visit(ThresholdFilterFieldVisitor)
        }
    }

    impl Deserialize for ThresholdFilter {
        fn deserialize<D>(deserializer: &mut D) -> Result<ThresholdFilter, D::Error>
            where D: Deserializer
        {
            static FIELDS: &'static [&'static str] = &["level", "on_match", "on_mismatch"];
            deserializer.visit_struct("ThresholdFilter", FIELDS, ThresholdFilterVisitor)
        }
    }

    struct ThresholdFilterVisitor;
    impl Visitor for ThresholdFilterVisitor {
        // Walk the map, recording each recognized field; later occurrences
        // of a key overwrite earlier ones.
        fn visit_map<V>(&mut self, mut visitor: V) -> Result<ThresholdFilter, V::Error>
            where V: MapVisitor
        {
            let mut level: Option<LogLevelFilterField> = None;
            let mut on_match: Option<MatchActionField> = None;
            let mut on_mismatch: Option<MatchActionField> = None;
            loop {
                match try!(visitor.visit_key()) {
                    Some(ThresholdFilterField::Level) => {
                        level = Some(try!(visitor.visit_value()));
                    }
                    Some(ThresholdFilterField::OnMatch) => {
                        on_match = Some(try!(visitor.visit_value()));
                    }
                    Some(ThresholdFilterField::OnMismatch) => {
                        on_mismatch = Some(try!(visitor.visit_value()));
                    }
                    None => {
                        break;
                    }
                }
            }
            // `level` is mandatory; the action fields stay None (defaults)
            // when absent.
            let lvl = match level {
                Some(l) => l.level(),
                None => return visitor.missing_field("level"),
            };
            let omma = match on_match {
                Some(om) => Some(om.match_action()),
                None => None,
            };
            let ommma = match on_mismatch {
                Some(omm) => Some(omm.match_action()),
                None => None,
            };
            try!(visitor.end());
            let tf = ThresholdFilter::new(lvl)
                .on_match(omma)
                .on_mismatch(ommma);
            Ok(tf)
        }
        type Value = ThresholdFilter;
    }
}
// Config-decoding tests: valid documents must decode, malformed ones
// (missing/unknown/badly-typed fields) must be rejected.
#[cfg(test)]
mod test {
    use decode;
    use super::*;

    // Minimal valid config: only the mandatory `level` key.
    const BASE_CONFIG: &'static str = r#"
level = "Debug"
"#;
    // Valid config exercising every recognized key.
    const ALL_CONFIG: &'static str = r#"
level = "Debug"
on_match = "Accept"
on_mismatch = "Neutral"
"#;
    static VALIDS: &'static [&'static str] = &[BASE_CONFIG, ALL_CONFIG];

    // Invalid configs: empty (missing level), unknown field, bad level
    // name, and wrongly-typed action values.
    const INVALID_CONFIG_0: &'static str = r#""#;
    const INVALID_CONFIG_1: &'static str = r#"
notafield = "not a field"
"#;
    const INVALID_CONFIG_2: &'static str = r#"
level = "NOt A LeVel"
"#;
    const INVALID_CONFIG_3: &'static str = r#"
level = "Debug"
on_match = 1
"#;
    const INVALID_CONFIG_4: &'static str = r#"
level = "Debug"
on_mismatch = 1
"#;
    static INVALIDS: &'static [&'static str] = &[INVALID_CONFIG_0,
                                                 INVALID_CONFIG_1,
                                                 INVALID_CONFIG_2,
                                                 INVALID_CONFIG_3,
                                                 INVALID_CONFIG_4];

    #[test]
    fn test_valid_configs() {
        for valid in VALIDS {
            assert!(decode::<ThresholdFilter>(valid).is_ok());
        }
    }

    #[test]
    fn test_invalid_configs() {
        for invalid in INVALIDS {
            assert!(decode::<ThresholdFilter>(invalid).is_err());
        }
    }
}
|
// Copyright 2020 - 2021 Alex Dukhno
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::*;
// DELETE against a table that was never created must surface a
// "table does not exist" error rather than succeeding silently.
#[rstest::rstest]
fn delete_from_nonexistent_table(database_with_schema: (InMemory, ResultCollector)) {
    let (mut engine, collector) = database_with_schema;
    engine
        .execute(Inbound::Query {
            sql: "delete from schema_name.table_name;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Err(QueryError::table_does_not_exist("schema_name.table_name")));
}
// End-to-end check of unqualified DELETE: create a table, insert two rows,
// verify both are visible, delete all rows, then verify the table is empty
// (the row description is still reported for the empty SELECT).
#[rstest::rstest]
fn delete_all_records(database_with_schema: (InMemory, ResultCollector)) {
    let (mut engine, collector) = database_with_schema;
    engine
        .execute(Inbound::Query {
            sql: "create table schema_name.table_name (column_test smallint);".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::TableCreated));
    engine
        .execute(Inbound::Query {
            sql: "insert into schema_name.table_name values (123);".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::RecordsInserted(1)));
    engine
        .execute(Inbound::Query {
            sql: "insert into schema_name.table_name values (456);".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::RecordsInserted(1)));
    // Sanity check: both inserted rows are visible before the delete.
    engine
        .execute(Inbound::Query {
            sql: "select * from schema_name.table_name;".to_owned(),
        })
        .expect("query executed");
    collector.lock().unwrap().assert_receive_many(vec![
        Ok(QueryEvent::RowDescription(vec![("column_test".to_owned(), SMALLINT)])),
        Ok(QueryEvent::DataRow(vec!["123".to_owned()])),
        Ok(QueryEvent::DataRow(vec!["456".to_owned()])),
        Ok(QueryEvent::RecordsSelected(2)),
    ]);
    engine
        .execute(Inbound::Query {
            sql: "delete from schema_name.table_name;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::RecordsDeleted(2)));
    // After the delete the SELECT yields a description but zero rows.
    engine
        .execute(Inbound::Query {
            sql: "select * from schema_name.table_name;".to_owned(),
        })
        .expect("query executed");
    collector.lock().unwrap().assert_receive_many(vec![
        Ok(QueryEvent::RowDescription(vec![("column_test".to_owned(), SMALLINT)])),
        Ok(QueryEvent::RecordsSelected(0)),
    ]);
}
|
/// A 3D integer triple used for both positions and velocities.
type CoOrd = (i32, i32, i32);

/// The puzzle input always describes exactly four moons.
const MOON_COUNT: usize = 4;

/// Positions and velocities of all moons at one point in time.
#[derive(Debug, Eq, PartialEq, Clone)]
struct State {
    positions: [CoOrd; MOON_COUNT],
    velocities: [CoOrd; MOON_COUNT],
}

/// Parses puzzle input of the form `<x=-1, y=0, z=2>` (one moon per line)
/// into an initial `State` with all velocities at rest.
///
/// Panics if the input does not contain four lines of three `k=v` pairs.
fn parse_state(input: &'static str) -> State {
    let moon_coords = input
        .lines()
        .map(|l| {
            l.trim_start()
                .trim_matches(|c| c == '>' || c == '<')
                .split(',')
                // Skip the "x=" / "y=" / "z=" prefix and parse the value.
                // Parsing the &str slice directly avoids the intermediate
                // String the original allocated via `.to_owned()`.
                .map(|s| s.trim_start()[2..].parse::<i32>().unwrap())
                .collect::<Vec<i32>>()
        })
        .collect::<Vec<Vec<i32>>>();
    State {
        positions: [
            (moon_coords[0][0], moon_coords[0][1], moon_coords[0][2]),
            (moon_coords[1][0], moon_coords[1][1], moon_coords[1][2]),
            (moon_coords[2][0], moon_coords[2][1], moon_coords[2][2]),
            (moon_coords[3][0], moon_coords[3][1], moon_coords[3][2]),
        ],
        velocities: [(0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)],
    }
}
/// Unit step pulling `a` toward `b`: +1 when a < b, -1 when a > b,
/// 0 when equal.
fn relative_velocity(a: i32, b: i32) -> i32 {
    use std::cmp::Ordering::*;
    match b.cmp(&a) {
        Greater => 1,
        Less => -1,
        Equal => 0,
    }
}
/// Component-wise gravity applied to moon `a` by moon `b`.
fn get_gravity(a: &(i32, i32, i32), b: &(i32, i32, i32)) -> (i32, i32, i32) {
    let &(ax, ay, az) = a;
    let &(bx, by, bz) = b;
    (
        relative_velocity(ax, bx),
        relative_velocity(ay, by),
        relative_velocity(az, bz),
    )
}
/// Advances the simulation one tick: applies pairwise gravity to every
/// moon's velocity, then moves each moon by its (updated) velocity.
fn tick_state_mut(state: &mut State) {
    for i in 0..MOON_COUNT {
        for j in 0..MOON_COUNT {
            // Moons at identical positions (including i == j) exert no
            // pull, so they are skipped.
            if state.positions[i] == state.positions[j] {
                continue;
            }
            let (gx, gy, gz) = get_gravity(&state.positions[i], &state.positions[j]);
            state.velocities[i].0 += gx;
            state.velocities[i].1 += gy;
            state.velocities[i].2 += gz;
        }
    }
    for (pos, vel) in state.positions.iter_mut().zip(state.velocities.iter()) {
        pos.0 += vel.0;
        pos.1 += vel.1;
        pos.2 += vel.2;
    }
}
/// Total system energy: for each moon, potential energy (sum of absolute
/// position components) times kinetic energy (sum of absolute velocity
/// components), summed over all moons.
fn get_total_energy(state: &State) -> i32 {
    let mut total = 0;
    for (pos, vel) in state.positions.iter().zip(state.velocities.iter()) {
        let potential = pos.0.abs() + pos.1.abs() + pos.2.abs();
        let kinetic = vel.0.abs() + vel.1.abs() + vel.2.abs();
        total += potential * kinetic;
    }
    total
}
/// Runs the moon simulation on `input`.
///
/// Returns `(total energy after the last tick, number of ticks executed)`.
/// With `num_ticks = None` it brute-forces until the state first returns
/// to the initial state (part 2). Note: the cycle check also runs for a
/// bounded tick count, so a bounded run stops early if the cycle is
/// shorter than `num_ticks` — acceptable for this puzzle's inputs.
fn run(input: &'static str, num_ticks: Option<i64>) -> (i32, i64) {
    let initial_state = parse_state(input);
    let mut state = initial_state.clone();
    let max_ticks = num_ticks.unwrap_or(std::i64::MAX);
    let mut tick_cnt = 0;
    while tick_cnt < max_ticks {
        tick_state_mut(&mut state);
        tick_cnt += 1;
        if state == initial_state {
            break;
        }
    }
    (get_total_energy(&state), tick_cnt)
}
/// Prints both puzzle answers: part 1 is the energy after 1000 ticks,
/// part 2 is the tick count until the initial state repeats.
fn main() {
    println!(
        "Part 1 => {}",
        run(include_str!("../input/day_12.txt"), Some(1000)).0
    );
    println!(
        "Part 2 => {}",
        run(include_str!("../input/day_12.txt"), None).1
    );
}
// Worked examples from the puzzle description: energy after a fixed number
// of steps, and the cycle length of the repeating state.
#[test]
fn energy_calc() {
    let input = "<x=-1, y=0, z=2>
<x=2, y=-10, z=-7>
<x=4, y=-8, z=8>
<x=3, y=5, z=-1>";
    assert_eq!(run(&input, Some(10)).0, 179);
}
#[test]
fn energy_calc_hundred_steps() {
    let input = "<x=-8, y=-10, z=0>
<x=5, y=5, z=10>
<x=2, y=-7, z=3>
<x=9, y=-8, z=-3>";
    assert_eq!(run(&input, Some(100)).0, 1940);
}
// NOTE(review): this brute-forces ~4.7 billion ticks; it is correct but
// extremely slow compared to the per-axis cycle/LCM approach.
#[test]
fn seen_state() {
    let input = "<x=-8, y=-10, z=0>
<x=5, y=5, z=10>
<x=2, y=-7, z=3>
<x=9, y=-8, z=-3>";
    assert_eq!(run(&input, None).1, 4686774924);
}
use serde::{Deserialize, Serialize};
use smart_default::SmartDefault;
// Single-variant "constant" enums used as serde markers: each one
// serializes to exactly its renamed string (e.g. "GMObject") and refuses
// to deserialize anything else — presumably pinning a resource-type field
// in the serialized document (TODO confirm against the on-disk format).
#[derive(
    Debug, Copy, Serialize, Deserialize, SmartDefault, PartialEq, Eq, Clone, Ord, PartialOrd,
)]
pub enum ConstGmObject {
    #[serde(rename = "GMObject")]
    #[default]
    Const,
}
#[derive(
    Debug, Copy, Serialize, Deserialize, SmartDefault, PartialEq, Eq, Clone, Ord, PartialOrd,
)]
pub enum ConstGmEvent {
    #[serde(rename = "GMEvent")]
    #[default]
    Const,
}
#[derive(
    Debug, Copy, Serialize, Deserialize, SmartDefault, PartialEq, Eq, Clone, Ord, PartialOrd,
)]
pub enum ConstGmObjectProperty {
    #[serde(rename = "GMObjectProperty")]
    #[default]
    Const,
}
// Note: the Rust name says "Override" but the serialized tag is
// "GMOverriddenProperty".
#[derive(
    Debug, Copy, Serialize, Deserialize, SmartDefault, PartialEq, Eq, Clone, Ord, PartialOrd,
)]
pub enum ConstGmObjectOverrideProperty {
    #[serde(rename = "GMOverriddenProperty")]
    #[default]
    Const,
}
|
mod build_macro;
mod implement;
mod implement_macro;
use build_macro::*;
use gen::*;
use implement_macro::*;
use quote::*;
use reader::*;
use syn::parse_macro_input;
/// A macro for generating Windows API bindings to a .rs file at build time.
///
/// This macro can be used to import Windows APIs from any Windows metadata (winmd) file.
/// It is only intended for use from a crate's build.rs script.
///
/// The macro generates a single `build` function which can be used in build scripts
/// to generate the Windows bindings. After using the `build` macro, call the
/// generated `build` function somewhere in the build.rs script's main function.
///
/// # Usage
/// Specify which types you want to generate bindings for using Rust
/// `use`-path syntax. Types know which other types they depend on, so
/// `build` will generate any other Windows types needed for the specified
/// types to work.
///
/// # Example
/// The following `build!` generates all types inside of the `Microsoft::AI::MachineLearning`
/// namespace.
///
/// ```rust,ignore
/// build!(
///     Microsoft::AI::MachineLearning::*
/// );
/// ```
#[proc_macro]
pub fn build(stream: proc_macro::TokenStream) -> proc_macro::TokenStream {
    // Parsing for side effects: the BuildMacro records which types were
    // requested; gen_build() then emits the `build` function.
    parse_macro_input!(stream as BuildMacro);
    gen_build().as_str().parse().unwrap()
}
// Hidden variant of `build!` kept for backwards compatibility; emits the
// legacy form of the generated build function.
#[doc(hidden)]
#[proc_macro]
pub fn build_legacy(stream: proc_macro::TokenStream) -> proc_macro::TokenStream {
    parse_macro_input!(stream as BuildMacro);
    gen_build_legacy().as_str().parse().unwrap()
}
/// A macro for generating Windows API bindings ahead of time.
///
/// Expands to a single raw string literal containing the generated source
/// tree, so the caller can write it to disk.
#[proc_macro]
pub fn generate(stream: proc_macro::TokenStream) -> proc_macro::TokenStream {
    parse_macro_input!(stream as BuildMacro);
    let mut tokens = String::new();
    // NOTE(review): the output is wrapped in an r#"…"# literal; this
    // assumes the generated source never contains the `"#` sequence —
    // confirm gen_source_tree() guarantees that.
    tokens.push_str("r#\"");
    tokens.push_str(&gen_source_tree().into_string());
    tokens.push_str("\"#");
    tokens.parse().unwrap()
}
/// Rust structs can use the [`macro@implement`] attribute macro to implement entire WinRT or COM
/// classes or any combination of existing COM and WinRT interfaces.
///
/// If the attribute [`proc_macro::TokenStream`] contains the name of a WinRT class then all
/// of its interfaces are implemented. Otherwise, whatever interfaces are contained within
/// the attribute TokenStream are implemented.
#[proc_macro_attribute]
pub fn implement(attribute: proc_macro::TokenStream, input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    // All real work is delegated to the `implement` module.
    implement::gen(attribute, input)
}
/// Includes the generated bindings into the current context.
///
/// Expands to an `include!` of the `windows.rs` file that the build script
/// wrote into `OUT_DIR`.
#[proc_macro]
pub fn include_bindings(_: proc_macro::TokenStream) -> proc_macro::TokenStream {
    // TODO: check that input stream is empty
    r#"::std::include!(::std::concat!(::std::env!("OUT_DIR"), "/windows.rs"));"#.parse().unwrap()
}
// TODO: only use for blittable structs and unions? Anything else requires deep comparison?
// Derives PartialEq/Eq for a generated struct by raw byte comparison of the
// whole value (memcmp over size_of::<T>()). Only sound for types where
// equal values always have identical bytes — see the TODO above.
#[proc_macro_derive(StructDerive)]
pub fn derive_struct_traits(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let name = format_token!("{}", syn::parse_macro_input!(input as syn::DeriveInput).ident.to_string());
    let tokens = quote! {
        impl ::core::cmp::PartialEq for #name {
            fn eq(&self, other: &Self) -> bool {
                unsafe {
                    ::windows::core::memcmp(self as *const _ as _, other as *const _ as _, core::mem::size_of::<#name>()) == 0
                }
            }
        }
        impl ::core::cmp::Eq for #name {}
    };
    tokens.as_str().parse().unwrap()
}
|
//! This is crate-wide documentation.
/// This is module level documentation.
pub mod functions;
|
use crate::renderer::rectangle::{Rectangle, Region};
use crate::renderer::Dimensions;
use unicode_segmentation::UnicodeSegmentation;
use wgpu_glyph::ab_glyph::{Font, FontArc};
use wgpu_glyph::{GlyphPositioner, Layout, SectionGeometry, Text};
use winit::dpi::{PhysicalPosition, PhysicalSize};
use winit::event::VirtualKeyCode;
/// A text cursor: the rectangle drawn on screen plus its logical position
/// in the text (`row`, `column`) and its pixel `x_offset` within the line.
#[derive(Debug)]
pub struct Cursor {
    // On-screen rectangle representing the caret.
    pub rect: Rectangle,
    // Line index into the text buffer.
    pub row: usize,
    // Glyph index within the line.
    pub column: usize,
    // Horizontal pixel offset of the caret within the line.
    pub x_offset: f32,
}
impl Cursor {
    /// Creates a cursor at row 0, column 0 with the given caret rectangle
    /// geometry, color, and optional scissor region.
    pub fn new(
        device: &wgpu::Device,
        screen_size: PhysicalSize<f32>,
        dimensions: Dimensions,
        color: [f32; 3],
        region: Option<Region>,
    ) -> Self {
        Self {
            rect: Rectangle::new(device, screen_size, dimensions, color, region),
            row: 0,
            column: 0,
            x_offset: 0.0,
        }
    }
}
/// Receives keyboard input routed to a focused text widget: special keys
/// (arrows, etc.) via `input_special`, printable/control characters via
/// `input_char`.
pub trait TextInput {
    fn input_special(
        &mut self,
        screen_size: PhysicalSize<f32>,
        key: VirtualKeyCode,
    );
    fn input_char(&mut self, screen_size: PhysicalSize<f32>, ch: char);
}
/// An editable text region: one `String` per line plus a cursor and cached
/// layout metrics.
pub struct TextArea {
    cursor: Cursor,
    font: FontArc,
    font_height: f32,
    // Cached width of the widest line, in pixels.
    max_line_length: f32,
    // One entry per line of text.
    text: Vec<String>,
    // `Some` enables multi-line editing; the inner value is currently
    // unused (field is underscore-prefixed).
    _multiline: Option<f32>,
}
impl TextArea {
    /// Builds a text area from `text`, splitting it into lines and placing
    /// the cursor at the top-left.
    ///
    /// Panics (via `assert_eq!`) if `multiline` is `None` but `text`
    /// contains more than one line.
    pub fn _new(
        font: FontArc,
        text: String,
        font_height: f32,
        device: &wgpu::Device,
        screen_size: PhysicalSize<f32>,
        multiline: Option<f32>,
    ) -> Self {
        let mut split_text =
            text.lines().map(|s| s.to_string()).collect::<Vec<String>>();
        // `lines()` drops a trailing newline, but in multiline mode a
        // trailing '\n' means an empty final line the cursor can sit on.
        if multiline.is_some() && text.ends_with('\n') {
            split_text.push(String::from(""));
        }
        if multiline.is_none() {
            assert_eq!(split_text.len(), 1);
        }
        // TODO: bounding rect
        // Caret starts one font-height tall at the bottom-left, clipped to
        // the full window.
        let cursor = Cursor::new(
            device,
            screen_size,
            Dimensions {
                x: 0.0,
                y: screen_size.height as f32 - font_height,
                width: 1.0,
                height: font_height,
            },
            [0.7, 0.0, 0.0],
            Some(Region {
                x: 0,
                y: 0,
                width: screen_size.width as u32,
                height: screen_size.height as u32,
            }),
        );
        let max_line_length =
            max_line_length(&split_text, font.clone(), font_height);
        Self {
            text: split_text,
            cursor,
            font,
            font_height,
            max_line_length,
            _multiline: multiline,
        }
    }
}
impl super::RenderElement for TextArea {
    // Only the caret rectangle is rendered here; the text itself is drawn
    // elsewhere (glyph rendering is not part of this element).
    fn get_rects(&self) -> Vec<&Rectangle> {
        vec![&self.cursor.rect]
    }
    // A text area has no child render elements.
    fn get_elements(&mut self) -> Vec<&mut dyn super::RenderElement> {
        vec![]
    }
    fn get_dimensions(&self) -> Dimensions {
        todo!()
    }
}
impl TextInput for TextArea {
    // Delegates to the free functions below with zero origin/scroll
    // offsets (a plain TextArea is neither embedded nor scrolled).
    fn input_special(
        &mut self,
        screen_size: PhysicalSize<f32>,
        key: VirtualKeyCode,
    ) {
        input_special(
            screen_size,
            key,
            &mut self.text,
            &mut self.cursor,
            self.font.clone(),
            self.font_height,
            PhysicalPosition { x: 0.0, y: 0.0 },
            PhysicalPosition { x: 0.0, y: 0.0 },
        );
    }
    fn input_char(&mut self, screen_size: PhysicalSize<f32>, ch: char) {
        // input_char returns the recomputed widest-line width; cache it.
        self.max_line_length = input_char(
            screen_size,
            ch,
            &mut self.text,
            &mut self.cursor,
            self.font.clone(),
            self.font_height,
            PhysicalPosition { x: 0.0, y: 0.0 },
            PhysicalPosition { x: 0.0, y: 0.0 },
        );
    }
}
/// Computes the rendered pixel width of one line of text at `font_height`:
/// the x position of the last glyph plus that glyph's bounds width, or 0
/// for an empty line.
///
/// Fix: the source contained the mojibake `§ion_glyph` — an HTML-entity
/// corruption of `&section_glyph` — which did not compile; restored the
/// reference.
pub fn line_length(line: &str, font: FontArc, font_height: f32) -> f32 {
    let layout = Layout::default_wrap();
    let text = Text::new(line).with_scale(font_height);
    let section_glyphs = layout.calculate_glyphs(
        &[font.clone()],
        &SectionGeometry {
            ..Default::default()
        },
        &[text],
    );
    if let Some(section_glyph) = section_glyphs.last() {
        section_glyph.glyph.position.x
            + font.glyph_bounds(&section_glyph.glyph).width()
    } else {
        0.0
    }
}
/// Returns the pixel width of the widest line in `lines` at `font_height`
/// (0.0 for an empty slice).
pub fn max_line_length(
    lines: &[String],
    font: FontArc,
    font_height: f32,
) -> f32 {
    lines
        .iter()
        .map(|line| line_length(line, font.clone(), font_height))
        .fold(0.0, f32::max)
}
/// Returns the x pixel position of the cursor at (`row`, `column`) in
/// `text`, or `None` when the column is out of range for that row.
///
/// When `column` is exactly one past the last glyph, the result is the
/// right edge of the final glyph (end-of-line caret position).
///
/// Fix: the source contained the mojibake `§ion_glyph` — an HTML-entity
/// corruption of `&section_glyph` — which did not compile; restored the
/// reference.
pub fn cursor_x_position(
    row: usize,
    column: usize,
    text: &[String],
    font: FontArc,
    font_height: f32,
    offset: PhysicalPosition<f32>,
) -> Option<f32> {
    let text = Text::new(&text[row]).with_scale(font_height);
    let layout = Layout::default_wrap();
    let section_glyphs = layout.calculate_glyphs(
        &[font.clone()],
        &SectionGeometry {
            screen_position: (offset.x, offset.y),
            ..Default::default()
        },
        &[text],
    );
    if let Some(section_glyph) = section_glyphs.get(column) {
        Some(section_glyph.glyph.position.x)
    } else if column != 0 {
        // Past the last glyph: right edge of the previous glyph.
        section_glyphs.get(column - 1).map(|section_glyph| {
            section_glyph.glyph.position.x
                + font.glyph_bounds(&section_glyph.glyph).width()
        })
    } else {
        None
    }
}
/// Moves the cursor through `text` in response to an arrow key and resizes
/// the caret rectangle to the new pixel position.
///
/// `offset` is the text area's on-screen origin; `scroll_offset` is the
/// current scroll translation. Keys other than Up/Down/Left/Right return
/// early without touching the caret rectangle.
#[allow(clippy::too_many_arguments)]
pub fn input_special(
    screen_size: PhysicalSize<f32>,
    key: VirtualKeyCode,
    text: &mut Vec<String>,
    cursor: &mut Cursor,
    font: FontArc,
    font_height: f32,
    offset: PhysicalPosition<f32>,
    scroll_offset: PhysicalPosition<f32>,
) {
    // Shorthand: pixel x position for a (row, column) in the current text.
    let cursor_x_position2 = |row: usize, column: usize| {
        cursor_x_position(
            row,
            column,
            text,
            font.clone(),
            font_height,
            scroll_offset,
        )
    };
    match key {
        VirtualKeyCode::Up => {
            if cursor.row != 0 {
                cursor.row -= 1;
                // Keep the column if the previous line is long enough,
                // otherwise clamp to that line's end.
                if let Some(offset) = cursor_x_position2(cursor.row, cursor.column) {
                    cursor.x_offset = offset;
                } else {
                    // NOTE(review): `len()` is a byte length while columns
                    // are glyph indices elsewhere — confirm for non-ASCII.
                    cursor.column = text[cursor.row].len();
                    cursor.x_offset =
                        cursor_x_position2(cursor.row, cursor.column).unwrap_or(0.0);
                }
            } else {
                // Already on the first line: jump to its start.
                cursor.x_offset = 0.0;
                cursor.column = 0;
            }
        }
        VirtualKeyCode::Left => {
            if cursor.column != 0 {
                cursor.column -= 1;
                cursor.x_offset =
                    cursor_x_position2(cursor.row, cursor.column).unwrap();
            } else if cursor.row != 0 {
                // At line start: wrap to the end of the previous line.
                cursor.row -= 1;
                cursor.column = text[cursor.row].len();
                cursor.x_offset =
                    cursor_x_position2(cursor.row, cursor.column).unwrap_or(0.0);
            }
        }
        VirtualKeyCode::Down => {
            if cursor.row != (text.len() - 1) {
                cursor.row += 1;
                // Same column-preserve-or-clamp logic as Up.
                if let Some(offset) = cursor_x_position2(cursor.row, cursor.column) {
                    cursor.x_offset = offset;
                } else {
                    cursor.column = text[cursor.row].len();
                    cursor.x_offset =
                        cursor_x_position2(cursor.row, cursor.column).unwrap_or(0.0);
                }
            } else {
                // Already on the last line: jump to its end.
                cursor.column = text[cursor.row].len();
                cursor.x_offset =
                    cursor_x_position2(cursor.row, cursor.column).unwrap_or(0.0);
            }
        }
        VirtualKeyCode::Right => {
            if cursor.row != (text.len() - 1) {
                if let Some(offset) = cursor_x_position2(cursor.row, cursor.column + 1)
                {
                    cursor.column += 1;
                    cursor.x_offset = offset;
                } else {
                    // Past line end: wrap to the start of the next line.
                    cursor.x_offset = 0.0;
                    cursor.column = 0;
                    cursor.row += 1;
                }
            } else if let Some(offset) =
                cursor_x_position2(cursor.row, cursor.column + 1)
            {
                // Last line: advance only while glyphs remain.
                cursor.column += 1;
                cursor.x_offset = offset;
            }
        }
        _ => return,
    }
    // Reposition the caret rectangle; width/height are preserved.
    cursor.rect.resize(
        screen_size,
        Dimensions {
            x: offset.x + scroll_offset.x + cursor.x_offset,
            y: scroll_offset.y + font_height + (cursor.row as f32 * font_height),
            ..cursor.rect.dimensions
        },
    );
}
/// Applies a typed character to `text` at the cursor: backspace (0x7f)
/// deletes, carriage return splits the line, and anything else is
/// inserted. The cursor is then moved via `input_special`, and the
/// recomputed widest-line width is returned so callers can cache it.
#[allow(clippy::too_many_arguments)]
pub fn input_char(
    screen_size: PhysicalSize<f32>,
    ch: char,
    text: &mut Vec<String>,
    cursor: &mut Cursor,
    font: FontArc,
    font_height: f32,
    offset: PhysicalPosition<f32>,
    scroll_offset: PhysicalPosition<f32>,
) -> f32 {
    // Shorthand for cursor movement with all the layout context bound.
    let input_spc =
        |key: VirtualKeyCode, text: &mut Vec<String>, cursor: &mut Cursor| {
            input_special(
                screen_size,
                key,
                text,
                cursor,
                font.clone(),
                font_height,
                offset,
                scroll_offset,
            );
        };
    match ch {
        // backspace
        '\u{7f}' => {
            if cursor.column != 0 {
                // Find the byte index of the grapheme before the cursor
                // and remove a char there.
                // NOTE(review): `String::remove` deletes one `char`, not a
                // whole grapheme cluster — confirm for combining marks.
                let mut graphemes_indices = text[cursor.row].grapheme_indices(true);
                let index = graphemes_indices.nth(cursor.column - 1).unwrap().0;
                text[cursor.row].remove(index);
                input_spc(VirtualKeyCode::Left, text, cursor);
            } else if cursor.row != 0 {
                // At line start: join this line onto the previous one.
                let removed = text.remove(cursor.row);
                cursor.row -= 1;
                // NOTE(review): byte `len() + 1` is used as a column so the
                // following Left lands at the join point — mixes byte
                // lengths with glyph columns; confirm for non-ASCII.
                cursor.column = text[cursor.row].len() + 1;
                text[cursor.row] += &removed;
                input_spc(VirtualKeyCode::Left, text, cursor);
            }
        }
        // enter
        '\r' => {
            // Split the current line at the cursor's grapheme boundary and
            // push the tail onto a new following line.
            let mut graphemes_indices = text[cursor.row].grapheme_indices(true);
            let index = graphemes_indices
                .nth(cursor.column)
                .map(|(i, _)| i)
                .unwrap_or_else(|| text[cursor.row].len());
            let after_enter = text[cursor.row].split_off(index);
            text.insert(cursor.row + 1, after_enter);
            input_spc(VirtualKeyCode::Right, text, cursor);
        }
        _ => {
            // Insert the character at the cursor's grapheme boundary.
            let mut graphemes_indices = text[cursor.row].grapheme_indices(true);
            let index = graphemes_indices
                .nth(cursor.column)
                .map(|(i, _)| i)
                .unwrap_or_else(|| text[cursor.row].len());
            text[cursor.row].insert(index, ch);
            input_spc(VirtualKeyCode::Right, text, cursor);
        }
    }
    max_line_length(&text, font, font_height)
}
|
use crate::responses::GenericResponse;
use std::fmt::{Debug, Display, Formatter};
pub use serde::Deserialize;
use crate::responses::listing::GenericListing;
use serde_json::Value;
/// Response payload describing the authenticated ("me") user.
#[derive(Deserialize, Clone)]
pub struct MeResponse {
    /// The user's profile data, flattened from the same JSON object.
    #[serde(flatten)]
    pub about: AboutUser,
    /// Raw, unmodeled feature-flag data returned by the API.
    /// NOTE(review): exact schema unknown — kept as untyped JSON on purpose.
    pub features: Value,
}
impl Debug for MeResponse {
    /// Compact one-line debug form; only the account name is shown,
    /// never the raw `features` payload.
    fn fmt(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result {
        let name = &self.about.name;
        write!(formatter, "[Me Response] User: {}", name)
    }
}
/// Account-private preference and subscription fields.
/// Presumably only present when the response describes the authenticated
/// user's own account (it is an `Option` inside `AboutUser`) — TODO confirm.
#[derive(Deserialize, Clone)]
pub struct PersonalInformation {
    // Preference: hide profanity.
    pub pref_no_profanity: bool,
    pub has_external_account: bool,
    // Preferred region for "popular" feeds.
    pub pref_geopopular: String,
    pub pref_show_trending: bool,
    pub pref_show_presence: bool,
    /// Expiry timestamp of the gold subscription, if any — units unverified.
    pub gold_expiration: Option<i64>,
    /// I am guessing premium?
    pub has_gold_subscription: bool,
    /// The user's coin balance.
    pub coins: i64,
    /// has_paypal_subscription - Why is this public?
    pub has_paypal_subscription: bool,
    /// has_subscribed_to_premium
    pub has_subscribed_to_premium: bool,
}
impl Debug for PersonalInformation {
    /// Deliberately opaque: never echo private account fields into logs.
    fn fmt(&self, out: &mut Formatter<'_>) -> std::fmt::Result {
        out.write_str("[Personal Information]")
    }
}
/// Public profile ("about") data for a user.
/// Fields marked `#[serde(default)]` are absent in some API responses.
#[derive(Deserialize, Clone)]
pub struct AboutUser {
    #[serde(default)]
    pub is_employee: bool,
    #[serde(default)]
    pub is_friend: bool,
    //TODO expand upon later
    // The user's profile subreddit, kept as raw JSON for now.
    pub subreddit: Value,
    // Presumably [width, height] of the snoovatar image — TODO confirm.
    pub snoovatar_size: Option<Vec<i64>>,
    #[serde(default)]
    pub awardee_karma: i64,
    pub id: String,
    pub verified: bool,
    pub is_gold: bool,
    #[serde(default)]
    pub is_suspended: bool,
    #[serde(default)]
    pub is_mod: bool,
    #[serde(default)]
    pub awarder_karma: i64,
    pub has_verified_email: bool,
    pub icon_img: String,
    pub hide_from_robots: bool,
    #[serde(default)]
    pub link_karma: i64,
    #[serde(default)]
    pub is_blocked: bool,
    #[serde(default)]
    pub total_karma: i64,
    pub pref_show_snoovatar: bool,
    // The account name; used by the Display/Debug impls below.
    pub name: String,
    // Creation timestamps; defaulted to 0.0 when missing.
    #[serde(default)]
    pub created: f64,
    #[serde(default)]
    pub created_utc: f64,
    pub snoovatar_img: String,
    #[serde(default)]
    pub comment_karma: i64,
    pub accept_followers: bool,
    pub has_subscribed: bool,
    // Private fields, flattened from the same JSON object; `None` when the
    // response does not include them.
    #[serde(flatten)]
    pub personal_details: Option<PersonalInformation>,
}
impl Display for AboutUser {
    /// Renders the user as their plain account name.
    fn fmt(&self, out: &mut Formatter<'_>) -> std::fmt::Result {
        out.write_str(&self.name)
    }
}
impl Debug for AboutUser {
    /// Debug form shows the name plus whether private details are attached,
    /// without printing the private details themselves.
    fn fmt(&self, out: &mut Formatter<'_>) -> std::fmt::Result {
        let has_personal = self.personal_details.is_some();
        write!(
            out,
            "[User]. name: {}. Contains Personal Information {}",
            self.name, has_personal
        )
    }
}
/// A single user's about data wrapped in the standard response envelope.
pub type UserResponse = GenericResponse<AboutUser>;
/// A listing (page) of user about data.
pub type Users = GenericListing<AboutUser>;
|
extern crate regex;
use std::io;
use std::fs;
use std::io::BufRead;
use std::path::Path;
use std::collections::HashMap;
use std::collections::HashSet;
use regex::Regex;
/// Entry point: parse the passport batch file and report how many
/// passports pass all field validations (AoC 2020 day 4, part 2 rules).
fn main() {
    let input = parse_input();
    // Entry count goes to stderr so stdout carries only the answer.
    eprintln!("Found {} entries", input.passports.len());
    println!("Found {} valid passports", input.valid_passports());
}
/// One passport record. Every field is stored as the raw string from the
/// input; a missing field is the empty string and fails its validator.
struct Passport {
    // Position of this record in the input file, for diagnostics.
    index: usize,
    birth_year: String,   // byr
    issue_year: String,   // iyr
    expire_year: String,  // eyr
    height: String,       // hgt
    hair_color: String,   // hcl
    eye_color: String,    // ecl
    passport_id: String,  // pid
    // cid — optional by the puzzle rules, currently unused.
    _country_id: String,
}
/// The fully parsed input file: every passport record found.
struct InputData {
    passports: Vec<Passport>,
}
impl Passport {
    /// Builds a `Passport` from a `label -> value` map, filling any missing
    /// field with an empty string (which later fails validation).
    fn from_hash(data: HashMap<String, String>, index: usize) -> Passport {
        // `unwrap_or_default` avoids allocating a fresh String when present.
        let field = |key: &str| data.get(key).cloned().unwrap_or_default();
        Passport {
            index,
            birth_year: field("byr"),
            issue_year: field("iyr"),
            expire_year: field("eyr"),
            height: field("hgt"),
            hair_color: field("hcl"),
            eye_color: field("ecl"),
            passport_id: field("pid"),
            _country_id: field("cid"),
        }
    }

    /// A passport is valid when every required field (all but `cid`)
    /// passes its individual validation rule.
    fn is_valid(&self) -> bool {
        let mut missing_keys = vec![];
        if !self.valid_birth_year() { missing_keys.push("byr"); }
        if !self.valid_issue_year() { missing_keys.push("iyr"); }
        if !self.valid_expire_year() { missing_keys.push("eyr"); }
        if !self.valid_height() { missing_keys.push("hgt"); }
        if !self.valid_hair_color() { missing_keys.push("hcl"); }
        if !self.valid_eye_color() { missing_keys.push("ecl"); }
        if !self.valid_passport_id() { missing_keys.push("pid"); }
        // "cid" is deliberately optional per the puzzle rules.
        let valid = missing_keys.is_empty();
        eprintln!("({}) is valid? {:?} (missing {:?})", self.index, valid, missing_keys);
        valid
    }

    /// byr: numeric, 1920..=2002.
    fn valid_birth_year(&self) -> bool {
        Self::year_in_range(&self.birth_year, 1920, 2002)
    }

    /// iyr: numeric, 2010..=2020.
    fn valid_issue_year(&self) -> bool {
        Self::year_in_range(&self.issue_year, 2010, 2020)
    }

    /// eyr: numeric, 2020..=2030.
    fn valid_expire_year(&self) -> bool {
        Self::year_in_range(&self.expire_year, 2020, 2030)
    }

    /// Shared year rule: value must parse as usize and lie in [min, max].
    fn year_in_range(value: &str, min: usize, max: usize) -> bool {
        match value.parse::<usize>() {
            Ok(i) => i >= min && i <= max,
            Err(_) => false,
        }
    }

    /// hgt: a number followed by `cm` (150..=193) or `in` (59..=76).
    fn valid_height(&self) -> bool {
        let matcher = Regex::new(r"^(?P<num>\d+)(?P<unit>cm|in)$").unwrap();
        match matcher.captures(&self.height) {
            None => false,
            Some(c) => {
                let num = c.name("num").unwrap().as_str().parse::<usize>().unwrap();
                match c.name("unit").unwrap().as_str() {
                    "cm" => num >= 150 && num <= 193,
                    "in" => num >= 59 && num <= 76,
                    _ => false,
                }
            }
        }
    }

    /// hcl: `#` followed by exactly six lowercase hex digits.
    fn valid_hair_color(&self) -> bool {
        let matcher = Regex::new(r"^#[0-9a-f]{6}$").unwrap();
        matcher.is_match(&self.hair_color)
    }

    /// ecl: one of the seven allowed eye-color codes.
    /// (Constant slice replaces rebuilding a HashSet of owned Strings per call.)
    fn valid_eye_color(&self) -> bool {
        const VALID_COLORS: [&str; 7] = ["amb", "blu", "brn", "gry", "grn", "hzl", "oth"];
        VALID_COLORS.contains(&self.eye_color.as_str())
    }

    /// pid: exactly nine digits (leading zeros allowed).
    fn valid_passport_id(&self) -> bool {
        let matcher = Regex::new(r"^\d{9}$").unwrap();
        matcher.is_match(&self.passport_id)
    }
}
impl InputData {
    /// Counts how many parsed passports pass all required-field validations.
    fn valid_passports(&self) -> usize {
        self.passports
            .iter()
            .map(|passport| passport.is_valid() as usize)
            .sum()
    }
}
/// Reads `input/day4.txt` and parses blank-line-separated passport records.
/// Each non-blank line holds space-separated `label:value` pairs, which may
/// accumulate across several lines into one record.
fn parse_input() -> InputData {
    let io_result = lines_in_file("input/day4.txt");
    let part_split = Regex::new(r"^(?P<label>[a-z]+):(?P<value>\S+)$").unwrap();
    match io_result {
        Ok(lines) => {
            let mut data = HashMap::new();
            let mut passports = vec![];
            let mut index = 0;
            for line in lines {
                match line {
                    Ok(stuff) => {
                        if stuff.len() <= 1 {
                            // Blank line terminates the current record. Only
                            // flush when data was collected, so consecutive or
                            // trailing blank lines never yield empty passports
                            // (the original pushed one per blank line).
                            if !data.is_empty() {
                                passports.push(Passport::from_hash(data, index));
                                data = HashMap::new();
                                index += 1;
                            }
                        } else {
                            for part in stuff.split(' ') {
                                let captures = part_split.captures(part).unwrap();
                                data.insert(
                                    captures.name("label").unwrap().as_str().to_string(),
                                    captures.name("value").unwrap().as_str().to_string(),
                                );
                            }
                        }
                    }
                    Err(_) => panic!("Error reading line"),
                }
            }
            // Flush the final record when the file doesn't end with a blank line.
            if !data.is_empty() {
                passports.push(Passport::from_hash(data, index));
            }
            InputData { passports }
        }
        Err(_) => panic!("Error reading file"),
    }
}
/// Opens `file_path` and returns a buffered line iterator over its contents.
/// I/O errors from opening the file propagate to the caller.
fn lines_in_file<P>(file_path: P) -> io::Result<io::Lines<io::BufReader<fs::File>>>
where
    P: AsRef<Path>,
{
    let handle = fs::File::open(file_path)?;
    let reader = io::BufReader::new(handle);
    Ok(reader.lines())
}
|
/*
Gordon Adam
1107425
Struct that represents a message made up of a header, and possible questions and resource records
*/
use std::default;
use std::io::BufReader;
use std::io::net::ip::{SocketAddr};
use question::Question;
use resource::Resource;
use header::Header;
// NOTE(review): this file targets a pre-1.0 Rust toolchain (`deriving`,
// `uint`, `range`, `std::io::net`); it will not build on modern rustc.
#[deriving(Default,Clone)]
pub struct Message {
    pub header: Header, // The header of the message
    pub questions: Vec<Question>, // The vector of questions
    pub answers: Vec<Resource>, // The vector of answer resource records
    pub authority: Vec<Resource>, // The vector of authority resource records
    pub additional: Vec<Resource>, // The vector of additional resource records
    pub msg_copy: Vec<u8>, // A copy of the message stored as a vector of u8 characters
    pub timestamp: i64, // When the message was handled — units unverified, TODO confirm
    pub server: uint, // Cursor into `additional`, advanced by next_server()
}
impl Message {
    // Creates a new message with default values
    pub fn new() -> Message {
        return Message {..default::Default::default()};
    }

    // reads into the struct from a vector of u8 characters provided.
    // Parses the header first, then qdcount questions, ancount answers,
    // nscount authority records and arcount additional records, in DNS wire
    // order. A raw copy of the bytes is kept in `msg_copy` so the record
    // parsers can resolve compressed-name pointers into the original packet.
    pub fn read_in(&mut self, data: &mut [u8], length: uint) -> Result<(), String>{
        self.make_copy(data, length);
        let mut reader = BufReader::new(data.slice_to(length));
        self.header = Header::new();
        self.header.read_in(&mut reader);
        for i in range(0u, self.header.qdcount.to_uint().unwrap()) {
            self.questions.push(Question::new());
            match self.questions[i].read_in(&mut reader, &mut self.msg_copy) {
                Ok(()) => {},
                Err(a) => {return Err(a)}
            }
        }
        for i in range(0u, self.header.ancount.to_uint().unwrap()) {
            self.answers.push(Resource::new());
            match self.answers[i].read_in(&mut reader, &mut self.msg_copy) {
                Ok(()) => {},
                Err(a) => {return Err(a)}
            }
        }
        for i in range(0u, self.header.nscount.to_uint().unwrap()) {
            self.authority.push(Resource::new());
            match self.authority[i].read_in(&mut reader, &mut self.msg_copy) {
                Ok(()) => {},
                Err(a) => {return Err(a)}
            }
        }
        for i in range(0u, self.header.arcount.to_uint().unwrap()) {
            self.additional.push(Resource::new());
            match self.additional[i].read_in(&mut reader, &mut self.msg_copy) {
                Ok(()) => {},
                Err(a) => {return Err(a)}
            }
        }
        return Ok(());
    }

    // generates a query message for the hostname provided
    // (header + a single question; the question also writes itself into
    // msg_copy so later name compression can reference it).
    pub fn generate_query(&mut self, name: Vec<u8>) {
        self.header.generate_query_header();
        self.questions.push(Question::new());
        self.questions[0].generate(name, &mut self.msg_copy);
    }

    // creates and returns a vector of u8 characters made up from the message.
    // NOTE(review): only the header, questions and answers are serialized —
    // authority/additional records are not written. Verify this is intended.
    pub fn write(&mut self) -> Vec<u8> {
        let mut message_buffer: Vec<u8> = vec![];
        message_buffer.push_all(self.header.write().as_slice());
        for i in range(0u, self.header.qdcount.to_uint().unwrap()) {
            message_buffer.push_all(self.questions[i].write().as_slice());
        }
        for i in range(0u, self.header.ancount.to_uint().unwrap()) {
            message_buffer.push_all(self.answers[i].write().as_slice());
        }
        return message_buffer;
    }

    // Returns the next ipv4 address in the additional records, if no more exist return none.
    // `self.server` persists between calls, so successive calls walk forward
    // through the additional records.
    pub fn next_server(&mut self) -> Option<SocketAddr> {
        while self.server < self.additional.len() {
            match self.additional[self.server].ip_addr() {
                Some(ip) => {
                    self.server = self.server + 1;
                    return Some(ip)
                },
                None => {
                    self.server = self.server + 1;
                }
            }
        }
        return None;
    }

    // Prints out the entire message; including the header and each answer, name server and additional record attached to that particular message
    pub fn print(&mut self) {
        self.header.print();
        for i in range(0u, self.header.qdcount.to_uint().unwrap()) {
            self.questions[i].print();
        }
        println!("Answers");
        for i in range(0u, self.header.ancount.to_uint().unwrap()) {
            self.answers[i].print();
        }
        println!("Authoritative Name Servers");
        for i in range(0u, self.header.nscount.to_uint().unwrap()) {
            self.authority[i].print();
        }
        println!("Additional Records");
        for i in range(0u, self.header.arcount.to_uint().unwrap()) {
            self.additional[i].print();
        }
    }

    // Drops the answer records and zeroes ancount so the header stays
    // consistent with the cleared vector. The commented-out code suggests
    // authority/additional clearing was considered and abandoned.
    pub fn drop_records(&mut self) {
        if self.header.ancount > 0 {
            self.answers.clear();
        }
        /*
        if(self.header.nscount > 0) {
            self.authority.clear();
        }
        if(self.header.arcount > 0) {
            self.additional.clear();
        }
        */
        self.header.ancount = 0x0000;
        /*
        self.header.nscount = 0x0000;
        self.header.arcount = 0x0000;
        self.server = 0;
        */
    }

    // makes a copy of the message and stores it in the struct.
    // NOTE(review): appends to msg_copy rather than replacing it — calling
    // read_in twice on one Message would concatenate copies. TODO confirm.
    pub fn make_copy(&mut self, data: &mut [u8], length: uint) {
        for i in range(0, length) {
            self.msg_copy.push(data[i]);
        }
    }

    // True when any answer record has resource type `t`.
    // NOTE(review): indexes `answers` by `ancount`; a malformed message where
    // ancount exceeds the parsed answers would panic on out-of-bounds.
    pub fn contains_type(&mut self, t: u16) -> bool {
        for i in range(0u16, self.header.ancount) {
            if self.answers[i as uint].rtype == t {
                return true;
            }
        }
        return false;
    }
}
|
#![warn(rust_2018_idioms, missing_docs, warnings, unused_extern_crates)]
//! Main data structures holding information about spanish auctions.
/// Auction concepts
pub mod concepts;
/// Spain provinces
pub mod provinces;
/// Auction types
pub mod types;
pub use self::types::*;
pub use chrono::NaiveDate;
pub use geo_types::Point;
pub use rust_decimal::Decimal;
|
use ggez::event::KeyMods;
bitflags! {
    /// Bitmask of keyboard modifier keys, mirroring `ggez::event::KeyMods`
    /// (a struct of four bools) as a compact `u8` flag set.
    #[derive(Default)]
    pub struct KeyMod: u8 {
        /// No modifier held.
        const NONE = 0b00000000;
        const SHIFT = 0b00000001;
        const CTRL = 0b00000010;
        const ALT = 0b00000100;
        /// The "logo"/super key (Windows key / Command).
        const LOGO = 0b00001000;
    }
}
impl From<KeyMods> for KeyMod {
    /// Converts ggez's bool-struct modifier state into this bitmask form.
    fn from(keymods: KeyMods) -> Self {
        let mut flags = KeyMod::empty();
        // Map each boolean field onto its corresponding flag bit;
        // `set` inserts the flag when true and removes it when false.
        flags.set(Self::SHIFT, keymods.shift);
        flags.set(Self::CTRL, keymods.ctrl);
        flags.set(Self::ALT, keymods.alt);
        flags.set(Self::LOGO, keymods.logo);
        flags
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a `KeyMods` from four booleans, removing the struct-literal
    /// boilerplate that was repeated in every assertion.
    fn mods(shift: bool, ctrl: bool, alt: bool, logo: bool) -> KeyMods {
        KeyMods { shift, ctrl, alt, logo }
    }

    #[test]
    fn key_mod_conversions() {
        // No modifiers maps to the empty set.
        assert_eq!(KeyMod::empty(), KeyMod::from(mods(false, false, false, false)));
        // Single and multiple modifiers map to the matching bits.
        assert_eq!(KeyMod::SHIFT, KeyMod::from(mods(true, false, false, false)));
        assert_eq!(
            KeyMod::SHIFT | KeyMod::ALT,
            KeyMod::from(mods(true, false, true, false))
        );
        assert_eq!(
            KeyMod::SHIFT | KeyMod::ALT | KeyMod::CTRL,
            KeyMod::from(mods(true, true, true, false))
        );
        // Set difference behaves as expected against converted values.
        assert_eq!(
            KeyMod::SHIFT - KeyMod::ALT,
            KeyMod::from(mods(true, false, false, false))
        );
        assert_eq!(
            (KeyMod::SHIFT | KeyMod::ALT) - KeyMod::ALT,
            KeyMod::from(mods(true, false, false, false))
        );
        assert_eq!(
            KeyMod::SHIFT - (KeyMod::ALT | KeyMod::SHIFT),
            KeyMod::from(mods(false, false, false, false))
        );
    }
}
|
use proconio::input;
// Integer ternary search for the minimum of f(x) = a / sqrt(1 + x) + b * x.
// Assumes f is unimodal over the search range (true for positive a and b) —
// TODO confirm the problem's input constraints.
fn main() {
    input! {
        a: f64,
        b: f64,
    };
    // Objective: a decreasing term plus an increasing linear term.
    let f = |x: f64| -> f64 {
        a / (1.0 + x).sqrt() + b * x
    };
    // Shrink [lb, ub] by thirds until at most three candidates remain.
    let mut ub = 1_000_000_000_000_000_000_u64;
    let mut lb = 0;
    while ub - lb > 2 {
        let x1 = (lb * 2 + ub) / 3;
        let x2 = (lb + ub * 2) / 3;
        if f(x1 as f64) > f(x2 as f64) {
            lb = x1;
        } else {
            ub = x2;
        }
    }
    // Evaluate the remaining candidates and take the smallest value.
    let ans = f(lb as f64).min(f((lb + 1) as f64)).min(f(ub as f64));
    println!("{}", ans);
}
|
use fn_search_backend_db::models::Function;
use radix_trie::{Trie, TrieCommon};
use std::iter::FromIterator;
/// In-memory index from exact type-signature strings to the ids of
/// functions having that signature; the radix trie also supports
/// prefix-based suggestions.
pub struct FnCache {
    trie: Trie<String, Vec<i64>>,
}
impl FnCache {
fn new() -> Self {
FnCache { trie: Trie::new() }
}
/// returns at most num function ids with signature sig, starting at index starting_index
pub fn search(&self, sig: &str, num: usize, starting_index: Option<usize>) -> Option<&[i64]> {
if let Some(cache) = self.trie.get(sig) {
let start = if let Some(s) = starting_index { s } else { 0 };
let len = cache.len();
if start >= len {
return None;
}
let end = if len < start + num { len } else { start + num };
Some(&cache[start..end])
} else {
None
}
}
/// returns at most num suggested type signature ids for completing sig
pub fn suggest(&self, sig: &str, num: usize) -> Option<Vec<&str>> {
if let Some(t) = self.trie.get_raw_descendant(sig) {
let mut res = Vec::new();
for k in (&t).keys().take(num) {
res.push(k.as_str());
}
if res.is_empty() {
None
} else {
Some(res)
}
} else {
None
}
}
// ASSUME EACH FUNCTION IS ONLY INSERTED ONCE!!!
fn insert(&mut self, type_signature: &str, func_id: i64) {
self.trie.map_with_default(
type_signature.to_string(),
|cache| {
cache.push(func_id);
},
[func_id].to_vec(),
);
}
}
impl FromIterator<(String, i64)> for FnCache {
    /// Builds a cache from owned `(signature, id)` pairs.
    fn from_iter<T: IntoIterator<Item = (String, i64)>>(fns: T) -> Self {
        let mut cache = FnCache::new();
        for (sig, id) in fns {
            cache.insert(sig.as_str(), id);
        }
        cache
    }
}
impl<'a> FromIterator<(&'a str, i64)> for FnCache {
    /// Builds a cache from borrowed `(signature, id)` pairs.
    fn from_iter<T: IntoIterator<Item = (&'a str, i64)>>(fns: T) -> Self {
        let mut cache = FnCache::new();
        for (sig, id) in fns {
            cache.insert(sig, id);
        }
        cache
    }
}
impl FromIterator<Function> for FnCache {
    /// Builds a cache from owned database `Function` rows.
    fn from_iter<T: IntoIterator<Item = Function>>(fns: T) -> Self {
        let mut cache = FnCache::new();
        for func in fns {
            cache.insert(func.type_signature.as_str(), func.id);
        }
        cache
    }
}
impl<'a> FromIterator<&'a Function> for FnCache {
    /// Builds a cache from borrowed database `Function` rows.
    fn from_iter<T: IntoIterator<Item = &'a Function>>(fns: T) -> Self {
        let mut cache = FnCache::new();
        for func in fns {
            cache.insert(func.type_signature.as_str(), func.id);
        }
        cache
    }
}
|
// Copyright 2016 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
use rocksdb::{DB, Options};
pub use rocksdb::CFHandle;
/// Looks up the handle for column family `cf`, producing a descriptive
/// error string when the CF does not exist in this DB.
pub fn get_cf_handle<'a>(db: &'a DB, cf: &str) -> Result<&'a CFHandle, String> {
    match db.cf_handle(cf) {
        Some(handle) => Ok(handle),
        None => Err(format!("cf {} not found.", cf)),
    }
}
/// Opens an existing DB at `path` with the given column families, using
/// default options for the DB and every CF. Fails if the DB is missing.
pub fn open(path: &str, cfs: &[&str]) -> Result<DB, String> {
    let mut opts = Options::new();
    // Opening only: never create the DB implicitly here.
    opts.create_if_missing(false);
    // Build one fresh default `Options` per requested CF.
    let cfs_opts = cfs.iter().map(|_| Options::new()).collect();
    open_opt(&opts, path, cfs, cfs_opts)
}
/// Opens a DB with caller-supplied DB options and per-CF options;
/// `cfs` and `cfs_opts` are matched positionally.
pub fn open_opt(opts: &Options,
                path: &str,
                cfs: &[&str],
                cfs_opts: Vec<Options>)
                -> Result<DB, String> {
    // DB::open_cf takes borrowed options; collect references to ours.
    let borrowed: Vec<&Options> = cfs_opts.iter().collect();
    DB::open_cf(opts, path, cfs, &borrowed)
}
/// Opens — or creates, via `new_engine_opt`'s fallback — a DB at `path`
/// with default options for the DB and every CF.
pub fn new_engine(path: &str, cfs: &[&str]) -> Result<DB, String> {
    let opts = Options::new();
    // Build one fresh default `Options` per requested CF.
    let cfs_opts = cfs.iter().map(|_| Options::new()).collect();
    new_engine_opt(opts, path, cfs, cfs_opts)
}
/// Opens a DB with the requested CFs, creating the DB and the non-default
/// CFs when the first open attempt fails.
pub fn new_engine_opt(mut opts: Options,
                      path: &str,
                      cfs: &[&str],
                      cfs_opts: Vec<Options>)
                      -> Result<DB, String> {
    // Currently we support 1) Create new db. 2) Open a db with CFs we want. 3) Open db with no
    // CF.
    // TODO: Support open db with incomplete CFs.
    // opts is used as Rocksdb's DBOptions when call DB::open_cf
    opts.create_if_missing(false);
    let cfs_ref_opts: Vec<&Options> = cfs_opts.iter().collect();
    // First attempt: the DB already exists with exactly the CFs we want.
    if let Ok(db) = DB::open_cf(&opts, path, cfs, &cfs_ref_opts) {
        return Ok(db);
    }
    // opts is used as Rocksdb's Options(include DBOptions and ColumnFamilyOptions)
    // when call DB::open
    // Fallback: create the DB (or open it with only the default CF)...
    opts.create_if_missing(true);
    let mut db = match DB::open(&opts, path) {
        Ok(db) => db,
        Err(e) => return Err(e),
    };
    // ...then create each requested CF. "default" always exists, so it is
    // skipped; its paired options entry is intentionally unused.
    for (&cf, &cf_opts) in cfs.iter().zip(&cfs_ref_opts) {
        if cf == "default" {
            continue;
        }
        if let Err(e) = db.create_cf(cf, cf_opts) {
            return Err(e);
        }
    }
    Ok(db)
}
|
//! A crate for parsing g-code programs, designed with embedded environments in
//! mind.
//!
//! Some explicit design goals of this crate are:
//!
//! - **embedded-friendly:** users should be able to use this crate without
//! requiring access to an operating system (e.g. `#[no_std]` environments or
//! WebAssembly)
//! - **deterministic memory usage:** the library can be tweaked to use no
//! dynamic allocation (see [`buffers::Buffers`])
//! - **error-resistant:** erroneous input won't abort parsing, instead
//! notifying the caller and continuing on (see [`Callbacks`])
//! - **performance:** parsing should be reasonably fast, guaranteeing `O(n)`
//! time complexity with no backtracking
//!
//! # Getting Started
//!
//! The typical entry point to this crate is via the [`parse()`] function. This
//! gives you an iterator over the [`GCode`]s in a string of text, ignoring any
//! errors or comments that may appear along the way.
//!
//! ```rust
//! use gcode::Mnemonic;
//!
//! let src = r#"
//! G90 (absolute coordinates)
//! G00 X50.0 Y-10 (move somewhere)
//! "#;
//!
//! let got: Vec<_> = gcode::parse(src).collect();
//!
//! assert_eq!(got.len(), 2);
//!
//! let g90 = &got[0];
//! assert_eq!(g90.mnemonic(), Mnemonic::General);
//! assert_eq!(g90.major_number(), 90);
//! assert_eq!(g90.minor_number(), 0);
//!
//! let rapid_move = &got[1];
//! assert_eq!(rapid_move.mnemonic(), Mnemonic::General);
//! assert_eq!(rapid_move.major_number(), 0);
//! assert_eq!(rapid_move.value_for('X'), Some(50.0));
//! assert_eq!(rapid_move.value_for('y'), Some(-10.0));
//! ```
//!
//! The [`full_parse_with_callbacks()`] function can be used if you want access
//! to [`Line`] information and to be notified on any parse errors.
//!
//! ```rust
//! use gcode::{Callbacks, Span};
//!
//! /// A custom set of [`Callbacks`] we'll use to keep track of errors.
//! #[derive(Debug, Default)]
//! struct Errors {
//! unexpected_line_number : usize,
//! letter_without_number: usize,
//! garbage: Vec<String>,
//! }
//!
//! impl Callbacks for Errors {
//! fn unknown_content(&mut self, text: &str, _span: Span) {
//! self.garbage.push(text.to_string());
//! }
//!
//! fn unexpected_line_number(&mut self, _line_number: f32, _span: Span) {
//! self.unexpected_line_number += 1;
//! }
//!
//! fn letter_without_a_number(&mut self, _value: &str, _span: Span) {
//! self.letter_without_number += 1;
//! }
//! }
//!
//! let src = r"
//! G90 N1 ; Line numbers (N) should be at the start of a line
//! G ; there was a G, but no number
//! G01 X50 $$%# Y20 ; invalid characters are ignored
//! ";
//!
//! let mut errors = Errors::default();
//!
//! {
//! let lines: Vec<_> = gcode::full_parse_with_callbacks(src, &mut errors)
//! .collect();
//!
//! assert_eq!(lines.len(), 3);
//! let total_gcodes: usize = lines.iter()
//! .map(|line| line.gcodes().len())
//! .sum();
//! assert_eq!(total_gcodes, 2);
//! }
//!
//! assert_eq!(errors.unexpected_line_number, 1);
//! assert_eq!(errors.letter_without_number, 1);
//! assert_eq!(errors.garbage.len(), 1);
//! assert_eq!(errors.garbage[0], "$$%# ");
//! ```
//!
//! # Customising Memory Usage
//!
//! You'll need to manually create a [`Parser`] if you want control over buffer
//! sizes instead of relying on [`buffers::DefaultBuffers`].
//!
//! You shouldn't normally need to do this unless you are on an embedded device
//! and know your expected input will be bigger than
//! [`buffers::SmallFixedBuffers`] will allow.
//!
//! ```rust
//! use gcode::{Word, Comment, GCode, Nop, Parser, buffers::Buffers};
//! use arrayvec::ArrayVec;
//!
//! /// A type-level variable which contains definitions for each of our buffer
//! /// types.
//! enum MyBuffers {}
//!
//! impl<'input> Buffers<'input> for MyBuffers {
//! type Arguments = ArrayVec<[Word; 10]>;
//! type Commands = ArrayVec<[GCode<Self::Arguments>; 2]>;
//! type Comments = ArrayVec<[Comment<'input>; 1]>;
//! }
//!
//! let src = "G90 G01 X5.1";
//!
//! let parser: Parser<Nop, MyBuffers> = Parser::new(src, Nop);
//!
//! let lines = parser.count();
//! assert_eq!(lines, 1);
//! ```
//!
//! # Spans
//!
//! Something that distinguishes this crate from a lot of other g-code parsers
//! is that each element's original location, its [`Span`], is retained and
//! passed to the caller.
//!
//! This is important for applications such as:
//!
//! - Indicating where in the source text a parsing error or semantic error has
//! occurred
//! - Visually highlighting the command currently being executed when stepping
//! through a program in a simulator
//! - Reporting what point a CNC machine is up to when executing a job
//!
//! It's pretty easy to check whether something contains its [`Span`], just look
//! for a `span()` method (e.g. [`GCode::span()`]) or a `span` field (e.g.
//! [`Comment::span`]).
//!
//! # Cargo Features
//!
//! Additional functionality can be enabled by adding feature flags to your
//! `Cargo.toml` file:
//!
//! - **std:** adds `std::error::Error` impls to any errors and switches to
//! `Vec` for the default backing buffers
//! - **serde-1:** allows serializing and deserializing most types with `serde`
#![deny(
bare_trait_objects,
elided_lifetimes_in_paths,
missing_copy_implementations,
missing_debug_implementations,
rust_2018_idioms,
unreachable_pub,
unsafe_code,
unused_qualifications,
unused_results,
variant_size_differences,
intra_doc_link_resolution_failure,
missing_docs
)]
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[cfg(all(test, not(feature = "std")))]
#[macro_use]
extern crate std;
#[cfg(test)]
#[macro_use]
extern crate pretty_assertions;
#[macro_use]
mod macros;
pub mod buffers;
mod callbacks;
mod comment;
mod gcode;
mod lexer;
mod line;
mod parser;
mod span;
mod words;
pub use crate::{
callbacks::{Callbacks, Nop},
comment::Comment,
gcode::{GCode, Mnemonic},
line::Line,
parser::{full_parse_with_callbacks, parse, Parser},
span::Span,
words::Word,
};
|
use std::collections::HashMap;
use std::io;
use std::io::Write;
/// Solves the AoC 2020 day 15 "memory game": reads comma-separated starting
/// numbers from stdin and prints the 2020th (part A) or 30,000,000th
/// (part B) number spoken. Progress is printed every 500k turns.
pub fn day15(part_a: bool) {
    // just use brute force?
    let limit = if part_a { 2020 } else { 30000000 };
    let mut line = String::new();
    // Maps a number to the (1-based) turn on which it was last spoken.
    let mut last_said = HashMap::new();
    match io::stdin().read_line(&mut line) {
        Err(error) => panic!("error: {}", error),
        Ok(0) => {
            println!("got nothing");
            return;
        }
        Ok(_) => (),
    };
    let numbers: Vec<_> = line
        .trim()
        .split(',')
        .map(|x| x.parse::<usize>().unwrap())
        .collect();
    for (idx, n) in numbers.iter().enumerate() {
        last_said.insert(*n, idx + 1);
    }
    // assume the next number is 0, if there are no duplicates in the
    // starting numbers.
    let mut next_num = 0;
    for idx in (numbers.len() + 1)..limit {
        // `insert` returns the previous turn (if any), giving us the lookup
        // and the update in a single hash probe instead of `get` + `insert`.
        next_num = match last_said.insert(next_num, idx) {
            None => 0,
            Some(k) => idx - k,
        };
        if idx % 500000 == 0 {
            print!("{}...", idx);
            io::stdout().flush().expect("error");
        }
    }
    println!();
    println!("{}th number: {}", limit, next_num);
}
|
use crate::controller::ControllerState;
use crate::coordination::{CoordinationMessage, CoordinationPayload};
use async_bincode::AsyncBincodeReader;
use futures::sync::mpsc::UnboundedSender;
use futures::{self, Future, Sink, Stream};
use hyper::{self, header::CONTENT_TYPE, Method, StatusCode};
use noria::consensus::Authority;
use noria::ControllerDescriptor;
use rand;
use slog;
use std::io;
use std::net::{IpAddr, SocketAddr};
use std::sync::Arc;
use std::time;
use stream_cancel::{Valve, Valved};
use tokio;
use tokio::prelude::future::Either;
use tokio::prelude::*;
use tokio_io_pool;
use crate::handle::Handle;
use crate::Config;
/// Messages flowing through the instance's main event loop; produced by the
/// listeners below and routed to either the controller or the worker side.
#[allow(clippy::large_enum_variant)]
crate enum Event {
    /// A coordination message received from a worker connection.
    InternalMessage(CoordinationMessage),
    /// An HTTP API request: method, path, optional query string, body, and a
    /// oneshot channel for the (nested) reply or an HTTP error status.
    ExternalRequest(
        Method,
        String,
        Option<String>,
        Vec<u8>,
        futures::sync::oneshot::Sender<Result<Result<String, String>, StatusCode>>,
    ),
    /// Another controller became leader.
    LeaderChange(ControllerState, ControllerDescriptor),
    /// This instance won the leader election.
    WonLeaderElection(ControllerState),
    /// The leadership campaign failed.
    CampaignError(failure::Error),
    // Test-only readiness probe.
    #[cfg(test)]
    IsReady(futures::sync::oneshot::Sender<bool>),
    /// Run a caller-supplied migration closure, signalling `done` afterwards.
    ManualMigration {
        f: Box<dyn FnOnce(&mut crate::controller::migrate::Migration) + Send + 'static>,
        done: futures::sync::oneshot::Sender<()>,
    },
}
use std::fmt;
impl fmt::Debug for Event {
    /// Terse debug rendering: payloads are elided except for internal
    /// messages, request routes, and campaign errors.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Event::InternalMessage(cm) => write!(f, "Internal({:?})", cm),
            Event::ExternalRequest(m, path, ..) => write!(f, "Request({} {})", m, path),
            Event::LeaderChange(..) => f.write_str("LeaderChange(..)"),
            Event::WonLeaderElection(..) => f.write_str("Won(..)"),
            Event::CampaignError(e) => write!(f, "CampaignError({:?})", e),
            #[cfg(test)]
            Event::IsReady(..) => f.write_str("IsReady"),
            Event::ManualMigration { .. } => f.write_str("ManualMigration{..}"),
        }
    }
}
/// Start up a new instance and return a handle to it. Dropping the handle will stop the
/// instance. Make sure that this method is run while on a runtime.
///
/// Spawns the listeners, the event-routing loop, the controller loop and the
/// worker loop; all spawned streams are wrapped in a `Valve` tied to the
/// returned `Handle` so they shut down when the handle is dropped.
pub(super) fn start_instance<A: Authority + 'static>(
    authority: Arc<A>,
    listen_addr: IpAddr,
    config: Config,
    memory_limit: Option<usize>,
    memory_check_frequency: Option<time::Duration>,
    log: slog::Logger,
) -> impl Future<Item = Handle<A>, Error = failure::Error> {
    // Dedicated I/O thread pool for the worker side.
    let mut pool = tokio_io_pool::Builder::default();
    pool.name_prefix("io-worker-");
    if let Some(threads) = config.threads {
        pool.pool_size(threads);
    }
    let iopool = pool.build().unwrap();
    let (trigger, valve) = Valve::new();
    let (tx, rx) = futures::sync::mpsc::unbounded();
    // `try` block collects the fallible socket setup so a single error path
    // below can turn it into a failed future.
    let v = try {
        // we'll be listening for a couple of different types of events:
        // first, events from workers
        let wport = tokio::net::TcpListener::bind(&SocketAddr::new(listen_addr, 0))?;
        let waddr = wport.local_addr()?;
        // second, messages from the "real world"
        // (preferred port 6033, falling back to an ephemeral port)
        let xport = tokio::net::TcpListener::bind(&SocketAddr::new(listen_addr, 6033))
            .or_else(|_| tokio::net::TcpListener::bind(&SocketAddr::new(listen_addr, 0)))?;
        let xaddr = xport.local_addr()?;
        // and third, domain control traffic. this traffic is a little special, since we may need to
        // receive from it while handling control messages (e.g., for replay acks). because of this, we
        // give it its own channel.
        let cport = tokio::net::TcpListener::bind(&SocketAddr::new(listen_addr, 0))?;
        let caddr = cport.local_addr()?;
        ((wport, waddr), (xport, xaddr), (cport, caddr))
    };
    let ((wport, waddr), (xport, xaddr), (cport, caddr)) = match v {
        Ok(v) => v,
        Err(e) => return future::Either::A(future::err(e)),
    };
    // set up different loops for the controller "part" and the worker "part" of us. this is
    // necessary because sometimes the two need to communicate (e.g., for migrations), and if they
    // were in a single loop, that could deadlock.
    let (ctrl_tx, ctrl_rx) = futures::sync::mpsc::unbounded();
    let (worker_tx, worker_rx) = futures::sync::mpsc::unbounded();
    // spawn all of those
    tokio::spawn(listen_internal(&valve, log.clone(), tx.clone(), wport));
    let ext_log = log.clone();
    tokio::spawn(
        listen_external(tx.clone(), valve.wrap(xport.incoming()), authority.clone()).map_err(
            move |e| {
                warn!(ext_log, "external request failed: {:?}", e);
            },
        ),
    );
    // first, a loop that just forwards to the appropriate place
    // (true => controller loop, false => worker loop)
    tokio::spawn(
        rx.map_err(|_| unreachable!())
            .fold((ctrl_tx, worker_tx), move |(ctx, wtx), e| {
                let fw = move |e, to_ctrl| {
                    if to_ctrl {
                        Either::A(ctx.send(e).map(move |ctx| (ctx, wtx)))
                    } else {
                        Either::B(wtx.send(e).map(move |wtx| (ctx, wtx)))
                    }
                };
                match e {
                    Event::InternalMessage(ref msg) => match msg.payload {
                        CoordinationPayload::Deregister => fw(e, true),
                        CoordinationPayload::RemoveDomain => fw(e, false),
                        CoordinationPayload::AssignDomain(..) => fw(e, false),
                        CoordinationPayload::DomainBooted(..) => fw(e, false),
                        CoordinationPayload::Register { .. } => fw(e, true),
                        CoordinationPayload::Heartbeat => fw(e, true),
                        CoordinationPayload::CreateUniverse(..) => fw(e, true),
                    },
                    Event::ExternalRequest(..) => fw(e, true),
                    Event::ManualMigration { .. } => fw(e, true),
                    Event::LeaderChange(..) => fw(e, false),
                    Event::WonLeaderElection(..) => fw(e, true),
                    Event::CampaignError(..) => fw(e, true),
                    #[cfg(test)]
                    Event::IsReady(..) => fw(e, true),
                }
                .map_err(|e| panic!("{:?}", e))
            })
            .map(|_| ()),
    );
    // Advertise this instance's addresses (plus a random nonce) to peers.
    let descriptor = ControllerDescriptor {
        external_addr: xaddr,
        worker_addr: waddr,
        domain_addr: caddr,
        nonce: rand::random(),
    };
    tokio::spawn(crate::controller::main(
        &valve,
        config,
        descriptor,
        ctrl_rx,
        cport,
        log.clone(),
        authority.clone(),
        tx.clone(),
    ));
    tokio::spawn(crate::worker::main(
        iopool.handle().clone(),
        worker_rx,
        listen_addr,
        waddr,
        memory_limit,
        memory_check_frequency,
        log.clone(),
    ));
    future::Either::B(Handle::new(authority, tx, trigger, iopool))
}
/// Accepts worker TCP connections on `on` and forwards every decoded
/// `CoordinationMessage` into the main event loop as
/// `Event::InternalMessage`. Both the accept stream and each per-socket
/// stream are wrapped in `valve` so they stop when the instance shuts down.
fn listen_internal(
    valve: &Valve,
    log: slog::Logger,
    event_tx: UnboundedSender<Event>,
    on: tokio::net::TcpListener,
) -> impl Future<Item = (), Error = ()> {
    let valve = valve.clone();
    valve
        .wrap(on.incoming())
        .map_err(failure::Error::from)
        .for_each(move |sock| {
            // One forwarding task per accepted socket: bincode-decode frames
            // and pipe them into the shared event channel.
            tokio::spawn(
                valve
                    .wrap(AsyncBincodeReader::from(sock))
                    .map(Event::InternalMessage)
                    .map_err(failure::Error::from)
                    .forward(
                        event_tx
                            .clone()
                            .sink_map_err(|_| format_err!("main event loop went away")),
                    )
                    .map(|_| ())
                    .map_err(|e| panic!("{:?}", e)),
            );
            Ok(())
        })
        .map_err(move |e| {
            warn!(log, "internal connection failed: {:?}", e);
        })
}
/// State for the external HTTP API service: the sender into the main event
/// loop plus a shared handle to the `Authority` (read below for `/zookeeper/` paths).
struct ExternalServer<A: Authority>(UnboundedSender<Event>, Arc<A>);
/// Serve the external HTTP API on `on`: a static `/graph.html` page, read-only
/// `/zookeeper/*` lookups against the authority, and a catch-all that forwards
/// the request into the main event loop as `Event::ExternalRequest` and
/// replies with whatever comes back over the oneshot channel.
fn listen_external<A: Authority + 'static>(
    event_tx: UnboundedSender<Event>,
    on: Valved<tokio::net::tcp::Incoming>,
    authority: Arc<A>,
) -> impl Future<Item = (), Error = hyper::Error> + Send {
    use hyper::{
        service::{NewService, Service},
        Request, Response,
    };
    impl<A: Authority> Clone for ExternalServer<A> {
        // Needed due to #26925
        fn clone(&self) -> Self {
            ExternalServer(self.0.clone(), self.1.clone())
        }
    }
    impl<A: Authority> Service for ExternalServer<A> {
        type ReqBody = hyper::Body;
        type ResBody = hyper::Body;
        type Error = hyper::Error;
        type Future = Box<dyn Future<Item = Response<Self::ResBody>, Error = Self::Error> + Send>;
        fn call(&mut self, req: Request<Self::ReqBody>) -> Self::Future {
            let mut res = Response::builder();
            // disable CORS to allow use as API server
            res.header(hyper::header::ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            if let Method::GET = *req.method() {
                match req.uri().path() {
                    // Debug UI, compiled into the binary via include_str!.
                    "/graph.html" => {
                        res.header(CONTENT_TYPE, "text/html");
                        let res = res.body(hyper::Body::from(include_str!("graph.html")));
                        return Box::new(futures::future::ok(res.unwrap()));
                    }
                    // Read-through to authority state; 404 when absent or the
                    // read fails. `&path[11..]` strips the "/zookeeper/" prefix.
                    path if path.starts_with("/zookeeper/") => {
                        let res = match self.1.try_read(&format!("/{}", &path[11..])) {
                            Ok(Some(data)) => {
                                res.header(CONTENT_TYPE, "application/json");
                                res.body(hyper::Body::from(data))
                            }
                            _ => {
                                res.status(StatusCode::NOT_FOUND);
                                res.body(hyper::Body::empty())
                            }
                        };
                        return Box::new(futures::future::ok(res.unwrap()));
                    }
                    _ => {}
                }
            }
            // Everything else is relayed to the main event loop; `tx`/`rx`
            // carry the controller's reply back to this request handler.
            let method = req.method().clone();
            let path = req.uri().path().to_string();
            let query = req.uri().query().map(ToOwned::to_owned);
            let event_tx = self.0.clone();
            Box::new(req.into_body().concat2().and_then(move |body| {
                let body: Vec<u8> = body.iter().cloned().collect();
                let (tx, rx) = futures::sync::oneshot::channel();
                event_tx
                    .clone()
                    .send(Event::ExternalRequest(method, path, query, body, tx))
                    .map_err(|_| futures::Canceled)
                    .then(move |_| rx)
                    .then(move |reply| match reply {
                        Ok(reply) => {
                            let res = match reply {
                                // Application-level success payload (JSON).
                                Ok(Ok(reply)) => {
                                    res.header("Content-Type", "application/json; charset=utf-8");
                                    res.body(hyper::Body::from(reply))
                                }
                                // Application-level failure payload (plain text).
                                Ok(Err(reply)) => {
                                    res.status(StatusCode::INTERNAL_SERVER_ERROR);
                                    res.header("Content-Type", "text/plain; charset=utf-8");
                                    res.body(hyper::Body::from(reply))
                                }
                                // Handler chose a bare status code.
                                Err(status_code) => {
                                    res.status(status_code);
                                    res.body(hyper::Body::empty())
                                }
                            };
                            Ok(res.unwrap())
                        }
                        // Reply channel dropped without an answer.
                        Err(_) => {
                            res.status(StatusCode::NOT_FOUND);
                            Ok(res.body(hyper::Body::empty()).unwrap())
                        }
                    })
            }))
        }
    }
    impl<A: Authority> NewService for ExternalServer<A> {
        type Service = Self;
        type ReqBody = hyper::Body;
        type ResBody = hyper::Body;
        type Error = hyper::Error;
        type InitError = io::Error;
        type Future = tokio::prelude::future::FutureResult<Self, Self::InitError>;
        fn new_service(&self) -> tokio::prelude::future::FutureResult<Self, io::Error> {
            Ok(self.clone()).into()
        }
    }
    let service = ExternalServer(event_tx, authority);
    hyper::server::Server::builder(on).serve(service)
}
|
//! Types for identifying/indexing and comparing the time between simulation frames.
//!
//! Many things in CrystalOrb are timestamped. Each frame of a [`World`](crate::world::World)
//! simulation are assigned a [`Timestamp`]. The corresponding
//! [snapshots](crate::world::World::SnapshotType), [commands](crate::world::World::CommandType),
//! and [display states](crate::world::World::DisplayStateType) are all timestamped so that the
//! client and server knows which simulation frame they are associated with.
// use crate::fixed_timestepper::Stepper;
// use serde::{Deserialize, Serialize};
use std::{
cmp::Ordering,
fmt::{Debug, Display, Formatter, Result},
num::Wrapping,
ops::{Add, Deref, DerefMut, Range, Sub},
};
/// Represents and identifies a simulation instant.
// Internally a wrapping 16-bit counter, so timestamps repeat every 2^16 frames.
#[derive(Eq, PartialEq, Debug, Clone, Copy, /*Serialize, Deserialize,*/ Default)]
pub struct Timestamp(Wrapping<i16>);
impl Timestamp {
    /// See note about transitivity for Timestamp's Ord implementation.
    pub const MAX_COMPARABLE_RANGE: i16 = i16::MAX;
    /// Find the corresponding timestamp for the current time in seconds.
    pub fn from_seconds(seconds: f64, timestep_seconds: f64) -> Self {
        Self::from(FloatTimestamp::from_seconds(seconds, timestep_seconds))
    }
    /// Modify itself to become the timestamp of the next frame.
    ///
    /// Wraps around past `i16::MAX` (the counter is a `Wrapping<i16>`).
    pub fn increment(&mut self) {
        self.0 += Wrapping(1);
    }
    /// Find the corresponding time in seconds for this timestamp. Since timestamps repeat over
    /// time, this function returns the time closest to zero. This makes it useful to find the
    /// number of seconds between two timestamps.
    ///
    /// # Example
    ///
    /// ```
    /// use orb::timestamp::Timestamp;
    /// use float_cmp::approx_eq;
    /// const TIMESTEP: f64 = 1.0 / 60.0;
    ///
    /// // Given two timestamps.
    /// let t1 = Timestamp::default();
    /// let t2 = t1 + 50;
    ///
    /// // We can get the seconds between these two timestamps.
    /// let seconds_difference = (t2 - t1).as_seconds(TIMESTEP);
    /// assert!(approx_eq!(f64, seconds_difference, 50.0 / 60.0, ulps=1));
    /// ```
    pub fn as_seconds(self, timestep_seconds: f64) -> f64 {
        self.0 .0 as f64 * timestep_seconds
    }
    /// See note about transitivity for Timestamp's Ord implementation.
    ///
    /// Returns the half-open range of timestamps, centered on `midpoint`,
    /// within which all members can be safely compared with each other.
    pub fn comparable_range_with_midpoint(midpoint: Timestamp) -> Range<Timestamp> {
        let max_distance_from_midpoint = Self::MAX_COMPARABLE_RANGE / 2;
        (midpoint - max_distance_from_midpoint)..(midpoint + max_distance_from_midpoint)
    }
    /// Expose the raw wrapped frame counter.
    pub fn to_i16(self) -> i16 {
        self.0 .0
    }
    /// Rebuild a timestamp from a raw counter value (inverse of [`Self::to_i16`]).
    pub fn from_i16(source: i16) -> Self {
        Self(Wrapping(source))
    }
}
impl From<FloatTimestamp> for Timestamp {
    /// Truncate the fractional frame part, keeping the whole-frame count.
    fn from(float_timestamp: FloatTimestamp) -> Self {
        let whole_frames = float_timestamp.0 as i16;
        Self(Wrapping(whole_frames))
    }
}
impl Add<i16> for Timestamp {
type Output = Self;
fn add(self, rhs: i16) -> Self::Output {
Self(self.0 + Wrapping(rhs))
}
}
impl Sub<i16> for Timestamp {
type Output = Self;
fn sub(self, rhs: i16) -> Self::Output {
Self(self.0 - Wrapping(rhs))
}
}
impl Sub<Timestamp> for Timestamp {
type Output = Self;
fn sub(self, rhs: Self) -> Self::Output {
Self(self.0 - rhs.0)
}
}
impl Ord for Timestamp {
    /// Note: This is technically not transitive, since we are doing wrapped differences.
    /// To guarantee transitivity (for example, to use in `BTreeMap`s), ensure that all values being
    /// compared against each other are at most `std::i16::MAX` length of each other.
    /// (Maybe `std::i16::MAX` is off by one, but it is at least on the conservative side)
    fn cmp(&self, other: &Self) -> Ordering {
        // The wrapped difference is negative/zero/positive exactly when `self`
        // is before/equal-to/after `other` within the comparable range, so
        // compare it against zero directly. This replaces a guard-based match
        // that needed an `unreachable!()` catch-all to be exhaustive.
        let difference: Wrapping<i16> = self.0 - other.0;
        difference.0.cmp(&0)
    }
}
impl PartialOrd for Timestamp {
    /// Delegates to the total order from `Ord` (see its transitivity caveat).
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl From<Timestamp> for i16 {
fn from(timestamp: Timestamp) -> i16 {
timestamp.0 .0
}
}
impl Display for Timestamp {
    /// Renders as e.g. `T=+00123`: the sign is always shown and the number is
    /// zero-padded to a total width of 6.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        write!(f, "T={:+06}", self.0 .0)
    }
}
/// Representation of time in the same units as [`Timestamp`], but whereas [`Timestamp`] identifies
/// which whole number of frames only, [`FloatTimestamp`] can represent any time in the continuous
/// region between two adjacent frames.
// Invariant (maintained by `from_unwrapped`): the value lies in [-2^15, 2^15).
#[derive(PartialEq, Debug, Clone, Copy, /*Serialize, Deserialize,*/ Default)]
pub struct FloatTimestamp(f64);
impl FloatTimestamp {
    /// Convert the time from seconds into [`Timestamp`] units (1 per frame, i.e. 1 per
    /// timestep), and fit it into the [`Timestamp`] space.
    pub fn from_seconds(seconds: f64, timestep_seconds: f64) -> Self {
        Self::from_unwrapped(seconds / timestep_seconds)
    }
    /// Fit the time in [`Timestamp`] units into the [`Timestamp`] space by wrapping.
    pub fn from_unwrapped(frames: f64) -> Self {
        // Maps onto [-2^15, 2^15), the same numeric range as the wrapping
        // `i16` inside `Timestamp` (15.0f64.exp2() == 32768).
        let frames_wrapped =
            (frames + 15.0_f64.exp2()).rem_euclid(16.0_f64.exp2()) - 15.0_f64.exp2();
        Self(frames_wrapped)
    }
    /// Find the corresponding time in seconds for this float timestamp. Since timestamps
    /// repeat over time, this function returns the time closest to zero. This makes it useful
    /// to find the number of seconds between two float timestamps.
    ///
    /// # Example
    ///
    /// ```
    /// use orb::timestamp::FloatTimestamp;
    /// use float_cmp::approx_eq;
    /// const TIMESTEP: f64 = 1.0 / 60.0;
    ///
    /// // Given two float timestamps.
    /// let t1 = FloatTimestamp::from_unwrapped(123.2);
    /// let t2 = FloatTimestamp::from_unwrapped(123.7);
    ///
    /// // We can get the seconds between these two float timestamps.
    /// let seconds_difference = (t2 - t1).as_seconds(TIMESTEP);
    /// assert!(approx_eq!(f64, seconds_difference, 0.5 / 60.0, ulps=1));
    /// ```
    pub fn as_seconds(self, timestep_seconds: f64) -> f64 {
        self.0 * timestep_seconds
    }
    /// Round up to the next whole-number [`Timestamp`] (or its own value if it is already a
    /// whole number).
    ///
    /// # Example
    ///
    /// ```
    /// use orb::timestamp::{FloatTimestamp, Timestamp};
    ///
    /// let t1 = FloatTimestamp::from_unwrapped(123.4);
    /// let t2 = FloatTimestamp::from_unwrapped(123.0);
    ///
    /// assert_eq!(t1.ceil(), Timestamp::default() + 124);
    /// assert_eq!(t2.ceil(), Timestamp::default() + 123);
    /// ```
    pub fn ceil(self) -> Timestamp {
        Timestamp(Wrapping(self.0.ceil() as i16))
    }
    /// Round down to the previous whole-number [`Timestamp`] (or its own value if it is
    /// already a whole number).
    ///
    /// # Example
    ///
    /// ```
    /// use orb::timestamp::{FloatTimestamp, Timestamp};
    ///
    /// let t1 = FloatTimestamp::from_unwrapped(123.4);
    /// let t2 = FloatTimestamp::from_unwrapped(123.0);
    ///
    /// assert_eq!(t1.floor(), Timestamp::default() + 123);
    /// assert_eq!(t2.floor(), Timestamp::default() + 123);
    /// ```
    pub fn floor(self) -> Timestamp {
        Timestamp(Wrapping(self.0.floor() as i16))
    }
}
impl Sub<FloatTimestamp> for FloatTimestamp {
    type Output = Self;
    /// Difference of two float timestamps, re-wrapped into timestamp space.
    fn sub(self, rhs: Self) -> Self::Output {
        let raw_difference = self.0 - rhs.0;
        Self::from_unwrapped(raw_difference)
    }
}
impl From<Timestamp> for FloatTimestamp {
fn from(timestamp: Timestamp) -> FloatTimestamp {
FloatTimestamp(timestamp.0 .0 as f64)
}
}
/// Associate a [`Timestamp`] with another type.
#[derive(Debug, Clone, /*Serialize, Deserialize,*/ Default, PartialEq)]
pub struct Timestamped<T> {
    /// The wrapped payload.
    inner: T,
    /// The simulation frame this payload corresponds to.
    timestamp: Timestamp,
}
impl<T> Timestamped<T> {
/// Wrap the given data with the given [`Timestamp`].
pub fn new(inner: T, timestamp: Timestamp) -> Self {
Self { inner, timestamp }
}
/// Get a reference to the inner data without the [`Timestamp`].
pub fn inner(&self) -> &T {
&self.inner
}
/// Get a mutable reference to the inner data without the [`Timestamp`].
pub fn inner_mut(&mut self) -> &mut T {
&mut self.inner
}
/// Get the associated [`Timestamp`].
pub fn timestamp(&self) -> Timestamp {
self.timestamp
}
/// Update the current [`Timestamp`] associated with the inner piece of data.
pub fn set_timestamp(&mut self, timestamp: Timestamp) {
self.timestamp = timestamp;
}
}
// impl<T: Stepper> Stepper for Timestamped<T> {
// fn step(&mut self) {
// self.inner.step();
// self.timestamp.increment();
// }
// }
impl<T> Deref for Timestamped<T> {
    type Target = T;
    /// Transparent read access to the wrapped value.
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<T> DerefMut for Timestamped<T> {
    /// Transparent write access to the wrapped value.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
#[cfg(test)]
pub mod tests {
    use super::*;
    /// Boundary and near-boundary timestamps reused by all the tests below.
    pub fn interesting_timestamps() -> [Timestamp; 7] {
        [
            Timestamp::default() + std::i16::MIN,
            Timestamp::default() + std::i16::MIN / 2,
            Timestamp::default() - 1,
            Timestamp::default(),
            Timestamp::default() + 1,
            Timestamp::default() + std::i16::MAX / 2,
            Timestamp::default() + std::i16::MAX,
        ]
    }
    // Offsets of "one step", "comparable-range limit", and "wrapped past the
    // limit" in both directions relative to some initial timestamp.
    struct InterestingOffsets {
        plus_one: Timestamp,
        plus_limit: Timestamp,
        plus_wrapped: Timestamp,
        plus_wrapped_limit: Timestamp,
        plus_wrapped_full: Timestamp,
        minus_one: Timestamp,
        minus_limit: Timestamp,
        minus_wrapped: Timestamp,
        minus_wrapped_limit: Timestamp,
        minus_wrapped_full: Timestamp,
    }
    fn generate_interesting_offsets(initial: Timestamp) -> InterestingOffsets {
        let plus_one = initial + 1;
        let plus_limit = initial + i16::MAX;
        let plus_wrapped = plus_limit + 1;
        // Subtracting i16::MIN advances a further 2^15, landing one step
        // short of a full 2^16 lap; the next +1 completes the lap.
        let plus_wrapped_limit = plus_limit - i16::MIN;
        let plus_wrapped_full = plus_wrapped_limit + 1;
        let minus_one = initial - 1;
        let minus_limit = initial + i16::MIN;
        let minus_wrapped = minus_limit - 1;
        let minus_wrapped_limit = minus_limit - i16::MAX;
        let minus_wrapped_full = minus_wrapped_limit - 1;
        InterestingOffsets {
            plus_one,
            plus_limit,
            plus_wrapped,
            plus_wrapped_limit,
            plus_wrapped_full,
            minus_one,
            minus_limit,
            minus_wrapped,
            minus_wrapped_limit,
            minus_wrapped_full,
        }
    }
    // Ordering flips once an offset wraps past the comparable range.
    #[test]
    fn test_timestamp_ord() {
        fn test_timestamp_ord_with_initial(initial: Timestamp) {
            let offsets = generate_interesting_offsets(initial);
            assert!(offsets.plus_one > initial);
            assert!(offsets.plus_limit > initial);
            assert!(offsets.plus_wrapped < initial);
            assert!(offsets.plus_wrapped_limit < initial);
            assert!(offsets.plus_wrapped_full == initial);
            assert!(offsets.minus_one < initial);
            assert!(offsets.minus_limit < initial);
            assert!(offsets.minus_wrapped > initial);
            assert!(offsets.minus_wrapped_limit > initial);
            assert!(offsets.minus_wrapped_full == initial);
        }
        for timestamp in &interesting_timestamps() {
            test_timestamp_ord_with_initial(*timestamp);
        }
    }
    // Subtraction always yields the wrapped difference closest to zero.
    #[test]
    fn test_timestamp_difference() {
        fn test_timestamp_difference_with_initial(initial: Timestamp) {
            let offsets = generate_interesting_offsets(initial);
            assert_eq!(offsets.plus_one - initial, Timestamp::default() + 1);
            assert_eq!(
                offsets.plus_limit - initial,
                Timestamp::default() + i16::MAX
            );
            assert_eq!(
                offsets.plus_wrapped - initial,
                Timestamp::default() + i16::MIN
            );
            assert_eq!(
                offsets.plus_wrapped_limit - initial,
                Timestamp::default() - 1
            );
            assert_eq!(offsets.plus_wrapped_full - initial, Timestamp::default());
            assert_eq!(offsets.minus_one - initial, Timestamp::default() - 1);
            assert_eq!(
                offsets.minus_limit - initial,
                Timestamp::default() + i16::MIN
            );
            assert_eq!(
                offsets.minus_wrapped - initial,
                Timestamp::default() + i16::MAX
            );
            assert_eq!(
                offsets.minus_wrapped_limit - initial,
                Timestamp::default() + 1
            );
            assert_eq!(offsets.minus_wrapped_full - initial, Timestamp::default());
        }
        for timestamp in &interesting_timestamps() {
            test_timestamp_difference_with_initial(*timestamp);
        }
    }
    // `increment` steps exactly one frame forward, even across the wrap point.
    #[test]
    fn test_timestamp_increment() {
        for timestamp in &interesting_timestamps() {
            let mut incremented = timestamp.clone();
            incremented.increment();
            assert!(incremented > *timestamp);
            assert_eq!(incremented - *timestamp, Timestamp::default() + 1);
        }
    }
    // Seconds-to-timestamp conversion truncates and wraps at the i16 limits.
    #[test]
    fn test_timestamp_from_seconds() {
        assert_eq!(Timestamp::from_seconds(0.0, 1.0), Timestamp::default());
        assert_eq!(Timestamp::from_seconds(1.0, 1.0), Timestamp::default() + 1);
        assert_eq!(
            Timestamp::from_seconds(0.25, 0.25),
            Timestamp::default() + 1
        );
        assert_eq!(Timestamp::from_seconds(-1.0, 1.0), Timestamp::default() - 1);
        assert_eq!(
            Timestamp::from_seconds(i16::MAX as f64, 1.0),
            Timestamp::default() + i16::MAX,
        );
        assert_eq!(
            Timestamp::from_seconds((i16::MAX as f64) + 1.0, 1.0),
            Timestamp::default() + i16::MIN
        );
        assert_eq!(
            Timestamp::from_seconds(i16::MIN as f64, 1.0),
            Timestamp::default() + i16::MIN
        );
        assert_eq!(
            Timestamp::from_seconds((i16::MIN as f64) - 1.0, 1.0),
            Timestamp::default() + i16::MAX
        );
    }
    // Round-tripping through seconds reproduces the wrapped frame count.
    #[test]
    #[allow(clippy::float_cmp)]
    fn test_timestamp_as_seconds() {
        assert_eq!(Timestamp::from_seconds(0.0, 1.0).as_seconds(1.0), 0.0);
        assert_eq!(Timestamp::from_seconds(1.0, 1.0).as_seconds(1.0), 1.0);
        assert_eq!(Timestamp::from_seconds(1.0, 1.0).as_seconds(0.25), 0.25);
        assert_eq!(Timestamp::from_seconds(0.25, 0.25).as_seconds(0.25), 0.25);
        assert_eq!(Timestamp::from_seconds(-1.0, 1.0).as_seconds(1.0), -1.0);
        assert_eq!(
            Timestamp::from_seconds(i16::MAX as f64, 1.0).as_seconds(1.0),
            i16::MAX as f64,
        );
        assert_eq!(
            Timestamp::from_seconds((i16::MAX as f64) + 1.0, 1.0).as_seconds(1.0),
            i16::MIN as f64,
        );
        assert_eq!(
            Timestamp::from_seconds(i16::MIN as f64, 1.0).as_seconds(1.0),
            i16::MIN as f64,
        );
        assert_eq!(
            Timestamp::from_seconds((i16::MIN as f64) - 1.0, 1.0).as_seconds(1.0),
            i16::MAX as f64,
        );
    }
}
|
mod stdin_input;
mod stdout_output;
pub use stdin_input::StdinInput;
pub use stdout_output::StdoutOutput; |
#[doc = r" Register block"]
// Auto-generated (svd2rust-style) memory map. Field order and the
// `_reservedN` padding arrays pin every register to its documented offset;
// do not reorder or resize fields.
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Unspecified"]
    pub unused0: UNUSED0,
    #[doc = "0x04 - Unspecified"]
    pub unused1: UNUSED1,
    #[doc = "0x08 - Unspecified"]
    pub unused2: UNUSED2,
    _reserved3: [u8; 4usize],
    #[doc = "0x10 - Unspecified"]
    pub unused3: UNUSED3,
    #[doc = "0x14 - Description collection[n]: Reserved for Nordic firmware design"]
    pub nrffw: [NRFFW; 15],
    #[doc = "0x50 - Description collection[n]: Reserved for Nordic hardware design"]
    pub nrfhw: [NRFHW; 12],
    #[doc = "0x80 - Description collection[n]: Reserved for customer"]
    pub customer: [CUSTOMER; 32],
    _reserved7: [u8; 256usize],
    #[doc = "0x200 - Description collection[n]: Mapping of the nRESET function"]
    pub pselreset: [PSELRESET; 2],
    #[doc = "0x208 - Access port protection"]
    pub approtect: APPROTECT,
    #[doc = "0x20c - Setting of pins dedicated to NFC functionality: NFC antenna or GPIO"]
    pub nfcpins: NFCPINS,
    #[doc = "0x210 - Processor debug control"]
    pub debugctrl: DEBUGCTRL,
    _reserved11: [u8; 240usize],
    #[doc = "0x304 - GPIO reference voltage / external output supply voltage in high voltage mode"]
    pub regout0: REGOUT0,
}
#[doc = "Unspecified"]
pub struct UNUSED0 {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Unspecified"]
pub mod unused0;
#[doc = "Unspecified"]
pub struct UNUSED1 {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Unspecified"]
pub mod unused1;
#[doc = "Unspecified"]
pub struct UNUSED2 {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Unspecified"]
pub mod unused2;
#[doc = "Unspecified"]
pub struct UNUSED3 {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Unspecified"]
pub mod unused3;
#[doc = "Description collection[n]: Reserved for Nordic firmware design"]
pub struct NRFFW {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Description collection[n]: Reserved for Nordic firmware design"]
pub mod nrffw;
#[doc = "Description collection[n]: Reserved for Nordic hardware design"]
pub struct NRFHW {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Description collection[n]: Reserved for Nordic hardware design"]
pub mod nrfhw;
#[doc = "Description collection[n]: Reserved for customer"]
pub struct CUSTOMER {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Description collection[n]: Reserved for customer"]
pub mod customer;
#[doc = "Description collection[n]: Mapping of the nRESET function"]
pub struct PSELRESET {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Description collection[n]: Mapping of the nRESET function"]
pub mod pselreset;
#[doc = "Access port protection"]
pub struct APPROTECT {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Access port protection"]
pub mod approtect;
#[doc = "Setting of pins dedicated to NFC functionality: NFC antenna or GPIO"]
pub struct NFCPINS {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Setting of pins dedicated to NFC functionality: NFC antenna or GPIO"]
pub mod nfcpins;
#[doc = "Processor debug control"]
pub struct DEBUGCTRL {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Processor debug control"]
pub mod debugctrl;
#[doc = "GPIO reference voltage / external output supply voltage in high voltage mode"]
pub struct REGOUT0 {
    // Volatile 32-bit backing cell for this memory-mapped register.
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO reference voltage / external output supply voltage in high voltage mode"]
pub mod regout0;
|
/* origin: FreeBSD /usr/src/lib/msun/src/k_tan.c */
/*
* ====================================================
* Copyright 2004 Sun Microsystems, Inc. All Rights Reserved.
*
* Permission to use, copy, modify, and distribute this
* software is freely granted, provided that this notice
* is preserved.
* ====================================================
*/
/* |tan(x)/x - t(x)| < 2**-25.5 (~[-2e-08, 2e-08]). */
const T: [f64; 6] = [
    0.333331395030791399758, /* 0x15554d3418c99f.0p-54 */
    0.133392002712976742718, /* 0x1112fd38999f72.0p-55 */
    0.0533812378445670393523, /* 0x1b54c91d865afe.0p-57 */
    0.0245283181166547278873, /* 0x191df3908c33ce.0p-58 */
    0.00297435743359967304927, /* 0x185dadfcecf44e.0p-61 */
    0.00946564784943673166728, /* 0x1362b9bf971bcd.0p-59 */
];
/// Kernel tangent: evaluate the polynomial approximation of `tan(x)` on the
/// reduced argument `x` (in double precision) and return the result as `f32`.
/// When `odd` is true the caller is in an odd quadrant, so the reciprocal
/// `-1/tan(x)` is returned instead.
#[cfg_attr(all(test, assert_no_panic), no_panic::no_panic)]
pub(crate) fn k_tanf(x: f64, odd: bool) -> f32 {
    let x2 = x * x;
    // The polynomial is evaluated as three independent two-term pieces (low,
    // middle, and high coefficients) recombined at the end. This mirrors the
    // original FreeBSD splitting, which trades 2 extra multiplies for
    // instruction-level parallelism; the final combination order is kept
    // bit-for-bit identical to the original.
    let x4 = x2 * x2;
    let x3 = x2 * x;
    let lo = T[0] + x2 * T[1];
    let mid = T[2] + x2 * T[3];
    let hi = T[4] + x2 * T[5];
    let tan = (x + x3 * lo) + (x3 * x4) * (mid + x4 * hi);
    if odd {
        (-1. / tan) as f32
    } else {
        tan as f32
    }
}
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#[repr(packed)]
pub struct Good {
    // `packed` lowers every field's alignment to 1; `main` below exercises
    // the borrow cases the `safe_packed_borrows` lint distinguishes.
    data: &'static u32,
    data2: [&'static u32; 2],
    aligned: [u8; 32],
}
#[repr(packed)]
pub struct JustArray {
    // Unsized slice field in a packed struct — accepted at the time this
    // compile-pass test was written.
    array: [u32]
}
// kill this test when that turns to a hard error
#[allow(safe_packed_borrows)]
fn main() {
    // Run-pass compiler test: take every category of borrow of a packed
    // struct's fields with the `safe_packed_borrows` lint allowed. Merely
    // compiling and running is the assertion — there is nothing to check.
    let good = Good {
        data: &0,
        data2: [&0, &0],
        aligned: [0; 32]
    };
    unsafe {
        let _ = &good.data; // ok
        let _ = &good.data2[0]; // ok
    }
    let _ = &good.data;
    let _ = &good.data2[0];
    let _ = &*good.data; // ok, behind a pointer
    let _ = &good.aligned; // ok, has align 1
    let _ = &good.aligned[2]; // ok, has align 1
}
|
use crate::event::Event;
use crate::{
shutdown::ShutdownSignal,
tls::{MaybeTlsSettings, TlsConfig},
};
use futures::{
compat::{AsyncRead01CompatExt, Future01CompatExt, Stream01CompatExt},
FutureExt, TryFutureExt, TryStreamExt,
};
use futures01::{sync::mpsc, Sink};
use serde::Serialize;
use std::error::Error;
use std::fmt;
use std::net::SocketAddr;
use tokio_util::compat::FuturesAsyncReadCompatExt;
use warp::{
filters::BoxedFilter,
http::{HeaderMap, StatusCode},
reject::Rejection,
Filter,
};
/// JSON-serializable error payload returned to HTTP clients.
#[derive(Serialize, Debug)]
pub struct ErrorMessage {
    /// HTTP status code to respond with.
    code: u16,
    /// Human-readable error description.
    message: String,
}
impl ErrorMessage {
pub fn new(code: StatusCode, message: String) -> Self {
ErrorMessage {
code: code.as_u16(),
message,
}
}
}
// No wrapped source error, so the default `Error` methods suffice.
impl Error for ErrorMessage {}
impl fmt::Display for ErrorMessage {
    /// Formats as `<code>: <message>`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}: {}", self.code, self.message)
    }
}
// Lets handlers bubble `ErrorMessage` up through warp's rejection machinery.
impl warp::reject::Reject for ErrorMessage {}
/// Marker rejection used when the downstream channel has closed (shutdown).
struct RejectShuttingDown;
impl fmt::Debug for RejectShuttingDown {
    // Manual impl so the rejection renders as a plain reason string.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("shutting down")
    }
}
// Allows the shutdown marker to be raised as a warp rejection.
impl warp::reject::Reject for RejectShuttingDown {}
/// Shared implementation for HTTP-based sources: each implementor parses a
/// request body into events, and `run` wires that into a warp server that
/// forwards the events to the topology.
pub trait HttpSource: Clone + Send + Sync + 'static {
    /// Turn one request body (plus headers) into zero or more events, or an
    /// `ErrorMessage` to be returned to the client.
    fn build_event(
        &self,
        body: bytes05::Bytes,
        header_map: HeaderMap,
    ) -> Result<Vec<Event>, ErrorMessage>;
    /// Build the source future: `POST <path>` feeds `build_event`, `GET /ping`
    /// answers "pong", and `ErrorMessage` rejections are rendered as JSON with
    /// their embedded status code. Runs until `shutdown` fires.
    fn run(
        self,
        address: SocketAddr,
        path: &'static str,
        tls: &Option<TlsConfig>,
        out: mpsc::Sender<Event>,
        shutdown: ShutdownSignal,
    ) -> crate::Result<crate::sources::Source> {
        // Build the route filter one path segment at a time.
        let mut filter: BoxedFilter<()> = warp::post().boxed();
        if !path.is_empty() && path != "/" {
            for s in path.split('/') {
                filter = filter.and(warp::path(s)).boxed();
            }
        }
        let svc = filter
            .and(warp::path::end())
            .and(warp::header::headers_cloned())
            .and(warp::body::bytes())
            .and_then(move |headers: HeaderMap, body| {
                info!("Handling http request: {:?}", headers);
                let this = self.clone();
                let out = out.clone();
                async move {
                    match this.build_event(body, headers) {
                        Ok(events) => {
                            out.send_all(futures01::stream::iter_ok(events))
                                .compat()
                                .map_err(move |e: mpsc::SendError<Event>| {
                                    // can only fail if receiving end disconnected, so we are shutting down,
                                    // probably not gracefully.
                                    error!("Failed to forward events, downstream is closed");
                                    error!("Tried to send the following event: {:?}", e);
                                    warp::reject::custom(RejectShuttingDown)
                                })
                                .map_ok(|_| warp::reply())
                                .await
                        }
                        Err(err) => Err(warp::reject::custom(err)),
                    }
                }
            });
        let ping = warp::get().and(warp::path("ping")).map(|| "pong");
        let routes = svc.or(ping).recover(|r: Rejection| async move {
            if let Some(e_msg) = r.find::<ErrorMessage>() {
                let json = warp::reply::json(e_msg);
                Ok(warp::reply::with_status(
                    json,
                    StatusCode::from_u16(e_msg.code).unwrap_or(StatusCode::INTERNAL_SERVER_ERROR),
                ))
            } else {
                //other internal error - will return 500 internal server error
                Err(r)
            }
        });
        info!(message = "building http server", addr = %address);
        // NOTE(review): bad TLS config or an unbindable address panics here
        // via unwrap — presumably acceptable at source startup; confirm.
        let tls = MaybeTlsSettings::from_config(tls, true).unwrap();
        let incoming = tls.bind(&address).unwrap().incoming();
        let fut = async move {
            let _ = warp::serve(routes)
                .serve_incoming_with_graceful_shutdown(
                    incoming.compat().map_ok(|s| s.compat().compat()),
                    shutdown.clone().compat().map(|_| ()),
                )
                .await;
            // We need to drop the last copy of ShutdownSignalToken only after server has shut down.
            drop(shutdown);
            Ok(())
        };
        Ok(Box::new(fut.boxed().compat()))
    }
}
|
use crate::errors::*;
use crate::SetupArgs;
use dinghy_build::build_env::append_path_to_env;
use dinghy_build::build_env::append_path_to_target_env;
use dinghy_build::build_env::envify;
use dinghy_build::build_env::set_env;
use dinghy_build::build_env::set_target_env;
use itertools::Itertools;
use std::io::Write;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::path::PathBuf;
use std::{fs, path};
use walkdir::WalkDir;
// Shell/batch placeholder that forwards all arguments from a generated shim
// script to the wrapped tool.
#[cfg(not(target_os = "windows"))]
static GLOB_ARGS: &str = r#""$@""#;
#[cfg(target_os = "windows")]
static GLOB_ARGS: &str = r#"%*"#;
/// Minimal toolchain description: just the Rust target triple whose build
/// environment variables should be configured.
#[derive(Clone, Debug)]
pub struct Toolchain {
    pub rustc_triple: String,
}
impl Toolchain {
    /// Export `TARGET_<var>` and `<var>_<triple>` pointing at `exe`.
    pub fn setup_tool(&self, var: &str, exe: &str) -> Result<()> {
        set_env(format!("TARGET_{}", var), exe);
        set_env(format!("{}_{}", var, self.rustc_triple), exe);
        Ok(())
    }
    /// Export the cross C compiler via `TARGET_CC` and `CC_<triple>`.
    /// `_id` is unused here; kept for signature parity with `ToolchainConfig`.
    pub fn setup_cc(&self, _id: &str, compiler_command: &str) -> Result<()> {
        set_env("TARGET_CC", compiler_command);
        set_env(format!("CC_{}", self.rustc_triple), compiler_command);
        Ok(())
    }
    /// Write a shim script wrapping `linker_command` under the workspace's
    /// target dir, and point `CARGO_TARGET_<TRIPLE>_LINKER` at it.
    pub fn setup_linker<P: AsRef<path::Path>>(
        &self,
        id: &str,
        linker_command: &str,
        workspace_root: P,
    ) -> Result<()> {
        let shim = create_shim(
            workspace_root,
            &self.rustc_triple,
            id,
            "linker",
            format!("{} {}", linker_command, GLOB_ARGS).as_str(),
        )?;
        set_env(
            format!("CARGO_TARGET_{}_LINKER", envify(self.rustc_triple.as_str())).as_str(),
            shim,
        );
        Ok(())
    }
    /// Allow cross pkg-config and clear the target-specific lib path.
    pub fn setup_pkg_config(&self) -> Result<()> {
        set_env("PKG_CONFIG_ALLOW_CROSS", "1");
        set_target_env("PKG_CONFIG_LIBPATH", Some(&self.rustc_triple), "");
        Ok(())
    }
    /// Point `CARGO_TARGET_<TRIPLE>_RUNNER` at the dinghy runner command.
    pub fn setup_runner(&self, platform_id: &str, setup_args: &SetupArgs) -> Result<()> {
        set_env(
            format!("CARGO_TARGET_{}_RUNNER", envify(self.rustc_triple.as_str())).as_str(),
            setup_args.get_runner_command(platform_id),
        );
        Ok(())
    }
    /// Make cargo build for this triple by default.
    pub fn setup_target(&self) -> Result<()> {
        set_env("CARGO_BUILD_TARGET", &self.rustc_triple);
        Ok(())
    }
}
/// Fully-described external cross toolchain: where its executables live, the
/// naming prefixes they use, and an optional sysroot.
#[derive(Clone, Debug)]
pub struct ToolchainConfig {
    // Directory containing the toolchain executables.
    pub bin_dir: PathBuf,
    // Toolchain installation root; scanned for `pkgconfig` directories.
    pub root: PathBuf,
    pub rustc_triple: String,
    // Optional sysroot passed to the linker and pkg-config.
    pub sysroot: Option<PathBuf>,
    // Name of the C compiler within the toolchain (used with `cc_prefix`).
    pub cc: String,
    // Filename prefixes of binutils and compiler executables respectively.
    pub binutils_prefix: String,
    pub cc_prefix: String,
}
impl ToolchainConfig {
    /// Full path of a compiler-suite tool: `<bin_dir>/<cc_prefix>-<name>`.
    pub fn cc_executable(&self, name_without_triple: &str) -> String {
        self.bin_dir
            .join(format!("{}-{}", self.cc_prefix, name_without_triple))
            .to_string_lossy()
            .to_string()
    }
    /// Full path of a binutils tool: `<bin_dir>/<binutils_prefix>-<name>`.
    pub fn binutils_executable(&self, name_without_triple: &str) -> String {
        self.bin_dir
            .join(format!("{}-{}", self.binutils_prefix, name_without_triple))
            .to_string_lossy()
            .to_string()
    }
    /// Full path of a tool that carries no triple prefix.
    pub fn naked_executable(&self, name: &str) -> String {
        self.bin_dir.join(name).to_string_lossy().to_string()
    }
    /// Point pkg-config at every `pkgconfig` directory found under the
    /// toolchain root, plus the sysroot when one is configured.
    pub fn setup_pkg_config(&self) -> Result<()> {
        self.as_toolchain().setup_pkg_config()?;
        if self.root.parent().is_some() {
            append_path_to_target_env(
                "PKG_CONFIG_LIBDIR",
                Some(&self.rustc_triple),
                WalkDir::new(self.root.to_string_lossy().as_ref())
                    .into_iter()
                    .filter_map(|e| e.ok()) // Ignore unreadable files, maybe could warn...
                    .filter(|e| e.file_name() == "pkgconfig" && e.file_type().is_dir())
                    .map(|e| e.path().to_string_lossy().into_owned())
                    .join(":"),
            );
        }
        if let Some(sr) = &self.sysroot {
            set_target_env("PKG_CONFIG_SYSROOT_DIR", Some(&self.rustc_triple), &sr);
        }
        Ok(())
    }
    /// Export `TARGET_SYSROOT` when a sysroot is configured.
    pub fn setup_sysroot(&self) {
        if let Some(sr) = &self.sysroot {
            set_env("TARGET_SYSROOT", sr);
        }
    }
    /// Export `TARGET_<var>` / `<var>_<triple>` pointing at `command`.
    pub fn setup_tool(&self, var: &str, command: &str) -> Result<()> {
        self.as_toolchain().setup_tool(var, command)
    }
    /// Export the cross C compiler environment variables.
    pub fn setup_cc(&self, id: &str, compiler_command: &str) -> Result<()> {
        self.as_toolchain().setup_cc(id, compiler_command)
    }
    /// Build the linker invocation: the cross `cc` plus verbosity, sysroot
    /// and forced-overlay `-l` flags from `setup_args`.
    pub fn generate_linker_command(&self, setup_args: &SetupArgs) -> String {
        // `&self.cc` deref-coerces to `&str`; the previous `&*self.cc` was noise.
        let mut linker_cmd = self.cc_executable(&self.cc);
        // push(' ') instead of push_str(" "): same effect, no slice machinery.
        linker_cmd.push(' ');
        if setup_args.verbosity > 0 {
            linker_cmd.push_str("-Wl,--verbose -v")
        }
        if let Some(sr) = &self.sysroot {
            linker_cmd.push_str(&format!(" --sysroot {}", sr.display()));
        }
        for forced_overlay in &setup_args.forced_overlays {
            linker_cmd.push_str(" -l");
            linker_cmd.push_str(forced_overlay);
            // TODO Add -L
        }
        linker_cmd
    }
    /// Write a linker shim script and point cargo's linker variable at it.
    pub fn setup_linker<P: AsRef<path::Path>>(
        &self,
        id: &str,
        linker_command: &str,
        workspace_root: P,
    ) -> Result<()> {
        self.as_toolchain()
            .setup_linker(id, linker_command, workspace_root)
    }
    /// Point `CARGO_TARGET_<TRIPLE>_RUNNER` at the dinghy runner command.
    pub fn setup_runner(&self, platform_id: &str, setup_args: &SetupArgs) -> Result<()> {
        self.as_toolchain().setup_runner(platform_id, setup_args)
    }
    /// Make cargo build for this triple by default.
    pub fn setup_target(&self) -> Result<()> {
        self.as_toolchain().setup_target()
    }
    /// Create one shim per toolchain executable, renamed so the binutils/cc
    /// prefix becomes the rustc triple, and prepend the shim dir to PATH.
    pub fn shim_executables<P: AsRef<path::Path>>(
        &self,
        id: &str,
        workspace_root: P,
    ) -> Result<()> {
        let workspace_root = workspace_root.as_ref();
        let shims_path = workspace_root
            .join("target")
            .join(&self.rustc_triple)
            .join(id);
        for exe in self.bin_dir.read_dir()? {
            let exe = exe?;
            let exe_file_name = exe.file_name();
            let exe_path = exe.path();
            let exe_path = exe_path.to_string_lossy();
            let rustified_exe = &exe_file_name
                .to_string_lossy()
                .replace(self.binutils_prefix.as_str(), self.rustc_triple.as_str())
                .replace(self.cc_prefix.as_str(), self.rustc_triple.as_str());
            log::trace!("Shim {} -> {}", exe_path, rustified_exe);
            create_shim(
                workspace_root,
                self.rustc_triple.as_str(),
                id,
                rustified_exe,
                &format!("{} {}", exe_path, GLOB_ARGS),
            )?;
        }
        append_path_to_env("PATH", shims_path.to_string_lossy().as_ref());
        Ok(())
    }
    /// View this config as the minimal `Toolchain` (triple only).
    fn as_toolchain(&self) -> Toolchain {
        Toolchain {
            rustc_triple: self.rustc_triple.clone(),
        }
    }
}
/// Write an executable wrapper script named `name` under
/// `<root>/target/<rustc_triple>/<id>/` whose body is `shell`, returning the
/// path of the created script. On Windows the script gets a `.bat` extension
/// and no shebang; on unix it is marked executable.
fn create_shim<P: AsRef<path::Path>>(
    root: P,
    rustc_triple: &str,
    id: &str,
    name: &str,
    shell: &str,
) -> Result<PathBuf> {
    let shim_dir = root.as_ref().join("target").join(rustc_triple).join(id);
    fs::create_dir_all(&shim_dir)?;
    let mut shim_path = shim_dir.join(name);
    let on_windows = cfg!(target_os = "windows");
    if on_windows {
        shim_path.set_extension("bat");
    }
    let mut script = fs::File::create(&shim_path)?;
    if !on_windows {
        writeln!(script, "#!/bin/sh")?;
    }
    script.write_all(shell.as_bytes())?;
    writeln!(script, "\n")?;
    #[cfg(unix)]
    fs::set_permissions(&shim_path, PermissionsExt::from_mode(0o777))?;
    Ok(shim_path)
}
|
// RGB standard library
// Written in 2020 by
// Dr. Maxim Orlovsky <orlovsky@pandoracore.com>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the MIT License
// along with this software.
// If not, see <https://opensource.org/licenses/MIT>.
use clap::Clap;
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
use bitcoin::consensus::{Decodable, Encodable};
use bitcoin::util::psbt::{raw::Key, PartiallySignedTransaction};
use bitcoin::OutPoint;
use lnpbp::bitcoin;
use lnpbp::bp::blind::OutpointReveal;
use lnpbp::client_side_validation::Conceal;
use lnpbp::data_format::DataFormat;
use lnpbp::rgb::prelude::*;
use lnpbp::strict_encoding::strict_encode;
use super::{Error, OutputFormat, Runtime};
use crate::api::fungible::{AcceptApi, Issue, TransferApi};
use crate::api::{reply, Reply};
use crate::fungible::{Asset, Invoice, Outcoincealed, Outcoins, Outpoint};
use crate::util::file::ReadWrite;
#[derive(Clap, Clone, Debug, Display)]
#[display_from(Debug)]
/// Subcommands of the fungible-asset CLI; doc comments double as `clap` help
/// text. Fix applied: `Validate`'s help was a copy-paste of `Transfer`'s, and
/// `Import`/`Export`/`Forget` had no help text at all.
pub enum Command {
    /// Lists all known assets
    List {
        /// Format for information output
        #[clap(short, long, arg_enum, default_value = "yaml")]
        format: OutputFormat,
        /// List all asset details
        #[clap(short, long)]
        long: bool,
    },
    /// Imports an asset from its genesis data
    Import {
        /// Bech32 representation of the asset genesis
        asset: Genesis,
    },
    /// Exports genesis data for a known asset
    Export {
        /// Bech32 representation of the asset ID (contract id of the asset genesis)
        asset: ContractId,
    },
    /// Creates a new asset
    Issue(Issue),
    /// Create an invoice
    Invoice(InvoiceCli),
    /// Do a transfer of some requested asset to another party
    Transfer(TransferCli),
    /// Validates an incoming transfer consignment
    Validate {
        /// Consignment file
        consignment: PathBuf,
    },
    /// Accepts an incoming payment
    Accept {
        /// Consignment file
        consignment: PathBuf,
        /// Locally-controlled outpoint (specified when the invoice was created)
        outpoint: OutPoint,
        /// Outpoint blinding factor (generated when the invoice was created)
        blinding_factor: u64,
    },
    /// Forgets asset data allocated to a spent transaction output
    Forget {
        /// Bitcoin transaction output that was spent and which data
        /// has to be forgotten
        outpoint: OutPoint,
    },
}
#[derive(Clap, Clone, PartialEq, Debug, Display)]
#[display_from(Debug)]
// Arguments for the `invoice` subcommand. All fields are positional (no clap
// attributes), so declaration order is the CLI argument order.
pub struct InvoiceCli {
    /// Assets
    pub asset: ContractId,
    /// Amount
    // NOTE(review): amount is an f32; floating point can lose precision for
    // large values — confirm this matches the daemon's expectations.
    pub amount: f32,
    /// Receive assets to a given bitcoin address or UTXO
    pub outpoint: OutPoint,
}
#[derive(Clap, Clone, PartialEq, Debug, Display)]
#[display_from(Debug)]
// Arguments for the `transfer` subcommand. `inputs` and `allocate` are
// flagged options; the remaining fields are positional in declaration order.
pub struct TransferCli {
    /// Asset inputs
    #[clap(short = "i", long = "input", min_values = 1)]
    pub inputs: Vec<OutPoint>,
    /// Adds additional asset allocations; MUST use transaction inputs
    /// controlled by the local party
    #[clap(short, long)]
    pub allocate: Vec<Outcoins>,
    /// Invoice to pay
    pub invoice: Invoice,
    /// Read partially-signed transaction prototype
    pub prototype: PathBuf,
    /// Fee (in satoshis)
    pub fee: u64,
    /// File to save consignment to
    pub consignment: PathBuf,
    /// File to save updated partially-signed bitcoin transaction to
    pub transaction: PathBuf,
}
/// Dispatch and handlers for the fungible-asset subcommands; every handler
/// talks to the daemon through `Runtime` and reports results on stdout/stderr.
impl Command {
    /// Entry point: routes the parsed subcommand to its handler, consuming
    /// `self` and the daemon connection.
    pub fn exec(self, runtime: Runtime) -> Result<(), Error> {
        match self {
            Command::List { format, long } => self.exec_list(runtime, format, long),
            Command::Import { ref asset } => self.exec_import(runtime, asset.clone()),
            Command::Export { asset } => self.exec_export(runtime, asset),
            Command::Invoice(invoice) => invoice.exec(runtime),
            Command::Issue(issue) => issue.exec(runtime),
            Command::Transfer(transfer) => transfer.exec(runtime),
            Command::Validate { ref consignment } => {
                self.exec_validate(runtime, consignment.clone())
            }
            Command::Accept {
                ref consignment,
                outpoint,
                blinding_factor,
            } => self.exec_accept(runtime, consignment.clone(), outpoint, blinding_factor),
            Command::Forget { outpoint } => self.exec_forget(runtime, outpoint),
        }
    }
    /// Fetches the asset list from the daemon and prints it in
    /// `output_format`: full details when `long` is set, otherwise a short
    /// id/ticker/name summary.
    fn exec_list(
        &self,
        mut runtime: Runtime,
        output_format: OutputFormat,
        long: bool,
    ) -> Result<(), Error> {
        match &*runtime.list()? {
            Reply::Failure(failure) => {
                eprintln!("Server returned error: {}", failure);
            }
            Reply::Sync(reply::SyncFormat(input_format, data)) => {
                // The server chooses its own wire serialization; decode
                // according to the format it reports.
                let assets: Vec<Asset> = match input_format {
                    DataFormat::Yaml => serde_yaml::from_slice(&data)?,
                    DataFormat::Json => serde_json::from_slice(&data)?,
                    DataFormat::Toml => toml::from_slice(&data)?,
                    DataFormat::StrictEncode => unimplemented!(),
                };
                // Short view: one id/ticker/name row per asset.
                let short: Vec<HashMap<&str, String>> = assets
                    .iter()
                    .map(|a| {
                        map! {
                            "id" => a.id().to_bech32_string(),
                            "ticker" => a.ticker().clone(),
                            "name" => a.name().clone()
                        }
                    })
                    .collect();
                // Render both views up front; only one is printed below.
                let long_str: String;
                let short_str: String;
                match output_format {
                    OutputFormat::Yaml => {
                        long_str = serde_yaml::to_string(&assets)?;
                        short_str = serde_yaml::to_string(&short)?;
                    }
                    OutputFormat::Json => {
                        long_str = serde_json::to_string(&assets)?;
                        short_str = serde_json::to_string(&short)?;
                    }
                    OutputFormat::Toml => {
                        long_str = toml::to_string(&assets)?;
                        short_str = toml::to_string(&short)?;
                    }
                    _ => unimplemented!(),
                }
                if long {
                    println!("{}", long_str);
                } else {
                    println!("{}", short_str);
                }
            }
            _ => {
                eprintln!(
                    "Unexpected server error; probably you connecting with outdated client version"
                );
            }
        }
        Ok(())
    }
    /// Sends an asset genesis to the daemon for import into the stash.
    fn exec_import(&self, mut runtime: Runtime, genesis: Genesis) -> Result<(), Error> {
        info!("Importing asset ...");
        match &*runtime.import(genesis)? {
            Reply::Failure(failure) => {
                eprintln!("Server returned error: {}", failure);
            }
            Reply::Success => {
                eprintln!("Asset successfully imported");
            }
            _ => {
                eprintln!(
                    "Unexpected server error; probably you connecting with outdated client version"
                );
            }
        }
        Ok(())
    }
    /// Asks the daemon for the genesis of `asset_id` and prints it to stdout
    /// so the user can share it.
    fn exec_export(&self, mut runtime: Runtime, asset_id: ContractId) -> Result<(), Error> {
        info!("Exporting asset ...");
        match &*runtime.export(asset_id)? {
            Reply::Failure(failure) => {
                eprintln!("Server returned error: {}", failure);
            }
            Reply::Genesis(genesis) => {
                eprintln!("Asset successfully exported. Use this information for sharing:");
                println!("{}", genesis);
            }
            _ => {
                eprintln!(
                    "Unexpected server error; probably you connecting with outdated client version"
                );
            }
        }
        Ok(())
    }
    /// Reads a consignment from `filename` and asks the daemon to validate it.
    fn exec_validate(&self, mut runtime: Runtime, filename: PathBuf) -> Result<(), Error> {
        use lnpbp::strict_encoding::strict_encode;
        info!("Validating asset transfer...");
        debug!("Reading consignment from file {:?}", &filename);
        let consignment = Consignment::read_file(filename.clone()).map_err(|err| {
            Error::InputFileFormatError(format!("{:?}", filename), format!("{}", err))
        })?;
        trace!("{:?}", strict_encode(&consignment));
        match &*runtime.validate(consignment)? {
            Reply::Failure(failure) => {
                eprintln!("Server returned error: {}", failure);
            }
            Reply::Success => {
                eprintln!("Asset transfer successfully validated.");
            }
            _ => {
                eprintln!(
                    "Unexpected server error; probably you connecting with outdated client version"
                );
            }
        }
        Ok(())
    }
    /// Reads a consignment from `filename`, checks that `outpoint` +
    /// `blinding_factor` conceal to the consignment's (single) endpoint hash,
    /// then asks the daemon to accept the transfer.
    fn exec_accept(
        &self,
        mut runtime: Runtime,
        filename: PathBuf,
        outpoint: OutPoint,
        blinding_factor: u64,
    ) -> Result<(), Error> {
        use lnpbp::strict_encoding::strict_encode;
        info!("Accepting asset transfer...");
        debug!("Reading consignment from file {:?}", &filename);
        let consignment = Consignment::read_file(filename.clone()).map_err(|err| {
            Error::InputFileFormatError(format!("{:?}", filename), format!("{}", err))
        })?;
        trace!("{:?}", strict_encode(&consignment));
        // Only single-endpoint consignments are supported here; the revealed
        // outpoint must conceal to the endpoint hash committed in the
        // consignment, otherwise the user supplied the wrong data.
        let api = if let Some((_, outpoint_hash)) = consignment.endpoints.get(0) {
            let outpoint_reveal = OutpointReveal {
                blinding: blinding_factor,
                txid: outpoint.txid,
                vout: outpoint.vout as u32,
            };
            if outpoint_reveal.conceal() != *outpoint_hash {
                eprintln!("The provided outpoint and blinding factors does not match outpoint from the consignment");
                Err(Error::DataInconsistency)?
            }
            AcceptApi {
                consignment,
                reveal_outpoints: vec![outpoint_reveal],
            }
        } else {
            eprintln!("Currently, this command-line tool is unable to accept consignments containing more than a single locally-controlled output point");
            Err(Error::UnsupportedFunctionality)?
        };
        match &*runtime.accept(api)? {
            Reply::Failure(failure) => {
                eprintln!("Server returned error: {}", failure);
            }
            Reply::Success => {
                eprintln!("Asset transfer successfully accepted.");
            }
            _ => {
                eprintln!(
                    "Unexpected server error; probably you connecting with outdated client version"
                );
            }
        }
        Ok(())
    }
    /// Asks the daemon to drop asset data allocated to the spent `outpoint`.
    fn exec_forget(&self, mut runtime: Runtime, outpoint: OutPoint) -> Result<(), Error> {
        info!(
            "Forgetting assets allocated to specific bitcoin transaction output that was spent..."
        );
        match &*runtime.forget(outpoint)? {
            Reply::Failure(failure) => {
                eprintln!("Server returned error: {}", failure);
            }
            Reply::Success => {
                eprintln!("Assets are removed from the stash.");
            }
            _ => {
                eprintln!(
                    "Unexpected server error; probably you connecting with outdated client version"
                );
            }
        }
        Ok(())
    }
}
impl Issue {
    /// Sends an asset-issuance request to the daemon and logs the reply.
    pub fn exec(self, mut runtime: Runtime) -> Result<(), Error> {
        info!("Issuing asset ...");
        // `debug!` only borrows its arguments for the duration of the
        // statement, so the previous `self.clone()` was a needless copy.
        debug!("{}", self);
        let reply = runtime.issue(self)?;
        info!("Reply: {}", reply);
        // TODO: Wait for the information from push notification
        /*let (asset, genesis) = match reply {
        };
        debug!("Asset information:\n {:?}\n", asset);
        trace!("Genesis contract:\n {:?}\n", genesis);
        eprintln!("Asset successfully issued. Use this information for sharing:");
        println!("{}", genesis);*/
        Ok(())
    }
}
impl InvoiceCli {
    /// Generates a blinded-UTXO invoice for receiving assets and prints it,
    /// together with the blinding factor the user must keep in order to
    /// accept the incoming transfer later.
    pub fn exec(self, _: Runtime) -> Result<(), Error> {
        info!("Generating invoice ...");
        // `debug!` borrows; the previous `self.clone()` was a needless copy.
        debug!("{}", self);
        // Conceal the receiving outpoint so the payer never learns it.
        let outpoint_reveal = OutpointReveal::from(self.outpoint);
        let invoice = Invoice {
            contract_id: self.asset,
            outpoint: Outpoint::BlindedUtxo(outpoint_reveal.conceal()),
            amount: self.amount,
        };
        eprint!("Invoice: ");
        println!("{}", invoice);
        // The blinding factor is required later by `accept`; it must be
        // stored by the user.
        eprint!("Outpoint blinding factor: ");
        println!("{}", outpoint_reveal.blinding);
        Ok(())
    }
}
impl TransferCli {
#[allow(unreachable_code)]
pub fn exec(self, mut runtime: Runtime) -> Result<(), Error> {
info!("Transferring asset ...");
debug!("{}", self.clone());
let seal_confidential = match self.invoice.outpoint {
Outpoint::BlindedUtxo(outpoint_hash) => outpoint_hash,
Outpoint::Address(_address) => {
// To do a pay-to-address, we need to spend some bitcoins,
// which we have to take from somewhere. While payee can
// provide us with additional input, it's not part of the
// invoicing protocol + does not make a lot of sense, since
// the same input can be simply used by Utxo scheme
unimplemented!();
SealDefinition::WitnessVout {
vout: 0,
blinding: 0,
}
.conceal()
}
};
let pubkey_key = Key {
type_value: 0xFC,
key: PSBT_PUBKEY_KEY.to_vec(),
};
let fee_key = Key {
type_value: 0xFC,
key: PSBT_FEE_KEY.to_vec(),
};
debug!(
"Reading partially-signed transaction from file {:?}",
self.prototype
);
let filepath = format!("{:?}", &self.prototype);
let file = fs::File::open(self.prototype)
.map_err(|_| Error::InputFileIoError(format!("{:?}", filepath)))?;
let mut psbt = PartiallySignedTransaction::consensus_decode(file).map_err(|err| {
Error::InputFileFormatError(format!("{:?}", filepath), format!("{}", err))
})?;
psbt.global
.unknown
.insert(fee_key, self.fee.to_be_bytes().to_vec());
for output in &mut psbt.outputs {
output.unknown.insert(
pubkey_key.clone(),
output.hd_keypaths.keys().next().unwrap().to_bytes(),
);
}
trace!("{:?}", psbt);
let api = TransferApi {
psbt,
contract_id: self.invoice.contract_id,
inputs: self.inputs,
ours: self.allocate,
theirs: vec![Outcoincealed {
coins: self.invoice.amount,
seal_confidential,
}],
};
// TODO: Do tx output reorg for deterministic ordering
let reply = runtime.transfer(api)?;
info!("Reply: {}", reply);
match &*reply {
Reply::Failure(failure) => {
eprintln!("Transfer failed: {}", failure);
}
Reply::Transfer(transfer) => {
trace!("{:?}", strict_encode(&transfer.consignment));
transfer.consignment.write_file(self.consignment.clone())?;
let out_file = fs::File::create(&self.transaction)
.expect("can't create output transaction file");
transfer.psbt.consensus_encode(out_file)?;
println!(
"Transfer succeeded, consignment data are written to {:?}, partially signed witness transaction to {:?}",
self.consignment, self.transaction
);
}
_ => (),
}
Ok(())
}
}
|
use crate::{
geom::{Scalar, Vector},
graphics::{Background, Color, Image}
};
use std::cmp::Ordering;
#[derive(Clone, Copy, Debug)]
/// A vertex for drawing items to the GPU
pub struct Vertex {
    /// The position of the vertex in space
    pub pos: Vector,
    /// If there is a texture attached to this vertex, where to get the texture data from
    ///
    /// It is normalized from 0 to 1
    pub tex_pos: Option<Vector>,
    /// The color to blend this vertex with
    // `None` in `tex_pos` means the vertex is drawn with `col` alone.
    pub col: Color,
}
impl Vertex {
    /// Create a new GPU vertex
    ///
    /// The color is taken from the background; `tex_pos` (already a `Vector`)
    /// is stored as-is.
    pub fn new(pos: impl Into<Vector>, tex_pos: Option<Vector>, bkg: Background) -> Vertex {
        let col = bkg.color();
        let pos = pos.into();
        Vertex { pos, tex_pos, col }
    }
}
#[derive(Clone)]
/// A triangle to draw to the GPU
pub struct GpuTriangle {
    /// The plane the triangle falls on
    // Also the primary sort key: see the `Ord` impl below.
    pub z: f32,
    /// The indexes in the vertex list that the GpuTriangle uses
    pub indices: [u32; 3],
    /// The (optional) image used by the GpuTriangle
    ///
    /// All of the vertices used by the triangle should agree on whether it uses an image,
    /// it is up to you to maintain this
    pub image: Option<Image>
}
impl GpuTriangle {
    /// Create a new untextured GPU Triangle
    ///
    /// Every index is shifted by `offset` so the triangle can reference a
    /// vertex list that was appended to an existing buffer.
    pub fn new(offset: u32, indices: [u32; 3], z: impl Scalar, bkg: Background) -> GpuTriangle {
        let shifted = [offset + indices[0], offset + indices[1], offset + indices[2]];
        GpuTriangle {
            z: z.float(),
            indices: shifted,
            image: bkg.image().cloned(),
        }
    }
}
#[doc(hidden)]
impl PartialEq for GpuTriangle {
fn eq(&self, other: &GpuTriangle) -> bool {
match (&self.image, &other.image) {
(&Some(ref a), &Some(ref b)) => a.get_id() == b.get_id(),
(&None, &None) => true,
_ => false
}
}
}
#[doc(hidden)]
// Marker impl: the id-based equality defined in `PartialEq` is total, so
// promoting it to `Eq` is sound.
impl Eq for GpuTriangle {}
#[doc(hidden)]
impl PartialOrd for GpuTriangle {
    // Delegates to the total ordering defined by `Ord` below.
    fn partial_cmp(&self, other: &GpuTriangle) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
#[doc(hidden)]
impl Ord for GpuTriangle {
fn cmp(&self, other: &GpuTriangle) -> Ordering {
match self.z.partial_cmp(&other.z) {
None | Some(Ordering::Equal) =>
match (&self.image, &other.image) {
(&Some(ref a), &Some(ref b)) => a.get_id().cmp(&b.get_id()),
(&Some(_), &None) => Ordering::Greater,
(&None, &Some(_)) => Ordering::Less,
(&None, &None) => Ordering::Equal,
},
Some(result) => result
}
}
}
|
use allegro;
/*
pub fn update(_: &Platform, _: &mut GameMapDetail) -> Option<State> {
None
}
pub fn render(p: &Platform, detail: &GameMapDetail) {
detail.map.render(p);
}
pub fn handle_event(p: &Platform, detail: GameMapDetail, e: allegro::Event) {
match e {
allegro::KeyDown{keycode, ..} => {
println!("Handling keypress for {}!", p.core.keycode_to_name(keycode));
},
_ => (),
}
}
*/
|
use serde::{Deserialize, Serialize};
// NOTE(review): empty serializable marker struct; its purpose is not evident
// from this file alone — confirm against its usage sites.
#[derive(Serialize, Deserialize)]
pub struct Evening {}
|
use crate::domain::git::CocoCommit;
use crate::infrastructure::git::cmd_git::commit_message;
use crate::infrastructure::git::git_log_parser::GitMessageParser;
use core_model::coco_config::CocoCommitConfig;
use core_model::url_format;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug, Clone)]
// A flattened, serializable view of a git commit, with an optional story id
// extracted via `CocoCommitConfig` (see `convert` below).
pub struct ShortCommit {
    pub branch: String,
    // Empty when no commit config supplies an `id` capture.
    pub story_id: String,
    pub commit_id: String,
    pub author: String,
    pub email: String,
    // NOTE(review): presumably a unix timestamp — confirm against the parser.
    pub date: i64,
    pub message: String,
    pub parent_hashes: Vec<String>,
    pub tree_hash: String,
    pub total_added: i32,
    pub total_deleted: i32,
    pub changed_file_count: i32,
}
impl ShortCommit {
    /// Converts a raw `CocoCommit` into a `ShortCommit`, filling `story_id`
    /// from the commit config's `id` entry when a valid config is supplied.
    pub fn convert(commit: CocoCommit, commit_config: &Option<CocoCommitConfig>) -> ShortCommit {
        // Resolve the story id first; it depends only on the config.
        let mut story_id = String::new();
        if let Some(config) = commit_config {
            if let Ok(hash) = CocoCommitConfig::verify_config(config) {
                if let Some(id) = hash.get("id") {
                    story_id = String::from(id);
                }
            }
        }
        Self {
            branch: commit.branch,
            story_id,
            commit_id: commit.commit_id,
            author: commit.author,
            email: commit.email,
            date: commit.date,
            message: commit.message,
            parent_hashes: commit.parent_hashes,
            tree_hash: commit.tree_hash,
            total_added: commit.total_added,
            total_deleted: commit.total_deleted,
            changed_file_count: commit.changed_file_count,
        }
    }
}
/// Reads the git log of the repository identified by `url` and converts every
/// parsed commit into a [`ShortCommit`], applying the optional commit config.
///
/// Idiom cleanup: drops the explicit `return`, replaces the manual push loop
/// with `map`/`collect`, and `format!("{}", x)` with `to_string()`.
pub fn analysis(url: &str, commit_config: Option<CocoCommitConfig>) -> Vec<ShortCommit> {
    let local_path = url_format::uri_to_path(url);
    // `commit_message` obtains the raw git log output for the repository.
    let messages = commit_message(Some(local_path.display().to_string()));
    GitMessageParser::parse(messages.as_str())
        .into_iter()
        .map(|commit| ShortCommit::convert(commit, &commit_config))
        .collect()
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::Write;
use std::sync::Arc;
use common_base::base::tokio;
use common_catalog::table::Table;
use common_exception::Result;
use common_expression::block_debug::pretty_format_blocks;
use common_meta_app::principal::AuthInfo;
use common_meta_app::principal::AuthType;
use common_meta_app::principal::RoleInfo;
use common_meta_app::principal::UserGrantSet;
use common_meta_app::principal::UserInfo;
use common_meta_app::principal::UserOption;
use common_meta_app::principal::UserQuota;
// use common_sql::executor::table_read_plan::ToReadDataSourcePlan;
use common_meta_app::storage::StorageParams;
use common_meta_app::storage::StorageS3Config;
use common_metrics::init_default_metrics_recorder;
use common_sql::executor::table_read_plan::ToReadDataSourcePlan;
use common_storages_system::BuildOptionsTable;
use common_storages_system::CatalogsTable;
use common_storages_system::ClustersTable;
use common_storages_system::ColumnsTable;
use common_storages_system::ConfigsTable;
use common_storages_system::ContributorsTable;
use common_storages_system::CreditsTable;
use common_storages_system::DatabasesTable;
use common_storages_system::EnginesTable;
use common_storages_system::FunctionsTable;
use common_storages_system::MetricsTable;
use common_storages_system::RolesTable;
use common_storages_system::SettingsTable;
use common_storages_system::TracingTable;
use common_storages_system::UsersTable;
use common_users::UserApiProvider;
use databend_query::sessions::QueryContext;
use databend_query::sessions::TableContext;
use databend_query::stream::ReadDataBlockStream;
use futures::TryStreamExt;
use goldenfile::Mint;
use wiremock::matchers::method;
use wiremock::matchers::path;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
/// Renders a system table's metadata and full contents into `file` for
/// golden-file comparison. Data rows are sorted so the output is
/// order-independent, and machine-dependent settings rows are dropped.
async fn run_table_tests(
    file: &mut impl Write,
    ctx: Arc<QueryContext>,
    table: Arc<dyn Table>,
) -> Result<()> {
    let table_info = table.get_table_info();
    writeln!(file, "---------- TABLE INFO ------------").unwrap();
    writeln!(file, "{table_info}").unwrap();
    // Read the whole table and pretty-print it as a bordered grid.
    let source_plan = table.read_plan(ctx.clone(), None).await?;
    let stream = table.read_data_block_stream(ctx, &source_plan).await?;
    let blocks = stream.try_collect::<Vec<_>>().await?;
    let formatted = pretty_format_blocks(&blocks).unwrap();
    let mut rows: Vec<&str> = formatted.trim().lines().collect();
    // Sort only the data rows, keeping the two header lines and the closing
    // border in place.
    let row_count = rows.len();
    if row_count > 3 {
        rows[2..row_count - 1].sort_unstable()
    }
    writeln!(file, "-------- TABLE CONTENTS ----------").unwrap();
    // These settings vary per machine and would make the golden file flaky.
    if table_info.name.to_lowercase() == "settings" {
        rows.retain(|&row| {
            !(row.contains("max_threads")
                || row.contains("max_memory_usage")
                || row.contains("max_storage_io_requests"))
        });
    }
    for row in rows {
        writeln!(file, "{}", row).unwrap();
    }
    write!(file, "\n\n").unwrap();
    Ok(())
}
// Smoke test: system.build_options yields two columns and at least one row.
#[tokio::test(flavor = "multi_thread")]
async fn test_build_options_table() -> Result<()> {
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let table = BuildOptionsTable::create(1);
    let source_plan = table.read_plan(ctx.clone(), None).await?;
    let stream = table.read_data_block_stream(ctx, &source_plan).await?;
    let result = stream.try_collect::<Vec<_>>().await?;
    let block = &result[0];
    assert_eq!(block.num_columns(), 2);
    assert!(block.num_rows() > 0);
    Ok(())
}
// Golden-file test: renders system.columns into tests/it/storages/testdata.
#[tokio::test(flavor = "multi_thread")]
async fn test_columns_table() -> Result<()> {
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let mut mint = Mint::new("tests/it/storages/testdata");
    let file = &mut mint.new_goldenfile("columns_table.txt").unwrap();
    let table = ColumnsTable::create(1);
    run_table_tests(file, ctx, table).await?;
    Ok(())
}
// Smoke test: system.clusters yields four columns (contents vary per deployment).
#[tokio::test(flavor = "multi_thread")]
async fn test_clusters_table() -> Result<()> {
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let table = ClustersTable::create(1);
    let source_plan = table.read_plan(ctx.clone(), None).await?;
    let stream = table.read_data_block_stream(ctx, &source_plan).await?;
    let result = stream.try_collect::<Vec<_>>().await?;
    let block = &result[0];
    assert_eq!(block.num_columns(), 4);
    Ok(())
}
// Golden-file test: system.configs rendered with a default config builder.
#[tokio::test(flavor = "multi_thread")]
async fn test_configs_table_basic() -> Result<()> {
    let mut mint = Mint::new("tests/it/storages/testdata");
    let file = &mut mint.new_goldenfile("configs_table_basic.txt").unwrap();
    let conf = crate::tests::ConfigBuilder::create().config();
    let (_guard, ctx) = crate::tests::create_query_context_with_config(conf, None).await?;
    // Pin machine-dependent settings so the golden file stays stable.
    ctx.get_settings().set_max_threads(8)?;
    let table = ConfigsTable::create(1);
    run_table_tests(file, ctx, table).await?;
    Ok(())
}
// Checks system.configs with S3 storage credentials configured; a mock S3
// endpoint stands in for the real service. The golden-file comparison is
// disabled (see the trailing comment) because the endpoint URL is dynamic.
#[tokio::test(flavor = "multi_thread")]
async fn test_configs_table_redact() -> Result<()> {
    let mut mint = Mint::new("tests/it/storages/testdata");
    let _file = &mut mint.new_goldenfile("configs_table_redact.txt").unwrap();
    let mock_server = MockServer::builder().start().await;
    // Answer the storage probe request so context creation succeeds.
    Mock::given(method("HEAD"))
        .and(path("/test/.opendal"))
        .respond_with(ResponseTemplate::new(404))
        .mount(&mock_server)
        .await;
    let mut conf = crate::tests::ConfigBuilder::create().build();
    conf.storage.params = StorageParams::S3(StorageS3Config {
        region: "us-east-2".to_string(),
        endpoint_url: mock_server.uri(),
        bucket: "test".to_string(),
        access_key_id: "access_key_id".to_string(),
        secret_access_key: "secret_access_key".to_string(),
        ..Default::default()
    });
    let (_guard, ctx) = crate::tests::create_query_context_with_config(conf, None).await?;
    ctx.get_settings().set_max_threads(8)?;
    let table = ConfigsTable::create(1);
    let source_plan = table.read_plan(ctx.clone(), None).await?;
    let stream = table.read_data_block_stream(ctx, &source_plan).await?;
    let result = stream.try_collect::<Vec<_>>().await?;
    let block = &result[0];
    assert_eq!(block.num_columns(), 4);
    // need a method to skip/edit endpoint_url
    // run_table_tests(file, ctx, table).await?;
    Ok(())
}
// Smoke test: system.contributors yields a single column.
#[tokio::test(flavor = "multi_thread")]
async fn test_contributors_table() -> Result<()> {
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let table = ContributorsTable::create(1);
    let source_plan = table.read_plan(ctx.clone(), None).await?;
    let stream = table.read_data_block_stream(ctx, &source_plan).await?;
    let result = stream.try_collect::<Vec<_>>().await?;
    let block = &result[0];
    assert_eq!(block.num_columns(), 1);
    Ok(())
}
// Smoke test: system.credits yields three columns.
#[tokio::test(flavor = "multi_thread")]
async fn test_credits_table() -> Result<()> {
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let table = CreditsTable::create(1);
    let source_plan = table.read_plan(ctx.clone(), None).await?;
    let stream = table.read_data_block_stream(ctx, &source_plan).await?;
    let result = stream.try_collect::<Vec<_>>().await?;
    let block = &result[0];
    assert_eq!(block.num_columns(), 3);
    Ok(())
}
// Golden-file test: renders system.catalogs.
#[tokio::test(flavor = "multi_thread")]
async fn test_catalogs_table() -> Result<()> {
    let mut mint = Mint::new("tests/it/storages/testdata");
    let file = &mut mint.new_goldenfile("catalogs_table.txt").unwrap();
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let table = CatalogsTable::create(1);
    run_table_tests(file, ctx, table).await?;
    Ok(())
}
// Golden-file test: renders system.databases.
#[tokio::test(flavor = "multi_thread")]
async fn test_databases_table() -> Result<()> {
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let table = DatabasesTable::create(1);
    let mut mint = Mint::new("tests/it/storages/testdata");
    let file = &mut mint.new_goldenfile("databases_table.txt").unwrap();
    run_table_tests(file, ctx, table).await?;
    Ok(())
}
// Golden-file test: renders system.engines.
#[tokio::test(flavor = "multi_thread")]
async fn test_engines_table() -> Result<()> {
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let table = EnginesTable::create(1);
    let mut mint = Mint::new("tests/it/storages/testdata");
    let file = &mut mint.new_goldenfile("engines_table.txt").unwrap();
    run_table_tests(file, ctx, table).await?;
    Ok(())
}
// Smoke test: system.functions yields eight columns.
#[tokio::test(flavor = "multi_thread")]
async fn test_functions_table() -> Result<()> {
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let table = FunctionsTable::create(1);
    let source_plan = table.read_plan(ctx.clone(), None).await?;
    let stream = table.read_data_block_stream(ctx, &source_plan).await?;
    let result = stream.try_collect::<Vec<_>>().await?;
    let block = &result[0];
    assert_eq!(block.num_columns(), 8);
    Ok(())
}
// Records a counter (and, feature-gated, a histogram) and checks that
// system.metrics reports them in its output.
#[tokio::test(flavor = "multi_thread")]
async fn test_metrics_table() -> Result<()> {
    init_default_metrics_recorder();
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let table = MetricsTable::create(1);
    let source_plan = table.read_plan(ctx.clone(), None).await?;
    // Emit the metrics after planning but before reading the table.
    metrics::counter!("test.test_metrics_table_count", 1);
    #[cfg(feature = "enable_histogram")]
    metrics::histogram!("test.test_metrics_table_histogram", 1.0);
    let stream = table.read_data_block_stream(ctx, &source_plan).await?;
    let result = stream.try_collect::<Vec<_>>().await?;
    let block = &result[0];
    assert_eq!(block.num_columns(), 4);
    assert!(block.num_rows() >= 1);
    let output = pretty_format_blocks(result.as_slice())?;
    assert!(output.contains("test_test_metrics_table_count"));
    #[cfg(feature = "enable_histogram")]
    assert!(output.contains("test_test_metrics_table_histogram"));
    Ok(())
}
// Golden-file test: creates two roles (one granted to the other) and renders
// system.roles.
#[tokio::test(flavor = "multi_thread")]
async fn test_roles_table() -> Result<()> {
    let mut mint = Mint::new("tests/it/storages/testdata");
    let file = &mut mint.new_goldenfile("roles_table.txt").unwrap();
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let tenant = ctx.get_tenant();
    ctx.get_settings().set_max_threads(2)?;
    {
        let role_info = RoleInfo::new("test");
        UserApiProvider::instance()
            .add_role(&tenant, role_info, false)
            .await?;
    }
    {
        // "test1" is granted the "test" role to exercise the grants column.
        let mut role_info = RoleInfo::new("test1");
        role_info.grants.grant_role("test".to_string());
        UserApiProvider::instance()
            .add_role(&tenant, role_info, false)
            .await?;
    }
    let table = RolesTable::create(1);
    run_table_tests(file, ctx, table).await?;
    Ok(())
}
// Golden-file test: renders system.settings (machine-dependent rows are
// filtered out by the shared test helper).
#[tokio::test(flavor = "multi_thread")]
async fn test_settings_table() -> Result<()> {
    let mut mint = Mint::new("tests/it/storages/testdata");
    let file = &mut mint.new_goldenfile("settings_table.txt").unwrap();
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    ctx.get_settings().set_max_threads(2)?;
    ctx.get_settings().set_max_memory_usage(1073741824)?;
    let table = SettingsTable::create(1);
    run_table_tests(file, ctx, table).await?;
    Ok(())
}
// Smoke test: system.tracing yields one column and at least one row.
#[tokio::test(flavor = "multi_thread")]
async fn test_tracing_table() -> Result<()> {
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let table: Arc<dyn Table> = Arc::new(TracingTable::create(1));
    let source_plan = table.read_plan(ctx.clone(), None).await?;
    let stream = table.read_data_block_stream(ctx, &source_plan).await?;
    let result = stream.try_collect::<Vec<_>>().await?;
    let block = &result[0];
    assert_eq!(block.num_columns(), 1);
    assert!(block.num_rows() > 0);
    Ok(())
}
// Golden-file test: adds two users (one with no auth, one with a sha256
// password and a default role) and renders system.users.
#[tokio::test(flavor = "multi_thread")]
async fn test_users_table() -> Result<()> {
    let mut mint = Mint::new("tests/it/storages/testdata");
    let file = &mut mint.new_goldenfile("users_table.txt").unwrap();
    let (_guard, ctx) = crate::tests::create_query_context().await?;
    let tenant = ctx.get_tenant();
    ctx.get_settings().set_max_threads(2)?;
    let auth_data = AuthInfo::None;
    UserApiProvider::instance()
        .add_user(
            &tenant,
            UserInfo {
                auth_info: auth_data,
                name: "test".to_string(),
                hostname: "localhost".to_string(),
                grants: UserGrantSet::empty(),
                quota: UserQuota::no_limit(),
                option: UserOption::default(),
            },
            false,
        )
        .await?;
    // Second user: sha256 password auth, any host, with a default role.
    let auth_data = AuthInfo::new(AuthType::Sha256Password, &Some("123456789".to_string()));
    assert!(auth_data.is_ok());
    UserApiProvider::instance()
        .add_user(
            &tenant,
            UserInfo {
                auth_info: auth_data.unwrap(),
                name: "test1".to_string(),
                hostname: "%".to_string(),
                grants: UserGrantSet::empty(),
                quota: UserQuota::no_limit(),
                option: UserOption::default().with_default_role(Some("role1".to_string())),
            },
            false,
        )
        .await?;
    let table = UsersTable::create(1);
    run_table_tests(file, ctx, table).await?;
    Ok(())
}
|
use std::sync::Arc;
use bevy::{
prelude::*,
input::{
keyboard::KeyboardInput,
mouse::{
MouseButtonInput,
MouseWheel
}
},
render::{
draw::DrawContext,
render_graph::RenderGraph,
renderer::RenderResourceBindings
},
window::{
CursorMoved,
WindowId,
WindowResized
}
};
use egui::Event;
use crate::{
render::AddEguiSystemNode,
components::EguiJobsDescriptor,
egui_node::EguiNode,
egui_node::{
EguiSystemNode,
},
};
#[allow(clippy::type_complexity)]
/// Re-issues draw commands for every visible entity carrying an `EguiNode`
/// handle: binds the pipeline, bind groups, and vertex buffers, then emits
/// one indexed draw call per job produced by the egui node.
pub fn egui_draw_system(
    mut draw_context: DrawContext,
    mut render_resource_bindings: ResMut<RenderResourceBindings>,
    msaa: Res<Msaa>,
    mut query: Query<With<Handle<EguiNode>, (&mut Draw, &mut RenderPipelines, &EguiJobsDescriptor)>>,
) {
    for (mut draw, mut render_pipelines, jobs_descriptor) in &mut query.iter() {
        if !draw.is_visible {
            continue;
        }
        let render_pipelines = &mut *render_pipelines;
        // Keep every pipeline's sample count in sync with the current MSAA setting.
        for pipeline in render_pipelines.pipelines.iter_mut() {
            pipeline.specialization.sample_count = msaa.samples;
        }
        // This is needed since draw operations were already done for entity (and it cannot currently be prevented). In fact...
        // TODO(#55): stop the entity from having its draw operations already done, allowing this to be removed
        draw.clear_render_commands();
        for render_pipeline in render_pipelines.pipelines.iter() {
            draw_context
                .set_pipeline(
                    &mut draw,
                    render_pipeline.pipeline,
                    &render_pipeline.specialization,
                )
                .unwrap();
            draw_context
                .set_bind_groups_from_bindings(
                    &mut draw,
                    &mut [
                        &mut render_pipelines.bindings,
                        &mut render_resource_bindings,
                    ],
                )
                .unwrap();
            let indices = draw_context
                .set_vertex_buffers_from_bindings(&mut draw, &[&render_pipelines.bindings])
                .unwrap();
            // Only draw when an index buffer was actually bound.
            if indices.is_some() {
                // Each job pairs an index range with a base vertex offset.
                for (indices, base_vertex) in &jobs_descriptor.jobs {
                    draw.draw_indexed(indices.clone(), *base_vertex, 0..1);
                }
            }
        }
    }
}
// TODO(#56): properly integrate the context as an asset to remove the singleton restriction on egui contexts
/// Resource wrapping the (currently single) egui context.
pub struct EguiContext {
    // TODO(#56): Utilize name as a key for properly integrated context asset
    // name: &'static str,
    /// Shared handle to the underlying egui context.
    pub context: Arc<egui::Context>,
}
impl Default for EguiContext {
    // `egui::Context::new()` already yields an `Arc`-wrapped context, so it
    // can be stored directly.
    fn default() -> Self {
        Self {
            context: egui::Context::new(),
        }
    }
}
#[derive(Default)]
/// Local state for [`egui_system_node_adder`]: tracks which
/// `AssetEvent<EguiContext>` events have already been consumed.
pub struct EguiSystemNodeAdderState {
    event_reader: EventReader<AssetEvent<EguiContext>>,
}
// TODO(#56): properly integrate the context as an asset to remove the singleton restriction on egui contexts
/// Watches for `EguiContext` asset events; for each newly created context it
/// begins an egui frame and registers a matching `EguiSystemNode` in the
/// render graph. Modification/removal handling is still TODO.
pub fn egui_system_node_adder(
    mut state: Local<EguiSystemNodeAdderState>,
    egui_input: Res<EguiInput>,
    context_events: Res<Events<AssetEvent<EguiContext>>>,
    mut contexts: ResMut<Assets<EguiContext>>,
    mut render_graph: ResMut<RenderGraph>,
) {
    for event in state.event_reader.iter(&context_events) {
        match event {
            AssetEvent::Created { handle } => {
                let EguiContext { context, .. } = contexts.get_mut(handle).unwrap();
                // Begin frame so that the system is in the correct state
                context.begin_frame(egui_input.raw_input.clone());
                render_graph.add_egui_system_node(EguiSystemNode {
                    command_queue: Default::default(),
                    context: *handle,
                });
            },
            AssetEvent::Modified { .. } => {
                // TODO(#56): Determine what (if anything?) should happen if there is a modification
            },
            AssetEvent::Removed { .. } => {
                todo!("TODO(#56): Deal with removing the render node stuff for a given egui context!")
            }
        }
    }
}
/// Per-system state for the input-gathering system: one event reader per
/// input stream, plus data that must survive across frames.
pub struct GatherEguiInputState {
    mouse_button_reader: EventReader<MouseButtonInput>,
    cursor_moved_reader: EventReader<CursorMoved>,
    mouse_wheel_reader: EventReader<MouseWheel>,
    keyboard_input_reader: EventReader<KeyboardInput>,
    window_resized_reader: EventReader<WindowResized>,
    // Id of the primary window, captured once in `from_resources`.
    primary_window_id: WindowId,
    // Incremented once per system run; presumably feeds egui's clock — TODO confirm.
    current_tick: f64,
    // Last frame's raw input, used as the fallback when no new events arrive.
    previous_input: egui::RawInput,
}
impl FromResources for GatherEguiInputState {
    /// Builds the initial state from the primary window: remembers its id and
    /// seeds `previous_input` with the window's current size.
    fn from_resources(resources: &Resources) -> Self {
        let windows = resources.get::<Windows>().unwrap();
        let window = windows.get_primary().unwrap();
        let screen_size = egui::vec2(window.width as _, window.height as _);
        Self {
            mouse_button_reader: Default::default(),
            cursor_moved_reader: Default::default(),
            mouse_wheel_reader: Default::default(),
            keyboard_input_reader: Default::default(),
            window_resized_reader: Default::default(),
            primary_window_id: window.id,
            current_tick: 0.0,
            previous_input: egui::RawInput {
                mouse_down: false,
                mouse_pos: None,
                scroll_delta: egui::Vec2::new(0.0, 0.0),
                screen_size,
                pixels_per_point: Some(1.0),
                time: 0.0,
                events: vec![]
            }
        }
    }
}
/// Stores the current frame's [`RawInput`] to be passed to egui.
pub struct EguiInput {
    /// The raw input handed to `egui::Context::begin_frame`.
    pub raw_input: egui::RawInput
}
impl Default for EguiInput {
    /// Neutral input with a placeholder 1280x720 screen size; the gather
    /// system overwrites these values on its first run.
    fn default() -> Self {
        Self {
            raw_input: egui::RawInput {
                mouse_down: false,
                mouse_pos: None,
                scroll_delta: egui::math::vec2(0.0, 0.0),
                screen_size: egui::math::vec2(1280.0, 720.0),
                pixels_per_point: Some(1.0),
                time: 0.0,
                events: Vec::new()
            }
        }
    }
}
// TODO(#66): when a proper input handler exists, use it for the egui input gathering system
/// Gathers all inputs to update [`EguiInput`] so egui can start frames.
///
/// Each field of the new `RawInput` falls back to the previous frame's value
/// when no relevant event arrived this frame.
pub fn egui_gather_input(
    mut state: Local<GatherEguiInputState>,
    mut egui_input: ResMut<EguiInput>,
    mouse_button_events: Res<Events<MouseButtonInput>>,
    cursor_moved_events: Res<Events<CursorMoved>>,
    mouse_wheel_events: Res<Events<MouseWheel>>,
    keyboard_input_events: Res<Events<KeyboardInput>>,
    window_resized_events: Res<Events<WindowResized>>,
) {
    state.current_tick += 1.0;
    // Easier to allow the warning then rename the fields and break the shorthand syntax
    #[allow(unused_variables)]
    let GatherEguiInputState {
        mouse_button_reader,
        cursor_moved_reader,
        mouse_wheel_reader,
        keyboard_input_reader,
        window_resized_reader,
        primary_window_id,
        current_tick,
        previous_input
    } = &mut *state;
    // Uses the pressed state of the latest left mouse event, otherwise the previous state
    let mouse_down = mouse_button_reader
        .find_latest(&mouse_button_events, |input| {
            input.button == MouseButton::Left
        })
        .map_or(previous_input.mouse_down, |input| {
            input.state.is_pressed()
        });
    // Accumulate every wheel event from this frame into one scroll delta.
    let scroll_delta = mouse_wheel_reader.iter(&mouse_wheel_events)
        .fold(egui::vec2(0.0, 0.0), |delta, MouseWheel { x, y, .. }| {
            delta + egui::vec2(*x, *y)
        });
    // Only resizes of the primary window affect egui's screen size.
    let screen_size = window_resized_reader.find_latest(&window_resized_events, |resized| {
        resized.id == *primary_window_id
    })
        .map_or(previous_input.screen_size, |WindowResized { width, height, .. }| {
            egui::vec2(*width as _, *height as _)
        });
    let mouse_pos = cursor_moved_reader
        .find_latest(&cursor_moved_events, |cursor| {
            cursor.id == *primary_window_id
        })
        .map_or(previous_input.mouse_pos, |CursorMoved { position, .. }| {
            // Bevy has origin as bottom left, egui expects origin to be top left, so we do the math to change it
            Some(egui::pos2(position.x(), screen_size.y - position.y()))
        });
    // Translate key presses into egui events; copy/cut/paste are special-cased.
    let events = keyboard_input_reader.iter(&keyboard_input_events)
        .fold(Vec::<egui::Event>::new(), |mut events: Vec<egui::Event>, KeyboardInput { key_code, state, .. }| {
            if let Some(key_code) = key_code {
                // Handle special cased key combos like cut/copy/paste
                if let Some(event) = match key_code {
                    KeyCode::Copy => Some(Event::Copy),
                    KeyCode::Cut => Some(Event::Cut),
                    KeyCode::Paste => {
                        tracing::warn!("Paste event isn't implemented in bevy_egui! We need to figure out how to get stuff from the clipboard...");
                        None
                    }
                    _ => None
                } {
                    events.push(event);
                } else if let Some(key) = key_code.into_egui_key() {
                    events.push(Event::Key {
                        key,
                        pressed: state.is_pressed()
                    });
                } else {
                    tracing::info!("Key presses are currently not really given to egui! Pressed: {:?}", key_code);
                }
            }
            events
        });
    // Remaining fields (time, pixels_per_point, ...) carry over unchanged.
    let new_input = egui::RawInput {
        mouse_down,
        mouse_pos,
        scroll_delta,
        screen_size,
        events,
        ..*previous_input
    };
    *previous_input = new_input.clone();
    egui_input.raw_input = new_input;
}
/// Conversion from a bevy `KeyCode` into the (much smaller) set of keys that
/// egui understands.
trait IntoEguiKey {
    /// Returns the egui equivalent of this key, or `None` when egui has none.
    fn into_egui_key(self) -> Option<egui::Key>;
}
impl IntoEguiKey for KeyCode {
fn into_egui_key(self) -> Option<egui::Key> {
let key = match self {
KeyCode::LAlt |
KeyCode::RAlt => egui::Key::Alt,
KeyCode::Back => egui::Key::Backspace,
KeyCode::LControl |
KeyCode::RControl =>egui::Key::Control,
KeyCode::Delete => egui::Key::Delete,
KeyCode::Down => egui::Key::Down,
KeyCode::End => egui::Key::End,
KeyCode::Escape => egui::Key::Escape,
KeyCode::Home => egui::Key::Home,
KeyCode::Insert => egui::Key::Insert,
KeyCode::Left => egui::Key::Left,
KeyCode::LWin |
KeyCode::RWin => egui::Key::Logo,
KeyCode::PageDown => egui::Key::PageDown,
KeyCode::PageUp => egui::Key::PageUp,
KeyCode::NumpadEnter |
KeyCode:: Return => egui::Key::Enter,
KeyCode::Right => egui::Key::Right,
KeyCode::LShift |
KeyCode::RShift => egui::Key::Shift,
KeyCode::Tab => egui::Key::Tab,
KeyCode::Up => egui::Key::Up,
_ => return None
};
Some(key)
}
} |
use std::collections::HashMap;
/// A position on the 2d spiral grid.
///
/// `PartialEq` is now derived: the previous manual impl compared exactly the
/// fields the derive compares, so deriving keeps it trivially consistent with
/// the derived `Hash`/`Eq`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
struct Coord {
    x: i32,
    y: i32,
}
impl Coord {
    /// Taxicab distance to `other`.
    ///
    /// Differences are computed in `i64` so that neither the subtraction nor
    /// the absolute value can overflow `i32` (e.g. when the coordinates have
    /// opposite extreme signs).
    pub fn manhattan_distance(&self, other: &Coord) -> u32 {
        let dx = (self.x as i64 - other.x as i64).abs();
        let dy = (self.y as i64 - other.y as i64).abs();
        (dx + dy) as u32
    }
    /// Neighbor one step in -x.
    pub fn left(&self) -> Coord {
        Coord { x: self.x - 1, y: self.y }
    }
    /// Neighbor one step in +x.
    pub fn right(&self) -> Coord {
        Coord { x: self.x + 1, y: self.y }
    }
    /// Neighbor one step in -y (this grid's "up" decreases y).
    pub fn up(&self) -> Coord {
        Coord { x: self.x, y: self.y - 1 }
    }
    /// Neighbor one step in +y.
    pub fn down(&self) -> Coord {
        Coord { x: self.x, y: self.y + 1 }
    }
}
/// Memoizing map from 1-based spiral index to grid coordinate.
struct CoordinateSystem {
    cache: HashMap<u32, Coord>
}
impl CoordinateSystem {
    pub fn new() -> Self {
        CoordinateSystem {
            cache: HashMap::new()
        }
    }
    /// Coordinate of square `n`, cached after the first computation.
    pub fn coords_of(&mut self, n: u32) -> Coord {
        // `u32` is `Copy`: the key is passed directly (the old `n.to_owned()`
        // was a no-op) and the cached value is returned by value.
        *self.cache.entry(n).or_insert_with(|| Self::calc_coords_of(n))
    }
    /// Walks the spiral's concentric rings until the ring containing `n` is
    /// found, then derives the position from the offset into that ring.
    ///
    /// # Panics
    /// Panics when `n == 0` (the spiral is 1-based).
    fn calc_coords_of(n: u32) -> Coord {
        assert_ne!(n, 0);
        let mut from = 2; // first index on the current ring
        let mut level = 1; // ring number; ring 0 is just square 1 at the origin
        let mut result = Coord { x: 0, y: 0 };
        if n > 1 {
            loop {
                let width = (level * 2) + 1; // side length of this ring
                let capacity = 4 * (width - 1); // number of squares on this ring
                let next = from + capacity; // first index of the next ring
                if n < next {
                    let idx = n - from; // offset within the ring
                    let segment = idx / (width - 1); // which of the four sides
                    let pos = (idx % (width - 1)) as i32; // offset along that side
                    let l = level as i32;
                    result =
                        match segment {
                            0 => Coord { x: l, y: l - 1 - pos },   // right column, y decreasing
                            1 => Coord { x: l - 1 - pos, y: -l },  // top row, x decreasing
                            2 => Coord { x: -l, y: -l + 1 + pos }, // left column, y increasing
                            3 => Coord { x: -l + 1 + pos, y: l },  // bottom row, x increasing
                            // idx < capacity == 4 * (width - 1), so segment is always 0..=3.
                            _ => unreachable!("Segment was {}", segment)
                        };
                    break;
                } else {
                    from = next;
                    level += 1;
                }
            }
        }
        result
    }
}
/// Sum of the values of all eight neighbors of `coord`; cells that have not
/// been written yet count as 0.
fn calc_value(squares: &HashMap<Coord, u32>, coord: &Coord) -> u32 {
    let neighbors = [
        coord.left(),
        coord.right(),
        coord.up(),
        coord.down(),
        coord.left().up(),
        coord.right().up(),
        coord.left().down(),
        coord.right().down(),
    ];
    neighbors
        .iter()
        .map(|c| squares.get(c).copied().unwrap_or(0))
        .sum()
}
/// Walks the spiral, writing into each square the sum of its already-written
/// neighbors, and returns the first value strictly greater than `than`.
///
/// Fix: the loop previously stopped as soon as `value >= than`, so a value
/// exactly equal to `than` was returned even though it is not *larger*. The
/// condition is now `<=` so the walk continues until the value exceeds `than`.
fn find_first_larger_cell(csys: &mut CoordinateSystem, than: u32) -> u32 {
    let mut squares: HashMap<Coord, u32> = HashMap::new();
    let mut n: u32 = 1;
    let mut coord: Coord = csys.coords_of(n);
    let mut value: u32 = 1;
    squares.insert(coord, value);
    while value <= than {
        n += 1;
        coord = csys.coords_of(n);
        value = calc_value(&squares, &coord);
        squares.insert(coord, value);
    }
    value
}
/// Prints square `n`'s coordinate and its manhattan distance to the origin.
fn debug(csys: &mut CoordinateSystem, n: u32) {
    let coord = csys.coords_of(n);
    let distance = coord.manhattan_distance(&Coord { x: 0, y: 0 });
    println!("{}: {:?} d: {}", n, coord, distance);
}
pub fn run() {
let mut csys = CoordinateSystem::new();
debug(&mut csys, 1);
debug(&mut csys, 12);
debug(&mut csys, 23);
debug(&mut csys, 1024);
debug(&mut csys, 277678);
println!("Day 3 result 1: {}", csys.coords_of(277678).manhattan_distance(&Coord { x: 0, y: 0 }));
println!("Day 3 result 2: {}", find_first_larger_cell(&mut csys, 277678));
} |
//! Protocol of the websocket server
//!
//! The protocol uses JSON as coding and a request/answer id for every packet to know where to put
//! the answer.
use std::result;
use error::{Error, Result};
use bincode::{serialize, deserialize};
use hex_database::{Track, Playlist, Token, TrackKey, PlaylistKey, TokenId, TransitionAction, Transition};
/// Identification of a packet
///
/// A request should contain a random number associating it with the pending answer.
/// (128 bits, represented as four `u32` words.)
pub type PacketId = [u32; 4];
/// Incoming message
///
/// The incoming message is wrapped in a packet struct containing the `id` field. Any `fn` field is
/// in snake_case formatting and can contain more parameters.
#[derive(Debug)]
// Servers (and code compiled for wasm32) deserialize requests; clients serialize them.
#[cfg_attr(any(feature="server", target_arch = "wasm32"), derive(Deserialize))]
#[cfg_attr(feature="client", derive(Serialize))]
pub enum RequestAction {
    /// Search for tracks with a query
    Search {
        query: String
    },
    /// Get a single track with a key
    GetTrack {
        key: TrackKey
    },
    /// Get the next packet in a stream (`key` has to be available in first call)
    StreamNext {
        // `None` continues the stream opened by a previous call.
        key: Option<TrackKey>
    },
    /// End a stream
    StreamEnd,
    /// Seek in a stream forward
    StreamSeek {
        sample: u32
    },
    /// Update possible fields in a track
    ///
    /// `None` fields are left untouched.
    UpdateTrack {
        key: TrackKey,
        title: Option<String>,
        album: Option<String>,
        interpret: Option<String>,
        people: Option<String>,
        composer: Option<String>
    },
    /// Get suggestions for a track from acousticid
    GetSuggestion {
        key: TrackKey
    },
    /// Create a new playlist
    AddPlaylist {
        name: String
    },
    /// Delete a playlist
    DeletePlaylist {
        key: PlaylistKey
    },
    /// Set a playlist image
    SetPlaylistImage {
        key: PlaylistKey,
        // Raw image bytes.
        image: Vec<u8>
    },
    /// Add a track to a playlist
    AddToPlaylist {
        key: TrackKey,
        playlist: PlaylistKey
    },
    /// Delete a track from a playlist
    DeleteFromPlaylist {
        key: TrackKey,
        playlist: PlaylistKey
    },
    /// Update metadata of a playlist
    UpdatePlaylist {
        key: PlaylistKey,
        title: Option<String>,
        desc: Option<String>
    },
    /// Get all playlists
    GetPlaylists,
    /// Get a single playlist with key
    GetPlaylist {
        key: PlaylistKey
    },
    /// Get all playlists of a track
    GetPlaylistsOfTrack {
        key: TrackKey
    },
    /// Delete a track
    DeleteTrack {
        key: TrackKey
    },
    /// Start upload from a youtube music video
    UploadYoutube {
        path: String
    },
    /// Start upload of a track saved in the internal buffer
    UploadTrack {
        name: String,
        format: String,
        data: Vec<u8>
    },
    /// Vote for a track
    VoteForTrack {
        key: TrackKey
    },
    /// Ask the upload progress
    AskUploadProgress,
    /// Get a token
    GetToken {
        token: TokenId
    },
    /// Update the metadata of a token
    ///
    /// `None` fields are left untouched.
    UpdateToken {
        token: TokenId,
        key: Option<PlaylistKey>,
        played: Option<Vec<TrackKey>>,
        pos: Option<f64>
    },
    /// Create a new token
    CreateToken,
    /// Get the last inserted token
    LastToken,
    /// Get the summarise for all days
    GetSummary,
    /// Get all events
    GetTransitions,
    /// Start download a bunch of tracks
    Download {
        format: String,
        tracks: Vec<TrackKey>
    },
    /// Ask for the download progress
    AskDownloadProgress
}
/// Wrapper for the Incoming message
///
/// This struct supplements the protocol with an identification. It can be useful to match the
/// answer to the request, or to have stateful calls. For example the `search` query should return
/// just a bunch of tracks each time executed, but has to remember which were already transmitted.
#[derive(Debug)]
#[cfg_attr(feature="server", derive(Deserialize))]
#[cfg_attr(feature="client", derive(Serialize))]
pub struct Request {
    /// Random id matching this request to its eventual answer.
    pub id: PacketId,
    /// The requested action.
    pub msg: RequestAction
}
impl Request {
    /// Bundles a request action with its packet id.
    pub fn new(id: PacketId, msg: RequestAction) -> Request {
        Request { id, msg }
    }
    /// Deserializes a request from a bincode buffer (server side).
    ///
    /// # Errors
    /// Returns `Error::Bincode` when the buffer is not a valid encoding.
    #[cfg(feature="server")]
    pub fn try_from(buf: &[u8]) -> Result<Request> {
        // Path-to-variant is used directly instead of a redundant closure.
        deserialize(buf).map_err(Error::Bincode)
    }
    /// Serializes this request into a bincode buffer (client side).
    ///
    /// # Errors
    /// Returns `Error::Bincode` when serialization fails.
    #[cfg(feature="client")]
    pub fn to_buf(&self) -> Result<Vec<u8>> {
        serialize(self).map_err(Error::Bincode)
    }
}
/// Outgoing packets
///
/// Variants mirror [`RequestAction`] by name; unit variants are bare
/// acknowledgements carrying no payload.
#[derive(Debug)]
#[cfg_attr(feature="client", derive(Deserialize))]
#[cfg_attr(any(feature="server", target_arch = "wasm32"), derive(Serialize))]
pub enum AnswerAction {
    /// The result of a single search
    SearchResult {
        /// Searched query
        query: String,
        /// Slice of answers
        answ: Vec<Track>,
        /// Are there more tracks available? (repeated call)
        more: bool
    },
    /// Response to a `GetTrack` call
    Track(Track),
    ClearBuffer,
    /// Next chunk of streamed bytes.
    StreamNext(Vec<u8>),
    StreamSeek {
        sample: u32
    },
    StreamEnd,
    UpdateTrack(TrackKey),
    GetSuggestion {
        key: TrackKey,
        data: String
    },
    AddPlaylist(Playlist),
    DeletePlaylist,
    UpdatePlaylist,
    SetPlaylistImage,
    AddToPlaylist,
    DeleteFromPlaylist,
    GetPlaylists(Vec<Playlist>),
    /// A playlist together with its tracks.
    GetPlaylist((Playlist,Vec<Track>)),
    GetPlaylistsOfTrack(Vec<Playlist>),
    DeleteTrack(()),
    UploadYoutube,
    UploadTrack,
    VoteForTrack,
    AskUploadProgress(Vec<UploadProgress>),
    /// The token plus, when one is associated, its playlist and tracks.
    GetToken((Token, Option<(Playlist, Vec<Track>)>)),
    UpdateToken,
    CreateToken(TokenId),
    LastToken(Option<TokenId>),
    GetSummary(Vec<(String, u32, u32)>),
    GetTransitions(Vec<Transition>),
    Download,
    AskDownloadProgress(Vec<DownloadProgress>),
    Transition(TransitionAction)
}
/// Wrapper for an outgoing packet, pairing the answer (or an error string)
/// with the id of the request it belongs to.
#[derive(Debug)]
#[cfg_attr(feature="client", derive(Deserialize))]
#[cfg_attr(feature="server", derive(Serialize))]
pub struct Answer {
    /// Id copied from the originating request.
    pub id: PacketId,
    /// The answer payload, or an error description.
    pub msg: result::Result<AnswerAction, String>
}
impl Answer {
    /// Bundles an answer payload with the id of the originating request.
    pub fn new(id: PacketId, msg: result::Result<AnswerAction, String>) -> Answer {
        Answer {
            id,
            msg
        }
    }
    /// Deserializes an answer from a bincode buffer (client side).
    ///
    /// # Errors
    /// Returns `Error::Bincode` when the buffer is not a valid encoding.
    #[cfg(feature="client")]
    pub fn try_from(buf: &[u8]) -> Result<Answer> {
        // Path-to-variant is used directly instead of a redundant closure.
        deserialize(buf).map_err(Error::Bincode)
    }
    /// Serializes this answer into a bincode buffer (server side).
    ///
    /// # Errors
    /// Returns `Error::Bincode` when serialization fails.
    #[cfg(feature="server")]
    pub fn to_buf(&self) -> Result<Vec<u8>> {
        serialize(self).map_err(Error::Bincode)
    }
}
/// Progress report for a single running upload.
#[derive(Debug)]
#[cfg_attr(feature="client", derive(Deserialize))]
#[cfg_attr(any(feature="server", target_arch = "wasm32"), derive(Serialize))]
pub struct UploadProgress {
    /// Human-readable description of the upload.
    pub desc: String,
    /// Kind of upload (e.g. which source it comes from).
    pub kind: String,
    /// Current progress value.
    pub progress: f32,
    /// Id of the request that started the upload.
    pub id: PacketId,
    /// Key of the resulting track, once known.
    pub key: Option<TrackKey>
}
/// Progress report for a single running download.
#[derive(Debug, Clone)]
#[cfg_attr(feature="client", derive(Deserialize))]
#[cfg_attr(any(feature="server", target_arch = "wasm32"), derive(Serialize))]
pub struct DownloadProgress {
    /// Id of the request that started the download.
    pub id: PacketId,
    /// Requested audio format.
    pub format: String,
    /// Current progress value.
    pub progress: f32,
    /// Location of the finished download, once available.
    pub download: Option<String>
}
impl DownloadProgress {
pub fn empty() -> DownloadProgress {
DownloadProgress {
id: [0,0,0,0],
format: String::new(),
progress: 0.0,
download: None
}
}
}
|
use crate::{
hittable::HitRecord,
ray::Ray,
rtweekend::{fmin, random_double},
texture::{ConstTexture, Texture},
vec3::{random_in_unit_sphere, random_unit_vector, reflect, refract, Color, Point3},
};
use std::sync::Arc;
/// A surface material: decides how rays scatter and what light is emitted.
///
/// `Send + Sync` so materials can be shared across render threads.
pub trait Material: Send + Sync {
    /// Scatters `r_in` at the hit described by `rec`.
    ///
    /// On success writes the attenuation color and the scattered ray through
    /// the out-parameters and returns `true`; returns `false` when the ray is
    /// not scattered.
    fn scatter(
        &self,
        r_in: &Ray,
        rec: &HitRecord,
        attenuation: &mut Color,
        scattered: &mut Ray,
    ) -> bool;
    /// Light emitted at surface coordinates `(u, v)` and hit point `p`;
    /// non-emissive materials return `Color::zero()`.
    fn emitted(&self, u: f64, v: f64, p: &Point3) -> Color;
}
/// A diffuse (matte) material.
pub struct Lambertian {
    /// Texture providing the surface color at each hit point.
    pub albedo: Arc<dyn Texture>,
}
impl Lambertian {
    /// Creates a lambertian with a uniform (constant-texture) color.
    pub fn new(a: Color) -> Self {
        Self {
            albedo: Arc::new(ConstTexture { color_value: a }),
        }
    }
}
impl Material for Lambertian {
    /// Diffuse scatter: the new direction is the surface normal plus a random
    /// unit vector, and the attenuation is the albedo texture sampled at the
    /// hit. Always returns `true` (the ray is never absorbed here).
    //
    // NOTE(review): if `random_unit_vector()` is nearly opposite `rec.normal`
    // the scatter direction degenerates toward zero; the reference
    // "Ray Tracing in One Weekend" adds a near-zero check here — confirm
    // whether downstream code tolerates a (near-)zero direction.
    fn scatter(
        &self,
        _r_in: &Ray,
        rec: &HitRecord,
        attenuation: &mut Color,
        scattered: &mut Ray,
    ) -> bool {
        let scatter_direction = rec.normal + random_unit_vector();
        *scattered = Ray {
            orig: rec.p,
            dir: scatter_direction,
        };
        *attenuation = self.albedo.value(rec.u, rec.v, &rec.p);
        true
    }
    /// Diffuse surfaces emit no light.
    fn emitted(&self, _u: f64, _v: f64, _p: &Point3) -> Color {
        Color::zero()
    }
}
/// A reflective metal surface.
pub struct Metal {
    /// Color applied to reflected rays.
    pub albedo: Color,
    /// Reflection blur radius; 0.0 reflects perfectly, capped at 1.0.
    pub fuzz: f64,
}
impl Metal {
    /// Creates a metal with the given color, capping the fuzz at 1.0.
    pub fn new(a: &Color, f: f64) -> Self {
        // `f64::min` picks `f` when it is below 1.0 and saturates at 1.0
        // otherwise (including NaN), matching the original branching.
        Self {
            albedo: *a,
            fuzz: f.min(1.0),
        }
    }
}
impl Material for Metal {
    /// Mirrors the incoming ray about the surface normal, perturbed by a
    /// random offset scaled with `self.fuzz`.
    ///
    /// Returns `false` (ray absorbed) when the fuzzed reflection points below
    /// the surface, i.e. when its product with the normal is not positive.
    /// (`*` between vectors is presumably the dot product — cf.
    /// `-unit_dir * rec.normal` used as cos(theta) in `Dielectric`; confirm.)
    fn scatter(
        &self,
        r_in: &Ray,
        rec: &HitRecord,
        attenuation: &mut Color,
        scattered: &mut Ray,
    ) -> bool {
        let reflected = reflect(&r_in.dir.unit(), &rec.normal);
        *scattered = Ray {
            orig: rec.p,
            dir: reflected + random_in_unit_sphere() * self.fuzz,
        };
        *attenuation = self.albedo;
        scattered.dir * rec.normal > 0.0
    }
    /// Metal emits no light.
    fn emitted(&self, _u: f64, _v: f64, _p: &Point3) -> Color {
        Color::zero()
    }
}
/// A clear dielectric (glass-like) material.
pub struct Dielectric {
    /// Refraction index of the material.
    pub ref_idx: f64,
}
impl Dielectric {
    /// Creates a dielectric with the given refraction index.
    pub fn new(r: f64) -> Self {
        Self { ref_idx: r }
    }
}
impl Material for Dielectric {
    /// Scatters a ray through clear glass: attenuation is always full white,
    /// and the ray either reflects or refracts.
    ///
    /// The ray reflects when refraction is impossible (total internal
    /// reflection) or, otherwise, with probability given by Schlick's
    /// approximation; it refracts in all remaining cases. The two previously
    /// copy-pasted reflection branches are merged into one.
    fn scatter(
        &self,
        r_in: &Ray,
        rec: &HitRecord,
        attenuation: &mut Color,
        scattered: &mut Ray,
    ) -> bool {
        // Glass absorbs nothing.
        *attenuation = Color::ones();
        // Index ratio depends on whether we enter (front face) or exit.
        let etai_over_etat = if rec.front_face {
            1.0 / self.ref_idx
        } else {
            self.ref_idx
        };
        let unit_dir = r_in.dir.unit();
        let cos_theta = fmin(1.0, -unit_dir * rec.normal);
        let sin_theta = (1.0 - cos_theta * cos_theta).sqrt();
        // `||` short-circuits, so no random number is drawn on the total
        // internal reflection path (same behavior as the original branches).
        let total_internal = etai_over_etat * sin_theta > 1.0;
        let dir = if total_internal || random_double(0.0, 1.0) < schlick(cos_theta, etai_over_etat) {
            reflect(&unit_dir, &rec.normal)
        } else {
            refract(&unit_dir, &rec.normal, etai_over_etat)
        };
        *scattered = Ray { orig: rec.p, dir };
        true
    }
    /// Glass emits no light.
    fn emitted(&self, _u: f64, _v: f64, _p: &Point3) -> Color {
        Color::zero()
    }
}
/// A dielectric whose refracted rays are blurred, giving a frosted look.
pub struct FrostedGlass {
    /// Refraction index of the material.
    pub ref_idx: f64,
    /// Blur radius applied to refracted rays.
    pub fuzz: f64,
}
impl FrostedGlass {
    /// Creates a frosted glass with the given refraction index and blur.
    pub fn new(r: f64, f: f64) -> Self {
        Self {
            ref_idx: r,
            fuzz: f,
        }
    }
}
impl Material for FrostedGlass {
    /// Like [`Dielectric::scatter`], but refracted rays are additionally
    /// perturbed by a random offset scaled with `self.fuzz` (reflections stay
    /// sharp). The two previously copy-pasted reflection branches are merged.
    fn scatter(
        &self,
        r_in: &Ray,
        rec: &HitRecord,
        attenuation: &mut Color,
        scattered: &mut Ray,
    ) -> bool {
        // Glass absorbs nothing.
        *attenuation = Color::ones();
        // Index ratio depends on whether we enter (front face) or exit.
        let etai_over_etat = if rec.front_face {
            1.0 / self.ref_idx
        } else {
            self.ref_idx
        };
        let unit_dir = r_in.dir.unit();
        let cos_theta = fmin(1.0, -unit_dir * rec.normal);
        let sin_theta = (1.0 - cos_theta * cos_theta).sqrt();
        // `||` short-circuits, so no random number is drawn on the total
        // internal reflection path (same behavior as the original branches).
        let total_internal = etai_over_etat * sin_theta > 1.0;
        let dir = if total_internal || random_double(0.0, 1.0) < schlick(cos_theta, etai_over_etat) {
            reflect(&unit_dir, &rec.normal)
        } else {
            // Only refraction is fuzzed; reflections remain exact.
            refract(&unit_dir, &rec.normal, etai_over_etat) + random_in_unit_sphere() * self.fuzz
        };
        *scattered = Ray { orig: rec.p, dir };
        true
    }
    /// Frosted glass emits no light.
    fn emitted(&self, _u: f64, _v: f64, _p: &Point3) -> Color {
        Color::zero()
    }
}
/// Schlick's polynomial approximation of the Fresnel reflectance factor.
///
/// `cosine` is the cosine of the incidence angle and `ref_idx` the ratio of
/// refraction indices; the result is the reflection probability.
pub fn schlick(cosine: f64, ref_idx: f64) -> f64 {
    let r0 = {
        let r = (1.0 - ref_idx) / (1.0 + ref_idx);
        r * r
    };
    r0 + (1.0 - r0) * (1.0 - cosine).powi(5)
}
/// An emissive material: never scatters, only emits light.
pub struct DiffuseLight {
    /// Texture providing the emitted color.
    pub emit: Arc<dyn Texture>,
}
impl DiffuseLight {
    /// Creates a light with a uniform (constant-texture) emission color.
    pub fn new(c: Color) -> Self {
        Self {
            emit: Arc::new(ConstTexture { color_value: c }),
        }
    }
}
impl Material for DiffuseLight {
    /// Lights never scatter; the ray ends here.
    fn scatter(
        &self,
        _r_in: &Ray,
        _rec: &HitRecord,
        _attenuation: &mut Color,
        _scattered: &mut Ray,
    ) -> bool {
        false
    }
    /// Emission sampled from the texture at `(u, v)` / `p`.
    fn emitted(&self, u: f64, v: f64, p: &Point3) -> Color {
        self.emit.value(u, v, p)
    }
}
|
/// A 2d RGBA pixel buffer: 4 bytes per pixel, row-major, each pixel stored
/// big-endian as a `u32`.
#[derive(Debug, Clone)]
pub struct MemBlock {
    // Backing byte storage, length = width * height * 4.
    slc: Box<[u8]>,
    // Dimensions in pixels (width, height).
    size: (usize, usize),
}
impl MemBlock {
    /// Create a new [`MemBlock`] from a size in pixel (x,y), zero-initialized.
    pub fn new(size: (usize, usize)) -> Self {
        MemBlock {
            slc: vec![0; size.0 * size.1 * 4].into_boxed_slice(),
            size,
        }
    }
    /// Create a new [`MemBlock`] from a size in pixel (x,y) with a given default value.
    pub fn new_with_value(size: (usize, usize), value: u32) -> Self {
        // `to_be_bytes` replaces the manual shift-and-mask byte extraction
        // with the same big-endian layout; the 4-byte pattern is then tiled
        // across the whole buffer.
        MemBlock {
            slc: value.to_be_bytes().repeat(size.0 * size.1).into_boxed_slice(),
            size,
        }
    }
    /// Returns the size (in pixel) of the [`MemBlock`].
    pub fn size(&self) -> (usize, usize) {
        self.size
    }
    /// Read a single byte; `index` is a flat offset from the buffer start.
    ///
    /// # Panics
    /// Panics when the offset is out of bounds.
    fn read_u8(&self, index: usize) -> u8 {
        assert!(index < self.size.0 * self.size.1 * 4);
        assert!(index <= isize::max_value() as usize);
        self.slc[index]
    }
    /// Write a single byte; `index` is a flat offset from the buffer start.
    ///
    /// # Panics
    /// Panics when the offset is out of bounds.
    fn write_u8(&mut self, index: usize, data: u8) {
        assert!(index < self.size.0 * self.size.1 * 4);
        assert!(index <= isize::max_value() as usize);
        self.slc[index] = data;
    }
    /// Read the pixel at `(x, y)` as a big-endian `u32`.
    ///
    /// # Panics
    /// Panics when the index is outside the block's dimensions.
    pub fn read(&self, index: (usize, usize)) -> u32 {
        assert!(index.0 < self.size.0);
        assert!(index.1 < self.size.1);
        let r_index = (index.0 + index.1 * self.size.0) * 4;
        // `|=` instead of `+=`: the shifted bytes never overlap, and OR makes
        // the byte-reassembly intent explicit.
        let mut data = 0u32;
        for i in 0..4 {
            data |= (self.read_u8(r_index + i) as u32) << (24 - 8 * i);
        }
        data
    }
    /// Write the pixel at `(x, y)` as a big-endian `u32`.
    ///
    /// # Panics
    /// Panics when the index is outside the block's dimensions.
    pub fn write(&mut self, index: (usize, usize), data: u32) {
        assert!(index.0 < self.size.0);
        assert!(index.1 < self.size.1);
        let r_index = (index.0 + index.1 * self.size.0) * 4;
        for i in 0..4 {
            self.write_u8(r_index + i, (data >> (24 - 8 * i)) as u8);
        }
    }
    /// Copy `source` onto `self` at the given index; source pixels that fall
    /// outside `self` are silently clipped.
    pub fn dma(&mut self, index: (usize, usize), source: &Self) {
        for y in 0..source.size.1 {
            for x in 0..source.size.0 {
                // Positive guard replaces the old empty `if` branch.
                if x + index.0 < self.size.0 && y + index.1 < self.size.1 {
                    self.write((index.0 + x, index.1 + y), source.read((x, y)));
                }
            }
        }
    }
    /// Print the [`MemBlock`] as a table of hex pixels with nice formatting.
    pub fn table(&self) {
        for y in 0..(self.size.1) {
            for x in 0..(self.size.0) {
                print!(" {:08X}", self.read((x, y)));
            }
            println!();
        }
    }
}
// The block can be treated directly as its raw byte slice.
impl std::ops::Deref for MemBlock {
    type Target = [u8];
    fn deref(&self) -> &Self::Target {
        &self.slc
    }
}
impl std::ops::DerefMut for MemBlock {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.slc
    }
}
// Borrow impls let a `MemBlock` be used where `[u8]` is expected (e.g. keys,
// generic APIs taking `Borrow<[u8]>`).
impl std::borrow::Borrow<[u8]> for MemBlock {
    fn borrow(&self) -> &[u8] {
        &self[..]
    }
}
impl std::borrow::BorrowMut<[u8]> for MemBlock {
    fn borrow_mut(&mut self) -> &mut [u8] {
        &mut self[..]
    }
}
|
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#![recursion_limit = "512"]
#![warn(rust_2018_idioms)]
#![deny(
dead_code,
nonstandard_style,
unused_imports,
unused_mut,
unused_variables,
unused_unsafe,
unreachable_patterns
)]
use async_timer::Interval;
use faas_api::{relay, Address, FunctionCall, Protocol};
use fluence_client::{Client, ClientCommand, ClientEvent};
use futures::{channel::oneshot::Receiver, select, FutureExt, StreamExt};
use libp2p::identity::ed25519::Keypair;
use libp2p::PeerId;
use once_cell::sync::Lazy;
use parity_multiaddr::Multiaddr;
use serde_json::json;
use std::error::Error;
use std::ops::Deref;
use std::time::Duration;
use uuid::Uuid;
/// Name of the service this client provides and answers for.
const IPFS_SERVICE_ID: &str = "IPFS.multiaddr";
/// Service address built lazily on first use.
static IPFS_SERVICE: Lazy<Protocol> = Lazy::new(|| Protocol::Service(IPFS_SERVICE_ID.to_string()));
/// Builds the "provide" call registering this client as the provider of
/// `service_id`, with a reply address routed back through `relay` to `client`.
fn register_call(client: PeerId, relay: PeerId, service_id: &str, kp: &Keypair) -> FunctionCall {
    let reply_to = relay!(relay, client);
    // Sign the reply path so the receiving node can authenticate it.
    let signature = Protocol::Signature(kp.sign(reply_to.path().as_bytes()));
    let reply_to = Some(reply_to.append(signature));
    let target = Some(Protocol::Service("provide".into()).into());
    let arguments = json!({ "service_id": service_id });
    let uuid = message_id();
    let name = Some(format!("Delegate provide service {}", service_id));
    FunctionCall {
        uuid,
        target,
        reply_to,
        arguments,
        name,
    }
}
/// Builds the reply carrying our IPFS `multiaddr` back to the caller.
///
/// The caller's `reply_to` becomes the target of this call; `reply_to` is
/// then re-bound (shadowed) to our own signed return path via the bootstrap.
fn multiaddr_call(
    bootstrap_id: PeerId,
    client: PeerId,
    reply_to: Address,
    msg_id: Option<&str>,
    multiaddr: &Multiaddr,
    kp: &Keypair,
) -> FunctionCall {
    let target = Some(reply_to);
    // `msg_id` is echoed back so the caller can correlate the answer.
    let arguments = json!({ "multiaddr": multiaddr.to_string(), "msg_id": msg_id });
    let reply_to = relay!(bootstrap_id, client);
    let signature = Protocol::Signature(kp.sign(reply_to.path().as_bytes()));
    let reply_to = Some(reply_to.append(signature));
    let uuid = message_id();
    let name = Some("Reply on IPFS.multiaddr".to_string());
    FunctionCall {
        uuid,
        target,
        reply_to,
        arguments,
        name,
    }
}
/// Generates a fresh random message id.
fn message_id() -> String {
    // TODO: use v1
    Uuid::new_v4().to_string()
}
/// Connects to `bootstrap`, periodically registers the `IPFS.multiaddr`
/// service, and answers incoming requests with our `ipfs` multiaddr until
/// `stop` fires or the client closes.
///
/// Fix: the original built the reply with `bootstrap_id.clone().unwrap()`
/// *before* checking `if let Some(node) = bootstrap_id`, so a call arriving
/// before the bootstrap connected would panic and the warn branch was
/// unreachable. The call is now constructed inside the `Some` branch.
///
/// # Errors
/// Returns an error when the initial connection to `bootstrap` fails.
pub async fn run_ipfs_multiaddr_service(
    bootstrap: Multiaddr,
    ipfs: Multiaddr,
    stop: Receiver<()>,
) -> Result<(), Box<dyn Error>> {
    let (mut client, client_task) = Client::connect(bootstrap.clone()).await?;
    let mut stop = stop.into_stream().fuse();
    // Set once the bootstrap node connects; needed to route outgoing calls.
    let mut bootstrap_id: Option<PeerId> = None;
    // Will publish service 10 times, each 10 seconds
    let mut periodic = Interval::platform_new(Duration::from_secs(10))
        .take(10)
        .fuse();
    loop {
        select!(
            incoming = client.receive_one() => {
                match incoming {
                    Some(ClientEvent::FunctionCall {
                        call: FunctionCall {
                            target: Some(target),
                            reply_to: Some(reply_to),
                            arguments, ..
                        },
                        sender
                    }) if target.contains(&IPFS_SERVICE) => {
                        log::info!(
                            "Got call for {} from {}, asking node to reply to {:?}",
                            IPFS_SERVICE.deref(), sender, reply_to
                        );
                        let msg_id = arguments.get("msg_id").and_then(|v| v.as_str());
                        if let Some(node) = bootstrap_id.clone() {
                            let call = multiaddr_call(
                                node.clone(), client.peer_id.clone(), reply_to, msg_id, &ipfs, &client.key_pair
                            );
                            client.send(ClientCommand::Call { node, call })
                        } else {
                            // Typo fix: "yed" -> "yet".
                            log::warn!("Can't send {} reply: bootstrap hasn't connected yet", IPFS_SERVICE.deref());
                        }
                    },
                    Some(ClientEvent::NewConnection { peer_id, multiaddr }) if &multiaddr == &bootstrap => {
                        log::info!("Bootstrap connected, will send register call");
                        bootstrap_id = Some(peer_id.clone());
                    }
                    Some(msg) => log::info!("Received msg {:?}, ignoring", msg),
                    None => {
                        log::warn!("Client closed");
                        break;
                    }
                }
            },
            _ = periodic.next() => {
                // Re-announce the service while the timer is active.
                if let Some(bootstrap_id) = bootstrap_id.clone() {
                    let call = register_call(client.peer_id.clone(), bootstrap_id.clone(), IPFS_SERVICE_ID, &client.key_pair);
                    log::info!("Sending register call {:?}", call);
                    client.send(ClientCommand::Call {
                        node: bootstrap_id,
                        call,
                    });
                }
            }
            _ = stop.next() => {
                log::info!("Will stop");
                client.stop();
                break;
            }
        )
    }
    log::info!("Waiting client_task");
    client_task.await;
    log::info!("client_task finished, exiting");
    Ok(())
}
|
use wasm_encoder::{
Component, ComponentExternName, PrimitiveValType, ComponentTypeRef,
ComponentTypeSection, ComponentImportSection,
};
use std::env;
use std::fs::File;
use std::io::Write;
/// Builds a minimal WebAssembly component importing one function
/// `f: (a: s32) -> string` and writes the binary to the path given as the
/// first CLI argument (default: `sample.wasm`).
fn main() -> std::io::Result<()> {
    // Type section: declare the function type (a: s32) -> string.
    let mut types = ComponentTypeSection::new();
    types
        .function()
        .params([("a", PrimitiveValType::S32)])
        .result(PrimitiveValType::String);
    // Import section: import `f` with the type declared above (index 0).
    let mut imports = ComponentImportSection::new();
    let name = ComponentExternName::Kebab("f");
    imports.import(name, ComponentTypeRef::Func(0));
    let mut comp = Component::new();
    comp.section(&types);
    comp.section(&imports);
    let wasm_b = comp.finish();
    // `unwrap_or_else` avoids allocating the default string when an argument
    // was supplied (clippy: or_fun_call).
    let file_name = env::args().nth(1).unwrap_or_else(|| "sample.wasm".to_string());
    let mut file = File::create(file_name)?;
    file.write_all(&wasm_b)?;
    Ok(())
}
|
use crate::mir::{Body, Expression, Id, Mir, VisibleExpressions};
use rustc_hash::FxHashSet;
use tracing::error;
impl Mir {
    /// Validates the whole MIR, starting with empty defined/visible id sets;
    /// logs and panics on the first inconsistency found.
    pub fn validate(&self) {
        self.body
            .validate(&mut FxHashSet::default(), im::HashSet::new());
    }
}
impl Body {
    /// Checks this body's invariants, recursing into nested functions.
    ///
    /// * `defined_ids` — ids defined so far anywhere in the walk; mutated and
    ///   shared to detect duplicated definitions.
    /// * `visible` — ids in scope at this point; expressions may only capture
    ///   visible ids. Cloned per nested function so inner additions
    ///   (parameters, responsible parameter) don't leak outward.
    ///
    /// # Panics
    /// Panics (after logging the MIR) on an empty body, a capture of an
    /// invisible id, or a duplicated id.
    pub fn validate(&self, defined_ids: &mut FxHashSet<Id>, mut visible: im::HashSet<Id>) {
        if self.expressions.is_empty() {
            error!("A body of a function is empty! Functions should have at least a return value.");
            error!("This is the MIR:\n{self}");
            panic!("MIR is invalid!");
        }
        for (id, expression) in self.iter() {
            for captured in expression.captured_ids() {
                if !visible.contains(&captured) {
                    error!("MIR is invalid! {id} captures {captured}, but that's not visible.");
                    error!("This is the MIR:\n{self}");
                    panic!("MIR is invalid!");
                }
            }
            if let Expression::Function {
                original_hirs: _,
                parameters,
                responsible_parameter,
                body,
            } = expression
            {
                // Nested scope: parameters become visible inside the function.
                let mut inner_visible = visible.clone();
                inner_visible.extend(parameters.iter().copied());
                inner_visible.insert(*responsible_parameter);
                body.validate(defined_ids, inner_visible);
            }
            if defined_ids.contains(&id) {
                error!("ID {id} exists twice.");
                error!("This is the MIR:\n{self}");
                panic!("MIR is invalid!");
            }
            defined_ids.insert(id);
            // The id only becomes visible to expressions after its own.
            visible.insert(id);
        }
    }
}
impl Expression {
    /// Asserts that every id this expression captures is visible, logging and
    /// panicking otherwise.
    pub fn validate(&self, visible: &VisibleExpressions) {
        for id in self.captured_ids() {
            if visible.contains(id) {
                continue;
            }
            error!("Expression references ID {id:?}, but that ID is not visible:");
            error!("{self}");
            panic!("Expression references ID that is not in its scope.");
        }
    }
}
|
pub mod system;
pub mod todo;
use crate::config::Config;
use crate::helpers::{database, email, handler};
use actix_web::middleware::errhandlers::ErrorHandlers;
use actix_web::{http, web, App, HttpServer};
use std::sync::Arc;
/// Wires up the database pool, mailer, use-cases, controllers and REST
/// routes, then runs the HTTP server on 0.0.0.0:8000 (blocking).
///
/// # Panics
/// Panics when the database pool cannot be created, the port cannot be
/// bound, or the server fails to run.
pub fn init_services(cnfg: Arc<Config>) {
    let db_pool = database::init_pool(&cnfg, 5).expect("Failed to init database connection");
    let mailer = email::init_mailer(&cnfg);
    // Use-cases (business logic), shared between the controllers.
    let system_ucs = system::usecase::init(&cnfg, &db_pool);
    let todo_ucs = todo::usecase::init(&cnfg, &db_pool);
    let system_cnr = system::controller::init(&cnfg, &system_ucs);
    let todo_cnr = todo::controller::init(&cnfg, &todo_ucs, &system_ucs, &mailer);
    // App factory closure handed to HttpServer; it may be invoked more than once.
    let app = move || {
        let system_dlr_rest = system::delivery::rest::init(&cnfg, &system_cnr);
        let todo_dlr_rest = todo::delivery::rest::init(&cnfg, &todo_cnr);
        let api = web::scope("/api/v1")
            .service(system_dlr_rest)
            .service(todo_dlr_rest);
        App::new()
            .wrap(
                ErrorHandlers::new()
                    .handler(http::StatusCode::BAD_REQUEST, handler::bad_request_handler),
            )
            .service(api)
    };
    // Todo: Move to main file
    HttpServer::new(app)
        .bind("0.0.0.0:8000")
        .expect("Failed to bind port for the http server")
        .run()
        .expect("Failed to run http server");
}
|
use liblumen_alloc::erts::term::prelude::*;
/// Implements `erlang:is_map/1`: returns a boolean term indicating whether
/// `term` is a map.
#[native_implemented::function(erlang:is_map/1)]
pub fn result(term: Term) -> Term {
    term.is_boxed_map().into()
}
|
use std::borrow::Cow;
use std::char;
use std::ops::RangeInclusive;
use winnow::combinator::alt;
use winnow::combinator::cut_err;
use winnow::combinator::delimited;
use winnow::combinator::fail;
use winnow::combinator::opt;
use winnow::combinator::peek;
use winnow::combinator::preceded;
use winnow::combinator::repeat;
use winnow::combinator::success;
use winnow::combinator::terminated;
use winnow::prelude::*;
use winnow::stream::Stream;
use winnow::token::any;
use winnow::token::none_of;
use winnow::token::one_of;
use winnow::token::tag;
use winnow::token::take_while;
use winnow::trace::trace;
use crate::parser::errors::CustomError;
use crate::parser::numbers::HEXDIG;
use crate::parser::prelude::*;
use crate::parser::trivia::{from_utf8_unchecked, newline, ws, ws_newlines, NON_ASCII, WSCHAR};
// ;; String
// string = ml-basic-string / basic-string / ml-literal-string / literal-string
/// Parses any TOML string form, returning a borrowed `Cow` when no escape
/// processing was needed.
pub(crate) fn string<'i>(input: &mut Input<'i>) -> PResult<Cow<'i, str>> {
    // Multiline variants are tried first: their delimiters begin with the
    // same byte as the single-line forms.
    trace(
        "string",
        alt((
            ml_basic_string,
            basic_string,
            ml_literal_string,
            literal_string.map(Cow::Borrowed),
        )),
    )
    .parse_next(input)
}
// ;; Basic String
// basic-string = quotation-mark *basic-char quotation-mark
/// Parses a `"`-delimited basic string, decoding escapes.
pub(crate) fn basic_string<'i>(input: &mut Input<'i>) -> PResult<Cow<'i, str>> {
    trace("basic-string", |input: &mut Input<'i>| {
        let _ = one_of(QUOTATION_MARK).parse_next(input)?;
        // Start with a borrowed empty string: a single chunk can be returned
        // without allocating; only a second chunk forces `to_mut()` to copy.
        let mut c = Cow::Borrowed("");
        if let Some(ci) = opt(basic_chars).parse_next(input)? {
            c = ci;
        }
        while let Some(ci) = opt(basic_chars).parse_next(input)? {
            c.to_mut().push_str(&ci);
        }
        // Missing closing quote is a hard error (no backtracking).
        let _ = cut_err(one_of(QUOTATION_MARK))
            .context(StrContext::Label("basic string"))
            .parse_next(input)?;
        Ok(c)
    })
    .parse_next(input)
}
// quotation-mark = %x22 ; "
/// Delimiter byte for basic strings.
pub(crate) const QUOTATION_MARK: u8 = b'"';
// basic-char = basic-unescaped / escaped
/// Parses one chunk of string content: either a run of unescaped bytes
/// (borrowed) or a single decoded escape sequence (owned).
fn basic_chars<'i>(input: &mut Input<'i>) -> PResult<Cow<'i, str>> {
    alt((
        // Deviate from the official grammar by batching the unescaped chars so we build a string a
        // chunk at a time, rather than a `char` at a time.
        take_while(1.., BASIC_UNESCAPED)
            .try_map(std::str::from_utf8)
            .map(Cow::Borrowed),
        escaped.map(|c| Cow::Owned(String::from(c))),
    ))
    .parse_next(input)
}
// basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
// A tuple of byte classes; used with `take_while` it acts as the union of
// the listed sets (everything except control chars, `"` (0x22) and `\` (0x5C)).
pub(crate) const BASIC_UNESCAPED: (
    (u8, u8),
    u8,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
) = (WSCHAR, 0x21, 0x23..=0x5B, 0x5D..=0x7E, NON_ASCII);
// escaped = escape escape-seq-char
/// Parses a backslash escape sequence and returns the decoded char.
fn escaped(input: &mut Input<'_>) -> PResult<char> {
    preceded(ESCAPE, escape_seq_char).parse_next(input)
}
// escape = %x5C ; \
/// The escape introducer byte.
pub(crate) const ESCAPE: u8 = b'\\';
// escape-seq-char = %x22 ; " quotation mark U+0022
// escape-seq-char =/ %x5C ; \ reverse solidus U+005C
// escape-seq-char =/ %x62 ; b backspace U+0008
// escape-seq-char =/ %x66 ; f form feed U+000C
// escape-seq-char =/ %x6E ; n line feed U+000A
// escape-seq-char =/ %x72 ; r carriage return U+000D
// escape-seq-char =/ %x74 ; t tab U+0009
// escape-seq-char =/ %x75 4HEXDIG ; uXXXX U+XXXX
// escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX
/// Decodes the character after a `\`; an unknown escape is a hard error that
/// lists every accepted escape character.
fn escape_seq_char(input: &mut Input<'_>) -> PResult<char> {
    dispatch! {any;
        b'b' => success('\u{8}'),
        b'f' => success('\u{c}'),
        b'n' => success('\n'),
        b'r' => success('\r'),
        b't' => success('\t'),
        // `\u`/`\U` take 4 or 8 hex digits; bad digits are non-recoverable.
        b'u' => cut_err(hexescape::<4>).context(StrContext::Label("unicode 4-digit hex code")),
        b'U' => cut_err(hexescape::<8>).context(StrContext::Label("unicode 8-digit hex code")),
        b'\\' => success('\\'),
        b'"' => success('"'),
        _ => {
            cut_err(fail::<_, char, _>)
                .context(StrContext::Label("escape sequence"))
                .context(StrContext::Expected(StrContextValue::CharLiteral('b')))
                .context(StrContext::Expected(StrContextValue::CharLiteral('f')))
                .context(StrContext::Expected(StrContextValue::CharLiteral('n')))
                .context(StrContext::Expected(StrContextValue::CharLiteral('r')))
                .context(StrContext::Expected(StrContextValue::CharLiteral('t')))
                .context(StrContext::Expected(StrContextValue::CharLiteral('u')))
                .context(StrContext::Expected(StrContextValue::CharLiteral('U')))
                .context(StrContext::Expected(StrContextValue::CharLiteral('\\')))
                .context(StrContext::Expected(StrContextValue::CharLiteral('"')))
        }
    }
    .parse_next(input)
}
/// Parses exactly `N` hex digits and converts them to a `char`.
///
/// `take_while(0..=N)` followed by a length check is used instead of an exact
/// count — presumably so a short digit run surfaces as a `verify` failure;
/// TODO confirm the intended error shape.
pub(crate) fn hexescape<const N: usize>(input: &mut Input<'_>) -> PResult<char> {
    take_while(0..=N, HEXDIG)
        .verify(|b: &[u8]| b.len() == N)
        // The justification string previously claimed `is_ascii_digit` and had
        // a typo ("on-ASCII"); the actual filter is `HEXDIG`.
        .map(|b: &[u8]| unsafe { from_utf8_unchecked(b, "`HEXDIG` filters out non-ASCII") })
        .verify_map(|s| u32::from_str_radix(s, 16).ok())
        // Values with no corresponding `char` (e.g. surrogates) are rejected.
        .try_map(|h| char::from_u32(h).ok_or(CustomError::OutOfRange))
        .parse_next(input)
}
// ;; Multiline Basic String
// ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
// ml-basic-string-delim
/// Parses `"""..."""`; a newline immediately after the opening delimiter is
/// trimmed. A missing body or closing delimiter is a hard error.
fn ml_basic_string<'i>(input: &mut Input<'i>) -> PResult<Cow<'i, str>> {
    trace(
        "ml-basic-string",
        delimited(
            ML_BASIC_STRING_DELIM,
            preceded(opt(newline), cut_err(ml_basic_body)),
            cut_err(ML_BASIC_STRING_DELIM),
        )
        .context(StrContext::Label("multiline basic string")),
    )
    .parse_next(input)
}
// ml-basic-string-delim = 3quotation-mark
pub(crate) const ML_BASIC_STRING_DELIM: &[u8] = b"\"\"\"";

// ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ]
/// Parses the body of a multiline basic string, gluing content chunks and
/// embedded runs of one or two quotes into a single `Cow` string.
fn ml_basic_body<'i>(input: &mut Input<'i>) -> PResult<Cow<'i, str>> {
    // Stay borrowed for the first chunk; upgrade to owned only on append.
    let mut c = Cow::Borrowed("");
    if let Some(ci) = opt(mlb_content).parse_next(input)? {
        c = ci;
    }
    while let Some(ci) = opt(mlb_content).parse_next(input)? {
        c.to_mut().push_str(&ci);
    }

    // Interior `"`/`""` runs count as content only when more content follows;
    // otherwise they belong to the closing delimiter.
    while let Some(qi) = opt(mlb_quotes(none_of(b'\"').value(()))).parse_next(input)? {
        if let Some(ci) = opt(mlb_content).parse_next(input)? {
            c.to_mut().push_str(qi);
            c.to_mut().push_str(&ci);
            while let Some(ci) = opt(mlb_content).parse_next(input)? {
                c.to_mut().push_str(&ci);
            }
        } else {
            break;
        }
    }

    // Up to two quotes directly before the `"""` delimiter are content.
    if let Some(qi) = opt(mlb_quotes(tag(ML_BASIC_STRING_DELIM).value(()))).parse_next(input)? {
        c.to_mut().push_str(qi);
    }
    Ok(c)
}
// mlb-content = mlb-char / newline / mlb-escaped-nl
// mlb-char = mlb-unescaped / escaped
/// Parses one chunk of multiline-basic content: an unescaped run, a
/// line-ending backslash, an escape sequence, or a (normalized) newline.
fn mlb_content<'i>(input: &mut Input<'i>) -> PResult<Cow<'i, str>> {
    alt((
        // Deviate from the official grammar by batching the unescaped chars so we build a string a
        // chunk at a time, rather than a `char` at a time.
        take_while(1.., MLB_UNESCAPED)
            .try_map(std::str::from_utf8)
            .map(Cow::Borrowed),
        // Order changed from grammar so `escaped` can more easily `cut_err` on bad escape sequences
        mlb_escaped_nl.map(|_| Cow::Borrowed("")),
        escaped.map(|c| Cow::Owned(String::from(c))),
        // Line endings are normalized to `\n`.
        newline.map(|_| Cow::Borrowed("\n")),
    ))
    .parse_next(input)
}
// mlb-quotes = 1*2quotation-mark
/// Parses one or two quotation marks, but only when followed (peeked, not
/// consumed) by `term` — this keeps the closing `"""` delimiter intact.
fn mlb_quotes<'i>(
    mut term: impl winnow::Parser<Input<'i>, (), ContextError>,
) -> impl Parser<Input<'i>, &'i str, ContextError> {
    move |input: &mut Input<'i>| {
        let start = input.checkpoint();
        // Greedily try two quotes first, then fall back to one.
        let res = terminated(b"\"\"", peek(term.by_ref()))
            .map(|b| unsafe { from_utf8_unchecked(b, "`bytes` out non-ASCII") })
            .parse_next(input);

        match res {
            Err(winnow::error::ErrMode::Backtrack(_)) => {
                input.reset(start);
                terminated(b"\"", peek(term.by_ref()))
                    .map(|b| unsafe { from_utf8_unchecked(b, "`bytes` out non-ASCII") })
                    .parse_next(input)
            }
            res => res,
        }
    }
}
// mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii
// Same set as `BASIC_UNESCAPED`: everything except `"` (0x22) and `\` (0x5C).
pub(crate) const MLB_UNESCAPED: (
    (u8, u8),
    u8,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
) = (WSCHAR, 0x21, 0x23..=0x5B, 0x5D..=0x7E, NON_ASCII);
// mlb-escaped-nl = escape ws newline *( wschar / newline )
// When the last non-whitespace character on a line is a \,
// it will be trimmed along with all whitespace
// (including newlines) up to the next non-whitespace
// character or closing delimiter.
/// Consumes one or more line-ending backslashes and the whitespace that
/// follows each of them, producing nothing.
fn mlb_escaped_nl(input: &mut Input<'_>) -> PResult<()> {
    // `.map(|()| ())` pins `repeat`'s accumulator type to `()`; the previous
    // trailing `.value(())` mapped `()` to `()` again and has been removed.
    repeat(1.., (ESCAPE, ws, ws_newlines))
        .map(|()| ())
        .parse_next(input)
}
// ;; Literal String
// literal-string = apostrophe *literal-char apostrophe
/// Parses `'...'`; there are no escapes, so the result always borrows from
/// the input.
pub(crate) fn literal_string<'i>(input: &mut Input<'i>) -> PResult<&'i str> {
    trace(
        "literal-string",
        delimited(
            APOSTROPHE,
            cut_err(take_while(0.., LITERAL_CHAR)),
            cut_err(APOSTROPHE),
        )
        .try_map(std::str::from_utf8)
        .context(StrContext::Label("literal string")),
    )
    .parse_next(input)
}

// apostrophe = %x27 ; ' apostrophe
pub(crate) const APOSTROPHE: u8 = b'\'';
// literal-char = %x09 / %x20-26 / %x28-7E / non-ascii
// Allows tab but no other control characters; excludes `'` (0x27).
pub(crate) const LITERAL_CHAR: (
    u8,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
) = (0x9, 0x20..=0x26, 0x28..=0x7E, NON_ASCII);
// ;; Multiline Literal String
// ml-literal-string = ml-literal-string-delim [ newline ] ml-literal-body
// ml-literal-string-delim
/// Parses `'''...'''`; trims a newline right after the opening delimiter and
/// normalizes `\r\n` to `\n`, allocating only when a CRLF is actually present.
fn ml_literal_string<'i>(input: &mut Input<'i>) -> PResult<Cow<'i, str>> {
    trace(
        "ml-literal-string",
        delimited(
            (ML_LITERAL_STRING_DELIM, opt(newline)),
            cut_err(ml_literal_body.map(|t| {
                if t.contains("\r\n") {
                    Cow::Owned(t.replace("\r\n", "\n"))
                } else {
                    Cow::Borrowed(t)
                }
            })),
            cut_err(ML_LITERAL_STRING_DELIM),
        )
        .context(StrContext::Label("multiline literal string")),
    )
    .parse_next(input)
}
// ml-literal-string-delim = 3apostrophe
pub(crate) const ML_LITERAL_STRING_DELIM: &[u8] = b"'''";

// ml-literal-body = *mll-content *( mll-quotes 1*mll-content ) [ mll-quotes ]
/// Parses the body of a multiline literal string. The sub-parsers are run
/// only for their span; `recognize` returns the whole matched slice.
fn ml_literal_body<'i>(input: &mut Input<'i>) -> PResult<&'i str> {
    (
        repeat(0.., mll_content).map(|()| ()),
        repeat(
            0..,
            (
                // Interior `'`/`''` runs must be followed by more content.
                mll_quotes(none_of(APOSTROPHE).value(())),
                repeat(1.., mll_content).map(|()| ()),
            ),
        )
        .map(|()| ()),
        // Up to two apostrophes directly before the closing delimiter.
        opt(mll_quotes(tag(ML_LITERAL_STRING_DELIM).value(()))),
    )
        .recognize()
        .try_map(std::str::from_utf8)
        .parse_next(input)
}
// mll-content = mll-char / newline
/// Parses one byte of multiline-literal content (a literal char or newline).
fn mll_content(input: &mut Input<'_>) -> PResult<u8> {
    alt((one_of(MLL_CHAR), newline)).parse_next(input)
}

// mll-char = %x09 / %x20-26 / %x28-7E / non-ascii
// Same as `LITERAL_CHAR` (tab allowed, `'` excluded); newlines handled above.
const MLL_CHAR: (
    u8,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
    RangeInclusive<u8>,
) = (0x9, 0x20..=0x26, 0x28..=0x7E, NON_ASCII);
// mll-quotes = 1*2apostrophe
/// Parses one or two apostrophes, but only when followed (peeked, not
/// consumed) by `term` — the `'''` closing delimiter is left untouched.
fn mll_quotes<'i>(
    mut term: impl winnow::Parser<Input<'i>, (), ContextError>,
) -> impl Parser<Input<'i>, &'i str, ContextError> {
    move |input: &mut Input<'i>| {
        let start = input.checkpoint();
        // Greedily try two apostrophes first, then fall back to one.
        let res = terminated(b"''", peek(term.by_ref()))
            .map(|b| unsafe { from_utf8_unchecked(b, "`bytes` out non-ASCII") })
            .parse_next(input);

        match res {
            Err(winnow::error::ErrMode::Backtrack(_)) => {
                input.reset(start);
                terminated(b"'", peek(term.by_ref()))
                    .map(|b| unsafe { from_utf8_unchecked(b, "`bytes` out non-ASCII") })
                    .parse_next(input)
            }
            res => res,
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn basic_string() {
        // Covers \" \t \uXXXX \n and \UXXXXXXXX escapes in one input.
        let input =
            r#""I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF. \U0002070E""#;
        let expected = "I\'m a string. \"You can quote me\". Name\tJosé\nLocation\tSF. \u{2070E}";
        let parsed = string.parse(new_input(input));
        assert_eq!(parsed.as_deref(), Ok(expected), "Parsing {input:?}");
    }

    #[test]
    fn ml_basic_string() {
        let cases = [
            // The newline right after `"""` is trimmed.
            (
                r#""""
Roses are red
Violets are blue""""#,
                r#"Roses are red
Violets are blue"#,
            ),
            // Escaped quotes directly before the closing delimiter.
            (r#"""" \""" """"#, " \"\"\" "),
            (r#"""" \\""""#, " \\"),
        ];

        for &(input, expected) in &cases {
            let parsed = string.parse(new_input(input));
            assert_eq!(parsed.as_deref(), Ok(expected), "Parsing {input:?}");
        }

        // Unbalanced quotes / dangling escapes must fail to parse.
        let invalid_cases = [r#"""" """#, r#"""" \""""#];

        for input in &invalid_cases {
            let parsed = string.parse(new_input(input));
            assert!(parsed.is_err());
        }
    }

    #[test]
    fn ml_basic_string_escape_ws() {
        // A trailing `\` trims the newline and all following whitespace.
        let inputs = [
            r#""""
The quick brown \
fox jumps over \
the lazy dog.""""#,
            r#""""\
The quick brown \
fox jumps over \
the lazy dog.\
""""#,
        ];
        for input in &inputs {
            let expected = "The quick brown fox jumps over the lazy dog.";
            let parsed = string.parse(new_input(input));
            assert_eq!(parsed.as_deref(), Ok(expected), "Parsing {input:?}");
        }

        // Escaped newlines alone collapse to the empty string.
        let empties = [
            r#""""\
""""#,
            r#""""
\
\
""""#,
        ];
        for input in &empties {
            let expected = "";
            let parsed = string.parse(new_input(input));
            assert_eq!(parsed.as_deref(), Ok(expected), "Parsing {input:?}");
        }
    }

    #[test]
    fn literal_string() {
        // Backslashes are literal: no escape processing at all.
        let inputs = [
            r#"'C:\Users\nodejs\templates'"#,
            r#"'\\ServerX\admin$\system32\'"#,
            r#"'Tom "Dubs" Preston-Werner'"#,
            r#"'<\i\c*\s*>'"#,
        ];

        for input in &inputs {
            let expected = &input[1..input.len() - 1];
            let parsed = string.parse(new_input(input));
            assert_eq!(parsed.as_deref(), Ok(expected), "Parsing {input:?}");
        }
    }

    #[test]
    fn ml_literal_string() {
        let inputs = [
            r#"'''I [dw]on't need \d{2} apples'''"#,
            // A quote directly inside the delimiters is preserved.
            r#"''''one_quote''''"#,
        ];
        for input in &inputs {
            let expected = &input[3..input.len() - 3];
            let parsed = string.parse(new_input(input));
            assert_eq!(parsed.as_deref(), Ok(expected), "Parsing {input:?}");
        }

        // The newline right after `'''` is trimmed (hence the `4..` slice).
        let input = r#"'''
The first newline is
trimmed in raw strings.
All other whitespace
is preserved.
'''"#;
        let expected = &input[4..input.len() - 3];
        let parsed = string.parse(new_input(input));
        assert_eq!(parsed.as_deref(), Ok(expected), "Parsing {input:?}");
    }
}
|
use crate::error::Diagnostic;
use crate::util::{
format_doc, iter_use_idents, pyclass_ident_and_attrs, text_signature, AttrItemMeta,
AttributeExt, ClassItemMeta, ContentItem, ContentItemInner, ErrorVec, ItemMeta, ItemNursery,
ModuleItemMeta, SimpleItemMeta, ALL_ALLOWED_NAMES,
};
use proc_macro2::TokenStream;
use quote::{quote, quote_spanned, ToTokens};
use std::{collections::HashSet, str::FromStr};
use syn::{parse_quote, spanned::Spanned, Attribute, AttributeArgs, Ident, Item, Result};
use syn_ext::ext::*;
/// The `#[py*]` marker attributes a `#[pymodule]` item can carry.
#[derive(Clone, Copy, Eq, PartialEq)]
enum AttrName {
    Function, // #[pyfunction]
    Attr,     // #[pyattr]
    Class,    // #[pyclass]
}
// Renders the variant as its attribute name (e.g. `pyfunction`).
impl std::fmt::Display for AttrName {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // `Formatter::pad` is exactly what `str`'s `Display` impl calls, so
        // width/fill/alignment flags behave the same as delegating `s.fmt(f)`.
        f.pad(match self {
            Self::Function => "pyfunction",
            Self::Attr => "pyattr",
            Self::Class => "pyclass",
        })
    }
}
// Parses an attribute name back into its variant; the unmatched input is
// returned verbatim as the error value.
impl FromStr for AttrName {
    type Err = String;

    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        match s {
            "pyfunction" => Ok(Self::Function),
            "pyattr" => Ok(Self::Attr),
            "pyclass" => Ok(Self::Class),
            other => Err(other.to_owned()),
        }
    }
}
/// Mutable state accumulated while scanning a `#[pymodule]` body.
#[derive(Default)]
struct ModuleContext {
    /// The Python-visible module name.
    name: String,
    /// Collected `#[pyfunction]`s for the generated method-def table.
    function_items: FunctionNursery,
    /// Collected `#[pyattr]`/`#[pyclass]` attribute initializers.
    attribute_items: ItemNursery,
    has_extend_module: bool, // TODO: check if `fn extend_module` exists
    /// Errors are accumulated instead of aborting at the first failure.
    errors: Vec<syn::Error>,
}
/// Expands `#[pymodule]` on a `mod`: scans its items for `#[pyfunction]`,
/// `#[pyattr]` and `#[pyclass]` markers, then appends the generated module
/// machinery (`MODULE_NAME`, `DOC`, `__module_def`, `make_module`, …).
pub fn impl_pymodule(attr: AttributeArgs, module_item: Item) -> Result<TokenStream> {
    let (doc, mut module_item) = match module_item {
        Item::Mod(m) => (m.attrs.doc(), m),
        other => bail_span!(other, "#[pymodule] can only be on a full module"),
    };
    let fake_ident = Ident::new("pymodule", module_item.span());
    let module_meta =
        ModuleItemMeta::from_nested(module_item.ident.clone(), fake_ident, attr.into_iter())?;

    // generation resources
    let mut context = ModuleContext {
        name: module_meta.simple_name()?,
        ..Default::default()
    };
    let items = module_item.items_mut().ok_or_else(|| {
        module_meta.new_meta_error("requires actual module, not a module declaration")
    })?;

    // collect to context
    for item in items.iter_mut() {
        if matches!(item, Item::Impl(_) | Item::Trait(_)) {
            // #[pyclass] implementations
            continue;
        }
        // Split off the item's attributes, map them to module items, and let
        // each generate its code; errors are collected, not fatal here.
        let r = item.try_split_attr_mut(|attrs, item| {
            let (py_items, cfgs) = attrs_to_module_items(attrs, module_item_new)?;
            // Reverse order — presumably so `attrs.remove(index)` inside the
            // generators does not shift indices still to be processed.
            for py_item in py_items.iter().rev() {
                let r = py_item.gen_module_item(ModuleItemArgs {
                    item,
                    attrs,
                    context: &mut context,
                    cfgs: cfgs.as_slice(),
                });
                context.errors.ok_or_push(r);
            }
            Ok(())
        });
        context.errors.ok_or_push(r);
    }

    // append additional items
    let module_name = context.name.as_str();
    let function_items = context.function_items.validate()?;
    let attribute_items = context.attribute_items.validate()?;
    // Without a doc comment on the module, fall back to the doc database.
    let doc = doc.or_else(|| {
        crate::doc::Database::shared()
            .try_path(module_name)
            .ok()
            .flatten()
            .map(str::to_owned)
    });
    let doc = if let Some(doc) = doc {
        quote!(Some(#doc))
    } else {
        quote!(None)
    };
    let is_submodule = module_meta.sub()?;
    let withs = module_meta.with()?;
    // Top-level modules get the full def/constructor plumbing; submodules
    // only contribute their extension functions below.
    if !is_submodule {
        items.extend(iter_chain![
            parse_quote! {
                pub(crate) const MODULE_NAME: &'static str = #module_name;
            },
            parse_quote! {
                pub(crate) const DOC: Option<&'static str> = #doc;
            },
            parse_quote! {
                pub(crate) fn __module_def(
                    ctx: &::rustpython_vm::Context,
                ) -> &'static ::rustpython_vm::builtins::PyModuleDef {
                    DEF.get_or_init(|| {
                        #[allow(clippy::ptr_arg)]
                        let method_defs = {
                            let mut method_defs = Vec::new();
                            extend_method_def(ctx, &mut method_defs);
                            method_defs
                        };
                        let mut def = ::rustpython_vm::builtins::PyModuleDef {
                            name: ctx.intern_str(MODULE_NAME),
                            doc: DOC.map(|doc| ctx.intern_str(doc)),
                            methods: Box::leak(method_defs.into_boxed_slice()),
                            slots: Default::default(),
                        };
                        def.slots.exec = Some(extend_module);
                        def
                    })
                }
            },
            parse_quote! {
                #[allow(dead_code)]
                pub(crate) fn make_module(
                    vm: &::rustpython_vm::VirtualMachine
                ) -> ::rustpython_vm::PyRef<::rustpython_vm::builtins::PyModule> {
                    use ::rustpython_vm::PyPayload;
                    let module = ::rustpython_vm::builtins::PyModule::from_def(__module_def(&vm.ctx)).into_ref(&vm.ctx);
                    __init_dict(vm, &module);
                    extend_module(vm, &module).unwrap();
                    module
                }
            },
        ]);
    }
    // Supply a default `extend_module` unless the module wrote its own.
    if !is_submodule && !context.has_extend_module {
        items.push(parse_quote! {
            pub(crate) fn extend_module(vm: &::rustpython_vm::VirtualMachine, module: &::rustpython_vm::Py<::rustpython_vm::builtins::PyModule>) -> ::rustpython_vm::PyResult<()> {
                __extend_module(vm, module);
                Ok(())
            }
        });
    }
    // Items common to both top-level modules and submodules; the `#withs`
    // expansions chain in any sibling modules listed via `with(...)`.
    items.extend(iter_chain![
        parse_quote! {
            ::rustpython_vm::common::static_cell! {
                pub(crate) static DEF: ::rustpython_vm::builtins::PyModuleDef;
            }
        },
        parse_quote! {
            #[allow(clippy::ptr_arg)]
            pub(crate) fn extend_method_def(
                ctx: &::rustpython_vm::Context,
                method_defs: &mut Vec<::rustpython_vm::function::PyMethodDef>,
            ) {
                #(
                    super::#withs::extend_method_def(ctx, method_defs);
                )*
                #function_items
            }
        },
        parse_quote! {
            pub(crate) fn __init_attributes(
                vm: &::rustpython_vm::VirtualMachine,
                module: &::rustpython_vm::Py<::rustpython_vm::builtins::PyModule>,
            ) {
                #(
                    super::#withs::__init_attributes(vm, module);
                )*
                let ctx = &vm.ctx;
                #attribute_items
            }
        },
        parse_quote! {
            pub(crate) fn __extend_module(
                vm: &::rustpython_vm::VirtualMachine,
                module: &::rustpython_vm::Py<::rustpython_vm::builtins::PyModule>,
            ) {
                module.__init_methods(vm).unwrap();
                __init_attributes(vm, module);
            }
        },
        parse_quote! {
            pub(crate) fn __init_dict(
                vm: &::rustpython_vm::VirtualMachine,
                module: &::rustpython_vm::Py<::rustpython_vm::builtins::PyModule>,
            ) {
                ::rustpython_vm::builtins::PyModule::__init_dict_from_def(vm, module);
            }
        },
    ]);

    // Emit the rewritten module; collected errors become compile errors.
    Ok(if let Some(error) = context.errors.into_error() {
        let error = Diagnostic::from(error);
        quote! {
            #module_item
            #error
        }
    } else {
        module_item.into_token_stream()
    })
}
/// Builds the boxed `ModuleItem` for one recognized `#[py*]` attribute.
fn module_item_new(
    index: usize,
    attr_name: AttrName,
    py_attrs: Vec<usize>,
) -> Box<dyn ModuleItem<AttrName = AttrName>> {
    // Exactly one arm runs, so the shared inner payload can be built once up
    // front and moved into whichever item kind the attribute maps to.
    let inner = ContentItemInner { index, attr_name };
    match attr_name {
        AttrName::Function => Box::new(FunctionItem { inner, py_attrs }),
        AttrName::Attr => Box::new(AttributeItem { inner, py_attrs }),
        AttrName::Class => Box::new(ClassItem { inner, py_attrs }),
    }
}
/// Partitions an item's attribute list into generated module items and the
/// `cfg` attributes guarding them.
///
/// Expected ordering: leading `#[cfg]`s, then `#[pyattr]`s, then the
/// `#[pyfunction]`/`#[pyclass]` they decorate.
fn attrs_to_module_items<F, R>(attrs: &[Attribute], item_new: F) -> Result<(Vec<R>, Vec<Attribute>)>
where
    F: Fn(usize, AttrName, Vec<usize>) -> R,
{
    let mut cfgs: Vec<Attribute> = Vec::new();
    let mut result = Vec::new();

    let mut iter = attrs.iter().enumerate().peekable();
    while let Some((_, attr)) = iter.peek() {
        // take all cfgs but no py items
        let attr = *attr;
        if let Some(ident) = attr.get_ident() {
            let attr_name = ident.to_string();
            if attr_name == "cfg" {
                cfgs.push(attr.clone());
            } else if ALL_ALLOWED_NAMES.contains(&attr_name.as_str()) {
                // First #[py*] marker found: switch to the second phase.
                break;
            }
        }
        iter.next();
    }

    // `closed` is set once a #[pyattr]-decorated item has been emitted.
    let mut closed = false;
    let mut py_attrs = Vec::new();
    for (i, attr) in iter {
        // take py items but no cfgs
        let attr_name = if let Some(ident) = attr.get_ident() {
            ident.to_string()
        } else {
            continue;
        };
        if attr_name == "cfg" {
            bail_span!(attr, "#[py*] items must be placed under `cfgs`")
        }
        let attr_name = match AttrName::from_str(attr_name.as_str()) {
            Ok(name) => name,
            Err(wrong_name) => {
                if !ALL_ALLOWED_NAMES.contains(&wrong_name.as_str()) {
                    // Unrelated attribute (e.g. #[doc]); skip it.
                    continue;
                } else if closed {
                    bail_span!(attr, "Only one #[pyattr] annotated #[py*] item can exist")
                } else {
                    bail_span!(attr, "#[pymodule] doesn't accept #[{}]", wrong_name)
                }
            }
        };
        if attr_name == AttrName::Attr {
            if !result.is_empty() {
                bail_span!(
                    attr,
                    "#[pyattr] must be placed on top of other #[py*] items",
                )
            }
            // Remember the #[pyattr]'s index; it attaches to the next item.
            py_attrs.push(i);
            continue;
        }

        if py_attrs.is_empty() {
            result.push(item_new(i, attr_name, Vec::new()));
        } else {
            match attr_name {
                AttrName::Class | AttrName::Function => {
                    result.push(item_new(i, attr_name, py_attrs.clone()));
                }
                _ => {
                    bail_span!(
                        attr,
                        "#[pyclass] or #[pyfunction] only can follow #[pyattr]",
                    )
                }
            }
            py_attrs.clear();
            closed = true;
        }
    }
    // A trailing run of #[pyattr]s with no following item: the last index
    // becomes the attribute item itself, with the rest as its extra names.
    if let Some(last) = py_attrs.pop() {
        assert!(!closed);
        result.push(item_new(last, AttrName::Attr, py_attrs));
    }
    Ok((result, cfgs))
}
/// Collects `#[pyfunction]` entries before duplicate-name validation.
#[derive(Default)]
struct FunctionNursery {
    items: Vec<FunctionNurseryItem>,
}

/// One `#[pyfunction]` plus every Python name it is exported under.
struct FunctionNurseryItem {
    py_names: Vec<String>,
    cfgs: Vec<Attribute>,
    ident: Ident,
    doc: String,
}
impl FunctionNursery {
    fn add_item(&mut self, item: FunctionNurseryItem) {
        self.items.push(item);
    }

    /// Rejects duplicate exported names. The set is keyed by
    /// `(name, cfg list)`, so the same name under different `cfg`s is allowed.
    fn validate(self) -> Result<ValidatedFunctionNursery> {
        let mut name_set = HashSet::new();
        for item in &self.items {
            for py_name in &item.py_names {
                if !name_set.insert((py_name.to_owned(), &item.cfgs)) {
                    bail_span!(item.ident, "duplicate method name `{}`", py_name);
                }
            }
        }
        Ok(ValidatedFunctionNursery(self))
    }
}
/// A `FunctionNursery` whose names have passed duplicate checking.
struct ValidatedFunctionNursery(FunctionNursery);

impl ToTokens for ValidatedFunctionNursery {
    /// Emits one `method_defs.push(PyMethodDef::new(...))` per exported name,
    /// guarded by the item's `cfg` attributes.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        for item in &self.0.items {
            let ident = &item.ident;
            let cfgs = &item.cfgs;
            let py_names = &item.py_names;
            let doc = &item.doc;
            let flags = quote! { rustpython_vm::function::PyMethodFlags::empty() };

            tokens.extend(quote! {
                #(#cfgs)*
                {
                    let doc = Some(#doc);
                    #(method_defs.push(rustpython_vm::function::PyMethodDef::new(
                        (#py_names),
                        #ident,
                        #flags,
                        doc,
                    ));)*
                }
            });
        }
    }
}
/// #[pyfunction]
struct FunctionItem {
    inner: ContentItemInner<AttrName>,
    /// Attribute-list indices of the `#[pyattr]`s stacked above this item.
    py_attrs: Vec<usize>,
}

/// #[pyclass]
struct ClassItem {
    inner: ContentItemInner<AttrName>,
    /// Attribute-list indices of the `#[pyattr]`s stacked above this item.
    py_attrs: Vec<usize>,
}

/// #[pyattr]
struct AttributeItem {
    inner: ContentItemInner<AttrName>,
    /// Indices of additional stacked `#[pyattr]`s (extra exported names).
    py_attrs: Vec<usize>,
}
// Boilerplate: expose each item's `ContentItemInner` through `ContentItem`.
impl ContentItem for FunctionItem {
    type AttrName = AttrName;
    fn inner(&self) -> &ContentItemInner<AttrName> {
        &self.inner
    }
}

impl ContentItem for ClassItem {
    type AttrName = AttrName;
    fn inner(&self) -> &ContentItemInner<AttrName> {
        &self.inner
    }
}

impl ContentItem for AttributeItem {
    type AttrName = AttrName;
    fn inner(&self) -> &ContentItemInner<AttrName> {
        &self.inner
    }
}
/// Everything a `ModuleItem` generator needs: the item being rewritten, its
/// remaining attributes, the shared module context, and the active `cfg`s.
struct ModuleItemArgs<'a> {
    item: &'a mut Item,
    attrs: &'a mut Vec<Attribute>,
    context: &'a mut ModuleContext,
    cfgs: &'a [Attribute],
}

impl<'a> ModuleItemArgs<'a> {
    /// The Python-visible module name from the shared context.
    fn module_name(&'a self) -> &'a str {
        self.context.name.as_str()
    }
}
/// A parsed `#[py*]` marker that can generate its module-registration code.
trait ModuleItem: ContentItem {
    fn gen_module_item(&self, args: ModuleItemArgs<'_>) -> Result<()>;
}
impl ModuleItem for FunctionItem {
    /// Registers a `#[pyfunction]`: resolves its Python name(s) and doc text,
    /// then adds it to the function nursery.
    fn gen_module_item(&self, args: ModuleItemArgs<'_>) -> Result<()> {
        let func = args
            .item
            .function_or_method()
            .map_err(|_| self.new_syn_error(args.item.span(), "can only be on a function"))?;
        let ident = &func.sig().ident;

        let item_attr = args.attrs.remove(self.index());
        let item_meta = SimpleItemMeta::from_attr(ident.clone(), &item_attr)?;

        let py_name = item_meta.simple_name()?;
        let sig_doc = text_signature(func.sig(), &py_name);

        let module = args.module_name();
        // Prefer an explicit doc comment; otherwise consult the doc database.
        let doc = args.attrs.doc().or_else(|| {
            crate::doc::Database::shared()
                .try_module_item(module, &py_name)
                .ok() // TODO: doc must exist at least one of code or CPython
                .flatten()
                .map(str::to_owned)
        });
        let doc = if let Some(doc) = doc {
            format_doc(&sig_doc, &doc)
        } else {
            sig_doc
        };
        // Each stacked #[pyattr(name = ...)] adds an extra exported name.
        let py_names = {
            if self.py_attrs.is_empty() {
                vec![py_name]
            } else {
                let mut py_names = HashSet::new();
                py_names.insert(py_name);
                // Reverse order: removing the highest index first keeps the
                // remaining (lower) indices valid across `attrs.remove`.
                for attr_index in self.py_attrs.iter().rev() {
                    let mut loop_unit = || {
                        let attr_attr = args.attrs.remove(*attr_index);
                        let item_meta = SimpleItemMeta::from_attr(ident.clone(), &attr_attr)?;

                        let py_name = item_meta.simple_name()?;
                        let inserted = py_names.insert(py_name.clone());
                        if !inserted {
                            return Err(self.new_syn_error(
                                ident.span(),
                                &format!(
                                    "`{py_name}` is duplicated name for multiple py* attribute"
                                ),
                            ));
                        }
                        Ok(())
                    };
                    let r = loop_unit();
                    args.context.errors.ok_or_push(r);
                }
                let py_names: Vec<_> = py_names.into_iter().collect();
                py_names
            }
        };
        args.context.function_items.add_item(FunctionNurseryItem {
            ident: ident.to_owned(),
            py_names,
            cfgs: args.cfgs.to_vec(),
            doc,
        });
        Ok(())
    }
}
impl ModuleItem for ClassItem {
    /// Registers a `#[pyclass]`: emits code that builds the class at
    /// module-init time and binds it under zero or more attribute names.
    fn gen_module_item(&self, args: ModuleItemArgs<'_>) -> Result<()> {
        let (ident, _) = pyclass_ident_and_attrs(args.item)?;
        let (class_name, class_new) = {
            let class_attr = &mut args.attrs[self.inner.index];
            let no_attr = class_attr.try_remove_name("no_attr")?;
            if self.py_attrs.is_empty() {
                // check no_attr before ClassItemMeta::from_attr
                if no_attr.is_none() {
                    bail_span!(
                        ident,
                        "#[{name}] requires #[pyattr] to be a module attribute. \
                         To keep it free type, try #[{name}(no_attr)]",
                        name = self.attr_name()
                    )
                }
            }
            let no_attr = no_attr.is_some();
            let is_use = matches!(&args.item, syn::Item::Use(_));

            let class_meta = ClassItemMeta::from_attr(ident.clone(), class_attr)?;
            let module_name = args.context.name.clone();
            // Ensure the #[pyclass] attribute carries `module = ...`;
            // an explicit module override takes precedence.
            let module_name = if let Some(class_module_name) = class_meta.module().ok().flatten() {
                class_module_name
            } else {
                class_attr.fill_nested_meta("module", || {
                    parse_quote! {module = #module_name}
                })?;
                module_name
            };
            let class_name = if no_attr && is_use {
                "<NO ATTR>".to_owned()
            } else {
                class_meta.class_name()?
            };
            let class_new = quote_spanned!(ident.span() =>
                let new_class = <#ident as ::rustpython_vm::class::PyClassImpl>::make_class(ctx);
                new_class.set_attr(rustpython_vm::identifier!(ctx, __module__), vm.new_pyobj(#module_name));
            );
            (class_name, class_new)
        };

        // One name per stacked #[pyattr]; defaults to the class name itself.
        let mut py_names = Vec::new();
        for attr_index in self.py_attrs.iter().rev() {
            let mut loop_unit = || {
                let attr_attr = args.attrs.remove(*attr_index);
                let item_meta = SimpleItemMeta::from_attr(ident.clone(), &attr_attr)?;

                let py_name = item_meta
                    .optional_name()
                    .unwrap_or_else(|| class_name.clone());
                py_names.push(py_name);
                Ok(())
            };
            let r = loop_unit();
            args.context.errors.ok_or_push(r);
        }

        let set_attr = match py_names.len() {
            0 => quote! {
                let _ = new_class;  // suppress warning
                let _ = vm.ctx.intern_str(#class_name);
            },
            1 => {
                let py_name = &py_names[0];
                quote! {
                    vm.__module_set_attr(&module, vm.ctx.intern_str(#py_name), new_class).unwrap();
                }
            }
            _ => quote! {
                for name in [#(#py_names,)*] {
                    vm.__module_set_attr(&module, vm.ctx.intern_str(name), new_class.clone()).unwrap();
                }
            },
        };

        args.context.attribute_items.add_item(
            ident.clone(),
            py_names,
            args.cfgs.to_vec(),
            quote_spanned! { ident.span() =>
                #class_new
                #set_attr
            },
            0,
        )?;
        Ok(())
    }
}
impl ModuleItem for AttributeItem {
    /// Registers a `#[pyattr]`: binds a function's result, a const, or
    /// re-exported (`use`) items as module attributes.
    fn gen_module_item(&self, args: ModuleItemArgs<'_>) -> Result<()> {
        let cfgs = args.cfgs.to_vec();
        let attr = args.attrs.remove(self.index());

        // Determine the identifier, Python name, and the code that produces
        // the attribute's value object.
        let (ident, py_name, let_obj) = match args.item {
            Item::Fn(syn::ItemFn { sig, block, .. }) => {
                let ident = &sig.ident;
                // If `once` keyword is in #[pyattr],
                // wrapping it with static_cell for preventing it from using it as function
                let attr_meta = AttrItemMeta::from_attr(ident.clone(), &attr)?;
                if attr_meta.inner()._bool("once")? {
                    let stmts = &block.stmts;
                    let return_type = match &sig.output {
                        syn::ReturnType::Default => {
                            unreachable!("#[pyattr] attached function must have return type.")
                        }
                        syn::ReturnType::Type(_, ty) => ty,
                    };
                    // Rewrite the body: compute once, cache in a static cell,
                    // and clone the cached value on later calls.
                    let stmt: syn::Stmt = parse_quote! {
                        {
                            rustpython_common::static_cell! {
                                static ERROR: #return_type;
                            }
                            ERROR
                                .get_or_init(|| {
                                    #(#stmts)*
                                })
                                .clone()
                        }
                    };
                    block.stmts = vec![stmt];
                }

                let py_name = attr_meta.simple_name()?;
                (
                    ident.clone(),
                    py_name,
                    quote_spanned! { ident.span() =>
                        let obj = vm.new_pyobj(#ident(vm));
                    },
                )
            }
            Item::Const(syn::ItemConst { ident, .. }) => {
                let item_meta = SimpleItemMeta::from_attr(ident.clone(), &attr)?;
                let py_name = item_meta.simple_name()?;
                (
                    ident.clone(),
                    py_name,
                    quote_spanned! { ident.span() =>
                        let obj = vm.new_pyobj(#ident);
                    },
                )
            }
            Item::Use(item) => {
                if !self.py_attrs.is_empty() {
                    return Err(self
                        .new_syn_error(item.span(), "Only single #[pyattr] is allowed for `use`"));
                }
                // Each re-exported ident becomes its own attribute and is
                // registered inside this loop, so return early afterwards.
                let _ = iter_use_idents(item, |ident, is_unique| {
                    let item_meta = SimpleItemMeta::from_attr(ident.clone(), &attr)?;
                    let py_name = if is_unique {
                        item_meta.simple_name()?
                    } else if item_meta.optional_name().is_some() {
                        // this check actually doesn't need to be placed in loop
                        return Err(self.new_syn_error(
                            ident.span(),
                            "`name` attribute is not allowed for multiple use items",
                        ));
                    } else {
                        ident.to_string()
                    };
                    let tokens = quote_spanned! { ident.span() =>
                        vm.__module_set_attr(module, vm.ctx.intern_str(#py_name), vm.new_pyobj(#ident)).unwrap();
                    };
                    args.context.attribute_items.add_item(
                        ident.clone(),
                        vec![py_name],
                        cfgs.clone(),
                        tokens,
                        1,
                    )?;
                    Ok(())
                })?;
                return Ok(());
            }
            other => {
                return Err(
                    self.new_syn_error(other.span(), "can only be on a function, const and use")
                )
            }
        };

        // Stacked #[pyattr(name = ...)]s bind the same object under extra names.
        let (tokens, py_names) = if self.py_attrs.is_empty() {
            (
                quote_spanned! { ident.span() => {
                    #let_obj
                    vm.__module_set_attr(module, vm.ctx.intern_str(#py_name), obj).unwrap();
                }},
                vec![py_name],
            )
        } else {
            let mut names = vec![py_name];
            // Reverse order keeps earlier indices valid across `remove`s.
            for attr_index in self.py_attrs.iter().rev() {
                let mut loop_unit = || {
                    let attr_attr = args.attrs.remove(*attr_index);
                    let item_meta = AttrItemMeta::from_attr(ident.clone(), &attr_attr)?;
                    if item_meta.inner()._bool("once")? {
                        return Err(self.new_syn_error(
                            ident.span(),
                            "#[pyattr(once)] is only allowed for the bottom-most item",
                        ));
                    }
                    let py_name = item_meta.optional_name().ok_or_else(|| {
                        self.new_syn_error(
                            ident.span(),
                            "#[pyattr(name = ...)] is mandatory except for the bottom-most item",
                        )
                    })?;
                    names.push(py_name);
                    Ok(())
                };
                let r = loop_unit();
                args.context.errors.ok_or_push(r);
            }
            (
                // BUGFIX: was `[(#(#names,)*)]`, which expands to an array
                // holding a single TUPLE of all names — the generated loop
                // would not bind each name in turn (cf. ClassItem, which uses
                // `[#(#py_names,)*]`). Drop the stray parentheses so every
                // name gets its own attribute binding.
                quote_spanned! { ident.span() => {
                    #let_obj
                    for name in [#(#names,)*] {
                        vm.__module_set_attr(module, vm.ctx.intern_str(name), obj.clone()).unwrap();
                    }
                }},
                names,
            )
        };
        args.context
            .attribute_items
            .add_item(ident, py_names, cfgs, tokens, 1)?;
        Ok(())
    }
}
|
extern crate libc;
extern crate base64;
use std::env;
use std::ffi::CString;
use libc::c_char;
use std::str;
use std::slice;
/// Byte buffer returned by the C `tts_msg` call.
///
/// `#[repr(C)]` keeps the field layout compatible with the C struct.
/// Ownership of `data` appears to transfer to the caller (it is freed in
/// `process_msg`) — verify against the ttss headers.
#[repr(C)]
struct SoundData {
    // Number of valid bytes at `data` (assumed — TODO confirm).
    length: i32,
    // Heap buffer allocated by the C library.
    data: *mut c_char
}
// FFI surface of the native `ttss` text-to-speech library.
#[link(name = "ttss")]
extern {
    // Presumably must run before tts_msg (main calls it first) — TODO confirm.
    fn tts_init();
    // Synthesizes `msg` (NUL-terminated) and returns the sample buffer.
    fn tts_msg(msg: *const c_char) -> SoundData;
}
/// Entry point: synthesizes the single command-line message and prints the
/// sample count plus the base64-encoded audio to stdout.
fn main() {
    // args() includes the program name, so "< 2" means no message was given.
    if env::args().len() < 2 {
        // Usage errors go to stderr so stdout stays clean for the payload
        // printed below (previously these were written to stdout).
        eprintln!("ERROR: wrong number of command-line arguments.");
        eprintln!("USAGE: my-tts message");
        std::process::exit(1);
    }
    init_tts();
    let data = process_msg(env::args().nth(1).unwrap());
    println!("{} {}", data.len(), base64::encode(&data));
}
/// Initializes the native TTS engine.
fn init_tts() {
    // SAFETY: assumed to have no preconditions per the ttss library — TODO confirm.
    unsafe { tts_init();}
}
/// Synthesizes `msg` via the C library and returns the audio bytes as an
/// owned `Vec<u8>`, freeing the C-side buffer.
///
/// Panics if `msg` contains an interior NUL byte (unrepresentable in C).
fn process_msg(msg: String) -> Vec<u8> {
    let message_c = CString::new(msg).unwrap();
    // SAFETY: `message_c` outlives the call and is a valid NUL-terminated string.
    let sound = unsafe { tts_msg(message_c.as_ptr()) };

    // BUGFIX: `slice::from_raw_parts` on a null pointer is UB, and a negative
    // `length` cast straight to `usize` would produce an enormous slice.
    if sound.data.is_null() {
        return Vec::new();
    }
    let len = usize::try_from(sound.length).unwrap_or(0);

    let c_buf = sound.data as *mut u8;
    // SAFETY: the library reports `length` valid bytes at `data`
    // (assumed contract — verify against the ttss docs); null/negative
    // cases were rejected above.
    let buf = unsafe { slice::from_raw_parts(c_buf, len) };
    let v = buf.to_vec();
    // SAFETY: `data` is presumably malloc'd by the C side and is freed
    // exactly once here — verify against the ttss docs.
    unsafe { libc::free(c_buf as *mut libc::c_void); }
    v
}
|
//! # My Crate
//!
//! `my_crate` is a collection of utilities that make certain
//! calculations more convenient.
//! # Art
//!
//! A library for modeling artistic concepts.
// For a crate with many nested modules, re-exporting types at the top level
// with `pub use` makes a big difference to the experience of the crate's users;
// `pub use` decouples the crate's internal organization from the API presented to end users.
pub use kinds::PrimaryColor;
pub use kinds::SecondaryColor;
pub use utils::mix;
pub mod kinds {
    /// The primary colors according to the RGB color model.
    pub enum PrimaryColor {
        Red,
        Yellow,
        Blue,
    }

    /// The secondary colors according to the RGB color model.
    pub enum SecondaryColor {
        Orange,
        Green,
        Purple,
    }
}
pub mod utils {
    use crate::kinds::*;

    /// Combines two primary colors in equal amounts to create
    /// a secondary color.
    pub fn mix(c1: PrimaryColor, c2: PrimaryColor) -> SecondaryColor {
        // NOTE(review): stub — the inputs are ignored and Green is always
        // returned; real mixing logic is presumably intended here.
        SecondaryColor::Green
    }
}
/// Adds one to the given number.
///
/// # Examples
///
/// ```
/// let five = 5;
///
/// assert_eq!(6, my_crate::add_one(five));
/// ```
pub fn add_one(x: i32) -> i32 {
    1 + x
}
|
mod filter;
pub use filter::*;
mod identity;
pub use identity::*;
|
use crate::parser::*;
use common::*;
use std::collections::HashMap;
use thiserror::Error;
/// Encodes a parsed entity into its bit pattern within an instruction byte.
pub trait IntoInstruction {
    fn instruction_bits(&self) -> u8;
}
impl IntoInstruction for Source {
    /// The source occupies the top two bits of the instruction byte.
    fn instruction_bits(&self) -> u8 {
        match self {
            Source::Expansion => 0b00_000000,
            Source::Accumulator => 0b01_000000,
            Source::Memory => 0b10_000000,
            // All immediate forms share one source code; the actual
            // value/label byte follows as a second instruction byte.
            Source::Operand(_) | Source::LabelHi(_) | Source::LabelLo(_) => 0b11_000000,
        }
    }
}
impl IntoInstruction for Destination {
    /// The destination occupies the middle four bits of the instruction byte.
    fn instruction_bits(&self) -> u8 {
        match self {
            Destination::Memory => 0b00_0000_00,
            Destination::AccumulatorPlus => 0b00_0001_00,
            Destination::AccumulatorNand => 0b00_0010_00,
            Destination::Accumulator => 0b00_0011_00,
            Destination::ProgramCounterLatch => 0b00_0100_00,
            Destination::ProgramCounter => 0b00_0101_00,
            Destination::MemAddressLo => 0b00_0110_00,
            Destination::MemAddressHi => 0b00_0111_00,
            Destination::Serial => 0b00_1000_00,
            Destination::Led => 0b00_1001_00,
            Destination::CarrySet => 0b00_1010_00,
            Destination::CarryReset => 0b00_1011_00,
            Destination::ExpansionSelect => 0b00_1100_00,
        }
    }
}
impl IntoInstruction for Operation {
    /// Assembles the full instruction byte: source bits, destination bits,
    /// and the two condition flags in the lowest bits.
    fn instruction_bits(&self) -> u8 {
        let mut bits = self.src.instruction_bits() | self.dest.instruction_bits();
        if self.cond_carry {
            bits |= 0b000000_01;
        }
        if self.cond_1 {
            bits |= 0b000000_10;
        }
        bits
    }
}
/// Errors produced while assembling parsed statements into bytecode.
#[derive(Error, Debug, Clone, PartialEq)]
pub enum AssemblerError {
    /// The same label was defined more than once.
    #[error("Label repeated {label}, line: {line}")]
    RepeatLabel { label: String, line: usize },
    /// An instruction referenced a label that was never defined.
    #[error("Unrecognized label \"{label}\", line: {line}")]
    UnrecognizedLabel { label: String, line: usize },
    /// An immediate/label-source instruction targeted memory (disallowed).
    #[error("Forbidden instruction, line: {line}")]
    ForbiddenInstruction { line: usize },
}
/// Two-pass assembler: pass one records every label's address, pass two
/// emits the bytecode with labels resolved.
pub fn assemble(lines: &[(Statement, usize)]) -> Result<Vec<u8>, AssemblerError> {
    // Pass 1: compute label addresses. Operations with an immediate or label
    // source occupy two bytes; all others occupy one.
    let mut label_addrs = HashMap::new();
    let mut addr: u16 = 0;
    for (statement, line_no) in lines {
        match statement {
            Statement::Label(name) => {
                if label_addrs.insert(name.clone(), addr).is_some() {
                    return Err(AssemblerError::RepeatLabel {
                        label: name.clone(),
                        line: *line_no,
                    });
                }
            }
            Statement::Operation(op) => {
                addr += match op.src {
                    Source::Operand(_) | Source::LabelLo(_) | Source::LabelHi(_) => 2,
                    _ => 1,
                };
            }
        }
    }

    // Pass 2: emit the opcode byte, then the optional immediate byte.
    let mut bytecode = Vec::new();
    for (statement, line_no) in lines {
        let op = match statement {
            Statement::Operation(op) => op,
            Statement::Label(_) => continue,
        };
        bytecode.push(op.instruction_bits());

        let resolve = |name: &String| {
            label_addrs
                .get(name)
                .copied()
                .ok_or_else(|| AssemblerError::UnrecognizedLabel {
                    label: name.clone(),
                    line: *line_no,
                })
        };
        match op.src {
            Source::Operand(value) => bytecode.push(value),
            Source::LabelHi(ref name) => bytecode.push((resolve(name)? >> 8) as u8),
            Source::LabelLo(ref name) => bytecode.push((resolve(name)? & 0x00FF) as u8),
            _ => (),
        }

        // Instructions with an immediate/label source may not also write to
        // memory (the original assembler's rule); checked after resolution so
        // an unknown label still reports UnrecognizedLabel first.
        if matches!(op.src, Source::Operand(_) | Source::LabelLo(_) | Source::LabelHi(_))
            && op.dest == Destination::Memory
        {
            return Err(AssemblerError::ForbiddenInstruction { line: *line_no });
        }
    }
    Ok(bytecode)
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_operation_bits() {
        // Accumulator -> Accumulator, no conditions.
        assert_eq!(
            Operation {
                src: Source::Accumulator,
                dest: Destination::Accumulator,
                cond_1: false,
                cond_carry: false
            }
            .instruction_bits(),
            0b01_0011_00
        );
        // Immediate source, carry-conditional.
        assert_eq!(
            Operation {
                src: Source::Operand(0x00),
                dest: Destination::CarrySet,
                cond_1: false,
                cond_carry: true
            }
            .instruction_bits(),
            0b11_1010_01
        );
    }

    #[test]
    fn test_assembler() {
        // One 1-byte op, a label (at address 1), then two 2-byte label loads.
        let instructions = [
            (
                Statement::Operation(Operation {
                    src: Source::Accumulator,
                    dest: Destination::Accumulator,
                    cond_carry: false,
                    cond_1: false,
                }),
                1,
            ),
            (Statement::Label("loop".into()), 2),
            (
                Statement::Operation(Operation {
                    src: Source::LabelLo("loop".into()),
                    dest: Destination::ProgramCounterLatch,
                    cond_carry: false,
                    cond_1: false,
                }),
                2,
            ),
            (
                Statement::Operation(Operation {
                    src: Source::LabelHi("loop".into()),
                    dest: Destination::ProgramCounter,
                    cond_carry: false,
                    cond_1: false,
                }),
                3,
            ),
        ];
        // "loop" resolves to address 1: lo byte 1, hi byte 0.
        let expected_bytecode = vec![
            0b01_0011_00,
            0b11_0100_00,
            0b0000000001,
            0b11_0101_00,
            0b0000000000,
        ];
        assert_eq!(assemble(&instructions), Ok(expected_bytecode));
    }
}
|
mod availability;
mod dungeon_availability;
mod location_availability;
mod rule;
pub use crate::lttp::logic::{
availability::Availability,
dungeon_availability::DungeonAvailability,
location_availability::LocationAvailability,
rule::Rule,
};
use serde::{
Deserialize,
Serialize,
};
use ts_rs::TS;
/// Which glitch ruleset the randomizer logic should assume.
///
/// `Glitchless` is the default ruleset.
#[allow(clippy::module_name_repetitions)]
// `Default` folded into the single derive list instead of a separate,
// trailing `#[derive(Default)]` attribute.
#[derive(Debug, Clone, Copy, Default, PartialEq, PartialOrd, Ord, Eq, Deserialize, Serialize, TS)]
#[ts(export, export_to = "ui/src/server_types/RandoLogic.ts")]
#[serde(rename_all = "camelCase")]
pub enum RandoLogic {
    #[default]
    Glitchless,
    OverWorldGlitches,
    MajorGlitches,
}
|
//! HDMI
//!
//! Size: 128K
use core::marker::PhantomData;
use core::ops::{Deref, DerefMut};
use static_assertions::const_assert_eq;
pub const PADDR: usize = 0x01EE_0000;
const PHY_OFFSET: usize = 0x0001_0000;
pub const PHY_PADDR: usize = PADDR + PHY_OFFSET;
// VersionId (RegisterBlock offset 0x000): controller version; whole word via `Bits`.
register! {
    VersionId,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// Control (0x004): single `Enable` flag in bit 31.
register! {
    Control,
    u32,
    RW,
    Fields [
        Enable WIDTH(U1) OFFSET(U31),
    ]
}
// Interrupt (0x008): raw interrupt word, untyped bits.
register! {
    Interrupt,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// Hpd (0x00C): hot-plug `Detect` flag in bit 0.
register! {
    Hpd,
    u32,
    RW,
    Fields [
        Detect WIDTH(U1) OFFSET(U0),
    ]
}
// VideoControl (0x010): video path control word, untyped bits.
register! {
    VideoControl,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// VideoSize (0x014): video geometry word, untyped bits.
register! {
    VideoSize,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// VideoBp (0x018): presumably back-porch timing — TODO confirm against datasheet.
register! {
    VideoBp,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// VideoFp (0x01C): presumably front-porch timing — TODO confirm.
register! {
    VideoFp,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// VideoSpw (0x020): presumably sync-pulse width — TODO confirm.
register! {
    VideoSpw,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// VideoPolarity (0x024): sync polarity word, untyped bits.
register! {
    VideoPolarity,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// QcpPacket0 (0x0E0): QCP packet word 0.
register! {
    QcpPacket0,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// QcpPacket1 (0x0E4): QCP packet word 1.
register! {
    QcpPacket1,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PadControl0 (0x200): pad control word 0, untyped bits.
register! {
    PadControl0,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PadControl1 (0x204): only the `Halve` flag (bit 6) is modelled.
register! {
    PadControl1,
    u32,
    RW,
    Fields [
        Halve WIDTH(U1) OFFSET(U6),
    ]
}
// PllControl (0x208): PLL control word, untyped bits.
register! {
    PllControl,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PllDbg0 (0x20C): bit 21 selects the source PLL (PLL3/video0 or PLL7/video1).
register! {
    PllDbg0,
    u32,
    RW,
    Fields [
        Pll WIDTH(U1) OFFSET(U21) [
            Pll3Video0 = U0,
            Pll7Video1 = U1
        ]
    ]
}
// PllDbg1 (0x210): PLL debug word 1, untyped bits.
register! {
    PllDbg1,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// HpdCec (0x214): combined HPD/CEC word, untyped bits.
register! {
    HpdCec,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PacketControl0 (0x2F0): packet control word 0, untyped bits.
register! {
    PacketControl0,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PacketControl1 (0x2F4): packet control word 1, untyped bits.
register! {
    PacketControl1,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// AudioSampleCount (0x310): audio sample counter, untyped bits.
register! {
    AudioSampleCount,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// AudioTxFifo (0x400): audio transmit FIFO access word.
register! {
    AudioTxFifo,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcControl (0x500): DDC (EDID/I2C) control word.
register! {
    DdcControl,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcExreg (0x504): DDC extended register, untyped bits.
register! {
    DdcExreg,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcCommand (0x508): DDC command word.
register! {
    DdcCommand,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcAddress (0x50C): DDC slave/segment address word.
register! {
    DdcAddress,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcIntMask (0x510): DDC interrupt mask.
register! {
    DdcIntMask,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcIntStatus (0x514): DDC interrupt status.
register! {
    DdcIntStatus,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcFifoControl (0x518): DDC FIFO control.
register! {
    DdcFifoControl,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcFifoStatus (0x51C): DDC FIFO status.
register! {
    DdcFifoStatus,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcClock (0x520): DDC clock configuration.
register! {
    DdcClock,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcTimeout (0x524): DDC timeout configuration.
register! {
    DdcTimeout,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// DdcFifoData (0x580): DDC FIFO data access word.
register! {
    DdcFifoData,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PhyPol (0x1_0000, first PHY register): polarity word, untyped bits.
register! {
    PhyPol,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PhyReadEn (0x1_0010): PHY read-enable word.
register! {
    PhyReadEn,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PhyUnscramble (0x1_0014): PHY unscramble word.
register! {
    PhyUnscramble,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PhyControl (0x1_0020): PHY control; field names are positional
// (B* = single bits, F* = multi-bit fields) — exact meanings are not
// documented here, TODO confirm against the SoC manual.
register! {
    PhyControl,
    u32,
    RW,
    Fields [
        B0 WIDTH(U1) OFFSET(U0),
        B1 WIDTH(U1) OFFSET(U1),
        B2 WIDTH(U1) OFFSET(U2),
        B3 WIDTH(U1) OFFSET(U3),
        F0 WIDTH(U3) OFFSET(U4) [
            Full = U7
        ]
        B7 WIDTH(U1) OFFSET(U7),
        F1 WIDTH(U4) OFFSET(U8) [
            Full = U15
        ]
        F2 WIDTH(U4) OFFSET(U12) [
            Full = U15
        ]
        B16 WIDTH(U1) OFFSET(U16),
        B18 WIDTH(U1) OFFSET(U18),
        B19 WIDTH(U1) OFFSET(U19),
    ]
}
// PhyUnk1 (0x1_0024): purpose unknown.
register! {
    PhyUnk1,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PhyUnk2 (0x1_0028): purpose unknown.
register! {
    PhyUnk2,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PhyPll (0x1_002C): PHY PLL configuration (low 6-bit field plus bits 25/30/31).
register! {
    PhyPll,
    u32,
    RW,
    Fields [
        F0 WIDTH(U6) OFFSET(U0),
        B25 WIDTH(U1) OFFSET(U25),
        B30 WIDTH(U1) OFFSET(U30),
        B31 WIDTH(U1) OFFSET(U31),
    ]
}
// PhyClock (0x1_0030): PHY clock word.
register! {
    PhyClock,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PhyUnk3 (0x1_0034): purpose unknown.
register! {
    PhyUnk3,
    u32,
    RW,
    Fields [
        Bits WIDTH(U32) OFFSET(U0),
    ]
}
// PhyStatus (0x1_0038): `Ready` (bit 7) and `PlugIn` (bit 19) status flags.
register! {
    PhyStatus,
    u32,
    RW,
    Fields [
        Ready WIDTH(U1) OFFSET(U7),
        PlugIn WIDTH(U1) OFFSET(U19),
    ]
}
// Undocumented word at 0x1FC; byte 3 carries an "IH mute" value.
register! {
    // Offset 0x01FC..=0x01FF
    Rx1fc,
    u32,
    RW,
    Fields [
        Byte0 WIDTH(U8) OFFSET(U0),
        Byte1 WIDTH(U8) OFFSET(U8),
        Byte2 WIDTH(U8) OFFSET(U16),
        IhMute WIDTH(U8) OFFSET(U24) [
            Disabled = U3
        ]
    ]
}
// I2C-master bytes packed into one word (op / int / ctl-int / divisor).
register! {
    // Offset 0x7E04..=0x7E07
    Rx7e04,
    u32,
    RW,
    Fields [
        I2cmOp WIDTH(U8) OFFSET(U0),
        I2cmInt WIDTH(U8) OFFSET(U8),
        I2cmCtlInt WIDTH(U8) OFFSET(U16),
        I2cmDiv WIDTH(U8) OFFSET(U24),
    ]
}
// I2C-master segment address / soft reset / segment pointer / SS SCL high count.
register! {
    // Offset 0x7E08..=0x7E0B
    Rx7e08,
    u32,
    RW,
    Fields [
        I2cmSegAddr WIDTH(U8) OFFSET(U0),
        I2cmSoftRstz WIDTH(U8) OFFSET(U8),
        I2cmSegPtr WIDTH(U8) OFFSET(U16),
        I2cmSsSclHcnt1Addr WIDTH(U8) OFFSET(U24),
    ]
}
// I2C-master standard-speed/fast-speed SCL count bytes.
register! {
    // Offset 0x7E0C..=0x7E0F
    Rx7e0c,
    u32,
    RW,
    Fields [
        I2cmSsSclHcnt0Addr WIDTH(U8) OFFSET(U0),
        I2cmSsSclLcnt1Addr WIDTH(U8) OFFSET(U8),
        I2cmSsSclLcnt0Addr WIDTH(U8) OFFSET(U16),
        I2cmFsSclHcnt1Addr WIDTH(U8) OFFSET(U24),
    ]
}
// Remaining fast-speed SCL count bytes plus one unnamed byte.
register! {
    // Offset 0x7E10..=0x7E13
    Rx7e10,
    u32,
    RW,
    Fields [
        I2cmFsSclHcnt0Addr WIDTH(U8) OFFSET(U0),
        I2cmFsSclLcnt1Addr WIDTH(U8) OFFSET(U8),
        I2cmFsSclLcnt0Addr WIDTH(U8) OFFSET(U16),
        Byte3 WIDTH(U8) OFFSET(U24),
    ]
}
// Compile-time layout check: the struct must end exactly after the last
// PHY register (phy_status at 0x1_0038 + 4 bytes = 0x1_003C total).
const_assert_eq!(core::mem::size_of::<RegisterBlock>(), 0x0001_003C);
/// Memory-mapped HDMI controller register layout. The `__reserved_*`
/// arrays are padding that keeps every register at the offset noted in
/// its trailing comment; the PHY registers start at `PADDR + PHY_OFFSET`.
#[repr(C)]
pub struct RegisterBlock {
    pub version: VersionId::Register, // 0x000
    pub ctrl: Control::Register, // 0x004
    pub irq: Interrupt::Register, // 0x008
    pub hpd: Hpd::Register, // 0x00C
    pub video_ctrl: VideoControl::Register, // 0x010
    pub video_size: VideoSize::Register, // 0x014
    pub video_bp: VideoBp::Register, // 0x018
    pub video_fp: VideoFp::Register, // 0x01C
    pub video_spw: VideoSpw::Register, // 0x020
    pub video_polarity: VideoPolarity::Register, // 0x024
    __reserved_0: [u32; 22], // 0x028
    pub avi_info_frame: [u32; 5], // 0x080
    __reserved_2: [u32; 19], // 0x094
    pub qcp_packet0: QcpPacket0::Register, // 0x0E0
    pub qcp_packet1: QcpPacket1::Register, // 0x0E4
    __reserved_3: [u32; 69], // 0x0E8
    pub r1fc: Rx1fc::Register, // 0x1FC
    pub pad_ctrl0: PadControl0::Register, // 0x200
    pub pad_ctrl1: PadControl1::Register, // 0x204
    pub pll_ctrl: PllControl::Register, // 0x208
    pub pll_dbg0: PllDbg0::Register, // 0x20C
    pub pll_dbg1: PllDbg1::Register, // 0x210
    pub hpd_cec: HpdCec::Register, // 0x214
    __reserved_5: [u32; 10], // 0x218
    pub vendor_info_frame: [u32; 5], // 0x240
    __reserved_6: [u32; 39], // 0x254
    pub pkt_ctrl0: PacketControl0::Register, // 0x2F0
    pub pkt_ctrl1: PacketControl1::Register, // 0x2F4
    __reserved_7: [u32; 6], // 0x2F8
    pub audio_sample_count: AudioSampleCount::Register, // 0x310
    __reserved_8: [u32; 59], // 0x314
    pub audio_tx_fifo: AudioTxFifo::Register, // 0x400
    __reserved_9: [u32; 63], // 0x404
    pub ddc_ctrl: DdcControl::Register, // 0x500
    pub ddc_exreg: DdcExreg::Register, // 0x504
    pub ddc_cmd: DdcCommand::Register, // 0x508
    pub ddc_addr: DdcAddress::Register, // 0x50C
    pub ddc_int_mask: DdcIntMask::Register, // 0x510
    pub ddc_int_status: DdcIntStatus::Register, // 0x514
    pub ddc_fifo_ctrl: DdcFifoControl::Register, // 0x518
    pub ddc_fifo_status: DdcFifoStatus::Register, // 0x51C
    pub ddc_clock: DdcClock::Register, // 0x520
    pub ddc_timeout: DdcTimeout::Register, // 0x524
    __reserved_10: [u32; 22], // 0x528
    pub ddc_fifo_data: DdcFifoData::Register, // 0x580
    __reserved_11: [u32; 7712], // 0x584
    pub r7e04: Rx7e04::Register, // 0x7E04
    pub r7e08: Rx7e08::Register, // 0x7E08
    pub r7e0c: Rx7e0c::Register, // 0x7E0C
    pub r7e10: Rx7e10::Register, // 0x7E10
    __reserved_12: [u32; 8315], // 0x7E14
    pub phy_pol: PhyPol::Register, // 0x1_0000
    __reserved_13: [u32; 3], // 0x1_0004
    pub phy_read_en: PhyReadEn::Register, // 0x1_0010
    pub phy_unscramble: PhyUnscramble::Register, // 0x1_0014
    __reserved_14: [u32; 2], // 0x1_0018
    pub phy_ctrl: PhyControl::Register, // 0x1_0020
    pub phy_unk1: PhyUnk1::Register, // 0x1_0024
    pub phy_unk2: PhyUnk2::Register, // 0x1_0028
    pub phy_pll: PhyPll::Register, // 0x1_002C
    pub phy_clk: PhyClock::Register, // 0x1_0030
    pub phy_unk3: PhyUnk3::Register, // 0x1_0034
    pub phy_status: PhyStatus::Register, // 0x1_0038
}
/// Zero-sized handle to the HDMI register block at [`PADDR`].
///
/// The raw-pointer `PhantomData` suppresses the auto `Send`/`Sync`
/// impls; `Send` is restored manually below.
pub struct HDMI {
    _marker: PhantomData<*const ()>,
}
// SAFETY: the handle carries no data — it only names a fixed physical
// address. NOTE(review): this assumes callers do not use aliasing
// handles concurrently; confirm at the construction sites.
unsafe impl Send for HDMI {}
impl HDMI {
    /// Creates the handle.
    ///
    /// # Safety
    /// The caller must ensure the HDMI register block is actually
    /// accessible at [`PADDR`] and must not create concurrent aliasing
    /// handles.
    pub unsafe fn from_paddr() -> Self {
        Self {
            _marker: PhantomData,
        }
    }
    /// Raw const pointer to the register block.
    pub fn as_ptr(&self) -> *const RegisterBlock {
        PADDR as *const _
    }
    /// Raw const pointer without owning a handle.
    ///
    /// # Safety
    /// Caller must uphold the same conditions as [`HDMI::from_paddr`].
    pub const unsafe fn ptr() -> *const RegisterBlock {
        PADDR as *const _
    }
    /// Raw mutable pointer to the register block.
    pub fn as_mut_ptr(&mut self) -> *mut RegisterBlock {
        PADDR as *mut _
    }
    /// Raw mutable pointer without owning a handle.
    ///
    /// # Safety
    /// Caller must uphold the same conditions as [`HDMI::from_paddr`].
    pub const unsafe fn mut_ptr() -> *mut RegisterBlock {
        PADDR as *mut _
    }
}
// Let `hdmi.version`, `hdmi.ctrl`, … resolve directly to the MMIO block.
impl Deref for HDMI {
    type Target = RegisterBlock;
    fn deref(&self) -> &RegisterBlock {
        // The pointer is non-null by construction (PADDR is a fixed,
        // non-zero constant); validity of the mapping is the
        // `from_paddr` caller's obligation.
        unsafe { &*self.as_ptr() }
    }
}
impl DerefMut for HDMI {
    fn deref_mut(&mut self) -> &mut RegisterBlock {
        // Exclusive `&mut self` guarantees no other borrow through this
        // handle; see the Send impl note for cross-handle aliasing.
        unsafe { &mut *self.as_mut_ptr() }
    }
}
|
use crate::spec::{EncodingType, FramePointer, LinkerFlavor, Target, TargetOptions};
pub fn target() -> Target {
let mut base = super::apple_base::opts("macos");
base.cpu = "core2".into();
base.max_atomic_width = Some(128); // core2 support cmpxchg16b
base.frame_pointer = FramePointer::Always;
base.pre_link_args.insert(
LinkerFlavor::Gcc,
vec!["-m64".into(), "-arch".into(), "x86_64".into()],
);
let mut link_env_remove = super::apple_base::macos_link_env_remove();
base.link_env_remove.append(&mut link_env_remove);
// don't use probe-stack=inline-asm until rust#83139 and rust#84667 are resolved
base.stack_probes = false;
// Clang automatically chooses a more specific target based on
// MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work
// correctly, we do too.
let arch = "x86_64";
let llvm_target = super::apple_base::macos_llvm_target(&arch);
Target {
llvm_target: llvm_target.into(),
pointer_width: 64,
data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
.into(),
arch: arch.into(),
options: TargetOptions {
encoding: EncodingType::Encoding64Nanboxed,
mcount: "\u{1}mcount".into(),
..base
},
}
}
|
use nix::fcntl::{open, fcntl, FcntlArg, OFlag};
use nix::unistd::{getpid};
use std::time::Duration;
use std::thread::sleep;
use libc::{flock, F_RDLCK, F_WRLCK, SEEK_SET};
use nix::sys::stat::Mode;
use std::os::raw::c_short;
/// Probes byte-range locks on /tmp/lock_segment.test in 5-byte segments,
/// reporting for each segment whether a write and a read lock could be
/// acquired (without actually taking either).
fn main() {
    let lock_file_path = "/tmp/lock_segment.test";
    // Note: no O_CREAT — the file must already exist (it is expected to
    // be created by a companion process holding the locks under test).
    let lock_file = open(lock_file_path,
        OFlag::O_RDWR,
        Mode::S_IRUSR | Mode::S_IWUSR | Mode::S_IRGRP | Mode::S_IWGRP | Mode::S_IROTH | Mode::S_IWOTH);
    match lock_file {
        Err(e) => {
            println!("open failure, {}, {}", e, getpid());
            // Keep the process alive briefly so the failure is observable.
            sleep(Duration::from_secs(3));
        }
        Ok(f) => {
            let step = 5;
            for i in (0..99).step_by(step) {
                println!("Testing region, {} - {}, {}", i, i+step, getpid());
                // F_GETLK probe for a write lock on [i, i+step).
                // l_pid is primed to -1; a conflicting lock overwrites it
                // with the holder's pid.
                // NOTE(review): the conventional success check is
                // l_type == F_UNLCK after the call — confirm the l_pid
                // sentinel is reliable on the target platform.
                let mut lock_w_info = flock{
                    l_type: F_WRLCK as c_short,
                    l_whence: SEEK_SET as c_short,
                    l_start: i as i64,
                    l_len: step as i64,
                    l_pid: -1,
                };
                // NOTE(review): message says F_WRLCK but the fcntl command
                // is F_GETLK (this is the write-lock probe failing).
                match fcntl(f, FcntlArg::F_GETLK(&mut lock_w_info)){
                    Err(e) => println!("Failed to F_WRLCK, {}, {}", e, getpid()),
                    Ok(_) => {
                        if lock_w_info.l_pid != -1 {
                            println!("Write Locked Would Failure, {}", getpid());
                        }else{
                            println!("Write Locked Would Success, {}", getpid());
                        }
                    }
                }
                // Same probe again, this time asking about a read lock.
                let mut lock_r_info = flock{
                    l_type: F_RDLCK as c_short,
                    l_whence: SEEK_SET as c_short,
                    l_start: i as i64,
                    l_len: step as i64,
                    l_pid: -1,
                };
                match fcntl(f, FcntlArg::F_GETLK(&mut lock_r_info)){
                    Err(e) => println!("Failed to F_GETLK, {}, {}", e, getpid()),
                    Ok(_) => {
                        if lock_r_info.l_pid != -1 {
                            println!("Read Locked Would Failure, {}", getpid());
                        } else{
                            println!("Read Locked Would Success, {}", getpid());
                        }
                    }
                }
            }
        }
    }
}
/*
* Copyright © 2019-today Peter M. Stahl pemistahl@gmail.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expressed or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#![cfg(target_family = "wasm")]
use grex::WasmRegExpBuilder;
use indoc::indoc;
use wasm_bindgen::JsValue;
use wasm_bindgen_test::*;
// Run these wasm tests inside a browser environment.
wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser);
// Building from non-empty test cases succeeds and yields an alternation.
#[wasm_bindgen_test]
fn assert_regexpbuilder_succeeds() {
    let test_cases = Box::new([JsValue::from("hello"), JsValue::from("world")]);
    let builder = WasmRegExpBuilder::from(test_cases);
    assert!(builder.is_ok());
    let regexp = builder.unwrap().build();
    assert_eq!(regexp, "^(?:hello|world)$");
}
// An empty test-case list is rejected with a descriptive error.
#[wasm_bindgen_test]
fn assert_regexpbuilder_fails() {
    let builder = WasmRegExpBuilder::from(Box::new([]));
    assert_eq!(
        builder.err(),
        Some(JsValue::from(
            "No test cases have been provided for regular expression generation"
        ))
    );
}
// Digits are generalized to \d.
#[wasm_bindgen_test]
fn test_conversion_of_digits() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withConversionOfDigits()
        .build();
    assert_eq!(regexp, "^(?:abc |\\d\\d\\d)$");
}
// Non-digits are generalized to \D.
#[wasm_bindgen_test]
fn test_conversion_of_non_digits() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withConversionOfNonDigits()
        .build();
    assert_eq!(regexp, "^(?:\\D\\D\\D\\D\\D|123)$");
}
// Whitespace is generalized to \s.
#[wasm_bindgen_test]
fn test_conversion_of_whitespace() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withConversionOfWhitespace()
        .build();
    assert_eq!(regexp, "^(?:abc\\s\\s|123)$");
}
// Non-whitespace is generalized to \S.
#[wasm_bindgen_test]
fn test_conversion_of_non_whitespace() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withConversionOfNonWhitespace()
        .build();
    assert_eq!(regexp, "^\\S\\S\\S(?: )?$");
}
// Word characters are generalized to \w.
#[wasm_bindgen_test]
fn test_conversion_of_words() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withConversionOfWords()
        .build();
    assert_eq!(regexp, "^\\w\\w\\w(?: )?$");
}
// Non-word characters are generalized to \W.
#[wasm_bindgen_test]
fn test_conversion_of_non_words() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withConversionOfNonWords()
        .build();
    assert_eq!(regexp, "^(?:abc\\W\\W|123)$");
}
// Runs of repeated characters are collapsed into {n} quantifiers.
#[wasm_bindgen_test]
fn test_conversion_of_repetitions() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withConversionOfRepetitions()
        .build();
    assert_eq!(regexp, "^(?:abc {2}|123)$");
}
// Case-insensitive mode prepends (?i) and merges case variants.
#[wasm_bindgen_test]
fn test_case_insensitive_matching() {
    let test_cases = Box::new([
        JsValue::from("ABC"),
        JsValue::from("abc "),
        JsValue::from("123"),
    ]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withCaseInsensitiveMatching()
        .build();
    assert_eq!(regexp, "(?i)^(?:abc(?: )?|123)$");
}
// Capturing groups replace the default non-capturing (?: ) groups.
#[wasm_bindgen_test]
fn test_capturing_groups() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withCapturingGroups()
        .build();
    assert_eq!(regexp, "^(abc |123)$");
}
// Non-ASCII characters are escaped as \u{...} (without surrogate pairs).
#[wasm_bindgen_test]
fn test_escaping_of_non_ascii_chars() {
    let test_cases = Box::new([
        JsValue::from("abc "),
        JsValue::from("123"),
        JsValue::from("♥"),
    ]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withEscapingOfNonAsciiChars(false)
        .build();
    assert_eq!(regexp, "^(?:abc |123|\\u{2665})$");
}
// Verbose mode emits an (?x) free-spacing, multi-line pattern.
#[wasm_bindgen_test]
fn test_verbose_mode() {
    let test_cases = Box::new([
        JsValue::from("abc "),
        JsValue::from("123"),
        JsValue::from("♥"),
    ]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withVerboseMode()
        .build();
    assert_eq!(
        regexp,
        indoc!(
            r#"
(?x)
^
(?:
abc\ \
|
123
|
♥
)
$"#
        )
    );
}
// The leading ^ anchor can be dropped.
#[wasm_bindgen_test]
fn test_without_start_anchor() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withoutStartAnchor()
        .build();
    assert_eq!(regexp, "(?:abc |123)$");
}
// The trailing $ anchor can be dropped.
#[wasm_bindgen_test]
fn test_without_end_anchor() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withoutEndAnchor()
        .build();
    assert_eq!(regexp, "^(?:abc |123)");
}
// Both anchors can be dropped at once.
#[wasm_bindgen_test]
fn test_without_anchors() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let regexp = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withoutAnchors()
        .build();
    assert_eq!(regexp, "(?:abc |123)");
}
// A minimum repetition count of zero is rejected.
#[wasm_bindgen_test]
fn test_minimum_repetitions() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let builder = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withMinimumRepetitions(0);
    assert_eq!(
        builder.err(),
        Some(JsValue::from(
            "Quantity of minimum repetitions must be greater than zero"
        ))
    );
}
// A minimum substring length of zero is rejected.
#[wasm_bindgen_test]
fn test_minimum_substring_length() {
    let test_cases = Box::new([JsValue::from("abc "), JsValue::from("123")]);
    let builder = WasmRegExpBuilder::from(test_cases)
        .unwrap()
        .withMinimumSubstringLength(0);
    assert_eq!(
        builder.err(),
        Some(JsValue::from(
            "Minimum substring length must be greater than zero"
        ))
    );
}
|
use std::convert::{TryInto, TryFrom};
use ebml_iterable::tools::{self as ebml_tools, Vint};
use ebml_iterable::tags::TagData;
use super::super::errors::WebmError;
///
/// An enum describing different block lacing options.
///
/// This enum is based on the definition for [Lacing](https://www.matroska.org/technical/basics.html#lacing) as defined by the [Matroska Spec](http://www.matroska.org/technical/specs/index.html).
///
#[derive(PartialEq, Debug)]
pub enum BlockLacing {
    /// Xiph lacing (flag bits `0b0000_0100` in the Block flags byte).
    Xiph,
    /// EBML lacing (flag bits `0b0000_1000`).
    Ebml,
    /// Fixed-size lacing (flag bits `0b0000_1100`).
    FixedSize,
}
///
/// A typed interpretation of the Matroska "Block" element.
///
/// This struct has fields specific to the [Block](https://www.matroska.org/technical/basics.html#block-structure) element as defined by the [Matroska Spec](http://www.matroska.org/technical/specs/index.html). This struct implements `TryFrom<TagData>` and `Into<TagData>` to simplify coercion to and from regular [`TagData::Binary`] values.
///
/// ## Example
///
/// ```
/// # use std::convert::TryInto;
/// # use ebml_iterable::tags::TagData;
/// use webm_iterable::matroska_spec::Block;
///
/// let binary_tag_data = TagData::Binary(vec![0x81,0x00,0x01,0x9d,0x00,0x00,0x00]);
/// let mut block: Block = binary_tag_data.try_into().unwrap();
/// block.track = 3;
/// ```
///
pub struct Block {
    /// Raw frame data following the Block header bytes.
    pub payload: Vec<u8>,
    /// Track number (stored as an EBML vint on the wire).
    pub track: u64,
    /// Signed 16-bit value read directly after the track number — per
    /// the Matroska Block layout this is the relative timestamp
    /// (TODO confirm naming against the rest of the crate).
    pub value: i16,
    /// True when the "invisible" flag bit (0x10) is set.
    pub invisible: bool,
    /// Lacing mode taken from flag bits 0x0c, if any.
    pub lacing: Option<BlockLacing>,
}
impl TryFrom<TagData> for Block {
    type Error = WebmError;

    /// Parses a raw binary "Block" element: track vint, 16-bit value,
    /// one flags byte (invisible + lacing bits), then the payload.
    ///
    /// # Errors
    /// Returns [`WebmError::BlockCoercionError`] if the tag is not
    /// binary, the track vint cannot be read, or the data is too short
    /// for the value/flags bytes. (Previously a truncated buffer caused
    /// an out-of-bounds panic; checked slicing now yields an error.)
    fn try_from(value: TagData) -> Result<Self, Self::Error> {
        if let TagData::Binary(data) = value {
            let data = &data;
            let mut position: usize = 0;
            // Track number is an EBML variable-width integer.
            let (track, track_size) = ebml_tools::read_vint(data)
                .map_err(|_| WebmError::BlockCoercionError(String::from("Unable to read track data in Block.")))?
                .ok_or_else(|| WebmError::BlockCoercionError(String::from("Unable to read track data in Block.")))?;
            position += track_size;
            // Next two bytes: signed big-endian 16-bit value.
            let value: [u8; 2] = data
                .get(position..position + 2)
                .ok_or_else(|| WebmError::BlockCoercionError(String::from("Attempting to create Block tag, but binary data length was not 2")))?
                .try_into()
                .map_err(|_| WebmError::BlockCoercionError(String::from("Attempting to create Block tag, but binary data length was not 2")))?;
            let value = i16::from_be_bytes(value);
            position += 2;
            // One flags byte: bit 0x10 = invisible, bits 0x0c = lacing.
            let flags: u8 = *data
                .get(position)
                .ok_or_else(|| WebmError::BlockCoercionError(String::from("Unable to read flags in Block.")))?;
            position += 1;
            let invisible = (flags & 0x10) == 0x10;
            let lacing = match flags & 0x0c {
                0x0c => Some(BlockLacing::FixedSize),
                0x08 => Some(BlockLacing::Ebml),
                0x04 => Some(BlockLacing::Xiph),
                _ => None,
            };
            // Everything after the header is the frame payload
            // (`position <= data.len()` is guaranteed by the checks above).
            let payload = data[position..].to_vec();
            Ok(Block {
                payload,
                track,
                value,
                invisible,
                lacing,
            })
        } else {
            Err(WebmError::BlockCoercionError(String::from("Expected binary tag type for Block tag, but received a different type!")))
        }
    }
}
#[allow(clippy::from_over_into)]
impl Into<TagData> for Block {
fn into(self) -> TagData {
let mut result = Vec::with_capacity(self.payload.len() + 11);
result.extend_from_slice(&self.track.as_vint().expect("Unable to convert track value to vint"));
result.extend_from_slice(&self.value.to_be_bytes());
let mut flags: u8 = 0x00;
if self.invisible {
flags |= 0x10;
}
if self.lacing.is_some() {
match self.lacing.unwrap() {
BlockLacing::Xiph => { flags |= 0x04; },
BlockLacing::Ebml => { flags |= 0x08; },
BlockLacing::FixedSize => { flags |= 0x0c; },
}
}
result.extend_from_slice(&flags.to_be_bytes());
result.extend_from_slice(&self.payload);
TagData::Binary(result)
}
} |
use rocket_contrib::databases::{database, diesel};
#[database("unkso_main_forums")]
pub struct UnksoMainForums(diesel::MysqlConnection);
#[database("titan_primary")]
pub struct TitanPrimary(diesel::MysqlConnection);
|
use futures::channel::{mpsc, oneshot};
use futures::{SinkExt, Future, TryFutureExt};
use futures::future::FutureExt;
use std::fmt::Debug;
use std::panic::AssertUnwindSafe;
// This is a hack required because the json-rpc crate is not updated to tokio 0.2.
// We should watch the `jsonrpsee` crate and switch to that once it's ready.
/// Spawns `future` onto the executor fed by `task_sink` and awaits its
/// result, delivered back through a oneshot channel.
///
/// # Panics
/// Panics if the task receiver or either end of the oneshot channel is
/// dropped before the result is delivered.
pub async fn tokio02_spawn<I: Send + 'static, ER: Send + 'static>(
    mut task_sink: mpsc::Sender<Box<dyn std::future::Future<Output = ()> + Send + Unpin>>,
    future: impl std::future::Future<Output = Result<I, ER>> + Send + Unpin + 'static,
) -> Result<I, ER>
where
    I: Debug,
    ER: Debug,
{
    let (return_sender, return_receiver) = oneshot::channel();
    // Wrap the future so it forwards its result through the oneshot
    // channel, then hand it over for execution.
    task_sink
        .send(Box::new(future.map(move |res| {
            return_sender.send(res).expect("`return_receiver` dropped");
        })))
        .await
        .expect("task receiver dropped");
    return_receiver.await.expect("`return_sender` dropped")
}
/// Wraps `f` so that a panic inside it aborts the whole process instead
/// of unwinding into the executor.
pub fn abort_on_panic<T: Send + 'static>(
    f: impl Future<Output = T> + Send + 'static,
) -> impl Future<Output = T> {
    // We're crashing, unwind safety doesn't matter.
    AssertUnwindSafe(f).catch_unwind().unwrap_or_else(|_| {
        println!("Panic in tokio task, aborting!");
        std::process::abort()
    })
}
#[doc = r" Value read from the register"]
pub struct R {
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    bits: u32,
}
impl super::TEST {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: seed both views with the current value.
        // (Field-init shorthand replaces the redundant `bits: bits`.)
        let bits = self.register.get();
        let r = R { bits };
        let mut w = W { bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Start from the documented reset value, not the current one.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = "Possible values of the field `CONST_CARRIER`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CONST_CARRIERR {
    #[doc = "Constant carrier disabled."]
    DISABLED,
    #[doc = "Constant carrier enabled."]
    ENABLED,
}
impl CONST_CARRIERR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // ENABLED is the sole set-bit variant.
        matches!(*self, CONST_CARRIERR::ENABLED)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> CONST_CARRIERR {
        if value {
            CONST_CARRIERR::ENABLED
        } else {
            CONST_CARRIERR::DISABLED
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline]
    pub fn is_disabled(&self) -> bool {
        matches!(*self, CONST_CARRIERR::DISABLED)
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline]
    pub fn is_enabled(&self) -> bool {
        matches!(*self, CONST_CARRIERR::ENABLED)
    }
}
#[doc = "Possible values of the field `PLL_LOCK`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PLL_LOCKR {
    #[doc = "PLL lock disabled."]
    DISABLED,
    #[doc = "PLL lock enabled."]
    ENABLED,
}
impl PLL_LOCKR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // ENABLED is the sole set-bit variant.
        matches!(*self, PLL_LOCKR::ENABLED)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PLL_LOCKR {
        if value {
            PLL_LOCKR::ENABLED
        } else {
            PLL_LOCKR::DISABLED
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline]
    pub fn is_disabled(&self) -> bool {
        matches!(*self, PLL_LOCKR::DISABLED)
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline]
    pub fn is_enabled(&self) -> bool {
        matches!(*self, PLL_LOCKR::ENABLED)
    }
}
#[doc = "Values that can be written to the field `CONST_CARRIER`"]
pub enum CONST_CARRIERW {
    #[doc = "Constant carrier disabled."]
    DISABLED,
    #[doc = "Constant carrier enabled."]
    ENABLED,
}
impl CONST_CARRIERW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // Only ENABLED maps to a set bit.
        matches!(*self, CONST_CARRIERW::ENABLED)
    }
}
#[doc = r" Proxy"]
pub struct _CONST_CARRIERW<'a> {
    w: &'a mut W,
}
impl<'a> _CONST_CARRIERW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: CONST_CARRIERW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "Constant carrier disabled."]
    #[inline]
    pub fn disabled(self) -> &'a mut W {
        self.variant(CONST_CARRIERW::DISABLED)
    }
    #[doc = "Constant carrier enabled."]
    #[inline]
    pub fn enabled(self) -> &'a mut W {
        self.variant(CONST_CARRIERW::ENABLED)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field lives in bit 0: clear it, then OR in the new value.
        const OFFSET: u8 = 0;
        let mask = 1u32 << OFFSET;
        self.w.bits = (self.w.bits & !mask) | (u32::from(value) << OFFSET);
        self.w
    }
}
#[doc = "Values that can be written to the field `PLL_LOCK`"]
pub enum PLL_LOCKW {
    #[doc = "PLL lock disabled."]
    DISABLED,
    #[doc = "PLL lock enabled."]
    ENABLED,
}
impl PLL_LOCKW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // Only ENABLED maps to a set bit.
        matches!(*self, PLL_LOCKW::ENABLED)
    }
}
#[doc = r" Proxy"]
pub struct _PLL_LOCKW<'a> {
    w: &'a mut W,
}
impl<'a> _PLL_LOCKW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PLL_LOCKW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "PLL lock disabled."]
    #[inline]
    pub fn disabled(self) -> &'a mut W {
        self.variant(PLL_LOCKW::DISABLED)
    }
    #[doc = "PLL lock enabled."]
    #[inline]
    pub fn enabled(self) -> &'a mut W {
        self.variant(PLL_LOCKW::ENABLED)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field lives in bit 1: clear it, then OR in the new value.
        const OFFSET: u8 = 1;
        let mask = 1u32 << OFFSET;
        self.w.bits = (self.w.bits & !mask) | (u32::from(value) << OFFSET);
        self.w
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bit 0 - Constant carrier. Decision point: TXEN task."]
    #[inline]
    pub fn const_carrier(&self) -> CONST_CARRIERR {
        // Extract bit 0 and map it onto the field enum.
        CONST_CARRIERR::_from(self.bits & (1 << 0) != 0)
    }
    #[doc = "Bit 1 - PLL lock. Decision point: TXEN or RXEN task."]
    #[inline]
    pub fn pll_lock(&self) -> PLL_LOCKR {
        // Extract bit 1 and map it onto the field enum.
        PLL_LOCKR::_from(self.bits & (1 << 1) != 0)
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // No masking or validation: the caller is responsible for
        // writing only bit patterns that are valid for this register.
        self.bits = bits;
        self
    }
    #[doc = "Bit 0 - Constant carrier. Decision point: TXEN task."]
    #[inline]
    pub fn const_carrier(&mut self) -> _CONST_CARRIERW {
        // Proxy that writes bit 0 of this W value.
        _CONST_CARRIERW { w: self }
    }
    #[doc = "Bit 1 - PLL lock. Decision point: TXEN or RXEN task."]
    #[inline]
    pub fn pll_lock(&mut self) -> _PLL_LOCKW {
        // Proxy that writes bit 1 of this W value.
        _PLL_LOCKW { w: self }
    }
}
|
use super::types::Type;
use super::ast_printer::print_at_depth;
use super::types::IntegerTypeMetadata;
/// Pretty-prints `the_type` at the given indentation depth, recursing
/// one level deeper for pointer pointees and integer metadata.
pub fn print_type(the_type: &Type, depth: isize) {
    // Pointers recurse into their pointee; handle them first.
    if let Type::Pointer(meta) = the_type {
        print_at_depth("Type: Pointer to:".to_string(), depth);
        print_type(&meta.points_to, depth + 1);
        return;
    }
    // All remaining variants are integer-like and carry the same
    // metadata; select the label, then share one printing path.
    let (label, meta) = match the_type {
        Type::Char(meta) => ("Type: char", meta),
        Type::Short(meta) => ("Type: short", meta),
        Type::Int(meta) => ("Type: int", meta),
        Type::LongLongInt(meta) => ("Type: long long int", meta),
        Type::Pointer(_) => unreachable!(),
    };
    print_at_depth(label.to_string(), depth);
    print_int_meta(meta, depth + 1);
}
fn print_int_meta (meta: &IntegerTypeMetadata, depth: isize) {
let signed_string = if meta.signed { "yes" } else { "no" };
print_at_depth(format!("Signed: {}", signed_string), depth);
} |
#[macro_use]
extern crate error_chain;
extern crate tempdir;
extern crate walkdir;
use tempdir::TempDir;
use walkdir::WalkDir;
use std::fs;
use std::io::{self, BufRead};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::env;
// Location of the freshly built binaries, relative to the crate root.
const EXE_DIR: &'static str = "target/debug";
// Error type wrapping I/O and PATH-joining failures.
error_chain! {
    foreign_links {
        Io(::std::io::Error);
        JoinPaths(env::JoinPathsError);
    }
}
// Declares a #[test] that runs the named example end-to-end through
// `TestCase`; the optional trailing meta items (e.g. #[ignore]) are
// forwarded onto the generated test function.
macro_rules! example {
    ($func: ident, $name: expr) => {
        example!($func, $name, );
    };
    ($func: ident, $name: expr, $( #[$meta:meta] )*) => {
        #[test]
        $(#[$meta])*
        fn $func() {
            let tc = TestCase::new($name).expect("setup");
            tc.run().expect($name);
        }
    };
}
/// Recursively copies the tree at `src` into `dst`, creating each
/// directory and copying each regular file; panics on anything else.
fn copy_dir<P0: AsRef<Path>, P1: AsRef<Path>>(src: P0, dst: P1) -> Result<()> {
    for entry in WalkDir::new(&src) {
        let entry = entry.chain_err(|| "walkdir")?;
        // Rebase the entry's path from `src` onto `dst`.
        let relative = entry
            .path()
            .strip_prefix(&src)
            .chain_err(|| "path from test case")?;
        let dest = dst.as_ref().join(relative);
        let kind = entry.file_type();
        if kind.is_dir() {
            fs::create_dir(&dest).chain_err(|| "mkdir")?;
        } else if kind.is_file() {
            fs::copy(&entry.path(), &dest).chain_err(|| "copy file")?;
        } else {
            panic!("Unrecognised fs entity: {:?}: {:?}", entry.path(), entry.metadata())
        }
    }
    Ok(())
}
/// One end-to-end test case: a private copy of the fixture tree plus
/// the name of the example to run inside it.
#[derive(Debug)]
struct TestCase {
    // Root of the copied fixture tree.
    tmpdir: PathBuf,
    // Name of the example subdirectory under the fixture root.
    example: String,
}
impl TestCase {
    /// Set up a fixture for `example`: allocate a temp directory and fill it
    /// with a fresh copy of the whole `t/` test-case tree.
    fn new(example: &str) -> Result<TestCase> {
        let tmpdir = TempDir::new(example).chain_err(|| "TempDir::new")?;
        let basedir = "t";
        // Remove the directory TempDir just created: copy_dir re-creates it
        // as the destination root while walking `basedir`.
        fs::remove_dir_all(&tmpdir.path()).chain_err(|| "cleanup")?;
        copy_dir(&basedir, &tmpdir.path()).chain_err(|| "copy_dir")?;
        Ok(TestCase {
            // into_path() disables TempDir's auto-delete so the scratch tree
            // survives for post-mortem inspection.
            tmpdir: tmpdir.into_path(),
            example: example.to_owned(),
        })
    }
    /// Run `redonk redo <example>/all` inside the scratch tree, with EXE_DIR
    /// prepended to $PATH and the child's stdout/stderr captured to
    /// `target/<example>.out.txt` / `.err.txt`.
    fn run(&self) -> Result<()> {
        let cwd = env::current_dir()?;
        let exec_dir = cwd.join(EXE_DIR);
        let curr_path = env::var_os("PATH").chain_err(|| "lookup current $PATH")?;
        let mut paths = env::split_paths(&curr_path).collect::<Vec<_>>();
        // Prepend so the freshly built executables win over installed ones.
        paths.insert(0, exec_dir.clone());
        let stdout_name = PathBuf::from(format!("target/{}.out.txt", self.example));
        let stderr_name = PathBuf::from(format!("target/{}.err.txt", self.example));
        let mut cmd = Command::new(exec_dir.join("redonk"));
        cmd.arg("redo");
        cmd.arg(PathBuf::from(&self.example).join("all"));
        cmd.current_dir(&self.tmpdir);
        cmd.env("PATH", env::join_paths(paths)?);
        // Note: capture files are created relative to the parent's cwd (the
        // crate root), not the child's working directory.
        cmd.stdout(fs::File::create(&stdout_name)
            .chain_err(|| stdout_name.to_string_lossy().into_owned())?);
        cmd.stderr(fs::File::create(&stderr_name)
            .chain_err(|| stderr_name.to_string_lossy().into_owned())?);
        println!("Child stdout: {:?}; stderr: {:?}", stdout_name, stderr_name);
        let child = cmd.spawn()
            .chain_err(|| format!("Command::spawn: {:?}", cmd))?
            .wait()
            .chain_err(|| format!("Child::wait: {:?}", cmd))?;
        if child.success() {
            Ok(())
        } else {
            Err(format!("Child command: {:?} exited: {:?}", cmd, child).into())
        }
    }
    /// Path of this example's directory inside the scratch tree.
    fn example_dir(&self) -> PathBuf {
        self.tmpdir.join(&self.example)
    }
}
#[test]
fn t_000_set_minus_e() {
    let tc = TestCase::new("000-set-minus-e").expect("setup");
    tc.run().expect("000-set-minus-e");
    println!("Test case dir: {:?}", tc);
    // The build script writes a `log` file; it must contain exactly "ok".
    let file = fs::File::open(tc.example_dir().join("log")).expect("log file");
    let lines: Result<Vec<_>> = io::BufReader::new(file)
        .lines()
        .map(|l| l.map_err(Error::from))
        .collect();
    let log_content = lines.expect("log lines");
    assert_eq!(log_content, vec!["ok"]);
}
// Basic behaviors: argument handling, access times, and empty / unicode /
// space-containing target names.
example!(t_100_args, "100-args");
example!(t_101_atime, "101-atime");
example!(t_102_empty, "102-empty");
example!(t_103_unicode, "103-unicode");
example!(t_104_space, "104-space");
#[test]
fn t_110_compile() {
    let tc = TestCase::new("110-compile").expect("setup");
    tc.run().expect("110-compile");
    println!("Test case dir: {:?}", tc);
    // The build must have produced a `hello` binary in the example dir...
    let hello = tc.example_dir().join("hello");
    let _ = fs::metadata(&hello)
        .chain_err(|| format!("Built hello at {:?}", hello))
        .expect("hello");
    // ...and that binary must run successfully.
    let out = Command::new(&hello).output().expect("spawn hello");
    assert!(out.status.success(), "Compiled hello ({:?}) ran okay", hello);
}
// Remaining example suites; the commented-out entries are known to be
// unimplemented / not yet passing.
example!(t_111_compile2, "111-compile2");
example!(t_120_defaults_flat, "120-defaults-flat");
example!(t_121_defaults_nested, "121-defaults-nested");
example!(t_130_mode, "130-mode");
example!(t_140_shuffle, "140-shuffle");
example!(t_141_keep_going, "141-keep-going");
example!(t_200_shell, "200-shell");
example!(t_201_fail, "201-fail");
example!(t_202_del, "202-del");
example!(t_220_ifcreate, "220-ifcreate");
example!(t_250_makedir, "250-makedir");
example!(t_350_deps, "350-deps");
example!(t_550_chdir, "550-chdir");
// example!(t_640_always, "640-always");
// example!(t_660_stamp, "660-stamp");
example!(t_950_curse, "950-curse");
// example!(t_999_installer, "999-installer");
|
use iron::prelude::*;
use iron::status;
use persistent::{Read, Write};
use hyper::header::*;
use hyper::mime::*;
use bodyparser;
use dal;
use std::ops::Deref;
use chrono::prelude::*;
use serde_json;
use std::str;
//use uuid;
//use bcrypt;
//use jsonwebtoken::{encode, Header};
//use configmisc;
use uuid;
use slog;
use server::loggerenclave::LoggerEnclave;
/// Product record as exchanged as JSON with clients and stored in the
/// PRODUCT table (see add_product / edit_product).
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Product {
    pub id: uuid::Uuid,
    pub name: String,
    pub description: String,
    // Foreign keys to the manufacturer and supplier records.
    pub manufacturer_id: uuid::Uuid,
    pub supplier_id: uuid::Uuid,
    // GTIN barcodes (12- and 13-digit variants), kept as strings.
    pub gtin12: String,
    pub gtin13: String,
}
/// GET handler: query the PRODUCT table and return the last row serialized
/// as JSON. Returns 404 (with an error logged) on any pool/query failure.
pub fn get_products(req: &mut Request) -> IronResult<Response> {
    // JSON shape returned to the client for each product row.
    #[derive(Serialize, Deserialize, Debug)]
    pub struct DbData {
        pub id: i32,
        pub name: String,
        pub timestamp: i64,
    }
    let logger: slog::Logger = get_logger!(req);
    info!(logger, "in get_products");
    // Default response; only upgraded to 200 once a row serializes cleanly.
    let mut resp = Response::with(status::NotFound);
    let arcpool = match req.get::<Write<dal::DalPostgresPool>>() {
        Ok(x) => x,
        Err(e) => {
            error!(
                logger,
                "Unable to get connection pool, error message [{}]",
                e
            );
            return Ok(resp);
        }
    };
    let lockedpool = match arcpool.lock() {
        Ok(x) => x,
        Err(e) => {
            error!(
                logger,
                "Unable to get lock on connection pool, error message [{}]",
                e
            );
            return Ok(resp);
        }
    };
    let pool = lockedpool.deref();
    let conn = match pool.rw_pool.get() {
        Ok(x) => x,
        Err(e) => {
            error!(
                logger,
                "Unable to get connection from pool, error message [{}]",
                e
            );
            return Ok(resp);
        }
    };
    // BUG FIX: the query previously selected only `id, name`, but the row
    // processing below reads the `timestamp` column, which would panic at
    // row.get("timestamp").
    let stmt = match conn.prepare("SELECT id, name, timestamp FROM PRODUCT") {
        Ok(x) => x,
        Err(e) => {
            error!(logger, "Unable to prepare statement, error message [{}]", e);
            return Ok(resp);
        }
    };
    let rows = match stmt.query(&[]) {
        Ok(x) => x,
        Err(e) => {
            error!(logger, "Unable to execute query, error message [{}]", e);
            return Ok(resp);
        }
    };
    // BUG FIX: previously declared inside the loop, so it was reset to 0 on
    // every iteration and always logged as 0.
    let mut iteration_count = 0;
    for row in rows.iter() {
        let _id: i32 = row.get("id");
        let _name: String = row.get("name");
        /*// time crate
        let _timestamp: Timespec = row.get("timestamp");
        let utc_tm: Tm = at_utc(_timestamp);
        let local_tm: Tm = utc_tm.to_local();
        info!(
            logger,
            "row [{}, {}, utc {}, local {}] ",
            _id,
            _name,
            utc_tm.asctime(),
            local_tm.asctime()
        );
        */
        // chrono crate
        let _datetime_utc: DateTime<Utc> = row.get("timestamp");
        let _datetime_local: DateTime<Local> = row.get("timestamp");
        info!(logger,
            "row [{}, {}, utc {}, local {}] ",
            _id,
            _name,
            _datetime_utc.to_rfc2822(),
            _datetime_local.to_rfc2822(),
        );
        let data: DbData = DbData {
            id: _id,
            name: _name,
            //timestamp: _timestamp.sec, // time crate
            timestamp: _datetime_utc.timestamp(), // chrono crate
        };
        // Each row overwrites the previous response, so the last row wins.
        if let Ok(json_resp) = serde_json::to_string(&data) {
            resp = Response::with((status::Ok, json_resp));
            resp.headers.set(ContentType(
                Mime(TopLevel::Application, SubLevel::Json, vec![]),
            ));
        };
        info!(logger, "iteration_count = {}", iteration_count);
        iteration_count += 1;
        //break; // we only need first element
    }
    Ok(resp)
}
/// POST handler: deserialize a `Product` from the JSON request body and
/// INSERT it into the PRODUCT table. Returns 200 on success, 404 (with an
/// error logged) on any body-parsing, pool, or database failure.
pub fn add_product(req: &mut Request) -> IronResult<Response> {
    let mut resp = Response::with(status::NotFound);
    let logger: slog::Logger = get_logger!(req);
    //{}
    // Log the raw body first for debugging, then parse it into a Product.
    let rbody = req.get::<bodyparser::Json>();
    info!(logger, "rbody {:?}", rbody);
    let product = match req.get::<bodyparser::Struct<Product>>() {
        Ok(Some(x)) => x,
        _ => {
            info!(logger, "Unable to get Product data from request");
            return Ok(resp);
        }
    };
    info!(logger, "product = {:?}", product);
    let arcpool = match req.get::<Write<dal::DalPostgresPool>>() {
        Ok(x) => x,
        Err(e) => {
            error!(
                logger,
                "Unable to get connection pool, error message [{}]",
                e
            );
            return Ok(resp);
        }
    };
    let lockedpool = match arcpool.lock() {
        Ok(x) => x,
        Err(e) => {
            error!(
                logger,
                "Unable to get lock on connection pool, error message [{}]",
                e
            );
            return Ok(resp);
        }
    };
    let pool = lockedpool.deref();
    let conn = match pool.rw_pool.get() {
        Ok(x) => x,
        Err(e) => {
            error!(
                logger,
                // FIX: "erro" typo in the log message.
                "Unable to get connection from pool, error message [{}]",
                e
            );
            return Ok(resp);
        }
    };
    let mut str_stmt: String = "INSERT INTO PRODUCT (id, name, description, manufacturer_id, supplier_id, gtin12, gtin13)".to_string();
    str_stmt += " VALUES ($1, $2, $3, $4, $5, $6, $7);";
    let stmt = match conn.prepare(&str_stmt) {
        Ok(x) => x,
        Err(e) => {
            error!(logger, "Unable to prepare statement, error message [{}]", e);
            return Ok(resp);
        }
    };
    // Parameters are bound positionally; order must match the column list.
    let _res = match stmt.execute(&[
        &product.id,
        &product.name,
        &product.description,
        &product.manufacturer_id,
        &product.supplier_id,
        &product.gtin12,
        &product.gtin13
    ]) {
        Ok(x) => x,
        Err(e) => {
            error!(logger, "Unable to add product into database, error message [{}]", e);
            return Ok(resp);
        }
    };
    info!(logger, "Successfully added product to database {}", product.id);
    resp = Response::with(status::Ok);
    Ok(resp)
}
/// PUT handler: deserialize a `Product` from the JSON request body and
/// UPDATE the matching PRODUCT row (matched on id). Returns 200 on success,
/// 404 (with an error logged) on any body-parsing, pool, or database failure.
pub fn edit_product(req: &mut Request) -> IronResult<Response> {
    let mut resp = Response::with(status::NotFound);
    let logger: slog::Logger = get_logger!(req);
    //{}
    // Log the raw body first for debugging, then parse it into a Product.
    let rbody = req.get::<bodyparser::Json>();
    info!(logger, "rbody {:?}", rbody);
    let product = match req.get::<bodyparser::Struct<Product>>() {
        Ok(Some(x)) => x,
        _ => {
            info!(logger, "Unable to get Product data from request");
            return Ok(resp);
        }
    };
    info!(logger, "product = {:?}", product);
    let arcpool = match req.get::<Write<dal::DalPostgresPool>>() {
        Ok(x) => x,
        Err(e) => {
            error!(
                logger,
                "Unable to get connection pool, error message [{}]",
                e
            );
            return Ok(resp);
        }
    };
    let lockedpool = match arcpool.lock() {
        Ok(x) => x,
        Err(e) => {
            error!(
                logger,
                "Unable to get lock on connection pool, error message [{}]",
                e
            );
            return Ok(resp);
        }
    };
    let pool = lockedpool.deref();
    let conn = match pool.rw_pool.get() {
        Ok(x) => x,
        Err(e) => {
            error!(
                logger,
                // FIX: "erro" typo in the log message.
                "Unable to get connection from pool, error message [{}]",
                e
            );
            return Ok(resp);
        }
    };
    // BUG FIX: the statement previously ended with a stray ')' after
    // `gtin13=$7`, which is invalid SQL and made every edit fail at prepare.
    let mut str_stmt: String = "UPDATE PRODUCT SET name=$2, description=$3, manufacturer_id=$4, supplier_id=$5, gtin12=$6, gtin13=$7".to_string();
    str_stmt += " WHERE id=$1;";
    let stmt = match conn.prepare(&str_stmt) {
        Ok(x) => x,
        Err(e) => {
            error!(logger, "Unable to prepare statement, error message [{}]", e);
            return Ok(resp);
        }
    };
    // $1 is the id used by the WHERE clause; the rest are the new values.
    let _res = match stmt.execute(&[
        &product.id,
        &product.name,
        &product.description,
        &product.manufacturer_id,
        &product.supplier_id,
        &product.gtin12,
        &product.gtin13
    ]) {
        Ok(x) => x,
        Err(e) => {
            error!(logger, "Unable to edit product in database, error message [{}]", e);
            return Ok(resp);
        }
    };
    info!(logger, "Successfully edited product in database {}", product.id);
    resp = Response::with(status::Ok);
    Ok(resp)
}
|
mod utils;
fn main() {
    let data = utils::load_input("./data/day_8.txt").unwrap();
    // Parse each "op ±arg" line into an (opcode, argument) pair.
    let instructions: Vec<(&str, i32)> = data
        .lines()
        .map(|line| {
            let mut parts = line.split(" ");
            let op = parts.next().unwrap();
            let arg = parts.next().unwrap().parse::<i32>().unwrap();
            (op, arg)
        })
        .collect();
    println!("Answer 1/2 {}", answer_1(&instructions));
    println!("Answer 2/2 {}", answer_2(&instructions));
}
/// Part 2: find the accumulator value once a single `jmp`<->`nop` swap makes
/// the program terminate (cursor walks past the last instruction).
///
/// Brute-force strategy: run the program, swapping the first not-yet-tried
/// `jmp`/`nop` encountered (`switch_recorder` remembers swaps already
/// attempted across runs). When an instruction is about to execute twice
/// (infinite loop), reset everything except `switch_recorder` and retry,
/// which makes the next candidate swap get tried.
///
/// Takes a slice instead of `&Vec` (idiomatic; callers passing `&vec` still
/// work via deref coercion).
fn answer_2(instructions: &[(&str, i32)]) -> i32 {
    let mut acc: i32 = 0;
    let mut cursor: usize = 0;
    let len = instructions.len();
    // Instructions executed during the current attempt (loop detection).
    let mut recorder = vec![false; len];
    // Swaps already attempted; survives resets.
    let mut switch_recorder = recorder.clone();
    // At most one swap is made per attempt.
    let mut switched_once = false;
    while cursor < len {
        if recorder[cursor] {
            // Looped: reset everything but switch_recorder and start over.
            cursor = 0;
            acc = 0;
            switched_once = false;
            recorder = vec![false; len];
        }
        recorder[cursor] = true;
        let current_instruction = instructions[cursor];
        match current_instruction.0 {
            "acc" => {
                acc += current_instruction.1;
                cursor += 1;
            }
            "jmp" => {
                if switched_once || switch_recorder[cursor] {
                    // Default behavior: jump.
                    cursor = (cursor as i32 + current_instruction.1) as usize
                } else {
                    switched_once = true;
                    switch_recorder[cursor] = true;
                    // Swapped behavior: treat as nop this run.
                    cursor += 1;
                }
            }
            "nop" => {
                // `nop +0` is never swapped: it would become `jmp +0`,
                // an immediate self-loop.
                if current_instruction.1 == 0 || switched_once || switch_recorder[cursor] {
                    // Default behavior: fall through.
                    cursor += 1;
                } else {
                    switched_once = true;
                    switch_recorder[cursor] = true;
                    // Swapped behavior: treat as jmp this run.
                    cursor = (cursor as i32 + current_instruction.1) as usize
                }
            }
            _ => (),
        }
    }
    acc
}
/// Part 1: execute the program until any instruction is about to run a
/// second time, then return the accumulator value at that point.
///
/// Takes a slice instead of `&Vec` (idiomatic; callers passing `&vec` still
/// work via deref coercion).
fn answer_1(instructions: &[(&str, i32)]) -> i32 {
    let mut acc: i32 = 0;
    let mut cursor: usize = 0;
    // Marks instructions that have already executed (loop detection).
    let mut recorder = vec![false; instructions.len()];
    loop {
        if recorder[cursor] {
            break;
        }
        recorder[cursor] = true;
        let current_instruction = instructions[cursor];
        match current_instruction.0 {
            "acc" => {
                acc += current_instruction.1;
                cursor += 1;
            }
            "jmp" => cursor = (cursor as i32 + current_instruction.1) as usize,
            // "nop" (and anything unrecognized) just advances.
            _ => cursor += 1,
        }
    }
    acc
}
|
use std::collections::HashMap;
use std::ops::Range;
use lalrpop_util::ParseError as LalrpopError;
use super::error::{ParseError, PolarError, PolarResult, RuntimeError};
use super::kb::KnowledgeBase;
use super::lexer::Token;
use super::rules::*;
use super::terms::*;
// TODO(gj): round up longhand `has_permission/3` and `has_role/3` rules to incorporate their
// referenced permissions & roles (implied & implier side) into the exhaustiveness checks.
// TODO(gj): round up longhand `has_relation/3` rules to check that every declared `relation` has a
// corresponding `has_relation/3` implementation.
// TODO(gj): disallow same string to be declared as a perm/role and a relation.
// This'll come into play for "owner"-style actor relationships.
// This type is used as a pre-validation bridge between LALRPOP & Rust.
/// Raw parser output for one statement inside a `resource` block, prior to
/// semantic validation (see `turn_productions_into_namespace`).
#[derive(Debug)]
pub enum Production {
    Roles(Term),                             // List<String>
    Permissions(Term),                       // List<String>
    Relations(Term),                         // Dict<Symbol, Symbol>
    Implication(Term, (Term, Option<Term>)), // (String, (String, Option<String>))
}
pub fn validate_relation_keyword(
(keyword, relation): (Term, Term),
) -> Result<Term, LalrpopError<usize, Token, error::ParseError>> {
if keyword.value().as_symbol().unwrap().0 == "on" {
Ok(relation)
} else {
let (loc, ranges) = (keyword.offset(), vec![]);
let msg = format!(
"Unexpected relation keyword '{}'. Did you mean 'on'?",
keyword
);
Err(LalrpopError::User {
error: ParseError::ParseSugar { loc, msg, ranges },
})
}
}
// TODO(gj): Create a Parsed<Term> or something that _always_ has source info.
/// Source span of `term` as a half-open byte range.
fn term_source_range(term: &Term) -> Range<usize> {
    let (start, end) = term.span().unwrap();
    Range { start, end }
}
/// Validate a parsed `<name> = <term>;` declaration from a resource block,
/// mapping it to the matching `Production` or returning a parse-sugar error
/// with a "did you mean" hint for recognizable mistakes.
pub fn validate_parsed_declaration(
    (name, term): (Symbol, Term),
) -> Result<Production, LalrpopError<usize, Token, error::ParseError>> {
    match (name.0.as_ref(), term.value()) {
        // Well-formed declarations.
        ("roles", Value::List(_)) => Ok(Production::Roles(term)),
        ("permissions", Value::List(_)) => Ok(Production::Permissions(term)),
        ("relations", Value::Dictionary(_)) => Ok(Production::Relations(term)),
        // Known keyword but wrong shape: roles/permissions must be lists.
        ("roles", Value::Dictionary(_)) | ("permissions", Value::Dictionary(_)) => {
            let (loc, ranges) = (term.offset(), vec![term_source_range(&term)]);
            let msg = format!("Expected '{}' declaration to be a list of strings; found a dictionary:\n", name);
            Err(LalrpopError::User { error: ParseError::ParseSugar { loc, msg, ranges } })
        }
        // ...and relations must be a dictionary.
        ("relations", Value::List(_)) => Err(LalrpopError::User {
            error: ParseError::ParseSugar {
                loc: term.offset(),
                msg: "Expected 'relations' declaration to be a dictionary; found a list:\n".to_owned(),
                ranges: vec![term_source_range(&term)],
            },
        }),
        // Unknown keyword: suggest the declaration kind matching the shape.
        (_, Value::List(_)) => Err(LalrpopError::User {
            error: ParseError::ParseSugar {
                loc: term.offset(),
                msg: format!(
                    "Unexpected declaration '{}'. Did you mean for this to be 'roles = [ ... ];' or 'permissions = [ ... ];'?\n", name
                ),
                ranges: vec![term_source_range(&term)],
            },
        }),
        (_, Value::Dictionary(_)) => Err(LalrpopError::User {
            error: ParseError::ParseSugar {
                loc: term.offset(),
                msg: format!(
                    "Unexpected declaration '{}'. Did you mean for this to be 'relations = {{ ... }};'?\n", name
                ),
                ranges: vec![term_source_range(&term)],
            },
        }),
        // Assumed unreachable: the declaration grammar only produces list or
        // dictionary values here — TODO confirm against the grammar.
        _ => unreachable!(),
    }
}
/// Assemble the validated productions of a `resource <name> { ... }` block
/// into a `Namespace`, rejecting a non-`resource` keyword and duplicate
/// roles/permissions/relations declarations.
pub fn turn_productions_into_namespace(
    keyword: Term,
    resource: Term,
    productions: Vec<Production>,
) -> Result<Namespace, LalrpopError<usize, Token, error::ParseError>> {
    // Only `resource` blocks are supported as namespaces.
    if keyword.value().as_symbol().unwrap().0 != "resource" {
        let (loc, ranges) = (keyword.offset(), vec![]);
        let msg = format!("Expected 'resource' but found '{}'.", keyword.to_polar());
        let error = ParseError::ParseSugar { loc, msg, ranges };
        return Err(LalrpopError::User { error });
    }
    let mut roles: Option<Term> = None;
    let mut permissions: Option<Term> = None;
    let mut relations: Option<Term> = None;
    let mut implications = vec![];
    // Shared error constructor for the three "declared twice" cases below;
    // points at both the previous and the new declaration.
    let make_error = |name: &str, previous: &Term, new: &Term| {
        let loc = new.offset();
        let ranges = vec![term_source_range(previous), term_source_range(new)];
        let msg = format!(
            "Multiple '{}' declarations in {} namespace.\n",
            name,
            resource.to_polar()
        );
        ParseError::ParseSugar { loc, msg, ranges }
    };
    for production in productions {
        match production {
            Production::Roles(new) => {
                if let Some(previous) = roles {
                    let error = make_error("roles", &previous, &new);
                    return Err(LalrpopError::User { error });
                }
                roles = Some(new);
            }
            Production::Permissions(new) => {
                if let Some(previous) = permissions {
                    let error = make_error("permissions", &previous, &new);
                    return Err(LalrpopError::User { error });
                }
                permissions = Some(new);
            }
            Production::Relations(new) => {
                if let Some(previous) = relations {
                    let error = make_error("relations", &previous, &new);
                    return Err(LalrpopError::User { error });
                }
                relations = Some(new);
            }
            Production::Implication(head, body) => {
                // TODO(gj): Warn the user on duplicate implication definitions.
                implications.push(Implication { head, body });
            }
        }
    }
    Ok(Namespace {
        resource,
        roles,
        permissions,
        relations,
        implications,
    })
}
/// The kind a declared term may have inside a resource namespace.
#[derive(Clone, Debug)]
pub enum Declaration {
    Role,
    Permission,
    /// `Term` is a `Symbol` that is the (registered) type of the relation. E.g., `Org` in `parent: Org`.
    Relation(Term),
}
/// One implication inside a resource block: `"x" if "y";` or
/// `"x" if "y" on "z";`.
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct Implication {
    /// `Term` is a `String`. E.g., `"member"` in `"member" if "owner";`.
    pub head: Term,
    /// Both terms are strings. The former is the 'implier' and the latter is the 'relation', e.g.,
    /// `"owner"` and `"parent"`, respectively, in `"writer" if "owner" on "parent";`.
    pub body: (Term, Option<Term>),
}
impl Implication {
    /// Rewrite this implication into an ordinary Polar `Rule` for
    /// `namespace`, using `namespaces` to resolve the rule names (has_role /
    /// has_permission / has_relation) for the head and body terms.
    pub fn as_rule(&self, namespace: &Term, namespaces: &Namespaces) -> PolarResult<Rule> {
        let Self { head, body } = self;
        // Copy SourceInfo from head of implication.
        // TODO(gj): assert these can only be None in tests.
        let src_id = head.get_source_id().unwrap_or(0);
        let (start, end) = head.span().unwrap_or((0, 0));
        let name = namespaces.get_rule_name_for_declaration_in_namespace(head, namespace)?;
        let params = implication_head_to_params(head, namespace);
        let body = implication_body_to_rule_body(body, namespace, namespaces)?;
        Ok(Rule::new_from_parser(
            src_id, start, end, name, params, body,
        ))
    }
}
/// Per-resource index from declared term (role/permission string or
/// stringified relation name) to its `Declaration` kind.
type Declarations = HashMap<Term, Declaration>;
impl Declaration {
    /// Extract the relation's type term; roles and permissions are a
    /// runtime type error.
    fn as_relation_type(&self) -> PolarResult<&Term> {
        match self {
            Declaration::Relation(relation) => Ok(relation),
            other => Err(RuntimeError::TypeError {
                msg: format!("Expected Relation; got: {:?}", other),
                stack_trace: None,
            }
            .into()),
        }
    }
    /// Name of the rewritten rule that this declaration kind produces.
    fn as_rule_name(&self) -> Symbol {
        match self {
            Declaration::Role => sym!("has_role"),
            Declaration::Permission => sym!("has_permission"),
            Declaration::Relation(_) => sym!("has_relation"),
        }
    }
}
/// Validated (but not yet rewritten) contents of one `resource Foo { ... }`
/// block; produced by `turn_productions_into_namespace`.
#[derive(Clone, Debug, PartialEq)]
pub struct Namespace {
    pub resource: Term,
    // Original parsed declaration terms, present only if declared.
    pub roles: Option<Term>,
    pub permissions: Option<Term>,
    pub relations: Option<Term>,
    pub implications: Vec<Implication>,
}
/// Registry of all resource namespaces loaded so far.
#[derive(Clone, Default)]
pub struct Namespaces {
    /// Map from resource (`Symbol`) to the declarations for that resource.
    declarations: HashMap<Term, Declarations>,
    /// Map from resource to the implications declared in its block.
    pub implications: HashMap<Term, Vec<Implication>>,
}
impl Namespaces {
    /// Empty registry.
    pub fn new() -> Self {
        Self {
            declarations: HashMap::new(),
            implications: HashMap::new(),
        }
    }
    /// Forget all registered namespaces.
    pub fn clear(&mut self) {
        self.declarations.clear();
        self.implications.clear();
    }
    /// Register `resource` with its declaration index and implications.
    fn add(&mut self, resource: Term, declarations: Declarations, implications: Vec<Implication>) {
        self.declarations.insert(resource.clone(), declarations);
        self.implications.insert(resource, implications);
    }
    /// Has a namespace already been registered for `resource`?
    fn exists(&self, resource: &Term) -> bool {
        self.declarations.contains_key(resource)
    }
    /// Look up `declaration` in `namespace`.
    ///
    /// Invariant: `namespace` _must_ exist.
    fn get_declaration_in_namespace(
        &self,
        declaration: &Term,
        namespace: &Term,
    ) -> PolarResult<&Declaration> {
        if let Some(declaration) = self.declarations[namespace].get(declaration) {
            Ok(declaration)
        } else {
            let (loc, ranges) = (declaration.offset(), vec![]);
            let msg = format!("Undeclared term {} referenced in implication in {} namespace. Did you mean to declare it as a role, permission, or relation?", declaration.to_polar(), namespace);
            Err(ParseError::ParseSugar { loc, msg, ranges }.into())
        }
    }
    /// Look up `relation` in `namespace` and return its type.
    fn get_relation_type_in_namespace(
        &self,
        relation: &Term,
        namespace: &Term,
    ) -> PolarResult<&Term> {
        self.get_declaration_in_namespace(relation, namespace)?
            .as_relation_type()
    }
    /// Look up `declaration` in `namespace` and return the appropriate rule name for rewriting.
    fn get_rule_name_for_declaration_in_namespace(
        &self,
        declaration: &Term,
        namespace: &Term,
    ) -> PolarResult<Symbol> {
        Ok(self
            .get_declaration_in_namespace(declaration, namespace)?
            .as_rule_name())
    }
    /// Traverse from `namespace` to a related namespace via `relation`, then look up `declaration`
    /// in the related namespace and return the appropriate rule name for rewriting.
    fn get_rule_name_for_declaration_in_related_namespace(
        &self,
        declaration: &Term,
        relation: &Term,
        namespace: &Term,
    ) -> PolarResult<Symbol> {
        let related_namespace = self.get_relation_type_in_namespace(relation, namespace)?;
        if let Some(declarations) = self.declarations.get(related_namespace) {
            if let Some(declaration) = declarations.get(declaration) {
                Ok(declaration.as_rule_name())
            } else {
                // Relation resolved, but the term isn't declared over there.
                let (loc, ranges) = (declaration.offset(), vec![]);
                let msg = format!("{}: Term {} not declared on related resource {}. Did you mean to declare it as a role, permission, or relation on resource {}?", namespace.to_polar(), declaration.to_polar(), related_namespace.to_polar(), related_namespace.to_polar());
                Err(ParseError::ParseSugar { loc, msg, ranges }.into())
            }
        } else {
            // The relation's declared type has no namespace of its own.
            let (loc, ranges) = (related_namespace.offset(), vec![]);
            let msg = format!("{}: Relation {} in implication body `{} on {}` has type {}, but no such namespace exists. Try declaring one: `resource {} {{}}`", namespace.to_polar(), relation.to_polar(), declaration.to_polar(), relation.to_polar(), related_namespace.to_polar(), related_namespace.to_polar());
            Err(ParseError::ParseSugar { loc, msg, ranges }.into())
        }
    }
}
/// Collect an error for every declared relation whose type is not a
/// registered class, across all namespaces in the knowledge base.
pub fn check_all_relation_types_have_been_registered(kb: &KnowledgeBase) -> Vec<PolarError> {
    kb.namespaces
        .declarations
        .values()
        .flatten()
        .filter_map(|(declaration, kind)| match kind {
            Declaration::Relation(relation_type) => {
                relation_type_is_registered(kb, (declaration, relation_type)).err()
            }
            _ => None,
        })
        .collect()
}
/// Build the declaration index for one resource: every role, permission, and
/// (stringified) relation maps to its `Declaration` kind. Duplicate or
/// conflicting declarations produce parse-sugar errors.
fn index_declarations(
    roles: Option<Term>,
    permissions: Option<Term>,
    relations: Option<Term>,
    resource: &Term,
) -> PolarResult<HashMap<Term, Declaration>> {
    let mut declarations = HashMap::new();
    if let Some(roles) = roles {
        for role in roles.value().as_list()? {
            // insert() returning Some means the same role appeared twice.
            if declarations
                .insert(role.clone(), Declaration::Role)
                .is_some()
            {
                let (loc, ranges) = (role.offset(), vec![]);
                let msg = format!(
                    "{}: Duplicate declaration of {} in the roles list.",
                    resource.to_polar(),
                    role.to_polar()
                );
                return Err(ParseError::ParseSugar { loc, msg, ranges }.into());
            }
        }
    }
    if let Some(permissions) = permissions {
        for permission in permissions.value().as_list()? {
            // A clash here is either a duplicate permission or a term that
            // was already declared as a role; report accordingly.
            if let Some(previous) = declarations.insert(permission.clone(), Declaration::Permission)
            {
                let msg = if matches!(previous, Declaration::Permission) {
                    format!(
                        "{}: Duplicate declaration of {} in the permissions list.",
                        resource.to_polar(),
                        permission.to_polar()
                    )
                } else {
                    format!(
                        "{}: {} declared as a permission but it was previously declared as a role.",
                        resource.to_polar(),
                        permission.to_polar()
                    )
                };
                let (loc, ranges) = (permission.offset(), vec![]);
                return Err(ParseError::ParseSugar { loc, msg, ranges }.into());
            }
        }
    }
    if let Some(relations) = relations {
        for (relation, relation_type) in &relations.value().as_dict()?.fields {
            // Stringify relation so that we can index into the declarations map with a string
            // reference to the relation. E.g., relation `creator: User` gets stored as `"creator"
            // => Relation(User)` so that when we encounter an implication `"admin" if "creator";`
            // we can easily look up what type of declaration `"creator"` is.
            let stringified_relation = relation_type.clone_with_value(value!(relation.0.as_str()));
            let declaration = Declaration::Relation(relation_type.clone());
            if let Some(previous) = declarations.insert(stringified_relation, declaration) {
                let msg = match previous {
                    Declaration::Role => format!(
                        "{}: '{}' declared as a relation but it was previously declared as a role.",
                        resource.to_polar(),
                        relation.to_polar()
                    ),
                    Declaration::Permission => format!(
                        "{}: '{}' declared as a relation but it was previously declared as a permission.",
                        resource.to_polar(),
                        relation.to_polar()
                    ),
                    _ => unreachable!("duplicate dict keys aren't parseable"),
                };
                let (loc, ranges) = (relation_type.offset(), vec![]);
                return Err(ParseError::ParseSugar { loc, msg, ranges }.into());
            }
        }
    }
    Ok(declarations)
}
/// Derive a variable name from a namespace's name by lowercasing it, e.g.
/// `Repo` -> `repo`.
fn namespace_as_var(namespace: &Term) -> Value {
    let name = &namespace.value().as_symbol().expect("sym").0;
    let mut variable = name.to_lowercase();
    // A namespace that is already all-lowercase would collide with its own
    // name; disambiguate the variable with an "_instance" suffix.
    if &variable == name {
        variable.push_str("_instance");
    }
    value!(sym!(variable))
}
/// Turn an implication body into an And-wrapped call (for a local implication) or pair of calls
/// (for a cross-resource implication).
///
/// Errors from the declaration lookups (undeclared terms, missing related
/// namespaces) propagate out via `?`.
fn implication_body_to_rule_body(
    (implier, relation): &(Term, Option<Term>),
    namespace: &Term,
    namespaces: &Namespaces,
) -> PolarResult<Term> {
    // Create a variable derived from the name of the current namespace. E.g., if we're in the
    // `Repo` namespace, the variable name will be `repo`.
    let namespace_var = implier.clone_with_value(namespace_as_var(namespace));
    // The actor variable will always be named `actor`.
    let actor_var = implier.clone_with_value(value!(sym!("actor")));
    // If there's a relation, e.g., `if <implier> on <relation>`...
    if let Some(relation) = relation {
        // TODO(gj): what if the relation is with the same type? E.g.,
        // `Dir { relations = { parent: Dir }; }`. This might cause Polar to loop.
        // ...then we need to link the rewritten `<implier>` and `<relation>` rules via a shared
        // variable. To be clever, we'll name the variable according to the type of the relation,
        // e.g., if the declared relation is `parent: Org` we'll name the variable `org`.
        let relation_type = namespaces.get_relation_type_in_namespace(relation, namespace)?;
        let relation_type_var = relation.clone_with_value(namespace_as_var(relation_type));
        // For the rewritten `<relation>` call, the rule name will always be `has_relation` and the
        // arguments, in order, will be: the shared variable we just created above, the
        // `<relation>` string, and the namespace variable we created at the top of the function.
        // E.g., `vec![org, "parent", repo]`.
        let relation_call = relation.clone_with_value(value!(Call {
            name: sym!("has_relation"),
            args: vec![relation_type_var.clone(), relation.clone(), namespace_var],
            kwargs: None
        }));
        // To get the rule name for the rewritten `<implier>` call, we need to figure out what
        // type (role, permission, or relation) `<implier>` is declared as _in the namespace
        // related to the current namespace via `<relation>`_. That is, given
        // `resource Repo { roles=["writer"]; relations={parent:Org}; "writer" if "owner" on "parent"; }`,
        // we need to find out whether `"owner"` is declared as a role, permission, or relation in
        // the `Org` namespace. The args for the rewritten `<implier>` call are, in order: the
        // actor variable, the `<implier>` string, and the shared variable we created above for the
        // related type.
        let implier_call = implier.clone_with_value(value!(Call {
            name: namespaces
                .get_rule_name_for_declaration_in_related_namespace(implier, relation, namespace)?,
            args: vec![actor_var, implier.clone(), relation_type_var],
            kwargs: None
        }));
        // Wrap the rewritten `<relation>` and `<implier>` calls in an `And`.
        Ok(implier.clone_with_value(value!(op!(And, relation_call, implier_call))))
    } else {
        // If there's no `<relation>` (e.g., `... if "writer";`), we're dealing with a local
        // implication, and the rewriting process is a bit simpler. To get the appropriate rule
        // name, we look up the declared type (role, permission, or relation) of `<implier>` in the
        // current namespace. The call's args are, in order: the actor variable, the `<implier>`
        // string, and the namespace variable. E.g., `vec![actor, "writer", repo]`.
        let implier_call = implier.clone_with_value(value!(Call {
            name: namespaces.get_rule_name_for_declaration_in_namespace(implier, namespace)?,
            args: vec![actor_var, implier.clone(), namespace_var],
            kwargs: None
        }));
        // Wrap the rewritten `<implier>` call in an `And`.
        Ok(implier.clone_with_value(value!(op!(And, implier_call))))
    }
}
/// Turn an implication head into a trio of params that go in the head of the rewritten rule:
/// the actor variable, the head string itself, and the namespace variable
/// specialized on the namespace's class.
fn implication_head_to_params(head: &Term, namespace: &Term) -> Vec<Parameter> {
    let namespace_name = &namespace.value().as_symbol().expect("sym").0;
    let actor = Parameter {
        parameter: head.clone_with_value(value!(sym!("actor"))),
        specializer: None,
    };
    let implied = Parameter {
        parameter: head.clone(),
        specializer: None,
    };
    let resource_instance = Parameter {
        parameter: head.clone_with_value(namespace_as_var(namespace)),
        specializer: Some(
            namespace.clone_with_value(value!(pattern!(instance!(namespace_name)))),
        ),
    };
    vec![actor, implied, resource_instance]
}
// TODO(gj): better error message, e.g.:
// duplicate namespace declaration: resource Org { ... } defined on line XX of file YY
// previously defined on line AA of file BB
/// Error if a namespace for `namespace` has already been registered.
fn check_for_duplicate_namespaces(namespaces: &Namespaces, namespace: &Term) -> PolarResult<()> {
    if !namespaces.exists(namespace) {
        return Ok(());
    }
    let msg = format!("Duplicate declaration of {} namespace.", namespace);
    let (loc, ranges) = (namespace.offset(), vec![]);
    Err(ParseError::ParseSugar { loc, msg, ranges }.into())
}
// TODO(gj): no way to know in the core if `term` was registered as a class or a constant.
/// Is `term` (a symbol) registered as a constant in the knowledge base?
fn is_registered_class(kb: &KnowledgeBase, term: &Term) -> PolarResult<bool> {
    let class_name = term.value().as_symbol()?;
    Ok(kb.is_constant(class_name))
}
/// Error unless the resource named by a namespace declaration is a
/// registered class.
fn check_that_namespace_resource_is_registered_as_a_class(
    kb: &KnowledgeBase,
    resource: &Term,
) -> PolarResult<()> {
    if is_registered_class(kb, resource)? {
        return Ok(());
    }
    // TODO(gj): better error message
    let msg = format!(
        "In order to be declared as a namespace, {} must be registered as a class.",
        resource.to_polar()
    );
    let (loc, ranges) = (resource.offset(), vec![]);
    // TODO(gj): UnregisteredClassError in the core.
    Err(ParseError::ParseSugar { loc, msg, ranges }.into())
}
/// Error unless the type on the right-hand side of a relation declaration
/// (e.g. `Org` in `parent: Org`) is a registered class.
fn relation_type_is_registered(
    kb: &KnowledgeBase,
    (relation, kind): (&Term, &Term),
) -> PolarResult<()> {
    if is_registered_class(kb, kind)? {
        return Ok(());
    }
    let msg = format!(
        "Type '{}' in relation '{}: {}' must be registered as a class.",
        kind.to_polar(),
        relation.value().as_string()?,
        kind.to_polar(),
    );
    let (loc, ranges) = (relation.offset(), vec![]);
    // TODO(gj): UnregisteredClassError in the core.
    Err(ParseError::ParseSugar { loc, msg, ranges }.into())
}
/// Collect an error for every implication head that is not declared as a
/// role, permission, or relation in its own resource's declarations.
fn check_that_implication_heads_are_declared_locally(
    implications: &[Implication],
    declarations: &Declarations,
    resource: &Term,
) -> Vec<PolarError> {
    implications
        .iter()
        .filter(|Implication { head, .. }| !declarations.contains_key(head))
        .map(|Implication { head, .. }| {
            let msg = format!(
                "Undeclared term {} referenced in implication in {} namespace. \
                 Did you mean to declare it as a role, permission, or relation?",
                head.to_polar(),
                resource
            );
            let (loc, ranges) = (head.offset(), vec![]);
            ParseError::ParseSugar { loc, msg, ranges }.into()
        })
        .collect()
}
impl Namespace {
    // TODO(gj): Add 'includes' feature to ensure we have a clean hook for validation _after_ all
    // Polar rules are loaded.
    /// Validate this namespace and register it with the knowledge base:
    /// its resource must be a registered class, it must not be a duplicate,
    /// and all implication heads must be declared locally.
    pub fn add_to_kb(self, kb: &mut KnowledgeBase) -> PolarResult<()> {
        let mut errors = vec![];
        errors.extend(
            check_that_namespace_resource_is_registered_as_a_class(kb, &self.resource).err(),
        );
        errors.extend(check_for_duplicate_namespaces(&kb.namespaces, &self.resource).err());
        let Namespace {
            resource,
            roles,
            permissions,
            relations,
            implications,
        } = self;
        // Note: a bad declaration list returns immediately via `?`,
        // bypassing the collected `errors`.
        let declarations = index_declarations(roles, permissions, relations, &resource)?;
        errors.append(&mut check_that_implication_heads_are_declared_locally(
            &implications,
            &declarations,
            &resource,
        ));
        // TODO(gj): Emit all errors instead of just the first.
        if !errors.is_empty() {
            return Err(errors[0].clone());
        }
        kb.namespaces.add(resource, declarations, implications);
        Ok(())
    }
}
#[cfg(test)]
mod tests {
use permute::permute;
use std::collections::HashSet;
use super::*;
use crate::error::ErrorKind;
use crate::parser::{parse_lines, Line};
use crate::polar::Polar;
    /// Assert that loading `policy` fails with a `ParseSugar` error whose
    /// message contains `expected`.
    #[track_caller]
    fn expect_error(p: &Polar, policy: &str, expected: &str) {
        assert!(matches!(
            p.load_str(policy).unwrap_err(),
            error::PolarError {
                kind: error::ErrorKind::Parse(error::ParseError::ParseSugar {
                    msg,
                    ..
                }),
                ..
            } if msg.contains(expected)
        ));
    }
#[test]
fn test_namespace_rewrite_implications_with_lowercase_resource_specializer() {
let repo_resource = term!(sym!("repo"));
let repo_roles = term!(["reader"]);
let repo_relations = term!(btreemap! { sym!("parent") => term!(sym!("org")) });
let repo_declarations =
index_declarations(Some(repo_roles), None, Some(repo_relations), &repo_resource);
let org_resource = term!(sym!("org"));
let org_roles = term!(["member"]);
let org_declarations = index_declarations(Some(org_roles), None, None, &org_resource);
let mut namespaces = Namespaces::new();
namespaces.add(repo_resource, repo_declarations.unwrap(), vec![]);
namespaces.add(org_resource, org_declarations.unwrap(), vec![]);
let implication = Implication {
head: term!("reader"),
body: (term!("member"), Some(term!("parent"))),
};
let rewritten_role_role = implication
.as_rule(&term!(sym!("repo")), &namespaces)
.unwrap();
assert_eq!(
rewritten_role_role.to_polar(),
r#"has_role(actor, "reader", repo_instance: repo{}) if has_relation(org_instance, "parent", repo_instance) and has_role(actor, "member", org_instance);"#
);
}
#[test]
fn test_namespace_local_rewrite_implications() {
let resource = term!(sym!("Org"));
let roles = term!(["owner", "member"]);
let permissions = term!(["invite", "create_repo"]);
let declarations = index_declarations(Some(roles), Some(permissions), None, &resource);
let mut namespaces = Namespaces::new();
namespaces.add(resource, declarations.unwrap(), vec![]);
let implication = Implication {
head: term!("member"),
body: (term!("owner"), None),
};
let rewritten_role_role = implication
.as_rule(&term!(sym!("Org")), &namespaces)
.unwrap();
assert_eq!(
rewritten_role_role.to_polar(),
r#"has_role(actor, "member", org: Org{}) if has_role(actor, "owner", org);"#
);
let implication = Implication {
head: term!("invite"),
body: (term!("owner"), None),
};
let rewritten_permission_role = implication
.as_rule(&term!(sym!("Org")), &namespaces)
.unwrap();
assert_eq!(
rewritten_permission_role.to_polar(),
r#"has_permission(actor, "invite", org: Org{}) if has_role(actor, "owner", org);"#
);
let implication = Implication {
head: term!("create_repo"),
body: (term!("invite"), None),
};
let rewritten_permission_permission = implication
.as_rule(&term!(sym!("Org")), &namespaces)
.unwrap();
assert_eq!(
rewritten_permission_permission.to_polar(),
r#"has_permission(actor, "create_repo", org: Org{}) if has_permission(actor, "invite", org);"#
);
}
#[test]
fn test_namespace_nonlocal_rewrite_implications() {
let repo_resource = term!(sym!("Repo"));
let repo_roles = term!(["reader"]);
let repo_relations = term!(btreemap! { sym!("parent") => term!(sym!("Org")) });
let repo_declarations =
index_declarations(Some(repo_roles), None, Some(repo_relations), &repo_resource);
let org_resource = term!(sym!("Org"));
let org_roles = term!(["member"]);
let org_declarations = index_declarations(Some(org_roles), None, None, &org_resource);
let mut namespaces = Namespaces::new();
namespaces.add(repo_resource, repo_declarations.unwrap(), vec![]);
namespaces.add(org_resource, org_declarations.unwrap(), vec![]);
let implication = Implication {
head: term!("reader"),
body: (term!("member"), Some(term!("parent"))),
};
let rewritten_role_role = implication
.as_rule(&term!(sym!("Repo")), &namespaces)
.unwrap();
assert_eq!(
rewritten_role_role.to_polar(),
r#"has_role(actor, "reader", repo: Repo{}) if has_relation(org, "parent", repo) and has_role(actor, "member", org);"#
);
}
#[test]
fn test_namespace_must_be_registered() {
let p = Polar::new();
let valid_policy = "resource Org{}";
expect_error(
&p,
valid_policy,
"In order to be declared as a namespace, Org must be registered as a class.",
);
p.register_constant(sym!("Org"), term!("unimportant"));
assert!(p.load_str(valid_policy).is_ok());
}
#[test]
fn test_namespace_duplicate_namespaces() {
let p = Polar::new();
let invalid_policy = "resource Org{}resource Org{}";
p.register_constant(sym!("Org"), term!("unimportant"));
expect_error(
&p,
invalid_policy,
"Duplicate declaration of Org namespace.",
);
}
#[test]
fn test_namespace_with_undeclared_local_implication_head() {
let p = Polar::new();
p.register_constant(sym!("Org"), term!("unimportant"));
expect_error(
&p,
r#"resource Org{"member" if "owner";}"#,
r#"Undeclared term "member" referenced in implication in Org namespace. Did you mean to declare it as a role, permission, or relation?"#,
);
}
#[test]
fn test_namespace_with_undeclared_local_implication_body() {
let p = Polar::new();
p.register_constant(sym!("Org"), term!("unimportant"));
expect_error(
&p,
r#"resource Org {
roles=["member"];
"member" if "owner";
}"#,
r#"Undeclared term "owner" referenced in implication in Org namespace. Did you mean to declare it as a role, permission, or relation?"#,
);
}
#[test]
fn test_namespace_with_undeclared_nonlocal_implication_body() {
let p = Polar::new();
p.register_constant(sym!("Repo"), term!("unimportant"));
p.register_constant(sym!("Org"), term!("unimportant"));
expect_error(
&p,
r#"resource Repo {
roles = ["writer"];
relations = { parent: Org };
"writer" if "owner" on "parent";
}"#,
r#"Repo: Relation "parent" in implication body `"owner" on "parent"` has type Org, but no such namespace exists. Try declaring one: `resource Org {}`"#,
);
expect_error(
&p,
r#"resource Repo {
roles = ["writer"];
relations = { parent: Org };
"writer" if "owner" on "parent";
}
resource Org {}"#,
r#"Repo: Term "owner" not declared on related resource Org. Did you mean to declare it as a role, permission, or relation on resource Org?"#,
);
}
#[test]
#[ignore = "probably easier after the entity PR goes in"]
fn test_namespace_resource_relations_can_only_appear_after_on() {
let p = Polar::new();
p.register_constant(sym!("Repo"), term!("unimportant"));
expect_error(
&p,
r#"resource Repo {
roles = ["owner"];
relations = { parent: Org };
"parent" if "owner";
}"#,
r#"Repo: resource relation "parent" can only appear in an implication following the keyword 'on'."#,
);
}
#[test]
#[ignore = "not yet implemented"]
fn test_namespace_with_circular_implications() {
let p = Polar::new();
p.register_constant(sym!("Repo"), term!("unimportant"));
let policy = r#"resource Repo {
roles = [ "writer" ];
"writer" if "writer";
}"#;
panic!("{}", p.load_str(policy).unwrap_err());
// let policy = r#"resource Repo {
// roles = [ "writer", "reader" ];
// "writer" if "reader";
// "reader" if "writer";
// }"#;
// panic!("{}", p.load_str(policy).unwrap_err());
//
// let policy = r#"resource Repo {
// roles = [ "writer", "reader", "admin" ];
// "admin" if "reader";
// "writer" if "admin";
// "reader" if "writer";
// }"#;
// panic!("{}", p.load_str(policy).unwrap_err());
}
#[test]
fn test_namespace_with_unregistered_relation_type() {
let p = Polar::new();
p.register_constant(sym!("Repo"), term!("unimportant"));
let policy = r#"resource Repo { relations = { parent: Org }; }"#;
expect_error(
&p,
policy,
"Type 'Org' in relation 'parent: Org' must be registered as a class.",
);
p.register_constant(sym!("Org"), term!("unimportant"));
p.load_str(policy).unwrap();
}
#[test]
fn test_namespace_with_clashing_declarations() {
let p = Polar::new();
p.register_constant(sym!("Org"), term!("unimportant"));
expect_error(
&p,
r#"resource Org{
roles = ["egg","egg"];
"egg" if "egg";
}"#,
r#"Org: Duplicate declaration of "egg" in the roles list."#,
);
expect_error(
&p,
r#"resource Org{
roles = ["egg","tootsie"];
permissions = ["spring","egg"];
"egg" if "tootsie";
"tootsie" if "spring";
}"#,
r#"Org: "egg" declared as a permission but it was previously declared as a role."#,
);
expect_error(
&p,
r#"resource Org{
permissions = [ "egg" ];
relations = { egg: Roll };
}"#,
r#"Org: 'egg' declared as a relation but it was previously declared as a permission."#,
);
}
#[test]
fn test_namespace_parsing_permutations() {
use std::iter::FromIterator;
// Policy pieces
let roles = r#"roles = ["writer", "reader"];"#;
let permissions = r#"permissions = ["push", "pull"];"#;
let relations = r#"relations = { creator: User, parent: Org };"#;
let implications = vec![
r#""pull" if "reader";"#,
r#""push" if "writer";"#,
r#""writer" if "creator";"#,
r#""reader" if "member" on "parent";"#,
];
// Maximal namespace
let namespace = Namespace {
resource: term!(sym!("Repo")),
roles: Some(term!(["writer", "reader"])),
permissions: Some(term!(["push", "pull"])),
relations: Some(term!(btreemap! {
sym!("creator") => term!(sym!("User")),
sym!("parent") => term!(sym!("Org")),
})),
implications: vec![
// TODO(gj): implication! macro
Implication {
head: term!("pull"),
body: (term!("reader"), None),
},
Implication {
head: term!("push"),
body: (term!("writer"), None),
},
Implication {
head: term!("writer"),
body: (term!("creator"), None),
},
Implication {
head: term!("reader"),
body: (term!("member"), Some(term!("parent"))),
},
],
};
// Helpers
let equal = |line: &Line, expected: &Namespace| match line {
Line::Namespace(parsed) => {
let parsed_implications: HashSet<&Implication> =
HashSet::from_iter(&parsed.implications);
let expected_implications = HashSet::from_iter(&expected.implications);
parsed.resource == expected.resource
&& parsed.roles == expected.roles
&& parsed.permissions == expected.permissions
&& parsed.relations == expected.relations
&& parsed_implications == expected_implications
}
_ => false,
};
let test_case = |parts: Vec<&str>, expected: &Namespace| {
for permutation in permute(parts).into_iter() {
let mut policy = "resource Repo {\n".to_owned();
policy += &permutation.join("\n");
policy += "}";
assert!(equal(&parse_lines(0, &policy).unwrap()[0], expected));
}
};
// Test each case with and without implications.
let test_cases = |parts: Vec<&str>, expected: &Namespace| {
let mut parts_with_implications = parts.clone();
parts_with_implications.append(&mut implications.clone());
test_case(parts_with_implications, expected);
let expected_without_implications = Namespace {
implications: vec![],
..expected.clone()
};
test_case(parts, &expected_without_implications);
};
// Cases
// Roles, Permissions, Relations
test_cases(vec![roles, permissions, relations], &namespace);
// Roles, Permissions, _________
let expected = Namespace {
relations: None,
..namespace.clone()
};
test_cases(vec![roles, permissions], &expected);
// Roles, ___________, Relations
let expected = Namespace {
permissions: None,
..namespace.clone()
};
test_cases(vec![roles, relations], &expected);
// _____, Permissions, Relations
let expected = Namespace {
roles: None,
..namespace.clone()
};
test_cases(vec![permissions, relations], &expected);
// Roles, ___________, _________
let expected = Namespace {
permissions: None,
relations: None,
..namespace.clone()
};
test_cases(vec![roles], &expected);
// _____, Permissions, _________
let expected = Namespace {
roles: None,
relations: None,
..namespace.clone()
};
test_cases(vec![permissions], &expected);
// _____, ___________, Relations
let expected = Namespace {
roles: None,
permissions: None,
..namespace.clone()
};
test_cases(vec![relations], &expected);
// _____, ___________, _________
let expected = Namespace {
roles: None,
permissions: None,
relations: None,
..namespace
};
test_cases(vec![], &expected);
}
#[test]
fn test_namespace_declaration_keywords() {
let p = Polar::new();
expect_error(
&p,
r#"resource Org{roles={};}"#,
r#"Expected 'roles' declaration to be a list of strings; found a dictionary:"#,
);
expect_error(
&p,
r#"resource Org{relations=[];}"#,
r#"Expected 'relations' declaration to be a dictionary; found a list:"#,
);
expect_error(
&p,
r#"resource Org{foo=[];}"#,
r#"Unexpected declaration 'foo'. Did you mean for this to be 'roles = [ ... ];' or 'permissions = [ ... ];'?"#,
);
expect_error(
&p,
r#"resource Org{foo={};}"#,
r#"Unexpected declaration 'foo'. Did you mean for this to be 'relations = { ... };'?"#,
);
expect_error(
&p,
r#"resource Org{"foo" if "bar" onn "baz";}"#,
r#"Unexpected relation keyword 'onn'. Did you mean 'on'?"#,
);
}
#[test]
fn test_namespace_leading_resource_keyword() {
let p = Polar::new();
assert!(matches!(
p.load_str("Org{}").unwrap_err().kind,
ErrorKind::Parse(ParseError::UnrecognizedToken { .. })
));
expect_error(
&p,
"seahorse Org{}",
"Expected 'resource' but found 'seahorse'.",
);
}
#[test]
fn test_namespace_declaration_keywords_are_not_reserved_words() {
let p = Polar::new();
p.load_str("roles(permissions, on, resource) if permissions.relations = on and resource;")
.unwrap();
}
}
|
//! LRAT proof generation for the Varisat SAT solver.
use std::{
io::{BufWriter, Write},
mem::replace,
};
use anyhow::Error;
use varisat_checker::{CheckedProofStep, CheckerData, ProofProcessor};
use varisat_formula::Lit;
/// Proof processor that generates an LRAT proof.
pub struct WriteLrat<'a> {
    /// Emit the compressed (binary) LRAT variant instead of the textual one.
    binary: bool,
    /// Buffered sink the proof is written to.
    target: BufWriter<Box<dyn Write + 'a>>,
    /// Whether a batched delete step is currently open (not yet terminated).
    delete_open: bool,
    /// Id of the most recently added clause; textual delete lines are
    /// prefixed with this id (see `open_delete`).
    last_added_id: u64,
    /// Deletes that cannot be written yet because the textual format needs
    /// the id of the next added clause as a prefix first.
    buffered_deletes: Vec<u64>,
}
impl<'a> ProofProcessor for WriteLrat<'a> {
    /// Translate one checked proof step into LRAT output.
    fn process_step(&mut self, step: &CheckedProofStep, _data: CheckerData) -> Result<(), Error> {
        // First: if this step is not a clause addition, any deletes buffered
        // while waiting for the next added clause id can now be flushed.
        match step {
            CheckedProofStep::AddClause { .. } => (),
            CheckedProofStep::DuplicatedClause { .. } => (),
            _ => {
                if !self.buffered_deletes.is_empty() {
                    let buffered_deletes = replace(&mut self.buffered_deletes, vec![]);
                    self.open_delete()?;
                    self.write_ids(&buffered_deletes)?;
                }
            }
        }
        match step {
            &CheckedProofStep::AddClause { id, .. } => {
                self.last_added_id = id;
            }
            &CheckedProofStep::DuplicatedClause { id, .. }
            | &CheckedProofStep::TautologicalClause { id, .. } => {
                // Redundant additions are immediately deleted again.
                self.last_added_id = id;
                if self.binary {
                    self.open_delete()?;
                    self.write_ids(&[id])?;
                } else {
                    // In the textual format the delete command is prefixed by an id which we do not
                    // know yet.
                    self.buffered_deletes.push(id);
                }
            }
            &CheckedProofStep::AtClause {
                id,
                clause,
                propagations,
                ..
            } => {
                // Addition step: id, literals, separator, then the
                // unit-propagation hints justifying the clause.
                self.close_delete()?;
                self.last_added_id = id;
                self.write_add_step()?;
                self.write_ids(&[id])?;
                self.write_lits(clause)?;
                self.write_sep()?;
                self.write_ids(propagations)?;
                self.write_end()?;
            }
            &CheckedProofStep::DeleteAtClause {
                id,
                keep_as_redundant,
                ..
            }
            | &CheckedProofStep::DeleteRatClause {
                id,
                keep_as_redundant,
                ..
            } => {
                // Clauses kept as redundant remain part of the proof and must
                // not be deleted from the LRAT output.
                if !keep_as_redundant {
                    self.open_delete()?;
                    self.write_ids(&[id])?;
                }
            }
            &CheckedProofStep::DeleteClause { id, .. } => {
                self.open_delete()?;
                self.write_ids(&[id])?;
            }
            // Steps with no LRAT counterpart are ignored.
            &CheckedProofStep::UserVar { .. }
            | &CheckedProofStep::MakeIrredundant { .. }
            | &CheckedProofStep::Model { .. }
            | &CheckedProofStep::Assumptions { .. }
            | &CheckedProofStep::FailedAssumptions { .. } => (),
        }
        Ok(())
    }
}
impl<'a> WriteLrat<'a> {
    /// Create a lrat writing processor.
    ///
    /// The proof is written to `target`. If `binary` is false a normal LRAT proof is emitted. If it
    /// is true, the compressed LRAT format is used which is a compact binary encoding. Despite the
    /// name, even a compressed LRAT proof can usually still be compressed a lot using a general
    /// data compression algorithm.
    pub fn new(target: impl Write + 'a, binary: bool) -> WriteLrat<'a> {
        WriteLrat {
            binary,
            target: BufWriter::new(Box::new(target)),
            delete_open: false,
            last_added_id: 0,
            buffered_deletes: vec![],
        }
    }
    /// Write out all steps processed so far.
    ///
    /// This is automatically called when this proof processor is dropped. Calling this explicitly
    /// is recommended to handle possible IO errors.
    pub fn flush(&mut self) -> Result<(), Error> {
        self.close_delete()?;
        self.target.flush()?;
        Ok(())
    }
    /// If necessary begin a batched delete step.
    fn open_delete(&mut self) -> Result<(), Error> {
        if !self.delete_open {
            if !self.binary {
                // Textual delete lines are prefixed with the id of the most
                // recently added clause.
                self.write_ids(&[self.last_added_id])?;
            }
            self.write_delete_step()?;
            self.delete_open = true;
        }
        Ok(())
    }
    /// If necessary end a batched delete step.
    fn close_delete(&mut self) -> Result<(), Error> {
        if self.delete_open {
            self.write_end()?;
            self.delete_open = false;
        }
        Ok(())
    }
    /// Begin a batched delete step.
    fn write_delete_step(&mut self) -> Result<(), Error> {
        if self.binary {
            self.target.write_all(b"d")?;
        } else {
            self.target.write_all(b"d ")?;
        }
        Ok(())
    }
    /// Begin a clause addition step.
    fn write_add_step(&mut self) -> Result<(), Error> {
        // The textual format has no explicit marker for addition steps.
        if self.binary {
            self.target.write_all(b"a")?;
        }
        Ok(())
    }
    /// Write a list of clause ids.
    fn write_ids(&mut self, ids: &[u64]) -> Result<(), Error> {
        // Ids are shifted by one on output; presumably the checker's ids are
        // 0-based while LRAT ids are 1-based — TODO confirm.
        if self.binary {
            for &id in ids {
                // NOTE(review): the doubling looks like the compressed-LRAT
                // signed-number encoding (sign bit in the LSB) — confirm
                // against the format spec.
                leb128::write::unsigned(&mut self.target, (id + 1) * 2)?;
            }
        } else {
            for &id in ids {
                itoa::write(&mut self.target, id + 1)?;
                self.target.write_all(b" ")?;
            }
        }
        Ok(())
    }
    /// Write a list of literals.
    fn write_lits(&mut self, lits: &[Lit]) -> Result<(), Error> {
        if self.binary {
            // NOTE(review): `code() + 2` appears to map literals to the
            // binary proof literal encoding — confirm.
            for &lit in lits {
                leb128::write::unsigned(&mut self.target, lit.code() as u64 + 2)?;
            }
        } else {
            for &lit in lits {
                itoa::write(&mut self.target, lit.to_dimacs())?;
                self.target.write_all(b" ")?;
            }
        }
        Ok(())
    }
    /// End the current step.
    fn write_end(&mut self) -> Result<(), Error> {
        if self.binary {
            self.target.write_all(&[0])?
        } else {
            self.target.write_all(b"0\n")?
        }
        Ok(())
    }
    /// Write a separator.
    fn write_sep(&mut self) -> Result<(), Error> {
        if self.binary {
            self.target.write_all(&[0])?
        } else {
            self.target.write_all(b"0 ")?
        }
        Ok(())
    }
}
impl<'a> Drop for WriteLrat<'a> {
    /// Finish any open delete step when the processor is dropped.
    fn drop(&mut self) {
        // IO errors cannot be reported from `drop` and are deliberately
        // discarded; call `flush` explicitly to observe them.
        let _ = self.close_delete();
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use proptest::prelude::*;
    use std::{
        fs::File,
        path::PathBuf,
        process::{Command, Stdio},
    };
    use tempfile::TempDir;
    use varisat::{dimacs::write_dimacs, ProofFormat, Solver};
    use varisat_checker::Checker;
    use varisat_formula::{cnf_formula, test::sgen_unsat_formula, CnfFormula};
    /// Run an external LRAT checker (`tool`) on the given CNF and proof and
    /// report whether it printed the verification success line.
    fn check_lrat(tool: &str, cnf_file: &PathBuf, proof_file: &PathBuf) -> Result<bool, Error> {
        let mut child = Command::new(tool)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .spawn()?;
        let mut stdin = child.stdin.as_mut().unwrap();
        // The checker tool reads a lisp-style command on stdin.
        writeln!(&mut stdin, "(lrat-check {:?} {:?})", cnf_file, proof_file)?;
        let output = child.wait_with_output()?;
        let stdout = std::str::from_utf8(&output.stdout)?;
        Ok(stdout.contains("s VERIFIED"))
    }
    /// Solve `formula` (expected UNSAT), produce an LRAT proof either
    /// directly from the solver (`direct`) or by replaying a varisat proof
    /// through the checker, then verify the proof with an external tool.
    fn solve_and_check_lrat(
        formula: CnfFormula,
        binary: bool,
        direct: bool,
    ) -> Result<bool, Error> {
        let tmp = TempDir::new()?;
        let lrat_proof = tmp.path().join("proof.lrat");
        let cnf_file = tmp.path().join("input.cnf");
        let mut dimacs = vec![];
        let mut proof = vec![];
        let mut write_lrat = WriteLrat::new(File::create(&lrat_proof)?, binary);
        write_dimacs(&mut File::create(&cnf_file)?, &formula)?;
        let mut solver = Solver::new();
        write_dimacs(&mut dimacs, &formula).unwrap();
        if direct {
            solver.add_proof_processor(&mut write_lrat);
        } else {
            solver.write_proof(&mut proof, ProofFormat::Varisat);
        }
        solver.add_dimacs_cnf(&mut &dimacs[..]).unwrap();
        assert_eq!(solver.solve().ok(), Some(false));
        solver.close_proof()?;
        drop(solver);
        if !direct {
            // Replay the varisat proof through the checker to generate LRAT.
            let mut checker = Checker::new();
            checker.add_processor(&mut write_lrat);
            checker.add_dimacs_cnf(&mut &dimacs[..]).unwrap();
            checker.check_proof(&mut &proof[..]).unwrap();
        }
        drop(write_lrat);
        check_lrat(
            if binary { "check-clrat" } else { "check-lrat" },
            &cnf_file,
            &lrat_proof,
        )
    }
    // Exercises the DuplicatedClause delete-buffering path.
    #[cfg_attr(not(test_check_lrat), ignore)]
    #[test]
    fn duplicated_clause_lrat() {
        for &binary in [false, true].iter() {
            for &direct in [false, true].iter() {
                assert!(
                    solve_and_check_lrat(
                        cnf_formula![
                            1, 2;
                            1, 2;
                            -1, -2;
                            3;
                            -3, -1, 2;
                            -4, 1, -2;
                            4;
                        ],
                        binary,
                        direct
                    )
                    .unwrap(),
                    "binary: {:?} direct: {:?}",
                    binary,
                    direct
                );
            }
        }
    }
    // A trivially UNSAT instance via a unit conflict.
    #[cfg_attr(not(test_check_lrat), ignore)]
    #[test]
    fn unit_conflict_lrat() {
        for &binary in [false, true].iter() {
            for &direct in [false, true].iter() {
                assert!(
                    solve_and_check_lrat(
                        cnf_formula![
                            1;
                            2, 3;
                            -1;
                            4, 5;
                        ],
                        binary,
                        direct
                    )
                    .unwrap(),
                    "binary: {:?} direct: {:?}",
                    binary,
                    direct
                );
            }
        }
    }
    proptest! {
        // Randomized UNSAT instances across both formats and both pipelines.
        #[cfg_attr(not(test_check_lrat), ignore)]
        #[test]
        fn sgen_unsat_lrat(
            formula in sgen_unsat_formula(1..7usize),
            binary in proptest::bool::ANY,
            direct in proptest::bool::ANY,
        ) {
            prop_assert!(solve_and_check_lrat(formula, binary, direct).unwrap());
        }
    }
}
|
extern crate grpcio;
extern crate protos;
use std::env;
use std::sync::Arc;
use std::io;
use grpcio::{ChannelBuilder, EnvBuilder};
use protos::mykv::{Order};
use protos::mykv_grpc::MykvClient;
fn main() {
let args = env::args().collect::<Vec<_>>();
if args.len() != 2 {
panic!("Expected exactly four argument, the port number.")
}
let port = args[1]
.parse::<u16>()
.expect(format!("{} is not a valid port number", args[1]).as_str());
let env = Arc::new(EnvBuilder::new().build());
let ch = ChannelBuilder::new(env).connect(format!("localhost:{}", port).as_str());
let client = MykvClient::new(ch);
let mut order = Order::new();
loop {
let mut com = String::new();
let mut key = String::new();
let mut value = String::new();
println!("input command:");
io::stdin().read_line(&mut com)
.expect("Failed to read line");
match com.trim() {
"set" => {
println!("input key:");
io::stdin().read_line(&mut key)
.expect("Failed to read line");
println!("input value:");
io::stdin().read_line(&mut value)
.expect("Failed to read line");
order.set_com(com.trim().to_string());
order.set_key(key.trim().to_string());
order.set_value(value.trim().to_string());
}
"get" => {
println!("input key:");
io::stdin().read_line(&mut key)
.expect("Failed to read line");
order.set_com(com.trim().to_string());
order.set_key(key.trim().to_string());
}
"del" => {
println!("input key:");
io::stdin().read_line(&mut key)
.expect("Failed to read line");
order.set_com(com.trim().to_string());
order.set_key(key.trim().to_string());
}
"scan" => {
println!("input key left:");
io::stdin().read_line(&mut key)
.expect("Failed to read line");
println!("input key right:");
io::stdin().read_line(&mut value)
.expect("Failed to read line");
order.set_com(com.trim().to_string());
order.set_key(key.trim().to_string());
order.set_value(value.trim().to_string());
}
"save" =>{
order.set_com(com.trim().to_string());
}
_ => {
}
}
let data = client.say(&order).expect("RPC Failed!");
println!("order : {:?}", order);
if order.get_com() == "scan" {
let len = data.get_key().len();
let mut i = 0;
while i<len {
println!("key:{}",&data.get_key()[i]);
println!("value:{}",&data.get_value()[i]);
i +=1;
}
}
else {
println!("reply : {:?}", data);
}
}
// loop {
// let mut com = String::new();
// let mut key = String::new();
// let mut value = String::new();
// println!("input command:");
// io::stdin().read_line(&mut com)
// .expect("Failed to read line");
// match (com.trim()) {
// "set" => {
// println!("input key:");
// io::stdin().read_line(&mut key)
// .expect("Failed to read line");
// println!("input value:");
// io::stdin().read_line(&mut value)
// .expect("Failed to read line");
// let mut data = Datakey::new();
// data.set_key(key.trim().to_string());
// let rep = client.set(&data).expect("RPC Failed!");
// println!("set : {:?}", data);
// println!("reply : {:?}", rep);
// }
// "get" => {
// println!("input key:");
// io::stdin().read_line(&mut key)
// .expect("Failed to read line");
// let mut datakey = Datakey::new();
// datakey.set_key(key.trim().to_string());
// let rep = client.get(&datakey).expect("RPC Failed!");
// println!("get : {:?}", datakey);
// println!("reply : {:?}", rep);
// }
// "del" => {
// }
// "scan" => {
// }
// _ =>{
// }
// }
// }
} |
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Control commands used with semctl, shmctl, and msgctl. Source:
// include/uapi/linux/ipc.h.
pub const IPC_RMID: i32 = 0; // remove resource
pub const IPC_SET: i32 = 1; // set ipc_perm options
pub const IPC_STAT: i32 = 2; // get ipc_perm options
pub const IPC_INFO: i32 = 3; // see ipcs
// resource get request flags. Source: include/uapi/linux/ipc.h
// BUG FIX: these flags are octal in the Linux headers (01000, 02000, 04000).
// The previous literals `00001000` etc. were *decimal* in Rust (1000, 2000,
// 4000) — Rust has no leading-zero octal syntax — so they did not match the
// kernel ABI values (512, 1024, 2048).
pub const IPC_CREAT: i16 = 0o001000;
pub const IPC_EXCL: i16 = 0o002000;
pub const IPC_NOWAIT: i16 = 0o004000;
pub const IPC_PRIVATE: i32 = 0;
// In Linux, amd64 does not enable CONFIG_ARCH_WANT_IPC_PARSE_VERSION, so SysV
// IPC unconditionally uses the "new" 64-bit structures that are needed for
// features like 32-bit UIDs.
// IPCPerm is equivalent to struct ipc64_perm.
#[repr(C)]
#[derive(Clone, Debug, Default)]
pub struct IPCPerm {
    // Key supplied to the *get(2) call that created the object.
    pub Key: u32,
    // Owner's user id.
    pub UID: u32,
    // Owner's group id.
    pub GID: u32,
    // Creator's user id.
    pub CUID: u32,
    // Creator's group id.
    pub CGID: u32,
    // Access mode bits.
    pub Mode: u16,
    pub pad1: u16,
    // Sequence number.
    pub Seq: u16,
    // Remaining fields are padding/reserved space in struct ipc64_perm.
    pub pad2: u16,
    pub pad3: u32,
    pub unused1: u64,
    pub unused2: u64,
}
|
use diesel::{self, prelude::*};
use rocket_contrib::json::Json;
use crate::componentes_curriculares_model::{
ComponentesCurriculares, InsertableComponenteCurricular, UpdatableComponenteCurricular,
};
use crate::schema;
use crate::DbConn;
/// POST handler: insert one componente curricular and report how many rows
/// were written.
#[post("/componentes_curriculares", data = "<componentes_curriculares>")]
pub fn create_componentes_curriculares(
    conn: DbConn,
    componentes_curriculares: Json<InsertableComponenteCurricular>,
) -> Result<String, String> {
    diesel::insert_into(schema::componentes_curriculares::table)
        .values(&componentes_curriculares.0)
        .execute(&conn.0)
        .map(|count| format!("Inserted {} row(s).", count))
        .map_err(|err| -> String {
            // Log the full error server-side; the client sees a generic message.
            println!("Error inserting row: {:?}", err);
            "Error inserting row into database".into()
        })
}
/// GET handler: return every row of the componentes_curriculares table.
#[get("/componentes_curriculares")]
pub fn read_componentes_curriculares(
    conn: DbConn,
) -> Result<Json<Vec<ComponentesCurriculares>>, String> {
    use crate::schema::componentes_curriculares::dsl::*;
    componentes_curriculares
        .load(&conn.0)
        .map_err(|err| -> String {
            // BUG FIX: the message previously said "page views" — a copy-paste
            // from another handler; it now names the actual table, matching
            // the wording of read_componentes_curriculares_unique.
            println!("Error querying componentes_curriculares: {:?}", err);
            "Error querying componentes_curriculares from the database".into()
        })
        .map(Json)
}
/// GET handler: look up a single componente curricular by primary key.
/// Returns a (possibly empty) list rather than a 404 when the id is absent.
#[get("/componentes_curriculares/<id>")]
pub fn read_componentes_curriculares_unique(
    id: i32,
    conn: DbConn,
) -> Result<Json<Vec<ComponentesCurriculares>>, String> {
    schema::componentes_curriculares::table
        .find(id)
        .load(&conn.0)
        .map_err(|err| -> String {
            // Log the full error server-side; the client sees a generic message.
            println!("Error querying componentes_curriculares: {:?}", err);
            "Error querying componentes_curriculares from the database".into()
        })
        .map(Json)
}
/// PUT handler: apply the given field updates to the row with primary key
/// `id` and report how many rows were affected.
#[put("/componentes_curriculares/<id>", data = "<componentes_curriculares>")]
pub fn update_componentes_curriculares(
    id: i32,
    conn: DbConn,
    componentes_curriculares: Json<UpdatableComponenteCurricular>,
) -> Result<String, String> {
    // FIX: local was previously named `inserted_rows`, which misdescribes an
    // UPDATE's affected-row count.
    let updated_rows = diesel::update(schema::componentes_curriculares::table.find(id))
        .set(&componentes_curriculares.0)
        .execute(&conn.0)
        .map_err(|err| -> String {
            println!("Error updating row: {:?}", err);
            "Error updating row into database".into()
        })?;
    Ok(format!("Updated {} row(s).", updated_rows))
}
/// DELETE handler: remove the row with primary key `id` and report how many
/// rows were deleted.
#[delete("/componentes_curriculares/<id>")]
pub fn delete_componentes_curriculares(id: i32, conn: DbConn) -> Result<String, String> {
    diesel::delete(schema::componentes_curriculares::table.find(id))
        .execute(&conn.0)
        .map(|count| format!("Deleted {} row(s).", count))
        .map_err(|err| -> String {
            // Log the full error server-side; the client sees a generic message.
            println!("Error deleting row: {:?}", err);
            "Error deleting row into database".into()
        })
}
|
use std::net::SocketAddrV4;
use std::sync::atomic::{AtomicU32, Ordering};
use futures::{stream::TryStreamExt, SinkExt, StreamExt};
use tokio::sync::mpsc::Sender;
use warp::{filters::ws::Message, http::header, reject::Rejection, Buf, Filter};
use uuid::Uuid;
use webrtc_unreliable::{Server as RtcServer, SessionEndpoint};
use crate::engine::{
components::player_controlled::PlayerId,
messaging::messages::{OutMessage, WSClientMessage},
network::client_data::ClientData,
};
static NEXT_USER_ID: AtomicU32 = AtomicU32::new(0);
/// Start the WebRTC server on a spawned task and wait for it to come up.
///
/// Panics if either address fails to parse or the server fails to start.
pub async fn start_rtc_server(listen_addr: String, public_addr: String) -> RtcServer {
    let server_future = RtcServer::new(
        listen_addr.parse().unwrap(),
        public_addr.parse().unwrap(),
    );
    let handle = tokio::spawn(server_future);
    // First unwrap: the spawned task must not panic; second: server startup.
    handle.await.unwrap().expect("rtc server failed to start")
}
/// Spawn the HTTP/WebSocket front end on `sdp_addr`.
///
/// Serves two routes:
/// * `POST /sdp` — forwards the request body to the WebRTC session endpoint
///   and returns its response with a permissive CORS header added.
/// * `GET /ws` — upgrades to a WebSocket handled by `ws_connected`, which
///   reports events to the manager via `manager_out`.
pub async fn start_service(
    sdp_addr: String,
    endpoint: SessionEndpoint,
    manager_out: Sender<WSClientMessage>,
) -> tokio::task::JoinHandle<()> {
    println!("Web service starting.");
    tokio::spawn(async move {
        let sdp = warp::post()
            .and(warp::path("sdp"))
            .and(warp::body::stream())
            .and_then(move |data| {
                println!("Incomming SDP request...");
                // Each request works on its own clone of the session endpoint.
                let mut session_endpoint = endpoint.clone();
                let bytes = TryStreamExt::map_ok(data, |mut buf| Buf::to_bytes(&mut buf));
                async move {
                    match session_endpoint.http_session_request(bytes).await {
                        Ok(mut resp) => {
                            // Allow cross-origin browsers to read the answer.
                            resp.headers_mut().insert(
                                header::ACCESS_CONTROL_ALLOW_ORIGIN,
                                header::HeaderValue::from_static("*"),
                            );
                            Ok::<_, Rejection>(resp)
                        }
                        Err(_err) => Err::<_, Rejection>(warp::reject()),
                    }
                }
            });
        let ws = warp::path("ws")
            // `map` hands each upgrade its own clone of the manager channel.
            .map(move || manager_out.clone())
            .and(warp::ws())
            .map(|manager_out, ws: warp::ws::Ws| {
                println!("Ws upgrade request");
                ws.on_upgrade(|websocket| ws_connected(websocket, manager_out))
            });
        let routes = ws.or(sdp);
        warp::serve(routes)
            .run(sdp_addr.parse::<SocketAddrV4>().unwrap())
            .await
    })
}
/// Per-connection WebSocket task: assign the client an id and uuid, register
/// it with the manager, pump incoming messages, and report disconnection.
async fn ws_connected(ws: warp::ws::WebSocket, mut manager_out: Sender<WSClientMessage>) {
    // Ids are process-unique; wraps around only after u32::MAX connections.
    let my_id = PlayerId(NEXT_USER_ID.fetch_add(1, Ordering::Relaxed));
    let uuid = Uuid::new_v4().to_string();
    println!("Ws connected. User id: {} with uuid: {}", my_id, &uuid);
    let (mut sender, mut receiver) = ws.split();
    // Send the uuid to the client first so it can verify its session; a send
    // failure is only logged — registration still proceeds below.
    match sender
        .send(Message::binary(
            OutMessage::VerifyUuid(uuid.clone()).into_proto_bytes(),
        ))
        .await
    {
        Ok(_) => (),
        Err(e) => println!("{}", e),
    };
    // Hand the outbound half of the socket to the manager; `unwrap` here
    // means a full/closed manager channel aborts the connection task.
    manager_out
        .try_send(WSClientMessage::Connected(
            my_id,
            ClientData {
                ws_client_out: sender,
                socket_addr: None,
                socket_uuid: uuid,
            },
        ))
        .unwrap();
    // Forward every inbound frame to the manager until error or close.
    while let Some(result) = receiver.next().await {
        let msg = match result {
            Ok(msg) => msg,
            Err(e) => {
                println!("Websocket error! {}", e);
                break;
            }
        };
        manager_out
            .try_send(WSClientMessage::Packet(my_id, msg.into_bytes()))
            .unwrap();
    }
    //Client disconnected
    manager_out
        .try_send(WSClientMessage::Disconnected(my_id))
        .unwrap();
}
|
// NOTE(review): this looks like svd2rust-generated register accessor code —
// prefer regenerating from the SVD over hand-editing.
#[doc = "Reader of register PTPPPSCR"]
pub type R = crate::R<u32, super::PTPPPSCR>;
#[doc = "Reader of field `PPSFREQ`"]
pub type PPSFREQ_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:3 - PPS frequency selection"]
    #[inline(always)]
    pub fn ppsfreq(&self) -> PPSFREQ_R {
        // Mask off the low nibble (bits 0:3) holding the field value.
        PPSFREQ_R::new((self.bits & 0x0f) as u8)
    }
}
|
use super::*;
use std::ops::*;
// OR-ing a colour-specific mask with a plain `Mask` unwraps the colour
// wrapper and yields a plain `Mask`.
impl BitOr<Mask> for WhiteMask {
    type Output = Mask;
    fn bitor(self, rhs: Mask) -> Self::Output {
        self.0 | rhs
    }
}
// Mirror impl so the operands can appear in either order.
impl BitOr<WhiteMask> for Mask {
    type Output = Mask;
    fn bitor(self, rhs: WhiteMask) -> Self::Output {
        self | rhs.0
    }
}
// AND-ing preserves the colour wrapper: restricting a `WhiteMask` by a plain
// `Mask` still only covers white squares.
impl BitAnd<Mask> for WhiteMask {
    type Output = WhiteMask;
    fn bitand(self, rhs: Mask) -> Self::Output {
        WhiteMask(self.0 & rhs)
    }
}
// Mirror impl so the operands can appear in either order.
impl BitAnd<WhiteMask> for Mask {
    type Output = WhiteMask;
    fn bitand(self, rhs: WhiteMask) -> Self::Output {
        WhiteMask(self & rhs.0)
    }
}
// Same AND behaviour for the black wrapper.
// NOTE(review): unlike WhiteMask, no BitOr impls for BlackMask appear in this
// file — confirm whether that asymmetry is intentional.
impl BitAnd<Mask> for BlackMask {
    type Output = BlackMask;
    fn bitand(self, rhs: Mask) -> Self::Output {
        BlackMask(self.0 & rhs)
    }
}
// Mirror impl so the operands can appear in either order.
impl BitAnd<BlackMask> for Mask {
    type Output = BlackMask;
    fn bitand(self, rhs: BlackMask) -> Self::Output {
        BlackMask(self & rhs.0)
    }
}
#[cfg(test)]
mod tests {
    use mask::masks::*;
    use super::*;
    /// An empty mask.
    pub fn none() -> Mask {
        E
    }
    /// A white mask covering the single square `_4`.
    pub fn white() -> WhiteMask {
        WhiteMask::wrap(_4)
    }
    // `WhiteMask | Mask` unwraps to a plain combined mask.
    #[test]
    fn white_bit_or_none() {
        assert_eq!(white() | none(), E | _4);
    }
    // Operand order must not matter.
    #[test]
    fn none_bit_or_white() {
        assert_eq!(none() | white(), E | _4);
    }
}
//! Definitions for all methods used to set and query
//! the current state of the module
use super::Module;
use openmpt_sys;
use std::os::raw::*;
impl Module {
    /// Select a sub-song from a multi-song module.
    ///
    /// ### Parameters
    /// * `subsong_num` : Index of the sub-song. -1 plays all sub-songs consecutively.
    ///
    /// ### Returns
    /// Whether or not the operation has succeeded.
    ///
    /// ### Remarks
    /// Whether subsong -1 (all subsongs consecutively), subsong 0 or some other subsong
    /// is selected by default, is an implementation detail and subject to change.
    /// If you do not want to care about subsongs, it is recommended to just not call this method at all.
    pub fn select_subsong(&mut self, subsong_num: i32) -> bool {
        let return_code = unsafe {
            openmpt_sys::openmpt_module_select_subsong(self.inner, subsong_num)
        };
        // The C API returns non-zero on success, zero on failure.
        return_code != 0
    }
    /// Set approximate current song position.
    ///
    /// ### Parameters
    /// * `seconds` : Seconds to seek to.
    ///
    /// ### Returns
    /// Approximate new song position in seconds.
    ///
    /// ### Remarks
    /// If seconds is out of range, the position gets set to song start or end respectively.
    pub fn set_position_seconds(&mut self, seconds: c_double) -> c_double {
        // Never fails; clamps to the beginning or end of the song if out of range.
        unsafe {
            openmpt_sys::openmpt_module_set_position_seconds(self.inner, seconds)
        }
    }
    /// Set approximate current song position.
    ///
    /// ### Parameters
    /// * `order` : Pattern order number to seek to.
    /// * `row` : Pattern row number to seek to.
    ///
    /// ### Returns
    /// Approximate new song position in seconds.
    ///
    /// ### Remarks
    /// If order or row are out of range, the position is not modified and the current position is returned.
    pub fn set_position_order_row(&mut self, order: i32, row: i32) -> c_double {
        // Returns current position on failure
        unsafe {
            openmpt_sys::openmpt_module_set_position_order_row(self.inner, order, row)
        }
    }
    /// Get current song position.
    ///
    /// ### Returns
    /// Current song position in seconds.
    pub fn get_position_seconds(&mut self) -> c_double {
        unsafe {
            openmpt_sys::openmpt_module_get_position_seconds(self.inner)
        }
    }
    /// Get the current order.
    ///
    /// ### Returns
    /// The current order at which the module is being played back.
    pub fn get_current_order(&mut self) -> i32 {
        unsafe {
            openmpt_sys::openmpt_module_get_current_order(self.inner)
        }
    }
    /// Get the current pattern.
    ///
    /// ### Returns
    /// The current pattern that is being played.
    pub fn get_current_pattern(&mut self) -> i32 {
        unsafe {
            openmpt_sys::openmpt_module_get_current_pattern(self.inner)
        }
    }
    /// Get the current row.
    ///
    /// ### Returns
    /// The current row at which the current pattern is being played.
    pub fn get_current_row(&mut self) -> i32 {
        unsafe {
            openmpt_sys::openmpt_module_get_current_row(self.inner)
        }
    }
    /// Get the current speed.
    ///
    /// ### Returns
    /// The current speed in ticks per row.
    pub fn get_current_speed(&mut self) -> i32 {
        unsafe {
            openmpt_sys::openmpt_module_get_current_speed(self.inner)
        }
    }
    /// Get the current tempo.
    ///
    /// ### Returns
    /// The current tempo in tracker units. The exact meaning of this value depends on the tempo mode being used.
    pub fn get_current_tempo(&mut self) -> i32 {
        unsafe {
            openmpt_sys::openmpt_module_get_current_tempo(self.inner)
        }
    }
    /// Get the current amount of playing channels.
    ///
    /// ### Returns
    /// The amount of sample channels that are currently being rendered.
    pub fn get_current_playing_channels(&mut self) -> i32 {
        unsafe {
            openmpt_sys::openmpt_module_get_current_playing_channels(self.inner)
        }
    }
    /// Get the approximate song duration.
    ///
    /// ### Returns
    /// Approximate duration of current sub-song in seconds.
    pub fn get_duration_seconds(&mut self) -> c_double {
        // Depends on the current subsong
        unsafe {
            openmpt_sys::openmpt_module_get_duration_seconds(self.inner)
        }
    }
    /// Get an approximate indication of the channel volume.
    ///
    /// ### Parameters
    /// * `channel_num` : The channel whose volume should be retrieved.
    ///
    /// ### Returns
    /// The approximate channel volume.
    ///
    /// ### Remarks
    /// The returned value is solely based on the note velocity and
    /// does not take the actual waveform of the playing sample into account.
    pub fn get_current_channel_vu_mono(&mut self, channel_num: i32) -> c_float {
        unsafe {
            openmpt_sys::openmpt_module_get_current_channel_vu_mono(self.inner, channel_num)
        }
    }
    /// Get an approximate indication of the channel volume on the front-left speaker.
    ///
    /// ### Parameters
    /// * `channel_num` : The channel whose volume should be retrieved.
    ///
    /// ### Returns
    /// The approximate channel volume.
    ///
    /// ### Remarks
    /// The returned value is solely based on the note velocity and
    /// does not take the actual waveform of the playing sample into account.
    pub fn get_current_channel_vu_left(&mut self, channel_num: i32) -> c_float {
        unsafe {
            openmpt_sys::openmpt_module_get_current_channel_vu_left(self.inner, channel_num)
        }
    }
    /// Get an approximate indication of the channel volume on the front-right speaker.
    ///
    /// ### Parameters
    /// * `channel_num` : The channel whose volume should be retrieved.
    ///
    /// ### Returns
    /// The approximate channel volume.
    ///
    /// ### Remarks
    /// The returned value is solely based on the note velocity and
    /// does not take the actual waveform of the playing sample into account.
    pub fn get_current_channel_vu_right(&mut self, channel_num: i32) -> c_float {
        unsafe {
            openmpt_sys::openmpt_module_get_current_channel_vu_right(self.inner, channel_num)
        }
    }
    /// Get an approximate indication of the channel volume on the rear-left speaker.
    ///
    /// ### Parameters
    /// * `channel_num` : The channel whose volume should be retrieved.
    ///
    /// ### Returns
    /// The approximate channel volume.
    ///
    /// ### Remarks
    /// The returned value is solely based on the note velocity and
    /// does not take the actual waveform of the playing sample into account.
    pub fn get_current_channel_vu_rear_left(&mut self, channel_num: i32) -> c_float {
        unsafe {
            openmpt_sys::openmpt_module_get_current_channel_vu_rear_left(self.inner, channel_num)
        }
    }
    /// Get an approximate indication of the channel volume on the rear-right speaker.
    ///
    /// ### Parameters
    /// * `channel_num` : The channel whose volume should be retrieved.
    ///
    /// ### Returns
    /// The approximate channel volume.
    ///
    /// ### Remarks
    /// The returned value is solely based on the note velocity and
    /// does not take the actual waveform of the playing sample into account.
    pub fn get_current_channel_vu_rear_right(&mut self, channel_num: i32) -> c_float {
        unsafe {
            openmpt_sys::openmpt_module_get_current_channel_vu_rear_right(self.inner, channel_num)
        }
    }
}
// Tests
// #[test]
// fn unatco_can_change_subsong() {
// let mut module = test_helper::load_file_as_module("UNATCO.it").unwrap();
// let subsongs = module.get_subsongs();
// assert_eq!(subsongs.len(), 5); // Main, Game over, Dialogue /w intro, Combat, Dialogue loop
// for song in subsongs {
// assert!(module.select_subsong(&song));
// }
// } |
// Generated-style (svd2rust) accessor types for register _3_ISC.
#[doc = "Reader of register _3_ISC"]
pub type R = crate::R<u32, super::_3_ISC>;
#[doc = "Writer for register _3_ISC"]
pub type W = crate::W<u32, super::_3_ISC>;
#[doc = "Register _3_ISC `reset()`'s with value 0"]
impl crate::ResetValue for super::_3_ISC {
    type Type = u32;
    // Reset state: all bits clear.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// Per-field reader aliases and write proxies. Every field below follows the
// same generated pattern: `bit()` clears the field's bit position in the
// 32-bit register, then ORs in the new value shifted into place.
#[doc = "Reader of field `INTCNTZERO`"]
pub type INTCNTZERO_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INTCNTZERO`"]
pub struct INTCNTZERO_W<'a> {
    w: &'a mut W,
}
impl<'a> INTCNTZERO_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 0.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `INTCNTLOAD`"]
pub type INTCNTLOAD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INTCNTLOAD`"]
pub struct INTCNTLOAD_W<'a> {
    w: &'a mut W,
}
impl<'a> INTCNTLOAD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 1.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `INTCMPAU`"]
pub type INTCMPAU_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INTCMPAU`"]
pub struct INTCMPAU_W<'a> {
    w: &'a mut W,
}
impl<'a> INTCMPAU_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 2.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `INTCMPAD`"]
pub type INTCMPAD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INTCMPAD`"]
pub struct INTCMPAD_W<'a> {
    w: &'a mut W,
}
impl<'a> INTCMPAD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 3.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `INTCMPBU`"]
pub type INTCMPBU_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INTCMPBU`"]
pub struct INTCMPBU_W<'a> {
    w: &'a mut W,
}
impl<'a> INTCMPBU_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 4.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `INTCMPBD`"]
pub type INTCMPBD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INTCMPBD`"]
pub struct INTCMPBD_W<'a> {
    w: &'a mut W,
}
impl<'a> INTCMPBD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 5.
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
impl R {
    // Read-side accessors: each extracts a single status bit of _3_ISC.
    #[doc = "Bit 0 - Counter=0 Interrupt"]
    #[inline(always)]
    pub fn intcntzero(&self) -> INTCNTZERO_R {
        INTCNTZERO_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Counter=Load Interrupt"]
    #[inline(always)]
    pub fn intcntload(&self) -> INTCNTLOAD_R {
        INTCNTLOAD_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Comparator A Up Interrupt"]
    #[inline(always)]
    pub fn intcmpau(&self) -> INTCMPAU_R {
        INTCMPAU_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Comparator A Down Interrupt"]
    #[inline(always)]
    pub fn intcmpad(&self) -> INTCMPAD_R {
        INTCMPAD_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Comparator B Up Interrupt"]
    #[inline(always)]
    pub fn intcmpbu(&self) -> INTCMPBU_R {
        INTCMPBU_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Comparator B Down Interrupt"]
    #[inline(always)]
    pub fn intcmpbd(&self) -> INTCMPBD_R {
        INTCMPBD_R::new(((self.bits >> 5) & 0x01) != 0)
    }
}
impl W {
    // Write-side accessors: each returns a proxy targeting one bit of _3_ISC.
    #[doc = "Bit 0 - Counter=0 Interrupt"]
    #[inline(always)]
    pub fn intcntzero(&mut self) -> INTCNTZERO_W {
        INTCNTZERO_W { w: self }
    }
    #[doc = "Bit 1 - Counter=Load Interrupt"]
    #[inline(always)]
    pub fn intcntload(&mut self) -> INTCNTLOAD_W {
        INTCNTLOAD_W { w: self }
    }
    #[doc = "Bit 2 - Comparator A Up Interrupt"]
    #[inline(always)]
    pub fn intcmpau(&mut self) -> INTCMPAU_W {
        INTCMPAU_W { w: self }
    }
    #[doc = "Bit 3 - Comparator A Down Interrupt"]
    #[inline(always)]
    pub fn intcmpad(&mut self) -> INTCMPAD_W {
        INTCMPAD_W { w: self }
    }
    #[doc = "Bit 4 - Comparator B Up Interrupt"]
    #[inline(always)]
    pub fn intcmpbu(&mut self) -> INTCMPBU_W {
        INTCMPBU_W { w: self }
    }
    #[doc = "Bit 5 - Comparator B Down Interrupt"]
    #[inline(always)]
    pub fn intcmpbd(&mut self) -> INTCMPBD_W {
        INTCMPBD_W { w: self }
    }
}
|
// Intentionally empty entry point: this file is a compile-test fixture; the
// diagnostics it is expected to produce are listed in the `end-msg` comments
// below.
fn main() {
}
// end-msg: WARN crate `SNAKE` should have a snake case
// end-msg: NOTE #[warn(non_snake_case)] on by default
|
extern crate itertools;
use itertools::Itertools;
#[macro_use]
extern crate failure_derive;
extern crate failure;
use failure::Error;
extern crate serde;
#[macro_use]
extern crate lazy_static;
extern crate toml;
use swayipc::reply::{Node, NodeType, WindowChange, WindowEvent, WorkspaceChange, WorkspaceEvent};
use swayipc::Connection;
use std::collections::HashMap as Map;
pub mod config;
pub mod icons;
/// Runtime configuration for workspace naming: icon glyphs, class-name
/// aliases, general string settings and boolean options.
pub struct Config {
    // Window class name -> icon glyph shown before the class name.
    pub icons: Map<String, char>,
    // Window class name -> display name used instead of the raw class.
    pub aliases: Map<String, String>,
    // Free-form string settings; keys read elsewhere in this module:
    // "separator", "default_icon".
    pub general: Map<String, String>,
    // Boolean flags; keys read elsewhere in this module:
    // "no_names", "remove_duplicates".
    pub options: Map<String, bool>,
}
impl Default for Config {
    /// Baseline configuration built from the module-level constants
    /// (presumably empty maps, judging by `NONE`/`EMPTY_*` — defined in
    /// `icons`/`config`).
    fn default() -> Self {
        Config {
            icons: icons::NONE.clone(),
            aliases: config::EMPTY_MAP.clone(),
            general: config::EMPTY_MAP.clone(),
            options: config::EMPTY_OPT_MAP.clone(),
        }
    }
}
/// Errors raised while resolving display names from the sway tree.
#[derive(Debug, Fail)]
enum LookupError {
    // The node had neither an `app_id` nor X11 `window_properties.class`;
    // the payload is the node's Debug dump.
    #[fail(
        display = "Failed to get app_id or window_properties for node: {:#?}",
        _0
    )]
    MissingInformation(String),
    // The workspace node had no `name`; boxed to keep the variant small.
    #[fail(display = "Failed to get name for workspace: {:#?}", _0)]
    WorkspaceName(Box<Node>),
}
/// Look up a boolean option, treating a missing key as `false`.
fn get_option(config: &Config, key: &str) -> bool {
    config.options.get(key).cloned().unwrap_or(false)
}
fn get_class(node: &Node, config: &Config) -> Result<String, LookupError> {
let name = {
match &node.app_id {
Some(id) => Some(id.to_owned()),
None => node
.window_properties
.as_ref()
.and_then(|p| p.class.as_ref())
.map(|p| p.to_owned()),
}
};
if let Some(class) = name {
let class_display_name = match config.aliases.get(&class) {
Some(alias) => alias,
None => &class,
};
let no_names = get_option(config, "no_names");
Ok(match config.icons.get(&class) {
Some(icon) => {
if no_names {
format!("{}", icon)
} else {
format!("{} {}", icon, class_display_name)
}
}
None => match config.general.get("default_icon") {
Some(default_icon) => {
if no_names {
default_icon.to_string()
} else {
format!("{} {}", default_icon, class_display_name)
}
}
None => {
class_display_name.to_string()
}
},
})
} else {
Err(LookupError::MissingInformation(format!("{:?}", node)))
}
}
/// Return a collection of workspace nodes (tree -> outputs -> workspaces).
fn get_workspaces(tree: Node) -> Vec<Node> {
    tree.nodes
        .into_iter()
        .flat_map(|output| output.nodes)
        .filter(|container| match container.node_type {
            NodeType::Workspace => true,
            _ => false,
        })
        .collect()
}
/// Walk arbitrarily nested node lists and collect every node that represents
/// an actual window (it has an X11 `window` id or a wayland `app_id`).
fn get_window_nodes(mut nodes: Vec<Vec<&Node>>) -> Vec<&Node> {
    let mut windows = Vec::new();
    // Explicit stack instead of recursion: pop a level, queue its children.
    while let Some(level) = nodes.pop() {
        for node in level {
            nodes.push(node.nodes.iter().collect());
            if node.window.is_some() || node.app_id.is_some() {
                windows.push(node);
            }
        }
    }
    windows
}
/// Return the display strings of all windows in a workspace: tiled windows
/// first, then floating windows. Nodes whose class cannot be resolved are
/// skipped with a message on stderr.
fn get_classes(workspace: &Node, config: &Config) -> Vec<String> {
    let mut window_nodes = get_window_nodes(vec![workspace.nodes.iter().collect()]);
    window_nodes.extend(get_window_nodes(vec![workspace
        .floating_nodes
        .iter()
        .collect()]));
    window_nodes
        .into_iter()
        .filter_map(|node| match get_class(node, config) {
            Ok(class) => Some(class),
            Err(e) => {
                eprintln!("get class error: {}", e);
                None
            }
        })
        .collect()
}
/// Update all workspace names in tree: each workspace is renamed to
/// `"<number> <decorated window classes>"`, and a rename command is issued
/// only when the computed name differs from the current one.
pub fn update_tree(connection: &mut Connection, config: &Config) -> Result<(), Error> {
    let tree = connection.get_tree()?;
    // The separator is loop-invariant; resolve it once instead of per workspace.
    let separator: &str = match config.general.get("separator") {
        Some(s) => s,
        None => " | ",
    };
    for workspace in get_workspaces(tree) {
        let classes = get_classes(&workspace, config);
        let classes = if get_option(config, "remove_duplicates") {
            classes.into_iter().unique().collect()
        } else {
            classes
        };
        let classes = classes.join(separator);
        // Prefix with a space so the name reads "<number> <classes>".
        let classes = if !classes.is_empty() {
            format!(" {}", classes)
        } else {
            classes
        };
        let old: String = workspace
            .name
            .to_owned()
            .ok_or_else(|| LookupError::WorkspaceName(Box::new(workspace)))?;
        // Keep only the leading workspace number; the rest is regenerated.
        let mut new = old.split(' ').next().unwrap().to_owned();
        if !classes.is_empty() {
            new.push_str(&classes);
        }
        if old != new {
            let command = format!("rename workspace \"{}\" to \"{}\"", old, new);
            connection.run_command(&command)?;
        }
    }
    Ok(())
}
/// React to a window event: only changes that can alter a workspace's window
/// list (new, close, move) trigger a rename pass.
pub fn handle_window_event(
    event: &WindowEvent,
    connection: &mut Connection,
    config: &Config,
) -> Result<(), Error> {
    let relevant = match event.change {
        WindowChange::New | WindowChange::Close | WindowChange::Move => true,
        _ => false,
    };
    if !relevant {
        return Ok(());
    }
    update_tree(connection, config)
}
/// React to a workspace event: a rename pass is only needed when a workspace
/// empties or focus moves.
pub fn handle_workspace_event(
    event: &WorkspaceEvent,
    connection: &mut Connection,
    config: &Config,
) -> Result<(), Error> {
    let relevant = match event.change {
        WorkspaceChange::Empty | WorkspaceChange::Focus => true,
        _ => false,
    };
    if !relevant {
        return Ok(());
    }
    update_tree(connection, config)
}
|
use std::path::PathBuf;
/// Build script: reads `package.metadata.cargo-3ds.romfs_dir` from the
/// crate manifest and emits `cargo:rustc-cfg=romfs_exists` when that
/// directory is present.
fn main() {
    // Open Cargo.toml
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    let manifest_path = format!("{manifest_dir}/Cargo.toml");
    let manifest_str = std::fs::read_to_string(&manifest_path)
        .unwrap_or_else(|e| panic!("Could not open {manifest_path}: {e}"));
    let manifest: toml::Value =
        toml::de::from_str(&manifest_str).expect("Could not parse Cargo manifest as TOML");
    // Walk package.metadata.cargo-3ds.romfs_dir, defaulting to "romfs" when
    // any link of the chain is missing or not the expected type.
    let romfs_dir_setting = ["package", "metadata", "cargo-3ds", "romfs_dir"]
        .iter()
        .try_fold(&manifest, |value, key| value.as_table()?.get(*key))
        .and_then(toml::Value::as_str)
        .unwrap_or("romfs");
    let romfs_path = PathBuf::from(format!("{manifest_dir}/{romfs_dir_setting}"));
    // Expose a cfg flag so the crate can conditionally compile RomFS support.
    if romfs_path.exists() {
        println!("cargo:rustc-cfg=romfs_exists");
    }
    println!("cargo:rerun-if-changed={manifest_dir}");
}
|
extern crate copperline;
use copperline::Copperline;
mod rlisp;
use rlisp::{Eval, Parser, Scope, Tokenizer};
/// Interactive REPL for the rlisp interpreter.
fn main() {
    // Line-editor configuration: UTF-8 input with Emacs-style key bindings.
    let cfg = copperline::Config {
        encoding: copperline::Encoding::Utf8,
        mode: copperline::EditMode::Emacs,
    };
    let mut cl = Copperline::new();
    // Read lines until EOF/error; "exit", "quit" or "q" ends the session.
    while let Ok(line) = cl.read_line(">> ", &cfg) {
        match line.as_str() {
            "exit" | "quit" | "q" => break,
            _ => {}
        };
        // tokenize -> parse -> eval; on success the line is recorded in
        // history and the result printed, otherwise the error is printed.
        // NOTE(review): a fresh Scope is created per line, so definitions
        // do not persist across inputs — confirm this is intended.
        line.tokenize()
            .map(|s| s.parse())
            .map(|a| a.eval(Scope::new().boxup()))
            .map(|r| {
                cl.add_history(line);
                println!("{}", r);
            })
            .unwrap_or_else(|e| println!("{}", e));
    }
}
|
use {
super::{Mass, Restitution},
vek::{Aabb, Vec3},
};
/// Physical state of one body participating in a collision. `Option` fields
/// fall back to defaults via the accessors below.
pub struct CollisionBody {
    // World-space position (used to derive the collision normal).
    pub position: Vec3<f32>,
    // Intrinsic velocity; `None` is treated as zero.
    pub velocity: Option<Vec3<f32>>,
    // Mass; see `mass()` for how `None` and the default cap are applied.
    pub mass: Option<f32>,
    // Externally-driven (e.g. player input) velocity; `None` means zero.
    pub input_velocity: Option<Vec3<f32>>,
    // Bounciness factor; `None` falls back to `Restitution::DEFAULT_VALUE`.
    pub restitution: Option<f32>,
    // Axis-aligned bounding box used for penetration resolution.
    pub aabb: Aabb<f32>,
}
impl CollisionBody {
    /// Intrinsic velocity, defaulting to zero when unset.
    pub fn velocity(&self) -> Vec3<f32> {
        self.velocity.unwrap_or_else(Vec3::zero)
    }
    /// Externally-driven velocity, defaulting to zero when unset.
    pub fn input_velocity(&self) -> Vec3<f32> {
        self.input_velocity.unwrap_or_else(Vec3::zero)
    }
    /// Effective mass: missing mass becomes 0.0 and any mass is capped at
    /// `Mass::DEFAULT_VALUE`.
    /// NOTE(review): `.min(...)` makes DEFAULT_VALUE an upper bound while
    /// `None` maps to 0.0 rather than the default — confirm this asymmetry
    /// with `restitution()` is intentional.
    pub fn mass(&self) -> f32 {
        self.mass.unwrap_or(0.0).min(Mass::DEFAULT_VALUE)
    }
    /// Restitution, defaulting to `Restitution::DEFAULT_VALUE` when unset.
    fn restitution(&self) -> f32 {
        self.restitution.unwrap_or(Restitution::DEFAULT_VALUE)
    }
}
impl Default for CollisionBody {
    /// A body at the origin with every optional property unset.
    fn default() -> Self {
        CollisionBody {
            position: Vec3::zero(),
            velocity: None,
            mass: None,
            input_velocity: None,
            restitution: None,
            aabb: Default::default(), // TODO does a 0-size aabb make sense?
        }
    }
}
/// Outcome of `resolve_collision`: post-impulse velocities for both bodies
/// and the direction along which to separate them.
pub struct CollisionResult {
    pub body1_velocity: Vec3<f32>,
    pub body2_velocity: Vec3<f32>,
    // Single-axis push-out vector derived from the minimum AABB penetration.
    pub resolve_direction: Vec3<f32>,
}
/// Resolve a collision between two bodies: apply a restitution-scaled
/// impulse to their velocities and compute an axis-aligned push-out
/// direction from the minimum AABB penetration.
pub fn resolve_collision(body1: CollisionBody, body2: CollisionBody) -> CollisionResult {
    let p1 = body1.position;
    let v1 = body1.velocity();
    let m1 = body1.mass();
    let p2 = body2.position;
    let v2 = body2.velocity();
    let m2 = body2.mass();
    // Relative velocity includes any externally-driven input velocity.
    let relative_velocity = (v2 + body2.input_velocity()) - (v1 + body1.input_velocity());
    let distance = p1 - p2;
    let mut normal = distance.normalized();
    // Coincident centers normalize to non-finite values; fall back to an
    // arbitrary fixed axis.
    if normal.map(|e| !e.is_finite()).reduce_or() {
        normal = Vec3::unit_x();
    }
    // The bouncier of the two bodies wins.
    let restitution = body1.restitution().max(body2.restitution());
    let speed = relative_velocity.dot(normal) * restitution;
    if speed < 0.0 {
        // Bodies are separating along the normal: no change in velocity.
        return CollisionResult {
            body1_velocity: v1,
            body2_velocity: v2,
            resolve_direction: Vec3::zero(),
        };
    }
    let impulse = 2.0 * speed / (m1 + m2);
    // NOTE(review): both velocities are pushed along -normal here; the
    // textbook elastic-impulse formulation adds `impulse * m1 * normal` to
    // v2 (opposite signs), which conserves momentum — confirm the shared
    // sign is intentional.
    let v1 = v1 - impulse * m2 * normal;
    let v2 = v2 - impulse * m1 * normal;
    // Push out only along the axis of minimum AABB penetration.
    let collision_vector = body1.aabb.collision_vector_with_aabb(body2.aabb);
    let min_axis_bits = collision_vector
        .map(|e| e.abs())
        .reduce_partial_min()
        .to_bits();
    let resolve_direction = collision_vector.map(|e| {
        if e.abs().to_bits() == min_axis_bits {
            -e
        } else {
            0.0
        }
    });
    CollisionResult {
        body1_velocity: v1,
        body2_velocity: v2,
        resolve_direction,
    }
}
|
use serde_derive::{Deserialize, Serialize};
use simble::Symbol;
/// Outgoing request bodies; serialized with the variant name under "method"
/// and the fields under "params".
#[derive(Copy, Clone, Debug, Serialize)]
#[serde(tag = "method", content = "params")]
pub enum ServerCommand {
    SubscribeOrderbook { symbol: Symbol },
    GetSymbol { symbol: Symbol },
}
/// Adds a request `id` to a command; the body's fields are flattened inline.
#[derive(Copy, Clone, Debug, Serialize)]
pub struct Envelope<T> {
    #[serde(flatten)]
    pub body: T,
    pub id: u64,
}
/// One orderbook level. Price/size arrive as strings — presumably to
/// preserve decimal precision; confirm against the exchange API docs.
#[derive(Clone, Debug, Deserialize)]
pub struct Order {
    pub price: String,
    pub size: String,
}
/// Full orderbook snapshot for one symbol.
#[derive(Clone, Debug, Deserialize)]
pub struct SnapshotOrderbook {
    pub ask: Vec<Order>,
    pub bid: Vec<Order>,
    pub symbol: Symbol,
}
/// Incremental orderbook update for one symbol.
#[derive(Clone, Debug, Deserialize)]
pub struct UpdateOrderbook {
    pub ask: Vec<Order>,
    pub bid: Vec<Order>,
    pub symbol: Symbol,
    // sequence: usize,
}
/// Server-initiated notifications, dispatched on the "method" field
/// (variant names in camelCase, payload under "params").
#[derive(Clone, Debug, Deserialize)]
#[serde(tag = "method", content = "params")]
#[serde(rename_all = "camelCase")]
pub enum ClientMessage {
    SnapshotOrderbook(SnapshotOrderbook),
    UpdateOrderbook(UpdateOrderbook),
}
/// Error object attached to failed replies.
#[derive(Clone, Debug, Deserialize)]
pub struct ClientError {
    pub message: String,
    pub code: u64,
}
/// Symbol metadata returned for a GetSymbol request (camelCase JSON keys).
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GetSymbol {
    // id: Symbol,
    pub base_currency: Symbol,
    pub quote_currency: Symbol,
    pub quantity_increment: String,
    pub tick_size: String,
    pub take_liquidity_rate: String,
    pub provide_liquidity_rate: String,
    pub fee_currency: Symbol,
}
/// Possible reply payloads; untagged, so the JSON shape picks the variant.
#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
pub enum Reply {
    Bool(bool),
    GetSymbol(GetSymbol),
}
/// Any inbound frame: a notification, a reply to one of our requests, or an
/// error (whose `id` is optional).
#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
pub enum ClientEnvelope {
    Message(ClientMessage),
    Reply { result: Reply, id: u64 },
    Error { error: ClientError, id: Option<u64> },
}
|
/// ```rust,ignore
/// 给定一个正整数,返回它在 Excel 表中相对应的列名称。
///
/// 例如,
///
/// 1 -> A
/// 2 -> B
/// 3 -> C
/// ...
/// 26 -> Z
/// 27 -> AA
/// 28 -> AB
/// ...
///
/// 示例 1:
///
/// 输入: 1
/// 输出: "A"
///
/// 示例 2:
///
/// 输入: 28
/// 输出: "AB"
///
/// 示例 3:
///
/// 输入: 701
/// 输出: "ZY"
///
/// ```
use std::collections::HashMap;
/// Convert a positive column number to its Excel column title
/// (1 -> "A", 26 -> "Z", 27 -> "AA", 701 -> "ZY", ...).
///
/// This is a conversion to bijective base-26: each digit is `(n - 1) % 26`
/// because the system has no zero digit ("Z" is 26, not "A0").
/// Non-positive input yields an empty string instead of panicking.
pub fn convert_to_title(n: i32) -> String {
    let mut n = n;
    let mut letters: Vec<char> = Vec::new();
    while n > 0 {
        // Shift to 0-based so 26 maps to 'Z' rather than rolling over.
        let rem = (n - 1) % 26;
        letters.push((b'A' + rem as u8) as char);
        n = (n - 1) / 26;
    }
    // Digits were produced least-significant first; reverse for display.
    letters.iter().rev().collect()
}
#[cfg(test)]
mod test {
    use super::convert_to_title;
    #[test]
    fn test_convert_to_title() {
        // Single-letter range.
        assert_eq!(convert_to_title(1), "A".to_string());
        assert_eq!(convert_to_title(26), "Z".to_string());
        // Multi-letter carries (bijective base-26 has no zero digit).
        assert_eq!(convert_to_title(27), "AA".to_string());
        assert_eq!(convert_to_title(28), "AB".to_string());
        assert_eq!(convert_to_title(701), "ZY".to_string());
    }
}
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use alloc::sync::Arc;
use alloc::sync::Weak;
use core::ops::Deref;
use alloc::string::ToString;
use core::any::Any;
use spin::Mutex;
use super::super::super::super::kernel::waiter::*;
use super::super::super::super::tcpip::tcpip::*;
use super::super::super::super::qlib::common::*;
use super::super::super::super::qlib::linux_def::*;
use super::super::super::super::qlib::linux::socket::*;
use super::super::super::super::task::*;
//use super::super::super::control::*;
use super::unix::*;
use super::queue::*;
use super::connectioned::*;
// connectionlessEndpoint is a unix endpoint for unix sockets that support operating in
// a connectionless fashion.
//
// Specifically, this means datagram unix sockets not created with
// socketpair(2).
#[derive(Clone)]
pub struct ConnectionLessEndPointWeak(Weak<Mutex<BaseEndpointInternal>>);
impl ConnectionLessEndPointWeak {
    /// Try to recover a strong endpoint handle; returns `None` once the
    /// endpoint has been dropped.
    pub fn Upgrade(&self) -> Option<ConnectionLessEndPoint> {
        let inner = self.0.upgrade()?;
        return Some(ConnectionLessEndPoint(BaseEndpoint(inner)));
    }
}
impl ConnectionLessEndPoint {
    /// Produce a weak handle that does not keep the endpoint alive.
    pub fn Downgrade(&self) -> ConnectionLessEndPointWeak {
        let weak = Arc::downgrade(&(self.0).0);
        return ConnectionLessEndPointWeak(weak);
    }
}
// Thin wrapper over BaseEndpoint identifying a connectionless (datagram)
// unix endpoint.
#[derive(Clone)]
pub struct ConnectionLessEndPoint(BaseEndpoint);
// Deref lets callers use BaseEndpoint methods (e.g. `lock()`) directly.
impl Deref for ConnectionLessEndPoint {
    type Target = BaseEndpoint;
    fn deref(&self) -> &BaseEndpoint {
        &self.0
    }
}
impl ConnectionLessEndPoint {
    /// Create a connectionless endpoint backed by `hostfd`, installing a
    /// queue-based receiver with the initial message limit.
    pub fn New(hostfd: i32) -> Self {
        let bep = BaseEndpoint::NewWithHostfd(hostfd);
        let queue = bep.lock().queue.clone();
        let queueReceiver = QueueReceiver::New(MsgQueue::New(queue, Queue::default(), INITIAL_LIMIT));
        bep.lock().receiver = Some(Arc::new(queueReceiver));
        return Self(bep)
    }
    /// Whether the endpoint currently has a bound path.
    pub fn IsBound(&self) -> bool {
        return self.0.IsBound();
    }
    /// Socket state constant.
    /// NOTE(review): bound maps to SS_UNCONNECTED and connected to
    /// SS_CONNECTING — verify this ordering against the reference (gVisor)
    /// state mapping for connectionless unix sockets.
    pub fn State(&self) -> i32 {
        if self.IsBound() {
            return SS_UNCONNECTED
        } else if self.lock().Connected() {
            return SS_CONNECTING
        }
        return SS_DISCONNECTING;
    }
    /// Number of strong references to the shared endpoint state.
    pub fn RefCount(&self) -> usize {
        return Arc::strong_count(&self.0);
    }
    // BidirectionalConnect implements BoundEndpoint.BidirectionalConnect.
    // Always refuses: datagram endpoints do not accept stream connections.
    pub fn BidirectionalConnect<T: 'static + ConnectingEndpoint>(&self,
        _task: &Task,
        _ce: Arc<T>,
        _returnConnect: impl Fn(Arc<Receiver>, Arc<ConnectedEndpoint>)) -> Result<()> {
        return Err(Error::SysError(SysErr::ECONNREFUSED))
    }
    // UnidirectionalConnect implements BoundEndpoint.UnidirectionalConnect.
    // Succeeds only while a QueueReceiver is installed (i.e. not closed).
    pub fn UnidirectionalConnect(&self) -> Result<UnixConnectedEndpoint> {
        let r = self.lock().receiver.clone();
        let r = match r {
            None => return Err(Error::SysError(SysErr::ECONNREFUSED)),
            Some(r) => r,
        };
        let q = match r.as_any().downcast_ref::<QueueReceiver>() {
            None => return Err(Error::SysError(SysErr::ECONNREFUSED)),
            Some(q) => q.readQueue.clone(),
        };
        return Ok(UnixConnectedEndpoint::New(Arc::new(self.clone()), q))
    }
}
impl Passcred for ConnectionLessEndPoint {
    /// Delegates to the wrapped base endpoint.
    fn Passcred(&self) -> bool {
        self.0.Passcred()
    }
}
impl PartialEndPoint for ConnectionLessEndPoint {
    /// Connectionless unix endpoints are always datagram sockets.
    fn Type(&self) -> i32 {
        SockType::SOCK_DGRAM
    }
    // GetLocalAddress returns the bound path.
    fn GetLocalAddress(&self) -> Result<SockAddrUnix> {
        self.0.GetLocalAddress()
    }
}
impl Endpoint for ConnectionLessEndPoint {
    // Expose the concrete type for downcasting (e.g. by UnidirectionalConnect).
    fn as_any(&self) -> &Any {
        return self
    }
    // Close puts the endpoint in a closed state and frees all resources associated
    // with it.
    //
    // The socket will be a fresh state after a call to close and may be reused.
    // That is, close may be used to "unbind" or "disconnect" the socket in error
    // paths.
    fn Close(&self) {
        let mut r = None;
        {
            let mut e = self.lock();
            if e.Connected() {
                e.receiver.as_ref().unwrap().CloseRecv();
                // Take the receiver out so CloseNotify can run below, after
                // the endpoint lock has been released.
                r = e.receiver.take();
                e.connected = None;
            }
            if e.path.len() != 0 {
                e.path = "".to_string();
            }
        }
        if let Some(r) = r {
            r.CloseNotify();
        }
    }
    // RecvMsg delegates entirely to the base endpoint.
    fn RecvMsg(&self, data: &mut [IoVec], creds: bool, numRights: u64, peek: bool, addr: Option<&mut SockAddrUnix>)
        -> Result<(usize, usize, SCMControlMessages, bool)> {
        return self.0.RecvMsg(data, creds, numRights, peek, addr)
    }
    // SendMsg writes data and a control message to the specified endpoint.
    // This method does not block if the data cannot be written.
    fn SendMsg(&self, data: &[IoVec], c: &SCMControlMessages, to: &Option<BoundEndpoint>) -> Result<usize> {
        let tmp = to.clone();
        let to = match tmp {
            // No explicit destination: send through the base endpoint's
            // connected peer.
            None => return self.0.SendMsg(data, c, to),
            Some(to) => to.clone(),
        };
        // Datagram send to an explicit address: connect one-way, send, notify.
        let connected = match to.UnidirectionalConnect() {
            Err(_) => return Err(Error::SysError(SysErr::EINVAL)),
            Ok(c) => c,
        };
        let (n, notify) = connected.Send(data, c, &SockAddrUnix::New(&self.lock().path))?;
        if notify {
            connected.SendNotify();
        }
        return Ok(n)
    }
    // Connect attempts to connect directly to server.
    fn Connect(&self, _task: &Task, server: &BoundEndpoint) -> Result<()> {
        let connected = server.UnidirectionalConnect()?;
        self.lock().connected = Some(Arc::new(connected));
        return Ok(())
    }
    // Shutdown delegates to the base endpoint.
    fn Shutdown(&self, flags: ShutdownFlags) -> Result<()> {
        return self.0.Shutdown(flags)
    }
    // Listen starts listening on the connection.
    // Not supported for datagram sockets.
    fn Listen(&self, _: i32) -> Result<()> {
        return Err(Error::SysError(SysErr::EOPNOTSUPP))
    }
    // Accept accepts a new connection.
    // Not supported for datagram sockets.
    fn Accept(&self) -> Result<ConnectionedEndPoint> {
        return Err(Error::SysError(SysErr::EOPNOTSUPP))
    }
    // Bind binds the connection.
    //
    // For Unix endpoints, this _only sets the address associated with the socket_.
    // Work associated with sockets in the filesystem or finding those sockets must
    // be done by a higher level.
    //
    // Bind will fail only if the socket is connected, bound or the passed address
    // is invalid (the empty string).
    // NOTE(review): the code below rejects an already-bound socket but does
    // not reject an empty `addr.Path`, contrary to the comment — confirm.
    fn Bind(&self, addr: &SockAddrUnix) -> Result<()> {
        let mut e = self.lock();
        if e.path.len() != 0 {
            return Err(Error::SysError(SysErr::EINVAL))
        }
        e.path = addr.Path.clone();
        return Ok(())
    }
    // Remaining accessors delegate to the base endpoint.
    fn GetRemoteAddress(&self) -> Result<SockAddrUnix> {
        return self.0.GetRemoteAddress();
    }
    fn SetSockOpt(&self, opt: &SockOpt) -> Result<()> {
        return self.0.SetSockOpt(opt);
    }
    fn GetSockOpt(&self, opt: &mut SockOpt) -> Result<()> {
        return self.0.GetSockOpt(opt);
    }
}
impl ConnectedPasscred for ConnectionLessEndPoint {
    /// Delegates to the wrapped base endpoint.
    fn ConnectedPasscred(&self) -> bool {
        self.0.ConnectedPasscred()
    }
}
impl Waitable for ConnectionLessEndPoint {
    /// Report which of the requested events are currently ready.
    fn Readiness(&self, _task: &Task, mask: EventMask) -> EventMask {
        let e = self.lock();
        let mut ready = 0;
        // Readable whenever the receive queue reports data available.
        // NOTE(review): `receiver` is unwrapped unconditionally here while
        // Close() takes it; confirm Readiness cannot be called after Close.
        if mask & EVENT_IN != 0 && e.receiver.as_ref().unwrap().Readable() {
            ready |= EVENT_IN;
        }
        if e.Connected() {
            if mask & EVENT_OUT != 0 && e.connected.as_ref().unwrap().Writable() {
                ready |= EVENT_OUT;
            }
        }
        return ready;
    }
    /// Register a wait entry; delegates to the base endpoint.
    fn EventRegister(&self, task: &Task, e: &WaitEntry, mask: EventMask) {
        self.0.EventRegister(task, e, mask)
    }
    /// Unregister a wait entry; delegates to the base endpoint.
    fn EventUnregister(&self, task: &Task, e: &WaitEntry) {
        self.0.EventUnregister(task, e)
    }
}
|
use float::Float;
use int::Int;
// Expand to an early `return` of a signed infinity of type `$fty`:
// sign from `$sign`, exponent saturated at `exponent_max`, zero mantissa.
macro_rules! fp_overflow {
    (infinity, $fty:ty, $sign: expr) => {
        return {
            <$fty as Float>::from_parts(
                $sign,
                <$fty as Float>::exponent_max() as <$fty as Float>::Int,
                0 as <$fty as Float>::Int)
        }
    }
}
// Generate an integer-to-float conversion intrinsic `$intrinsic` from
// integer type `$ity` to float type `$fty`, with rounding based on extra
// low-order "work" bits carried through the shifts.
macro_rules! fp_convert {
    ($intrinsic:ident: $ity:ty, $fty:ty) => {
        pub extern "C" fn $intrinsic(i: $ity, debug: bool) -> $fty {
            // Number of extra rounding bits kept below the significand.
            let work_bits = 3;
            // Rounding threshold within the work bits.
            let work_round = 1 << (work_bits - 1);
            let work_mask = (1 << (work_bits + 1)) - 1;
            let exponent_bias = <$fty>::exponent_bias();
            let exponent_max = <$fty>::exponent_max();
            let significand_bits = <$fty>::significand_bits();
            // Significand width including the work bits and the implicit bit.
            let significand_wbits = significand_bits + work_bits + 1;
            let integer_bits = <$fty>::bits();
            if i == 0 {
                return <$fty>::from_parts(false,0,0)
            }
            // work register.
            let (sign, i) = i.init_float();
            let mut wr = i as <$fty as Float>::Int;
            let payload_len = integer_bits - wr.leading_zeros();
            let mut exp = exponent_bias + payload_len - 1;
            if exp >= exponent_max {
                // overflow to infinity
                fp_overflow!(infinity, $fty, sign);
            }
            if payload_len < significand_wbits {
                // Value fits: left-align it under the working significand.
                let left_shift = significand_wbits - payload_len;
                wr = wr.wrapping_shl(left_shift);
            } else {
                let right_shift = payload_len - significand_wbits; // this is also the number of unused bits from the i
                let has_spare_bits = (if right_shift == 0 {
                    0
                } else {
                    wr.wrapping_shl(integer_bits - right_shift)
                } != 0) as <$fty as Float>::Int;
                // shift and if there is any dropped bit to 1, raise the least significant bit.
                wr = (wr >> right_shift) | has_spare_bits;
            }
            // Keep only significand plus work bits, then round up when the
            // work bits exceed the halfway point.
            wr &= (<$fty>::significand_mask() << work_bits) | work_mask;
            if (wr & work_mask) > work_round {
                wr += work_round;
            }
            // Rounding may have carried into a new leading bit: bump the
            // exponent and re-check for overflow.
            if wr >= (1<< (significand_wbits - 1)) {
                exp += 1;
                if exp >= exponent_max {
                    fp_overflow!(infinity, $fty, sign);
                }
            }
            if debug { println!("woops") }
            // Drop the work bits to obtain the final fraction.
            let frac = wr >> work_bits;
            <$fty>::from_parts(sign, exp as <$fty as Float>::Int, frac)
        }
    }
}
// Instantiate the int→float conversion intrinsics for the signed and
// unsigned source widths that libgcc/compiler-rt define.
fp_convert!(__floatsisf: i32, f32);
fp_convert!(__floatsidf: i32, f64);
fp_convert!(__floatdidf: i64, f64);
fp_convert!(__floatunsisf: u32, f32);
fp_convert!(__floatunsidf: u32, f64);
fp_convert!(__floatundidf: u64, f64);
// Generates a float→integer conversion intrinsic ("fix" in libgcc parlance).
// Semantics match C casts / compiler-rt: truncate toward zero, return 0 for
// |f| < 1.0, and saturate at the destination type's min/max when out of range.
macro_rules! fp_fix {
    ($intrinsic:ident: $fty:ty, $ity:ty) => {
        pub extern "C" fn $intrinsic(f: $fty, debug: bool) -> $ity {
            let significand_bits = <$fty>::significand_bits() as <$fty as Float>::Int;
            let exponent_bias = <$fty>::exponent_bias() as <$fty as Float>::Int;
            let dst_bits = <$ity>::bits() as <$fty as Float>::Int;
            let wr = f.repr();
            if debug { println!("wr={:x} {}", wr, f); }
            // Result multiplier derived from the IEEE sign bit.
            let sign: $ity = if (wr & <$fty>::sign_mask()) != 0 {
                -1
            } else {
                1
            };
            let mut exponent = (wr & <$fty>::exponent_mask()) >> <$fty>::significand_bits();
            // Re-attach the implicit leading 1 of a normal significand.
            let significand = wr & <$fty>::significand_mask() | <$fty>::implicit_bit();
            if debug { println!("{} {:b}", exponent, significand); }
            // |f| < 1.0 truncates to zero.
            if exponent < exponent_bias {
                return 0;
            }
            exponent -= exponent_bias;
            if debug { println!("{}", exponent); }
            // Magnitude too large for the destination: saturate.
            if exponent >= dst_bits {
                return if sign == -1 {
                    <$ity>::min_value()
                } else {
                    <$ity>::max_value()
                }
            }
            if exponent < significand_bits {
                let rshift = significand_bits - exponent;
                if debug { println!("{:b}>>{}", significand >> rshift, rshift); }
                // BUG FIX: a stray `+ 1` was appended here, making every
                // conversion off by one (e.g. __fixsfsi(1.0) returned 2).
                sign * (significand >> rshift) as $ity
            } else {
                let lshift = exponent - significand_bits;
                if debug { println!("{:b}<<{}", significand << lshift, lshift); }
                // Same stray `+ 1` removed on the wide-exponent path.
                sign * (significand << lshift) as $ity
            }
        }
    }
}
// Instantiate the float→int truncating conversions for both float widths.
fp_fix!(__fixsfsi: f32, i32);
fp_fix!(__fixsfdi: f32, i64);
fp_fix!(__fixdfsi: f64, i32);
fp_fix!(__fixdfdi: f64, i64);
// NOTE(cfg) for some reason, on arm*-unknown-linux-gnueabihf, our implementation doesn't
// match the output of its gcc_s or compiler-rt counterpart. Until we investigate further, we'll
// just avoid testing against them on those targets. Do note that our implementation gives the
// correct answer; gcc_s and compiler-rt are incorrect in this case.
//
#[cfg(all(test, not(arm_linux)))]
mod tests {
    use qc::{I32, U32, I64, U64, F32, F64};
    // Quickcheck-style comparison of each generated intrinsic against the
    // system (gcc_s / compiler-rt) implementation; see the `check!` macro.
    check! {
        fn __floatsisf(f: extern fn(i32) -> f32,
                       a: I32)
                       -> Option<F32> {
            Some(F32(f(a.0)))
        }
        fn __floatsidf(f: extern fn(i32) -> f64,
                       a: I32)
                       -> Option<F64> {
            Some(F64(f(a.0)))
        }
        fn __floatdidf(f: extern fn(i64) -> f64,
                       a: I64)
                       -> Option<F64> {
            Some(F64(f(a.0)))
        }
        fn __floatunsisf(f: extern fn(u32) -> f32,
                         a: U32)
                         -> Option<F32> {
            Some(F32(f(a.0)))
        }
        fn __floatunsidf(f: extern fn(u32) -> f64,
                         a: U32)
                         -> Option<F64> {
            Some(F64(f(a.0)))
        }
        fn __floatundidf(f: extern fn(u64) -> f64,
                         a: U64)
                         -> Option<F64> {
            Some(F64(f(a.0)))
        }
        fn __fixsfsi(f: extern fn(f32) -> i32,
                     a: F32)
                     -> Option<I32> {
            Some(I32(f(a.0)))
        }
    }
}
|
// Copyright 2020-2021, The Tremor Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![cfg(not(tarpaulin_include))]
use crate::source::prelude::*;
use std::io::{BufRead as StdBufRead, BufReader, Read};
use std::time::Duration;
use tremor_common::file;
use xz2::read::XzDecoder;
#[derive(Deserialize, Debug, Clone)]
pub struct Config {
    /// source file to read data from, it will be iterated over repeatedly,
    /// can be xz compressed
    pub source: String,
    /// Interval in nanoseconds for coordinated emission testing
    pub interval: Option<u64>,
    /// Number of iterations to stop after
    pub iters: Option<u64>,
    /// If true, each line of the source file is base64-decoded before
    /// being emitted. Defaults to false.
    #[serde(default = "Default::default")]
    pub base64: bool,
}
impl ConfigImpl for Config {}
/// The blaster onramp: replays a (possibly xz-compressed) source file line
/// by line, cycling back to the start, at the configured interval.
#[derive(Clone)]
pub struct Blaster {
    pub config: Config,
    // URL identifying this onramp instance.
    onramp_id: TremorUrl,
    // Raw (decompressed) file contents; split into lines in `init`.
    data: Vec<u8>,
    // Cycling accumulator over the decoded lines.
    acc: Acc,
    // Origin attached to every emitted event.
    origin_uri: EventOriginUri,
}
impl std::fmt::Debug for Blaster {
    /// Deliberately terse: the payload and config are omitted from output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Blaster")
    }
}
impl onramp::Impl for Blaster {
    /// Builds a Blaster from its YAML config: reads the whole source file
    /// into memory, transparently decompressing `.xz` files.
    fn from_config(id: &TremorUrl, config: &Option<YamlValue>) -> Result<Box<dyn Onramp>> {
        if let Some(config) = config {
            let config: Config = Config::new(config)?;
            let mut source_data_file = file::open(&config.source)?;
            let mut data = vec![];
            let ext = file::extension(&config.source);
            // `.xz` files are decompressed up-front; everything else is
            // read verbatim.
            if ext == Some("xz") {
                XzDecoder::new(source_data_file).read_to_end(&mut data)?;
            } else {
                source_data_file.read_to_end(&mut data)?;
            };
            let origin_uri = EventOriginUri {
                // The real uid is filled in later by `start`.
                uid: 0,
                scheme: "tremor-blaster".to_string(),
                host: hostname(),
                port: None,
                path: vec![config.source.clone()],
            };
            Ok(Box::new(Self {
                config,
                data,
                acc: Acc::default(),
                origin_uri,
                onramp_id: id.clone(),
            }))
        } else {
            Err("Missing config for blaster onramp".into())
        }
    }
}
/// Cycling accumulator over the pre-split input lines.
#[derive(Clone, Default)]
struct Acc {
    // Decoded payload lines, iterated over endlessly.
    elements: Vec<Vec<u8>>,
    // Number of elements handed out so far; also the iteration counter.
    count: usize,
}
impl Acc {
    /// Returns the next element, wrapping to the start after the last one.
    ///
    /// Panics (mod-by-zero) if `elements` is empty — same behavior as the
    /// previous implementation.
    fn next(&mut self) -> Vec<u8> {
        // `count % len` is always in bounds for a non-empty vector, so safe
        // indexing costs nothing here; the previous `unsafe get_unchecked`
        // bought no speed worth the risk.
        let next = self.elements[self.count % self.elements.len()].clone();
        self.count += 1;
        next
    }
}
#[async_trait::async_trait()]
impl Source for Blaster {
    fn id(&self) -> &TremorUrl {
        &self.onramp_id
    }
    /// Emits the next line, honoring the configured inter-event interval,
    /// and reports a disconnect once `iters` events have been delivered.
    async fn pull_event(&mut self, _id: u64) -> Result<SourceReply> {
        // TODO better sleep perhaps
        if let Some(ival) = self.config.interval {
            task::sleep(Duration::from_nanos(ival)).await;
        }
        // `acc.count` doubles as the number of events emitted so far.
        if Some(self.acc.count as u64) == self.config.iters {
            return Ok(SourceReply::StateChange(SourceState::Disconnected));
        };
        Ok(SourceReply::Data {
            origin_uri: self.origin_uri.clone(),
            data: self.acc.next(),
            meta: None,
            codec_override: None,
            stream: 0,
        })
    }
    /// Splits the raw file contents into lines (base64-decoding each when
    /// configured) and loads them into the accumulator.
    async fn init(&mut self) -> Result<SourceState> {
        let elements: Result<Vec<Vec<u8>>> =
            StdBufRead::lines(BufReader::new(self.data.as_slice()))
                .map(|e| -> Result<Vec<u8>> {
                    if self.config.base64 {
                        Ok(base64::decode(&e?.as_bytes())?)
                    } else {
                        Ok(e?.as_bytes().to_vec())
                    }
                })
                .collect();
        self.acc.elements = elements?;
        Ok(SourceState::Connected)
    }
}
#[async_trait::async_trait]
impl Onramp for Blaster {
    /// Stamps the origin with the assigned onramp uid and hands a clone of
    /// this source to the source manager.
    async fn start(&mut self, config: OnrampConfig<'_>) -> Result<onramp::Addr> {
        self.origin_uri.uid = config.onramp_uid;
        SourceManager::start(self.clone(), config).await
    }
    fn default_codec(&self) -> &str {
        "json"
    }
}
|
use crate::util::outputbuffer::OutputBuffer;
use serde::export::fmt::Debug;
pub mod gamma;
pub mod group;
pub mod identity;
/// After raytracing, a `PostProcessor` will be applied to the outputbuffer.
/// There are many options. If multiple postprocessor steps are required,
/// you can use a `PostProcessorGroup` which applies other postprocessors in order.
pub trait PostProcessor: Debug {
    /// Consumes `buffer` and returns the processed result.
    fn process(&self, buffer: OutputBuffer) -> OutputBuffer;
}
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use alloc::collections::btree_set::BTreeSet;
use core::ops::Deref;
use alloc::sync::Arc;
use spin::Mutex;
use alloc::string::String;
use super::super::qlib::mem::areaset::*;
use super::super::qlib::range::*;
use super::super::qlib::common::*;
use super::super::qlib::linux_def::*;
use super::super::task::*;
use super::super::kernel::waiter::*;
/// The flavor of a POSIX regional file lock.
#[derive(Clone, Copy)]
pub enum LockType {
    /// ReadLock describes a POSIX regional file lock to be taken
    /// read only. There may be multiple of these locks on a single
    /// file region as long as there is no writer lock on the same
    /// region.
    ReadLock,
    /// WriteLock describes a POSIX regional file lock to be taken
    /// write only. There may be only a single holder of this lock
    /// and no read locks.
    WriteLock,
}
// UniqueId identifies a lock holder; the meaning of the id is up to callers.
type UniqueId = u64;
pub const READ_LOCK: u32 = 0;
pub const WRITE_LOCK: u32 = 1;
// The holders of one locked region: either a set of readers, or
// (exclusively) a single writer.
#[derive(Default)]
pub struct LockInternel {
    pub Readers: BTreeSet<UniqueId>,
    pub Writer: Option<UniqueId>,
}
// Lock is a regional file lock. It consists of either a single writer
// or a set of readers.
//
// A Lock may be upgraded from a read lock to a write lock only if there
// is a single reader and that reader has the same uid as the write lock.
//
// A Lock may be downgraded from a write lock to a read lock only if
// the write lock's uid is the same as the read lock.
//
#[derive(Clone, Default)]
pub struct Lock(Arc<Mutex<LockInternel>>);
// Deref to the inner Arc so callers can call `.lock()` directly.
impl Deref for Lock {
    type Target = Arc<Mutex<LockInternel>>;
    fn deref(&self) -> &Arc<Mutex<LockInternel>> {
        &self.0
    }
}
impl Lock {
    // isHeld returns true if uid is a holder of Lock.
    pub fn IsHeld(&self, uid: UniqueId) -> bool {
        let l = self.lock();
        if l.Writer == Some(uid) {
            return true
        };
        return l.Readers.contains(&uid)
    }
    // lock sets uid as a holder of a typed lock on Lock.
    //
    // Preconditions: canLock is true for the range containing this Lock.
    // Violating the precondition is a kernel bug, hence the panics below.
    pub fn Lock(&self, uid: UniqueId, t: LockType) {
        let mut l = self.lock();
        match t {
            LockType::ReadLock => {
                // If we are already a reader, then this is a no-op.
                if l.Readers.contains(&uid) {
                    return;
                }
                // We cannot downgrade a write lock to a read lock unless the
                // uid is the same.
                if l.Writer.is_some() {
                    if l.Writer != Some(uid) {
                        panic!("lock: cannot downgrade write lock to read lock for uid {}, writer is {:?}", uid, l.Writer)
                    }
                    // Ensure that there is only one reader if upgrading.
                    l.Readers.clear();
                    l.Writer = None;
                }
                l.Readers.insert(uid);
                return;
            }
            LockType::WriteLock => {
                // If we are already the writer, then this is a no-op.
                if l.Writer == Some(uid) {
                    return
                }
                // We can only upgrade a read lock to a write lock if there
                // is only one reader and that reader has the same uid as
                // the write lock.
                let readers = l.Readers.len();
                if readers > 0 {
                    if readers != 1 {
                        panic!("lock: cannot upgrade read lock to write lock for uid {}, too many readers {:?}", uid, l.Readers)
                    }
                    if !l.Readers.contains(&uid) {
                        panic!("lock: cannot upgrade read lock to write lock for uid {}, conflicting reader {:?}", uid, l.Readers)
                    }
                }
                // Ensure that there is only a writer.
                l.Readers.clear();
                l.Writer = Some(uid);
            }
        }
    }
}
/// Builds a fresh `Lock` already held by `uid` with the requested type.
pub fn MakeLock(uid: UniqueId, t: LockType) -> Lock {
    let mut internal = LockInternel::default();
    match t {
        LockType::ReadLock => {
            internal.Readers.insert(uid);
        }
        LockType::WriteLock => internal.Writer = Some(uid),
    }
    Lock(Arc::new(Mutex::new(internal)))
}
impl AreaValue for Lock {
    // Merge allows adjacent segments to coalesce only when their holder
    // sets are identical; otherwise they must stay distinct regions.
    fn Merge(&self, _r1: &Range, _r2: &Range, val2: &Self) -> Option<Self> {
        let v1 = self.lock();
        let v2 = val2.lock();
        // Merge only if the Readers/Writers are identical.
        if v1.Readers.len() != v2.Readers.len() {
            return None;
        }
        for id in v1.Readers.iter() {
            if !v2.Readers.contains(id) {
                return None
            }
        }
        if v1.Writer != v2.Writer {
            return None;
        }
        return Some(val2.clone())
    }
    // Split duplicates the holder set into an independent Lock so the two
    // halves can diverge later.
    fn Split(&self, _r: &Range, _split: u64) -> (Self, Self) {
        // Copy the segment so that split segments don't contain map references
        // to other segments.
        let mut v2 = LockInternel::default();
        let v1 = self.lock();
        for r in v1.Readers.iter() {
            v2.Readers.insert(*r);
        }
        v2.Writer = v1.Writer;
        return (self.clone(), Self(Arc::new(Mutex::new(v2))))
    }
}
pub struct LocksInternal {
    // locks is the set of region locks currently held on an Inode.
    pub locks: AreaSet<Lock>,
    // queue is the queue of waiters that are waiting on a lock.
    pub queue: Queue,
}
impl Default for LocksInternal {
    // An empty lock set covering the whole addressable file range.
    fn default() -> Self {
        return Self {
            locks: AreaSet::New(0, MAX_RANGE),
            queue: Queue::default(),
        }
    }
}
impl LocksInternal {
    // Lock attempts to take a typed lock for uid over range r, filling gaps
    // and splitting/merging segments as needed. Returns false (without any
    // modification) when a conflicting holder exists.
    pub fn Lock(&mut self, uid: UniqueId, t: LockType, r: &Range) -> bool {
        // Don't attempt to insert anything with a range of 0 and treat this
        // as a successful no-op.
        if r.Len() == 0 {
            return true;
        }
        // Do a first-pass check. We *could* hold onto the segments we
        // checked if canLock would return true, but traversing the segment
        // set should be fast and this keeps things simple.
        if !self.CanLock(uid, t, r) {
            return false;
        }
        // Get our starting point.
        let (mut seg, gap) = self.locks.Find(r.Start());
        if gap.Ok() {
            // Fill in the gap and get the next segment to modify.
            seg = self.locks.Insert(&gap, &gap.Range().Intersect(r), MakeLock(uid, t)).NextSeg();
        } else if seg.Range().Start() < r.Start() {
            // Split the overlapping head segment so only the tail inside r
            // is modified below.
            let (_, tmp) = self.locks.Split(&seg, r.Start());
            seg = tmp;
        }
        while seg.Ok() && seg.Range().Start() < r.End() {
            // Split the last one if necessary.
            if seg.Range().End() > r.End() {
                let (tmp, _) = self.locks.SplitUnchecked(&seg, r.End());
                seg = tmp;
            }
            // Set the lock on the segment. This is guaranteed to
            // always be safe, given canLock above.
            let value = seg.Value();
            value.Lock(uid, t);
            // Fill subsequent gaps.
            let gap = seg.NextGap();
            let gr = gap.Range().Intersect(r);
            if gr.Len() > 0 {
                seg = self.locks.Insert(&gap, &gr, MakeLock(uid, t)).NextSeg();
            } else {
                seg = gap.NextSeg()
            }
        }
        return true;
    }
    // unlock is always successful. If uid has no locks held for the range LockRange,
    // unlock is a no-op.
    pub fn Unlock(&mut self, uid: UniqueId, r: &Range) {
        if r.Len() == 0{
            return;
        }
        // Get our starting point.
        let mut seg = self.locks.UpperBoundSeg(r.Start());
        while seg.Ok() && seg.Range().Start() < r.End() {
            // If this segment doesn't have a lock from uid then
            // there is no need to fragment the set with Isolate (below).
            // In this case just move on to the next segment.
            if !seg.Value().IsHeld(uid) {
                seg = seg.NextSeg();
                continue;
            }
            // Ensure that if we need to unlock a sub-segment that
            // we don't unlock/remove that entire segment.
            seg = self.locks.Isolate(&seg, r);
            let value = seg.Value();
            let value = value.lock();
            let mut remove = false;
            if value.Writer == Some(uid) {
                // If we are unlocking a writer, then since there can
                // only ever be one writer and no readers, then this
                // lock should always be removed from the set.
                remove = true;
            } else if value.Readers.contains(&uid) {
                // If uid is the last reader, then just remove the entire
                // segment.
                if value.Readers.len() == 1 {
                    remove = true;
                } else {
                    // Otherwise we need to remove this reader without
                    // affecting any other segment's readers. To do
                    // this, we need to make a copy of the Readers map
                    // and not add this uid.
                    let newValue = Lock::default();
                    {
                        let mut newLock = newValue.lock();
                        for r in value.Readers.iter() {
                            if *r != uid {
                                newLock.Readers.insert(*r);
                            }
                        }
                    }
                    seg.SetValue(newValue)
                }
            }
            if remove {
                seg = self.locks.Remove(&seg).NextSeg();
            } else {
                seg = seg.NextSeg();
            }
        }
    }
    // lockable returns true if check returns true for every Lock in LockRange.
    // Further, check should return true if Lock meets the callers requirements
    // for locking Lock.
    pub fn Lockable(&self, r: &Range, check: &Fn(&Lock) -> bool) -> bool {
        // Get our starting point.
        let mut seg = self.locks.LowerBoundSeg(r.Start());
        while seg.Ok() && seg.Range().Start() < r.End() {
            // Note that we don't care about overruning the end of the
            // last segment because if everything checks out we'll just
            // split the last segment.
            let value = seg.Value();
            if !check(&value) {
                return false;
            }
            // Jump to the next segment, ignoring gaps, for the same
            // reason we ignored the first gap.
            seg = seg.NextSeg();
        }
        // No conflict, we can get a lock for uid over the entire range.
        return true;
    }
    // CanLock checks, without modifying anything, whether uid could take a
    // lock of type t over range r under POSIX upgrade/downgrade rules.
    pub fn CanLock(&self, uid: UniqueId, t: LockType, r: &Range) -> bool {
        match t {
            LockType::ReadLock => {
                return self.Lockable(r, &|value: &Lock| {
                    // If there is no writer, there's no problem adding
                    // another reader.
                    if value.lock().Writer.is_none() {
                        return true;
                    }
                    // If there is a writer, then it must be the same uid
                    // in order to downgrade the lock to a read lock.
                    return *(value.lock().Writer.as_ref().unwrap()) == uid
                })
            }
            LockType::WriteLock => {
                return self.Lockable(r, &|value: &Lock| {
                    // If there is no writer, there's no problem adding
                    // another reader.
                    let value = value.lock();
                    if value.Writer.is_none() {
                        // Then this uid can only take a write lock if
                        // this is a private upgrade, meaning that the
                        // only reader is uid.
                        return value.Readers.len() == 1 && value.Readers.contains(&uid);
                    }
                    // If the uid is already a writer on this region, then
                    // adding a write lock would be a no-op.
                    return value.Writer == Some(uid)
                })
            }
        }
    }
}
// Locks is the shareable, mutex-protected handle to an inode's lock set.
#[derive(Clone, Default)]
pub struct Locks(Arc<Mutex<LocksInternal>>);
impl Deref for Locks {
    type Target = Arc<Mutex<LocksInternal>>;
    fn deref(&self) -> &Arc<Mutex<LocksInternal>> {
        &self.0
    }
}
impl Locks {
    // LockRegion attempts to acquire a typed lock for the uid on a region
    // of a file. Returns true if successful in locking the region. If false
    // is returned, the caller should normally interpret this as "try again later" if
    // accquiring the lock in a non-blocking mode or "interrupted" if in a blocking mode.
    // Blocker is the interface used to provide blocking behavior, passing a nil Blocker
    // will result in non-blocking behavior.
    pub fn LockRegion(&self, task: &Task, uid: UniqueId, t: LockType, r: &Range, block: bool) -> Result<bool> {
        loop {
            let mut l = self.lock();
            // Blocking locks must run in a loop because we'll be woken up whenever an unlock event
            // happens for this lock. We will then attempt to take the lock again and if it fails
            // continue blocking.
            let res = l.Lock(uid, t, r);
            if !res && block {
                // Register for wakeups *before* dropping the lock so an
                // unlock between drop and block can't be missed.
                l.queue.EventRegister(task, &task.blocker.generalEntry, EVENTMASK_ALL);
                core::mem::drop(l);
                defer!(self.lock().queue.EventUnregister(task, &task.blocker.generalEntry));
                match task.blocker.BlockGeneral() {
                    Err(Error::ErrInterrupted) => return Err(Error::SysError(SysErr::ERESTARTSYS)),
                    Err(e) => return Err(e),
                    Ok(()) => (),
                }
                // Try again now that someone has unlocked.
                continue;
            }
            return Ok(res);
        }
    }
    // Print renders the current lock set for debugging.
    pub fn Print(&self) -> String {
        return self.lock().locks.Print();
    }
    // UnlockRegion attempts to release a lock for the uid on a region of a file.
    // This operation is always successful, even if there did not exist a lock on
    // the requested region held by uid in the first place.
    pub fn UnlockRegion(&self, _task: &Task, uid: UniqueId, r: &Range) {
        let mut l = self.lock();
        l.Unlock(uid, r);
        // Now that we've released the lock, we need to wake up any waiters.
        l.queue.Notify(EVENTMASK_ALL)
    }
}
// ComputeRange takes a positive file offset and computes the start of a LockRange
// using start (relative to offset) and the end of the LockRange using length. The
// values of start and length may be negative but the resulting LockRange must
// preserve that LockRange.Start < LockRange.End and LockRange.Start > 0.
pub fn ComputeRange(start: i64, length: i64, offset: i64) -> Result<Range> {
    let mut offset = offset;
    offset += start;
    // fcntl(2): "l_start can be a negative number provided the offset
    // does not lie before the start of the file"
    if offset < 0 {
        return Err(Error::SysError(SysErr::EINVAL))
    }
    // fcntl(2): Specifying 0 for l_len has the special meaning: lock all
    // bytes starting at the location specified by l_whence and l_start
    // through to the end of file, no matter how large the file grows.
    let mut end = MAX_RANGE;
    if length > 0 {
        // fcntl(2): If l_len is positive, then the range to be locked
        // covers bytes l_start up to and including l_start+l_len-1.
        //
        // Since LockRange.End is exclusive we need not -1 from length..
        end = (offset + length) as u64;
    } else if length < 0 {
        // fcntl(2): If l_len is negative, the interval described by
        // lock covers bytes l_start+l_len up to and including l_start-1.
        //
        // Since LockRange.End is exclusive we need not -1 from offset.
        let signedEnd = offset;
        // Add to offset using a negative length (subtract).
        offset += length;
        if offset < 0 {
            return Err(Error::SysError(SysErr::EINVAL))
        }
        if signedEnd < offset {
            return Err(Error::SysError(SysErr::EOVERFLOW))
        }
        // At this point signedEnd cannot be negative,
        // since we asserted that offset is not negative
        // and it is not less than offset.
        end = signedEnd as u64;
    }
    // MAX_RANGE is the sentinel for "to end of file"; otherwise the length
    // is the exclusive end minus the (now non-negative) start.
    let len = if end == MAX_RANGE {
        MAX_RANGE
    } else {
        end - offset as u64
    };
    return Ok(Range::New(offset as u64, len))
}
//! Command `new`
use crate::{
registry::{redep, Manifest, Registry},
result::Result,
};
use etc::{Etc, FileSystem, Write};
use serde::Serialize;
use std::{path::PathBuf, process::Command};
use toml::Serializer;
/// Generate a workspace manifest whose members are all crates (directories
/// containing a `Cargo.toml`) found under `target`.
pub fn workspace(target: &PathBuf, registry: &Registry) -> Result<Manifest> {
    let crates = etc::find_all(target, "Cargo.toml")?;
    let ts = target.to_str().unwrap_or_default();
    let mut mani = Manifest::default();
    let mut members = vec![];
    for ct in crates {
        let ps = ct.to_string_lossy();
        // Skip build artifacts.
        if ps.contains("/target/") {
            continue;
        }
        // Redirect deps
        redep(&ct, registry)?;
        // Register path: strip the `target` prefix and the trailing
        // "/Cargo.toml" (11 chars) to get the member's relative directory.
        // NOTE(review): the `begin + 12` guard presumably rejects paths too
        // short to hold a member dir plus "/Cargo.toml" — confirm constant.
        let begin = ps.find(ts).unwrap_or(0) + ts.len();
        if ps.len() > (begin + 12) {
            members.push(ps[(begin + 1)..ps.len() - 11].to_string())
        }
    }
    mani.workspace.members = members;
    Ok(mani)
}
/// Exec command `new`: scaffolds a node-template project at `target`.
/// Unless `skip` is set, first installs the nightly toolchain and the
/// wasm32 target via `rustup`.
pub fn exec(target: PathBuf, skip: bool) -> Result<()> {
    // Check wasm
    if !skip {
        Command::new("rustup")
            .args(vec!["install", "nightly"])
            .status()?;
        Command::new("rustup")
            .args(vec![
                "target",
                "add",
                "wasm32-unknown-unknown",
                "--toolchain",
                "nightly",
            ])
            .status()?;
    }
    // Fetch registry
    let registry = Registry::new()?;
    let substrate = Etc::from(&registry.0);
    let template = substrate.find("node-template")?;
    // Copy the template tree into the target directory.
    etc::cp_r(template, PathBuf::from(&target))?;
    // Create manifest
    let mani = workspace(&target, &registry)?;
    let mut dst = String::with_capacity(128);
    mani.serialize(Serializer::pretty(&mut dst).pretty_array(true))?;
    Etc::from(&target).open("Cargo.toml")?.write(dst)?;
    println!("Created node-template {:?} succeed!", &target);
    Ok(())
}
|
pub mod obj;
pub mod ply;
|
use std::sync::Arc;
use arrow::datatypes::{DataType, Field, Fields, Schema, SchemaRef};
/// Prepare an arrow Schema for transport over the Arrow Flight protocol
///
/// Converts dictionary types to underlying types due to <https://github.com/apache/arrow-rs/issues/3389>
pub fn prepare_schema_for_flight(schema: SchemaRef) -> SchemaRef {
    let fields: Fields = schema
        .fields()
        .iter()
        .map(|field| {
            // Flatten dictionary-encoded fields to their value type;
            // everything else is shared as-is (cheap Arc bump, no copy).
            if let DataType::Dictionary(_, value_type) = field.data_type() {
                let flattened = Field::new(
                    field.name(),
                    value_type.as_ref().clone(),
                    field.is_nullable(),
                )
                .with_metadata(field.metadata().clone());
                Arc::new(flattened)
            } else {
                Arc::clone(field)
            }
        })
        .collect();
    Arc::new(Schema::new(fields).with_metadata(schema.metadata().clone()))
}
|
use image::GenericImageView;
/// An RGBA bitmap fully decoded into memory.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Image(pub(crate) image::RgbaImage);
impl Image {
    /// Decodes `data` (any format the `image` crate recognizes) and
    /// converts the result to RGBA8.
    pub fn new(data: &[u8]) -> Result<Self, image::ImageError> {
        let decoded = image::load_from_memory(data)?;
        Ok(Image(decoded.to_rgba8()))
    }
    /// Width in pixels.
    pub fn width(&self) -> u32 {
        self.0.width()
    }
    /// Height in pixels.
    pub fn height(&self) -> u32 {
        self.0.height()
    }
    /// Borrows a `w`×`h` rectangular view with top-left corner (`x`, `y`).
    pub fn region(&self, x: u32, y: u32, w: u32, h: u32) -> SubImage {
        SubImage(self.0.view(x, y, w, h))
    }
}
/// A borrowed rectangular view into an [`Image`].
pub struct SubImage<'i>(
    image::SubImage<&'i <image::RgbaImage as image::GenericImageView>::InnerImageView>,
);
impl SubImage<'_> {
    /// Width of the view in pixels.
    pub fn width(&self) -> u32 {
        self.0.width()
    }
    /// Height of the view in pixels.
    pub fn height(&self) -> u32 {
        self.0.height()
    }
}
// Materializes the view into an owned Image (copies the pixels).
impl From<SubImage<'_>> for Image {
    fn from(image: SubImage<'_>) -> Self {
        Image(image.0.to_image())
    }
}
|
pub struct Solution;
impl Solution {
    /// Parses a valid Roman numeral into its integer value.
    ///
    /// Walks the byte slice front to back; the six subtractive pairs
    /// (IV, IX, XL, XC, CD, CM) consume two bytes, everything else one.
    pub fn roman_to_int(s: String) -> i32 {
        let mut total = 0;
        let mut rest: &[u8] = s.as_bytes();
        while let Some((&first, tail)) = rest.split_first() {
            let (add, remaining) = match (first, tail.split_first()) {
                (b'I', Some((&b'V', t))) => (4, t),
                (b'I', Some((&b'X', t))) => (9, t),
                (b'I', _) => (1, tail),
                (b'V', _) => (5, tail),
                (b'X', Some((&b'L', t))) => (40, t),
                (b'X', Some((&b'C', t))) => (90, t),
                (b'X', _) => (10, tail),
                (b'L', _) => (50, tail),
                (b'C', Some((&b'D', t))) => (400, t),
                (b'C', Some((&b'M', t))) => (900, t),
                (b'C', _) => (100, tail),
                (b'D', _) => (500, tail),
                (b'M', _) => (1000, tail),
                // Input is guaranteed to be a Roman numeral.
                _ => unreachable!(),
            };
            total += add;
            rest = remaining;
        }
        total
    }
}
// Examples from LeetCode problem 13 ("Roman to Integer").
#[test]
fn test0013() {
    assert_eq!(Solution::roman_to_int("III".to_string()), 3);
    assert_eq!(Solution::roman_to_int("IV".to_string()), 4);
    assert_eq!(Solution::roman_to_int("IX".to_string()), 9);
    assert_eq!(Solution::roman_to_int("LVIII".to_string()), 58);
    assert_eq!(Solution::roman_to_int("MCMXCIV".to_string()), 1994);
}
|
pub mod server_fixture;
|
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
/// True when the three side lengths form a valid triangle (the two smaller
/// sides together strictly exceed the largest).
fn is_triangle(mut sides: [u16; 3]) -> bool {
    sides.sort_unstable();
    sides[0] + sides[1] > sides[2]
}

/// Part 1: each input line holds the three sides of one candidate triangle.
/// Returns how many are valid.
fn count_row_triangles(input: &str) -> usize {
    input
        .trim()
        .lines()
        .map(|line| {
            let mut sides = [0u16; 3];
            for (slot, tok) in sides.iter_mut().zip(line.split_whitespace()) {
                *slot = tok.parse().expect("side must be a u16");
            }
            sides
        })
        .filter(|&sides| is_triangle(sides))
        .count()
}

/// Part 2: triangles are read vertically — each column of every group of
/// three consecutive lines is one triangle. Returns how many are valid.
fn count_column_triangles(input: &str) -> u16 {
    let rows: Vec<Vec<u16>> = input
        .trim()
        .lines()
        .map(|line| {
            line.split_whitespace()
                .map(|tok| tok.parse().expect("side must be a u16"))
                .collect()
        })
        .collect();
    rows.chunks(3)
        .map(|group| {
            (0..3)
                .filter(|&col| is_triangle([group[0][col], group[1][col], group[2][col]]))
                .count() as u16
        })
        .sum()
}

fn main() {
    let path = Path::new("data/input.txt");
    let mut file = File::open(&path).expect("File read error");
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    // Fixed: the counts were previously stored in variables misleadingly
    // named `not_triangles_count` even though they count VALID triangles;
    // the stateful every-third-newline split is replaced by `chunks(3)`.
    println!("Part 1: {}", count_row_triangles(&s));
    println!("Part 2: {}", count_column_triangles(&s));
}
|
use std::{cmp::Reverse, collections::BTreeSet};
use proconio::{input, marker::Usize1};
// Maintains a partition of the n values into `high` (the k largest, whose
// sum is `ans`) and `low` (the rest). For each update (i, new) the sets are
// rebalanced by at most one swap, then the current sum of `high` is printed.
// Sets store (Reverse(value), index), so ascending iteration visits values
// in DESCENDING order: `iter().next()` is the max, `iter().last()` the min.
fn main() {
    input! {
        n: usize,
        k: usize,
        q: usize,
        xy: [(Usize1, i64); q],
    };
    let mut a = vec![0; n];
    let mut high = BTreeSet::new();
    let mut low = BTreeSet::new();
    // All values start at 0; seeding by index keeps the partition sizes
    // fixed at k and n - k.
    for i in 0..n {
        if i < k {
            high.insert((Reverse(a[i]), i));
        } else {
            low.insert((Reverse(a[i]), i));
        }
    }
    let mut ans = 0;
    for (i, new) in xy {
        let old = a[i];
        if high.contains(&(Reverse(old), i)) {
            // Updated element currently counts toward the answer.
            if let Some((Reverse(low_max), j)) = low.iter().next().copied() {
                if new >= low_max {
                    // Still belongs in `high`; just adjust the sum.
                    high.remove(&(Reverse(old), i));
                    high.insert((Reverse(new), i));
                    ans -= old;
                    ans += new;
                } else {
                    // Dropped below low's max: swap the two elements.
                    high.remove(&(Reverse(old), i));
                    low.remove(&(Reverse(low_max), j));
                    high.insert((Reverse(low_max), j));
                    low.insert((Reverse(new), i));
                    ans -= old;
                    ans += low_max;
                }
            } else {
                // k == n: `low` is empty, nothing to swap with.
                high.remove(&(Reverse(old), i));
                high.insert((Reverse(new), i));
                ans -= old;
                ans += new;
            }
        } else if low.contains(&(Reverse(old), i)) {
            // Updated element currently outside the top k.
            let (Reverse(high_min), j) = high.iter().last().copied().unwrap();
            if new <= high_min {
                // Still belongs in `low`; the answer is unchanged.
                low.remove(&(Reverse(old), i));
                low.insert((Reverse(new), i));
            } else {
                // Rose above high's min: swap the two elements.
                low.remove(&(Reverse(old), i));
                high.remove(&(Reverse(high_min), j));
                low.insert((Reverse(high_min), j));
                high.insert((Reverse(new), i));
                ans -= high_min;
                ans += new;
            }
        } else {
            // Every (a[i], i) pair lives in exactly one of the two sets.
            unreachable!();
        }
        a[i] = new;
        println!("{}", ans);
    }
}
|
use core::any::TypeId;
use core::fmt::{self, Display};
use core::ops::Deref;
use num_bigint::{BigInt, Sign};
use num_traits::ToPrimitive;
use super::Float;
/// BigIntegers are arbitrary-width integers whose size is too large to fit in
/// an immediate/SmallInteger value.
#[derive(Clone, Hash)]
// repr(transparent): layout-identical to the wrapped BigInt.
#[repr(transparent)]
pub struct BigInteger(pub BigInt);
impl BigInteger {
    pub const TYPE_ID: TypeId = TypeId::of::<BigInteger>();
    /// Narrows to `i64`; `None` when the value is out of range.
    #[inline]
    pub fn as_i64(&self) -> Option<i64> {
        self.0.to_i64()
    }
    /// Approximates as `f64`; `None` when not representable.
    #[inline]
    pub fn as_f64(&self) -> Option<f64> {
        self.0.to_f64()
    }
    /// Converts to the runtime `Float` wrapper when representable.
    #[inline]
    pub fn as_float(&self) -> Option<Float> {
        Some(self.0.to_f64()?.into())
    }
    /// True iff the value is strictly below zero (zero has no sign).
    #[inline]
    pub fn is_negative(&self) -> bool {
        matches!(self.0.sign(), Sign::Minus)
    }
    /// True iff the value is zero or greater.
    #[inline]
    pub fn is_positive(&self) -> bool {
        !self.is_negative()
    }
}
// Deref to the wrapped BigInt so num_bigint's API is directly available.
impl Deref for BigInteger {
    type Target = BigInt;
    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Infallible widening conversions from machine integers and BigInt.
impl From<i64> for BigInteger {
    fn from(value: i64) -> Self {
        Self(value.into())
    }
}
impl From<usize> for BigInteger {
    fn from(value: usize) -> Self {
        Self(value.into())
    }
}
impl From<BigInt> for BigInteger {
    #[inline]
    fn from(value: BigInt) -> Self {
        Self(value)
    }
}
impl TryInto<usize> for &BigInteger {
    type Error = ();
    /// Succeeds only when the value fits in both `i64` and `usize`.
    #[inline]
    fn try_into(self) -> Result<usize, Self::Error> {
        match self.as_i64() {
            Some(i) => usize::try_from(i).map_err(|_| ()),
            None => Err(()),
        }
    }
}
impl Eq for BigInteger {}
impl PartialEq for BigInteger {
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}
// A value outside i64 range can never equal an i64.
impl PartialEq<i64> for BigInteger {
    fn eq(&self, y: &i64) -> bool {
        self.as_i64().map_or(false, |x| x == *y)
    }
}
// A value with no finite f64 representation can never equal a Float.
impl PartialEq<Float> for BigInteger {
    fn eq(&self, y: &Float) -> bool {
        self.as_float().map_or(false, |x| y.eq(&x))
    }
}
impl PartialOrd for BigInteger {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialOrd<i64> for BigInteger {
    // NOTE(review): returns None ("incomparable") when the value does not
    // fit in i64, even though a mathematical ordering exists — confirm
    // callers rely on that before changing it to sign-based ordering.
    fn partial_cmp(&self, other: &i64) -> Option<core::cmp::Ordering> {
        let i = self.as_i64()?;
        Some(i.cmp(other))
    }
}
impl PartialOrd<Float> for BigInteger {
    fn partial_cmp(&self, other: &Float) -> Option<core::cmp::Ordering> {
        use core::cmp::Ordering;
        match other.as_f64() {
            // ±infinity is below/above every finite integer.
            x if x.is_infinite() => {
                if x.is_sign_negative() {
                    Some(Ordering::Less)
                } else {
                    Some(Ordering::Greater)
                }
            }
            _ => {
                let too_large = if self.is_negative() {
                    Ordering::Greater
                } else {
                    Ordering::Less
                };
                // NOTE(review): when self doesn't fit in i64, `too_large`
                // is returned un-reversed, unlike the in-range branch below
                // — its orientation (Greater for very negative self) looks
                // inverted; verify against Float's comparison semantics.
                let Some(x) = self.as_i64() else { return Some(too_large); };
                // `other.partial_cmp(&x)` orders other vs x, so reverse it
                // to order self vs other.
                Some(other.partial_cmp(&x)?.reverse())
            }
        }
    }
}
impl Ord for BigInteger {
fn cmp(&self, other: &Self) -> core::cmp::Ordering {
self.0.cmp(&other.0)
}
}
impl fmt::Debug for BigInteger {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", &self.0)
}
}
impl Display for BigInteger {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
// Arithmetic on references: each operator delegates to num_bigint's impls
// on `&BigInt`, so neither operand is consumed or cloned.
impl core::ops::Add for &BigInteger {
    type Output = BigInteger;
    fn add(self, rhs: &BigInteger) -> Self::Output {
        BigInteger(&self.0 + &rhs.0)
    }
}
impl core::ops::Add<i64> for &BigInteger {
    type Output = BigInteger;
    fn add(self, rhs: i64) -> Self::Output {
        BigInteger(&self.0 + rhs)
    }
}
impl core::ops::Sub for &BigInteger {
    type Output = BigInteger;
    fn sub(self, rhs: &BigInteger) -> Self::Output {
        BigInteger(&self.0 - &rhs.0)
    }
}
impl core::ops::Sub<i64> for &BigInteger {
    type Output = BigInteger;
    fn sub(self, rhs: i64) -> Self::Output {
        BigInteger(&self.0 - rhs)
    }
}
impl core::ops::Div for &BigInteger {
    type Output = BigInteger;
    fn div(self, rhs: &BigInteger) -> Self::Output {
        BigInteger(&self.0 / &rhs.0)
    }
}
impl core::ops::Div<i64> for &BigInteger {
    type Output = BigInteger;
    fn div(self, rhs: i64) -> Self::Output {
        BigInteger(&self.0 / rhs)
    }
}
impl core::ops::Rem for &BigInteger {
    type Output = BigInteger;
    fn rem(self, rhs: &BigInteger) -> Self::Output {
        BigInteger(&self.0 % &rhs.0)
    }
}
impl core::ops::Rem<i64> for &BigInteger {
    type Output = BigInteger;
    fn rem(self, rhs: i64) -> Self::Output {
        BigInteger(&self.0 % rhs)
    }
}
impl core::ops::Mul for &BigInteger {
    type Output = BigInteger;
    fn mul(self, rhs: &BigInteger) -> Self::Output {
        BigInteger(&self.0 * &rhs.0)
    }
}
impl core::ops::Mul<i64> for &BigInteger {
    type Output = BigInteger;
    fn mul(self, rhs: i64) -> Self::Output {
        BigInteger(&self.0 * rhs)
    }
}
impl core::ops::Shl<i64> for &BigInteger {
    type Output = BigInteger;
    fn shl(self, rhs: i64) -> Self::Output {
        BigInteger(&self.0 << rhs)
    }
}
impl core::ops::Shr<i64> for &BigInteger {
    type Output = BigInteger;
    fn shr(self, rhs: i64) -> Self::Output {
        BigInteger(&self.0 >> rhs)
    }
}
|
use crate::Registration;
use anyhow::{Context, Result};
use std::collections::HashMap;
use std::fmt::Display;
/// Send the registration descriptor to the host.
/// (Previous doc said "Emit the data back to the host" — that was a
/// copy-paste from `emit`; this function performs one-time registration.)
pub fn register(registration: &Registration) -> Result<()> {
    // Serialize the registration payload to JSON bytes for the host.
    let buffer =
        serde_json::to_vec(registration).context("Could not turn registration to JSON.")?;
    // Box the buffer so it has a stable address for the duration of the call.
    let mut slice = buffer.into_boxed_slice();
    unsafe {
        // SAFETY: `slice` stays alive across the call; the host reads
        // `slice.len()` bytes starting at the pointer. Pointers travel as
        // `u32` — NOTE(review): this presumes a 32-bit (wasm32) address
        // space; confirm the crate only targets wasm32.
        ffi::register(slice.as_mut_ptr() as u32, slice.len() as u32);
    }
    Ok(())
}
/// Emit the data back to the host.
/// The returned `Ok(u32)` is the host's return value — the number of events
/// emitted so far. (The previous doc said `Ok(i64)`, but both the FFI import
/// and this function's signature use `u32`.)
pub fn emit(mut data: impl AsMut<[u8]>) -> Result<u32> {
    let data = data.as_mut();
    // SAFETY: `data` is a live mutable slice for the duration of the call;
    // the host reads `data.len()` bytes from the given linear-memory offset.
    let retval = unsafe { ffi::emit(data.as_mut_ptr() as u32, data.len() as u32) };
    Ok(retval)
}
/// Report an error message to the host.
/// (Previous doc said "Emit the data back to the host" — copy-paste from
/// `emit`; this sends the `Display` rendering of `error` via `ffi::raise`.)
pub fn raise(error: impl Display) -> Result<u32> {
    let mut string = format!("{}", error);
    // SAFETY: `as_mut_vec` is sound here because the bytes are only read
    // through the raw pointer; the String's UTF-8 invariant is never broken.
    let buffer = unsafe { string.as_mut_vec() };
    let parts = buffer.as_mut_slice();
    let retval = unsafe { ffi::raise(parts.as_mut_ptr() as u32, parts.len() as u32) };
    Ok(retval)
}
/// Retrieve the options from the instance context.
pub fn config() -> Result<HashMap<String, serde_json::Value>> {
    // Ask the host how many bytes of config JSON it will write.
    let size = unsafe { ffi::config_size() };
    // Allocate guest memory for the host to fill.
    let ptr = crate::interop::allocate_buffer(size);
    unsafe { ffi::config(ptr as u32, size) };
    // SAFETY: reclaims ownership of the allocation made by `allocate_buffer`.
    // NOTE(review): this is only sound if `allocate_buffer` hands out a
    // leaked `Vec<u8>` allocation of exactly `size` bytes (len == capacity)
    // and nothing else frees it — confirm in `crate::interop`, otherwise
    // this `from_raw_parts` is UB / a double free.
    let buffer = unsafe { Vec::from_raw_parts(ptr as *mut u8, size as usize, size as usize) };
    let config: HashMap<String, serde_json::Value> = serde_json::from_slice(&buffer)?;
    Ok(config)
}
/// Raw imports provided by the WASM host. All pointers are passed as `u32`
/// offsets into the guest's linear memory, with an explicit byte length.
pub mod ffi {
    extern "C" {
        // One-shot setup: hand the host the serialized registration payload.
        pub(super) fn register(ptr: u32, size: u32);
        // Deliver an event payload; returns a host-defined counter
        // (used by `super::emit` as "events emitted so far").
        pub(super) fn emit(ptr: u32, size: u32) -> u32;
        // Report an error message (UTF-8 bytes) to the host.
        pub(super) fn raise(ptr: u32, size: u32) -> u32;
        // Host writes `size` bytes of config JSON starting at `ptr`.
        pub(super) fn config(ptr: u32, size: u32);
        // Number of bytes the next `config` call will write.
        pub(super) fn config_size() -> u32;
    }
}
|
extern crate log;
extern crate rocket;
extern crate rocket_contrib;
use rocket::State;
use rocket_contrib::json::Json;
use std::sync::{mpsc, Mutex, mpsc::Receiver};
use rocket::config::{Config, Environment, LoggingLevel};
#[allow(unused_imports)]
use log::{trace, debug, info, warn, error};
use crate::database::DB;
use crate::utils::consts;
// Default page sizes used when the client omits the `max_count` query param.
const DEFAULT_MAX_RESULT_COUNT: usize = 10;
const DEFAULT_MAX_TRAFFIC_COUNT: usize = 30;
// Landing / liveness route: responds with a static greeting.
#[get("/")]
fn index() -> &'static str {
    "Hello, Cylons !"
}
#[get("/feed/result?<last_id>&<max_count>")]
fn feed_result(
db_read: State<Mutex<DB>>,
last_id: Option<usize>,
max_count: Option<usize>
) -> Json<Vec<(usize, String, String, u8, String, String, u8, u8, String, u8)>> {
let last_id = if let Some(value) = last_id { value } else { 0 };
let max_count = if let Some(value) = max_count { value } else { DEFAULT_MAX_RESULT_COUNT };
// pre-allocate limited memory
let mut ret = Vec::with_capacity(if max_count > 1000 { 1000 } else { max_count });
let db_read = db_read.lock().expect("Failed to acquire lock for 'db_read'");
match db_read.read_only_sql(
&format!("select id,time_str,ip,module_type,module_name,module_info,
risk_type,risk_level,result,false_positive from t_result where id > {} limit {}",
last_id, max_count )) {
Err(err) => error!("DB query error: {}", err),
Ok(mut cursor) => {
while let Some(row) = cursor.next().unwrap() {
ret.push((
row[0].as_integer().unwrap() as usize,
row[1].as_string().unwrap().to_owned(),
row[2].as_string().unwrap().to_owned(),
row[3].as_integer().unwrap() as u8,
row[4].as_string().unwrap().to_owned(),
row[5].as_string().unwrap().to_owned(),
row[6].as_integer().unwrap() as u8,
row[7].as_integer().unwrap() as u8,
row[8].as_string().unwrap().to_owned(),
row[9].as_integer().unwrap() as u8
));
}
}
}
drop(db_read);
Json(ret)
}
// Drains up to `max_count` parsed packets from the capture channel and
// returns them as JSON. Never blocks: stops early when the channel is empty
// (or its sender side has disconnected).
#[get("/feed/traffic?<max_count>")]
fn feed_traffic(
    parsed_packet_rx: State<Mutex<Receiver<(String, String, u16, String, u16, String, u16, String)>>>,
    max_count: Option<usize>
) -> Json<Vec<(String, String, u16, String, u16, String, u16, String)>> {
    // Default page size when the query parameter is absent.
    let limit = max_count.unwrap_or(DEFAULT_MAX_TRAFFIC_COUNT);
    // Pre-allocate, but never reserve more than 1000 slots up front.
    let mut packets = Vec::with_capacity(limit.min(1000));
    let rx = parsed_packet_rx.lock().expect("Failed to acquire lock for 'parsed_packet_rx'");
    for _ in 0..limit {
        // Non-blocking receive; Empty and Disconnected both end the drain.
        // TODO on channel closed, how to exit?
        match rx.try_recv() {
            Ok(packet) => packets.push(packet),
            Err(_) => break,
        }
    }
    drop(rx);
    Json(packets)
}
// Starts the blocking Rocket API server.
//
// `listen_addr` is "host:port". Any malformed input — wrong shape OR an
// unparsable port — falls back to 127.0.0.1:8000 with an error log. (The old
// code panicked via `.parse::<u16>().unwrap()` when the port wasn't a valid
// u16; now that case takes the same fallback path as a missing colon.)
// `parsed_packet_rx` and a read-only DB handle are handed to the request
// handlers through Rocket's managed state.
pub fn run(listen_addr: &str, parsed_packet_rx: mpsc::Receiver<(String, String, u16, String, u16, String, u16, String)>) {
    const DEFAULT_ADDR: (&str, u16) = ("127.0.0.1", 8000);
    let parts: Vec<&str> = listen_addr.trim().split(':').collect();
    let (addr, port) = match parts.as_slice() {
        [host, port_str] => match port_str.parse::<u16>() {
            Ok(port) => (*host, port),
            Err(_) => {
                error!("api_listen_addr format error: {}, default to [127.0.0.1:8000] ", listen_addr);
                DEFAULT_ADDR
            }
        },
        _ => {
            error!("api_listen_addr format error: {}, default to [127.0.0.1:8000] ", listen_addr);
            DEFAULT_ADDR
        }
    };
    let config = Config::build(Environment::Production)
        .address(addr)
        .port(port)
        .log_level(LoggingLevel::Off) // NOTE: Rocket still prints its banner despite this
        .finalize().unwrap();
    // Read-only DB connection shared with the handlers via managed state.
    let db_read = DB::new(consts::DB_READ_ONLY).unwrap();
    rocket::custom(config)
        .manage(Mutex::new(parsed_packet_rx))
        .manage(Mutex::new(db_read))
        .mount("/", routes![index])
        .mount("/api", routes![feed_traffic, feed_result])
        .launch();
}
|
extern crate nalgebra_glm as glm;
use std::fs::File;
use std::io::Read;
use std::sync::{Arc, Mutex, RwLock};
use std::thread;
use std::{mem, os::raw::c_void, ptr};
mod shader;
mod util;
use glutin::event::{
DeviceEvent,
ElementState::{Pressed, Released},
Event, KeyboardInput,
VirtualKeyCode::{self, *},
WindowEvent,
};
use glutin::event_loop::ControlFlow;
// Initial window dimensions in logical pixels (window is non-resizable).
const SCREEN_W: u32 = 800;
const SCREEN_H: u32 = 600;
// == // Helper functions to make interacting with OpenGL a little bit prettier. You *WILL* need these! // == //
// The names should be pretty self explanatory
// Total size in bytes of a slice's elements, as the `isize` that
// `glBufferData` expects for its size argument.
fn byte_size_of_array<T>(val: &[T]) -> isize {
    (val.len() * std::mem::size_of::<T>()) as isize
}
// Get the OpenGL-compatible pointer to an arbitrary array of numbers.
// Uses `as_ptr` instead of the old `&val[0]`, so an empty slice no longer
// panics: it yields the slice's data pointer (valid for zero-length reads),
// which is fine because GL is always told the byte count separately.
fn pointer_to_array<T>(val: &[T]) -> *const c_void {
    val.as_ptr() as *const c_void
}
// Size of `T` in bytes, as the `i32` that GL stride/size parameters expect.
fn size_of<T>() -> i32 {
    std::mem::size_of::<T>() as i32
}
// Byte offset of `n` elements of type `T`, encoded as the pointer value that
// `glVertexAttribPointer`-style APIs expect. Arithmetic deliberately stays in
// u32, matching the original behaviour at all call sites.
fn offset<T>(n: u32) -> *const c_void {
    let bytes = n * mem::size_of::<T>() as u32;
    bytes as *const T as *const c_void
}
// Reads whitespace-separated f32 vertex data from `.\src\triangles.txt`.
//
// Fixes over the original:
// - splits on arbitrary whitespace (`split_whitespace`): the old
//   `split(" ")` produced un-parsable tokens for newlines or double spaces;
// - returns `Err(())` on any failure (open, read, or a bad token) instead of
//   panicking on parse errors or killing the process with `exit(1)` — the
//   `Result` in the signature now actually means something to callers.
fn read_triangles_from_file() -> Result<Vec<f32>, ()> {
    let mut file = match File::open(".\\src\\triangles.txt") {
        Ok(file) => file,
        Err(error) => {
            println!("Error message: {}", error);
            return Err(());
        }
    };
    // Read the whole file into memory before tokenizing.
    let mut content = String::new();
    if let Err(error) = file.read_to_string(&mut content) {
        println!("Error message: {}", error);
        return Err(());
    }
    let mut vertices = Vec::new();
    for token in content.split_whitespace() {
        match token.parse::<f32>() {
            Ok(value) => vertices.push(value),
            Err(error) => {
                println!("Bad vertex value {:?}: {}", token, error);
                return Err(());
            }
        }
    }
    // Kept from the original: echo the raw file content for debugging.
    println!("{}", content);
    Ok(vertices)
}
// Get a null pointer (equivalent to an offset of 0)
// ptr::null()
// let p = 0 as *const c_void
// == // Modify and complete the function below for the first task
// Builds one VAO with three STATIC_DRAW buffers: positions (attribute 0,
// 3 floats each), colors (attribute 1, 4 floats RGBA) and an index buffer.
// Returns the VAO id and leaves the VAO (and its ELEMENT_ARRAY_BUFFER
// binding) bound, which is what the draw loop relies on.
// Safety: requires a current OpenGL context on the calling thread.
unsafe fn init_vao(vertices: &Vec<f32>, indices: &Vec<u32>, colors: &Vec<f32>) -> u32 {
    // Returns the ID of the newly instantiated vertex array object upon its creation
    // VAO - way to bind vbo with spesification
    let mut vao: u32 = 0; // Create
    gl::GenVertexArrays(1, &mut vao); // Generate
    gl::BindVertexArray(vao); // Bind
    // VBO - buffer for the vertices/positions
    let mut vbo: u32 = 0;
    gl::GenBuffers(1, &mut vbo); // creates buffer, generates an id for the vertex buffer - stored on vram
    gl::BindBuffer(gl::ARRAY_BUFFER, vbo); // Binding is sort of like creating layers in photoshop
    gl::BufferData(
        gl::ARRAY_BUFFER,
        byte_size_of_array(&vertices),
        pointer_to_array(&vertices),
        gl::STATIC_DRAW,
    );
    // Vaa = Vertex attrib array
    // Stride 0 means "tightly packed": GL derives 3 * sizeof(f32) itself.
    gl::VertexAttribPointer(0, 3, gl::FLOAT, gl::FALSE, 0, 0 as *const c_void);
    gl::EnableVertexAttribArray(0);
    // CBO - vbo for the color buffer, RGBA
    // NOTE(review): the `= 1` initializer is dead — GenBuffers overwrites it.
    let mut cbo: u32 = 1;
    gl::GenBuffers(1, &mut cbo);
    gl::BindBuffer(gl::ARRAY_BUFFER, cbo);
    gl::BufferData(
        gl::ARRAY_BUFFER,
        byte_size_of_array(&colors),
        pointer_to_array(&colors),
        gl::STATIC_DRAW,
    );
    // 2nd attribute buffer is for colors
    // Explicit stride of 4 floats (16 bytes) — equivalent to tightly packed
    // vec4s, so it matches the 0-stride convention used for positions.
    gl::VertexAttribPointer(1, 4, gl::FLOAT, gl::FALSE, size_of::<f32>() * 4, 0 as *const c_void);
    gl::EnableVertexAttribArray(1);
    // Index buffer object = connect the dots, multiple usecases for same vertices.
    // The ELEMENT_ARRAY_BUFFER binding is recorded in the VAO state.
    let mut ibo: u32 = 0;
    gl::GenBuffers(1, &mut ibo);
    gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, ibo);
    gl::BufferData(
        gl::ELEMENT_ARRAY_BUFFER,
        byte_size_of_array(&indices),
        pointer_to_array(&indices),
        gl::STATIC_DRAW,
    );
    vao
}
// Entry point: creates the window/event loop on the main thread, then spawns
// a dedicated render thread that owns the GL context. Input state (pressed
// keys, mouse delta) is shared between the two threads via Arc<Mutex<..>>,
// and a watchdog thread flips a shared flag if the render thread panics so
// the event loop can exit.
fn main() {
    // Set up the necessary objects to deal with windows and event handling
    let el = glutin::event_loop::EventLoop::new();
    let wb = glutin::window::WindowBuilder::new()
        .with_title("Gloom-rs")
        .with_resizable(false)
        .with_inner_size(glutin::dpi::LogicalSize::new(SCREEN_W, SCREEN_H));
    let cb = glutin::ContextBuilder::new().with_vsync(true);
    let windowed_context = cb.build_windowed(wb, &el).unwrap();
    // Uncomment these if you want to use the mouse for controls, but want it to be confined to the screen and/or invisible.
    // windowed_context.window().set_cursor_grab(true).expect("failed to grab cursor");
    // windowed_context.window().set_cursor_visible(false);
    // Set up a shared vector for keeping track of currently pressed keys
    let arc_pressed_keys = Arc::new(Mutex::new(Vec::<VirtualKeyCode>::with_capacity(10)));
    // Make a reference of this vector to send to the render thread
    let pressed_keys = Arc::clone(&arc_pressed_keys);
    // Set up shared tuple for tracking mouse movement between frames
    let arc_mouse_delta = Arc::new(Mutex::new((0f32, 0f32)));
    // Make a reference of this tuple to send to the render thread
    let mouse_delta = Arc::clone(&arc_mouse_delta);
    // Spawn a separate thread for rendering, so event handling doesn't block rendering
    let render_thread = thread::spawn(move || {
        // Acquire the OpenGL Context and load the function pointers. This has to be done inside of the rendering thread, because
        // an active OpenGL context cannot safely traverse a thread boundary
        let context = unsafe {
            let c = windowed_context.make_current().unwrap();
            gl::load_with(|symbol| c.get_proc_address(symbol) as *const _);
            c
        };
        // Set up openGL
        unsafe {
            gl::Enable(gl::DEPTH_TEST);
            gl::DepthFunc(gl::LESS);
            gl::Enable(gl::CULL_FACE);
            gl::Disable(gl::MULTISAMPLE);
            gl::Enable(gl::BLEND);
            gl::BlendFunc(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA);
            gl::Enable(gl::DEBUG_OUTPUT_SYNCHRONOUS);
            gl::DebugMessageCallback(Some(util::debug_callback), ptr::null());
            // Print some diagnostics
            println!(
                "{}: {}",
                util::get_gl_string(gl::VENDOR),
                util::get_gl_string(gl::RENDERER)
            );
            println!("OpenGL\t: {}", util::get_gl_string(gl::VERSION));
            println!(
                "GLSL\t: {}",
                util::get_gl_string(gl::SHADING_LANGUAGE_VERSION)
            );
        }
        // Several geometry/color sets below are alternatives for different
        // exercise tasks; only `overLappingCoordinates`/`overLappingColors`
        // (plus index list `i`) are actually uploaded to the VAO.
        let c: Vec<f32> = vec![
            -0.8, -0.6, 0.0,
            -0.5, -0.6, 0.0,
            -0.65, -0.2, 0.0,
            0.5, -0.6, 0.0,
            0.8, -0.6, 0.0,
            0.65, -0.2, 0.0,
            -0.2, 0.3, 0.0,
            0.2, 0.6, 0.0,
            0.0, 0.6, 0.0,
        ];
        let i: Vec<u32> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8];
        let col: Vec<f32> = vec![
            1.0, 0.0, 0.0, 0.9,
            1.0, 0.0, 0.0, 0.9,
            1.0, 0.0, 0.0, 0.9,
            0.0, 1.0, 0.0, 0.8,
            0.0, 1.0, 0.0, 0.8,
            0.0, 1.0, 0.0, 0.8,
            0.0, 0.0, 1.0, 0.7,
            0.0, 0.0, 1.0, 0.7,
            0.0, 0.0, 1.0, 0.7,
        ];
        // Three overlapping triangles at different z-depths (blending task).
        let overLappingCoordinates: Vec<f32> = vec![
            -0.3, 0.0, 0.7,
            0.3, 0.0, 0.7,
            0.0, 0.5, 0.7,
            -0.1, 0.3, 0.8,
            0.3, 0.0, 0.8,
            0.3, 0.6, 0.8,
            -0.4, 0.6, 0.6,
            -0.4, 0.0, 0.6,
            0.2, 0.3, 0.6
        ];
        let overLappingColors: Vec<f32> = vec![
            1.0, 0.0, 0.0, 0.6,
            1.0, 0.0, 0.0, 0.6,
            1.0, 0.0, 0.0, 0.6,
            0.0, 1.0, 0.0, 0.8,
            0.0, 1.0, 0.0, 0.8,
            0.0, 1.0, 0.0, 0.8,
            0.0, 0.0, 1.0, 0.9,
            0.0, 0.0, 1.0, 0.9,
            0.0, 0.0, 1.0, 0.9,
        ];
        let coordinates: Vec<f32> = vec![
            -0.6, -0.6, 0.0,
            0.6, -0.6, 0.0,
            0.0, 0.6, 0.0
        ];
        let triangle_indices: Vec<u32> = vec![0, 1, 2];
        let colors: Vec<f32> = vec![
            0.0, 0.0, 1.0, 1.0,
            0.0, 0.0, 1.0, 1.0,
            0.0, 0.0, 1.0, 1.0
        ];
        // == // Set up your VAO here
        unsafe {
            // NOTE(review): the returned id is dropped here, but the VAO
            // stays bound in GL state — that binding is what the
            // DrawElements call in the loop below relies on.
            let vao = init_vao(&overLappingCoordinates, &i, &overLappingColors);
        }
        // Setup uniform locations
        let trans_loc: i32;
        let time_loc: i32;
        let opacity_loc: i32;
        unsafe {
            // Creates shader. using multiple attaches since they return self, and link them all together at the end
            let shdr = shader::ShaderBuilder::new()
                .attach_file(".\\shaders\\simple.vert")
                .attach_file(".\\shaders\\simple.frag")
                .link();
            // Get uniform locations
            trans_loc = shdr.get_uniform_location("transformation");
            time_loc = shdr.get_uniform_location("time");
            opacity_loc = shdr.get_uniform_location("opacity");
            shdr.activate();
        }
        // Used to demonstrate keyboard handling -- feel free to remove
        let mut _arbitrary_number = 0.0;
        let first_frame_time = std::time::Instant::now();
        let mut last_frame_time = first_frame_time;
        // The main rendering loop
        // NOTE(review): glm::perspective takes (aspect, fovy, near, far) —
        // H/W is passed as the aspect ratio here; confirm that is intended
        // (W/H is the more common choice).
        let persp_mat: glm::Mat4 = glm::perspective(
            (SCREEN_H as f32) / (SCREEN_W as f32),
            90.0,
            1.0,
            100.0
        );
        let persp_trans: glm::Mat4 = glm::translation(
            &glm::vec3(0.0, 0.0, -2.0)
        );
        // `proj` is never reassigned after this; the `mut` is leftover.
        let mut proj: glm::Mat4 = persp_mat * persp_trans;
        let model: glm::Mat4 = glm::identity();
        // Unused leftover from an earlier iteration of the transform code.
        let mut trans_matrix: glm::Mat4 = glm::identity();
        // Per-frame input accumulators: the key handler adds steps into
        // these, the loop folds them into `view` and zeroes them again.
        let mut rot_x = 0.0;
        let mut rot_y = 0.0;
        let rot_step: f32 = 2.0;
        let mut opacity: f32 = 0.0;
        let mut v_time:f32 = 0.0;
        let mut trans_x = 0.0;
        let mut trans_y = 0.0;
        // Initial camera push-back; only effective on the very first frame
        // because the loop resets trans_z to 0.0 afterwards.
        let mut trans_z = -4.0;
        let trans_step: f32 = 0.1;
        let mut view: glm::Mat4 = glm::identity();
        loop {
            let now = std::time::Instant::now();
            let elapsed = now.duration_since(first_frame_time).as_secs_f32();
            // Computed but currently unused (kept for frame-rate-independent
            // movement if needed later).
            let delta_time = now.duration_since(last_frame_time).as_secs_f32();
            last_frame_time = now;
            // Handle keyboard input
            if let Ok(keys) = pressed_keys.lock() {
                for key in keys.iter() {
                    // I'm using WASDEQ to handle inputs
                    // Also use arrowkeys for rotation
                    match key {
                        VirtualKeyCode::W => {
                            trans_z += trans_step;
                        },
                        VirtualKeyCode::A => {
                            trans_x += trans_step;
                        },
                        VirtualKeyCode::S => {
                            trans_z -= trans_step;
                        },
                        VirtualKeyCode::D => {
                            trans_x -= trans_step;
                        },
                        VirtualKeyCode::E => {
                            trans_y -= trans_step;
                        },
                        VirtualKeyCode::Q => {
                            trans_y += trans_step;
                        },
                        VirtualKeyCode::R => {
                            // Reset camera
                            view = glm::identity();
                        },
                        VirtualKeyCode::Up => {
                            rot_x -= rot_step;
                        },
                        VirtualKeyCode::Down => {
                            rot_x += rot_step;
                        },
                        VirtualKeyCode::Left => {
                            rot_y -= rot_step;
                        },
                        VirtualKeyCode::Right => {
                            rot_y += rot_step;
                        },
                        _ => {}
                    }
                }
            }
            // Handle mouse movement. delta contains the x and y movement of the mouse since last frame in pixels
            // NOTE(review): the delta is discarded (reset without being
            // read) — mouse-look is effectively disabled here.
            if let Ok(mut delta) = mouse_delta.lock() {
                *delta = (0.0, 0.0);
            }
            // Animated uniforms: pulsing opacity and a sine "time" signal.
            opacity = (elapsed * 10.0).sin() / 2.0 + 0.6;
            v_time = elapsed.sin();
            let trans: glm::Mat4 = glm::translation(&glm::vec3(trans_x, trans_y, trans_z));
            let rot: glm::Mat4 = glm::rotation(rot_x.to_radians(), &glm::vec3(1.0, 0.0, 0.0)) * glm::rotation(rot_y.to_radians(), &glm::vec3(0.0, 1.0, 0.0));
            // Unused; scaling is not applied to the view chain.
            let scale: glm::Mat4 = glm::identity();
            // Accumulate this frame's input on top of the existing camera.
            view = rot * trans * view;
            let mut mod_view = view * model;
            // Transmat here becomes MVP matrix after getting built up by model,
            // view ( rotation, translation ), and projection
            let trans_mat = proj * mod_view;
            //Billboard task
            /*
            mod_view.m11 = 1.0;
            mod_view.m12 = 0.0;
            mod_view.m13 = 0.0;
            mod_view.m21 = 0.0;
            mod_view.m22 = 1.0;
            mod_view.m23 = 0.0;
            mod_view.m31 = 0.0;
            mod_view.m32 = 0.0;
            mod_view.m33 = 1.0;
            */
            // Reset values
            trans_x = 0.0;
            trans_y = 0.0;
            trans_z = 0.0;
            rot_y = 0.0;
            rot_x = 0.0;
            unsafe {
                gl::ClearColor(0.76862745, 0.71372549, 0.94901961, 1.0); // moon raker, full opacity
                gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
                // Now we can use these uniforms in our shaders
                gl::Uniform1f(opacity_loc, opacity);
                gl::Uniform1f(time_loc, v_time);
                gl::UniformMatrix4fv(trans_loc, 1, gl::FALSE, trans_mat.as_ptr());
                // Issue the necessary commands to draw your scene here
                // We have 15 indices for the 5 triangles, 3 for 1 and so on
                let num_of_indices = 3 * 3;
                // Unused alternative count for the quad task.
                let num_of_square_indices = 6;
                // Draws from the VAO left bound by init_vao above.
                gl::DrawElements(
                    gl::TRIANGLES,
                    num_of_indices,
                    gl::UNSIGNED_INT,
                    ptr::null(),
                );
            }
            context.swap_buffers().unwrap();
        }
    });
    // Keep track of the health of the rendering thread
    let render_thread_healthy = Arc::new(RwLock::new(true));
    let render_thread_watchdog = Arc::clone(&render_thread_healthy);
    thread::spawn(move || {
        if !render_thread.join().is_ok() {
            if let Ok(mut health) = render_thread_watchdog.write() {
                println!("Render thread panicked!");
                *health = false;
            }
        }
    });
    // Start the event loop -- This is where window events get handled
    el.run(move |event, _, control_flow| {
        *control_flow = ControlFlow::Wait;
        // Terminate program if render thread panics
        if let Ok(health) = render_thread_healthy.read() {
            if *health == false {
                *control_flow = ControlFlow::Exit;
            }
        }
        match event {
            Event::WindowEvent {
                event: WindowEvent::CloseRequested,
                ..
            } => {
                *control_flow = ControlFlow::Exit;
            }
            // Keep track of currently pressed keys to send to the rendering thread
            Event::WindowEvent {
                event:
                    WindowEvent::KeyboardInput {
                        input:
                            KeyboardInput {
                                state: key_state,
                                virtual_keycode: Some(keycode),
                                ..
                            },
                        ..
                    },
                ..
            } => {
                if let Ok(mut keys) = arc_pressed_keys.lock() {
                    match key_state {
                        Released => {
                            if keys.contains(&keycode) {
                                let i = keys.iter().position(|&k| k == keycode).unwrap();
                                keys.remove(i);
                            }
                        }
                        Pressed => {
                            if !keys.contains(&keycode) {
                                keys.push(keycode);
                            }
                        }
                    }
                }
                // Handle escape separately
                match keycode {
                    Escape => {
                        *control_flow = ControlFlow::Exit;
                    }
                    _ => {}
                }
            }
            Event::DeviceEvent {
                event: DeviceEvent::MouseMotion { delta },
                ..
            } => {
                // Accumulate mouse movement
                if let Ok(mut position) = arc_mouse_delta.lock() {
                    *position = (position.0 + delta.0 as f32, position.1 + delta.1 as f32);
                }
            }
            _ => {}
        }
    });
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.