text stringlengths 8 4.13M |
|---|
//! A metadata summary of a Parquet file in object storage, with the ability to
//! download & execute a scan.
use crate::{
storage::{ParquetExecInput, ParquetStorage},
ParquetFilePath,
};
use data_types::{ParquetFile, TimestampMinMax};
use schema::Schema;
use std::{mem, sync::Arc};
use uuid::Uuid;
/// An abstract representation of a Parquet file in object storage, together
/// with the metadata needed to download and execute a scan of it.
#[derive(Debug)]
pub struct ParquetChunk {
    /// Metadata record describing the parquet file.
    parquet_file: Arc<ParquetFile>,
    /// Schema that goes with this table's parquet file.
    schema: Schema,
    /// Storage backend used to resolve the file's path and read its bytes.
    store: ParquetStorage,
}
impl ParquetChunk {
    /// Build a chunk from a parquet-file record, its schema, and the object
    /// store that holds the file bytes.
    pub fn new(parquet_file: Arc<ParquetFile>, schema: Schema, store: ParquetStorage) -> Self {
        Self {
            parquet_file,
            schema,
            store,
        }
    }

    /// The storage backend that contains this file.
    pub fn store(&self) -> &ParquetStorage {
        &self.store
    }

    /// The raw parquet-file metadata record.
    pub fn parquet_file(&self) -> &Arc<ParquetFile> {
        &self.parquet_file
    }

    /// Object store id of the file.
    pub fn object_store_id(&self) -> Uuid {
        self.parquet_file.object_store_id
    }

    /// Approximate memory size of the chunk, in bytes, including the
    /// dictionary, tables, and their rows.
    ///
    /// The size of the `Arc` handle itself is subtracted so the pointed-to
    /// metadata is not counted twice.
    pub fn size(&self) -> usize {
        let shallow = mem::size_of_val(self);
        let file_meta = self.parquet_file.size();
        let arc_handle = mem::size_of_val(&self.parquet_file);
        shallow + file_meta - arc_handle
    }

    /// Infallibly return the full schema (for all columns) of this chunk.
    pub fn schema(&self) -> &Schema {
        &self.schema
    }

    /// Inputs for [`ParquetExec`].
    ///
    /// See [`ParquetExecInput`] for more information.
    ///
    /// [`ParquetExec`]: datafusion::datasource::physical_plan::ParquetExec
    pub fn parquet_exec_input(&self) -> ParquetExecInput {
        let path: ParquetFilePath = self.parquet_file.as_ref().into();
        self.store.parquet_exec_input(&path, self.file_size_bytes())
    }

    /// Total number of rows across all row groups in this chunk.
    pub fn rows(&self) -> usize {
        self.parquet_file.row_count as usize
    }

    /// Size of the parquet file in object storage, in bytes.
    pub fn file_size_bytes(&self) -> usize {
        self.parquet_file.file_size_bytes as usize
    }

    /// Minimum/maximum timestamp range covered by this file.
    pub fn timestamp_min_max(&self) -> TimestampMinMax {
        let min = self.parquet_file.min_time.get();
        let max = self.parquet_file.max_time.get();
        TimestampMinMax { min, max }
    }
}
|
/// Demonstrates tuple pattern matching: literal positions, `..` rest
/// patterns, and a binding catch-all.
fn main() {
    let triple = (2, 55, "string");
    match triple {
        (1, y, z) => println!("matching tupple with first value 1. second:{y:?}, third:{z:?}."),
        // can't match on the middle value only.
        // (..,55, ..) => println!("only caring about second value."),
        (.., "string") => println!("matching tupple with \"string\" on the end. ignoring the rest."),
        // `(x, y, z)` binds every remaining triple, so a trailing `_` arm
        // would be an unreachable pattern (compiler warning) — removed.
        (x, y, z) => println!("catch all. first:{x}, second:{y:?}, third:{z:?}."),
    }
}
use self::token::ValidToken;
use super::config::Config;
use super::status;
use super::worker::{Job, JobTrigger};
use crate::fs::next_job_id;
use crate::worker::WorkerSender;
use rocket::{self, State};
use rocket::config::{ConfigBuilder, Environment};
use rocket::fairing::AdHoc;
use rocket::http::Status;
use rocket::response::Failure;
use rocket_contrib::Json;
mod token;
/// JSON body returned after a job is queued: just the new job's id.
#[derive(Serialize, Deserialize)]
struct CreateJobResponse {
    id: u64,
}

impl CreateJobResponse {
    /// Wrap a freshly allocated job id for the response body.
    fn new(id: u64) -> Self {
        Self { id }
    }
}
/// Queue a new job for `project_name`.
///
/// Responds 403 when the project is unknown or the token may not access it,
/// 500 when an id cannot be allocated or the worker channel is closed.
#[post("/v1/jobs/<project_name>")]
fn create_job(
    token: ValidToken,
    tx: State<WorkerSender>,
    config: State<Config>,
    project_name: String,
) -> Result<Json<CreateJobResponse>, Failure> {
    // The project must exist AND the token must grant access to it.
    let authorized = config
        .projects
        .get(&project_name)
        .filter(|_| token.can_access(&project_name))
        .is_some();
    if !authorized {
        return Err(Failure(Status::Forbidden));
    }

    let job_id = next_job_id(&project_name).map_err(|_| Failure(Status::InternalServerError))?;

    let job = Job {
        id: job_id,
        project: project_name,
        trigger: JobTrigger::Webhook {
            token: token.token_name().into(),
        },
    };

    // Hand the job to the worker; a closed channel is an internal error.
    tx.send(job)
        .map(|_| Json(CreateJobResponse::new(job_id)))
        .map_err(|_| Failure(Status::InternalServerError))
}
/// Configure and launch the Rocket HTTP server (blocks forever).
pub(crate) fn start_server(config: Config, sender: WorkerSender) {
    // Debug builds serve in development mode, release builds in production.
    let environment = if cfg!(debug_assertions) {
        Environment::Development
    } else {
        Environment::Production
    };

    let tls_enabled = config.main.tls.is_some();

    let rocket_config = {
        let builder = ConfigBuilder::new(environment)
            .address(config.main.listen.address.clone())
            .port(config.main.listen.port);
        match config.main.tls {
            Some(ref tls) => builder
                .tls(tls.certificate(), tls.certificate_key())
                .unwrap(),
            None => builder.unwrap(),
        }
    };

    rocket::custom(rocket_config, false)
        .attach(AdHoc::on_launch(move |rocket| {
            // Announce the bound address once Rocket has resolved its config.
            let cfg = rocket.config();
            let protocol = if tls_enabled { "https" } else { "http" };
            status!(
                "Server is listening on {}://{}:{}",
                protocol,
                cfg.address,
                cfg.port
            );
        }))
        .manage(sender)
        .manage(config)
        .mount("/", routes![create_job])
        .launch();
}
|
/// A measurement value paired with the label used when displaying it.
#[derive(Debug, PartialEq)]
enum Units {
    Miles(f64, String),
    Feet(f64, String),
    Inches(f64, String),
    Kilometers(f64, String),
    Meters(f64, String),
    Centimeters(f64, String),
    Pounds(f64, String),
    Ounces(f64, String),
    Kilograms(f64, String),
    Grams(f64, String),
    DegreesCelsius(f64, String),
    DegreesFahrenheit(f64, String),
}

impl Units {
    /// Map a lowercase unit token to its variant, attaching the canonical
    /// display label.
    ///
    /// # Panics
    /// Panics on an unrecognized token; callers are expected to pre-filter
    /// input against the known token list.
    fn new(val: f64, unit: &str) -> Self {
        match unit {
            // metric length
            "km" | "kms" | "kilometer" | "kilometers" => Units::Kilometers(val, String::from("km")),
            "m" | "ms" | "meter" | "meters" => Units::Meters(val, String::from("m")),
            "cm" | "cms" | "centimeter" | "centimeters" => Units::Centimeters(val, String::from("cm")),
            // imperial length
            "mile" | "miles" => Units::Miles(val, String::from("miles")),
            "feet" | "foot" | "ft" => Units::Feet(val, String::from("ft")),
            "inches" | "inch" => Units::Inches(val, String::from("inches")),
            // metric weight
            "kg" | "kilogram" | "kilograms" => Units::Kilograms(val, String::from("kg")),
            "g" | "gram" | "grams" => Units::Grams(val, String::from("grams")),
            // imperial weight
            "lbs" | "pound" | "pounds" => Units::Pounds(val, String::from("lbs")),
            "oz" | "ounces" | "ounce" => Units::Ounces(val, String::from("oz")),
            // temperature
            "c" | "℃" | "celsius" => Units::DegreesCelsius(val, String::from("℃")),
            "f" | "℉" | "fahrenheit" => Units::DegreesFahrenheit(val, String::from("℉")),
            _ => panic!("Unknown type was passed into Units::new(), check your input"),
        }
    }

    /// Convert to the counterpart system: imperial ↔ metric, ℃ ↔ ℉.
    fn convert(&self) -> Units {
        match self {
            // imperial length -> metric
            Units::Miles(val, _) => Units::Kilometers(val * 1.609344, String::from("km")),
            Units::Feet(val, _) => Units::Meters(val * 0.3048, String::from("m")),
            Units::Inches(val, _) => Units::Centimeters(val * 2.54, String::from("cm")),
            // metric length -> imperial
            Units::Kilometers(val, _) => Units::Miles(val * 0.6213712, String::from("miles")),
            Units::Meters(val, _) => Units::Feet(val / 0.3048, String::from("ft")),
            Units::Centimeters(val, _) => Units::Inches(val / 2.54, String::from("inches")),
            // imperial weight -> metric
            Units::Pounds(val, _) => Units::Kilograms(val * 0.4535924, String::from("kg")),
            Units::Ounces(val, _) => Units::Grams(val * 28.34952, String::from("grams")),
            // metric weight -> imperial
            Units::Kilograms(val, _) => Units::Pounds(val / 0.4535924, String::from("lbs")),
            Units::Grams(val, _) => Units::Ounces(val / 28.34952, String::from("oz")),
            // temperature
            Units::DegreesCelsius(val, _) => Units::DegreesFahrenheit(val * 1.8 + 32_f64, String::from("℉")),
            Units::DegreesFahrenheit(val, _) => Units::DegreesCelsius((val - 32_f64) / 1.8, String::from("℃")),
        }
    }

    /// Pull the `(value, label)` payload out of any variant.
    fn destruct_enum(unit: &Units) -> (f64, String) {
        // Every variant carries the same payload shape, so a single
        // or-pattern arm covers them all.
        match unit {
            Units::Miles(val, unit)
            | Units::Feet(val, unit)
            | Units::Inches(val, unit)
            | Units::Kilometers(val, unit)
            | Units::Meters(val, unit)
            | Units::Centimeters(val, unit)
            | Units::Pounds(val, unit)
            | Units::Ounces(val, unit)
            | Units::Kilograms(val, unit)
            | Units::Grams(val, unit)
            | Units::DegreesCelsius(val, unit)
            | Units::DegreesFahrenheit(val, unit) => (*val, unit.clone()),
        }
    }
}
/// Unit tokens recognized by `parse_input`; a word must match one of these
/// (after punctuation stripping) before it is handed to `Units::new`.
// NOTE(review): `Units::new` also accepts "ms" and "cms", which are absent
// here — presumably deliberate for "ms" (milliseconds ambiguity), but worth
// confirming; words not listed here are never parsed as units.
const LIST_POSSIBLE: &[&str] = &[
    "km",
    "kms",
    "kilometers",
    "kilometer",
    "m",
    "meters",
    "meter",
    "cm",
    "centimeters",
    "centimeter",
    "kg",
    "kilograms",
    "kilogram",
    "grams",
    "gram",
    "mile",
    "miles",
    "ft",
    "feet",
    "foot",
    "inches",
    "inch",
    "lbs",
    "pounds",
    "pound",
    "oz",
    "ounces",
    "ounce",
    "c",
    "℃",
    "celsius",
    "f",
    "℉",
    "fahrenheit",
];
/// Scan `msg` for "<number> <unit>" word pairs and return the units found.
///
/// Returns `None` when the message contains no recognized unit token, or
/// when no recognized token is directly preceded by a parsable number.
fn parse_input(msg: &str) -> Option<Vec<Units>> {
    let msg = msg.to_lowercase();

    // Cheap pre-filter: bail out early when no unit token appears anywhere
    // in the message (substring check, not word-exact).
    if !LIST_POSSIBLE.iter().any(|elem| msg.contains(elem)) {
        return None;
    }

    let words: Vec<_> = msg.split_ascii_whitespace().collect();
    let mut values_vec = vec![];
    // Walk adjacent (number, unit) word pairs. The original code indexed
    // `msg[i - 1]`, which underflowed and panicked whenever a recognized
    // unit token was the very first word (e.g. "cm is a unit"); iterating
    // over windows makes that case safely yield no match instead.
    for pair in words.windows(2) {
        // Strip trailing punctuation so "140 pounds," still matches.
        let word = pair[1].trim_end_matches(&[',', '.', '/', ';', ':', '|', '"', '\'', '\\'][..]);
        if LIST_POSSIBLE.contains(&word) {
            if let Ok(val) = pair[0].parse::<f64>() {
                values_vec.push(Units::new(val, word));
            }
        }
    }

    if values_vec.is_empty() {
        None
    } else {
        Some(values_vec)
    }
}
/// Render one "X unit is Y unit" line per parsed unit.
///
/// Converted values below 1.0 in magnitude keep full precision (rounding to
/// two decimals would print 0.00); larger values are rounded to two decimals.
fn assemble_response(values_vec: &[Units]) -> String {
    values_vec
        .iter()
        .map(|v| {
            let (value, unit) = Units::destruct_enum(v);
            let (converted_value, converted_unit) = Units::destruct_enum(&v.convert());
            if converted_value.abs() < 1.0 {
                format!(
                    "{} {} is {} {}\n",
                    value, unit, converted_value, converted_unit
                )
            } else {
                format!(
                    "{} {} is {:.2} {}\n",
                    value, unit, converted_value, converted_unit
                )
            }
        })
        .collect()
}
/// Parse a chat message and, if it mentions convertible units, build the
/// conversion reply; `None` when there is nothing to convert.
pub fn respond_to_msg(msg: &str) -> Option<String> {
    let units = parse_input(msg)?;
    Some(assemble_response(&units))
}
#[cfg(test)]
mod tests {
    use super::*;
    use float_cmp::{approx_eq, F64Margin};

    /// Shared tolerance check used by every conversion assertion
    /// (epsilon 0.001, 2 ulps — same margin the original tests used).
    fn assert_approx(left: f64, right: f64) {
        assert!(
            approx_eq!(
                f64,
                left,
                right,
                F64Margin {
                    epsilon: 0.001,
                    ulps: 2
                }
            ),
            "\nleft != right\n{} != {}",
            left,
            right
        );
    }

    /// Asserts that `$converted` is the `$variant` variant and that its
    /// value is approximately `$expected`. Replaces the original's repeated
    /// if-let pyramids (which also mislabeled some constructors, e.g. "℃"
    /// on Fahrenheit values — labels are ignored by `convert`, but the
    /// canonical ones are used here).
    macro_rules! check_conversion {
        ($variant:path, $expected:expr, $converted:expr) => {
            match $converted {
                $variant(val, _) => assert_approx($expected, val),
                other => panic!("FAILED TO ASSIGN: unexpected variant {:?}", other),
            }
        };
    }

    #[test]
    fn unit_conversion() {
        // length
        check_conversion!(Units::Meters, 3.048, Units::Feet(10.0, String::from("ft")).convert());
        check_conversion!(Units::Feet, 32.80839895, Units::Meters(10.0, String::from("m")).convert());
        check_conversion!(Units::Miles, 10.0, Units::Kilometers(16.09344, String::from("km")).convert());
        check_conversion!(Units::Kilometers, 10.0, Units::Miles(6.213712, String::from("miles")).convert());
        // weight
        check_conversion!(Units::Kilograms, 4.535924, Units::Pounds(10.0, String::from("lbs")).convert());
        check_conversion!(Units::Pounds, 22.04623, Units::Kilograms(10.0, String::from("kg")).convert());
        check_conversion!(Units::Ounces, 0.3527396, Units::Grams(10.0, String::from("grams")).convert());
        check_conversion!(Units::Grams, 283.4952, Units::Ounces(10.0, String::from("oz")).convert());
        // temperature — includes the -40 fixed point where ℃ == ℉
        check_conversion!(
            Units::DegreesCelsius,
            10.0,
            Units::DegreesFahrenheit(50.0, String::from("℉")).convert()
        );
        check_conversion!(
            Units::DegreesCelsius,
            0.0,
            Units::DegreesFahrenheit(32.0, String::from("℉")).convert()
        );
        check_conversion!(
            Units::DegreesCelsius,
            -40.0,
            Units::DegreesFahrenheit(-40.0, String::from("℉")).convert()
        );
        check_conversion!(
            Units::DegreesFahrenheit,
            15.0,
            Units::DegreesCelsius(-9.444444, String::from("℃")).convert()
        );
        check_conversion!(
            Units::DegreesFahrenheit,
            0.0,
            Units::DegreesCelsius(-17.77778, String::from("℃")).convert()
        );
        check_conversion!(
            Units::DegreesFahrenheit,
            -40.0,
            Units::DegreesCelsius(-40.0, String::from("℃")).convert()
        );
    }

    #[test]
    fn parsing_msg_single_unit() {
        let msg = "Hello, I am 171 cm tall";
        assert_eq!(
            parse_input(msg),
            Some(vec![Units::Centimeters(171.0, String::from("cm"))])
        );
    }

    #[test]
    fn parsing_msg_multiple_units() {
        let msg = "Hello, I am 171 cm tall and weigh 140 pounds";
        assert_eq!(
            parse_input(msg),
            Some(vec![
                Units::Centimeters(171.0, String::from("cm")),
                Units::Pounds(140.0, String::from("lbs"))
            ])
        );
    }

    #[test]
    fn parsing_msg_malformed_single_unit() {
        // A unit word preceded by a non-number must be ignored.
        let msg = "Hello, I am none cm tall";
        assert_eq!(parse_input(msg), None);
    }

    #[test]
    fn parsing_msg_malformed_unit_plus_additional_correct_unit() {
        let msg = "Hello, I am none cm tall and weigh 140 pounds";
        assert_eq!(
            parse_input(msg),
            Some(vec![Units::Pounds(140.0, String::from("lbs"))])
        );
    }

    #[test]
    fn parsing_msg_multiple_malformed_units_multiple_correct_units() {
        let msg = "Hello, I am none cm tall and weigh 140 pounds and my city is 343 kms in area and my cat's name is little Foot";
        assert_eq!(
            parse_input(msg),
            Some(vec![
                Units::Pounds(140.0, String::from("lbs")),
                Units::Kilometers(343.0, String::from("km"))
            ])
        );
    }

    #[test]
    fn parsing_msg_strip_punctuation_correct_units() {
        let msg = "Hello, I am none cm tall and weigh 140 pounds. My city is 343 kms in area and my cat's name is little Foot";
        assert_eq!(
            parse_input(msg),
            Some(vec![
                Units::Pounds(140.0, String::from("lbs")),
                Units::Kilometers(343.0, String::from("km"))
            ])
        );
    }

    #[test]
    fn parsing_msg_strip_multiple_punctuation_correct_units() {
        let msg = "Hello, I am none cm tall and weigh 140 pounds,|.,;. My city is 343 kms.,; in area and my cat's name is little Foot";
        assert_eq!(
            parse_input(msg),
            Some(vec![
                Units::Pounds(140.0, String::from("lbs")),
                Units::Kilometers(343.0, String::from("km"))
            ])
        );
    }

    #[test]
    fn parsing_msg_parse_floats() {
        let msg = "Maddy-hops is exactly 0.00171 kilometers tall";
        assert_eq!(
            parse_input(msg),
            Some(vec![Units::Kilometers(0.00171, String::from("km"))])
        );
    }

    #[test]
    fn destructing_units() {
        let unit = Units::Feet(300.0, String::from("ft"));
        assert_eq!(Units::destruct_enum(&unit), (300.0, String::from("ft")))
    }

    #[test]
    fn assemble_response_single_unit() {
        // |converted| < 1.0 keeps full precision rather than {:.2}.
        let msg = "Maddy-hops is exactly 0.00171 kilometers tall";
        let units_vec = parse_input(msg).unwrap();
        assert_eq!(
            "0.00171 km is 0.001062544752 miles\n".to_string(),
            assemble_response(&units_vec)
        );
    }

    #[test]
    fn assemble_response_multiple_units() {
        let msg = "Maddy-hops is exactly 0.00171 kilometers tall and weighs 140 pounds.";
        let units_vec = parse_input(msg).unwrap();
        assert_eq!(
            "0.00171 km is 0.001062544752 miles\n140 lbs is 63.50 kg\n".to_string(),
            assemble_response(&units_vec)
        );
    }

    #[test]
    fn assemble_response_degrees() {
        let msg = "it's -30 c where I live rn";
        let units_vec = parse_input(msg).unwrap();
        assert_eq!(
            "-30 ℃ is -22.00 ℉\n".to_string(),
            assemble_response(&units_vec)
        );
    }
}
|
// Reader/writer wrappers and reset value (0) for the CICR register.
// NOTE(review): this looks like svd2rust-generated code — kept byte-identical;
// regenerate from the SVD rather than hand-editing.
#[doc = "Reader of register CICR"]
pub type R = crate::R<u32, super::CICR>;
#[doc = "Writer for register CICR"]
pub type W = crate::W<u32, super::CICR>;
#[doc = "Register CICR `reset()`'s with value 0"]
impl crate::ResetValue for super::CICR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "LSI ready Interrupt Clear\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LSIRDYC_A {
#[doc = "1: Clear interrupt flag"]
CLEAR = 1,
}
impl From<LSIRDYC_A> for bool {
#[inline(always)]
fn from(variant: LSIRDYC_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `LSIRDYC`"]
pub type LSIRDYC_R = crate::R<bool, LSIRDYC_A>;
impl LSIRDYC_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<bool, LSIRDYC_A> {
use crate::Variant::*;
match self.bits {
true => Val(LSIRDYC_A::CLEAR),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `CLEAR`"]
#[inline(always)]
pub fn is_clear(&self) -> bool {
*self == LSIRDYC_A::CLEAR
}
}
// Write proxy for CICR bit 0 (LSIRDYC); `bit()` masks the value into bit 0.
#[doc = "Write proxy for field `LSIRDYC`"]
pub struct LSIRDYC_W<'a> {
    w: &'a mut W,
}
impl<'a> LSIRDYC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: LSIRDYC_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(LSIRDYC_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// LSERDYC (CICR bit 1) reuses LSIRDYC's enumerated type; the write proxy
// masks into bit 1.
#[doc = "LSE ready Interrupt Clear"]
pub type LSERDYC_A = LSIRDYC_A;
#[doc = "Reader of field `LSERDYC`"]
pub type LSERDYC_R = crate::R<bool, LSIRDYC_A>;
#[doc = "Write proxy for field `LSERDYC`"]
pub struct LSERDYC_W<'a> {
    w: &'a mut W,
}
impl<'a> LSERDYC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: LSERDYC_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(LSIRDYC_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
// HSIRDYC (CICR bit 2); write proxy masks into bit 2.
#[doc = "HSI ready Interrupt Clear"]
pub type HSIRDYC_A = LSIRDYC_A;
#[doc = "Reader of field `HSIRDYC`"]
pub type HSIRDYC_R = crate::R<bool, LSIRDYC_A>;
#[doc = "Write proxy for field `HSIRDYC`"]
pub struct HSIRDYC_W<'a> {
    w: &'a mut W,
}
impl<'a> HSIRDYC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: HSIRDYC_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(LSIRDYC_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
// HSERDYC (CICR bit 3); write proxy masks into bit 3.
#[doc = "HSE ready Interrupt Clear"]
pub type HSERDYC_A = LSIRDYC_A;
#[doc = "Reader of field `HSERDYC`"]
pub type HSERDYC_R = crate::R<bool, LSIRDYC_A>;
#[doc = "Write proxy for field `HSERDYC`"]
pub struct HSERDYC_W<'a> {
    w: &'a mut W,
}
impl<'a> HSERDYC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: HSERDYC_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(LSIRDYC_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
// CICR bit 4. NOTE(review): the field is named `HSE_ready_Interrupt_Clear`
// here, but the `R`/`W` accessors below document bit 4 as "CSI ready
// Interrupt Clear" — this looks like a misnamed field in the source SVD
// (HSE already occupies bit 3); confirm against the device reference manual.
// Unlike the other fields, this one has no enumerated-value type or
// `variant()`/`clear()` helpers.
#[doc = "Reader of field `HSE_ready_Interrupt_Clear`"]
pub type HSE_READY_INTERRUPT_CLEAR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HSE_ready_Interrupt_Clear`"]
pub struct HSE_READY_INTERRUPT_CLEAR_W<'a> {
    w: &'a mut W,
}
impl<'a> HSE_READY_INTERRUPT_CLEAR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
// HSI48RDYC (CICR bit 5, "RC48 ready"); write proxy masks into bit 5.
#[doc = "RC48 ready Interrupt Clear"]
pub type HSI48RDYC_A = LSIRDYC_A;
#[doc = "Reader of field `HSI48RDYC`"]
pub type HSI48RDYC_R = crate::R<bool, LSIRDYC_A>;
#[doc = "Write proxy for field `HSI48RDYC`"]
pub struct HSI48RDYC_W<'a> {
    w: &'a mut W,
}
impl<'a> HSI48RDYC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: HSI48RDYC_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(LSIRDYC_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
// PLL1RDYC (CICR bit 6); write proxy masks into bit 6.
#[doc = "PLL1 ready Interrupt Clear"]
pub type PLL1RDYC_A = LSIRDYC_A;
#[doc = "Reader of field `PLL1RDYC`"]
pub type PLL1RDYC_R = crate::R<bool, LSIRDYC_A>;
#[doc = "Write proxy for field `PLL1RDYC`"]
pub struct PLL1RDYC_W<'a> {
    w: &'a mut W,
}
impl<'a> PLL1RDYC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: PLL1RDYC_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(LSIRDYC_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
// PLL2RDYC (CICR bit 7); write proxy masks into bit 7.
#[doc = "PLL2 ready Interrupt Clear"]
pub type PLL2RDYC_A = LSIRDYC_A;
#[doc = "Reader of field `PLL2RDYC`"]
pub type PLL2RDYC_R = crate::R<bool, LSIRDYC_A>;
#[doc = "Write proxy for field `PLL2RDYC`"]
pub struct PLL2RDYC_W<'a> {
    w: &'a mut W,
}
impl<'a> PLL2RDYC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: PLL2RDYC_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(LSIRDYC_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
// PLL3RDYC (CICR bit 8); write proxy masks into bit 8.
#[doc = "PLL3 ready Interrupt Clear"]
pub type PLL3RDYC_A = LSIRDYC_A;
#[doc = "Reader of field `PLL3RDYC`"]
pub type PLL3RDYC_R = crate::R<bool, LSIRDYC_A>;
#[doc = "Write proxy for field `PLL3RDYC`"]
pub struct PLL3RDYC_W<'a> {
    w: &'a mut W,
}
impl<'a> PLL3RDYC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: PLL3RDYC_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(LSIRDYC_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
// LSECSSC (CICR bit 9); write proxy masks into bit 9.
#[doc = "LSE clock security system Interrupt Clear"]
pub type LSECSSC_A = LSIRDYC_A;
#[doc = "Reader of field `LSECSSC`"]
pub type LSECSSC_R = crate::R<bool, LSIRDYC_A>;
#[doc = "Write proxy for field `LSECSSC`"]
pub struct LSECSSC_W<'a> {
    w: &'a mut W,
}
impl<'a> LSECSSC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: LSECSSC_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(LSIRDYC_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
// HSECSSC (CICR bit 10); write proxy masks into bit 10.
#[doc = "HSE clock security system Interrupt Clear"]
pub type HSECSSC_A = LSIRDYC_A;
#[doc = "Reader of field `HSECSSC`"]
pub type HSECSSC_R = crate::R<bool, LSIRDYC_A>;
#[doc = "Write proxy for field `HSECSSC`"]
pub struct HSECSSC_W<'a> {
    w: &'a mut W,
}
impl<'a> HSECSSC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: HSECSSC_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(LSIRDYC_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
// Field accessors on the register reader: each extracts one bit of CICR.
impl R {
    #[doc = "Bit 0 - LSI ready Interrupt Clear"]
    #[inline(always)]
    pub fn lsirdyc(&self) -> LSIRDYC_R {
        LSIRDYC_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - LSE ready Interrupt Clear"]
    #[inline(always)]
    pub fn lserdyc(&self) -> LSERDYC_R {
        LSERDYC_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - HSI ready Interrupt Clear"]
    #[inline(always)]
    pub fn hsirdyc(&self) -> HSIRDYC_R {
        HSIRDYC_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - HSE ready Interrupt Clear"]
    #[inline(always)]
    pub fn hserdyc(&self) -> HSERDYC_R {
        HSERDYC_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - CSI ready Interrupt Clear"]
    #[inline(always)]
    pub fn hse_ready_interrupt_clear(&self) -> HSE_READY_INTERRUPT_CLEAR_R {
        // NOTE(review): accessor name says HSE but the doc says CSI —
        // likely an SVD naming defect; see the bit-4 field definition above.
        HSE_READY_INTERRUPT_CLEAR_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - RC48 ready Interrupt Clear"]
    #[inline(always)]
    pub fn hsi48rdyc(&self) -> HSI48RDYC_R {
        HSI48RDYC_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - PLL1 ready Interrupt Clear"]
    #[inline(always)]
    pub fn pll1rdyc(&self) -> PLL1RDYC_R {
        PLL1RDYC_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - PLL2 ready Interrupt Clear"]
    #[inline(always)]
    pub fn pll2rdyc(&self) -> PLL2RDYC_R {
        PLL2RDYC_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - PLL3 ready Interrupt Clear"]
    #[inline(always)]
    pub fn pll3rdyc(&self) -> PLL3RDYC_R {
        PLL3RDYC_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - LSE clock security system Interrupt Clear"]
    #[inline(always)]
    pub fn lsecssc(&self) -> LSECSSC_R {
        LSECSSC_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10 - HSE clock security system Interrupt Clear"]
    #[inline(always)]
    pub fn hsecssc(&self) -> HSECSSC_R {
        HSECSSC_R::new(((self.bits >> 10) & 0x01) != 0)
    }
}
// Field write-proxy constructors on the register writer, one per CICR bit.
impl W {
    #[doc = "Bit 0 - LSI ready Interrupt Clear"]
    #[inline(always)]
    pub fn lsirdyc(&mut self) -> LSIRDYC_W {
        LSIRDYC_W { w: self }
    }
    #[doc = "Bit 1 - LSE ready Interrupt Clear"]
    #[inline(always)]
    pub fn lserdyc(&mut self) -> LSERDYC_W {
        LSERDYC_W { w: self }
    }
    #[doc = "Bit 2 - HSI ready Interrupt Clear"]
    #[inline(always)]
    pub fn hsirdyc(&mut self) -> HSIRDYC_W {
        HSIRDYC_W { w: self }
    }
    #[doc = "Bit 3 - HSE ready Interrupt Clear"]
    #[inline(always)]
    pub fn hserdyc(&mut self) -> HSERDYC_W {
        HSERDYC_W { w: self }
    }
    #[doc = "Bit 4 - CSI ready Interrupt Clear"]
    #[inline(always)]
    pub fn hse_ready_interrupt_clear(&mut self) -> HSE_READY_INTERRUPT_CLEAR_W {
        // NOTE(review): name/doc mismatch (HSE vs CSI) — see bit-4 field above.
        HSE_READY_INTERRUPT_CLEAR_W { w: self }
    }
    #[doc = "Bit 5 - RC48 ready Interrupt Clear"]
    #[inline(always)]
    pub fn hsi48rdyc(&mut self) -> HSI48RDYC_W {
        HSI48RDYC_W { w: self }
    }
    #[doc = "Bit 6 - PLL1 ready Interrupt Clear"]
    #[inline(always)]
    pub fn pll1rdyc(&mut self) -> PLL1RDYC_W {
        PLL1RDYC_W { w: self }
    }
    #[doc = "Bit 7 - PLL2 ready Interrupt Clear"]
    #[inline(always)]
    pub fn pll2rdyc(&mut self) -> PLL2RDYC_W {
        PLL2RDYC_W { w: self }
    }
    #[doc = "Bit 8 - PLL3 ready Interrupt Clear"]
    #[inline(always)]
    pub fn pll3rdyc(&mut self) -> PLL3RDYC_W {
        PLL3RDYC_W { w: self }
    }
    #[doc = "Bit 9 - LSE clock security system Interrupt Clear"]
    #[inline(always)]
    pub fn lsecssc(&mut self) -> LSECSSC_W {
        LSECSSC_W { w: self }
    }
    #[doc = "Bit 10 - HSE clock security system Interrupt Clear"]
    #[inline(always)]
    pub fn hsecssc(&mut self) -> HSECSSC_W {
        HSECSSC_W { w: self }
    }
}
|
//! To run this code, clone the rusty_engine repository and run the command:
//!
//! cargo run --release --example text
use rusty_engine::prelude::*;
/// Per-game state handed to the logic callback every frame.
struct GameState {
    // Periodic timer used to throttle how often the FPS label is re-rendered.
    timer: Timer,
}
/// Set up the demo scene: a periodically-updated FPS readout, three
/// explanatory messages, and three labels that `game_logic` animates
/// (translation, rotation, scale).
fn main() {
    let mut game = Game::new();

    // FPS readout at the top of the screen; its value is refreshed on a
    // timer inside `game_logic`.
    let fps_label = game.add_text("fps", "FPS: ");
    fps_label.font = "font/FiraMono-Medium.ttf".to_string();
    fps_label.font_size = 60.0;
    fps_label.translation = Vec2::new(0.0, 250.0);

    // Explanation of the font-size animation shown below.
    let zoom_note = game.add_text(
        "zoom_msg",
        "Changing font size re-renders the text smoothly at a different size,\nbut using this technique for animation is both jittery (character kerning) and expensive.",
    );
    zoom_note.translation = Vec2::new(0.0, 150.0);
    zoom_note.font_size = 35.0;

    // Explanation of font selection at creation time.
    let font_note = game.add_text(
        "font_msg",
        "You can choose a font at creation time by providing the filename of a font stored in assets/.\n\"font/FiraSans-Bold.ttf\" is the default. \"font/FiraMono-Medium.ttf\" is also included in the asset pack."
    );
    font_note.font = "font/FiraMono-Medium.ttf".to_string();
    font_note.font_size = 20.0;
    font_note.translation.y = 0.0;

    // Explanation of the cheap transform-based animations.
    let perf_note = game.add_text("msg", "Changing the text's translation, rotation, and scale is fast,\n so feel free to do that a lot.");
    perf_note.translation.y = -100.0;
    perf_note.font_size = 24.0;

    // The three animated labels along the bottom of the screen.
    let translation_label = game.add_text("translation", "Translation");
    translation_label.translation = Vec2::new(-400.0, -230.0);
    translation_label.font_size = 36.0;

    let rotation_label = game.add_text("rotation", "Rotation");
    rotation_label.translation = Vec2::new(0.0, -230.0);
    rotation_label.font_size = 36.0;

    let scale_label = game.add_text("scale", "Scale");
    scale_label.translation = Vec2::new(400.0, -230.0);
    scale_label.font_size = 36.0;

    // Repeating 0.2 s timer drives FPS-label refreshes.
    let initial_state = GameState {
        timer: Timer::from_seconds(0.2, true),
    };
    game.add_logic(game_logic);
    game.run(initial_state);
}
/// Per-frame logic: refreshes the FPS readout on a timer and animates the
/// "Translation", "Rotation" and "Scale" labels plus the zoom message.
fn game_logic(engine: &mut Engine, game_state: &mut GameState) {
    // Only re-render the FPS text a few times per second; re-rendering a
    // text's glyphs every frame would be wasteful.
    if game_state.timer.tick(engine.delta).just_finished() {
        // Fix: `get_mut(..).unwrap()` already yields a mutable reference, so
        // the binding itself does not need `mut` (clippy/rustc `unused_mut`).
        let fps = engine.texts.get_mut("fps").unwrap();
        fps.value = format!("FPS: {:.1}", 1.0 / engine.delta_f32);
    }
    // Move the "Translation" label along a small circle.
    let t = engine.texts.get_mut("translation").unwrap();
    t.translation.x = 50.0 * (engine.time_since_startup_f64).sin() as f32 - 400.0;
    t.translation.y = 50.0 * (engine.time_since_startup_f64).cos() as f32 - 230.0;
    // Spin the "Rotation" label at constant angular velocity.
    let r = engine.texts.get_mut("rotation").unwrap();
    r.rotation -= 1.5 * engine.delta_f32;
    // Oscillate the "Scale" label's scale between 0.5 and 2.5.
    let s = engine.texts.get_mut("scale").unwrap();
    s.scale = 1.5 + ((engine.time_since_startup_f64 * 0.5).cos() as f32) * -1.0;
    // Animate the zoom message's font size (the expensive technique the
    // message itself warns about): oscillates between 15.0 and 35.0.
    let msg3 = engine.texts.get_mut("zoom_msg").unwrap();
    msg3.font_size = 10.0 * (engine.time_since_startup_f64 * 0.5).cos() as f32 + 25.0;
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::net::SocketAddr;
use std::path::Path;
use common_config::GlobalConfig;
use common_config::InnerConfig;
use common_exception::ErrorCode;
use common_http::HttpError;
use common_http::HttpShutdownHandler;
use common_meta_types::anyerror::AnyError;
use poem::get;
use poem::listener::RustlsCertificate;
use poem::listener::RustlsConfig;
use poem::middleware::CatchPanic;
use poem::middleware::NormalizePath;
use poem::middleware::TrailingSlash;
use poem::put;
use poem::Endpoint;
use poem::EndpointExt;
use poem::Route;
use tracing::info;
use super::v1::upload_to_stage;
use crate::auth::AuthMgr;
use crate::servers::http::middleware::HTTPSessionMiddleware;
use crate::servers::http::v1::clickhouse_router;
use crate::servers::http::v1::query_route;
use crate::servers::http::v1::streaming_load;
use crate::servers::Server;
/// Which protocol flavour an HTTP handler serves.
#[derive(Copy, Clone)]
pub enum HttpHandlerKind {
    /// Databend's native HTTP query protocol (served under `/v1`).
    Query,
    /// ClickHouse-compatible HTTP protocol.
    Clickhouse,
}

impl HttpHandlerKind {
    /// Render a copy-pastable `curl` example for a handler of this kind
    /// listening on `sock`.
    pub fn usage(&self, sock: SocketAddr) -> String {
        match *self {
            HttpHandlerKind::Query => format!(
                r#" curl -u root: --request POST '{:?}/v1/query/' --header 'Content-Type: application/json' --data-raw '{{"sql": "SELECT avg(number) FROM numbers(100000000)"}}'
"#,
                sock,
            ),
            HttpHandlerKind::Clickhouse => {
                let json = r#"{"foo": "bar"}"#;
                format!(
                    r#" echo 'create table test(foo string)' | curl -u root: '{:?}' --data-binary @-
echo '{}' | curl -u root: '{:?}/?query=INSERT%20INTO%20test%20FORMAT%20JSONEachRow' --data-binary @-"#,
                    sock, json, sock,
                )
            }
        }
    }
}
/// An HTTP server (native query or ClickHouse-compatible) that can be started
/// with or without TLS and shut down gracefully.
pub struct HttpHandler {
    // Owns the listener lifecycle; used to start and later stop the service.
    shutdown_handler: HttpShutdownHandler,
    // Which protocol flavour this handler serves; decides the route tree.
    kind: HttpHandlerKind,
}
impl HttpHandler {
    /// Box up a new handler of the given kind with a fresh shutdown handler.
    pub fn create(kind: HttpHandlerKind) -> Box<dyn Server> {
        Box::new(HttpHandler {
            kind,
            shutdown_handler: HttpShutdownHandler::create("http handler".to_string()),
        })
    }
    /// Wrap `ep` with the session/auth middleware appropriate for this
    /// handler's kind.
    fn wrap_auth(&self, config: &InnerConfig, ep: Route) -> impl Endpoint {
        let auth_manager = AuthMgr::create(config);
        let session_middleware = HTTPSessionMiddleware::create(self.kind, auth_manager);
        ep.with(session_middleware).boxed()
    }
    /// Assemble the route tree for this handler's kind.
    ///
    /// `Query` handlers expose the authenticated `/v1` API
    /// (query/streaming_load/upload_to_stage), a ClickHouse-compatible tree
    /// under `/clickhouse`, a usage page at `/` and `/health`. `Clickhouse`
    /// handlers expose the ClickHouse tree at `/` plus `/health`.
    async fn build_router(&self, config: &InnerConfig, sock: SocketAddr) -> impl Endpoint {
        let ep_v1 = Route::new()
            .nest("/query", query_route())
            .at("/streaming_load", put(streaming_load))
            .at("/upload_to_stage", put(upload_to_stage));
        // Both API trees require an authenticated session.
        let ep_v1 = self.wrap_auth(config, ep_v1);
        let ep_clickhouse = Route::new().nest("/", clickhouse_router());
        let ep_clickhouse = self.wrap_auth(config, ep_clickhouse);
        // Unauthenticated helper endpoints: usage text and health probe.
        let ep_usage = Route::new().at(
            "/",
            get(poem::endpoint::make_sync(move |_| {
                HttpHandlerKind::Query.usage(sock)
            })),
        );
        let ep_health = Route::new().at("/", get(poem::endpoint::make_sync(move |_| "ok")));
        let ep = match self.kind {
            HttpHandlerKind::Query => Route::new()
                .at("/", ep_usage)
                .nest("/health", ep_health)
                .nest("/v1", ep_v1)
                .nest("/clickhouse", ep_clickhouse),
            HttpHandlerKind::Clickhouse => Route::new()
                .nest("/", ep_clickhouse)
                .nest("/health", ep_health),
        };
        // Trim trailing slashes so `/v1/query` and `/v1/query/` match the same
        // route, and convert handler panics into error responses instead of
        // taking down the worker.
        ep.with(NormalizePath::new(TrailingSlash::Trim))
            .with(CatchPanic::new())
            .boxed()
    }
    /// Build the rustls server config from the configured certificate/key
    /// paths. If a root-CA file is configured and exists on disk, client
    /// certificate authentication is also required (mutual TLS).
    fn build_tls(config: &InnerConfig) -> Result<RustlsConfig, std::io::Error> {
        let certificate = RustlsCertificate::new()
            .cert(std::fs::read(
                config.query.http_handler_tls_server_cert.as_str(),
            )?)
            .key(std::fs::read(
                config.query.http_handler_tls_server_key.as_str(),
            )?);
        let mut cfg = RustlsConfig::new().fallback(certificate);
        if Path::new(&config.query.http_handler_tls_server_root_ca_cert).exists() {
            cfg = cfg.client_auth_required(std::fs::read(
                config.query.http_handler_tls_server_root_ca_cert.as_str(),
            )?);
        }
        Ok(cfg)
    }
    /// Start serving over TLS using the globally registered config.
    async fn start_with_tls(&mut self, listening: SocketAddr) -> Result<SocketAddr, HttpError> {
        info!("Http Handler TLS enabled");
        let config = GlobalConfig::instance();
        let tls_config = Self::build_tls(config.as_ref())
            .map_err(|e: std::io::Error| HttpError::TlsConfigError(AnyError::new(&e)))?;
        let router = self.build_router(config.as_ref(), listening).await;
        self.shutdown_handler
            .start_service(listening, Some(tls_config), router, None)
            .await
    }
    /// Start serving plain HTTP using the globally registered config.
    async fn start_without_tls(&mut self, listening: SocketAddr) -> Result<SocketAddr, HttpError> {
        let router = self
            .build_router(GlobalConfig::instance().as_ref(), listening)
            .await;
        self.shutdown_handler
            .start_service(listening, None, router, None)
            .await
    }
}
#[async_trait::async_trait]
impl Server for HttpHandler {
    async fn shutdown(&mut self, graceful: bool) {
        self.shutdown_handler.shutdown(graceful).await;
    }

    /// Start serving on `listening`; TLS is used only when both a server key
    /// and a server certificate are configured. Transport-level errors are
    /// mapped onto the caller-facing `ErrorCode` variants.
    async fn start(&mut self, listening: SocketAddr) -> Result<SocketAddr, ErrorCode> {
        let config = GlobalConfig::instance();
        // TLS requires both pieces of material; missing either one falls back
        // to plain HTTP.
        let tls_enabled = !config.query.http_handler_tls_server_key.is_empty()
            && !config.query.http_handler_tls_server_cert.is_empty();
        // Fix: replaced `match <bool> { true => .., false => .. }` with the
        // idiomatic if/else (clippy `match_bool`); logic is unchanged.
        let res = if tls_enabled {
            self.start_with_tls(listening).await
        } else {
            self.start_without_tls(listening).await
        };
        res.map_err(|e: HttpError| match e {
            HttpError::BadAddressFormat(any_err) => {
                ErrorCode::BadAddressFormat(any_err.to_string())
            }
            le @ HttpError::ListenError { .. } => ErrorCode::CannotListenerPort(le.to_string()),
            HttpError::TlsConfigError(any_err) => {
                ErrorCode::TLSConfigurationFailure(any_err.to_string())
            }
        })
    }
}
|
//! An entropy encoder.
mod lazy_stream;
use self::lazy_stream::*;
use super::dictionary::Fetch;
use super::probabilities::IntoStatistics;
use super::rw::*;
use bytes::lengthwriter::LengthWriter;
use bytes::varnum::WriteVarNum;
use io::statistics::{Bytes, ContentInfo, Instances};
use io::{Path, TokenWriter};
use TokenWriterError;
use binjs_shared::{
FieldName, IdentifierName, InterfaceName, Node, PropertyKey, SharedString, F64,
};
use std::io::Write;
use std::ops::DerefMut;
#[allow(unused_imports)] // We keep enabling/disabling this.
use itertools::Itertools;
use range_encoding::opus;
/// An arbitrary initialization size for buffers.
const INITIAL_BUFFER_SIZE_BYTES: usize = 32768;
impl IntoStatistics for ContentInfo<LazyStream> {
    type AsStatistics = ContentInfo<Bytes>;
    /// Finalize and return the number of compressed bytes written.
    ///
    /// This number is determined by examining the length of the buffer
    /// to which this stream writes.
    fn into_statistics(self, _description: &str) -> ContentInfo<Bytes> {
        // Convert field-by-field: each LazyStream reports its own byte count.
        self.into_with(|name, field| field.into_statistics(name))
    }
}
impl ContentInfo<opus::Writer<LengthWriter>> {
    /// Finalize and return the number of compressed bytes written.
    ///
    /// This number is determined by examining the underlying LengthWriter.
    pub fn into_statistics(self) -> ContentInfo<Bytes> {
        // `done()` flushes the opus writer and yields the LengthWriter,
        // whose `len()` is the total byte count for that field.
        self.into_with(|_, value| value.done().unwrap().len().into())
    }
}
/// An entropy encoder, based on the Opus bit-level entropy coding.
pub struct Encoder {
    /// Shared dictionaries.
    options: ::entropy::Options,

    // -- Content

    /// Main stream compressed by entropy coding.
    writer: opus::Writer<Vec<u8>>,

    /// A file at which to dump the contents of the main stream.
    dump_path: Option<std::path::PathBuf>,

    /// Parts of the content that we do not know how to compress correctly
    /// with entropy coding yet, and that we rather compress by Brotli
    /// at the time of this writing.
    ///
    /// We're using a `LazyStream` rather than directly compressing, so
    /// as to simplify dumping of raw data to files, for forensics purposes,
    /// and also so as to let us entirely skip streams that have 0 bytes written.
    ///
    /// This is something of a hack and should be removed once we have a better
    /// idea of *what* we should encode with Brotli and what we shouldn't.
    content_streams: ContentInfo<LazyStream>,

    /// Parts of the header that we compress with Brotli.
    prelude_streams: PreludeStreams<LazyStream>,

    // --- Statistics.

    /// Measure the number of bytes written.
    content_opus_lengths: ContentInfo<opus::Writer<LengthWriter>>,

    /// Measure the number of entries written.
    content_instances: ContentInfo<Instances>,
}
impl Encoder {
    /// Create a new Encoder.
    ///
    /// When `path` is `Some`, raw per-stream dumps are written next to it for
    /// forensics: `set_extension("streams")` followed by `push(..)` places
    /// them inside a `<stem>.streams/` directory (e.g. `foo.streams/main.entropy`,
    /// `foo.streams/<name>.content`, `foo.streams/<name>.prelude`).
    pub fn new(path: Option<&std::path::Path>, options: ::entropy::Options) -> Self {
        // FIXME: We shouldn't need to clone the entire `options`. A shared immutable reference would do nicely.
        Encoder {
            writer: opus::Writer::new(Vec::with_capacity(INITIAL_BUFFER_SIZE_BYTES)),
            // Dump target for the entropy-coded main stream.
            dump_path: path.map(|path| {
                let mut buf = std::path::PathBuf::new();
                buf.push(path);
                buf.set_extension("streams");
                buf.push("main.entropy");
                buf
            }),
            options,
            // Per-field length trackers, backed by LengthWriters.
            content_opus_lengths: ContentInfo::with(|_| opus::Writer::new(LengthWriter::new())),
            // One lazily-created Brotli stream per content field.
            content_streams: ContentInfo::with(|name| {
                let maybe_buf = match path {
                    None => None,
                    Some(path) => {
                        let mut buf = std::path::PathBuf::new();
                        buf.push(path);
                        buf.set_extension("streams");
                        buf.push(name);
                        buf.set_extension("content");
                        Some(buf)
                    }
                };
                LazyStream::new(maybe_buf)
            }),
            // One lazily-created Brotli stream per prelude (dictionary) field.
            prelude_streams: PreludeStreams::with(|name| {
                let maybe_buf = match path {
                    None => None,
                    Some(path) => {
                        let mut buf = std::path::PathBuf::new();
                        buf.push(path);
                        buf.set_extension("streams");
                        buf.push(name);
                        buf.set_extension("prelude");
                        Some(buf)
                    }
                };
                LazyStream::new(maybe_buf)
            }),
            // Symbol counters start at zero.
            content_instances: ContentInfo::with(|_| 0.into()),
        }
    }
}
/// Emit a single symbol to the main (entropy-compressed) stream.
///
/// Note that this macro could not be implemented as a simple method, as we need to adapt it to different field names.
///
/// Usage:
/// `emit_symbol_to_main_stream!(self, name_of_the_probability_table, name_of_the_ContentInfo_field, "Description, used for debugging", path_in_the_ast, value_to_encode)`
macro_rules! emit_symbol_to_main_stream {
    ( $me: ident, $table: ident, $info: ident, $description: expr, $path: expr, $value: expr ) => {
        {
            use std::borrow::Borrow;
            let path = $path.borrow();
            debug!(target: "entropy_details", "Known paths ({}): [{}]",
                $description,
                $me.options
                    .probability_tables
                    .$table
                    .paths()
                    .map(|k| format!("{:?}", k))
                    .format(", "));
            // 1. Locate the `SymbolInfo` information for this value given the
            // path information. A miss is an error: values absent from the
            // dictionary cannot be entropy-coded on the main stream.
            let symbol = $me.options
                .probability_tables
                .$table
                .stats_by_node_value_mut(path, &$value)
                .ok_or_else(|| {
                    debug!(target: "entropy", "Couldn't find value {:?} at {:?} ({})",
                        $value, path, $description);
                    TokenWriterError::NotInDictionary(format!("{}: {:?} at {:?}", $description, $value, path))
                })?;
            // FIXME: For extensibility purposes, if the value is not in the dictionary, we should
            // add it to the prelude and a relevant content stream.
            // 2. This gives us an index (`symbol.index`) and a probability distribution
            // (`symbol.distribution`). Use them to write the probability at bit-level.
            let mut distribution = symbol.distribution
                .borrow_mut();
            $me.writer.symbol(symbol.index.into(), distribution.deref_mut())
                .map_err(TokenWriterError::WriteError)?;
            // 3. Also, update statistics: the LengthWriter-backed twin records
            // how many compressed bytes this field accounts for, and the
            // instances counter records how many symbols were emitted.
            $me.content_opus_lengths
                .$info
                .symbol(symbol.index.into(), distribution.deref_mut())
                .map_err(TokenWriterError::WriteError)?;
            $me.content_instances
                .$info += Into::<Instances>::into(1);
            Ok(())
        }
    }
}
/// Add a user-extensible symbol to one of the content streams.
/// If the symbol is not part of either the static dictionary
/// or the prelude dictionary, it is added to the latter.
///
/// This macro is designed for simple values whose binary representation
/// may be concatenated without loss of information. For string-like values
/// that need additional info, see `emit_string_symbol_to_streams`.
///
///
/// Note that this macro could not be implemented as a simple method, as we need to adapt it to different field names.
///
/// Usage:
/// `emit_simple_symbol_to_streams!(self, name_of_the_indexed_table, name_of_the_stream, value_to_encode, "Description, used for debugging")`
macro_rules! emit_simple_symbol_to_streams {
    ( $me: ident, $dictionary: ident, $out: ident, $writer: ident, $value: expr, $description: expr ) => {
        if let Fetch::Miss(_) = emit_symbol_to_content_stream!($me, $dictionary, $out, $value, $description) {
            // The value does not appear either in the static dictionary or in the prelude dictionary.
            // Add it to the latter, so the decoder can rebuild its dictionary.
            $me.prelude_streams.$out.increment();
            // `$writer` is the name of the write method (e.g. `write_varnum`)
            // used to serialize the raw value into the prelude stream.
            $me.prelude_streams.$out.$writer(*$value)
                .map_err(TokenWriterError::WriteError)?;
        }
    }
}
/// Add a user-extensible symbol to one of the content streams.
/// If the symbol is not part of either the static dictionary
/// or the prelude dictionary, it is added to the latter.
///
/// This macro is designed for string-like values, which may not
/// be simply concatenated to obtain a dictionary. For string-like
/// values, in addition to concatenation, we also record a list of
/// lengths, which we use during decoding to extract individual
/// strings.
///
/// Note that this macro could not be implemented as a simple method, as we need to adapt it to different field names.
///
/// Usage:
/// `emit_string_symbol_to_streams!(self, name_of_the_indexed_table, name_of_the_string_prelude_stream, name_of_the_string_length_prelude_stream, value_to_encode, "Description, used for debugging")`
macro_rules! emit_string_symbol_to_streams {
    ( $me: ident, $dictionary: ident, $out: ident, $len: ident, $value: expr, $description: expr ) => {
        if let Fetch::Miss(_) = emit_symbol_to_content_stream!($me, $dictionary, $out, $value, $description) {
            // The value does not appear either in the static dictionary or in the prelude dictionary.
            // Add it to the latter.
            $me.prelude_streams.$out.increment();
            match $value {
                Some(string) => {
                    // Write the binary representation of the length of string to the
                    // prelude stream `foo_len`, the binary representation of the string itself
                    // to the prelude stream `foo`.
                    let bytes = string.as_str()
                        .as_bytes();
                    $me.prelude_streams.$len.write_maybe_varnum(Some(bytes.len() as u32))
                        .map_err(TokenWriterError::WriteError)?;
                    $me.prelude_streams.$out.write_all(bytes)
                        .map_err(TokenWriterError::WriteError)?;
                }
                None => {
                    // If the string is `None`, just use the `null` varnum as length.
                    // No bytes are appended to the content prelude stream.
                    $me.prelude_streams.$len.write_maybe_varnum(None)
                        .map_err(TokenWriterError::WriteError)?;
                }
            }
        }
    }
}
/// Implementation shared by `emit_simple_symbol_to_streams` and `emit_string_symbol_to_streams`.
///
/// Fetch the index of a value in the dictionary and write it to the relevant content
/// stream. If this causes a new slot to be allocated in the dictionary, return
/// `Miss(_)` - the caller is responsible to ensure that the value is written
/// to the prelude stream.
///
/// Note that this macro could not be implemented as a simple method, as we need to adapt it to different field names.
///
/// Usage:
/// `emit_symbol_to_content_stream!(self, name_of_the_indexed_table, name_of_the_string_content_stream, value_to_encode, "Description, used for debugging")`
macro_rules! emit_symbol_to_content_stream {
    ( $me: ident, $dictionary:ident, $out:ident, $value: expr, $description: expr ) => {
        {
            use bytes::varnum::WriteVarNum;
            let value = $value;
            // 1. Fetch the index in the dictionary. `Hit` and `Miss` both
            // carry an index; `Miss` means a new slot was just allocated.
            let fetch = $me.options
                .probability_tables
                .$dictionary
                .fetch_index(value);
            debug!(target: "write", "Writing index {:?} as {:?} index to {}", $value, fetch, $description);
            let index = match fetch {
                Fetch::Hit(index) => index,
                Fetch::Miss(index) => index
            };
            // Note: We must make sure that we don't forget to write the value
            // to the prelude if it's a Miss.
            let as_usize: usize = index.clone();
            let as_u32: u32 = as_usize as u32;
            // 2. Locate stream
            let ref mut stream = $me.content_streams
                .$out;
            // 3. Write the index to Brotli.
            stream
                .write_varnum(as_u32)
                .map_err(TokenWriterError::WriteError)?;
            // 4. Also, update statistics
            $me.content_instances
                .$out += Into::<Instances>::into(1);
            // Return value will instruct the caller to write data to the prelude.
            fetch
        }
    }
}
impl Encoder {
    /// Flush a lazy stream (either a prelude stream or a content stream) into a buffer.
    ///
    /// If the stream is empty, do nothing. Otherwise, add `[name_of_stream]compression_method;compressed_bytes`.
    fn flush_stream(
        name: &str,
        stream: &mut LazyStream,
        out: &mut Vec<u8>,
    ) -> Result<(), std::io::Error> {
        debug!(target: "write", "Encoder::flush_stream {}, {} instances", name, stream.instances());
        // Capture the uncompressed count before `flush()` for the debug log.
        let bytes_written = stream.bytes_written();
        stream.flush()?;
        // `data()` is `None` for streams that never received any bytes;
        // such streams are skipped entirely (no header, no payload).
        if let Some(ref data) = stream.data() {
            debug!(target: "write", "Encoder::flush_stream: {} contains {} compressed bytes ({} uncompressed bytes written)",
                name,
                data.len(),
                bytes_written,
            );
            // Stream name
            out.write_all(b"[")?;
            out.write_all(name.as_bytes())?;
            out.write_all(b"]")?;
            out.write_all(FORMAT_BROTLI)?;
            // Stream length
            let len = data.len();
            out.write_varnum(len as u32)?;
            // Stream content
            out.write_all(data)?;
        }
        Ok(())
    }
}
impl TokenWriter for Encoder {
    type Data = Vec<u8>;

    /// Finalize encoding and return the complete binary image.
    ///
    /// Layout: global header, prelude streams (dictionaries), content
    /// streams, then the entropy-coded main stream.
    fn done(mut self) -> Result<Self::Data, TokenWriterError> {
        let mut data: Vec<u8> = Vec::with_capacity(INITIAL_BUFFER_SIZE_BYTES);
        data.extend(GLOBAL_HEADER_START);
        // FIXME: Write additional headers.
        // Write prelude compressed streams, containing dictionaries.
        // Streams are sorted by name so the output is deterministic.
        data.extend(SECTION_PRELUDE);
        for (name, stream) in self.prelude_streams.iter_mut().sorted_by_key(|kv| kv.0) {
            Self::flush_stream(name, stream, &mut data).map_err(TokenWriterError::WriteError)?;
        }
        // Write content compressed streams, containing references to
        // both the prelude dictionaries and the static dictionaries.
        data.extend(SECTION_CONTENT);
        for (name, stream) in self.content_streams.iter_mut().sorted_by_key(|kv| kv.0) {
            Self::flush_stream(name, stream, &mut data).map_err(TokenWriterError::WriteError)?;
        }
        // Write main stream of entropy-compressed data.
        data.write_all(SECTION_MAIN)
            .map_err(TokenWriterError::WriteError)?;
        data.write_all(FORMAT_ENTROPY_0)
            .map_err(TokenWriterError::WriteError)?;
        let entropy = self.writer.done().map_err(TokenWriterError::WriteError)?;
        // Optionally dump the raw entropy stream to disk for forensics.
        if let Some(path) = self.dump_path {
            let mut file = std::fs::File::create(path).map_err(TokenWriterError::WriteError)?;
            file.write_all(&entropy)
                .map_err(TokenWriterError::WriteError)?;
            file.flush().map_err(TokenWriterError::WriteError)?;
        }
        data.write_all(&entropy)
            .map_err(TokenWriterError::WriteError)?;
        // Update byte lengths
        *self.options.content_lengths.borrow_mut() += self
            .content_opus_lengths
            .into_with(|_, field| field.done().unwrap().len());
        *self.options.content_lengths.borrow_mut() +=
            self.content_streams.into_statistics("brotli");
        // Update number of instances
        *self.options.content_instances.borrow_mut() += self.content_instances;
        Ok(data)
    }

    // --- Fixed set
    // These values come from closed sets, so they are entropy-coded on the
    // main stream against the per-path probability tables.

    /// Encode an optional bool at `path` on the main stream.
    fn bool_at(&mut self, value: Option<bool>, path: &Path) -> Result<(), TokenWriterError> {
        emit_symbol_to_main_stream!(self, bool_by_path, bools, "bool_by_path", path, value)
    }

    /// Encode a string-enum variant at `path` on the main stream.
    fn string_enum_at(
        &mut self,
        value: &SharedString,
        path: &Path,
    ) -> Result<(), TokenWriterError> {
        emit_symbol_to_main_stream!(
            self,
            string_enum_by_path,
            string_enums,
            "string_enum_by_path",
            path,
            value
        )
    }

    /// Encode the interface name of a tagged tuple on the main stream.
    fn enter_tagged_tuple_at(
        &mut self,
        _node: &Node,
        tag: &InterfaceName,
        _children: &[&FieldName],
        path: &Path,
    ) -> Result<(), TokenWriterError> {
        emit_symbol_to_main_stream!(
            self,
            interface_name_by_path,
            interface_names,
            "interface_name_by_path",
            path,
            tag
        )
    }

    // --- User-extensible values
    // These are written as dictionary indices on the content streams; values
    // missing from the dictionaries are appended to the prelude streams.

    /// Encode an optional float; raw value goes to the prelude on a miss.
    fn float_at(&mut self, value: Option<f64>, _path: &Path) -> Result<(), TokenWriterError> {
        use bytes::float::WriteVarFloat;
        emit_simple_symbol_to_streams!(
            self,
            floats,
            floats,
            write_maybe_varfloat2,
            &value.map(F64::from),
            "float_at"
        );
        Ok(())
    }

    /// Encode a u32; raw varnum goes to the prelude on a miss.
    fn unsigned_long_at(&mut self, value: u32, _path: &Path) -> Result<(), TokenWriterError> {
        emit_simple_symbol_to_streams!(
            self,
            unsigned_longs,
            unsigned_longs,
            write_varnum,
            &value,
            "unsigned_long_at"
        );
        Ok(())
    }

    /// Encode an optional string literal (bytes + length on a miss).
    fn string_at(
        &mut self,
        value: Option<&SharedString>,
        _path: &Path,
    ) -> Result<(), TokenWriterError> {
        emit_string_symbol_to_streams!(
            self,
            string_literals,
            string_literals,
            string_literals_len,
            &value.cloned(),
            "string_at"
        );
        Ok(())
    }

    /// Encode an optional identifier name (bytes + length on a miss).
    fn identifier_name_at(
        &mut self,
        value: Option<&IdentifierName>,
        _path: &Path,
    ) -> Result<(), TokenWriterError> {
        emit_string_symbol_to_streams!(
            self,
            identifier_names,
            identifier_names,
            identifier_names_len,
            &value.cloned(),
            "identifier_name_at"
        );
        Ok(())
    }

    /// Encode an optional property key (bytes + length on a miss).
    fn property_key_at(
        &mut self,
        value: Option<&PropertyKey>,
        _path: &Path,
    ) -> Result<(), TokenWriterError> {
        emit_string_symbol_to_streams!(
            self,
            property_keys,
            property_keys,
            property_keys_len,
            &value.cloned(),
            "property_key_at"
        );
        Ok(())
    }

    /// Encode a list length as a user-extensible value.
    fn enter_list_at(&mut self, len: usize, _path: &Path) -> Result<(), TokenWriterError> {
        emit_simple_symbol_to_streams!(
            self,
            list_lengths,
            list_lengths,
            write_maybe_varnum,
            &Some(len as u32),
            "enter_list_at"
        );
        Ok(())
    }

    /// Lazy-function offsets are not supported by this encoder.
    fn offset_at(&mut self, _path: &Path) -> Result<(), TokenWriterError> {
        unimplemented!()
    }
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::ops::Range;
use common_arrow::arrow::bitmap::Bitmap;
use common_arrow::arrow::bitmap::MutableBitmap;
use crate::property::Domain;
use crate::types::ArgType;
use crate::types::DataType;
use crate::types::GenericMap;
use crate::types::ValueType;
use crate::utils::arrow::bitmap_into_mut;
use crate::values::Column;
use crate::values::Scalar;
use crate::ColumnBuilder;
use crate::ScalarRef;
/// Zero-sized marker type: the boolean member of the typed-value system,
/// mapping `bool` scalars onto arrow `Bitmap` columns.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BooleanType;
impl ValueType for BooleanType {
    type Scalar = bool;
    type ScalarRef<'a> = bool;
    // Boolean columns are stored as arrow bitmaps: one bit per value.
    type Column = Bitmap;
    type Domain = BooleanDomain;
    type ColumnIterator<'a> = common_arrow::arrow::bitmap::utils::BitmapIter<'a>;
    type ColumnBuilder = MutableBitmap;

    #[inline]
    fn upcast_gat<'short, 'long: 'short>(long: bool) -> bool {
        // `bool` is `Copy` and carries no lifetime, so this is a no-op.
        long
    }
    fn to_owned_scalar<'a>(scalar: Self::ScalarRef<'a>) -> Self::Scalar {
        scalar
    }
    fn to_scalar_ref<'a>(scalar: &'a Self::Scalar) -> Self::ScalarRef<'a> {
        *scalar
    }
    // Downcasts return `None` when the dynamic value is not a boolean.
    fn try_downcast_scalar<'a>(scalar: &'a ScalarRef) -> Option<Self::ScalarRef<'a>> {
        match scalar {
            ScalarRef::Boolean(scalar) => Some(*scalar),
            _ => None,
        }
    }
    fn try_downcast_column<'a>(col: &'a Column) -> Option<Self::Column> {
        match col {
            // Bitmap clones are cheap (shared buffer, not a bit copy).
            Column::Boolean(column) => Some(column.clone()),
            _ => None,
        }
    }
    fn try_downcast_builder<'a>(
        builder: &'a mut ColumnBuilder,
    ) -> Option<&'a mut Self::ColumnBuilder> {
        match builder {
            crate::ColumnBuilder::Boolean(builder) => Some(builder),
            _ => None,
        }
    }
    fn try_downcast_domain(domain: &Domain) -> Option<Self::Domain> {
        domain.as_boolean().map(BooleanDomain::clone)
    }
    fn upcast_scalar(scalar: Self::Scalar) -> Scalar {
        Scalar::Boolean(scalar)
    }
    fn upcast_column(col: Self::Column) -> Column {
        Column::Boolean(col)
    }
    fn upcast_domain(domain: Self::Domain) -> Domain {
        Domain::Boolean(domain)
    }
    fn column_len<'a>(col: &'a Self::Column) -> usize {
        col.len()
    }
    // Bounds-checked access: `None` when `index >= col.len()`.
    fn index_column<'a>(col: &'a Self::Column, index: usize) -> Option<Self::ScalarRef<'a>> {
        col.get(index)
    }
    /// # Safety
    /// Caller must guarantee `index < col.len()`: this forwards to
    /// `Bitmap::get_bit_unchecked` without a bounds check.
    unsafe fn index_column_unchecked<'a>(
        col: &'a Self::Column,
        index: usize,
    ) -> Self::ScalarRef<'a> {
        col.get_bit_unchecked(index)
    }
    fn slice_column<'a>(col: &'a Self::Column, range: Range<usize>) -> Self::Column {
        // `sliced` takes (offset, length), hence the subtraction.
        col.clone().sliced(range.start, range.end - range.start)
    }
    fn iter_column<'a>(col: &'a Self::Column) -> Self::ColumnIterator<'a> {
        col.iter()
    }
    fn column_to_builder(col: Self::Column) -> Self::ColumnBuilder {
        // Reuses the bitmap's buffer when uniquely owned.
        bitmap_into_mut(col)
    }
    fn builder_len(builder: &Self::ColumnBuilder) -> usize {
        builder.len()
    }
    fn push_item(builder: &mut Self::ColumnBuilder, item: Self::ScalarRef<'_>) {
        builder.push(item);
    }
    fn push_default(builder: &mut Self::ColumnBuilder) {
        // Default boolean is `false`.
        builder.push(false);
    }
    fn append_column(builder: &mut Self::ColumnBuilder, bitmap: &Self::Column) {
        builder.extend_from_bitmap(bitmap)
    }
    fn build_column(builder: Self::ColumnBuilder) -> Self::Column {
        builder.into()
    }
    fn build_scalar(builder: Self::ColumnBuilder) -> Self::Scalar {
        // A scalar build requires exactly one pushed value.
        assert_eq!(builder.len(), 1);
        builder.get(0)
    }
}
impl ArgType for BooleanType {
    fn data_type() -> DataType {
        DataType::Boolean
    }
    /// The widest boolean domain: both `true` and `false` are possible.
    fn full_domain() -> Self::Domain {
        BooleanDomain {
            has_false: true,
            has_true: true,
        }
    }
    fn create_builder(capacity: usize, _: &GenericMap) -> Self::ColumnBuilder {
        MutableBitmap::with_capacity(capacity)
    }
    fn column_from_iter(iter: impl Iterator<Item = Self::Scalar>, _: &GenericMap) -> Self::Column {
        match iter.size_hint() {
            // NOTE(review): this treats any finite `size_hint` upper bound as
            // a trusted length. `size_hint` is not an unsafe contract (only
            // `TrustedLen` is), so an iterator reporting an incorrect upper
            // bound would make this unsound — confirm callers only pass
            // well-behaved iterators.
            (_, Some(_)) => unsafe { MutableBitmap::from_trusted_len_iter_unchecked(iter).into() },
            (_, None) => MutableBitmap::from_iter(iter).into(),
        }
    }
    fn column_from_ref_iter<'a>(
        iter: impl Iterator<Item = Self::ScalarRef<'a>>,
        generics: &GenericMap,
    ) -> Self::Column {
        // `bool: Copy`, so ref iteration is identical to owned iteration.
        Self::column_from_iter(iter, generics)
    }
}
/// The value domain of a boolean column: which of the two values may occur.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct BooleanDomain {
    // `true` if the column may contain `false` values.
    pub has_false: bool,
    // `true` if the column may contain `true` values.
    pub has_true: bool,
}
|
use crate::architecture;
use crate::architecture::Endian;
use crate::executor::*;
use crate::il;
use crate::memory;
use crate::translator::x86::Amd64;
use crate::translator::{Options, Translator};
use crate::RC;
/// Build a `Driver` executing `instruction_bytes` mapped at address 0 on a
/// fresh AMD64 machine state.
///
/// `scalars` seeds initial scalar (register) values; `memory_` becomes the
/// state's data memory.
fn init_amd64_driver<'d>(
    instruction_bytes: Vec<u8>,
    scalars: Vec<(&str, il::Constant)>,
    memory_: Memory,
) -> Driver {
    // Map the code at address 0, readable and executable.
    let mut code_backing = memory::backing::Memory::new(Endian::Little);
    code_backing.set_memory(
        0,
        instruction_bytes,
        memory::MemoryPermissions::EXECUTE | memory::MemoryPermissions::READ,
    );
    // Lift the machine code into an IL function.
    let function = Amd64::new().translate_function(&code_backing, 0).unwrap();
    // Entry location: the first instruction of block 0, or the empty block
    // itself when translation produced no instructions there.
    let entry_is_empty = function
        .control_flow_graph()
        .block(0)
        .unwrap()
        .instructions()
        .is_empty();
    let function_location = if entry_is_empty {
        il::FunctionLocation::EmptyBlock(0)
    } else {
        il::FunctionLocation::Instruction(0, 0)
    };
    let location = il::ProgramLocation::new(Some(0), function_location);
    // println!("{}", function.control_flow_graph().graph().dot_graph());
    let mut program = il::Program::new();
    program.add_function(function);
    // Seed the initial machine state with the requested scalar values.
    let mut state = State::new(memory_);
    for (name, value) in scalars {
        state.set_scalar(name, value);
    }
    Driver::new(
        RC::new(program),
        location,
        state,
        RC::new(architecture::Amd64::new()),
    )
}
/// Single-step the driver until execution reaches `target_address`, then
/// return the driver at that point. Panics (via `unwrap`) on step errors.
fn step_to(mut driver: Driver, target_address: u64) -> Driver {
    loop {
        driver = driver.step().unwrap();
        // Locations without a concrete address (e.g. between instructions)
        // yield `None` and simply keep stepping.
        let reached = driver
            .location()
            .apply(driver.program())
            .unwrap()
            .address()
            .map_or(false, |address| address == target_address);
        if reached {
            return driver;
        }
    }
}
/// Build a 128-bit IL constant from two 64-bit halves.
///
/// NOTE(review): despite the parameter names, `lo` ends up in the UPPER
/// 64 bits and `hi` in the LOWER 64 bits — the expression below computes
/// `(zext128(lo) << 64) | zext128(hi)`. The tests in this file consistently
/// call it as `mk128const(high_qword, low_qword)` (e.g. the `movd` test
/// expects the moved value in the second argument). The names are misleading
/// but renaming is avoided here to keep the diff behavior-only.
fn mk128const(lo: u64, hi: u64) -> il::Constant {
    eval(
        &il::Expression::or(
            il::Expression::shl(
                il::Expression::zext(128, il::expr_const(lo, 64)).unwrap(),
                il::expr_const(64, 128),
            )
            .unwrap(),
            il::Expression::zext(128, il::expr_const(hi, 64)).unwrap(),
        )
        .unwrap(),
    )
    .unwrap()
}
#[test]
fn lea() {
    // lea ecx, [rax - 0x3]
    // Smoke test: only checks that the instruction translates without error.
    let bytes: Vec<u8> = vec![0x8d, 0x48, 0xfd];
    let translator = Amd64::new();
    let _ = translator
        .translate_block(&bytes, 0, &Options::new())
        .unwrap();
}
#[test]
fn movd() {
    // movd xmm1, esi
    // nop
    let bytes: Vec<u8> = vec![0x66, 0x0f, 0x6e, 0xce, 0x90];
    let driver = init_amd64_driver(
        bytes,
        vec![
            // Pre-set xmm1 to garbage to verify it is fully overwritten.
            (
                "xmm1",
                mk128const(0x0000_0000_1111_1111, 0x2222_2222_3333_3333),
            ),
            ("rsi", il::const_(0x1111_2222_dead_beef, 64)),
        ],
        Memory::new(Endian::Little),
    );
    let driver = step_to(driver, 0x4);
    assert_eq!(driver.state().get_scalar("xmm1").unwrap().bits(), 128);
    // movd zero-extends the 32-bit source: xmm1 must equal 0xdead_beef with
    // all upper bits cleared.
    assert!(eval(
        &il::Expression::cmpeq(
            driver.state().get_scalar("xmm1").unwrap().clone().into(),
            mk128const(0x0000_0000_0000_0000, 0x0000_0000_dead_beef).into()
        )
        .unwrap()
    )
    .unwrap()
    .is_one());
}
#[test]
fn pcmpeqd() {
    // pcmpeqd xmm0, xmm1   (comment previously had the typo "pcmeqd")
    // nop
    let bytes: Vec<u8> = vec![0x66, 0x0f, 0x76, 0xc1, 0x90];
    // Case 1: all four dwords equal -> every lane becomes all-ones.
    let driver = init_amd64_driver(
        bytes.clone(),
        vec![
            (
                "xmm0",
                mk128const(0x0000_0000_1111_1111, 0x2222_2222_3333_3333),
            ),
            (
                "xmm1",
                mk128const(0x0000_0000_1111_1111, 0x2222_2222_3333_3333),
            ),
        ],
        Memory::new(Endian::Little),
    );
    let driver = step_to(driver, 0x4);
    assert!(eval(
        &il::Expression::cmpeq(
            driver.state().get_scalar("xmm0").unwrap().clone().into(),
            mk128const(0xffff_ffff_ffff_ffff, 0xffff_ffff_ffff_ffff).into()
        )
        .unwrap()
    )
    .unwrap()
    .is_one());
    // Case 2: one dword differs (0x2232_2222 vs 0x2222_2222) -> that lane
    // becomes all-zeros while the others stay all-ones.
    let driver = init_amd64_driver(
        bytes,
        vec![
            (
                "xmm0",
                mk128const(0x0000_0000_1111_1111, 0x2232_2222_3333_3333),
            ),
            (
                "xmm1",
                mk128const(0x0000_0000_1111_1111, 0x2222_2222_3333_3333),
            ),
        ],
        Memory::new(Endian::Little),
    );
    let driver = step_to(driver, 0x4);
    assert!(eval(
        &il::Expression::cmpeq(
            driver.state().get_scalar("xmm0").unwrap().clone().into(),
            mk128const(0xffff_ffff_ffff_ffff, 0x0000_0000_ffff_ffff).into()
        )
        .unwrap()
    )
    .unwrap()
    .is_one());
}
#[test]
fn pcmpeqb() {
    // pcmpeqb xmm0, xmm1   (comment previously had the typo "pcmeqb")
    // nop
    let bytes: Vec<u8> = vec![0x66, 0x0f, 0x74, 0xc1, 0x90];
    let driver = init_amd64_driver(
        bytes,
        vec![
            (
                "xmm0",
                mk128const(0x0000_0000_1111_1111, 0x2222_2222_3333_3333),
            ),
            (
                "xmm1",
                mk128const(0x0000_0000_1111_1111, 0x5555_5555_0011_3322),
            ),
        ],
        Memory::new(Endian::Little),
    );
    let driver = step_to(driver, 0x4);
    // Byte-wise compare: the whole upper qword matches; in the lower qword
    // only the byte holding 0x33 at that position matches (-> 0x..ff00).
    assert!(eval(
        &il::Expression::cmpeq(
            driver.state().get_scalar("xmm0").unwrap().clone().into(),
            mk128const(0xffff_ffff_ffff_ffff, 0x0000_0000_0000_ff00).into()
        )
        .unwrap()
    )
    .unwrap()
    .is_one());
}
#[test]
fn pmovmskb() {
    // pmovmskb edx, xmm4
    // nop
    // (The previous comment said "pcmeqb", but 66 0F D7 /r with ModRM 0xd4
    // encodes PMOVMSKB r32(edx), xmm4 — consistent with the rdx assertion.)
    let bytes: Vec<u8> = vec![0x66, 0x0f, 0xd7, 0xd4, 0x90];
    let driver = init_amd64_driver(
        bytes,
        vec![(
            "xmm4",
            mk128const(0x00ff_00ff_0000_0000, 0xffff_ffff_ff00_ff00),
        )],
        Memory::new(Endian::Little),
    );
    let driver = step_to(driver, 0x4);
    // One result bit per byte of xmm4: each bit is that byte's MSB,
    // low byte first.
    assert_eq!(
        driver
            .state()
            .get_scalar("rdx")
            .unwrap()
            .value_u64()
            .unwrap(),
        0b0101_0000_1111_1010
    );
}
#[test]
fn rol() {
    // rol rax, 0x11
    // nop
    let code: Vec<u8> = vec![0x48, 0xc1, 0xc0, 0x11, 0x90];
    let initial_regs = vec![("rax", il::const_(0xbfeffffffd00, 64))];
    let driver = step_to(
        init_amd64_driver(code, initial_regs, Memory::new(Endian::Little)),
        0x4,
    );
    // 0x0000_bfef_ffff_fd00 rotated left by 17 bits.
    let expected = il::const_(0x7fdffffffa000001, 64);
    assert_eq!(driver.state().get_scalar("rax").unwrap(), &expected);
}
#[test]
fn ror() {
    // ror r8, 0x11
    // nop
    let code: Vec<u8> = vec![0x49, 0xc1, 0xc8, 0x11, 0x90];
    let initial_regs = vec![("r8", il::const_(0x7fdfffffed200001, 64))];
    let driver = step_to(
        init_amd64_driver(code, initial_regs, Memory::new(Endian::Little)),
        0x4,
    );
    // 0x7fdf_ffff_ed20_0001 rotated right by 17 bits.
    let expected = il::const_(0xbfeffffff690, 64);
    assert_eq!(driver.state().get_scalar("r8").unwrap(), &expected);
}
|
mod create_post_command;
pub use create_post_command::*;
|
#![allow(non_camel_case_types, non_snake_case)]
use otspec::types::*;
use otspec::{deserialize_visitor, read_field};
use otspec_macros::tables;
use serde::de::{SeqAccess, Visitor};
use serde::ser::SerializeSeq;
use serde::Serializer;
use serde::{Deserialize, Deserializer, Serialize};
// The `tables!` proc macro generates serializable/deserializable structs for
// the fixed-layout binary records below. Plain `//` comments are stripped by
// the lexer before the macro sees the token stream, so they are safe here.
tables!(
    // 10-byte PANOSE classification record (one byte per axis).
    Panose {
        u8 panose0
        u8 panose1
        u8 panose2
        u8 panose3
        u8 panose4
        u8 panose5
        u8 panose6
        u8 panose7
        u8 panose8
        u8 panose9
    }
    // Fields present in every OS/2 table version (version 0 core).
    os2core {
        uint16 version
        int16 xAvgCharWidth
        uint16 usWeightClass
        uint16 usWidthClass
        uint16 fsType
        int16 ySubscriptXSize
        int16 ySubscriptYSize
        int16 ySubscriptXOffset
        int16 ySubscriptYOffset
        int16 ySuperscriptXSize
        int16 ySuperscriptYSize
        int16 ySuperscriptXOffset
        int16 ySuperscriptYOffset
        int16 yStrikeoutSize
        int16 yStrikeoutPosition
        int16 sFamilyClass
        Panose panose
        uint32 ulUnicodeRange1
        uint32 ulUnicodeRange2
        uint32 ulUnicodeRange3
        uint32 ulUnicodeRange4
        Tag achVendID
        uint16 fsSelection
        uint16 usFirstCharIndex
        uint16 usLastCharIndex
        int16 sTypoAscender
        int16 sTypoDescender
        int16 sTypoLineGap
        uint16 usWinAscent
        uint16 usWinDescent
    }
    // Extra fields appended in version 1.
    os2v1 {
        uint32 ulCodePageRange1
        uint32 ulCodePageRange2
    }
    // Extra fields appended in version 2 (also used by versions 3 and 4).
    os2v2 {
        int16 sxHeight
        int16 sCapHeight
        uint16 usDefaultChar
        uint16 usBreakChar
        uint16 usMaxContext
    }
    // Extra fields appended in version 5.
    os2v5 {
        uint16 usLowerOpticalPointSize
        uint16 usUpperOpticalPointSize
    }
);
/// Represents a font's OS/2 (OS/2 and Windows Metrics) table
///
/// Fields typed `Option<_>` only exist in sufficiently new table versions
/// (noted per field); they are `None` when the table was read from an
/// older-version font.
#[derive(Debug, PartialEq)]
pub struct os2 {
    /// Table version (between 0 and 5)
    pub version: uint16,
    /// Average width (xMax-xMin) of all non-empty glyphs
    pub xAvgCharWidth: int16,
    /// Visual weight class (0-1000)
    pub usWeightClass: uint16,
    /// Visual width class (1=Ultra-Condensed <-> 9=Ultra-Expanded)
    pub usWidthClass: uint16,
    /// Font embedding permissions bit field
    pub fsType: uint16,
    /// Horizontal size of subscript glyphs
    pub ySubscriptXSize: int16,
    /// Vertical size of subscript glyphs
    pub ySubscriptYSize: int16,
    /// Horizontal offset of subscript glyphs
    pub ySubscriptXOffset: int16,
    /// Vertical offset of subscript glyphs
    pub ySubscriptYOffset: int16,
    /// Horizontal size of superscript glyphs
    pub ySuperscriptXSize: int16,
    /// Vertical size of superscript glyphs
    pub ySuperscriptYSize: int16,
    /// Horizontal offset of superscript glyphs
    pub ySuperscriptXOffset: int16,
    /// Vertical offset of superscript glyphs
    pub ySuperscriptYOffset: int16,
    /// Thickness of strikeout dash (usually same as em dash thickness)
    pub yStrikeoutSize: int16,
    /// Strikeout dash position above baseline
    pub yStrikeoutPosition: int16,
    /// IBM font class parameter. See <https://docs.microsoft.com/en-us/typography/opentype/spec/ibmfc>.
    pub sFamilyClass: int16,
    /// PANOSE metrics. See <https://monotype.github.io/panose/pan1.htm>.
    pub panose: Panose,
    /// Supported unicode range (bitfield)
    pub ulUnicodeRange1: uint32,
    /// Supported unicode range (bitfield)
    pub ulUnicodeRange2: uint32,
    /// Supported unicode range (bitfield)
    pub ulUnicodeRange3: uint32,
    /// Supported unicode range (bitfield)
    pub ulUnicodeRange4: uint32,
    /// Registered vendor ID. See <https://docs.microsoft.com/en-gb/typography/vendors/>.
    pub achVendID: Tag,
    /// Font selection bitfield
    pub fsSelection: uint16,
    /// Minimum Unicode codepoint supported by font
    pub usFirstCharIndex: uint16,
    /// Maximum Unicode codepoint supported by font
    pub usLastCharIndex: uint16,
    /// Typographic ascender
    pub sTypoAscender: int16,
    /// Typographic descender
    pub sTypoDescender: int16,
    /// Typographic line gap
    pub sTypoLineGap: int16,
    /// Windows clipping region ascender
    pub usWinAscent: uint16,
    /// Windows clipping region descender (Usually positive!)
    pub usWinDescent: uint16,
    /// Bitfield of supported codepages (Version >=1)
    pub ulCodePageRange1: Option<uint32>,
    /// Bitfield of supported codepages (Version >=1)
    pub ulCodePageRange2: Option<uint32>,
    /// x-Height (Version >= 2)
    pub sxHeight: Option<int16>,
    /// Cap height (Version >= 2)
    pub sCapHeight: Option<int16>,
    /// GID used for undefined glyph (Version >= 2)
    pub usDefaultChar: Option<uint16>,
    /// GID used for word break glyph (Version >= 2)
    pub usBreakChar: Option<uint16>,
    /// Length of largest contextual lookup (Version >= 2)
    pub usMaxContext: Option<uint16>,
    /// Lowest supported optical point size. Deprecated, use STAT instead (Version >= 5)
    pub usLowerOpticalPointSize: Option<uint16>,
    /// Highest supported optical point size. Deprecated, use STAT instead (Version >= 5)
    pub usUpperOpticalPointSize: Option<uint16>,
}
impl Serialize for os2 {
    /// Serializes the table as the version-appropriate concatenation of
    /// sub-records: the version-0 core is always emitted, then the v1, v2
    /// and v5 extension records depending on `self.version` (versions 3 and
    /// 4 add no fields beyond version 2). Missing optional fields are
    /// written as 0.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element(&os2core {
            version: self.version,
            xAvgCharWidth: self.xAvgCharWidth,
            usWeightClass: self.usWeightClass,
            usWidthClass: self.usWidthClass,
            fsType: self.fsType,
            ySubscriptXSize: self.ySubscriptXSize,
            ySubscriptYSize: self.ySubscriptYSize,
            ySubscriptXOffset: self.ySubscriptXOffset,
            ySubscriptYOffset: self.ySubscriptYOffset,
            ySuperscriptXSize: self.ySuperscriptXSize,
            ySuperscriptYSize: self.ySuperscriptYSize,
            ySuperscriptXOffset: self.ySuperscriptXOffset,
            ySuperscriptYOffset: self.ySuperscriptYOffset,
            yStrikeoutSize: self.yStrikeoutSize,
            yStrikeoutPosition: self.yStrikeoutPosition,
            sFamilyClass: self.sFamilyClass,
            // Field-by-field copy because Panose is not cloned here.
            panose: Panose {
                panose0: self.panose.panose0,
                panose1: self.panose.panose1,
                panose2: self.panose.panose2,
                panose3: self.panose.panose3,
                panose4: self.panose.panose4,
                panose5: self.panose.panose5,
                panose6: self.panose.panose6,
                panose7: self.panose.panose7,
                panose8: self.panose.panose8,
                panose9: self.panose.panose9,
            },
            ulUnicodeRange1: self.ulUnicodeRange1,
            ulUnicodeRange2: self.ulUnicodeRange2,
            ulUnicodeRange3: self.ulUnicodeRange3,
            ulUnicodeRange4: self.ulUnicodeRange4,
            achVendID: self.achVendID,
            fsSelection: self.fsSelection,
            usFirstCharIndex: self.usFirstCharIndex,
            usLastCharIndex: self.usLastCharIndex,
            sTypoAscender: self.sTypoAscender,
            sTypoDescender: self.sTypoDescender,
            sTypoLineGap: self.sTypoLineGap,
            usWinAscent: self.usWinAscent,
            usWinDescent: self.usWinDescent,
        })?;
        if self.version > 0 {
            seq.serialize_element(&os2v1 {
                ulCodePageRange1: self.ulCodePageRange1.unwrap_or(0),
                // BUG FIX: this previously serialized `ulCodePageRange1`
                // into the `ulCodePageRange2` slot (copy-paste error); the
                // deserializer reads these as two distinct fields.
                ulCodePageRange2: self.ulCodePageRange2.unwrap_or(0),
            })?;
        }
        if self.version > 1 {
            seq.serialize_element(&os2v2 {
                sxHeight: self.sxHeight.unwrap_or(0),
                sCapHeight: self.sCapHeight.unwrap_or(0),
                usDefaultChar: self.usDefaultChar.unwrap_or(0),
                usBreakChar: self.usBreakChar.unwrap_or(0),
                usMaxContext: self.usMaxContext.unwrap_or(0),
            })?;
        }
        if self.version > 4 {
            seq.serialize_element(&os2v5 {
                usLowerOpticalPointSize: self.usLowerOpticalPointSize.unwrap_or(0),
                usUpperOpticalPointSize: self.usUpperOpticalPointSize.unwrap_or(0),
            })?;
        }
        seq.end()
    }
}
// Deserializer for the OS/2 table: reads the always-present core record,
// then conditionally reads the v1/v2/v5 extension records based on the
// version found in the core. Mirrors the version gating in `Serialize`.
deserialize_visitor!(
    os2,
    Os2Visitor,
    fn visit_seq<A: SeqAccess<'de>>(self, mut seq: A) -> Result<Self::Value, A::Error> {
        // Version-0 core fields are always present.
        let core = read_field!(seq, os2core, "an OS/2 table");
        let mut res = os2 {
            version: core.version,
            xAvgCharWidth: core.xAvgCharWidth,
            usWeightClass: core.usWeightClass,
            usWidthClass: core.usWidthClass,
            fsType: core.fsType,
            ySubscriptXSize: core.ySubscriptXSize,
            ySubscriptYSize: core.ySubscriptYSize,
            ySubscriptXOffset: core.ySubscriptXOffset,
            ySubscriptYOffset: core.ySubscriptYOffset,
            ySuperscriptXSize: core.ySuperscriptXSize,
            ySuperscriptYSize: core.ySuperscriptYSize,
            ySuperscriptXOffset: core.ySuperscriptXOffset,
            ySuperscriptYOffset: core.ySuperscriptYOffset,
            yStrikeoutSize: core.yStrikeoutSize,
            yStrikeoutPosition: core.yStrikeoutPosition,
            sFamilyClass: core.sFamilyClass,
            panose: core.panose,
            ulUnicodeRange1: core.ulUnicodeRange1,
            ulUnicodeRange2: core.ulUnicodeRange2,
            ulUnicodeRange3: core.ulUnicodeRange3,
            ulUnicodeRange4: core.ulUnicodeRange4,
            achVendID: core.achVendID,
            fsSelection: core.fsSelection,
            usFirstCharIndex: core.usFirstCharIndex,
            usLastCharIndex: core.usLastCharIndex,
            sTypoAscender: core.sTypoAscender,
            sTypoDescender: core.sTypoDescender,
            sTypoLineGap: core.sTypoLineGap,
            usWinAscent: core.usWinAscent,
            usWinDescent: core.usWinDescent,
            // Optional fields default to None and are filled in below if
            // the table version provides them.
            ulCodePageRange1: None,
            ulCodePageRange2: None,
            sxHeight: None,
            sCapHeight: None,
            usDefaultChar: None,
            usBreakChar: None,
            usMaxContext: None,
            usLowerOpticalPointSize: None,
            usUpperOpticalPointSize: None,
        };
        // Extension records are cumulative: a v5 table contains the v1 and
        // v2 records too. Versions 3 and 4 add nothing beyond version 2.
        if core.version > 0 {
            let v1 = read_field!(seq, os2v1, "OS/2 version 1 fields");
            res.ulCodePageRange1 = Some(v1.ulCodePageRange1);
            res.ulCodePageRange2 = Some(v1.ulCodePageRange2);
        }
        if core.version > 1 {
            let v2 = read_field!(seq, os2v2, "OS/2 version 2 fields");
            res.sxHeight = Some(v2.sxHeight);
            res.sCapHeight = Some(v2.sCapHeight);
            res.usDefaultChar = Some(v2.usDefaultChar);
            res.usBreakChar = Some(v2.usBreakChar);
            res.usMaxContext = Some(v2.usMaxContext);
        }
        if core.version > 4 {
            let v5 = read_field!(seq, os2v5, "OS/2 version 5 fields");
            res.usLowerOpticalPointSize = Some(v5.usLowerOpticalPointSize);
            res.usUpperOpticalPointSize = Some(v5.usUpperOpticalPointSize);
        }
        Ok(res)
    }
);
|
#[cfg(feature = "serde")]
mod submodule {
    use super::*;
    use serde_bytes::Bytes;
    #[cfg(test)]
    mod tests {
        // Aliases the `serde_` crate to the name `serde` so that the derive
        // macros below can resolve the expected crate path.
        extern crate serde_ as serde;
        use serde::{Deserialize, Serialize};
        // NOTE(review): this appears to be a compile-only regression test —
        // it passes if the derive below compiles; the body does nothing.
        #[test]
        fn foo() {
            // error[E0463]: can't find crate for `serde`
            // --> src/lib.rs:15:40
            // |
            // 15 | #[derive(Debug, Serialize, Deserialize, PartialEq)]
            // | ^^^^^^^^^^^ can't find crate
            #[derive(Debug, Serialize, Deserialize, PartialEq)]
            struct Foo {}
        }
    }
}
|
use std::path::Path;
use image::{ImageBuffer, Rgba};
use tiny_fail::{ErrorMessageExt, Fail};
use crate::bucket::Bucket;
use crate::color::alpha_brend;
use crate::grid::Grid;
use crate::scale::ColorScale;
/// Renders the bucket counts into an image and writes it to `path`.
///
/// Convenience wrapper around [`gen_image`] followed by [`save_image`].
pub fn gen_and_save_image(
    path: impl AsRef<Path>,
    bucket: &Bucket,
    grid: &Grid,
    colorscale: &dyn ColorScale,
) -> Result<(), Fail> {
    let rendered = gen_image(bucket, grid, colorscale);
    save_image(path, &rendered)
}
/// Renders the bucket's per-cell counts into an RGBA image.
///
/// Each cell's count is mapped onto the colour scale using a log2 ramp
/// normalised by the largest count, then alpha-blended over the grid
/// background colour for that pixel.
pub fn gen_image(
    bucket: &Bucket,
    grid: &Grid,
    colorscale: &dyn ColorScale,
) -> ImageBuffer<Rgba<u8>, Vec<u8>> {
    let (width, height) = bucket.dim();
    let scale = bucket.scale();
    // +1 keeps log2 finite for a count of 0 and well-defined at the max.
    let max_log = ((bucket.max_count() + 1) as f32).log2();
    ImageBuffer::from_fn(width as u32, height as u32, |x, y| {
        let count = bucket.get(x, y);
        let fg = if count > 0 {
            let ratio = ((count + 1) as f32).log2() / max_log;
            colorscale.scale(ratio)
        } else {
            // Empty cells use the scale's background, or transparent black.
            colorscale.background().unwrap_or(Rgba([0; 4]))
        };
        let bg = grid.background(&bucket.real_pixcel(x, y), scale);
        let alpha = fg.0[3];
        Rgba([
            alpha_brend(fg.0[0], bg.0[0], alpha),
            alpha_brend(fg.0[1], bg.0[1], alpha),
            alpha_brend(fg.0[2], bg.0[2], alpha),
            alpha_brend(255, bg.0[3], alpha),
        ])
    })
}
/// Writes `img` to `path`, converting any encoding/I/O error into a `Fail`
/// with a human-readable message.
pub fn save_image(
    path: impl AsRef<Path>,
    img: &ImageBuffer<Rgba<u8>, Vec<u8>>,
) -> Result<(), Fail> {
    // `format!` with no interpolation was a needless allocation
    // (clippy::useless_format); the message grammar is also fixed.
    img.save(path).err_msg("failed to save image")?;
    Ok(())
}
|
use std::process::Command;
/// Verifies that `git` can be invoked; panics with an install hint if the
/// process cannot be spawned. The command's output is discarded.
pub fn git_check() {
    let mut status_cmd = Command::new("git");
    status_cmd.args(&["status", "--porcelain"]);
    status_cmd
        .output()
        .expect("This tool requires git. Please install git and try again.");
}
/// Runs `git commit -m <message>`, inheriting stdio so git's own output is
/// shown; panics only if the process cannot be run.
pub fn git_commit(message: &str) {
    let mut commit_cmd = Command::new("git");
    commit_cmd.args(&["commit", "-m", message]);
    commit_cmd
        .status()
        .expect("Something went wrong trying to commit the new change.");
}
/// Pushes to the current branch.
///
/// Discovers the branch name via `git rev-parse --abbrev-ref HEAD`, then
/// runs `git push --tags -u origin <branch>`.
pub fn git_push() {
    let branch_result = Command::new("git")
        .args(&["rev-parse", "--abbrev-ref", "HEAD"])
        .output()
        // FIX: this step discovers the branch; the old message wrongly
        // claimed a push failure, which would mislead users debugging it.
        .expect("Something went wrong trying to determine the current branch.")
        .stdout;
    let branch = std::str::from_utf8(&branch_result)
        .expect("git sent a non-utf-8 byte stream as standard out")
        .trim()
        .to_owned();
    // FIX: tidied the garbled "Discovered Working on branch" message.
    println!("Working on branch: {}", branch);
    Command::new("git")
        .args(&["push", "--tags", "-u", "origin", &branch])
        .status()
        .expect("Something went wrong trying to push the branch.");
}
/// Creates an annotated tag named `version` whose message is also `version`
/// (`git tag -am <version> <version>`).
pub fn git_tag(version: &str) {
    let mut tag_cmd = Command::new("git");
    tag_cmd.args(&["tag", "-am", version, version]);
    tag_cmd
        .status()
        .expect("Something went wrong when creating a git tag.");
}
|
// NOTE(review): svd2rust-style generated accessors — looks like an STM32
// RCC CSR (control/status) register; confirm against the device SVD.
#[doc = "Reader of register CSR"]
pub type R = crate::R<u32, super::CSR>;
#[doc = "Writer for register CSR"]
pub type W = crate::W<u32, super::CSR>;
#[doc = "Register CSR `reset()`'s with value 0"]
impl crate::ResetValue for super::CSR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// Enumerated values, reader, and write proxy for the LPWRSTF bit (bit 31).
// ("occured" typos in the generated doc strings corrected to "occurred".)
#[doc = "Low-power reset flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LPWRSTF_A {
    #[doc = "0: No reset has occurred"]
    NORESET = 0,
    #[doc = "1: A reset has occurred"]
    RESET = 1,
}
impl From<LPWRSTF_A> for bool {
    #[inline(always)]
    fn from(variant: LPWRSTF_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `LPWRSTF`"]
pub type LPWRSTF_R = crate::R<bool, LPWRSTF_A>;
impl LPWRSTF_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LPWRSTF_A {
        match self.bits {
            false => LPWRSTF_A::NORESET,
            true => LPWRSTF_A::RESET,
        }
    }
    #[doc = "Checks if the value of the field is `NORESET`"]
    #[inline(always)]
    pub fn is_no_reset(&self) -> bool {
        *self == LPWRSTF_A::NORESET
    }
    #[doc = "Checks if the value of the field is `RESET`"]
    #[inline(always)]
    pub fn is_reset(&self) -> bool {
        *self == LPWRSTF_A::RESET
    }
}
#[doc = "Write proxy for field `LPWRSTF`"]
pub struct LPWRSTF_W<'a> {
    w: &'a mut W,
}
impl<'a> LPWRSTF_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: LPWRSTF_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "No reset has occurred"]
    #[inline(always)]
    pub fn no_reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::NORESET)
    }
    #[doc = "A reset has occurred"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::RESET)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 31, then OR in the (masked) new value at bit 31.
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}
// The remaining reset-flag fields reuse LPWRSTF_A for their enumerated
// values; only the bit position differs per write proxy:
// WWDGRSTF=30, IWDGRSTF=29, SFTRSTF=28, PORRSTF=27, PINRSTF=26.
// ("occured" typos in the generated doc strings corrected to "occurred".)
#[doc = "Window watchdog reset flag"]
pub type WWDGRSTF_A = LPWRSTF_A;
#[doc = "Reader of field `WWDGRSTF`"]
pub type WWDGRSTF_R = crate::R<bool, LPWRSTF_A>;
#[doc = "Write proxy for field `WWDGRSTF`"]
pub struct WWDGRSTF_W<'a> {
    w: &'a mut W,
}
impl<'a> WWDGRSTF_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: WWDGRSTF_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "No reset has occurred"]
    #[inline(always)]
    pub fn no_reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::NORESET)
    }
    #[doc = "A reset has occurred"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::RESET)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
        self.w
    }
}
#[doc = "Independent watchdog reset flag"]
pub type IWDGRSTF_A = LPWRSTF_A;
#[doc = "Reader of field `IWDGRSTF`"]
pub type IWDGRSTF_R = crate::R<bool, LPWRSTF_A>;
#[doc = "Write proxy for field `IWDGRSTF`"]
pub struct IWDGRSTF_W<'a> {
    w: &'a mut W,
}
impl<'a> IWDGRSTF_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: IWDGRSTF_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "No reset has occurred"]
    #[inline(always)]
    pub fn no_reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::NORESET)
    }
    #[doc = "A reset has occurred"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::RESET)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
        self.w
    }
}
#[doc = "Software reset flag"]
pub type SFTRSTF_A = LPWRSTF_A;
#[doc = "Reader of field `SFTRSTF`"]
pub type SFTRSTF_R = crate::R<bool, LPWRSTF_A>;
#[doc = "Write proxy for field `SFTRSTF`"]
pub struct SFTRSTF_W<'a> {
    w: &'a mut W,
}
impl<'a> SFTRSTF_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SFTRSTF_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "No reset has occurred"]
    #[inline(always)]
    pub fn no_reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::NORESET)
    }
    #[doc = "A reset has occurred"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::RESET)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
        self.w
    }
}
#[doc = "POR/PDR reset flag"]
pub type PORRSTF_A = LPWRSTF_A;
#[doc = "Reader of field `PORRSTF`"]
pub type PORRSTF_R = crate::R<bool, LPWRSTF_A>;
#[doc = "Write proxy for field `PORRSTF`"]
pub struct PORRSTF_W<'a> {
    w: &'a mut W,
}
impl<'a> PORRSTF_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: PORRSTF_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "No reset has occurred"]
    #[inline(always)]
    pub fn no_reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::NORESET)
    }
    #[doc = "A reset has occurred"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::RESET)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u32) & 0x01) << 27);
        self.w
    }
}
#[doc = "PIN reset flag"]
pub type PINRSTF_A = LPWRSTF_A;
#[doc = "Reader of field `PINRSTF`"]
pub type PINRSTF_R = crate::R<bool, LPWRSTF_A>;
#[doc = "Write proxy for field `PINRSTF`"]
pub struct PINRSTF_W<'a> {
    w: &'a mut W,
}
impl<'a> PINRSTF_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: PINRSTF_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "No reset has occurred"]
    #[inline(always)]
    pub fn no_reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::NORESET)
    }
    #[doc = "A reset has occurred"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::RESET)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26);
        self.w
    }
}
// RMVF (bit 24): write-1-to-clear field; only the CLEAR variant is
// enumerated, so the reader's `variant()` must return crate::Variant to
// represent the un-enumerated 0 state.
#[doc = "Remove reset flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RMVF_A {
    #[doc = "1: Clears the reset flag"]
    CLEAR = 1,
}
impl From<RMVF_A> for bool {
    #[inline(always)]
    fn from(variant: RMVF_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `RMVF`"]
pub type RMVF_R = crate::R<bool, RMVF_A>;
impl RMVF_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<bool, RMVF_A> {
        use crate::Variant::*;
        match self.bits {
            true => Val(RMVF_A::CLEAR),
            // false has no enumerated meaning; pass the raw value through.
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `CLEAR`"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == RMVF_A::CLEAR
    }
}
#[doc = "Write proxy for field `RMVF`"]
pub struct RMVF_W<'a> {
    w: &'a mut W,
}
impl<'a> RMVF_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: RMVF_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clears the reset flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(RMVF_A::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
        self.w
    }
}
// RTCRST (bit 23): software reset of the RTC; only the RESET=1 variant is
// enumerated, mirroring the RMVF pattern above.
#[doc = "RTC software reset\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RTCRST_A {
    #[doc = "1: Resets the RTC peripheral"]
    RESET = 1,
}
impl From<RTCRST_A> for bool {
    #[inline(always)]
    fn from(variant: RTCRST_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `RTCRST`"]
pub type RTCRST_R = crate::R<bool, RTCRST_A>;
impl RTCRST_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<bool, RTCRST_A> {
        use crate::Variant::*;
        match self.bits {
            true => Val(RTCRST_A::RESET),
            // false has no enumerated meaning; pass the raw value through.
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `RESET`"]
    #[inline(always)]
    pub fn is_reset(&self) -> bool {
        *self == RTCRST_A::RESET
    }
}
#[doc = "Write proxy for field `RTCRST`"]
pub struct RTCRST_W<'a> {
    w: &'a mut W,
}
impl<'a> RTCRST_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: RTCRST_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Resets the RTC peripheral"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut W {
        self.variant(RTCRST_A::RESET)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);
        self.w
    }
}
// RTCEN (bit 22, plain bool) and RTCSEL (2-bit field at bits 16..18).
// RTCSEL's raw `bits` writer is `unsafe` because not every 2-bit value is
// necessarily a valid clock selection.
#[doc = "Reader of field `RTCEN`"]
pub type RTCEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RTCEN`"]
pub struct RTCEN_W<'a> {
    w: &'a mut W,
}
impl<'a> RTCEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);
        self.w
    }
}
#[doc = "Reader of field `RTCSEL`"]
pub type RTCSEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RTCSEL`"]
pub struct RTCSEL_W<'a> {
    w: &'a mut W,
}
impl<'a> RTCSEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 16)) | (((value as u32) & 0x03) << 16);
        self.w
    }
}
// Low-speed oscillator controls: LSEBYP (bit 10), LSEON (bit 8), LSION
// (bit 0) are read/write; LSERDY (bit 9) and LSIRDY (bit 1) are
// hardware-set status bits and therefore have readers only.
#[doc = "Reader of field `LSEBYP`"]
pub type LSEBYP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LSEBYP`"]
pub struct LSEBYP_W<'a> {
    w: &'a mut W,
}
impl<'a> LSEBYP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
#[doc = "Reader of field `LSERDY`"]
pub type LSERDY_R = crate::R<bool, bool>;
#[doc = "Reader of field `LSEON`"]
pub type LSEON_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LSEON`"]
pub struct LSEON_W<'a> {
    w: &'a mut W,
}
impl<'a> LSEON_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `LSIRDY`"]
pub type LSIRDY_R = crate::R<bool, bool>;
#[doc = "Reader of field `LSION`"]
pub type LSION_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LSION`"]
pub struct LSION_W<'a> {
    w: &'a mut W,
}
impl<'a> LSION_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: no shift required.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// OBLRSTF (bit 25): option-bytes-loading reset flag; reuses LPWRSTF_A.
// ("occured" typos in the generated doc strings corrected to "occurred".)
#[doc = "Options bytes loading reset flag"]
pub type OBLRSTF_A = LPWRSTF_A;
#[doc = "Reader of field `OBLRSTF`"]
pub type OBLRSTF_R = crate::R<bool, LPWRSTF_A>;
#[doc = "Write proxy for field `OBLRSTF`"]
pub struct OBLRSTF_W<'a> {
    w: &'a mut W,
}
impl<'a> OBLRSTF_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: OBLRSTF_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "No reset has occurred"]
    #[inline(always)]
    pub fn no_reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::NORESET)
    }
    #[doc = "A reset has occurred"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut W {
        self.variant(LPWRSTF_A::RESET)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);
        self.w
    }
}
// LSE clock-security-system fields: LSECSSD (bit 12) and LSECSSON (bit 11).
#[doc = "Reader of field `LSECSSD`"]
pub type LSECSSD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LSECSSD`"]
pub struct LSECSSD_W<'a> {
    w: &'a mut W,
}
impl<'a> LSECSSD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
#[doc = "Reader of field `LSECSSON`"]
pub type LSECSSON_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LSECSSON`"]
pub struct LSECSSON_W<'a> {
    w: &'a mut W,
}
impl<'a> LSECSSON_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
// Field accessors on a read snapshot of the register: each extracts its
// bit(s) from the cached `self.bits` value.
impl R {
    #[doc = "Bit 31 - Low-power reset flag"]
    #[inline(always)]
    pub fn lpwrstf(&self) -> LPWRSTF_R {
        LPWRSTF_R::new(((self.bits >> 31) & 0x01) != 0)
    }
    #[doc = "Bit 30 - Window watchdog reset flag"]
    #[inline(always)]
    pub fn wwdgrstf(&self) -> WWDGRSTF_R {
        WWDGRSTF_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 29 - Independent watchdog reset flag"]
    #[inline(always)]
    pub fn iwdgrstf(&self) -> IWDGRSTF_R {
        IWDGRSTF_R::new(((self.bits >> 29) & 0x01) != 0)
    }
    #[doc = "Bit 28 - Software reset flag"]
    #[inline(always)]
    pub fn sftrstf(&self) -> SFTRSTF_R {
        SFTRSTF_R::new(((self.bits >> 28) & 0x01) != 0)
    }
    #[doc = "Bit 27 - POR/PDR reset flag"]
    #[inline(always)]
    pub fn porrstf(&self) -> PORRSTF_R {
        PORRSTF_R::new(((self.bits >> 27) & 0x01) != 0)
    }
    #[doc = "Bit 26 - PIN reset flag"]
    #[inline(always)]
    pub fn pinrstf(&self) -> PINRSTF_R {
        PINRSTF_R::new(((self.bits >> 26) & 0x01) != 0)
    }
    #[doc = "Bit 24 - Remove reset flag"]
    #[inline(always)]
    pub fn rmvf(&self) -> RMVF_R {
        RMVF_R::new(((self.bits >> 24) & 0x01) != 0)
    }
    #[doc = "Bit 23 - RTC software reset"]
    #[inline(always)]
    pub fn rtcrst(&self) -> RTCRST_R {
        RTCRST_R::new(((self.bits >> 23) & 0x01) != 0)
    }
    #[doc = "Bit 22 - RTC clock enable"]
    #[inline(always)]
    pub fn rtcen(&self) -> RTCEN_R {
        RTCEN_R::new(((self.bits >> 22) & 0x01) != 0)
    }
    #[doc = "Bits 16:17 - RTC and LCD clock source selection"]
    #[inline(always)]
    pub fn rtcsel(&self) -> RTCSEL_R {
        RTCSEL_R::new(((self.bits >> 16) & 0x03) as u8)
    }
    #[doc = "Bit 10 - External low-speed oscillator bypass"]
    #[inline(always)]
    pub fn lsebyp(&self) -> LSEBYP_R {
        LSEBYP_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 9 - External low-speed oscillator ready"]
    #[inline(always)]
    pub fn lserdy(&self) -> LSERDY_R {
        LSERDY_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 8 - External low-speed oscillator enable"]
    #[inline(always)]
    pub fn lseon(&self) -> LSEON_R {
        LSEON_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 1 - Internal low-speed oscillator ready"]
    #[inline(always)]
    pub fn lsirdy(&self) -> LSIRDY_R {
        LSIRDY_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 0 - Internal low-speed oscillator enable"]
    #[inline(always)]
    pub fn lsion(&self) -> LSION_R {
        LSION_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 25 - Options bytes loading reset flag"]
    #[inline(always)]
    pub fn oblrstf(&self) -> OBLRSTF_R {
        OBLRSTF_R::new(((self.bits >> 25) & 0x01) != 0)
    }
    #[doc = "Bit 12 - CSS on LSE failure Detection"]
    #[inline(always)]
    pub fn lsecssd(&self) -> LSECSSD_R {
        LSECSSD_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 11 - CSS on LSE enable"]
    #[inline(always)]
    pub fn lsecsson(&self) -> LSECSSON_R {
        LSECSSON_R::new(((self.bits >> 11) & 0x01) != 0)
    }
}
// Write-proxy constructors. Read-only status bits (LSERDY bit 9, LSIRDY
// bit 1) intentionally have no writer here — presumably hardware-managed;
// confirm against the reference manual.
impl W {
    #[doc = "Bit 31 - Low-power reset flag"]
    #[inline(always)]
    pub fn lpwrstf(&mut self) -> LPWRSTF_W {
        LPWRSTF_W { w: self }
    }
    #[doc = "Bit 30 - Window watchdog reset flag"]
    #[inline(always)]
    pub fn wwdgrstf(&mut self) -> WWDGRSTF_W {
        WWDGRSTF_W { w: self }
    }
    #[doc = "Bit 29 - Independent watchdog reset flag"]
    #[inline(always)]
    pub fn iwdgrstf(&mut self) -> IWDGRSTF_W {
        IWDGRSTF_W { w: self }
    }
    #[doc = "Bit 28 - Software reset flag"]
    #[inline(always)]
    pub fn sftrstf(&mut self) -> SFTRSTF_W {
        SFTRSTF_W { w: self }
    }
    #[doc = "Bit 27 - POR/PDR reset flag"]
    #[inline(always)]
    pub fn porrstf(&mut self) -> PORRSTF_W {
        PORRSTF_W { w: self }
    }
    #[doc = "Bit 26 - PIN reset flag"]
    #[inline(always)]
    pub fn pinrstf(&mut self) -> PINRSTF_W {
        PINRSTF_W { w: self }
    }
    #[doc = "Bit 24 - Remove reset flag"]
    #[inline(always)]
    pub fn rmvf(&mut self) -> RMVF_W {
        RMVF_W { w: self }
    }
    #[doc = "Bit 23 - RTC software reset"]
    #[inline(always)]
    pub fn rtcrst(&mut self) -> RTCRST_W {
        RTCRST_W { w: self }
    }
    #[doc = "Bit 22 - RTC clock enable"]
    #[inline(always)]
    pub fn rtcen(&mut self) -> RTCEN_W {
        RTCEN_W { w: self }
    }
    #[doc = "Bits 16:17 - RTC and LCD clock source selection"]
    #[inline(always)]
    pub fn rtcsel(&mut self) -> RTCSEL_W {
        RTCSEL_W { w: self }
    }
    #[doc = "Bit 10 - External low-speed oscillator bypass"]
    #[inline(always)]
    pub fn lsebyp(&mut self) -> LSEBYP_W {
        LSEBYP_W { w: self }
    }
    #[doc = "Bit 8 - External low-speed oscillator enable"]
    #[inline(always)]
    pub fn lseon(&mut self) -> LSEON_W {
        LSEON_W { w: self }
    }
    #[doc = "Bit 0 - Internal low-speed oscillator enable"]
    #[inline(always)]
    pub fn lsion(&mut self) -> LSION_W {
        LSION_W { w: self }
    }
    #[doc = "Bit 25 - Options bytes loading reset flag"]
    #[inline(always)]
    pub fn oblrstf(&mut self) -> OBLRSTF_W {
        OBLRSTF_W { w: self }
    }
    #[doc = "Bit 12 - CSS on LSE failure Detection"]
    #[inline(always)]
    pub fn lsecssd(&mut self) -> LSECSSD_W {
        LSECSSD_W { w: self }
    }
    #[doc = "Bit 11 - CSS on LSE enable"]
    #[inline(always)]
    pub fn lsecsson(&mut self) -> LSECSSON_W {
        LSECSSON_W { w: self }
    }
}
|
use serde::{Serialize, Serializer};
use std::convert::TryFrom;
use std::fs::File;
use std::io::Result;
use std::{collections::HashSet, path::Path};
use bam::Record;
use noodles_bam::{self as bam, bai};
use noodles_bgzf::VirtualPosition;
use noodles_sam::{self as sam};
// Per-reference-sequence summary produced by `bam_blocks`.
#[derive(Debug, Serialize)]
pub struct RefSeq {
// Reference sequence name taken from the SAM header.
name: String,
// Position of this reference sequence in the header's list.
index: usize,
// Reference sequence length (bases), from the header.
len: usize,
// First/last virtual positions from the index metadata, serialized as
// "compressed/uncompressed" strings.
#[serde(serialize_with = "serde_virtual_position")]
start: VirtualPosition,
#[serde(serialize_with = "serde_virtual_position")]
end: VirtualPosition,
// Smallest / largest alignment positions observed across all kept blocks.
seq_start: u64,
seq_end: u64,
// Per-BGZF-interval statistics (see `Block`).
blocks: Vec<Block>,
}
// Statistics for one compressed-offset interval of the BAM file.
#[derive(Debug, Serialize)]
pub struct Block {
// Compressed (BGZF) byte offsets delimiting the interval.
start: u64,
end: u64,
// Min/max alignment positions among mapped records in the interval.
seq_start: u64,
seq_end: u64,
// How many records in the interval had / lacked a mapping position.
mapped_count: usize,
unmapped_count: usize,
}
/// Scans a coordinate-sorted, indexed BAM file and summarizes, per reference
/// sequence, the BGZF offset intervals it occupies plus simple per-interval
/// record statistics.
///
/// The BAI index is expected at `path.with_extension("bam.bai")`. One
/// [`RefSeq`] is produced for every reference sequence that carries index
/// metadata.
///
/// # Errors
///
/// Returns any I/O error raised while reading the index or the BAM stream.
pub fn bam_blocks<P: AsRef<Path>>(path: P) -> Result<Vec<RefSeq>> {
    let index = bai::read(path.as_ref().with_extension("bam.bai"))?;
    let mut reader = File::open(path.as_ref()).map(bam::Reader::new)?;
    let header = reader.read_header()?.parse::<sam::Header>().unwrap();
    let mut ref_seqs: Vec<RefSeq> = Vec::with_capacity(header.reference_sequences().len());
    // Header and index list reference sequences in the same order, so walk
    // them side by side.
    let joined_ref_seqs = header
        .reference_sequences()
        .into_iter()
        .zip(index.reference_sequences().iter())
        .enumerate();
    for (idx, ((ref_seq_name, hdr_ref_seq), idx_ref_seq)) in joined_ref_seqs {
        if let Some(metadata) = idx_ref_seq.metadata() {
            // Collect every distinct compressed offset touched by any chunk of
            // any bin; after sorting these act as BGZF block boundaries.
            let boundary_set: HashSet<u64> = idx_ref_seq
                .bins()
                .iter()
                .flat_map(|bin| bin.chunks().iter())
                .flat_map(|chunk| vec![chunk.start(), chunk.end()])
                .map(|vpos| vpos.compressed())
                .collect();
            let mut boundaries: Vec<u64> = boundary_set.into_iter().collect();
            boundaries.sort_unstable();
            // Pair adjacent boundaries into half-open intervals. `windows(2)`
            // yields nothing for fewer than two boundaries, which fixes the
            // `len() - 1` usize-underflow panic the original code hit when a
            // reference sequence had metadata but no chunks.
            let intervals: Vec<(u64, u64)> = boundaries
                .windows(2)
                .map(|pair| (pair[0], pair[1]))
                .collect();
            let mut ref_seq_start = u64::MAX;
            let mut ref_seq_end = u64::MIN;
            let mut blocks = Vec::new();
            // One record buffer, reused across every read.
            let mut record: Record = Record::default();
            for (start, end) in intervals {
                let mut seq_start = u64::MAX;
                let mut seq_end = u64::MIN;
                let mut mapped_count = 0usize;
                let mut unmapped_count = 0usize;
                // Jump to the interval start (uncompressed offset 0 within the block).
                reader.seek(VirtualPosition::try_from((start, 0u16)).unwrap())?;
                while reader.virtual_position().compressed() < end {
                    reader.read_record(&mut record)?;
                    if let Some(position) = record.position() {
                        mapped_count += 1;
                        let pos = i32::from(position) as u64;
                        seq_start = u64::min(seq_start, pos);
                        seq_end = u64::max(seq_end, pos);
                    } else {
                        unmapped_count += 1;
                    }
                }
                // Keep only intervals with at least two distinct mapped
                // positions (same acceptance test as the original).
                if seq_start < seq_end {
                    ref_seq_start = u64::min(ref_seq_start, seq_start);
                    ref_seq_end = u64::max(ref_seq_end, seq_end);
                    blocks.push(Block {
                        start,
                        end,
                        seq_start,
                        seq_end,
                        mapped_count,
                        unmapped_count,
                    })
                }
            }
            ref_seqs.push(RefSeq {
                index: idx,
                len: hdr_ref_seq.len() as usize,
                name: ref_seq_name.clone(),
                start: metadata.start_position(),
                end: metadata.end_position(),
                seq_start: ref_seq_start,
                seq_end: ref_seq_end,
                blocks,
            });
        }
    }
    Ok(ref_seqs)
}
/// Serializes a [`VirtualPosition`] as the string
/// `"<compressed>/<uncompressed>"`.
pub fn serde_virtual_position<S>(
    vpos: &VirtualPosition,
    serializer: S,
) -> core::result::Result<S::Ok, S::Error>
where
    S: Serializer,
{
    let repr = format!("{}/{}", vpos.compressed(), vpos.uncompressed());
    serializer.serialize_str(&repr)
}
|
use super::Request;
use crate::error::NotpResult;
use crate::store::DataStore;
/// Lists all existing identifiers.
///
/// It will only print the existing identifiers. Such as
/// ```text
/// 1. Google
/// 2. Slack
/// 3. Jira
/// ```
pub(crate) fn list<T: DataStore>(request: Request<'_, T>) -> NotpResult<()> {
// Delegates straight to the store; any error bubbles up to the caller.
// (The example fence above is `text` so rustdoc does not try to compile it.)
request.store.list()
}
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::collections::HashMap;
use std::cmp::max;
use std::slice;
use std::iter;
use super::*;
// Simple directed graph used by graph-algorithm tests.
pub struct TestGraph {
// Total node count; node ids are 0..num_nodes.
num_nodes: usize,
// Designated entry node.
start_node: usize,
// Outgoing edges per node.
successors: HashMap<usize, Vec<usize>>,
// Incoming edges per node.
predecessors: HashMap<usize, Vec<usize>>,
}
impl TestGraph {
    /// Builds a graph from an edge list. `num_nodes` is inferred as one more
    /// than the largest node id mentioned (edges or `start_node`), and every
    /// node — including isolated ones — gets adjacency entries.
    pub fn new(start_node: usize, edges: &[(usize, usize)]) -> Self {
        let mut graph = TestGraph {
            num_nodes: start_node + 1,
            start_node,
            successors: HashMap::new(),
            predecessors: HashMap::new(),
        };
        for &(source, target) in edges {
            // Node ids are implicit in the edge list; grow the count as needed.
            graph.num_nodes = max(graph.num_nodes, source + 1);
            graph.num_nodes = max(graph.num_nodes, target + 1);
            // `or_insert_with` avoids eagerly building the default argument
            // (clippy `or_fun_call`).
            graph.successors.entry(source).or_insert_with(Vec::new).push(target);
            graph.predecessors.entry(target).or_insert_with(Vec::new).push(source);
        }
        // Ensure isolated nodes still have (empty) adjacency lists so the
        // indexing in `predecessors`/`successors` below never panics.
        for node in 0..graph.num_nodes {
            graph.successors.entry(node).or_insert_with(Vec::new);
            graph.predecessors.entry(node).or_insert_with(Vec::new);
        }
        graph
    }
}
impl DirectedGraph for TestGraph {
// Nodes are plain indices.
type Node = usize;
}
impl WithStartNode for TestGraph {
// Exposes the designated entry node.
fn start_node(&self) -> usize {
self.start_node
}
}
impl WithNumNodes for TestGraph {
// Exposes the node count computed in `new`.
fn num_nodes(&self) -> usize {
self.num_nodes
}
}
impl WithPredecessors for TestGraph {
fn predecessors<'graph>(&'graph self,
node: usize)
-> <Self as GraphPredecessors<'graph>>::Iter {
// Indexing is safe for valid ids: `new` seeds every node with an entry.
self.predecessors[&node].iter().cloned()
}
}
impl WithSuccessors for TestGraph {
fn successors<'graph>(&'graph self, node: usize) -> <Self as GraphSuccessors<'graph>>::Iter {
// Indexing is safe for valid ids: `new` seeds every node with an entry.
self.successors[&node].iter().cloned()
}
}
impl<'graph> GraphPredecessors<'graph> for TestGraph {
// Iteration yields owned node ids cloned out of the stored Vec.
type Item = usize;
type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
}
impl<'graph> GraphSuccessors<'graph> for TestGraph {
// Iteration yields owned node ids cloned out of the stored Vec.
type Item = usize;
type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
}
|
/// LeetCode 172: Factorial Trailing Zeroes.
///
/// Returns the number of trailing zeroes in `n!`.
///
/// Trailing zeroes come from factors of 10 = 2 * 5, and factors of 2 are
/// always more plentiful, so we just count factors of 5 in `1..=n`:
/// `n/5 + n/25 + n/125 + ...`. Runs in O(log n). Non-positive `n` yields 0.
pub fn trailing_zeroes(n: i32) -> i32 {
    let mut total = 0;
    let mut power = n / 5;
    while power > 0 {
        total += power;
        power /= 5;
    }
    total
}
#[cfg(test)]
mod test {
    use super::trailing_zeroes;

    /// The original test only checked a single zero-result input; cover the
    /// boundary at 5 and the extra factors contributed by 25 and 100 too.
    #[test]
    fn test_trailing_zeroes() {
        // No multiple of 5 at or below these, so no trailing zeroes.
        assert_eq!(trailing_zeroes(0), 0);
        assert_eq!(trailing_zeroes(3), 0);
        // One factor of 5 per multiple of 5, plus extras for 25, 125, ...
        assert_eq!(trailing_zeroes(5), 1);
        assert_eq!(trailing_zeroes(25), 6);
        assert_eq!(trailing_zeroes(100), 24);
    }
}
|
#[macro_use]
extern crate rocket;
use parking_lot::RwLock;
//use rocket::config::{Config, Environment};
use rocket::response::status::{Created, NoContent};
//use rocket::fairing::AdHoc;
use rocket::State;
use rocket_contrib::json::Json;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
use uuid::Uuid;
// We need to implement the "Clone" trait in order to
// call the "cloned" method in the "get_dogs" function.
#[derive(Clone, Deserialize, Serialize, Debug)]
struct Dog {
// Server-assigned UUID string (see main/create_dog).
id: String,
breed: String,
name: String,
}
// Request payload for creating a dog; the server assigns the id.
#[derive(Deserialize, Serialize, Debug)]
struct NewDog {
breed: String,
name: String,
}
// Shared application state: id -> Dog, behind an RwLock so reads can run
// concurrently while writes are exclusive.
struct MyState {
dog_map: Arc<RwLock<HashMap<String, Dog>>>,
}
// NOTE(review): mixes the 0.5-style `#[rocket::main]` attribute with the
// 0.4-style `rocket::ignite()`/`rocket_contrib` API — confirm against the
// rocket version pinned in Cargo.toml.
#[rocket::main]
async fn main() {
// Seed the in-memory store with one example dog.
let mut dog_map: HashMap<String, Dog> = HashMap::new();
let id = Uuid::new_v4().to_string();
let dog = Dog {
id: id.clone(),
name: "Comet".to_string(),
breed: "Whippet".to_string(),
};
dog_map.insert(id, dog);
let state = MyState {
dog_map: Arc::new(RwLock::new(dog_map)),
};
// Handlers are declared inside main so the route attribute macros stay local.
#[post("/", format = "json", data = "<json>")]
//fn create_dog(json: Json<NewDog>, state: State<MyState>) -> Created<Json> {
fn create_dog(json: Json<NewDog>, state: State<MyState>) -> Created<Json<Dog>> {
let new_dog = json.into_inner();
let id = Uuid::new_v4().to_string();
let dog = Dog {
id: id.clone(),
name: new_dog.name,
breed: new_dog.breed,
};
// NOTE(review): host/port are hard-coded here; see the TODO near ignite().
let url = format!("http://localhost:1234/dog/{}", &id);
let mut dog_map = state.dog_map.write();
dog_map.insert(id, dog.clone());
Created::new(url).body(Json(dog))
}
#[delete("/<id>")]
fn delete_dog(id: String, state: State<MyState>) -> NoContent {
let mut dog_map = state.dog_map.write();
// Removing a missing id is still a 204 (remove's result is ignored).
dog_map.remove(&id);
NoContent
}
#[get("/<id>", format = "json")]
fn get_dog(id: String, state: State<MyState>) -> Option<Json<Dog>> {
let dog_map = state.dog_map.read();
// None maps to a 404 via rocket's Option responder.
if let Some(dog) = dog_map.get(&id) {
Some(Json(dog.clone()))
} else {
None
}
}
#[get("/")]
fn get_dogs(state: State<MyState>) -> Json<Vec<Dog>> {
let dog_map = state.dog_map.read();
let dogs = dog_map.values().cloned().collect();
Json(dogs)
}
#[put("/<id>", format = "json", data = "<json>")]
fn update_dog(id: String, json: Json<Dog>, state: State<MyState>) -> Json<Dog> {
let dog: Dog = json.into_inner();
let mut dog_map = state.dog_map.write();
// NOTE(review): the map key is the path id while the stored Dog keeps the
// body's id — these can diverge; confirm that is intended.
dog_map.insert(id, dog.clone());
Json(dog)
}
//TODO: Learn how to get this to use TLS/HTTPS.
// Note that https://rocket.rs/v0.4/guide/configuration/ says
// "Warning: Rocket's built-in TLS is not considered ready for
// production use. It is intended for development use only."
rocket::ignite()
/*
.attach(AdHoc::on_attach(|rocket| {
//TODO: Can you use this to get the active host and port
//TODO: for setting url in create_dog?
let config = rocket.config();
dbg!(config);
}))
*/
.manage(state)
.mount(
"/dog",
routes![create_dog, delete_dog, get_dog, get_dogs, update_dog],
)
.launch()
.await
.expect("failed to start rocket");
}
|
/// Multiway Tree: a node holding a `char` value and any number of subtrees.
#[derive(Debug, Clone, PartialEq)]
pub struct MTree {
    // Value stored at this node.
    value: char,
    // Child subtrees; empty for a leaf.
    children: Vec<MTree>,
}
impl MTree {
    /// Creates a leaf node (a node with no children).
    pub fn leaf(value: char) -> Self {
        MTree::node(value, vec![])
    }
    /// Creates a node with the given children.
    pub fn node(value: char, children: Vec<MTree>) -> Self {
        // Field-init shorthand: names match the fields, so `value: value`
        // was redundant (clippy `redundant_field_names`).
        MTree { value, children }
    }
    /// Returns the node value.
    pub fn get_value(&self) -> char {
        self.value
    }
    /// Returns a shared reference to the children.
    pub fn get_children(&self) -> &Vec<MTree> {
        &self.children
    }
}
|
use super::bus::Busable;
/// Work RAM: a fixed bank 0, switchable upper banks, and high RAM.
pub struct Ram {
    bank0: [u8; 0x1000],
    banks: Vec<[u8; 0x1000]>,
    high_ram: [u8; 0x7f],
    current_bank: usize,
}
impl Ram {
    /// Creates a RAM with every byte zeroed and bank 0 selected.
    pub fn new() -> Self {
        Self {
            bank0: [0; 0x1000],
            // NOTE(review): six switchable banks — confirm this matches the
            // target hardware's bank count.
            banks: vec![[0; 0x1000]; 6],
            high_ram: [0; 0x7f],
            current_bank: 0,
        }
    }
}
impl Busable for Ram {
    /// Reads a byte from fixed RAM (0xc000-0xcfff), banked RAM
    /// (0xd000-0xdfff), or high RAM (0xff80-0xfffe); panics elsewhere.
    fn read(&self, addr: u16) -> u8 {
        // The handled regions are disjoint, so a single match is equivalent
        // to the original if/else chain.
        match addr {
            0xc000..=0xcfff => self.bank0[(addr - 0xc000) as usize],
            0xd000..=0xdfff => self.banks[self.current_bank][(addr - 0xd000) as usize],
            0xff80..=0xfffe => self.high_ram[(addr - 0xff80) as usize],
            _ => panic!("Invalid RAM read at {:x}", addr),
        }
    }
    /// Writes a byte into the same three regions; panics elsewhere.
    fn write(&mut self, addr: u16, val: u8) {
        match addr {
            0xc000..=0xcfff => self.bank0[(addr - 0xc000) as usize] = val,
            0xd000..=0xdfff => self.banks[self.current_bank][(addr - 0xd000) as usize] = val,
            0xff80..=0xfffe => self.high_ram[(addr - 0xff80) as usize] = val,
            _ => panic!("Invalid RAM write at {:x}", addr),
        }
    }
}
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A vector type intended to be used for collecting from iterators onto the stack.
//!
//! Space for up to N elements is provided on the stack. If more elements are collected, Vec is
//! used to store the values on the heap.
//!
//! The N above is determined by Array's implementor, by way of an associated constant.
use std::ops::{Deref, DerefMut, RangeBounds};
use std::iter::{self, IntoIterator, FromIterator};
use std::slice;
use std::vec;
use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
use array_vec::{self, Array, ArrayVec};
// Small-vector enum: elements live inline in an ArrayVec (up to A::LEN) or
// on the heap in a Vec; the representation is chosen when collecting.
#[derive(Hash, Debug)]
pub enum AccumulateVec<A: Array> {
Array(ArrayVec<A>),
Heap(Vec<A::Element>)
}
// Manual impl: deriving would require `A: Clone` rather than only
// `A::Element: Clone`.
impl<A> Clone for AccumulateVec<A>
where A: Array,
A::Element: Clone {
fn clone(&self) -> Self {
match *self {
AccumulateVec::Array(ref arr) => AccumulateVec::Array(arr.clone()),
AccumulateVec::Heap(ref vec) => AccumulateVec::Heap(vec.clone()),
}
}
}
impl<A: Array> AccumulateVec<A> {
// Starts in the inline representation; the heap variant is only produced
// when collecting more elements than fit (see FromIterator).
pub fn new() -> AccumulateVec<A> {
AccumulateVec::Array(ArrayVec::new())
}
// True while elements are stored inline.
pub fn is_array(&self) -> bool {
match self {
AccumulateVec::Array(..) => true,
AccumulateVec::Heap(..) => false,
}
}
// Convenience constructor for a single element.
pub fn one(el: A::Element) -> Self {
iter::once(el).collect()
}
// Convenience constructor from any iterable.
pub fn many<I: IntoIterator<Item=A::Element>>(iter: I) -> Self {
iter.into_iter().collect()
}
pub fn len(&self) -> usize {
match *self {
AccumulateVec::Array(ref arr) => arr.len(),
AccumulateVec::Heap(ref vec) => vec.len(),
}
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
// Removes and returns the last element, if any.
pub fn pop(&mut self) -> Option<A::Element> {
match *self {
AccumulateVec::Array(ref mut arr) => arr.pop(),
AccumulateVec::Heap(ref mut vec) => vec.pop(),
}
}
// Removes the given range, yielding its elements via the matching
// representation-specific drain.
pub fn drain<R>(&mut self, range: R) -> Drain<A>
where R: RangeBounds<usize>
{
match *self {
AccumulateVec::Array(ref mut v) => {
Drain::Array(v.drain(range))
},
AccumulateVec::Heap(ref mut v) => {
Drain::Heap(v.drain(range))
},
}
}
}
// Both representations expose their elements as a slice, so slice methods
// work uniformly on AccumulateVec.
impl<A: Array> Deref for AccumulateVec<A> {
type Target = [A::Element];
fn deref(&self) -> &Self::Target {
match *self {
AccumulateVec::Array(ref v) => v,
AccumulateVec::Heap(ref v) => v,
}
}
}
// Mutable counterpart of Deref: a mutable slice over either representation.
impl<A: Array> DerefMut for AccumulateVec<A> {
fn deref_mut(&mut self) -> &mut [A::Element] {
match *self {
AccumulateVec::Array(ref mut v) => v,
AccumulateVec::Heap(ref mut v) => v,
}
}
}
impl<A: Array> FromIterator<A::Element> for AccumulateVec<A> {
fn from_iter<I>(iter: I) -> AccumulateVec<A> where I: IntoIterator<Item=A::Element> {
let iter = iter.into_iter();
// Choose the inline representation only when the iterator's upper size
// hint proves the elements fit. NOTE(review): this trusts size_hint's
// upper bound; an iterator that under-reports it would overfill the
// ArrayVec via extend — confirm callers' iterators report honestly.
if iter.size_hint().1.map_or(false, |n| n <= A::LEN) {
let mut v = ArrayVec::new();
v.extend(iter);
AccumulateVec::Array(v)
} else {
AccumulateVec::Heap(iter.collect())
}
}
}
// Owning iterator over an AccumulateVec; wraps either representation's iterator.
pub struct IntoIter<A: Array> {
repr: IntoIterRepr<A>,
}
// Internal representation backing IntoIter.
enum IntoIterRepr<A: Array> {
Array(array_vec::Iter<A>),
Heap(vec::IntoIter<A::Element>),
}
// Forwards next/size_hint to whichever underlying iterator is in use.
impl<A: Array> Iterator for IntoIter<A> {
type Item = A::Element;
fn next(&mut self) -> Option<A::Element> {
match self.repr {
IntoIterRepr::Array(ref mut arr) => arr.next(),
IntoIterRepr::Heap(ref mut iter) => iter.next(),
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
match self.repr {
IntoIterRepr::Array(ref iter) => iter.size_hint(),
IntoIterRepr::Heap(ref iter) => iter.size_hint(),
}
}
}
// Draining iterator returned by AccumulateVec::drain, borrowing the source.
pub enum Drain<'a, A: Array>
where A::Element: 'a
{
Array(array_vec::Drain<'a, A>),
Heap(vec::Drain<'a, A::Element>),
}
// Forwards next/size_hint to the representation-specific drain.
impl<'a, A: Array> Iterator for Drain<'a, A> {
type Item = A::Element;
fn next(&mut self) -> Option<A::Element> {
match *self {
Drain::Array(ref mut drain) => drain.next(),
Drain::Heap(ref mut drain) => drain.next(),
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
match *self {
Drain::Array(ref drain) => drain.size_hint(),
Drain::Heap(ref drain) => drain.size_hint(),
}
}
}
// Consuming iteration: moves the matching representation into IntoIter.
impl<A: Array> IntoIterator for AccumulateVec<A> {
type Item = A::Element;
type IntoIter = IntoIter<A>;
fn into_iter(self) -> Self::IntoIter {
IntoIter {
repr: match self {
AccumulateVec::Array(arr) => IntoIterRepr::Array(arr.into_iter()),
AccumulateVec::Heap(vec) => IntoIterRepr::Heap(vec.into_iter()),
}
}
}
}
// Borrowed iteration goes through the Deref-provided slice.
impl<'a, A: Array> IntoIterator for &'a AccumulateVec<A> {
type Item = &'a A::Element;
type IntoIter = slice::Iter<'a, A::Element>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
// Mutable borrowed iteration goes through the DerefMut-provided slice.
impl<'a, A: Array> IntoIterator for &'a mut AccumulateVec<A> {
type Item = &'a mut A::Element;
type IntoIter = slice::IterMut<'a, A::Element>;
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
// Conversion from Vec re-collects, so short vectors end up inline.
impl<A: Array> From<Vec<A::Element>> for AccumulateVec<A> {
fn from(v: Vec<A::Element>) -> AccumulateVec<A> {
AccumulateVec::many(v)
}
}
// Default is the empty inline representation.
impl<A: Array> Default for AccumulateVec<A> {
fn default() -> AccumulateVec<A> {
AccumulateVec::new()
}
}
// Encodes as a plain sequence, independent of the representation.
impl<A> Encodable for AccumulateVec<A>
where A: Array,
A::Element: Encodable {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
}
}
// Decodes a sequence and collects it, letting FromIterator pick the
// representation.
impl<A> Decodable for AccumulateVec<A>
where A: Array,
A::Element: Decodable {
fn decode<D: Decoder>(d: &mut D) -> Result<AccumulateVec<A>, D::Error> {
d.read_seq(|d, len| {
(0..len).map(|i| d.read_seq_elt(i, |d| Decodable::decode(d))).collect()
})
}
}
|
use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};
use std::time::{Duration, Instant};
/// Serves one client: echoes a greeting for every chunk received until the
/// peer closes the connection.
///
/// Fixes versus the original:
/// * `read()` returning 0 means EOF; the old loop never checked it and spun
///   forever once the client disconnected.
/// * `write` may write only part of the buffer; `write_all` guarantees the
///   whole greeting is sent.
/// * Only the bytes actually received are logged, not the whole 2048-byte buffer.
///
/// # Errors
///
/// Propagates any I/O error from the stream.
fn handle_client(mut stream: TcpStream, tcp_start: Instant) -> std::io::Result<()> {
    println!("Connection from {}", stream.peer_addr()?);
    loop {
        let mut buffer = [0; 2048];
        let read_len = stream.read(&mut buffer)?;
        if read_len == 0 {
            // Peer closed the connection.
            break;
        }
        println!("Received: {:?}\n Length: {}", &buffer[..read_len], read_len);
        stream.write_all(b"Hello Client!")?;
        let tcp_time = tcp_start.elapsed();
        println!("Time elapsed: {:?}", tcp_time);
    }
    Ok(())
}
/// Binds a listener and serves clients one at a time.
///
/// Fix: the original `unwrap()` tore the whole server down on any per-client
/// I/O error; a single bad client now just logs and the loop keeps accepting.
fn main() -> std::io::Result<()> {
    println!("Listening...");
    let listener = TcpListener::bind("127.0.0.1:34254")?;
    // accept connections and process them serially
    for stream in listener.incoming() {
        let tcp_start = Instant::now();
        // `stream?` still propagates accept errors, as before.
        if let Err(e) = handle_client(stream?, tcp_start) {
            eprintln!("client error: {}", e);
        }
    }
    Ok(())
}
|
use scanner_proc_macro::insert_scanner;
#[insert_scanner]
fn main() {
    // Reads two floats and prints the unit vector in the direction (a, b).
    let (a, b) = scan!((f64, f64));
    // Hoist the magnitude: the original evaluated `a.hypot(b)` twice.
    let norm = a.hypot(b);
    let x = a / norm;
    let y = b / norm;
    println!("{} {}", x, y);
}
|
/// Abstraction over per-pixel sampling configuration (to be extended; see TODO).
pub trait Sampler {
/// Returns how many samples are taken per pixel.
fn get_samples_per_pixel(&self) -> u32;
// TODO:
}
|
use std::fmt;
use juniper::ScalarValue;
use serde::{de, Deserialize, Deserializer, Serialize};
/// Common utilities used across tests.
pub mod util {
use futures::StreamExt as _;
use juniper::{
graphql_value, DefaultScalarValue, EmptyMutation, EmptySubscription, ExecutionError,
GraphQLError, GraphQLType, RootNode, ScalarValue, Value, ValuesStream,
};
/// Builds a query-only schema (empty mutation/subscription) over the
/// default scalar value type.
pub fn schema<'q, C, Q>(
query_root: Q,
) -> RootNode<'q, Q, EmptyMutation<C>, EmptySubscription<C>>
where
Q: GraphQLType<DefaultScalarValue, Context = C, TypeInfo = ()> + 'q,
{
RootNode::new(
query_root,
EmptyMutation::<C>::new(),
EmptySubscription::<C>::new(),
)
}
/// Same as [`schema`], but generic over the scalar value type `S`.
pub fn schema_with_scalar<'q, S, C, Q>(
query_root: Q,
) -> RootNode<'q, Q, EmptyMutation<C>, EmptySubscription<C>, S>
where
Q: GraphQLType<S, Context = C, TypeInfo = ()> + 'q,
S: ScalarValue + 'q,
{
RootNode::new_with_scalar_value(
query_root,
EmptyMutation::<C>::new(),
EmptySubscription::<C>::new(),
)
}
/// Extracts a single next value from the result returned by
/// [`juniper::resolve_into_stream()`] and transforms it into a regular
/// [`Value`].
pub async fn extract_next<S: ScalarValue>(
input: Result<(Value<ValuesStream<'_, S>>, Vec<ExecutionError<S>>), GraphQLError>,
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError> {
let (stream, errs) = input?;
// Execution errors short-circuit with a null value.
if !errs.is_empty() {
return Ok((Value::Null, errs));
}
if let Value::Object(obj) = stream {
// Await exactly one item from the first field holding a stream.
for (name, mut val) in obj {
if let Value::Scalar(ref mut stream) = val {
return match stream.next().await {
Some(Ok(val)) => Ok((graphql_value!({ name: val }), vec![])),
Some(Err(e)) => Ok((Value::Null, vec![e])),
None => Ok((Value::Null, vec![])),
};
}
}
}
panic!("Expected to get Value::Object containing a Stream")
}
}
// Custom scalar value that extends the default set with a 64-bit integer
// variant; serde's `untagged` picks the variant from the JSON shape.
#[derive(Clone, Debug, PartialEq, ScalarValue, Serialize)]
#[serde(untagged)]
pub enum MyScalarValue {
#[value(as_float, as_int)]
Int(i32),
// Integers that do not fit in i32.
Long(i64),
#[value(as_float)]
Float(f64),
#[value(as_str, as_string, into_string)]
String(String),
#[value(as_bool)]
Boolean(bool),
}
impl<'de> Deserialize<'de> for MyScalarValue {
    /// Deserializes any self-describing input into the narrowest matching
    /// variant: `Int` before `Long`, integers before `Float`.
    fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
        struct Visitor;
        impl<'de> de::Visitor<'de> for Visitor {
            type Value = MyScalarValue;
            fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("a valid input value")
            }
            fn visit_bool<E: de::Error>(self, b: bool) -> Result<Self::Value, E> {
                Ok(MyScalarValue::Boolean(b))
            }
            fn visit_i32<E: de::Error>(self, n: i32) -> Result<Self::Value, E> {
                Ok(MyScalarValue::Int(n))
            }
            fn visit_i64<E: de::Error>(self, b: i64) -> Result<Self::Value, E> {
                // Bug fix: the original only checked `b <= i32::MAX`, so any
                // value below `i32::MIN` reached `try_into().unwrap()` and
                // panicked. `i32::try_from` checks both bounds.
                match i32::try_from(b) {
                    Ok(n) => self.visit_i32(n),
                    Err(_) => Ok(MyScalarValue::Long(b)),
                }
            }
            fn visit_u32<E: de::Error>(self, n: u32) -> Result<Self::Value, E> {
                if n <= i32::MAX as u32 {
                    self.visit_i32(n.try_into().unwrap())
                } else {
                    self.visit_u64(n.into())
                }
            }
            fn visit_u64<E: de::Error>(self, n: u64) -> Result<Self::Value, E> {
                if n <= i64::MAX as u64 {
                    self.visit_i64(n.try_into().unwrap())
                } else {
                    // Browser's `JSON.stringify()` serializes all numbers
                    // having no fractional part as integers (no decimal point),
                    // so we must parse large integers as floating point,
                    // otherwise we would error on transferring large floating
                    // point numbers.
                    // TODO: Use `FloatToInt` conversion once stabilized:
                    // https://github.com/rust-lang/rust/issues/67057
                    Ok(MyScalarValue::Float(n as f64))
                }
            }
            fn visit_f64<E: de::Error>(self, f: f64) -> Result<Self::Value, E> {
                Ok(MyScalarValue::Float(f))
            }
            fn visit_str<E: de::Error>(self, s: &str) -> Result<Self::Value, E> {
                self.visit_string(s.into())
            }
            fn visit_string<E: de::Error>(self, s: String) -> Result<Self::Value, E> {
                Ok(MyScalarValue::String(s))
            }
        }
        de.deserialize_any(Visitor)
    }
}
|
//! An example of how to use lists in PickleDB. It includes:
//! * Creating a new DB
//! * Loading an existing DB from a file
//! * Creating and removing lists
//! * Adding and removing items of different type to lists
//! * Retrieving list items
use pickledb::{PickleDb, PickleDbDumpPolicy, SerializationMethod};
use serde::{Deserialize, Serialize};
use std::fmt::{self, Display, Formatter};
/// Define an example struct which represents a rectangle.
/// Next we'll show how to use it in lists.
#[derive(Serialize, Deserialize)]
struct Rectangle {
// Side lengths (arbitrary units).
width: i32,
length: i32,
}
impl Display for Rectangle {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "Rectangle: length={}, width={}", self.length, self.width)
}
}
/// Create a new DB and add one key-value pair to it
fn create_db(db_name: &str) {
// AutoDump policy — presumably flushes every change to disk immediately;
// verify against the pickledb docs.
let mut new_db = PickleDb::new(
db_name,
PickleDbDumpPolicy::AutoDump,
SerializationMethod::Bin,
);
new_db.set("key1", &100).unwrap();
}
/// Demonstrates pickledb list operations end to end: create, load, append,
/// read back, pop, delete, and iterate.
fn main() {
    // create a new DB
    create_db("example.db");
    // load the DB
    let mut db = PickleDb::load(
        "example.db",
        PickleDbDumpPolicy::AutoDump,
        SerializationMethod::Bin,
    )
    .unwrap();
    // print the existing value in key1
    println!("The value of key1 is: {}", db.get::<i32>("key1").unwrap());
    // create a new list
    db.lcreate("list1")
        .unwrap()
        // add an integer item to the list
        .ladd(&200)
        // add a floating point item to the list
        .ladd(&2.1)
        // add a string to the list
        .ladd(&String::from("my list"))
        // add a vector of chars to the list
        .ladd(&vec!['a', 'b', 'c'])
        // add multiple values to the list: add 3 rectangles
        .lextend(&[
            Rectangle {
                width: 2,
                length: 4,
            },
            Rectangle {
                width: 10,
                length: 22,
            },
            Rectangle {
                width: 1,
                length: 22,
            },
        ]);
    // print the list length
    println!("list1 length is: {}", db.llen("list1"));
    // print the item in each position of the list
    println!("list1[0] = {}", db.lget::<i32>("list1", 0).unwrap());
    println!("list1[1] = {}", db.lget::<f64>("list1", 1).unwrap());
    println!("list1[2] = {}", db.lget::<String>("list1", 2).unwrap());
    println!("list1[3] = {:?}", db.lget::<Vec<char>>("list1", 3).unwrap());
    println!("list1[4] = {}", db.lget::<Rectangle>("list1", 4).unwrap());
    // Bug fix: these labels previously said list1[6] and list1[7] even though
    // the fetched indices are 5 and 6.
    println!("list1[5] = {}", db.lget::<Rectangle>("list1", 5).unwrap());
    println!("list1[6] = {}", db.lget::<Rectangle>("list1", 6).unwrap());
    // remove an item in the list
    db.lpop::<i32>("list1", 0);
    // print the new first item of the list
    println!("The new list1[0] = {}", db.lget::<f64>("list1", 0).unwrap());
    // remove the entire list
    db.lrem_list("list1").unwrap();
    // was list1 removed?
    println!(
        "list1 was removed. Is it still in the db? {}",
        db.lexists("list1")
    );
    // create a new list
    db.lcreate("list2").unwrap().lextend(&[1, 2, 3, 4]);
    // iterate over the items in list2
    for item_iter in db.liter("list2") {
        println!("Current item is: {}", item_iter.get_item::<i32>().unwrap());
    }
}
|
use actix_web::{web, App, Error as AWError, HttpResponse, HttpServer, Result};
use survey_manager_api::commands::{handle_command_async};
use survey_manager_api::inputs::{CreateSurveyDTO, UpdateSurveyDTO};
use survey_manager_core::app_services::commands::{CreateSurveyCommand, UpdateSurveyCommand, RemoveSurveyCommand};
use survey_manager_core::app_services::token::*;
use futures::Future;
use serde_derive::{Serialize, Deserialize};
use dotenv::dotenv;
use uuid::Uuid;
use survey_manager_core::app_services::queries::{FindSurveyQuery, FindSurveysByAuthorQuery};
use survey_manager_api::queries::{handle_queries_async};
use survey_manager_api::extractors::{Token as BearerToken};
use survey_manager_api::responders::{SurveyIdResponder, GetSurveyResponder};
use survey_manager_api::async_utils::{decode_payload_async, try_into_create_cmd_async, try_into_update_cmd_async};
// For grabbing a token from get_token endpoint.
// Serialized as the JSON body of the /token response.
#[derive(Serialize)]
struct Token {
token: String,
}
// Path-parameter wrapper for routes matching /survey/{id}.
#[derive(Deserialize)]
pub struct SurveyId {
id: String,
}
// POST /survey: validates the DTO into a CreateSurveyCommand, dispatches it,
// and responds with the new survey's id (futures-0.1 combinator style).
fn create_survey(
dto: web::Json<CreateSurveyDTO>,
) -> impl Future<Item = HttpResponse, Error = AWError> {
try_into_create_cmd_async(dto.into_inner())
.from_err()
.and_then(move |cmd: CreateSurveyCommand| {
handle_command_async(cmd.into())
.from_err()
.and_then(move |res| {
SurveyIdResponder::new(res).respond()
})
})
}
// PATCH /survey: validates the DTO into an UpdateSurveyCommand, dispatches
// it, and responds with the survey's id.
fn update_survey(
dto: web::Json<UpdateSurveyDTO>,
) -> impl Future<Item = HttpResponse, Error = AWError> {
try_into_update_cmd_async(dto.into_inner())
.from_err()
.and_then(move |cmd: UpdateSurveyCommand| {
handle_command_async(cmd.into())
.from_err()
.and_then(move |res| {
SurveyIdResponder::new(res).respond()
})
})
}
// DELETE /survey/{id}: decodes the bearer token and issues a
// RemoveSurveyCommand carrying the token's username as requesting author.
fn remove_survey(
token: BearerToken,
params: web::Path<SurveyId>,
) -> impl Future<Item = HttpResponse, Error = AWError> {
let id = params.into_inner().id;
decode_payload_async(token.into_inner())
.from_err()
.and_then(move |Payload{username, ..}| {
let remove_survey_cmd = RemoveSurveyCommand {
id: id.clone(),
requesting_author: username,
};
handle_command_async(remove_survey_cmd.into())
.from_err()
.and_then(move |_| {
// TODO: Replace with json response.
Ok(HttpResponse::Ok().body("Deleted"))
})
})
}
// GET /survey/{id}: decodes the bearer token, queries for the survey scoped
// to the token's username, and renders the result.
fn find_survey(
token: BearerToken,
params: web::Path<SurveyId>,
) -> impl Future<Item = HttpResponse, Error = AWError> {
let id = params.into_inner().id;
decode_payload_async(token.into_inner())
.from_err()
.and_then(move |Payload{username, ..}| {
let find_survey_query = FindSurveyQuery {
id: id.clone(),
requesting_author: username,
};
handle_queries_async(find_survey_query.into())
.from_err()
.and_then(move |res| {
Ok(GetSurveyResponder::new(res, id).respond())
})
})
}
// GET /survey: lists the authenticated user's surveys (no paging configured)
// and returns the query result verbatim as the JSON body.
fn find_authors_surveys(
token: BearerToken,
) -> impl Future<Item = HttpResponse, Error = AWError> {
decode_payload_async(token.into_inner())
.from_err()
.and_then(move |Payload{username, ..}| {
let find_authors_surveys = FindSurveysByAuthorQuery { author: username, page_config: None };
handle_queries_async(find_authors_surveys.into())
.from_err()
.and_then(move |res| {
Ok(HttpResponse::Ok()
.content_type("application/json")
.body(res))
})
})
}
// GET /token: development helper that mints a token for a hard-coded test
// user with a random user id.
fn get_token(
) -> Result<HttpResponse, AWError> {
let fake_user_id = Uuid::new_v4();
let token_str = create_token("test_user".to_string(), fake_user_id.to_string());
let token = Token { token: token_str, };
Ok(HttpResponse::Ok().json(token))
}
// Boots the HTTP server. SERVER_HOST (host:port, loaded via dotenv) overrides
// the default bind address.
fn main() -> std::io::Result<()> {
dotenv().ok();
let addr = match std::env::var("SERVER_HOST") {
Ok(host) => host,
Err(_) => "0.0.0.0:8000".to_string(),
};
println!("Starting http server: {}", &addr);
// Start http server
HttpServer::new(move || {
App::new()
.service(
web::resource("/survey")
.route(web::get().to_async(find_authors_surveys))
.route(web::post().to_async(create_survey))
.route(web::patch().to_async(update_survey)),
)
.service(
web::resource("/survey/{id}")
.route(web::get().to_async(find_survey))
.route(web::delete().to_async(remove_survey)),
)
.service(
web::resource("/token")
.route(web::get().to(get_token)),
)
})
.bind(&addr)?
.run()
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// COM class ids (CLSIDs) for the frame-native factory objects below.
pub const CLSID_AudioFrameNativeFactory: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x16a0a3b9_9f65_4102_9367_2cda3a4f372a);
pub const CLSID_VideoFrameNativeFactory: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xd194386a_04e3_4814_8100_b2b0ae6d78c7);
// Transparent newtype over IUnknown representing the IAudioFrameNative COM interface.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct IAudioFrameNative(pub ::windows::core::IUnknown);
impl IAudioFrameNative {
// Calls vtable slot 6 (GetData — see IAudioFrameNative_abi) requesting
// interface T by IID. SAFETY: caller must guarantee `self` is a live COM
// pointer whose vtable matches IAudioFrameNative_abi.
pub unsafe fn GetData<T: ::windows::core::Interface>(&self) -> ::windows::core::Result<T> {
let mut result__ = ::core::option::Option::None;
(::windows::core::Interface::vtable(self).6)(::core::mem::transmute_copy(self), &<T as ::windows::core::Interface>::IID, &mut result__ as *mut _ as *mut _).and_some(result__)
}
}
// Binds the vtable layout and the IID used for QueryInterface.
unsafe impl ::windows::core::Interface for IAudioFrameNative {
type Vtable = IAudioFrameNative_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x20be1e2e_930f_4746_9335_3c332f255093);
}
// Owned conversion to the wrapped IUnknown (moves, no extra reference).
impl ::core::convert::From<IAudioFrameNative> for ::windows::core::IUnknown {
fn from(value: IAudioFrameNative) -> Self {
value.0
}
}
// Borrowed conversion: clones the wrapped IUnknown.
impl ::core::convert::From<&IAudioFrameNative> for ::windows::core::IUnknown {
fn from(value: &IAudioFrameNative) -> Self {
value.0.clone()
}
}
// Passes the interface as an owned IUnknown parameter.
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for IAudioFrameNative {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
::windows::core::Param::Owned(self.0)
}
}
// Passes the interface as a borrowed IUnknown parameter.
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a IAudioFrameNative {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
::windows::core::Param::Borrowed(&self.0)
}
}
// Raw vtable layout. Slots 0-2 are IUnknown (QueryInterface/AddRef/Release);
// slots 3-5 match the IInspectable shape (NOTE(review): inferred from the
// signatures — confirm); slot 6 is GetData(riid, ppv).
#[repr(C)]
#[doc(hidden)]
pub struct IAudioFrameNative_abi(
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, riid: *const ::windows::core::GUID, ppv: *mut *mut ::core::ffi::c_void) -> ::windows::core::HRESULT,
);
// Transparent newtype over IUnknown representing the audio-frame factory interface.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct IAudioFrameNativeFactory(pub ::windows::core::IUnknown);
impl IAudioFrameNativeFactory {
#[cfg(all(feature = "Win32_Foundation", feature = "Win32_Media_MediaFoundation"))]
// Calls vtable slot 6 (CreateFromMFSample): wraps an IMFSample and queries
// the result for interface T. SAFETY: caller must guarantee a live COM
// pointer with the matching vtable.
pub unsafe fn CreateFromMFSample<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Media::MediaFoundation::IMFSample>, Param1: ::windows::core::IntoParam<'a, super::super::super::Foundation::BOOL>, T: ::windows::core::Interface>(&self, data: Param0, forcereadonly: Param1) -> ::windows::core::Result<T> {
let mut result__ = ::core::option::Option::None;
(::windows::core::Interface::vtable(self).6)(::core::mem::transmute_copy(self), data.into_param().abi(), forcereadonly.into_param().abi(), &<T as ::windows::core::Interface>::IID, &mut result__ as *mut _ as *mut _).and_some(result__)
}
}
// Binds the factory's vtable layout and IID.
unsafe impl ::windows::core::Interface for IAudioFrameNativeFactory {
type Vtable = IAudioFrameNativeFactory_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x7bd67cf8_bf7d_43e6_af8d_b170ee0c0110);
}
// Owned conversion to the wrapped IUnknown.
impl ::core::convert::From<IAudioFrameNativeFactory> for ::windows::core::IUnknown {
fn from(value: IAudioFrameNativeFactory) -> Self {
value.0
}
}
// Borrowed conversion: clones the wrapped IUnknown.
impl ::core::convert::From<&IAudioFrameNativeFactory> for ::windows::core::IUnknown {
fn from(value: &IAudioFrameNativeFactory) -> Self {
value.0.clone()
}
}
// Passes the factory as an owned IUnknown parameter.
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for IAudioFrameNativeFactory {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
::windows::core::Param::Owned(self.0)
}
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a IAudioFrameNativeFactory {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
::windows::core::Param::Borrowed(&self.0)
}
}
#[repr(C)]
#[doc(hidden)]
pub struct IAudioFrameNativeFactory_abi(
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
#[cfg(all(feature = "Win32_Foundation", feature = "Win32_Media_MediaFoundation"))] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, data: ::windows::core::RawPtr, forcereadonly: super::super::super::Foundation::BOOL, riid: *const ::windows::core::GUID, ppv: *mut *mut ::core::ffi::c_void) -> ::windows::core::HRESULT,
#[cfg(not(all(feature = "Win32_Foundation", feature = "Win32_Media_MediaFoundation")))] usize,
);
// --- Generated windows-rs interop bindings: IVideoFrameNative ---
// NOTE(review): machine-generated COM/WinRT glue; vtable slot indices
// (`.6`, `.7`) must stay in sync with the `_abi` tuple below.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct IVideoFrameNative(pub ::windows::core::IUnknown);
impl IVideoFrameNative {
    /// Queries the frame's underlying data object as interface `T`
    /// (vtable slot 6).
    pub unsafe fn GetData<T: ::windows::core::Interface>(&self) -> ::windows::core::Result<T> {
        let mut result__ = ::core::option::Option::None;
        (::windows::core::Interface::vtable(self).6)(::core::mem::transmute_copy(self), &<T as ::windows::core::Interface>::IID, &mut result__ as *mut _ as *mut _).and_some(result__)
    }
    /// Queries the device associated with the frame as interface `T`
    /// (vtable slot 7).
    pub unsafe fn GetDevice<T: ::windows::core::Interface>(&self) -> ::windows::core::Result<T> {
        let mut result__ = ::core::option::Option::None;
        (::windows::core::Interface::vtable(self).7)(::core::mem::transmute_copy(self), &<T as ::windows::core::Interface>::IID, &mut result__ as *mut _ as *mut _).and_some(result__)
    }
}
// Ties the wrapper to its raw vtable type and interface IID.
unsafe impl ::windows::core::Interface for IVideoFrameNative {
    type Vtable = IVideoFrameNative_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x26ba702b_314a_4620_aaf6_7a51aa58fa18);
}
// Owned and borrowed conversions down to the base IUnknown.
impl ::core::convert::From<IVideoFrameNative> for ::windows::core::IUnknown {
    fn from(value: IVideoFrameNative) -> Self {
        value.0
    }
}
impl ::core::convert::From<&IVideoFrameNative> for ::windows::core::IUnknown {
    fn from(value: &IVideoFrameNative) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for IVideoFrameNative {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a IVideoFrameNative {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
// Raw vtable: slots 0-2 IUnknown-shaped, 3-5 IInspectable-shaped,
// slot 6 GetData, slot 7 GetDevice (both riid/ppv out-pointer calls).
#[repr(C)]
#[doc(hidden)]
pub struct IVideoFrameNative_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, riid: *const ::windows::core::GUID, ppv: *mut *mut ::core::ffi::c_void) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, riid: *const ::windows::core::GUID, ppv: *mut *mut ::core::ffi::c_void) -> ::windows::core::HRESULT,
);
// --- Generated windows-rs interop bindings: IVideoFrameNativeFactory ---
// NOTE(review): machine-generated COM/WinRT glue; the `.6` slot index must
// stay in sync with the `_abi` tuple below.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct IVideoFrameNativeFactory(pub ::windows::core::IUnknown);
impl IVideoFrameNativeFactory {
    /// Wraps a Media Foundation video sample in a new interface `T`
    /// (vtable slot 6), passing subtype/size/aperture/device through to the
    /// native factory. Raw-pointer arguments (`subtype`,
    /// `mindisplayaperture`) are forwarded as-is; the caller owns their
    /// validity.
    #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Media_MediaFoundation"))]
    pub unsafe fn CreateFromMFSample<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Media::MediaFoundation::IMFSample>, Param4: ::windows::core::IntoParam<'a, super::super::super::Foundation::BOOL>, Param6: ::windows::core::IntoParam<'a, super::super::super::Media::MediaFoundation::IMFDXGIDeviceManager>, T: ::windows::core::Interface>(
        &self,
        data: Param0,
        subtype: *const ::windows::core::GUID,
        width: u32,
        height: u32,
        forcereadonly: Param4,
        mindisplayaperture: *const super::super::super::Media::MediaFoundation::MFVideoArea,
        device: Param6,
    ) -> ::windows::core::Result<T> {
        let mut result__ = ::core::option::Option::None;
        (::windows::core::Interface::vtable(self).6)(
            ::core::mem::transmute_copy(self),
            data.into_param().abi(),
            ::core::mem::transmute(subtype),
            ::core::mem::transmute(width),
            ::core::mem::transmute(height),
            forcereadonly.into_param().abi(),
            ::core::mem::transmute(mindisplayaperture),
            device.into_param().abi(),
            &<T as ::windows::core::Interface>::IID,
            &mut result__ as *mut _ as *mut _,
        )
        .and_some(result__)
    }
}
// Ties the wrapper to its raw vtable type and interface IID.
unsafe impl ::windows::core::Interface for IVideoFrameNativeFactory {
    type Vtable = IVideoFrameNativeFactory_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x69e3693e_8e1e_4e63_ac4c_7fdc21d9731d);
}
// Owned and borrowed conversions down to the base IUnknown.
impl ::core::convert::From<IVideoFrameNativeFactory> for ::windows::core::IUnknown {
    fn from(value: IVideoFrameNativeFactory) -> Self {
        value.0
    }
}
impl ::core::convert::From<&IVideoFrameNativeFactory> for ::windows::core::IUnknown {
    fn from(value: &IVideoFrameNativeFactory) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for IVideoFrameNativeFactory {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a IVideoFrameNativeFactory {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
// Raw vtable: slots 0-2 IUnknown-shaped, 3-5 IInspectable-shaped, slot 6 is
// CreateFromMFSample — or a `usize` placeholder when the required features
// are disabled, keeping the slot count stable.
#[repr(C)]
#[doc(hidden)]
pub struct IVideoFrameNativeFactory_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Media_MediaFoundation"))]
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, data: ::windows::core::RawPtr, subtype: *const ::windows::core::GUID, width: u32, height: u32, forcereadonly: super::super::super::Foundation::BOOL, mindisplayaperture: *const super::super::super::Media::MediaFoundation::MFVideoArea, device: ::windows::core::RawPtr, riid: *const ::windows::core::GUID, ppv: *mut *mut ::core::ffi::c_void) -> ::windows::core::HRESULT,
    #[cfg(not(all(feature = "Win32_Foundation", feature = "Win32_Media_MediaFoundation")))] usize,
);
|
use raster::{editor, BlendMode, PositionMode, *};
/// Demo: load two ship sprites, optionally upscale the second to match the
/// first, then blend them with several modes and save one PNG per mode.
///
/// NOTE(review): asset paths are hard-coded absolute Windows paths; this
/// binary only works on the author's machine as written.
fn main() {
    // `image1` is never mutated, so it needs no `mut`.
    let image1 =
        raster::open("X:/Rust Projects/dark_sky_editor/assets/test_ships/starling.png").unwrap();
    let mut image2 =
        raster::open("X:/Rust Projects/dark_sky_editor/assets/test_ships/scout.png").unwrap();
    // If the base image is taller, scale image2 up so its height matches
    // image1's. (Bug fix: the original resized image2 to its *own*
    // width/height — a no-op — and carried an empty `else {}` branch.)
    if image1.height > image2.height {
        let w = image2.width;
        editor::resize(&mut image2, w, image1.height, ResizeMode::ExactHeight).unwrap();
        raster::interpolate::resample(&mut image2, 2000, 2000, InterpolationMode::Bicubic).unwrap();
        raster::save(&image2, "resize2.png").unwrap();
    }
    // One blend per mode; each call centers image2 over image1 at full
    // opacity and saves the result under a mode-specific name.
    let modes = vec![
        (BlendMode::Normal, "test_blend_normal.png"),
        (BlendMode::Difference, "test_blend_difference.png"),
        (BlendMode::Multiply, "test_blend_multiply.png"),
        (BlendMode::Overlay, "test_blend_overlay.png"),
        (BlendMode::Screen, "test_blend_screen.png"),
    ];
    for (mode, path) in modes {
        let blended =
            editor::blend(&image1, &image2, mode, 1.0, PositionMode::Center, 0, 0).unwrap();
        raster::save(&blended, path).unwrap();
    }
}
|
//! Process lifetime management
use types::{int_t};
use syscalls::*;
use rust::prelude::*;
use posix::signal::{raise, SIGABRT};
/// Table of callbacks registered via `atexit`, run in reverse registration
/// order by `exit`. Fixed capacity of 32 slots (ISO C requires support for
/// at least 32 registrations). `Option<extern "C" fn()>` is `Copy`, so the
/// array-repeat initializer replaces the original 32-element literal.
static mut ATEXIT_FNS: [Option<extern "C" fn()>; 32] = [None; 32];
/// Terminates the process normally, performing the regular cleanup.
/// All C streams are closed, and all files created with tmpfile are removed.
/// Status can be zero or EXIT_SUCCESS, or EXIT_FAILURE.
///
/// Runs every handler registered through `atexit`, then terminates the
/// process via `_exit` without returning.
#[no_mangle]
pub unsafe extern fn exit(x: int_t) -> ! {
    // `atexit` fills slots from index 0, so iterating in reverse calls the
    // most recently registered handlers first, as C requires.
    for func in ATEXIT_FNS.iter().rev() {
        if let &Some(func) = func {
            func();
        }
    }
    _exit(x);
}
/// _Exit is a synonym for _exit
#[no_mangle]
pub extern fn _Exit(x: int_t) -> ! {
_exit(x);
}
/// POSIX `_exit`: terminate the calling process immediately through the
/// exit syscall, bypassing `atexit` handlers and stream cleanup.
#[no_mangle]
pub extern fn _exit(x: int_t) -> ! {
    unsafe {
        sys_exit(x);
    }
    // The syscall does not return; this empty loop exists only so the
    // compiler can prove divergence for the `!` return type.
    loop {}
}
#[no_mangle]
/// Raises SIGABRT to abnormally terminate the process.
///
/// NOTE(review): if a handler catches SIGABRT and returns, `raise` returns
/// and so does this function — ISO C requires `abort` to still terminate
/// the process in that case (e.g. by restoring the default disposition and
/// re-raising). Confirm whether that path needs handling here.
pub unsafe extern fn abort() {
    raise(SIGABRT);
}
#[no_mangle]
/// Registers `func` to be invoked at normal process termination (see
/// `exit`). Returns 0 on success, or 1 when all 32 slots are occupied.
/// Note: this doesn't check for a null argument, sparing a branch.
pub unsafe extern fn atexit(func: Option<extern "C" fn()>) -> int_t {
    // Claim the first free slot, if any remains.
    match ATEXIT_FNS.iter_mut().find(|slot| slot.is_none()) {
        Some(slot) => {
            *slot = func;
            0
        }
        None => 1,
    }
}
|
use serde::{Deserialize, Serialize};
use n3_machine_ffi::{MachineId, WorkStatus};
/// A serde-serializable response message carrying either a result payload
/// or an error description.
#[derive(Debug, Serialize, Deserialize)]
pub enum Response {
    /// The request failed; `message` describes why.
    Error { message: String },
    /// Result of a load request: the number of machines loaded.
    Load { num_machines: MachineId },
    /// Result of a status query.
    Status { status: WorkStatus },
}
impl Response {
    /// Consumes a `Load` response, yielding the machine count, or an
    /// `Error` response's message as `Err`.
    ///
    /// # Panics
    /// Panics if called on any other variant — that indicates a protocol
    /// violation by the peer. (Improvement: the bare `unreachable!()` now
    /// carries a message naming the misused accessor for diagnosability.)
    pub fn load(self) -> Result<MachineId, String> {
        match self {
            Self::Load { num_machines } => Ok(num_machines),
            Self::Error { message } => Err(message),
            _ => unreachable!("Response::load called on a non-Load response"),
        }
    }

    /// Consumes a `Status` response, yielding the work status, or an
    /// `Error` response's message as `Err`.
    ///
    /// # Panics
    /// Panics if called on any other variant — that indicates a protocol
    /// violation by the peer.
    pub fn status(self) -> Result<WorkStatus, String> {
        match self {
            Self::Status { status } => Ok(status),
            Self::Error { message } => Err(message),
            _ => unreachable!("Response::status called on a non-Status response"),
        }
    }
}
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::GString;
use glib::StaticType;
use glib::Value;
use glib_sys;
use gobject_sys;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
use webkit2_webextension_sys;
use DOMElement;
use DOMEventTarget;
use DOMHTMLElement;
use DOMNode;
use DOMObject;
// GObject wrapper for `WebKitDOMHTMLAnchorElement`, generated by gir (see
// the DO NOT EDIT header). Declares the class hierarchy (`@extends`) and
// implemented interfaces (`@implements`), and binds the GType getter.
glib_wrapper! {
    pub struct DOMHTMLAnchorElement(Object<webkit2_webextension_sys::WebKitDOMHTMLAnchorElement, webkit2_webextension_sys::WebKitDOMHTMLAnchorElementClass, DOMHTMLAnchorElementClass>) @extends DOMHTMLElement, DOMElement, DOMNode, DOMObject, @implements DOMEventTarget;

    match fn {
        get_type => || webkit2_webextension_sys::webkit_dom_html_anchor_element_get_type(),
    }
}
/// Typed `None` for APIs accepting `Option<&DOMHTMLAnchorElement>`.
pub const NONE_DOMHTML_ANCHOR_ELEMENT: Option<&DOMHTMLAnchorElement> = None;
/// Generated accessor trait for [`DOMHTMLAnchorElement`], blanket-implemented
/// below for every type that is `IsA<DOMHTMLAnchorElement>`.
///
/// NOTE(review): gir-generated (DO NOT EDIT). Most methods are marked
/// deprecated when the `v2_22` feature is enabled, mirroring WebKitGTK's
/// deprecation of the DOM API.
pub trait DOMHTMLAnchorElementExt: 'static {
    // --- Attribute getters ---
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_charset(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_coords(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_hash(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_host(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_hostname(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_href(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_hreflang(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_name(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_pathname(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_port(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_protocol(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_rel(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_rev(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_search(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_shape(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_target(&self) -> Option<GString>;
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_text(&self) -> Option<GString>;
    // `type` is a Rust keyword, hence the `_attr` suffix.
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_type_attr(&self) -> Option<GString>;
    // --- Attribute setters ---
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_charset(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_coords(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_hash(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_host(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_hostname(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_href(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_hreflang(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_name(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_pathname(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_port(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_protocol(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_rel(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_rev(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_search(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_shape(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_target(&self, value: &str);
    // Only available when built against WebKitGTK >= 2.16 (or for docs).
    #[cfg_attr(feature = "v2_22", deprecated)]
    #[cfg(any(feature = "v2_16", feature = "dox"))]
    fn set_text(&self, value: &str);
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_type_attr(&self, value: &str);
    // --- GObject property accessors (bypass the deprecated C getters) ---
    fn set_property_text(&self, text: Option<&str>);
    fn get_property_type(&self) -> Option<GString>;
    fn set_property_type(&self, type_: Option<&str>);
    // --- notify::<property> signal connectors ---
    fn connect_property_charset_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_coords_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_hash_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_host_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_hostname_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_href_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_hreflang_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_name_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_pathname_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_port_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_protocol_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_rel_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_rev_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_search_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_shape_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_target_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_text_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
    fn connect_property_type_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<DOMHTMLAnchorElement>> DOMHTMLAnchorElementExt for O {
fn get_charset(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_charset(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_coords(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_coords(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_hash(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_hash(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_host(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_host(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_hostname(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_hostname(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_href(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_href(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_hreflang(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_hreflang(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_name(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_name(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_pathname(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_pathname(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_port(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_port(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_protocol(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_protocol(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_rel(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_rel(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_rev(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_rev(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_search(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_search(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_shape(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_shape(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_target(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_target(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_text(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_text(
self.as_ref().to_glib_none().0,
),
)
}
}
fn get_type_attr(&self) -> Option<GString> {
unsafe {
from_glib_full(
webkit2_webextension_sys::webkit_dom_html_anchor_element_get_type_attr(
self.as_ref().to_glib_none().0,
),
)
}
}
fn set_charset(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_charset(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_coords(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_coords(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_hash(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_hash(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_host(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_host(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_hostname(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_hostname(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_href(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_href(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_hreflang(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_hreflang(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_name(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_name(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_pathname(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_pathname(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_port(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_port(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_protocol(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_protocol(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_rel(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_rel(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_rev(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_rev(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_search(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_search(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_shape(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_shape(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_target(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_target(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
#[cfg(any(feature = "v2_16", feature = "dox"))]
fn set_text(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_text(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_type_attr(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_anchor_element_set_type_attr(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn set_property_text(&self, text: Option<&str>) {
unsafe {
gobject_sys::g_object_set_property(
self.to_glib_none().0 as *mut gobject_sys::GObject,
b"text\0".as_ptr() as *const _,
Value::from(text).to_glib_none().0,
);
}
}
fn get_property_type(&self) -> Option<GString> {
unsafe {
let mut value = Value::from_type(<GString as StaticType>::static_type());
gobject_sys::g_object_get_property(
self.to_glib_none().0 as *mut gobject_sys::GObject,
b"type\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `type` getter")
}
}
fn set_property_type(&self, type_: Option<&str>) {
unsafe {
gobject_sys::g_object_set_property(
self.to_glib_none().0 as *mut gobject_sys::GObject,
b"type\0".as_ptr() as *const _,
Value::from(type_).to_glib_none().0,
);
}
}
fn connect_property_charset_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_charset_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::charset\0".as_ptr() as *const _,
Some(transmute(notify_charset_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_coords_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_coords_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::coords\0".as_ptr() as *const _,
Some(transmute(notify_coords_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_hash_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_hash_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::hash\0".as_ptr() as *const _,
Some(transmute(notify_hash_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_host_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_host_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::host\0".as_ptr() as *const _,
Some(transmute(notify_host_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_hostname_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_hostname_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::hostname\0".as_ptr() as *const _,
Some(transmute(notify_hostname_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_href_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_href_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::href\0".as_ptr() as *const _,
Some(transmute(notify_href_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_hreflang_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_hreflang_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::hreflang\0".as_ptr() as *const _,
Some(transmute(notify_hreflang_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_name_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_name_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::name\0".as_ptr() as *const _,
Some(transmute(notify_name_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_pathname_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_pathname_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::pathname\0".as_ptr() as *const _,
Some(transmute(notify_pathname_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_port_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_port_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::port\0".as_ptr() as *const _,
Some(transmute(notify_port_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_protocol_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_protocol_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::protocol\0".as_ptr() as *const _,
Some(transmute(notify_protocol_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
fn connect_property_rel_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_rel_trampoline<P, F: Fn(&P) + 'static>(
this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
_param_spec: glib_sys::gpointer,
f: glib_sys::gpointer,
) where
P: IsA<DOMHTMLAnchorElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::rel\0".as_ptr() as *const _,
Some(transmute(notify_rel_trampoline::<Self, F> as usize)),
Box_::into_raw(f),
)
}
}
/// Registers `f` to run whenever the `rev` property changes
/// (GObject `notify::rev`).
fn connect_property_rev_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
    // C-ABI trampoline: unboxes the Rust closure from the user-data pointer.
    unsafe extern "C" fn notify_rev_trampoline<P, F: Fn(&P) + 'static>(
        this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
        _param_spec: glib_sys::gpointer,
        f: glib_sys::gpointer,
    ) where
        P: IsA<DOMHTMLAnchorElement>,
    {
        let f: &F = &*(f as *const F);
        f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
    }
    unsafe {
        // Closure ownership transfers to GLib via `into_raw`.
        let f: Box_<F> = Box_::new(f);
        connect_raw(
            self.as_ptr() as *mut _,
            b"notify::rev\0".as_ptr() as *const _,
            Some(transmute(notify_rev_trampoline::<Self, F> as usize)),
            Box_::into_raw(f),
        )
    }
}
/// Registers `f` to run whenever the `search` property changes
/// (GObject `notify::search`).
fn connect_property_search_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
    // C-ABI trampoline: unboxes the Rust closure from the user-data pointer.
    unsafe extern "C" fn notify_search_trampoline<P, F: Fn(&P) + 'static>(
        this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
        _param_spec: glib_sys::gpointer,
        f: glib_sys::gpointer,
    ) where
        P: IsA<DOMHTMLAnchorElement>,
    {
        let f: &F = &*(f as *const F);
        f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
    }
    unsafe {
        // Closure ownership transfers to GLib via `into_raw`.
        let f: Box_<F> = Box_::new(f);
        connect_raw(
            self.as_ptr() as *mut _,
            b"notify::search\0".as_ptr() as *const _,
            Some(transmute(notify_search_trampoline::<Self, F> as usize)),
            Box_::into_raw(f),
        )
    }
}
/// Registers `f` to run whenever the `shape` property changes
/// (GObject `notify::shape`).
fn connect_property_shape_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
    // C-ABI trampoline: unboxes the Rust closure from the user-data pointer.
    unsafe extern "C" fn notify_shape_trampoline<P, F: Fn(&P) + 'static>(
        this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
        _param_spec: glib_sys::gpointer,
        f: glib_sys::gpointer,
    ) where
        P: IsA<DOMHTMLAnchorElement>,
    {
        let f: &F = &*(f as *const F);
        f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
    }
    unsafe {
        // Closure ownership transfers to GLib via `into_raw`.
        let f: Box_<F> = Box_::new(f);
        connect_raw(
            self.as_ptr() as *mut _,
            b"notify::shape\0".as_ptr() as *const _,
            Some(transmute(notify_shape_trampoline::<Self, F> as usize)),
            Box_::into_raw(f),
        )
    }
}
/// Registers `f` to run whenever the `target` property changes
/// (GObject `notify::target`).
fn connect_property_target_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
    // C-ABI trampoline: unboxes the Rust closure from the user-data pointer.
    unsafe extern "C" fn notify_target_trampoline<P, F: Fn(&P) + 'static>(
        this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
        _param_spec: glib_sys::gpointer,
        f: glib_sys::gpointer,
    ) where
        P: IsA<DOMHTMLAnchorElement>,
    {
        let f: &F = &*(f as *const F);
        f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
    }
    unsafe {
        // Closure ownership transfers to GLib via `into_raw`.
        let f: Box_<F> = Box_::new(f);
        connect_raw(
            self.as_ptr() as *mut _,
            b"notify::target\0".as_ptr() as *const _,
            Some(transmute(notify_target_trampoline::<Self, F> as usize)),
            Box_::into_raw(f),
        )
    }
}
/// Registers `f` to run whenever the `text` property changes
/// (GObject `notify::text`).
fn connect_property_text_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
    // C-ABI trampoline: unboxes the Rust closure from the user-data pointer.
    unsafe extern "C" fn notify_text_trampoline<P, F: Fn(&P) + 'static>(
        this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
        _param_spec: glib_sys::gpointer,
        f: glib_sys::gpointer,
    ) where
        P: IsA<DOMHTMLAnchorElement>,
    {
        let f: &F = &*(f as *const F);
        f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
    }
    unsafe {
        // Closure ownership transfers to GLib via `into_raw`.
        let f: Box_<F> = Box_::new(f);
        connect_raw(
            self.as_ptr() as *mut _,
            b"notify::text\0".as_ptr() as *const _,
            Some(transmute(notify_text_trampoline::<Self, F> as usize)),
            Box_::into_raw(f),
        )
    }
}
/// Registers `f` to run whenever the `type` property changes
/// (GObject `notify::type`).
fn connect_property_type_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
    // C-ABI trampoline: unboxes the Rust closure from the user-data pointer.
    unsafe extern "C" fn notify_type_trampoline<P, F: Fn(&P) + 'static>(
        this: *mut webkit2_webextension_sys::WebKitDOMHTMLAnchorElement,
        _param_spec: glib_sys::gpointer,
        f: glib_sys::gpointer,
    ) where
        P: IsA<DOMHTMLAnchorElement>,
    {
        let f: &F = &*(f as *const F);
        f(&DOMHTMLAnchorElement::from_glib_borrow(this).unsafe_cast())
    }
    unsafe {
        // Closure ownership transfers to GLib via `into_raw`.
        let f: Box_<F> = Box_::new(f);
        connect_raw(
            self.as_ptr() as *mut _,
            b"notify::type\0".as_ptr() as *const _,
            Some(transmute(notify_type_trampoline::<Self, F> as usize)),
            Box_::into_raw(f),
        )
    }
}
}
impl fmt::Display for DOMHTMLAnchorElement {
    /// Displays the wrapper as its bare type name.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("DOMHTMLAnchorElement")
    }
}
|
#[doc = r"Value read from the register"]
pub struct R {
    /// Raw 32-bit register value captured at read time.
    bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
    /// Raw 32-bit value that will be written to the register.
    bits: u32,
}
impl super::ALTCLKCFG {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: the closure sees the current value through `R`
        // and edits a `W` seeded with the same bits.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the reset value rather than
        // the current register contents.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = "Possible values of the field `SYSCTL_ALTCLKCFG_ALTCLK`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYSCTL_ALTCLKCFG_ALTCLKR {
    #[doc = "PIOSC"]
    SYSCTL_ALTCLKCFG_ALTCLK_PIOSC,
    #[doc = "Hibernation Module Real-time clock output (RTCOSC)"]
    SYSCTL_ALTCLKCFG_ALTCLK_RTCOSC,
    #[doc = "Low-frequency internal oscillator (LFIOSC)"]
    SYSCTL_ALTCLKCFG_ALTCLK_LFIOSC,
    #[doc = r"Reserved"]
    _Reserved(u8),
}
impl SYSCTL_ALTCLKCFG_ALTCLKR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        // Named variants map to their hardware encodings (0, 3, 4); any other
        // encoding round-trips through `_Reserved`.
        match *self {
            SYSCTL_ALTCLKCFG_ALTCLKR::SYSCTL_ALTCLKCFG_ALTCLK_PIOSC => 0,
            SYSCTL_ALTCLKCFG_ALTCLKR::SYSCTL_ALTCLKCFG_ALTCLK_RTCOSC => 3,
            SYSCTL_ALTCLKCFG_ALTCLKR::SYSCTL_ALTCLKCFG_ALTCLK_LFIOSC => 4,
            SYSCTL_ALTCLKCFG_ALTCLKR::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> SYSCTL_ALTCLKCFG_ALTCLKR {
        // Inverse of `bits`: unknown encodings are preserved as `_Reserved`.
        match value {
            0 => SYSCTL_ALTCLKCFG_ALTCLKR::SYSCTL_ALTCLKCFG_ALTCLK_PIOSC,
            3 => SYSCTL_ALTCLKCFG_ALTCLKR::SYSCTL_ALTCLKCFG_ALTCLK_RTCOSC,
            4 => SYSCTL_ALTCLKCFG_ALTCLKR::SYSCTL_ALTCLKCFG_ALTCLK_LFIOSC,
            i => SYSCTL_ALTCLKCFG_ALTCLKR::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `SYSCTL_ALTCLKCFG_ALTCLK_PIOSC`"]
    #[inline(always)]
    pub fn is_sysctl_altclkcfg_altclk_piosc(&self) -> bool {
        *self == SYSCTL_ALTCLKCFG_ALTCLKR::SYSCTL_ALTCLKCFG_ALTCLK_PIOSC
    }
    #[doc = "Checks if the value of the field is `SYSCTL_ALTCLKCFG_ALTCLK_RTCOSC`"]
    #[inline(always)]
    pub fn is_sysctl_altclkcfg_altclk_rtcosc(&self) -> bool {
        *self == SYSCTL_ALTCLKCFG_ALTCLKR::SYSCTL_ALTCLKCFG_ALTCLK_RTCOSC
    }
    #[doc = "Checks if the value of the field is `SYSCTL_ALTCLKCFG_ALTCLK_LFIOSC`"]
    #[inline(always)]
    pub fn is_sysctl_altclkcfg_altclk_lfiosc(&self) -> bool {
        *self == SYSCTL_ALTCLKCFG_ALTCLKR::SYSCTL_ALTCLKCFG_ALTCLK_LFIOSC
    }
}
#[doc = "Values that can be written to the field `SYSCTL_ALTCLKCFG_ALTCLK`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYSCTL_ALTCLKCFG_ALTCLKW {
    #[doc = "PIOSC"]
    SYSCTL_ALTCLKCFG_ALTCLK_PIOSC,
    #[doc = "Hibernation Module Real-time clock output (RTCOSC)"]
    SYSCTL_ALTCLKCFG_ALTCLK_RTCOSC,
    #[doc = "Low-frequency internal oscillator (LFIOSC)"]
    SYSCTL_ALTCLKCFG_ALTCLK_LFIOSC,
}
impl SYSCTL_ALTCLKCFG_ALTCLKW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        // Hardware encodings for the writable variants; no reserved arm,
        // since reserved values cannot be expressed through this enum.
        match *self {
            SYSCTL_ALTCLKCFG_ALTCLKW::SYSCTL_ALTCLKCFG_ALTCLK_PIOSC => 0,
            SYSCTL_ALTCLKCFG_ALTCLKW::SYSCTL_ALTCLKCFG_ALTCLK_RTCOSC => 3,
            SYSCTL_ALTCLKCFG_ALTCLKW::SYSCTL_ALTCLKCFG_ALTCLK_LFIOSC => 4,
        }
    }
}
#[doc = r"Proxy"]
pub struct _SYSCTL_ALTCLKCFG_ALTCLKW<'a> {
    /// Writer being built; field writes are OR-ed into its `bits`.
    w: &'a mut W,
}
impl<'a> _SYSCTL_ALTCLKCFG_ALTCLKW<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SYSCTL_ALTCLKCFG_ALTCLKW) -> &'a mut W {
        // Safe because every enum variant encodes to a value within the
        // 4-bit field mask.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "PIOSC"]
    #[inline(always)]
    pub fn sysctl_altclkcfg_altclk_piosc(self) -> &'a mut W {
        self.variant(SYSCTL_ALTCLKCFG_ALTCLKW::SYSCTL_ALTCLKCFG_ALTCLK_PIOSC)
    }
    #[doc = "Hibernation Module Real-time clock output (RTCOSC)"]
    #[inline(always)]
    pub fn sysctl_altclkcfg_altclk_rtcosc(self) -> &'a mut W {
        self.variant(SYSCTL_ALTCLKCFG_ALTCLKW::SYSCTL_ALTCLKCFG_ALTCLK_RTCOSC)
    }
    #[doc = "Low-frequency internal oscillator (LFIOSC)"]
    #[inline(always)]
    pub fn sysctl_altclkcfg_altclk_lfiosc(self) -> &'a mut W {
        self.variant(SYSCTL_ALTCLKCFG_ALTCLKW::SYSCTL_ALTCLKCFG_ALTCLK_LFIOSC)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 0:3, then install the masked value.
        self.w.bits &= !(15 << 0);
        self.w.bits |= ((value as u32) & 15) << 0;
        self.w
    }
}
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:3 - Alternate Clock Source"]
    #[inline(always)]
    pub fn sysctl_altclkcfg_altclk(&self) -> SYSCTL_ALTCLKCFG_ALTCLKR {
        // Extract the 4-bit field and decode it to the reader enum.
        SYSCTL_ALTCLKCFG_ALTCLKR::_from(((self.bits >> 0) & 15) as u8)
    }
}
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:3 - Alternate Clock Source"]
    #[inline(always)]
    pub fn sysctl_altclkcfg_altclk(&mut self) -> _SYSCTL_ALTCLKCFG_ALTCLKW {
        // Returns the field proxy; the write happens in the proxy's `bits`.
        _SYSCTL_ALTCLKCFG_ALTCLKW { w: self }
    }
}
|
// Copyright (c) 2016 <daggerbot@gmail.com>
// This software is available under the terms of the zlib license.
// See README.md for more information.
use std::fs::File;
use std::io::{self, Read};
use std::str::FromStr;
use core::{AppContext, ModuleQuery};
use serde::Deserialize;
use toml;
/// Info used when detecting ROMs.
#[derive(Deserialize)]
struct RomQueryInfo {
    /// Expected ROM file length in bytes.
    len: u64,
    /// Byte offset of the ROM's embedded name — NOTE(review): exact
    /// semantics not visible here; confirm against the detection code.
    name_offset: usize,
    /// Expected SHA-256 digest of the ROM (presumably hex-encoded — verify).
    sha256: String,
}
/// Deserialized from 'roms.toml'.
#[derive(Deserialize)]
struct RomQueryFile {
    /// One entry per supported ROM.
    roms: Vec<RomQueryInfo>,
}
impl RomQueryFile {
fn load (app: &AppContext) -> Option<RomQueryFile> {
for path in app.module_dirs().map(|dir| dir.join("g1/roms.toml")) {
let mut file = match File::open(&path) {
Ok(file) => file,
Err(err) => {
if err.kind() != io::ErrorKind::NotFound {
error!("can't open '{}': {}", path.display(), err);
}
continue;
},
};
let mut src = String::new();
if let Err(err) = file.read_to_string(&mut src) {
error!("can't read '{}': {}", path.display(), err);
continue;
}
let value = match toml::Value::from_str(src.as_str()) {
Ok(value) => value,
Err(err) => {
error!("can't parse '{}': {}", path.display(), err.first().unwrap());
continue;
},
};
let mut decoder = toml::Decoder::new(value);
let query_file = match RomQueryFile::deserialize(&mut decoder) {
Ok(f) => f,
Err(err) => {
error!("can't deserialize '{}': {}", path.display(), err);
continue;
},
};
return Some(query_file);
}
None
}
}
/// Detect supported game ROMs.
pub fn query_modules (app: &AppContext, _: &mut ModuleQuery) {
    // NOTE(review): the loaded query file is currently discarded — loading is
    // performed only for its logging side effects.
    let _query_file = RomQueryFile::load(app);
}
|
mod body;
use self::body::body;
use crate::http::{bad_request, ok, Request, Result};
use itertools::Itertools;
/// GET handler: echoes the request's query string back as one
/// `key = value` line per pair. A malformed query yields 400 Bad Request.
pub async fn get(req: Request) -> Result {
    let pairs = req
        .query::<Vec<(String, String)>>()
        .map_err(|_| bad_request())?;
    let body = pairs
        .into_iter()
        .map(|(key, value)| format!("{} = {}", key, value))
        .collect::<Vec<_>>()
        .join("\n");
    ok(body)
}
/// POST handler: delegates to the shared `body` echo implementation.
pub async fn post(req: Request) -> Result {
    body(req).await
}
/// PUT handler: delegates to the shared `body` echo implementation.
pub async fn put(req: Request) -> Result {
    body(req).await
}
/// PATCH handler: delegates to the shared `body` echo implementation.
pub async fn patch(req: Request) -> Result {
    body(req).await
}
/// DELETE handler: delegates to the shared `body` echo implementation.
pub async fn delete(req: Request) -> Result {
    body(req).await
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::headers::ContentType;
    use crate::test::*;
    use hyper::{Method, StatusCode};
    use std::collections::HashSet;
    use std::iter::FromIterator;

    /// GET with a single query pair echoes exactly "key = val".
    #[tokio::test]
    async fn test_get() {
        let res = request().path("/?key=val").handle(get).await.unwrap();
        assert_eq!(res.status(), StatusCode::OK);
        let body = res.read_body_utf8().await.unwrap();
        assert_eq!(body, "key = val");
    }

    /// GET with repeated keys: compared as sets because the relative order
    /// of echoed lines is not part of the contract being tested.
    #[tokio::test]
    async fn test_multi_get() {
        let res = request()
            .path("/?key=val&other=something&key=another")
            .handle(get)
            .await
            .unwrap();
        assert_eq!(res.status(), StatusCode::OK);
        let body = res.read_body_utf8().await.unwrap();
        let result: HashSet<&str> = HashSet::from_iter(body.split("\n"));
        let expected = HashSet::from_iter(vec![
            "key = val",
            "other = something",
            "key = another",
        ]);
        assert_eq!(expected, result)
    }

    /// POST with a url-encoded form body echoes the single pair.
    #[tokio::test]
    async fn test_post() {
        let res = request()
            .method(Method::POST)
            .typed_header(ContentType::form_url_encoded())
            .body("key=val")
            .handle(post)
            .await
            .unwrap();
        assert_eq!(res.status(), StatusCode::OK);
        let body = res.read_body_utf8().await.unwrap();
        assert_eq!(body, "key = val");
    }

    /// POST with repeated keys, compared as sets (see test_multi_get).
    #[tokio::test]
    async fn test_multi_post() {
        let res = request()
            .method(Method::POST)
            .typed_header(ContentType::form_url_encoded())
            .body("key=val&other=something&key=another")
            .handle(post)
            .await
            .unwrap();
        assert_eq!(res.status(), StatusCode::OK);
        let body = res.read_body_utf8().await.unwrap();
        let result: HashSet<&str> = HashSet::from_iter(body.split("\n"));
        let expected = HashSet::from_iter(vec![
            "key = val",
            "other = something",
            "key = another",
        ]);
        assert_eq!(expected, result)
    }

    /// PUT with a url-encoded form body echoes the single pair.
    #[tokio::test]
    async fn test_put() {
        let res = request()
            .method(Method::PUT)
            .typed_header(ContentType::form_url_encoded())
            .body("key=val")
            .handle(put)
            .await
            .unwrap();
        assert_eq!(res.status(), StatusCode::OK);
        let body = res.read_body_utf8().await.unwrap();
        assert_eq!(body, "key = val");
    }

    /// PUT with repeated keys, compared as sets.
    #[tokio::test]
    async fn test_multi_put() {
        let res = request()
            .method(Method::PUT)
            .typed_header(ContentType::form_url_encoded())
            .body("key=val&other=something&key=another")
            .handle(put)
            .await
            .unwrap();
        assert_eq!(res.status(), StatusCode::OK);
        let body = res.read_body_utf8().await.unwrap();
        let result: HashSet<&str> = HashSet::from_iter(body.split("\n"));
        let expected = HashSet::from_iter(vec![
            "key = val",
            "other = something",
            "key = another",
        ]);
        assert_eq!(expected, result)
    }

    /// PATCH with a url-encoded form body echoes the single pair.
    #[tokio::test]
    async fn test_patch() {
        let res = request()
            .method(Method::PATCH)
            .typed_header(ContentType::form_url_encoded())
            .body("key=val")
            .handle(patch)
            .await
            .unwrap();
        assert_eq!(res.status(), StatusCode::OK);
        let body = res.read_body_utf8().await.unwrap();
        assert_eq!(body, "key = val");
    }

    /// PATCH with repeated keys, compared as sets.
    #[tokio::test]
    async fn test_multi_patch() {
        let res = request()
            .method(Method::PATCH)
            .typed_header(ContentType::form_url_encoded())
            .body("key=val&other=something&key=another")
            .handle(patch)
            .await
            .unwrap();
        assert_eq!(res.status(), StatusCode::OK);
        let body = res.read_body_utf8().await.unwrap();
        let result: HashSet<&str> = HashSet::from_iter(body.split("\n"));
        let expected = HashSet::from_iter(vec![
            "key = val",
            "other = something",
            "key = another",
        ]);
        assert_eq!(expected, result)
    }

    /// DELETE has no body; only the status code is asserted.
    #[tokio::test]
    async fn test_delete() {
        let res = request()
            .method(Method::DELETE)
            .handle(delete)
            .await
            .unwrap();
        assert_eq!(res.status(), StatusCode::OK);
    }
}
|
fn main() {
    // Adding a negative to a positive.
    let a = 160;
    let b = -20;
    println!("{:?}", a + b); // 140
    // A negative value never takes the positive branch.
    let c = -10;
    if c > 0 {
        println!("Positif");
    } else {
        println!("Negatif");
    }
    // Integer division with a negative dividend truncates toward zero: -5.
    let d = -10 / 2;
    println!("{:?}", d);
    // Shadowing: fresh bindings reuse the names `a` and `b`.
    let a = -3000;
    let b = 30;
    println!("{:?}", b + a);
    // Remainder: the result takes the sign of the dividend (-160 % -60 == -40).
    let a = -160;
    let b = a % -60;
    println!("{:?}", b);
}
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from ../gir-files
// DO NOT EDIT
use crate::DateFormat;
use glib::translate::*;
// Boxed-type wrapper for libsoup's `SoupDate`: generates the `Date` struct
// with copy/free/GType functions supplied by the C library (gir-generated).
glib::wrapper! {
    #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
    pub struct Date(Boxed<ffi::SoupDate>);

    match fn {
        copy => |ptr| ffi::soup_date_copy(mut_override(ptr)),
        free => |ptr| ffi::soup_date_free(ptr),
        type_ => || ffi::soup_date_get_type(),
    }
}
// Generated bindings (gir): thin wrappers over the libsoup C API. The
// getters take `&mut self` because `to_glib_none_mut` requires mutable
// access to produce the mutable pointer the C functions expect.
impl Date {
    /// Builds a date from broken-out components via `soup_date_new`.
    #[doc(alias = "soup_date_new")]
    pub fn new(year: i32, month: i32, day: i32, hour: i32, minute: i32, second: i32) -> Date {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::soup_date_new(year, month, day, hour, minute, second))
        }
    }
    /// Builds a date `offset_seconds` away from the current time.
    #[doc(alias = "soup_date_new_from_now")]
    #[doc(alias = "new_from_now")]
    pub fn from_now(offset_seconds: i32) -> Date {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::soup_date_new_from_now(offset_seconds))
        }
    }
    /// Parses a date string; `None` when libsoup returns NULL (parse failure).
    #[doc(alias = "soup_date_new_from_string")]
    #[doc(alias = "new_from_string")]
    pub fn from_string(date_string: &str) -> Option<Date> {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::soup_date_new_from_string(date_string.to_glib_none().0))
        }
    }
    /// Builds a date from a Unix `time_t` value.
    #[doc(alias = "soup_date_new_from_time_t")]
    #[doc(alias = "new_from_time_t")]
    pub fn from_time_t(when: libc::c_long) -> Date {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::soup_date_new_from_time_t(when))
        }
    }
    /// Day-of-month component.
    #[cfg(any(feature = "v2_32", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_32")))]
    #[doc(alias = "soup_date_get_day")]
    #[doc(alias = "get_day")]
    pub fn day(&mut self) -> i32 {
        unsafe {
            ffi::soup_date_get_day(self.to_glib_none_mut().0)
        }
    }
    /// Hour component.
    #[cfg(any(feature = "v2_32", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_32")))]
    #[doc(alias = "soup_date_get_hour")]
    #[doc(alias = "get_hour")]
    pub fn hour(&mut self) -> i32 {
        unsafe {
            ffi::soup_date_get_hour(self.to_glib_none_mut().0)
        }
    }
    /// Minute component.
    #[cfg(any(feature = "v2_32", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_32")))]
    #[doc(alias = "soup_date_get_minute")]
    #[doc(alias = "get_minute")]
    pub fn minute(&mut self) -> i32 {
        unsafe {
            ffi::soup_date_get_minute(self.to_glib_none_mut().0)
        }
    }
    /// Month component.
    #[cfg(any(feature = "v2_32", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_32")))]
    #[doc(alias = "soup_date_get_month")]
    #[doc(alias = "get_month")]
    pub fn month(&mut self) -> i32 {
        unsafe {
            ffi::soup_date_get_month(self.to_glib_none_mut().0)
        }
    }
    /// Timezone offset — NOTE(review): units (minutes vs seconds) are defined
    /// by libsoup, not visible here; confirm against the C docs.
    #[cfg(any(feature = "v2_32", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_32")))]
    #[doc(alias = "soup_date_get_offset")]
    #[doc(alias = "get_offset")]
    pub fn offset(&mut self) -> i32 {
        unsafe {
            ffi::soup_date_get_offset(self.to_glib_none_mut().0)
        }
    }
    /// Second component.
    #[cfg(any(feature = "v2_32", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_32")))]
    #[doc(alias = "soup_date_get_second")]
    #[doc(alias = "get_second")]
    pub fn second(&mut self) -> i32 {
        unsafe {
            ffi::soup_date_get_second(self.to_glib_none_mut().0)
        }
    }
    /// UTC flag as reported by `soup_date_get_utc`.
    #[cfg(any(feature = "v2_32", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_32")))]
    #[doc(alias = "soup_date_get_utc")]
    #[doc(alias = "get_utc")]
    pub fn utc(&mut self) -> i32 {
        unsafe {
            ffi::soup_date_get_utc(self.to_glib_none_mut().0)
        }
    }
    /// Year component.
    #[cfg(any(feature = "v2_32", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_32")))]
    #[doc(alias = "soup_date_get_year")]
    #[doc(alias = "get_year")]
    pub fn year(&mut self) -> i32 {
        unsafe {
            ffi::soup_date_get_year(self.to_glib_none_mut().0)
        }
    }
    /// Whether the date lies in the past.
    #[cfg(any(feature = "v2_24", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
    #[doc(alias = "soup_date_is_past")]
    pub fn is_past(&mut self) -> bool {
        unsafe {
            from_glib(ffi::soup_date_is_past(self.to_glib_none_mut().0))
        }
    }
    /// Formats the date according to `format`; `None` when libsoup
    /// returns NULL.
    #[doc(alias = "soup_date_to_string")]
    pub fn to_string(&mut self, format: DateFormat) -> Option<glib::GString> {
        unsafe {
            from_glib_full(ffi::soup_date_to_string(self.to_glib_none_mut().0, format.into_glib()))
        }
    }
    /// Converts the date to a Unix `time_t`.
    #[doc(alias = "soup_date_to_time_t")]
    pub fn to_time_t(&mut self) -> libc::c_long {
        unsafe {
            ffi::soup_date_to_time_t(self.to_glib_none_mut().0)
        }
    }
    // Skipped by the generator: `glib::TimeVal` is ignored by gir.
    //#[cfg(any(feature = "v2_24", feature = "dox"))]
    //#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
    //#[doc(alias = "soup_date_to_timeval")]
    //pub fn to_timeval(&mut self, time: /*Ignored*/glib::TimeVal) {
    //    unsafe { TODO: call ffi:soup_date_to_timeval() }
    //}
}
|
use proconio::{input, marker::Bytes};
/// Reads a board string and decides whether the ball ('*') lies strictly
/// between the two walls ('|'); prints "in" or "out" accordingly.
fn main() {
    input! {
        _n: usize,
        s: Bytes,
    };
    // Wall positions: first and last '|' in the string.
    let left_wall = s.iter().position(|&b| b == b'|').unwrap();
    let right_wall = s.iter().rposition(|&b| b == b'|').unwrap();
    // Ball position (scanned from the right, matching the original).
    let ball = s.iter().rposition(|&b| b == b'*').unwrap();
    let verdict = if left_wall < ball && ball < right_wall { "in" } else { "out" };
    println!("{}", verdict);
}
|
use std::fs;
/*
* Challenge Specs #1
*
* [1721, 979, 366, 299, 675, 1456]
* [299, 366, 675, 979, 1456, 1721]
*
* 299 + 1721 = 2020
*
* In this list, the two entries that sum to 2020 are 1721 and 299.
* Multiplying them together produces 1721 * 299 = 514579, so the correct answer is 514579.
*
*/
/// Part 1: finds a pair of entries summing to 2020 by rotating a copy of the
/// input against itself and zipping, then prints the product(s) of the first
/// matching rotation.
///
/// Fixes over the original:
/// - takes `&[i32]` instead of `&Vec<i32>` (existing `&Vec` callers still
///   compile via deref coercion);
/// - bounds the search at `len` rotations, so inputs with no matching pair
///   (including the empty input) terminate instead of looping forever.
fn solution1(xs: &[i32]) {
    let mut ys = xs.to_vec();
    // After `len` rotations every cyclic pairing has been examined once;
    // further rotations would only repeat earlier ones.
    for _ in 0..xs.len() {
        ys.rotate_right(1);
        let res: Vec<_> = xs
            .iter()
            .zip(ys.iter())
            .filter(|(&x, &y)| x + y == 2020)
            .map(|(&x, &y)| x * y)
            .collect();
        if !res.is_empty() {
            println!("res: {:?}", res);
            break;
        }
    }
}
/*
* Challenge Specs 2
*
* Using the above example again, the three entries that sum to 2020 are 979, 366, and 675.
* Multiplying them together produces the answer, 241861950
*
*/
fn solution2(xs: &Vec<i32>) {
let mut ys = xs.clone();
let mut zs = xs.clone();
zs.rotate_right(1);
for x in xs {
ys.rotate_right(1);
zs.rotate_right(1);
let res: Vec<_> = ys
.iter()
.zip(zs.iter())
.filter(|(&y, &z)| x + y + z == 2020)
.map(|(&y, &z)| x * y * z)
.collect();
if !res.is_empty() {
println!("res: {:?}", res);
break;
}
}
}
/// Reads one integer per line from `input.txt`, sorts them, and runs both
/// puzzle parts.
fn main() {
    // `lines()` also strips the `\r` of CRLF input, which the original
    // `split("\n")` left attached (breaking `parse`); blank lines (e.g. a
    // trailing newline) are skipped.
    let mut contents: Vec<i32> = fs::read_to_string("input.txt")
        .expect("failed to read input.txt")
        .lines()
        .filter(|line| !line.is_empty())
        .map(|line| line.parse::<i32>().expect("invalid integer in input.txt"))
        .collect();
    // Stability is irrelevant for integers; `sort_unstable` avoids the
    // allocation `sort` makes.
    contents.sort_unstable();
    solution1(&contents);
    solution2(&contents);
}
|
// Copyright © 2016-2017 VMware, Inc. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//! This module contains the backend code that executes commited consensus operations.
//! It represents the "state machine" in a "replicated state machine".
//!
//! Specifically, this backend wraps vertree and provides a generic public interface
//! to the consensus code so that the consensus code can be independent of any specific
//! api and it's tightly coupled backend implementation.
use std::convert::From;
use super::internal_api_messages::{ApiReq, ApiRsp, ApiError, TreeOp, TreeCas, TreeOpResult};
use vertree::{self, Tree};
/// The replicated state machine's backing store: a vertree `Tree` to which
/// all committed consensus operations are applied.
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct Backend {
    /// Current tree state; operations replace this with the new tree returned
    /// by vertree's persistent-update API.
    pub tree: Tree
}
impl Default for Backend {
    /// Delegates to [`Backend::new`] so the two constructors stay in sync.
    fn default() -> Backend {
        Backend::new()
    }
}
impl Backend {
    /// Creates a backend wrapping an empty vertree `Tree`.
    pub fn new() -> Backend {
        Backend {
            tree: Tree::new()
        }
    }

    /// The sole method for executing backend operations provided to the consensus system
    ///
    /// Vertree errors are converted into error responses via `From`, so this
    /// method itself never fails.
    pub fn call(&mut self, msg: ApiReq) -> ApiRsp {
        match msg {
            ApiReq::TreeOp(tree_op) => {
                match self.run_tree_op(tree_op) {
                    Ok(vr_api_rsp) => vr_api_rsp,
                    Err(vertree_error) => vertree_error.into()
                }
            },
            ApiReq::TreeCas(tree_cas) => {
                match self.run_tree_cas(tree_cas) {
                    Ok(vr_api_rsp) => vr_api_rsp,
                    Err(vertree_error) => vertree_error.into()
                }
            }
        }
    }

    /// Applies a single (non-CAS) tree operation.
    ///
    /// Mutating vertree calls return a *new* tree (persistent data
    /// structure), which replaces `self.tree`; read-only calls leave the tree
    /// untouched. Each arm converts vertree's reply into an `ApiRsp`.
    fn run_tree_op(&mut self, tree_op: TreeOp) -> Result<ApiRsp, vertree::Error> {
        let val = match tree_op {
            TreeOp::Snapshot {directory} => {
                let s = self.tree.snapshot(&directory)?;
                ApiRsp::Path(s)
            },
            TreeOp::CreateNode {path, ty} => {
                self.tree = self.tree.create(&path, ty.into())?;
                ApiRsp::Ok
            },
            TreeOp::DeleteNode {path} => {
                let (_, tree) = self.tree.delete(&path)?;
                self.tree = tree;
                ApiRsp::Ok
            },
            TreeOp::ListKeys {..} => {
                // Use the path variable when vertree supports it
                ApiRsp::TreeOpResult(TreeOpResult::Keys(self.tree.keys()))
            },
            // Blob operations.
            TreeOp::BlobPut {path, val} => {
                let (reply, tree) = self.tree.blob_put(path, val)?;
                self.tree = tree;
                reply.into()
            },
            TreeOp::BlobGet {path} => {
                let reply = self.tree.blob_get(path)?;
                reply.into()
            },
            TreeOp::BlobSize {path} => {
                let reply = self.tree.blob_size(path)?;
                reply.into()
            },
            // Queue operations.
            TreeOp::QueuePush {path, val} => {
                let (reply, tree) = self.tree.queue_push(path, val)?;
                self.tree = tree;
                reply.into()
            },
            TreeOp::QueuePop {path} => {
                let (reply, tree) = self.tree.queue_pop(path)?;
                self.tree = tree;
                reply.into()
            },
            TreeOp::QueueFront {path} => {
                let reply = self.tree.queue_front(path)?;
                reply.into()
            },
            TreeOp::QueueBack {path} => {
                let reply = self.tree.queue_back(path)?;
                reply.into()
            },
            TreeOp::QueueLen {path} => {
                let reply = self.tree.queue_len(path)?;
                reply.into()
            },
            // Set operations.
            TreeOp::SetInsert {path, val} => {
                let (reply, tree) = self.tree.set_insert(path, val)?;
                self.tree = tree;
                reply.into()
            },
            TreeOp::SetRemove {path, val} => {
                let (reply, tree) = self.tree.set_remove(path, val)?;
                self.tree = tree;
                reply.into()
            },
            TreeOp::SetContains {path, val} => {
                let reply = self.tree.set_contains(path, val)?;
                reply.into()
            },
            TreeOp::SetUnion {paths, sets} => {
                let reply = self.tree.set_union(paths, sets)?;
                reply.into()
            },
            TreeOp::SetIntersection {path1, path2} => {
                let reply = self.tree.set_intersection(&path1, &path2)?;
                reply.into()
            },
            TreeOp::SetDifference {path1, path2} => {
                let reply = self.tree.set_difference(&path1, &path2)?;
                reply.into()
            },
            TreeOp::SetSymmetricDifference {path1, path2} => {
                let reply = self.tree.set_symmetric_difference(&path1, &path2)?;
                reply.into()
            },
            // Subset/superset checks come in two flavors: against another
            // stored set (by path) or against a literal set value.
            TreeOp::SetSubsetPath {path1, path2} => {
                let reply = self.tree.set_subset(path1, Some(path2), None)?;
                reply.into()
            },
            TreeOp::SetSubsetSet {path, set} => {
                let reply = self.tree.set_subset(path, None, Some(set))?;
                reply.into()
            },
            TreeOp::SetSupersetPath {path1, path2} => {
                let reply = self.tree.set_superset(path1, Some(path2), None)?;
                reply.into()
            },
            TreeOp::SetSupersetSet {path, set} => {
                let reply = self.tree.set_superset(path, None, Some(set))?;
                reply.into()
            }
        };
        Ok(val)
    }

    /// Applies a guarded multi-operation compare-and-swap atomically via
    /// vertree's `multi_cas`.
    fn run_tree_cas(&mut self, tree_cas: TreeCas) -> Result<ApiRsp, vertree::Error> {
        let TreeCas {guards, ops} = tree_cas;
        // All cas operations must be writes for now
        if ops.iter().any(|s| !s.is_write()) {
            let err = ApiError::InvalidCas("All cas operations must be writes".to_string());
            // Returning Ok here because we already have a ApiRsp (even though it's an error)
            return Ok(ApiRsp::Error(err));
        }
        let guards = guards.into_iter().map(vertree::Guard::from).collect();
        let ops = ops.into_iter().map(vertree::WriteOp::from).collect();
        let (replies, tree) = self.tree.multi_cas(guards, ops)?;
        self.tree = tree;
        let replies = replies.into_iter().map(|r| r.into()).collect();
        Ok(ApiRsp::TreeCasResult(replies))
    }
}
|
/// Exponent marker used when assembling scientific notation.
static TOKEN_E: &str = "e";
/// Zero digit used to pad an empty mantissa fraction.
static TOKEN_ZERO: &str = "0";
/// Decimal-point token.
// `&'static str` annotations dropped: the 'static lifetime is implied on
// statics (clippy::redundant_static_lifetimes).
static TOKEN_POINT: &str = ".";
/// Converts a plain decimal string (digits with at most one '.') into
/// scientific notation, e.g. "123" -> "1.23e2", "0.05" -> "5.0e-2".
///
/// Fixes over the original:
/// - empty input and inputs with more than one '.' are rejected instead of
///   panicking / producing garbage;
/// - an all-zero fraction such as "0.0" returns "0.0e0" instead of panicking
///   on an out-of-range `nth(...).unwrap()`;
/// - a missing fraction (e.g. "5", "5.") is padded with a zero for a
///   consistent "d.ddde±k" shape, matching the existing ".d" branch;
/// - `"-1".to_string().as_str()` noise replaced by the `"-1"` literal.
pub fn to_expo(s: &str) -> Result<String, &'static str> {
    let len = s.len();
    // Reject empty input and a bare "." outright.
    if len == 0 || s == "." {
        return Err("invalid input!");
    }
    // Only ASCII digits and a single decimal point are valid.
    for c in s.chars() {
        if !(c.is_ascii_digit() || c == '.') {
            return Err("invalid input!");
        }
    }
    if s.matches('.').count() > 1 {
        return Err("invalid input!");
    }
    // Byte index of the '.', or -1 when absent; input is ASCII, so byte and
    // char indices coincide.
    let idx: isize = s.find('.').map_or(-1, |x| x as isize);
    match get_format(idx, len) {
        FormatType::AtHead => {
            // ".dd.." -> "d.d..e-1"; ".d" -> "d.0e-1".
            if len > 2 {
                let ret = s.chars().nth(1).unwrap().to_string()
                    + TOKEN_POINT + s.get(2..).unwrap() + TOKEN_E + "-1";
                Ok(ret)
            } else {
                let ret = s.chars().nth(1).unwrap().to_string()
                    + TOKEN_POINT + TOKEN_ZERO + TOKEN_E + "-1";
                Ok(ret)
            }
        },
        FormatType::AtRear => {
            // "ddd." -> point shifts behind the first digit.
            let left_len = len - 1;
            let move_steps = left_len - 1;
            let tail = s.get(1..left_len).unwrap();
            let mantissa_tail = if tail.is_empty() { TOKEN_ZERO } else { tail };
            let ret = s.chars().next().unwrap().to_string()
                + TOKEN_POINT + mantissa_tail + TOKEN_E + &move_steps.to_string();
            Ok(ret)
        },
        FormatType::AtMid => {
            let idx = idx as usize;
            let left = s.get(0..idx).unwrap();
            let right = s.get(idx + 1..).unwrap();
            let left_len = left.len();
            let right_len = right.len();
            // Normal case: the integer part does not start with '0'.
            if s.chars().next().unwrap() != '0' {
                let ret = s.chars().next().unwrap().to_string()
                    + TOKEN_POINT + left.get(1..).unwrap() + right
                    + TOKEN_E + &(left_len - 1).to_string();
                return Ok(ret);
            }
            // Leading zero: locate the first significant fraction digit.
            let mut right_index = 0;
            while right_index < right_len {
                if right.chars().nth(right_index).unwrap() != '0' {
                    break;
                }
                right_index += 1;
            }
            // Every fraction digit is zero ("0.0", "0.00", ...): the value is
            // zero. The original panicked here on `nth(...).unwrap()`.
            if right_index == right_len {
                return Ok("0.0e0".to_string());
            }
            if right_index == right_len - 1 {
                // Single significant digit: pad the fraction with a zero.
                let ret = right.chars().nth(right_index).unwrap().to_string()
                    + TOKEN_POINT + TOKEN_ZERO
                    + TOKEN_E + &(-1 - right_index as isize).to_string();
                Ok(ret)
            } else {
                let ret = right.chars().nth(right_index).unwrap().to_string()
                    + TOKEN_POINT + right.get(right_index + 1..).unwrap()
                    + TOKEN_E + &(-1 - right_index as isize).to_string();
                Ok(ret)
            }
        }
        FormatType::AtNone => {
            // Pure integer: "d" -> "d.0e0", "ddd" -> "d.dde2", etc.
            let rest = s.get(1..).unwrap();
            let mantissa_tail = if rest.is_empty() { TOKEN_ZERO } else { rest };
            let ret = s.chars().next().unwrap().to_string()
                + TOKEN_POINT + mantissa_tail + TOKEN_E + &(len - 1).to_string();
            Ok(ret)
        }
    }
}
/// Extracts the exponent from a scientific-notation string: everything after
/// the first 'e', parsed as a signed integer. Returns 0 when there is no 'e'
/// or the suffix does not parse.
pub fn get_exponent(s: &str) -> isize {
    s.find('e')
        .and_then(|pos| s.get(pos + 1..))
        .and_then(|suffix| suffix.parse::<isize>().ok())
        .unwrap_or(0)
}
/// Where the decimal point sits in a numeric string.
// Derives added so callers and tests can compare/print classifications;
// comments translated from Chinese.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FormatType {
    /// Point at the start, e.g. ".11".
    AtHead,
    /// Point in the middle, e.g. "11.22".
    AtMid,
    /// Point at the end, e.g. "11.".
    AtRear,
    /// No point at all, e.g. "22".
    AtNone,
}

/// Classifies a string of byte length `len` whose decimal point sits at byte
/// index `idx` (`-1` when there is no point).
pub fn get_format(idx: isize, len: usize) -> FormatType {
    // Handle the "no point" case before touching `len - 1`: the original
    // evaluated `(len - 1) as isize` first, which underflows (and panics in
    // debug builds) when `len == 0`.
    if idx == -1 {
        return FormatType::AtNone;
    }
    if idx == 0 {
        return FormatType::AtHead;
    }
    if idx == (len - 1) as isize {
        return FormatType::AtRear;
    }
    FormatType::AtMid
}
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for #49685: drop elaboration was not revealing the
// value of `impl Trait` returns, leading to an ICE.
fn main() {
    // Nested flat_map over an `impl Trait`-returning function — the exact
    // shape that triggered the drop-elaboration ICE in #49685. Successful
    // compilation is the test; the value itself is discarded.
    let _ = Some(())
        .into_iter()
        .flat_map(|_| Some(()).into_iter().flat_map(func));
}
/// Returns an opaque iterator; drop elaboration must reveal the concrete
/// type behind `impl Iterator` (regression coverage for #49685).
fn func(_: ()) -> impl Iterator<Item = ()> {
    Some(()).into_iter().flat_map(|_| vec![])
}
|
mod guilds;
mod leave;
mod ping;
mod prefix;
use serenity::framework::standard::macros::group;
use self::guilds::GUILDS_COMMAND;
use self::leave::LEAVE_COMMAND;
use self::ping::PING_COMMAND;
use self::prefix::PREFIX_COMMAND;
/// The bot's general command group; members are listed in the `commands`
/// attribute and resolved via the `*_COMMAND` statics imported above.
#[group()]
#[commands(ping, prefix, guilds, leave)]
pub struct Commands;
|
extern crate fall_test;
extern crate fall_tree;
use fall_test::{rust, match_ast};
use fall_tree::dump_file;
/// Parses `code` with the test Rust grammar and renders the resulting syntax
/// tree as a text dump for comparison in the tests below.
fn ast(code: &str) -> String {
    let parsed = rust::LANG.parse(code.to_owned());
    dump_file(&parsed)
}
/// `pub` is optional on both struct and fn definitions; when present it shows
/// up as a KW_PUB token in the tree.
#[test]
fn opt_pub() {
    match_ast(&ast("\
struct Foo {}
fn bar() {}
pub struct Baz {}
pub fn quux() {}
"), r#"
FILE
STRUCT_DEF
STRUCT "struct"
IDENT "Foo"
LBRACE "{"
RBRACE "}"
FN_DEF
FN "fn"
IDENT "bar"
LPAREN "("
RPAREN ")"
LBRACE "{"
RBRACE "}"
STRUCT_DEF
KW_PUB "pub"
STRUCT "struct"
IDENT "Baz"
LBRACE "{"
RBRACE "}"
FN_DEF
KW_PUB "pub"
FN "fn"
IDENT "quux"
LPAREN "("
RPAREN ")"
LBRACE "{"
RBRACE "}"
"#)
}
/// A missing token yields an empty ERROR node at the expected position, and
/// the stray trailing identifier becomes its own ERROR node.
#[test]
fn missing_token() {
    match_ast(&ast("fn foo foo"), r#"
FILE
FN_DEF
FN "fn"
IDENT "foo"
ERROR ""
ERROR
IDENT "foo"
"#);
}
/// Unparseable tokens between valid items are collected into ERROR nodes
/// while parsing resumes at the next recognizable item.
#[test]
fn skipping() {
    match_ast(&ast("foo fn foo(){} bar baz struct S {} quuz"), r#"
FILE
ERROR
IDENT "foo"
FN_DEF
FN "fn"
IDENT "foo"
LPAREN "("
RPAREN ")"
LBRACE "{"
RBRACE "}"
ERROR
IDENT "bar"
IDENT "baz"
STRUCT_DEF
STRUCT "struct"
IDENT "S"
LBRACE "{"
RBRACE "}"
ERROR
IDENT "quuz""#);
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::any::Any;
use std::collections::hash_map::DefaultHasher;
use std::fmt::Debug;
use std::fmt::Display;
use std::fmt::Formatter;
use std::hash::Hash;
use std::hash::Hasher;
use std::sync::Arc;
use common_catalog::plan::PartInfo;
use common_compress::CompressAlgorithm;
/// Format-specific split payload. `typetag` makes trait objects serde
/// round-trippable by tagging the concrete type under a "type" key.
#[typetag::serde(tag = "type")]
pub trait DynData: Send + Sync + 'static {
    /// Enables downcasting to the concrete format-info type.
    fn as_any(&self) -> &dyn Any;
}
/// Metadata for one source file; shared (via `Arc`) by all of its splits.
#[derive(serde::Serialize, serde::Deserialize, Clone, Eq)]
pub struct FileInfo {
    pub path: String,
    /// Total file size in bytes.
    pub size: usize,
    /// Number of splits the file was divided into.
    pub num_splits: usize,
    /// Compression algorithm, if the file is compressed.
    pub compress_alg: Option<CompressAlgorithm>,
}
impl PartialEq for FileInfo {
    /// Equality is by path alone; size/split/compression metadata is
    /// deliberately ignored.
    fn eq(&self, other: &Self) -> bool {
        self.path == other.path
    }
}
/// One contiguous byte range of a file, read and parsed as a unit.
#[derive(serde::Serialize, serde::Deserialize, Clone)]
pub struct SplitInfo {
    pub file: Arc<FileInfo>,
    /// Zero-based index of this split within its file.
    pub seq_in_file: usize,
    /// Byte offset of the split within the file.
    pub offset: usize,
    /// Length of the split in bytes.
    pub size: usize,
    pub num_file_splits: usize,
    /// Opaque format-specific payload (see [`DynData`]).
    pub format_info: Option<Arc<dyn DynData>>,
}
impl PartialEq for SplitInfo {
    /// Identity is (file, sequence number, split count). NOTE(review):
    /// offset/size/format_info are not compared — presumably implied by the
    /// compared fields; confirm before relying on this elsewhere.
    fn eq(&self, other: &Self) -> bool {
        self.file == other.file
            && self.seq_in_file == other.seq_in_file
            && self.num_file_splits == other.num_file_splits
    }
}
impl Eq for SplitInfo {}
#[typetag::serde(name = "stage_file_partition")]
impl PartInfo for SplitInfo {
    fn as_any(&self) -> &dyn Any {
        self
    }
    /// Type-checked equality against another boxed `PartInfo`: false when the
    /// other part is not a `SplitInfo`.
    fn equals(&self, info: &Box<dyn PartInfo>) -> bool {
        match info.as_any().downcast_ref::<SplitInfo>() {
            None => false,
            Some(other) => self == other,
        }
    }
    /// Hashes (path, seq_in_file) — a subset of the `PartialEq` key, so equal
    /// splits hash equally. NOTE(review): `DefaultHasher` output is not
    /// guaranteed stable across Rust releases; fine in-process, do not
    /// persist these values.
    fn hash(&self) -> u64 {
        let mut s = DefaultHasher::new();
        self.file.path.hash(&mut s);
        self.seq_in_file.hash(&mut s);
        s.finish()
    }
}
impl Debug for SplitInfo {
    /// Compact debug view: only the positional fields; the (potentially
    /// long) file path and format metadata are omitted.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SplitInfo")
            .field("seq_in_file", &self.seq_in_file)
            .field("offset", &self.offset)
            .field("size", &self.size)
            .finish()
    }
}
impl Display for SplitInfo {
    /// Human-readable form: `path[split i/n][start..end(len bytes)]` for
    /// multi-split files, `path(len bytes)` when the size is known, and the
    /// bare `path` otherwise.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match (self.file.num_splits, self.size) {
            (n, len) if n > 1 => write!(
                f,
                "{}[split {}/{}][{}..{}({} bytes)]",
                self.file.path,
                self.seq_in_file,
                n,
                self.offset,
                self.offset + len,
                len,
            ),
            (_, len) if len > 0 => write!(f, "{}({} bytes)", self.file.path, len),
            _ => write!(f, "{}", self.file.path),
        }
    }
}
/// Partitions `size` bytes into `(offset, length)` chunks of at most
/// `split_size` bytes; the final chunk carries the remainder.
///
/// Returns an empty vector when `size == 0`. Panics if `split_size == 0`.
pub fn split_by_size(size: usize, split_size: usize) -> Vec<(usize, usize)> {
    // Ceiling division: number of chunks needed to cover `size`.
    let count = (size + split_size - 1) / split_size;
    (0..count)
        .map(|i| {
            let offset = i * split_size;
            (offset, split_size.min(size - offset))
        })
        .collect()
}
impl SplitInfo {
    /// The starting text row is only known (0) for the first split of a
    /// file; later splits begin mid-stream at an unknown row.
    pub fn start_row_text(&self) -> Option<usize> {
        (self.seq_in_file == 0).then(|| 0)
    }
    /// Builds the single-split descriptor for a streaming source, where the
    /// total size is unknown and reported as 0.
    pub fn from_stream_split(path: String, compress_alg: Option<CompressAlgorithm>) -> Self {
        let file = FileInfo {
            path,
            size: 0,
            num_splits: 1,
            compress_alg,
        };
        SplitInfo {
            file: Arc::new(file),
            seq_in_file: 0,
            offset: 0,
            size: 0,
            num_file_splits: 1,
            format_info: None,
        }
    }
}
|
use anyhow::Context;
use rusqlite::OptionalExtension;
use stark_hash::Felt;
/// This migration renames the starknet_blocks to block_headers and adds the state_commitment,
/// transaction_count and event_count columns, and also renames the root column to storage_commitment.
///
/// Where `class_commitment` is NULL or zero, `state_commitment` is simply a
/// copy of `storage_commitment`; for the remaining blocks it is recomputed as
/// a Poseidon hash over (b"STARKNET_STATE_V0", storage, class).
pub(crate) fn migrate(tx: &rusqlite::Transaction<'_>) -> anyhow::Result<()> {
    tracing::info!("Refactoring the block_headers table, this may take a while");
    // Step 1: rename the table and the `root` column.
    tx.execute("ALTER TABLE starknet_blocks RENAME TO block_headers", [])
        .context("Renaming starknet_blocks table to block_headers")?;
    tx.execute(
        "ALTER TABLE block_headers RENAME COLUMN root TO storage_commitment",
        [],
    )
    .context("Renaming block_headers.root column to block_headers.storage_commitment")?;
    // Step 2: add the new columns. `state_commitment` defaults to zero,
    // which below doubles as the "not yet computed" marker.
    tx.execute(
        "ALTER TABLE block_headers ADD COLUMN state_commitment BLOB NOT NULL DEFAULT x'0000000000000000000000000000000000000000000000000000000000000000'",
        [],
    )
    .context("Adding state_commitment column")?;
    tx.execute(
        "ALTER TABLE block_headers ADD COLUMN transaction_count INTEGER NOT NULL DEFAULT 0",
        [],
    )
    .context("Adding transaction_count column")?;
    tx.execute(
        "ALTER TABLE block_headers ADD COLUMN event_count INTEGER NOT NULL DEFAULT 0",
        [],
    )
    .context("Adding event_count column")?;
    // Step 3: backfill the per-block transaction and event counts.
    tx.execute(
        r"UPDATE block_headers SET transaction_count = (
            SELECT COUNT(1) FROM starknet_transactions WHERE starknet_transactions.block_hash = block_headers.hash
        )",
        [],
    )
    .context("Setting tx counts")?;
    tx.execute(
        r"UPDATE block_headers SET event_count = (
            SELECT COUNT(1) FROM starknet_events WHERE starknet_events.block_number = block_headers.number
        )",
        [],
    )
    .context("Setting event counts")?;
    // Step 4: blocks without a class commitment can copy the storage
    // commitment directly; no hashing needed.
    tx.execute(
        r"UPDATE block_headers SET state_commitment = storage_commitment WHERE class_commitment IS NULL OR class_commitment = x'0000000000000000000000000000000000000000000000000000000000000000'",
        [],
    )
    .context("Setting state_commitment = storage_commitment")?;
    // Step 5: find the first block whose state_commitment is still the
    // zero marker. If there is none, everything was covered above.
    let Some(start): Option<i64> = tx
        .query_row(
            "SELECT number FROM block_headers WHERE state_commitment = x'0000000000000000000000000000000000000000000000000000000000000000' ORDER BY number ASC LIMIT 1",
            [],
            |row| row.get(0),
        )
        .optional()
        .context("Counting rows")? else {
            return Ok(());
        };
    let mut reader = tx
        .prepare(
            "SELECT number, storage_commitment, class_commitment FROM block_headers WHERE number >= ?",
        )
        .context("Preparing commitment reader statement")?;
    let mut writer = tx
        .prepare("UPDATE block_headers SET state_commitment = ? WHERE number = ?")
        .context("Preparing commitment writer statement")?;
    let rows = reader
        .query_map([start], |row| {
            let number: u64 = row.get(0).unwrap();
            let storage: Vec<u8> = row.get(1).unwrap();
            let class: Vec<u8> = row.get(2).unwrap();
            Ok((number, storage, class))
        })
        .context("Querying commitments")?;
    // Domain-separation tag for the state commitment hash.
    const GLOBAL_STATE_VERSION: Felt = pathfinder_common::felt_bytes!(b"STARKNET_STATE_V0");
    // Step 6: recompute state_commitment = poseidon(tag, storage, class)
    // for every remaining block.
    for row in rows {
        let (number, storage, class) = row.context("Iterating over rows")?;
        let storage = Felt::from_be_slice(&storage).context("Parsing storage commitment bytes")?;
        let class = Felt::from_be_slice(&class).context("Parsing class commitment bytes")?;
        let state_commitment: Felt = stark_poseidon::poseidon_hash_many(&[
            GLOBAL_STATE_VERSION.into(),
            storage.into(),
            class.into(),
        ])
        .into();
        writer
            .execute(rusqlite::params![number, state_commitment.as_be_bytes()])
            .context("Updating state commitment")?;
    }
    Ok(())
}
|
pub mod number_theory;
pub mod search;
pub mod sorting;
|
use std::fmt;
use std::ops::Range;
use std::rc::Rc;
use super::{IoCtlDevice, IoCtlManager};
use crate::platform::traits::BatteryIterator;
use crate::Result;
/// Iterates over ACPI battery slots queried through ioctl.
pub struct IoCtlIterator {
    /// Shared handle used to issue the per-slot queries.
    manager: Rc<IoCtlManager>,
    /// Remaining battery slot indices still to be visited.
    range: Range<libc::c_int>,
}
impl Iterator for IoCtlIterator {
    type Item = Result<IoCtlDevice>;
    /// Yields the next battery device, skipping slots whose battery info
    /// (`bif`) or status (`bst`) comes back as `Ok(None)`.
    fn next(&mut self) -> Option<Self::Item> {
        while let Some(idx) = self.range.next() {
            let bif = self.manager.bif(idx);
            let bst = self.manager.bst(idx);
            match (bif, bst) {
                (Ok(Some(bif)), Ok(Some(bst))) => {
                    return Some(Ok(IoCtlDevice::new(idx, bif, bst)));
                }
                (Err(e), _) | (_, Err(e)) => return Some(Err(e)),
                // Invalid slot (`Ok(None)`): skip silently, same as
                // FreeBSD does.
                _ => {}
            }
        }
        None
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound is 0 because invalid slots may be skipped; upper
        // bound is the number of slots left to probe.
        let remaining = (self.range.end - self.range.start) as usize;
        (0, Some(remaining))
    }
}
impl BatteryIterator for IoCtlIterator {
    type Manager = IoCtlManager;
    type Device = IoCtlDevice;
    /// Creates an iterator over slots `0..count` as reported by the
    /// manager; fails if the slot count itself cannot be queried.
    fn new(manager: Rc<Self::Manager>) -> Result<Self> {
        let batteries = manager.count()?;
        Ok(Self {
            manager,
            range: (0..batteries),
        })
    }
}
impl fmt::Debug for IoCtlIterator {
    /// Shows the remaining slot range.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Report the actual type name; it was previously shown as
        // "FreeBSDIterator", which no longer matches the struct.
        f.debug_struct("IoCtlIterator")
            .field("start", &self.range.start)
            .field("end", &self.range.end)
            .finish()
    }
}
|
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::collections::VecDeque;
use std::fmt;
use std::io::{self, BufRead};
/// A 2-D grid point/direction: `x` is the column, `y` the row (y grows
/// downward). `Ord` is derived so it can key a `BTreeMap`.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct Vector {
    x: i32,
    y: i32,
}
impl fmt::Display for Vector {
    /// Renders as `(x, y)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "({}, {})", self.x, self.y)
    }
}
/// A vector paired with a distance rank `d`. The derived `Ord` compares `d`
/// first (field order), so sorting a `Vec<Q>` orders by distance.
/// Note: `v` holds a reduced direction in `get_slope`, but an absolute
/// asteroid position in `visible_from`.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct Q {
    d: i32,
    v: Vector,
}
impl Vector {
    /// Angle of the vector in radians via `atan2(y, x)`, in (-PI, PI].
    /// Since grid y grows downward, "up" corresponds to -PI/2.
    fn angle(&self) -> f64 {
        (self.y as f64).atan2(self.x as f64)
    }
}
/// Totally orders two partially-ordered values.
///
/// # Panics
/// Panics with a descriptive message when the values are incomparable
/// (`partial_cmp` returns `None`, e.g. a NaN float) instead of the previous
/// bare `unwrap`.
fn risky_cmp<T>(a: &T, b: &T) -> Ordering
where
    T: PartialOrd<T>,
{
    a.partial_cmp(b)
        .expect("risky_cmp: values are incomparable (e.g. NaN)")
}
/// Greatest common divisor of two non-negative integers via Euclid's
/// algorithm; `gcd(n, 0) == n`.
///
/// # Panics
/// Panics if either argument is negative.
fn gcd(x: i32, y: i32) -> i32 {
    assert!(x >= 0);
    assert!(y >= 0);
    let (mut big, mut small) = (x.max(y), x.min(y));
    while small != 0 {
        let rem = big % small;
        big = small;
        small = rem;
    }
    big
}
/// Direction from `a` to `b` reduced to lowest terms (`v`), together with
/// the reduction factor `d`, which ranks points along that direction by
/// distance.
///
/// NOTE(review): if `a == b` then `gcd(0, 0) == 0` and the divisions below
/// panic — callers must never pass the same point twice.
fn get_slope(a: &Vector, b: &Vector) -> Q {
    let x = b.x - a.x;
    let y = b.y - a.y;
    let d = gcd(x.abs(), y.abs());
    Q {
        d,
        v: Vector { x: x / d, y: y / d },
    }
}
/// Converts an ASCII asteroid map into positions: every `'#'` cell becomes
/// a `Vector` with `x` = column and `y` = row.
fn parse_map(lines: Vec<String>) -> Vec<Vector> {
    lines
        .iter()
        .enumerate()
        .flat_map(|(y, row)| {
            row.chars().enumerate().filter_map(move |(x, cell)| {
                (cell == '#').then(|| Vector {
                    x: x as i32,
                    y: y as i32,
                })
            })
        })
        .collect()
}
/// Groups all other asteroids by their reduced direction from asteroid `i`.
/// Each group is sorted by distance rank, so index 0 is the closest (the
/// only one visible along that line of sight).
fn visible_from(asteroids: &Vec<Vector>, i: usize) -> BTreeMap<Vector, Vec<Q>> {
    let origin = &asteroids[i];
    let mut groups: BTreeMap<Vector, Vec<Q>> = BTreeMap::new();
    for (j, other) in asteroids.iter().enumerate() {
        if j == i {
            continue;
        }
        let slope = get_slope(origin, other);
        // `v` here stores the asteroid's absolute position; `d` its rank.
        groups
            .entry(slope.v)
            .or_default()
            .push(Q { d: slope.d, v: *other });
    }
    for group in groups.values_mut() {
        group.sort();
    }
    groups
}
/// Finds the asteroid that can see the most others; returns its visibility
/// map (direction -> sorted asteroids) and its index.
fn part1(asteroids: &Vec<Vector>) -> (BTreeMap<Vector, Vec<Q>>, usize) {
    (0..asteroids.len())
        .map(|idx| (visible_from(asteroids, idx), idx))
        .max_by_key(|(visible, _)| visible.len())
        .unwrap()
}
/// Simulates the rotating laser: sweeps over the direction groups in angle
/// order, vaporizing the closest remaining asteroid in each group, and
/// returns `x * 100 + y` of the 200th asteroid destroyed (if reached).
fn part2(mut visible: BTreeMap<Vector, Vec<Q>>) -> Option<i32> {
    let mut dirs: Vec<Vector> = visible.keys().map(|v| *v).collect();
    // Order directions by angle. atan2 on integer-derived floats cannot
    // produce NaN, so risky_cmp's unwrap cannot fire here.
    dirs.sort_by(|a, b| risky_cmp(&a.angle(), &b.angle()));
    // The sweep starts pointing straight up, i.e. at angle -PI/2
    // (grid y grows downward).
    let (i, _) = dirs
        .iter()
        .enumerate()
        .filter(|(_, v)| v.angle() >= (-std::f64::consts::PI / 2.))
        .next()
        .unwrap();
    // Rotate the direction order so the sweep begins at that direction.
    let mut deque: VecDeque<&Vector> = dirs.iter().collect();
    deque.rotate_left(i);
    let mut j = 0;
    let mut two_hundred = None;
    for dir in deque.iter().cycle() {
        if let Some(qs) = visible.get_mut(dir) {
            // Entries are sorted by distance rank, so index 0 is closest.
            let q = qs.remove(0);
            println!("{} {:?}", j, q);
            if qs.is_empty() {
                visible.remove(dir);
            }
            // j is 0-based, so j == 199 is the 200th vaporization.
            if j == 199 {
                println!("^^^^^^^^");
                two_hundred = Some(q.v.x * 100 + q.v.y);
            }
            if visible.is_empty() {
                break;
            }
            j += 1;
        }
    }
    two_hundred
}
/// Reads the asteroid map from stdin and prints the part 1 and part 2
/// answers for AoC day 10.
fn main() {
    let stdin = io::stdin();
    let handle = stdin.lock();
    let lines = handle.lines().map(|l| l.unwrap()).collect::<Vec<String>>();
    let asteroids = parse_map(lines);
    let (v, i) = part1(&asteroids);
    println!("{:?} {:?}", v, asteroids[i]);
    println!("{}", part2(v).unwrap());
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Small example: the best asteroid sees 8 others.
    #[test]
    fn test_example1() {
        let lines: Vec<String> = vec![
            String::from(".#..#"),
            String::from("....."),
            String::from("#####"),
            String::from("....#"),
            String::from("...##"),
        ];
        let asteroids = parse_map(lines);
        let (v, _) = part1(&asteroids);
        assert_eq!(v.len(), 8);
    }
    /// Part 2 smoke test: runs the laser sweep from the known-best
    /// asteroid (8, 3) without asserting a result.
    #[test]
    fn test_part2_example() {
        let lines: Vec<String> = vec![
            String::from(".#....#####...#.."),
            String::from("##...##.#####..##"),
            String::from("##...#...#.#####."),
            String::from("..#.....#...###.."),
            String::from("..#.#.....#....##"),
        ];
        let asteroids = parse_map(lines);
        let i = asteroids
            .iter()
            .position(|a| a == &Vector { x: 8, y: 3 })
            .expect("should find it");
        let visible = visible_from(&asteroids, i);
        part2(visible);
    }
    /// Large example: best station is (11, 13) and the 200th vaporized
    /// asteroid yields 802.
    #[test]
    fn test_big_example() {
        let lines: Vec<String> = vec![
            String::from(".#..##.###...#######"),
            String::from("##.############..##."),
            String::from(".#.######.########.#"),
            String::from(".###.#######.####.#."),
            String::from("#####.##.#.##.###.##"),
            String::from("..#####..#.#########"),
            String::from("####################"),
            String::from("#.####....###.#.#.##"),
            String::from("##.#################"),
            String::from("#####.##.###..####.."),
            String::from("..######..##.#######"),
            String::from("####.##.####...##..#"),
            String::from(".#####..#.######.###"),
            String::from("##...#.##########..."),
            String::from("#.##########.#######"),
            String::from(".####.#.###.###.#.##"),
            String::from("....##.##.###..#####"),
            String::from(".#.#.###########.###"),
            String::from("#.#.#.#####.####.###"),
            String::from("###.##.####.##.#..##"),
        ];
        let asteroids = parse_map(lines);
        let (visible, i) = part1(&asteroids);
        let best = asteroids[i];
        assert_eq!(best.x, 11);
        assert_eq!(best.y, 13);
        assert_eq!(Some(802), part2(visible));
    }
}
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-pretty `dyn ::foo` parses differently in the current edition
use std::fmt::Display;
// The value formatted through the trait objects below.
static BYTE: u8 = 33;
fn main() {
    // Exercise the accepted spellings of `dyn Trait` types: bound-first
    // reference, boxed with trailing lifetime, and parenthesized
    // (optionally fully-qualified) trait paths.
    let x: &(dyn 'static + Display) = &BYTE;
    let y: Box<dyn Display + 'static> = Box::new(BYTE);
    let _: &dyn (Display) = &BYTE;
    let _: &dyn (::std::fmt::Display) = &BYTE;
    // All of them must format the same underlying byte.
    let xstr = format!("{}", x);
    let ystr = format!("{}", y);
    assert_eq!(xstr, "33");
    assert_eq!(ystr, "33");
}
|
use piston_window::*;
use piston_window::text::Text;
use crate::widget::Widget;
use crate::widget::WidgetImpl;
use crate::widget::Rect;
use crate::h_scroll::HScroll;
/// A panel widget that wraps a `WidgetImpl` and owns a horizontal
/// scrollbar child (added in `new`).
pub struct ScrollPanel {
    widget: WidgetImpl
}
impl ScrollPanel {
    /// Builds a panel whose initial child is a horizontal scrollbar.
    pub fn new() -> Self {
        let mut inner = WidgetImpl::new();
        inner.add_child(Box::new(HScroll::new()));
        ScrollPanel { widget: inner }
    }
}
impl Widget for ScrollPanel {
fn layout(&mut self, bounds: Rect) {
println!("ScrollPanel {:?}", bounds);
self.widget.layout(bounds);
}
fn get_bounds(&self) -> Rect {
return self.widget.bounds;
}
fn add_child(&mut self, child: Box<Widget>) {
self.widget.children.insert(0, child);
}
fn draw(&self, ctx: Context, gl: &mut G2d, glyphs: &mut Glyphs) {
self.widget.draw(ctx, gl, glyphs)
}
} |
use super::track::Track;
use super::util;
use super::writer::Writer;
use crate::MkvId;
use std::ops::Deref;
/// A Matroska audio track: the generic `Track` plus the audio-specific
/// settings serialized under the MkvAudio element.
pub struct AudioTrack {
    track_: Track,
    // Audio track element names.
    bit_depth_: u64,
    channels_: u64,
    sample_rate_: f64,
}
// Exposes the generic `Track` accessors directly on `AudioTrack`.
impl Deref for AudioTrack {
    type Target = Track;
    fn deref(&self) -> &Track {
        &self.track_
    }
}
impl AudioTrack {
    /// Creates a track with defaults: unknown bit depth (0), one channel,
    /// and an unset sample rate (0.0).
    pub fn new() -> AudioTrack {
        AudioTrack {
            track_: Track::new(),
            bit_depth_: 0,
            channels_: 1,
            sample_rate_: 0.0,
        }
    }
    pub fn set_bit_depth(&mut self, bit_depth: u64) {
        self.bit_depth_ = bit_depth;
    }
    pub fn bit_depth(&self) -> u64 {
        self.bit_depth_
    }
    pub fn set_channels(&mut self, channels: u64) {
        self.channels_ = channels;
    }
    pub fn channels(&self) -> u64 {
        self.channels_
    }
    pub fn set_sample_rate(&mut self, sample_rate: f64) {
        self.sample_rate_ = sample_rate;
    }
    pub fn sample_rate(&self) -> f64 {
        self.sample_rate_
    }
    /// Size in bytes of the children of the MkvAudio element (sampling
    /// frequency, channels, and optional bit depth), excluding the MkvAudio
    /// master-element header itself.
    ///
    /// Shared by `PayloadSize` and `Write`, which previously duplicated
    /// this computation.
    fn audio_settings_size(&self) -> u64 {
        let mut size =
            util::EbmlElementSizeArgF32(MkvId::MkvSamplingFrequency, self.sample_rate_ as f32);
        size += util::EbmlElementSizeArgU64(MkvId::MkvChannels, self.channels_);
        // BitDepth is optional and only serialized when explicitly set.
        if self.bit_depth_ > 0 {
            size += util::EbmlElementSizeArgU64(MkvId::MkvBitDepth, self.bit_depth_);
        }
        size
    }
    /// Total serialized payload size: the parent `Track` payload plus the
    /// MkvAudio master element (header + settings payload).
    pub fn PayloadSize(&self) -> u64 {
        let settings = self.audio_settings_size();
        self.track_.PayloadSize()
            + settings
            + util::EbmlMasterElementSize(MkvId::MkvAudio, settings)
    }
    /// Serializes the track, then the MkvAudio element and its settings.
    /// Returns false on any write failure, or if the bytes written do not
    /// match the declared settings size.
    pub fn Write(&self, writer: &mut dyn Writer) -> bool {
        if !self.track_.Write(writer) {
            return false;
        }
        let size = self.audio_settings_size();
        if !util::WriteEbmlMasterElement(writer, MkvId::MkvAudio, size) {
            return false;
        }
        let payload_position = writer.get_position();
        if !util::WriteEbmlElementArgF32(
            writer,
            MkvId::MkvSamplingFrequency,
            self.sample_rate_ as f32,
        ) {
            return false;
        }
        if !util::WriteEbmlElementArgU64(writer, MkvId::MkvChannels, self.channels_) {
            return false;
        }
        if self.bit_depth_ > 0 {
            if !util::WriteEbmlElementArgU64(writer, MkvId::MkvBitDepth, self.bit_depth_) {
                return false;
            }
        }
        // Sanity check: bytes actually written must equal the size we
        // declared in the MkvAudio header.
        writer.get_position() - payload_position == size
    }
}
|
#![feature(test)]
#![deny(warnings)]
extern crate futures;
extern crate hyper;
extern crate pretty_env_logger;
extern crate test;
extern crate tokio_core;
use std::net::SocketAddr;
use futures::{Future, Stream};
use tokio_core::reactor::{Core, Handle};
use tokio_core::net::TcpListener;
use hyper::client;
use hyper::header::{ContentLength, ContentType};
use hyper::Method;
use hyper::server::{self, Service};
/// Benchmarks sequential GET requests against an in-process hello server;
/// each iteration issues one request and drains the response body.
#[bench]
fn get_one_at_a_time(b: &mut test::Bencher) {
    let _ = pretty_env_logger::init();
    let mut core = Core::new().unwrap();
    let handle = core.handle();
    let addr = spawn_hello(&handle);
    let client = hyper::Client::new(&handle);
    let url: hyper::Url = format!("http://{}/get", addr).parse().unwrap();
    // NOTE(review): 160 * 2 looks like an estimate of request + response
    // header bytes on top of the body — confirm.
    b.bytes = 160 * 2 + PHRASE.len() as u64;
    b.iter(move || {
        let work = client.get(url.clone()).and_then(|res| {
            res.body().for_each(|_chunk| {
                Ok(())
            })
        });
        core.run(work).unwrap();
    });
}
/// Benchmarks sequential POST requests with a small fixed body; each
/// iteration builds the request, sends it, and drains the response.
#[bench]
fn post_one_at_a_time(b: &mut test::Bencher) {
    let _ = pretty_env_logger::init();
    let mut core = Core::new().unwrap();
    let handle = core.handle();
    let addr = spawn_hello(&handle);
    let client = hyper::Client::new(&handle);
    let url: hyper::Url = format!("http://{}/get", addr).parse().unwrap();
    let post = "foo bar baz quux";
    // NOTE(review): 180 * 2 looks like an estimate of request + response
    // header bytes on top of the two bodies — confirm.
    b.bytes = 180 * 2 + post.len() as u64 + PHRASE.len() as u64;
    b.iter(move || {
        let mut req = client::Request::new(Method::Post, url.clone());
        req.headers_mut().set(ContentLength(post.len() as u64));
        req.set_body(post);
        let work = client.request(req).and_then(|res| {
            res.body().for_each(|_chunk| {
                Ok(())
            })
        });
        core.run(work).unwrap();
    });
}
// Response body served by `Hello`; the changelog is used as a large-ish
// realistic payload.
static PHRASE: &'static [u8] = include_bytes!("../CHANGELOG.md"); //b"Hello, World!";
/// Minimal service that answers every request with `PHRASE`.
#[derive(Clone, Copy)]
struct Hello;
impl Service for Hello {
    type Request = server::Request;
    type Response = server::Response;
    type Error = hyper::Error;
    // Responses are ready immediately, so an already-finished future works.
    type Future = ::futures::Finished<Self::Response, hyper::Error>;
    fn call(&self, _req: Self::Request) -> Self::Future {
        ::futures::finished(
            server::Response::new()
                .with_header(ContentLength(PHRASE.len() as u64))
                .with_header(ContentType::plaintext())
                .with_body(PHRASE)
        )
    }
}
/// Starts the `Hello` server on an ephemeral localhost port on the given
/// reactor handle and returns the address it is listening on.
fn spawn_hello(handle: &Handle) -> SocketAddr {
    // Port 0 lets the OS pick a free port so parallel benches don't clash.
    let bind_addr = "127.0.0.1:0".parse().unwrap();
    let listener = TcpListener::bind(&bind_addr, handle).unwrap();
    let local_addr = listener.local_addr().unwrap();
    let conn_handle = handle.clone();
    let http = hyper::server::Http::new();
    let server = listener
        .incoming()
        .for_each(move |(socket, peer)| {
            http.bind_connection(&conn_handle, socket, peer, Hello);
            Ok(())
        })
        .then(|_| Ok(()));
    handle.spawn(server);
    local_addr
}
|
use std::collections::VecDeque;
use std::convert::TryFrom;
use std::io;
use error::Error;
pub mod error;
/// Used to read input for the program.
///
/// Mainly used to allow easier testing. Implementations may return
/// `Error::NoAvailableInput` to make `Prog::run` pause in `NeedInput`
/// state instead of failing.
pub trait ProgInput {
    fn read(&mut self) -> Result<String, Error>;
}
/// Used to write output from the program.
///
/// Mainly used to allow easier testing; each opcode-4 value is written as
/// one decimal string.
pub trait ProgOutput {
    fn write(&mut self, output: &str) -> Result<(), Error>;
}
/// Reads program input interactively from stdin, one line per request.
#[derive(Copy, Clone, Debug, Default, Hash, Eq, PartialEq)]
pub struct StdInProgInput {}
impl StdInProgInput {
    pub fn new() -> Self {
        StdInProgInput {}
    }
}
impl ProgInput for StdInProgInput {
    /// Blocks until a full line (including the trailing newline) is read;
    /// `Prog::run` trims it before parsing.
    fn read(&mut self) -> Result<String, Error> {
        let mut input = String::new();
        let _ = io::stdin().read_line(&mut input)?;
        Ok(input)
    }
}
/// Writes program output to stdout, one line per value.
#[derive(Copy, Clone, Debug, Default, Hash, Eq, PartialEq)]
pub struct StdOutProgOutput {}
impl StdOutProgOutput {
    pub fn new() -> Self {
        StdOutProgOutput {}
    }
}
impl ProgOutput for StdOutProgOutput {
    fn write(&mut self, output: &str) -> Result<(), Error> {
        println!("{}", output);
        Ok(())
    }
}
/// Parses a comma-separated Intcode program (whitespace around each number
/// is ignored) into its initial memory state. Fails on the first token that
/// is not a valid `i64`.
pub fn parse_mem_state(input: &str) -> Result<Vec<i64>, std::num::ParseIntError> {
    input.split(',').map(|tok| tok.trim().parse()).collect()
}
/// How an instruction parameter is interpreted (digit 0/1/2 in the opcode).
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
enum ParamMode {
    /// The parameter is an absolute memory address.
    Position,
    /// The parameter is the value itself.
    Immediate,
    /// The parameter is an offset from the relative base.
    Relative,
}
/// The operation as well as the parameter modes for operands.
/// Numeric opcodes (see `decode_op_code`) are noted per variant.
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
enum OpCode {
    /// 1
    Add,
    /// 2
    Mul,
    /// 3
    Input,
    /// 4
    Output,
    /// 5
    JumpIfTrue,
    /// 6
    JumpIfFalse,
    /// 7
    LessThan,
    /// 8
    Equals,
    /// 9
    AdjustsRelativeBase,
    /// 99
    Halt,
}
/// Extracts the mode of parameter `param` (0-based) from a full opcode.
///
/// Modes live in the decimal digits above the two-digit opcode: the digit
/// at 10^(param + 2) selects the mode for that parameter.
///
/// # Panics
/// Panics on a digit other than 0, 1, or 2.
fn param_mode(param: u32, op: i64) -> ParamMode {
    match (op / 10i64.pow(param + 2)) % 10 {
        0 => ParamMode::Position,
        1 => ParamMode::Immediate,
        2 => ParamMode::Relative,
        _ => panic!("unexpected parameter mode"),
    }
}
/// Decodes the operation from a full opcode value: the opcode proper is the
/// low two decimal digits; higher digits carry parameter modes
/// (see `param_mode`).
///
/// # Panics
/// Panics on an unknown opcode.
fn decode_op_code(op: i64) -> OpCode {
    match op % 100 {
        1 => OpCode::Add,
        2 => OpCode::Mul,
        3 => OpCode::Input,
        4 => OpCode::Output,
        5 => OpCode::JumpIfTrue,
        6 => OpCode::JumpIfFalse,
        7 => OpCode::LessThan,
        8 => OpCode::Equals,
        9 => OpCode::AdjustsRelativeBase,
        99 => OpCode::Halt,
        _ => panic!("unexpected op"),
    }
}
/// Why `Prog::run` last returned (or `NotStarted` before the first run).
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub enum ProgState {
    /// `run` has not been called yet.
    NotStarted,
    /// Opcode 99 executed; the program is finished.
    Halt,
    /// Input was requested but none was available; `run` may be called
    /// again after more input is queued.
    NeedInput,
}
/// An Intcode interpreter: memory image plus execution cursor state.
#[derive(Clone, Debug, Hash, Eq, PartialEq)]
pub struct Prog {
    // Working memory; grows on demand when addresses beyond the initial
    // image are touched (unset cells read as 0).
    mem_state: Vec<i64>,
    // Program counter: index of the next instruction.
    pc: usize,
    // Base address added to parameters in relative mode (opcode 9 adjusts).
    relative_base: isize,
    // Why execution last stopped; see `ProgState`.
    state: ProgState,
}
impl Prog {
    /// Creates a program with a private copy of the initial memory image,
    /// ready to execute from address 0.
    pub fn new(init_mem_state: &[i64]) -> Self {
        Prog {
            // `to_vec` replaces the previous manual zero-fill followed by
            // `copy_from_slice` — one allocation, same contents.
            mem_state: init_mem_state.to_vec(),
            pc: 0,
            relative_base: 0,
            state: ProgState::NotStarted,
        }
    }
}
impl Prog {
fn get_operand(&mut self, param_num: usize, op_code: i64) -> Result<i64, Error> {
match param_mode(u32::try_from(param_num)?, op_code) {
ParamMode::Position => {
let index = usize::try_from(self.mem_state[self.pc + (param_num + 1)])?;
if index >= self.mem_state.len() {
self.mem_state.resize(index + 1, 0);
}
Ok(self.mem_state[index])
}
ParamMode::Immediate => Ok(self.mem_state[self.pc + (param_num + 1)]),
ParamMode::Relative => {
let index = usize::try_from(
isize::try_from(self.mem_state[self.pc + (param_num + 1)])?
+ self.relative_base,
)?;
if index >= self.mem_state.len() {
self.mem_state.resize(index + 1, 0);
}
Ok(self.mem_state[index])
}
}
}
fn store_value(&mut self, value: i64, param_num: usize, op_code: i64) -> Result<(), Error> {
match param_mode(u32::try_from(param_num)?, op_code) {
ParamMode::Position => {
let index = usize::try_from(self.mem_state[self.pc + (param_num + 1)])?;
if index >= self.mem_state.len() {
self.mem_state.resize(index + 1, 0);
}
self.mem_state[index] = value;
Ok(())
}
ParamMode::Immediate => unreachable!(),
ParamMode::Relative => {
let index = usize::try_from(
isize::try_from(self.mem_state[self.pc + (param_num + 1)])?
+ self.relative_base,
)?;
if index >= self.mem_state.len() {
self.mem_state.resize(index + 1, 0);
}
self.mem_state[index] = value;
Ok(())
}
}
}
/// Runs a program given an initial memory state.
pub fn run<T, S>(&mut self, input: &mut T, output: &mut S) -> Result<(), Error>
where
T: ProgInput,
S: ProgOutput,
{
loop {
let op_code = self.mem_state[self.pc];
match decode_op_code(op_code) {
OpCode::Add => {
let operand_0 = self.get_operand(0, op_code)?;
let operand_1 = self.get_operand(1, op_code)?;
self.store_value(operand_0 + operand_1, 2, op_code)?;
self.pc += 4;
}
OpCode::Mul => {
let operand_0 = self.get_operand(0, op_code)?;
let operand_1 = self.get_operand(1, op_code)?;
self.store_value(operand_0 * operand_1, 2, op_code)?;
self.pc += 4;
}
OpCode::Input => {
let input = match input.read() {
Ok(v) => v,
Err(Error::NoAvailableInput) => {
self.state = ProgState::NeedInput;
return Ok(());
}
Err(e) => return Err(e),
};
let input = input.trim().parse::<i64>()?;
self.store_value(input, 0, op_code)?;
self.pc += 2;
}
OpCode::Output => {
let operand_0 = self.get_operand(0, op_code)?;
output.write(&format!("{}", operand_0))?;
self.pc += 2;
}
OpCode::JumpIfTrue => {
let operand_0 = self.get_operand(0, op_code)?;
if operand_0 != 0 {
let operand_1 = self.get_operand(1, op_code)?;
self.pc = usize::try_from(operand_1)?;
} else {
self.pc += 3;
}
}
OpCode::JumpIfFalse => {
let operand_0 = self.get_operand(0, op_code)?;
if operand_0 == 0 {
let operand_1 = self.get_operand(1, op_code)?;
self.pc = usize::try_from(operand_1)?;
} else {
self.pc += 3;
}
}
OpCode::LessThan => {
let operand_0 = self.get_operand(0, op_code)?;
let operand_1 = self.get_operand(1, op_code)?;
self.store_value(if operand_0 < operand_1 { 1 } else { 0 }, 2, op_code)?;
self.pc += 4;
}
OpCode::Equals => {
let operand_0 = self.get_operand(0, op_code)?;
let operand_1 = self.get_operand(1, op_code)?;
self.store_value(if operand_0 == operand_1 { 1 } else { 0 }, 2, op_code)?;
self.pc += 4;
}
OpCode::AdjustsRelativeBase => {
let operand_0 = self.get_operand(0, op_code)?;
self.relative_base =
isize::try_from(i64::try_from(self.relative_base)? + operand_0)?;
self.pc += 2;
}
OpCode::Halt => {
self.state = ProgState::Halt;
return Ok(());
}
}
}
}
}
/// Queue-backed input source used to feed amplifier programs; signals
/// `NoAvailableInput` when drained so programs pause instead of failing.
#[derive(Clone, Debug, Hash, Eq, PartialEq)]
struct VecDequeProgInput {
    data: VecDeque<String>,
}
impl VecDequeProgInput {
    fn new() -> Self {
        VecDequeProgInput {
            data: VecDeque::new(),
        }
    }
}
impl ProgInput for VecDequeProgInput {
fn read(&mut self) -> Result<String, Error> {
if let Some(value) = self.data.pop_front() {
Ok(value)
} else {
Err(Error::NoAvailableInput)
}
}
}
/// In-memory output sink that records every written line (used to pipe one
/// amplifier's output into the next one's input).
#[derive(Clone, Debug, Hash, Eq, PartialEq)]
struct VecProgOutput {
    data: Vec<String>,
}
impl VecProgOutput {
    fn new() -> Self {
        VecProgOutput { data: Vec::new() }
    }
}
impl ProgOutput for VecProgOutput {
    fn write(&mut self, output: &str) -> Result<(), Error> {
        self.data.push(output.to_string());
        Ok(())
    }
}
use std::ops::Range;
/// Recursively builds every permutation of `count` distinct values drawn
/// from `rng`, extending `existing_input` (values already used are
/// skipped). Returns an empty vector when `count <= 0`.
fn build_input(existing_input: &[i64], rng: Range<i64>, count: i64) -> Vec<Vec<i64>> {
    if count <= 0 {
        return vec![];
    }
    // One-step extensions: append each unused candidate value.
    let extended: Vec<Vec<i64>> = rng
        .clone()
        .filter(|i| !existing_input.contains(i))
        .map(|i| {
            let mut next = existing_input.to_vec();
            next.push(i);
            next
        })
        .collect();
    if count == 1 {
        extended
    } else {
        // Recurse to place the remaining `count - 1` values.
        extended
            .into_iter()
            .flat_map(|prefix| build_input(&prefix, rng.clone(), count - 1))
            .collect()
    }
}
/// Tries every permutation of phase settings 0..=4 and returns the phase
/// sequence producing the highest thrust signal, or `None` if no run
/// produced a signal.
pub fn find_max_thrust_signal(init_mem_state: &[i64]) -> Result<Option<(Vec<i64>, i64)>, Error> {
    let mut best: Option<(Vec<i64>, i64)> = None;
    for phases in build_input(&[], 0..5, 5) {
        if let Some(signal) = run_amplifiers_in_feedback_loop(init_mem_state, &phases)? {
            let improved = best.as_ref().map_or(true, |(_, s)| *s < signal);
            if improved {
                best = Some((phases, signal));
            }
        }
    }
    Ok(best)
}
/// Tries every permutation of phase settings 5..=9 (feedback-loop mode) and
/// returns the phase sequence producing the highest thrust signal, or
/// `None` if no run produced a signal.
pub fn find_max_thrust_signal_in_feedback_loop(
    init_mem_state: &[i64],
) -> Result<Option<(Vec<i64>, i64)>, Error> {
    let mut best: Option<(Vec<i64>, i64)> = None;
    for phases in build_input(&[], 5..10, 5) {
        if let Some(signal) = run_amplifiers_in_feedback_loop(init_mem_state, &phases)? {
            let improved = best.as_ref().map_or(true, |(_, s)| *s < signal);
            if improved {
                best = Some((phases, signal));
            }
        }
    }
    Ok(best)
}
/// Runs one amplifier per phase setting, chaining each amp's output into
/// the next amp's input (wrapping around), until the last amplifier halts;
/// returns its final output value parsed as `i64`.
fn run_amplifiers_in_feedback_loop(
    init_mem_state: &[i64],
    inputs: &[i64],
) -> Result<Option<i64>, Error> {
    /// One amplifier: its program plus its pending input queue.
    struct Amp {
        prog: Prog,
        prog_input: VecDequeProgInput,
    }
    let mut amps = Vec::<Amp>::with_capacity(inputs.len());
    for input in inputs {
        // Each amp's first input is its phase setting. `Prog::new` copies
        // the memory image itself, so the redundant intermediate buffer the
        // original allocated here is unnecessary.
        let mut prog_input = VecDequeProgInput::new();
        prog_input.data.push_back(input.to_string());
        amps.push(Amp {
            prog: Prog::new(init_mem_state),
            prog_input,
        });
    }
    // The first amplifier additionally receives the seed signal 0.
    amps[0].prog_input.data.push_back(0.to_string());
    let mut prog_output = VecProgOutput::new();
    loop {
        for amp in &mut amps {
            // Feed the previous amp's output into this amp's input queue,
            // then run it until it halts or needs more input.
            prog_output.data.iter().for_each(|o| {
                amp.prog_input.data.push_back(o.to_string());
            });
            prog_output = VecProgOutput::new();
            amp.prog.run(&mut amp.prog_input, &mut prog_output)?;
        }
        // When the final amp halts, all should have halted in lock-step,
        // and its last output is the thrust signal.
        if amps[amps.len() - 1].prog.state == ProgState::Halt {
            assert!(amps.iter().all(|a| a.prog.state == ProgState::Halt));
            return Ok(Some(prog_output.data.pop().unwrap().parse::<i64>()?));
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[derive(Clone, Debug, Hash, Eq, PartialEq)]
struct TestInput {
input: Vec<String>,
}
impl TestInput {
fn new(mut input: Vec<String>) -> Self {
input.reverse();
TestInput { input }
}
}
impl ProgInput for TestInput {
fn read(&mut self) -> Result<String, Error> {
if let Some(input) = self.input.pop() {
Ok(input)
} else {
Err(Error::IoErr(io::Error::from(io::ErrorKind::UnexpectedEof)))
}
}
}
#[derive(Clone, Debug, Hash, Eq, PartialEq)]
struct TestOutput {
output: Vec<String>,
}
impl TestOutput {
fn new() -> Self {
TestOutput { output: Vec::new() }
}
}
impl ProgOutput for TestOutput {
fn write(&mut self, output: &str) -> Result<(), Error> {
self.output.push(output.to_string());
Ok(())
}
}
#[test]
fn param_mode_0() {
assert_eq!(param_mode(0, 101), ParamMode::Immediate);
assert_eq!(param_mode(0, 1), ParamMode::Position);
assert_eq!(param_mode(0, 201), ParamMode::Relative);
}
#[test]
fn param_mode_1() {
assert_eq!(param_mode(1, 1101), ParamMode::Immediate);
assert_eq!(param_mode(1, 1001), ParamMode::Immediate);
assert_eq!(param_mode(1, 101), ParamMode::Position);
assert_eq!(param_mode(1, 1), ParamMode::Position);
assert_eq!(param_mode(1, 2101), ParamMode::Relative);
assert_eq!(param_mode(1, 2001), ParamMode::Relative);
}
#[test]
fn param_mode_2() {
assert_eq!(param_mode(2, 1101), ParamMode::Position);
assert_eq!(param_mode(2, 1001), ParamMode::Position);
assert_eq!(param_mode(2, 101), ParamMode::Position);
assert_eq!(param_mode(2, 1), ParamMode::Position);
assert_eq!(param_mode(2, 20001), ParamMode::Relative);
assert_eq!(param_mode(2, 21001), ParamMode::Relative);
assert_eq!(param_mode(2, 22001), ParamMode::Relative);
}
fn run_prog_no_input_or_output(mem_state: &[i64]) -> Prog {
let mut test_output = TestOutput::new();
let mut prog = Prog::new(&mem_state);
prog.run(&mut TestInput::new(vec![]), &mut test_output)
.unwrap();
assert_eq!(ProgState::Halt, prog.state);
assert!(test_output.output.is_empty());
prog
}
#[test]
fn day2_ex1() {
let mem_state = vec![1, 9, 10, 3, 2, 3, 11, 0, 99, 30, 40, 50];
let prog = run_prog_no_input_or_output(&mem_state);
assert_eq!(
vec![3500, 9, 10, 70, 2, 3, 11, 0, 99, 30, 40, 50],
prog.mem_state
);
}
#[test]
fn day2_ex2() {
let mem_state = vec![1, 0, 0, 0, 99];
let prog = run_prog_no_input_or_output(&mem_state);
assert_eq!(vec![2, 0, 0, 0, 99], prog.mem_state);
}
#[test]
fn day2_ex3() {
let mem_state = vec![2, 3, 0, 3, 99];
let prog = run_prog_no_input_or_output(&mem_state);
assert_eq!(vec![2, 3, 0, 6, 99], prog.mem_state);
}
#[test]
fn day2_ex4() {
let mem_state = vec![2, 4, 4, 5, 99, 0];
let prog = run_prog_no_input_or_output(&mem_state);
assert_eq!(vec![2, 4, 4, 5, 99, 9801], prog.mem_state);
}
#[test]
fn day2_ex5() {
let mem_state = vec![1, 1, 1, 4, 99, 5, 6, 0, 99];
let prog = run_prog_no_input_or_output(&mem_state);
assert_eq!(vec![30, 1, 1, 4, 2, 5, 6, 0, 99], prog.mem_state);
}
#[test]
fn day5_ex1() {
let mem_state = vec![3, 0, 4, 0, 99];
let mut test_output = TestOutput::new();
let x = String::from("42");
let mut prog = Prog::new(&mem_state);
prog.run(&mut TestInput::new(vec![x.clone()]), &mut test_output)
.unwrap();
assert_eq!(ProgState::Halt, prog.state);
assert_eq!(vec![x], test_output.output);
assert_eq!(vec![42, 0, 4, 0, 99], prog.mem_state);
}
#[test]
fn day5_ex2() {
let mem_state = vec![1002, 4, 3, 4, 33];
let prog = run_prog_no_input_or_output(&mem_state);
assert_eq!(vec![1002, 4, 3, 4, 99], prog.mem_state);
}
#[test]
// Position-mode comparison program: input "8" produces output "1".
fn day5_ex3() {
    let mem_state = vec![3, 9, 8, 9, 10, 9, 4, 9, 99, -1, 8];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("8")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["1"], test_output.output);
    // Cell 9 holds the comparison result after the run.
    assert_eq!(vec![3, 9, 8, 9, 10, 9, 4, 9, 99, 1, 8], prog.mem_state);
}
#[test]
// Same program as day5_ex3: input "7" produces output "0".
fn day5_ex5() {
    let mem_state = vec![3, 9, 8, 9, 10, 9, 4, 9, 99, -1, 8];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("7")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["0"], test_output.output);
    assert_eq!(vec![3, 9, 8, 9, 10, 9, 4, 9, 99, 0, 8], prog.mem_state);
}
#[test]
// Position-mode program with opcode 7: input "8" produces output "0".
fn day5_ex6() {
    let mem_state = vec![3, 9, 7, 9, 10, 9, 4, 9, 99, -1, 8];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("8")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["0"], test_output.output);
    assert_eq!(vec![3, 9, 7, 9, 10, 9, 4, 9, 99, 0, 8], prog.mem_state);
}
#[test]
// Same program as day5_ex6: input "7" produces output "1".
fn day5_ex7() {
    let mem_state = vec![3, 9, 7, 9, 10, 9, 4, 9, 99, -1, 8];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("7")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["1"], test_output.output);
    assert_eq!(vec![3, 9, 7, 9, 10, 9, 4, 9, 99, 1, 8], prog.mem_state);
}
#[test]
// Immediate-mode program with opcode 1108: input "8" produces output "1".
fn day5_ex8() {
    let mem_state = vec![3, 3, 1108, -1, 8, 3, 4, 3, 99];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("8")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["1"], test_output.output);
    assert_eq!(vec![3, 3, 1108, 1, 8, 3, 4, 3, 99], prog.mem_state);
}
#[test]
// Same program as day5_ex8: input "7" produces output "0".
fn day5_ex9() {
    let mem_state = vec![3, 3, 1108, -1, 8, 3, 4, 3, 99];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("7")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["0"], test_output.output);
    assert_eq!(vec![3, 3, 1108, 0, 8, 3, 4, 3, 99], prog.mem_state);
}
#[test]
// Immediate-mode program with opcode 1107: input "8" produces output "0".
fn day5_ex10() {
    let mem_state = vec![3, 3, 1107, -1, 8, 3, 4, 3, 99];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("8")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["0"], test_output.output);
    assert_eq!(vec![3, 3, 1107, 0, 8, 3, 4, 3, 99], prog.mem_state);
}
#[test]
// Same program as day5_ex10: input "7" produces output "1".
fn day5_ex11() {
    let mem_state = vec![3, 3, 1107, -1, 8, 3, 4, 3, 99];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("7")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["1"], test_output.output);
    assert_eq!(vec![3, 3, 1107, 1, 8, 3, 4, 3, 99], prog.mem_state);
}
#[test]
// Jump program using opcode 6: input "0" produces output "0".
fn day5_ex12() {
    let mem_state = vec![3, 12, 6, 12, 15, 1, 13, 14, 13, 4, 13, 99, -1, 0, 1, 9];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("0")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["0"], test_output.output);
    assert_eq!(
        vec![3, 12, 6, 12, 15, 1, 13, 14, 13, 4, 13, 99, 0, 0, 1, 9],
        prog.mem_state
    );
}
#[test]
// Same program as day5_ex12: nonzero input "1" produces output "1".
fn day5_ex13() {
    let mem_state = vec![3, 12, 6, 12, 15, 1, 13, 14, 13, 4, 13, 99, -1, 0, 1, 9];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("1")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["1"], test_output.output);
    assert_eq!(
        vec![3, 12, 6, 12, 15, 1, 13, 14, 13, 4, 13, 99, 1, 1, 1, 9],
        prog.mem_state
    );
}
#[test]
// Jump program using opcode 1105: input "0" produces output "0".
fn day5_ex14() {
    let mem_state = vec![3, 3, 1105, -1, 9, 1101, 0, 0, 12, 4, 12, 99, 1];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("0")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["0"], test_output.output);
    assert_eq!(
        vec![3, 3, 1105, 0, 9, 1101, 0, 0, 12, 4, 12, 99, 0],
        prog.mem_state
    );
}
#[test]
// Same program as day5_ex14: nonzero input "1" produces output "1".
fn day5_ex15() {
    let mem_state = vec![3, 3, 1105, -1, 9, 1101, 0, 0, 12, 4, 12, 99, 1];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("1")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["1"], test_output.output);
    assert_eq!(
        vec![3, 3, 1105, 1, 9, 1101, 0, 0, 12, 4, 12, 99, 1],
        prog.mem_state
    );
}
#[test]
// Larger branching program: input "7" produces output "999".
fn day5_ex16() {
    let mem_state = vec![
        3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36, 98, 0,
        0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101, 1000, 1, 20, 4,
        20, 1105, 1, 46, 98, 99,
    ];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("7")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["999"], test_output.output);
    assert_eq!(
        vec![
            3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36,
            98, 0, 7, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101, 1000,
            1, 20, 4, 20, 1105, 1, 46, 98, 99
        ],
        prog.mem_state
    );
}
#[test]
// Same program as day5_ex16: input "8" produces output "1000".
fn day5_ex17() {
    let mem_state = vec![
        3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36, 98, 0,
        0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101, 1000, 1, 20, 4,
        20, 1105, 1, 46, 98, 99,
    ];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("8")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["1000"], test_output.output);
    assert_eq!(
        vec![
            3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36,
            98, 1000, 8, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101,
            1000, 1, 20, 4, 20, 1105, 1, 46, 98, 99
        ],
        prog.mem_state
    );
}
#[test]
// Same program as day5_ex16: input "9" produces output "1001".
fn day5_ex18() {
    let mem_state = vec![
        3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36, 98, 0,
        0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101, 1000, 1, 20, 4,
        20, 1105, 1, 46, 98, 99,
    ];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(
        &mut TestInput::new(vec![String::from("9")]),
        &mut test_output,
    )
    .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec!["1001"], test_output.output);
    assert_eq!(
        vec![
            3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36,
            98, 1001, 9, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101,
            1000, 1, 20, 4, 20, 1105, 1, 46, 98, 99
        ],
        prog.mem_state
    );
}
#[test]
// build_input(&[], 0..5, k) yields every ordered k-tuple of distinct values
// drawn from 0..5, in lexicographic order (per the expected vectors below).
fn test_build_input() {
    // k = 1: the five singletons.
    assert_eq!(
        vec![vec![0], vec![1], vec![2], vec![3], vec![4],],
        build_input(&[], 0..5, 1)
    );
    // k = 2: all 20 ordered pairs without repetition.
    assert_eq!(
        vec![
            vec![0, 1],
            vec![0, 2],
            vec![0, 3],
            vec![0, 4],
            vec![1, 0],
            vec![1, 2],
            vec![1, 3],
            vec![1, 4],
            vec![2, 0],
            vec![2, 1],
            vec![2, 3],
            vec![2, 4],
            vec![3, 0],
            vec![3, 1],
            vec![3, 2],
            vec![3, 4],
            vec![4, 0],
            vec![4, 1],
            vec![4, 2],
            vec![4, 3]
        ],
        build_input(&[], 0..5, 2)
    );
}
#[test]
// Amplifier chain with phase order [4,3,2,1,0] yields signal 43210.
fn day7_ex1() {
    let mut mem_state = vec![
        3, 15, 3, 16, 1002, 16, 10, 16, 1, 16, 15, 15, 4, 15, 99, 0, 0,
    ];
    let result = run_amplifiers_in_feedback_loop(&mut mem_state, &[4, 3, 2, 1, 0]).unwrap();
    assert_eq!(result, Some(43210));
}
#[test]
// The search over phase orders finds [4,3,2,1,0] as the maximum (43210).
fn day7_ex2() {
    let mut mem_state = vec![
        3, 15, 3, 16, 1002, 16, 10, 16, 1, 16, 15, 15, 4, 15, 99, 0, 0,
    ];
    let result = find_max_thrust_signal(&mut mem_state).unwrap().unwrap();
    assert_eq!(result.0, vec![4, 3, 2, 1, 0]);
    assert_eq!(result.1, 43210);
}
#[test]
// Second amplifier program: phase order [0,1,2,3,4] yields 54321.
fn day7_ex3() {
    let mut mem_state = vec![
        3, 23, 3, 24, 1002, 24, 10, 24, 1002, 23, -1, 23, 101, 5, 23, 23, 1, 24, 23, 23, 4, 23,
        99, 0, 0,
    ];
    let result = run_amplifiers_in_feedback_loop(&mut mem_state, &[0, 1, 2, 3, 4]).unwrap();
    assert_eq!(result, Some(54321));
}
#[test]
// The search finds [0,1,2,3,4] as the maximum (54321) for the same program.
fn day7_ex4() {
    let mut mem_state = vec![
        3, 23, 3, 24, 1002, 24, 10, 24, 1002, 23, -1, 23, 101, 5, 23, 23, 1, 24, 23, 23, 4, 23,
        99, 0, 0,
    ];
    let result = find_max_thrust_signal(&mut mem_state).unwrap().unwrap();
    assert_eq!(result.0, vec![0, 1, 2, 3, 4]);
    assert_eq!(result.1, 54321);
}
#[test]
// Third amplifier program: phase order [1,0,4,3,2] yields 65210.
fn day7_ex5() {
    let mut mem_state = vec![
        3, 31, 3, 32, 1002, 32, 10, 32, 1001, 31, -2, 31, 1007, 31, 0, 33, 1002, 33, 7, 33, 1,
        33, 31, 31, 1, 32, 31, 31, 4, 31, 99, 0, 0, 0,
    ];
    let result = run_amplifiers_in_feedback_loop(&mut mem_state, &[1, 0, 4, 3, 2]).unwrap();
    assert_eq!(result, Some(65210));
}
#[test]
// The search finds [1,0,4,3,2] as the maximum (65210) for the same program.
fn day7_ex6() {
    let mut mem_state = vec![
        3, 31, 3, 32, 1002, 32, 10, 32, 1001, 31, -2, 31, 1007, 31, 0, 33, 1002, 33, 7, 33, 1,
        33, 31, 31, 1, 32, 31, 31, 4, 31, 99, 0, 0, 0,
    ];
    let result = find_max_thrust_signal(&mut mem_state).unwrap().unwrap();
    assert_eq!(result.0, vec![1, 0, 4, 3, 2]);
    assert_eq!(result.1, 65210);
}
#[test]
// Feedback-loop program: phase order [9,8,7,6,5] yields 139629729.
fn day7_ex7() {
    let mut mem_state = vec![
        3, 26, 1001, 26, -4, 26, 3, 27, 1002, 27, 2, 27, 1, 27, 26, 27, 4, 27, 1001, 28, -1,
        28, 1005, 28, 6, 99, 0, 0, 5,
    ];
    let result = run_amplifiers_in_feedback_loop(&mut mem_state, &[9, 8, 7, 6, 5]).unwrap();
    assert_eq!(result, Some(139629729));
}
#[test]
// Feedback-loop search finds [9,8,7,6,5] as the maximum (139629729).
fn day7_ex8() {
    let mut mem_state = vec![
        3, 26, 1001, 26, -4, 26, 3, 27, 1002, 27, 2, 27, 1, 27, 26, 27, 4, 27, 1001, 28, -1,
        28, 1005, 28, 6, 99, 0, 0, 5,
    ];
    let result = find_max_thrust_signal_in_feedback_loop(&mut mem_state)
        .unwrap()
        .unwrap();
    assert_eq!(result.0, vec![9, 8, 7, 6, 5]);
    assert_eq!(result.1, 139629729);
}
#[test]
// Second feedback-loop program: phase order [9,7,8,5,6] yields 18216.
fn day7_ex9() {
    let mut mem_state = vec![
        3, 52, 1001, 52, -5, 52, 3, 53, 1, 52, 56, 54, 1007, 54, 5, 55, 1005, 55, 26, 1001, 54,
        -5, 54, 1105, 1, 12, 1, 53, 54, 53, 1008, 54, 0, 55, 1001, 55, 1, 55, 2, 53, 55, 53, 4,
        53, 1001, 56, -1, 56, 1005, 56, 6, 99, 0, 0, 0, 0, 10,
    ];
    let result = run_amplifiers_in_feedback_loop(&mut mem_state, &[9, 7, 8, 5, 6]).unwrap();
    assert_eq!(result, Some(18216));
}
#[test]
// Feedback-loop search finds [9,7,8,5,6] as the maximum (18216).
fn day7_ex10() {
    let mut mem_state = vec![
        3, 52, 1001, 52, -5, 52, 3, 53, 1, 52, 56, 54, 1007, 54, 5, 55, 1005, 55, 26, 1001, 54,
        -5, 54, 1105, 1, 12, 1, 53, 54, 53, 1008, 54, 0, 55, 1001, 55, 1, 55, 2, 53, 55, 53, 4,
        53, 1001, 56, -1, 56, 1005, 56, 6, 99, 0, 0, 0, 0, 10,
    ];
    let result = find_max_thrust_signal_in_feedback_loop(&mut mem_state)
        .unwrap()
        .unwrap();
    assert_eq!(result.0, vec![9, 7, 8, 5, 6]);
    assert_eq!(result.1, 18216);
}
#[test]
// Quine program: the output equals the program's own memory image.
fn day9_ex1() {
    let mem_state = vec![
        109, 1, 204, -1, 1001, 100, 1, 100, 1008, 100, 16, 101, 1006, 101, 0, 99,
    ];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(&mut TestInput::new(vec![]), &mut test_output)
        .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    // Expected output is the program itself, stringified cell by cell.
    let expected_output: Vec<String> = vec![
        109, 1, 204, -1, 1001, 100, 1, 100, 1008, 100, 16, 101, 1006, 101, 0, 99,
    ]
    .into_iter()
    .map(|i| i.to_string())
    .collect();
    assert_eq!(expected_output, test_output.output);
}
#[test]
// Multiplies two large numbers; the product needs 64-bit arithmetic.
fn day9_ex2() {
    let mem_state = vec![1102, 34915192, 34915192, 7, 4, 7, 99, 0];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(&mut TestInput::new(vec![]), &mut test_output)
        .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec![1219_0706_3239_6864i64.to_string()], test_output.output);
}
#[test]
// Outputs a large literal directly; verifies i64-sized immediate values.
fn day9_ex3() {
    let mem_state = vec![104, 1125899906842624, 99];
    let mut test_output = TestOutput::new();
    let mut prog = Prog::new(&mem_state);
    prog.run(&mut TestInput::new(vec![]), &mut test_output)
        .unwrap();
    assert_eq!(ProgState::Halt, prog.state);
    assert_eq!(vec![1125899906842624i64.to_string()], test_output.output);
}
}
|
use futures::{Future, Stream};
use hyper_tls::HttpsConnector;
use hyper::Client;
use tokio_core::reactor::Core;
use serde_json;
use std::string::String;
use std::str;
use std;
/// Top-level JSON payload returned by the markets API.
#[derive(Debug, Serialize, Deserialize)]
#[allow(non_snake_case)]
pub struct CurrencyPrice {
    pub results: Vec<Res>,
}
/// A single market entry from the API response.
// Field names mirror the API's camelCase JSON keys; the allow attribute
// was previously only on CurrencyPrice, leaving non_snake_case warnings
// on `marginalPrices` (and on the camelCase fields of Oracle and
// EventDescription below).
#[derive(Serialize, Deserialize, Debug)]
#[allow(non_snake_case)]
pub struct Res {
    pub contract: Contract,
    pub marginalPrices: Vec<String>,
    pub event: Event,
}
/// Contract reference holding the market's address string.
#[derive(Serialize, Deserialize, Debug)]
pub struct Contract {
    pub address: String,
}
/// Event metadata for a market.
#[derive(Serialize, Deserialize, Debug)]
pub struct Event {
    pub oracle: Oracle,
    /// The JSON key is "type", which is a reserved word in Rust.
    #[serde(rename="type")]
    pub type_name: String,
}
/// Oracle information for an event.
#[derive(Serialize, Deserialize, Debug)]
#[allow(non_snake_case)]
pub struct Oracle {
    pub eventDescription: EventDescription,
}
/// Human-readable description of an event, plus its IPFS hash.
#[derive(Serialize, Deserialize, Debug)]
#[allow(non_snake_case)]
pub struct EventDescription {
    pub title: String,
    pub ipfsHash: String,
}
/// Fetches open-market data from the Olympia/Helena HTTP API.
pub struct Crawler {
}
impl Crawler {
    /// Creates a new, stateless `Crawler`.
    pub fn new() -> Crawler {
        Crawler {}
    }
    /// Parses an API response body into the list of market results.
    ///
    /// Panics if `content` is not valid JSON matching [`CurrencyPrice`].
    fn parse_content(&self, content: String) -> Vec<Res> {
        // `&content` derefs to `&str`; the original cloned a `&str`
        // reference, which is a no-op clone.
        let parsed: CurrencyPrice = serde_json::from_str(&content).unwrap();
        parsed.results
    }
    /// Performs a blocking HTTPS GET and returns the response body as a
    /// `String`.
    ///
    /// Panics on reactor, connector, URL-parse, transport, or UTF-8 errors.
    fn http_get(&self, url: &str) -> String {
        let mut core = Core::new().unwrap();
        let client = Client::configure()
            .connector(HttpsConnector::new(4, &core.handle()).unwrap())
            .build(&core.handle());
        let uri = url.parse().unwrap();
        // Collect the whole body, then decode it as UTF-8.
        let work = client
            .get(uri)
            .and_then(|res| res.body().concat2())
            .map(|chunk| std::str::from_utf8(&chunk).expect("error handling").to_string());
        core.run(work).unwrap()
    }
    /// Downloads and parses all markets whose outcome is not yet set.
    pub fn get_markets(&self) -> Vec<Res> {
        let url = "https://olympia-api.helena.network/api/markets/?event_oracle_is_outcome_set=false";
        let content = self.http_get(url);
        self.parse_content(content)
    }
}
|
#![allow(dead_code)]
// NOTE(review): the crate-wide allow(dead_code) suppresses unused-item
// warnings across all submodules below — confirm it is still wanted.
// Submodule declarations; their contents are defined in sibling files.
pub mod algebra;
pub mod collection;
pub mod planning;
pub mod sat;
pub mod stn;
pub mod two_sat;
|
extern crate sysfs_gpio;
use rand::prelude::*;
use std::thread::sleep;
use std::time::Duration;
use sysfs_gpio::{Direction, Pin, Edge};
/// Entry point: runs the touch-controlled RGB LED demo.
fn main() {
    rgb_touch();
}
/// Blinks the LED on GPIO pin 17 ten times, then leaves it off.
///
/// # Panics
/// Panics (via the outer `unwrap`) if the pin cannot be exported,
/// configured, or driven.
fn flash_led() {
    println!("Hello, world!");
    let r_led = Pin::new(17); // pin number depends on chip/board
    r_led.with_exported(|| {
        // When running as non-root, need to wait a few ms (100 or so)
        // for exported pin to become available
        sleep(Duration::from_millis(100));
        // Propagate failures with `?` like the sibling calls instead of
        // unwrapping inside a closure that already returns Result; the
        // outer `.unwrap()` still aborts on any failure.
        r_led.set_direction(Direction::Out)?;
        for _ in 0..10 {
            r_led.set_value(0)?;
            sleep(Duration::from_millis(200));
            r_led.set_value(1)?;
            sleep(Duration::from_millis(200));
        }
        // Leave the LED off when done.
        r_led.set_value(0)?;
        Ok(())
    }).unwrap();
}
/// Touch-controlled RGB LED demo.
///
/// Exports three LED pins and a touch-sensor pin, then polls the sensor up
/// to 1000 times: a poll timeout lights all three LEDs; a sensed edge
/// drives one randomly chosen LED with the sensed value.
///
/// Error handling is deliberately mixed here: `?` failures inside the
/// closure are swallowed by the trailing `unwrap_or(())`, while the
/// `set_direction`/`get_poller`/`poll` unwraps panic — NOTE(review):
/// confirm that distinction is intentional.
fn rgb_touch() {
    let mut rng = rand::thread_rng();
    let b_led = Pin::new(23); // number depends on chip, etc.s
    let g_led = Pin::new(24); // number depends on chip, etc.s
    let r_led = Pin::new(25); // number depends on chip, etc.s
    let touch_sensor = Pin::new(5);
    touch_sensor.with_exported(|| {
        // Export the LED pins alongside the already-exported sensor pin.
        b_led.export()?;
        g_led.export()?;
        r_led.export()?;
        // Give sysfs time to expose the newly exported pins (non-root).
        sleep(Duration::from_millis(100));
        r_led.set_direction(Direction::Out).unwrap();
        g_led.set_direction(Direction::Out).unwrap();
        b_led.set_direction(Direction::Out).unwrap();
        touch_sensor.set_direction(Direction::In).unwrap();
        touch_sensor.set_edge(Edge::FallingEdge)?;
        let mut touch_poller = touch_sensor.get_poller().unwrap();
        for i in 0..1000 {
            // poll(1000) — presumably a millisecond timeout; None on timeout.
            let touch_value = touch_poller.poll(1000).unwrap();
            println!("{}: {:?}", i, touch_value);
            // Pick one of the three LEDs at random (old two-arg rand API).
            let led = match rng.gen_range(0, 3) {
                0 => r_led,
                1 => g_led,
                _ => b_led
            };
            match touch_value {
                None => {
                    // Timeout: switch all LEDs on.
                    r_led.set_value(1)?;
                    g_led.set_value(1)?;
                    b_led.set_value(1)?;
                },
                Some(val) => {
                    led.set_value(val)?;
                },
            }
        }
        Ok(())
    }).unwrap_or(());
    // Best-effort cleanup; unexport failures are ignored.
    r_led.unexport().unwrap_or(());
    b_led.unexport().unwrap_or(());
    g_led.unexport().unwrap_or(());
}
|
#[cfg(all(not(target_arch = "wasm32"), test))]
mod test;
use liblumen_alloc::erts::term::prelude::*;
#[native_implemented::function(erlang:is_bitstring/1)]
pub fn result(term: Term) -> Term {
    // Check the term, then convert the boolean answer back into a Term.
    let is_bitstring = term.is_bitstring();
    is_bitstring.into()
}
|
//! The primary module containing microcontroller-specific core definitions

// Pattern used below: each MCU's `pub mod` is compiled in when either its
// `avr_mcu_*` cfg flag or the "all-mcus" feature is set, and the matching
// cfg flag alone additionally re-exports that module as `current`, so
// downstream code can refer to the active target's definitions uniformly.

/// The ATmega88.
#[cfg(any(avr_mcu_atmega88, feature = "all-mcus"))] pub mod atmega88;
#[cfg(avr_mcu_atmega88)] pub use self::atmega88 as current;
/// The ATmega48A.
#[cfg(any(avr_mcu_atmega48a, feature = "all-mcus"))] pub mod atmega48a;
#[cfg(avr_mcu_atmega48a)] pub use self::atmega48a as current;
/// The ATmega168A.
#[cfg(any(avr_mcu_atmega168a, feature = "all-mcus"))] pub mod atmega168a;
#[cfg(avr_mcu_atmega168a)] pub use self::atmega168a as current;
/// The ATmega88P.
#[cfg(any(avr_mcu_atmega88p, feature = "all-mcus"))] pub mod atmega88p;
#[cfg(avr_mcu_atmega88p)] pub use self::atmega88p as current;
/// The ATmega168P.
#[cfg(any(avr_mcu_atmega168p, feature = "all-mcus"))] pub mod atmega168p;
#[cfg(avr_mcu_atmega168p)] pub use self::atmega168p as current;
/// The ATmega88PA.
#[cfg(any(avr_mcu_atmega88pa, feature = "all-mcus"))] pub mod atmega88pa;
#[cfg(avr_mcu_atmega88pa)] pub use self::atmega88pa as current;
/// The ATmega168.
#[cfg(any(avr_mcu_atmega168, feature = "all-mcus"))] pub mod atmega168;
#[cfg(avr_mcu_atmega168)] pub use self::atmega168 as current;
/// The ATmega328P.
#[cfg(any(avr_mcu_atmega328p, feature = "all-mcus"))] pub mod atmega328p;
#[cfg(avr_mcu_atmega328p)] pub use self::atmega328p as current;
/// The ATmega48PA.
#[cfg(any(avr_mcu_atmega48pa, feature = "all-mcus"))] pub mod atmega48pa;
#[cfg(avr_mcu_atmega48pa)] pub use self::atmega48pa as current;
/// The ATmega168PA.
#[cfg(any(avr_mcu_atmega168pa, feature = "all-mcus"))] pub mod atmega168pa;
#[cfg(avr_mcu_atmega168pa)] pub use self::atmega168pa as current;
/// The ATmega48P.
#[cfg(any(avr_mcu_atmega48p, feature = "all-mcus"))] pub mod atmega48p;
#[cfg(avr_mcu_atmega48p)] pub use self::atmega48p as current;
/// The ATmega328.
///
/// This device is chosen as the default when the crate is targeting non-AVR devices.
#[cfg(any(avr_mcu_atmega328, feature = "all-mcus", not(target_arch = "avr")))] pub mod atmega328;
#[cfg(any(avr_mcu_atmega328, not(target_arch = "avr")))] pub use self::atmega328 as current;
/// The ATmega88A.
#[cfg(any(avr_mcu_atmega88a, feature = "all-mcus"))] pub mod atmega88a;
#[cfg(avr_mcu_atmega88a)] pub use self::atmega88a as current;
/// The ATmega48.
#[cfg(any(avr_mcu_atmega48, feature = "all-mcus"))] pub mod atmega48;
#[cfg(avr_mcu_atmega48)] pub use self::atmega48 as current;
|
extern crate reqwest;
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use serde_json::json;
use crate::settings::{load_settings, Settings};
/// A single addon record as returned by the web API.
#[derive(Debug)]
#[derive(Serialize, Deserialize)]
pub struct WebAddon {
    pub(crate) addon_name: String,
    addon_foldername: String,
    pub(crate) addon_uuid: String,
    addon_version: String,
}
/// Minimal record used when listing addon groups: just the group's UUID.
#[derive(Debug)]
#[derive(Serialize, Deserialize)]
pub struct WebAddonGroupInfo {
    addon_group_uuid: String,
}
/// Full addon-group record, including its member addons.
#[derive(Debug)]
#[derive(Serialize, Deserialize)]
pub struct WebAddonGroup {
    pub(crate) addon_group_name: String,
    pub(crate) addon_group_author: String,
    pub(crate) addon_group_uuid: String,
    pub(crate) addon_group_version: String,
    pub(crate) addons: Vec<WebAddon>,
}
/// Resolves an addon's UUID by name via `GET {api_url}/addon/{addon_name}`.
///
/// # Panics
/// Panics if the request fails or the response is not a JSON object
/// containing a `"uuid"` key.
pub fn get_addon_uuid(addon_name: &String, settings: &Settings) -> String {
    println!("web_api::get_addon_uuid(addon_name='{}')", addon_name);
    // Build the URL in place (push/push_str) instead of reallocating a new
    // String per `format!` call; ensure exactly one separating slash.
    let mut addon_uuid_url = settings.api_url.clone();
    if !addon_uuid_url.ends_with('/') {
        addon_uuid_url.push('/');
    }
    addon_uuid_url.push_str("addon/");
    addon_uuid_url.push_str(addon_name);
    let mut res = reqwest::get(&addon_uuid_url).unwrap();
    let body = res.text().unwrap();
    let result: HashMap<String, String> = match serde_json::from_str(&body) {
        Ok(map) => map,
        Err(error) => {
            panic!("Problem parsing the response: {:?}, the response was {:?}", error, body)
        }
    };
    result.get("uuid").unwrap().clone()
}
/// Fetches every addon group from the API.
///
/// First requests `GET {api_url}/addon_groups` for the list of group
/// UUIDs, then `GET {api_url}/addon_group/{uuid}` for each group's full
/// record.
///
/// # Panics
/// Panics if any request fails or a response body is not the expected
/// JSON shape.
pub fn get_addon_groups(settings: &Settings) -> Vec<WebAddonGroup> {
    // Build "{api_url}/addon_groups" with exactly one separating slash.
    let mut addon_groups_info_url = settings.api_url.clone();
    if !addon_groups_info_url.ends_with('/') {
        addon_groups_info_url.push('/');
    }
    addon_groups_info_url.push_str("addon_groups");
    let mut res = reqwest::get(&addon_groups_info_url).unwrap();
    let body = res.text().unwrap();
    // Binding renamed from the copy-pasted `Ok(uuid)` — it is a list of
    // group-info records, not a uuid.
    let addon_group_infos: Vec<WebAddonGroupInfo> = match serde_json::from_str(&body) {
        Ok(infos) => infos,
        Err(error) => {
            panic!("Problem parsing the response: {:?}, the response was {:?}", error, body)
        }
    };
    // Base URL for the per-group lookups: "{api_url}/addon_group/".
    let mut addon_groups_url = settings.api_url.clone();
    if !addon_groups_url.ends_with('/') {
        addon_groups_url.push('/');
    }
    addon_groups_url.push_str("addon_group/");
    let mut addon_groups: Vec<WebAddonGroup> = Vec::new();
    for addon_group_info in addon_group_infos {
        let addon_group_uuid = addon_group_info.addon_group_uuid;
        let mut res = reqwest::get(&format!("{}{}", addon_groups_url, addon_group_uuid)).unwrap();
        let body = res.text().unwrap();
        let addon_group: WebAddonGroup = match serde_json::from_str(&body) {
            Ok(group) => group,
            Err(error) => {
                panic!("Problem parsing the response: {:?}, the response was {:?}", error, body)
            }
        };
        addon_groups.push(addon_group);
    }
    addon_groups
}
/// Posts the updated-addons map (JSON-encoded, as a single form field) to
/// `{api_url}/update_addons`.
///
/// # Panics
/// Panics if the POST request itself fails; the response body is ignored.
pub fn update_addons(updated_addons: HashMap<String, String>) {
    let settings = load_settings();
    // Build the URL in place with exactly one separating slash.
    let mut addon_uuid_url = settings.api_url;
    if !addon_uuid_url.ends_with('/') {
        addon_uuid_url.push('/');
    }
    addon_uuid_url.push_str("update_addons");
    // `json!(x).to_string()` replaces the redundant `format!("{}", json!(x))`.
    let params = [("updated_addons", json!(updated_addons).to_string())];
    let client = reqwest::Client::new();
    let _response = match client.post(&addon_uuid_url)
        .form(&params)
        .send() {
        Ok(response) => response,
        Err(error) => {
            panic!("Problem updating updates through API: {:?}", error)
        }
    };
}
|
use crate::spec::{RelroLevel, TargetOptions};
/// Base `TargetOptions` shared by NetBSD targets: a dynamically linked,
/// position-independent "unix"-family platform with full RELRO and
/// DWARF version 2 debug info by default.
pub fn opts() -> TargetOptions {
    TargetOptions {
        os: "netbsd".into(),
        dynamic_linking: true,
        families: vec!["unix".into()],
        no_default_libraries: false,
        has_rpath: true,
        position_independent_executables: true,
        relro_level: RelroLevel::Full,
        use_ctors_section: true,
        default_dwarf_version: 2,
        // Everything not listed keeps the cross-platform default.
        ..Default::default()
    }
}
|
use errors::*;
use hex;
use libsodacrypt;
use net::endpoint::Endpoint;
use net::event::{Event, ServerEvent};
use net::http;
use net::message;
use rmp_serde;
use std;
use std::io::{Read, Write};
/// Lifecycle of a server-side session.
#[derive(Debug, Clone, PartialEq)]
pub enum SessionState {
    /// Created; the key-exchange handshake has not happened yet.
    New,
    /// Handshake done and session keys derived; waiting for the client's
    /// first ping.
    WaitPing,
    /// Ping received; session fully established.
    Ready,
}
/// Server-side state for one client session.
pub struct SessionServer {
    /// Random hex id assigned during the initial handshake (empty until then).
    pub session_id: String,
    /// This node's identity bytes.
    pub local_node_id: Vec<u8>,
    /// Endpoints this node advertises for discovery.
    pub local_discover: Vec<Endpoint>,
    /// Remote node's identity, learned from the handshake / first ping.
    pub remote_node_id: Vec<u8>,
    /// Remote node's advertised discovery endpoints.
    pub remote_discover: Vec<Endpoint>,
    /// The remote endpoint this session is bound to.
    pub endpoint: Endpoint,
    /// Current handshake/lifecycle state.
    pub state: SessionState,
    // Ephemeral key-exchange keypair; cleared once session keys are derived.
    pub eph_pub: Vec<u8>,
    pub eph_priv: Vec<u8>,
    // Symmetric session keys derived from the key exchange.
    pub key_send: Vec<u8>,
    pub key_recv: Vec<u8>,
    /// Socket currently attached for processing, if any.
    pub cur_socket: Option<std::net::TcpStream>,
    /// Outgoing messages buffered until the next flush.
    pub out_messages: Vec<message::Message>,
    /// Incremental parser state for the in-flight HTTP request.
    pub cur_request: http::Request,
}
impl SessionServer {
    /// Creates a fresh session bound to `endpoint`, generating an ephemeral
    /// key-exchange keypair. The session id, remote node info, and
    /// symmetric keys are filled in later during the handshake.
    pub fn new(local_node_id: &[u8], endpoint: &Endpoint, discover: Vec<Endpoint>) -> Result<Self> {
        let (key_pub, key_priv) = libsodacrypt::kx::gen_keypair()?;
        Ok(SessionServer {
            session_id: "".to_string(),
            local_node_id: local_node_id.to_vec(),
            local_discover: discover,
            remote_node_id: Vec::new(),
            remote_discover: Vec::new(),
            endpoint: endpoint.clone(),
            state: SessionState::New,
            eph_pub: key_pub,
            eph_priv: key_priv,
            key_send: Vec::new(),
            key_recv: Vec::new(),
            cur_socket: None,
            out_messages: Vec::new(),
            cur_request: http::Request::new(http::RequestType::Request),
        })
    }
    /// Drains all queued outgoing messages, encodes them with `key_send`,
    /// and writes the result to `socket`.
    ///
    /// Panics if the socket accepts fewer bytes than produced (no
    /// partial-write retry is attempted).
    pub fn send_buffered_messages(&mut self, socket: &mut std::net::TcpStream) -> Result<()> {
        let out_messages: Vec<message::Message> = self.out_messages.drain(..).collect();
        let out = message::compile(
            &self.session_id,
            &out_messages,
            http::RequestType::Response,
            &self.key_send,
        )?;
        if out.len() != socket.write(&out)? {
            panic!("incomplete write");
        }
        Ok(())
    }
    /// Queues a ping response echoing `origin_time`, then immediately
    /// flushes all buffered messages to `socket`.
    pub fn pong(&mut self, socket: &mut std::net::TcpStream, origin_time: u64) -> Result<()> {
        let ping_res = message::PingRes::new(
            origin_time,
            &self.local_node_id,
            self.local_discover.clone(),
        );
        self.out_messages
            .push(message::Message::PingRes(Box::new(ping_res)));
        self.send_buffered_messages(socket)
    }
    /// Queues an application-level message; it is sent on the next flush.
    pub fn user_message(&mut self, data: Vec<u8>) -> Result<()> {
        let msg = message::UserMessage::new(data);
        self.out_messages
            .push(message::Message::UserMessage(Box::new(msg)));
        Ok(())
    }
    /// Runs the server side of the key exchange for a brand-new session.
    ///
    /// On success the single-use ephemeral keys are wiped, the derived
    /// send/recv session keys are installed, and the state moves to
    /// `WaitPing`. On failure the session is dropped (returns `None`) and
    /// an error event is appended.
    #[allow(unknown_lints)]
    #[allow(needless_pass_by_value)]
    fn process_initial_handshake(
        mut self,
        mut events: Vec<Event>,
        request: http::Request,
        mut socket: std::net::TcpStream,
    ) -> (Option<Self>, Vec<Event>) {
        let (mut srv_recv, mut srv_send, mut remote_node_id, session_id) =
            match wrap_initial_handshake(
                &request.path,
                &self.local_node_id,
                &self.eph_pub,
                &self.eph_priv,
                &mut socket,
            ) {
                Ok(v) => v,
                Err(e) => {
                    events.push(Event::OnServerEvent(ServerEvent::OnError(e)));
                    return (None, events);
                }
            };
        self.remote_node_id.append(&mut remote_node_id);
        // The ephemeral keypair is single-use; clear it now that the
        // session keys exist.
        self.eph_pub.drain(..);
        self.eph_priv.drain(..);
        self.key_send.append(&mut srv_send);
        self.key_recv.append(&mut srv_recv);
        self.session_id = session_id;
        self.state = SessionState::WaitPing;
        (Some(self), events)
    }
    /// Decodes and dispatches the messages in an established-session
    /// request body.
    ///
    /// A `PingReq` promotes the session to `Ready`, records the remote
    /// node's id/discovery info, and answers with a pong; a `UserMessage`
    /// is surfaced as an `OnDataReceived` event. Panics on undecodable
    /// bodies or unexpected message types.
    #[allow(unknown_lints)]
    #[allow(needless_pass_by_value)]
    fn process_message(
        mut self,
        mut events: Vec<Event>,
        request: http::Request,
        mut socket: std::net::TcpStream,
    ) -> (Option<Self>, Vec<Event>) {
        let msgs = message::parse(&request.body, &self.key_recv).unwrap();
        for msg in msgs {
            match msg {
                message::Message::PingReq(mut r) => {
                    self.state = SessionState::Ready;
                    self.remote_node_id = r.node_id.drain(..).collect();
                    self.remote_discover = r.discover.drain(..).collect();
                    self.pong(&mut socket, r.sent_time).unwrap();
                }
                message::Message::UserMessage(r) => {
                    events.push(Event::OnServerEvent(ServerEvent::OnDataReceived(
                        self.remote_node_id.clone(),
                        r.data,
                    )));
                }
                _ => {
                    panic!("unhandled response type");
                }
            }
        }
        (Some(self), events)
    }
    /// Performs one non-blocking processing step for this session.
    ///
    /// Reads from the attached socket (if any) until the in-flight HTTP
    /// request is fully parsed, then dispatches it according to the
    /// session state. Returns the (possibly consumed) session plus any
    /// events produced; `None` means the session should be discarded.
    pub fn process_once(mut self) -> (Option<Self>, Vec<Event>) {
        let mut buf = [0u8; 1024];
        let mut events: Vec<Event> = Vec::new();
        // No socket attached -> nothing to do this step.
        let mut socket = match self.cur_socket.take() {
            None => return (Some(self), events),
            Some(s) => s,
        };
        if !self.cur_request.is_done() {
            let size = match socket.read(&mut buf) {
                Ok(b) => {
                    if b < 1 {
                        // Zero-length read: peer closed the connection.
                        events.push(Event::OnServerEvent(ServerEvent::OnClose()));
                        return (Some(self), events);
                    } else {
                        b
                    }
                }
                Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => {
                    // No data yet: re-attach the socket and retry later.
                    self.cur_socket = Some(socket);
                    return (Some(self), events);
                }
                Err(e) => {
                    events.push(Event::OnServerEvent(ServerEvent::OnError(e.into())));
                    events.push(Event::OnServerEvent(ServerEvent::OnClose()));
                    return (None, events);
                }
            };
            {
                if !self.cur_request.check_parse(&buf[..size]) {
                    // Request still incomplete: keep socket for next call.
                    self.cur_socket = Some(socket);
                    return (Some(self), events);
                }
            }
        }
        // Take ownership of the fully-parsed request and reset the parser.
        let request = self.cur_request;
        self.cur_request = http::Request::new(http::RequestType::Request);
        match self.state {
            SessionState::New => {
                if self.session_id.is_empty() && &request.method == "GET" {
                    self.process_initial_handshake(events, request, socket)
                } else if self.session_id.is_empty() && &request.method == "POST" {
                    {
                        // POST on a new session: adopt the session id from
                        // the path ("/<session_id>/...").
                        let parts: Vec<&str> = request.path.split('/').collect();
                        self.session_id = parts[1].to_string();
                    }
                    // re-attach so we can be processed in the proper context
                    self.cur_socket = Some(socket);
                    self.cur_request = request;
                    (Some(self), events)
                } else {
                    panic!("I don't know what to do with this request!")
                }
            }
            SessionState::WaitPing => {
                if self.session_id.is_empty() {
                    panic!("cannot process non-new tx without session info");
                }
                if &request.method == "GET" {
                    panic!("cannot process GET requests on established session");
                }
                {
                    // The first path segment must match our session id.
                    let parts: Vec<&str> = request.path.split('/').collect();
                    if parts[1] != self.session_id {
                        panic!("session id mismatch");
                    }
                }
                self.process_message(events, request, socket)
            }
            SessionState::Ready => {
                if self.session_id.is_empty() {
                    panic!("cannot process non-new tx without session info");
                }
                if &request.method == "GET" {
                    panic!("cannot process GET requests on established session");
                }
                {
                    let parts: Vec<&str> = request.path.split('/').collect();
                    if parts[1] != self.session_id {
                        panic!("session id mismatch");
                    }
                }
                self.process_message(events, request, socket)
            }
        }
    }
}
/// Performs the server side of the initial key-exchange handshake.
///
/// Expects `path` of the form `/<remote_node_id_hex>/<client_eph_pub_hex>`,
/// derives the session keys, and writes back an HTTP response carrying the
/// new session id, this node's id, and its ephemeral public key.
///
/// Returns `(srv_recv_key, srv_send_key, remote_node_id, session_id)`.
/// Panics if the socket accepts fewer bytes than the generated response.
#[allow(unknown_lints)]
#[allow(type_complexity)]
fn wrap_initial_handshake(
    path: &str,
    local_node_id: &[u8],
    eph_pub: &[u8],
    eph_priv: &[u8],
    socket: &mut std::net::TcpStream,
) -> Result<(Vec<u8>, Vec<u8>, Vec<u8>, String)> {
    let parts: Vec<&str> = path.split('/').collect();
    let remote_node_id = hex::decode(parts[1])?;
    let cli_pub = hex::decode(parts[2])?;
    // Fresh random 32-byte session id, hex-encoded.
    let session_id = hex::encode(libsodacrypt::rand::rand_bytes(32)?);
    let (srv_recv, srv_send) = libsodacrypt::kx::derive_server(eph_pub, eph_priv, &cli_pub)?;
    let mut res = http::Request::new(http::RequestType::Response);
    res.status = "OK".to_string();
    res.code = "200".to_string();
    res.headers.insert(
        "content-type".to_string(),
        "application/octet-stream".to_string(),
    );
    let data_out = message::InitialHandshakeRes {
        session_id: session_id.clone(),
        node_id: local_node_id.to_vec(),
        eph_pub: eph_pub.to_vec(),
    };
    res.body = rmp_serde::to_vec(&data_out)?;
    // Generate the response once and write that same buffer: the original
    // called `res.generate()` a second time inside the write, redoing the
    // serialization work (and risking divergence from the measured length).
    let output = res.generate();
    if output.len() != socket.write(&output)? {
        panic!("incomplete write");
    }
    Ok((srv_recv, srv_send, remote_node_id, session_id))
}
|
// Solutions for the "report repair" puzzle, one module per part.
pub mod report_repair_part_1;
pub mod report_repair_part_2;
|
use domain::Unit;
use std::sync::mpsc::Receiver;
/// Source of `Unit`s delivered asynchronously over an mpsc channel.
pub trait UnitGateway {
    /// Returns the receiving end of a channel that yields `Unit`s.
    fn get_units_stream(&self) -> Receiver<Unit>;
}
|
use std::env;
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
/// Opens `filename` and returns a buffered iterator over its lines.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    // Map the opened handle straight into a buffered line iterator.
    File::open(filename).map(|handle| io::BufReader::new(handle).lines())
}
/// Counts '#' trees hit while sledding a grid file at slope
/// (right `left`, down `down`), wrapping horizontally at the row width.
///
/// `input` is the path to a grid file (one row per line, ASCII '.'/'#').
/// Returns 0 if the file cannot be read (original best-effort behavior).
/// Parameter widened from `&String` to `&str` — existing `&String` call
/// sites still compile via deref coercion.
fn check_slope(left: usize, down: usize, input: &str) -> usize {
    let mut n_trees = 0;
    // Read the whole grid at once instead of routing through a separate
    // line-iterator helper; unreadable files still silently yield 0.
    if let Ok(content) = std::fs::read_to_string(input) {
        let mut width = 0;
        let mut x = left;
        let mut y = 0;
        for row in content.lines() {
            if width == 0 {
                width = row.len();
            }
            // Skip `down - 1` rows between checks (y counts rows since the
            // last checked one; the first `down` rows are skipped too).
            if y < down {
                y += 1;
                continue;
            }
            y = 1;
            // Byte indexing is fine here: grids are ASCII ('.'/'#').
            if row.as_bytes()[x] == b'#' {
                n_trees += 1;
            }
            x += left;
            // Wrap around: the grid pattern repeats to the right.
            if x >= width {
                x -= width;
            }
        }
    }
    n_trees
}
/// CLI entry point: `prog <input-file> <part>`.
///
/// Part "1" counts trees on the (right 3, down 1) slope; part "2" prints
/// the product of the tree counts over five slopes. Panics if fewer than
/// two arguments are supplied (direct args[1]/args[2] indexing).
fn main() {
    let args: Vec<String> = env::args().collect();
    // Starts at 1 (multiplicative identity) so part 2 can accumulate a product.
    let mut n_trees = 1;
    if &args[2] == "1" {
        n_trees = check_slope(3, 1, &args[1]);
    } else if &args[2] == "2" {
        n_trees *= check_slope(1, 1, &args[1]);
        n_trees *= check_slope(3, 1, &args[1]);
        n_trees *= check_slope(5, 1, &args[1]);
        n_trees *= check_slope(7, 1, &args[1]);
        n_trees *= check_slope(1, 2, &args[1]);
    }
    println!("{}", n_trees)
}
|
use std::{
future::Future,
pin::Pin,
task::{Context, Poll},
time::{Duration, Instant},
};
use async_io::Timer;
use futures_core::Stream;
/// Simple stream of `std::time::Instant` at a target rate.
///
/// If the stream is polled late, the next instant will target the duration after the call to
/// `poll_next` that generated the event, not after the previous timer deadline. Thus, under load
/// or with artificial delays, the stream will just not generate as many events rather than trying
/// to generate more events to catch up. The target rate is the *fastest* rate the stream will run,
/// it may run slower.
pub struct Interval {
    /// Target spacing between emitted instants.
    duration: Duration,
    /// Timer for the next tick; re-armed after each emitted event.
    timer: Timer,
}
impl Interval {
    /// Create a new `Interval` stream where events are `duration` apart.
    pub fn new(duration: Duration) -> Interval {
        // Arm the first tick immediately; later ticks are re-armed in poll_next.
        let timer = Timer::after(duration);
        Interval { duration, timer }
    }
}
impl Stream for Interval {
    type Item = Instant;
    /// Polls the inner timer; when it fires, re-arms it for `duration`
    /// from *now* (not from the previous deadline) and yields the instant.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // Split-borrow the fields so the timer can be polled and re-armed
        // while `duration` is also borrowed.
        let Self { duration, timer } = &mut *self;
        match Pin::new(&mut *timer).poll(cx) {
            Poll::Ready(instant) => {
                // Re-arm relative to now; late polls therefore skip ticks
                // instead of bursting to catch up (see module docs).
                timer.set_after(*duration);
                Poll::Ready(Some(instant))
            }
            Poll::Pending => Poll::Pending,
        }
    }
}
|
//! Common utility functions for sensors.
use crate::{Device, Ev3Result};
/// Common utility functions for sensors.
pub trait Sensor: Device {
/// Reading the file will give the unscaled raw values in the `value<N>` attributes.
/// Use `bin_data_format`, `num_values` and the individual sensor documentation to determine how to interpret the data.
fn get_bin_data(&self) -> Ev3Result<String> {
self.get_attribute("bin_data").get()
}
/// Returns the format of the values in `bin_data` for the current mode. Possible values are:
/// * u8: Unsigned 8-bit integer (byte)
/// * s8: Signed 8-bit integer (sbyte)
/// * u16: Unsigned 16-bit integer (ushort)
/// * s16: Signed 16-bit integer (short)
/// * s16_be: Signed 16-bit integer, big endian
/// * s32: Signed 32-bit integer (int)
/// * s32_be: Signed 32-bit integer, big endian
/// * float: IEEE 754 32-bit floating point (float)
fn get_bin_data_format(&self) -> Ev3Result<String> {
self.get_attribute("bin_data_format").get()
}
/// Returns the number of decimal places for the values in the `value<N>` attributes of the current mode.
fn get_decimals(&self) -> Ev3Result<i32> {
self.get_attribute("decimals").get()
}
/// Returns the firmware version of the sensor if available.
/// Currently only NXT/I2C sensors support this.
fn get_fw_version(&self) -> Ev3Result<String> {
self.get_attribute("fw_version").get()
}
/// Returns the current mode.
/// See the individual sensor documentation for a description of the modes available for each type of sensor.
fn get_mode(&self) -> Ev3Result<String> {
self.get_attribute("mode").get()
}
/// Sets the sensor to that mode.
/// See the individual sensor documentation for a description of the modes available for each type of sensor.
fn set_mode(&self, mode: &str) -> Ev3Result<()> {
self.get_attribute("mode").set_str_slice(mode)
}
/// Returns a list of the valid modes for the sensor.
fn get_modes(&self) -> Ev3Result<Vec<String>> {
self.get_attribute("modes").get_vec()
}
/// Returns the number of `value<N>` attributes that will return a valid value for the current mode.
fn get_num_values(&self) -> Ev3Result<i32> {
self.get_attribute("num_values").get()
}
/// Returns the polling period of the sensor in milliseconds.
/// Returns `-EOPNOTSUPP` if changing polling is not supported.
/// Note: Setting poll_ms too high can cause the input port auto detection to fail.
/// If this happens, use the mode attribute of the port to force the port to `nxt-i2c mode`. Values must not be negative.
fn get_poll_ms(&self) -> Ev3Result<i32> {
self.get_attribute("poll_ms").get()
}
/// Sets the polling period of the sensor in milliseconds.
/// Setting to 0 disables polling.
/// Note: Setting poll_ms too high can cause the input port auto detection to fail.
/// If this happens, use the mode attribute of the port to force the port to `nxt-i2c mode`. Values must not be negative.
fn set_poll_ms(&self, poll_ms: i32) -> Ev3Result<()> {
self.get_attribute("poll_ms").set(poll_ms)
}
/// Returns the units of the measured value for the current mode. May return empty string if units are unknown.
fn get_units(&self) -> Ev3Result<String> {
self.get_attribute("units").get()
}
/// Returns the current `value{index}` value if available.
fn get_value(&self, index: u8) -> Ev3Result<i32> {
use crate::Ev3Error;
match index {
0 => self.get_value0(),
1 => self.get_value1(),
2 => self.get_value2(),
3 => self.get_value3(),
4 => self.get_value4(),
5 => self.get_value5(),
6 => self.get_value6(),
7 => self.get_value7(),
_ => Ev3Result::Err(Ev3Error::InternalError {
msg: format!("Sensor value index {index} is out of bounds [0, 7]"),
}),
}
}
/// Returns the current `value0` value if available.
fn get_value0(&self) -> Ev3Result<i32> {
self.get_attribute("value0").get()
}
/// Returns the current `value1` value if available.
fn get_value1(&self) -> Ev3Result<i32> {
self.get_attribute("value1").get()
}
/// Returns the current `value2` value if available.
fn get_value2(&self) -> Ev3Result<i32> {
self.get_attribute("value2").get()
}
/// Returns the current `value3` value if available.
fn get_value3(&self) -> Ev3Result<i32> {
self.get_attribute("value3").get()
}
/// Returns the current `value4` value if available.
fn get_value4(&self) -> Ev3Result<i32> {
self.get_attribute("value4").get()
}
/// Returns the current `value5` value if available.
fn get_value5(&self) -> Ev3Result<i32> {
self.get_attribute("value5").get()
}
/// Returns the current `value6` value if available.
fn get_value6(&self) -> Ev3Result<i32> {
self.get_attribute("value6").get()
}
/// Returns the current `value7` value if available.
fn get_value7(&self) -> Ev3Result<i32> {
self.get_attribute("value7").get()
}
/// Returns a space delimited string representing sensor-specific text values. Returns `-EOPNOTSUPP` if a sensor does not support text values.
fn get_text_value(&self) -> Ev3Result<String> {
self.get_attribute("text_value").get()
}
}
|
use s3::bucket::Bucket;
use s3::creds::Credentials as aws_cred;
use s3::region::Region;
use s3::S3Error;
/// Everything needed to reach one S3-compatible bucket.
struct Storage {
    // Human-readable label; only used for the progress log line.
    name: String,
    // AWS region the bucket lives in.
    region: Region,
    // Access credentials (key id + secret).
    credentials: aws_cred,
    // Bucket name.
    bucket: String,
}
/// Upload `data` to the configured S3 bucket under `filename` and return the
/// object's public URL.
///
/// Reads `AWS_KEY`, `AWS_SECRET` and `AWS_BUCKET` from the environment and
/// panics (via `expect`) if any of them is missing; S3/credential errors are
/// returned as `S3Error`.
pub fn aws_func(filename: String, data: Vec<u8>) -> Result<String, S3Error> {
    let aws = Storage {
        name: "aws".into(),
        region: "eu-central-1".parse()?,
        credentials: aws_cred::new(
            Some(&std::env::var("AWS_KEY").expect("AWS KEY not set")),
            Some(&std::env::var("AWS_SECRET").expect("AWS SECRET not set")),
            None,
            None,
            None,
        )?,
        // Fix: this expect message previously said "AWS SECRET not set",
        // which made a missing bucket look like a missing secret.
        bucket: std::env::var("AWS_BUCKET").expect("AWS BUCKET not set"),
    };
    println!("Running {}", aws.name);
    let bucket = Bucket::new(&aws.bucket, aws.region, aws.credentials)?;
    let (_, _code) = bucket.put_object_blocking(&filename, &data)?;
    // Fix: build the URL from the configured bucket; it was hard-coded to
    // "val-mxo" and therefore wrong for any other AWS_BUCKET value.
    let file = format!(
        "https://{}.s3.eu-central-1.amazonaws.com/{}",
        &aws.bucket, &filename
    );
    Ok(file)
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
/// Generated Windows binding: outcome of a platform diagnostic action,
/// represented as a transparent `i32` newtype.
#[repr(transparent)]
#[derive(Copy, Clone)]
pub struct PlatformDiagnosticActionState(pub i32);
impl PlatformDiagnosticActionState {
    pub const Success: Self = Self(0i32);
    pub const FreeNetworkNotAvailable: Self = Self(1i32);
    pub const ACPowerNotAvailable: Self = Self(2i32);
}
/// Generated Windows binding: when a diagnostic trace escalates,
/// represented as a transparent `i32` newtype.
#[repr(transparent)]
#[derive(Copy, Clone)]
pub struct PlatformDiagnosticEscalationType(pub i32);
impl PlatformDiagnosticEscalationType {
    pub const OnCompletion: Self = Self(0i32);
    pub const OnFailure: Self = Self(1i32);
}
/// Generated Windows binding: event-buffer latency flags (bit values),
/// represented as a transparent `u32` newtype.
#[repr(transparent)]
#[derive(Copy, Clone)]
pub struct PlatformDiagnosticEventBufferLatencies(pub u32);
impl PlatformDiagnosticEventBufferLatencies {
    pub const Normal: Self = Self(1u32);
    pub const CostDeferred: Self = Self(2u32);
    pub const Realtime: Self = Self(4u32);
}
pub type PlatformDiagnosticTraceInfo = *mut ::core::ffi::c_void;
/// Generated Windows binding: priority of a diagnostic trace,
/// represented as a transparent `i32` newtype.
#[repr(transparent)]
#[derive(Copy, Clone)]
pub struct PlatformDiagnosticTracePriority(pub i32);
impl PlatformDiagnosticTracePriority {
    pub const Normal: Self = Self(0i32);
    pub const UserElevated: Self = Self(1i32);
}
pub type PlatformDiagnosticTraceRuntimeInfo = *mut ::core::ffi::c_void;
/// Generated Windows binding: run state of a diagnostic trace slot,
/// represented as a transparent `i32` newtype.
#[repr(transparent)]
#[derive(Copy, Clone)]
pub struct PlatformDiagnosticTraceSlotState(pub i32);
impl PlatformDiagnosticTraceSlotState {
    pub const NotRunning: Self = Self(0i32);
    pub const Running: Self = Self(1i32);
    pub const Throttled: Self = Self(2i32);
}
/// Generated Windows binding: kind of diagnostic trace slot,
/// represented as a transparent `i32` newtype.
#[repr(transparent)]
#[derive(Copy, Clone)]
pub struct PlatformDiagnosticTraceSlotType(pub i32);
impl PlatformDiagnosticTraceSlotType {
    pub const Alternative: Self = Self(0i32);
    pub const AlwaysOn: Self = Self(1i32);
    pub const Mini: Self = Self(2i32);
}
|
use dlal_component_base::{component, err, json, serde_json, Arg, Body, CmdResult};
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
lazy_static! {
    // Matches a double-quoted substitution token such as "%3", "%3*0.5" or
    // "%3*2+1": capture 1 = MIDI byte index, capture 2 = optional multiplier,
    // capture 3 = optional addend (digits, '.', 'e' and '-' allowed, so
    // scientific notation and negatives pass through).
    static ref RE: Regex = Regex::new(concat!(
        r#"""#,
        r"%(\d+)",
        r"(?:\*([\d.e-]+))?",
        r"(?:\+([\d.e-]+))?",
        r#"""#,
    ))
    .unwrap();
}
/// One element of a MIDI-matching pattern.
#[derive(Serialize, Deserialize)]
enum Piece {
    // Match a byte exactly.
    Byte(u8),
    // Match any byte.
    Null,
    // Match only the low nibble (compared against byte & 0x0f).
    LeastSignificantNibble(u8),
    // Match only the high nibble (compared against byte & 0xf0).
    MostSignificantNibble(u8),
}
/// A rule that forwards matching MIDI messages, reformatted as a command, to
/// one connected output component.
#[derive(Serialize, Deserialize)]
struct Directive {
    // Pattern the incoming MIDI message must satisfy.
    pattern: Vec<Piece>,
    // Index into the connected output components.
    component: usize,
    // Command template; "%N*M+B" is replaced with msg[N] * M + B.
    format: String,
}
impl Directive {
fn matches(&self, msg: &[u8]) -> bool {
for i in 0..self.pattern.len() {
if i > msg.len() {
if std::option_env!("DLAL_SNOOP_MIDMAN").is_some() {
println!("no match, pattern longer than {:02x?}", msg);
}
break;
}
match self.pattern[i] {
Piece::Byte(b) => {
if b != msg[i] {
if std::option_env!("DLAL_SNOOP_MIDMAN").is_some() {
println!("no match, piece {} (byte), msg {:02x?}", i, msg);
}
return false;
}
}
Piece::Null => (),
Piece::LeastSignificantNibble(b) => {
if b != msg[i] & 0xf {
if std::option_env!("DLAL_SNOOP_MIDMAN").is_some() {
println!("no match, piece {} (little nibble), msg {:02x?}", i, msg);
}
return false;
}
}
Piece::MostSignificantNibble(b) => {
if b != msg[i] & 0xf0 {
if std::option_env!("DLAL_SNOOP_MIDMAN").is_some() {
println!("no match, piece {} (big nibble), msg {:02x?}", i, msg);
}
return false;
}
}
}
}
true
}
fn sub(&self, msg: &[u8]) -> serde_json::Value {
let text = &RE
.replace_all(&self.format, |captures: ®ex::Captures| -> String {
let i = match captures.get(1) {
Some(i) => i,
None => return "null".into(),
};
let i = match i.as_str().parse::<usize>() {
Ok(i) => i,
Err(_) => return "null".into(),
};
let m = match captures.get(2) {
Some(m) => match m.as_str().parse::<f32>() {
Ok(m) => m,
Err(_) => return "null".into(),
},
None => 1.0,
};
let b = match captures.get(3) {
Some(b) => match b.as_str().parse::<f32>() {
Ok(b) => b,
Err(_) => return "null".into(),
},
None => 0.0,
};
if i >= msg.len() {
return "null".into();
}
(msg[i] as f32 * m + b).to_string()
})
.to_string();
match serde_json::from_str(text) {
Ok(body) => body,
Err(_) => json!("null"),
}
}
}
// Component declaration: accepts MIDI in, emits commands out. State is the
// directive list plus the last command error (readable via field_helpers);
// "directive" is the single custom command, dispatched to directive_cmd.
component!(
    {"in": ["midi"], "out": ["cmd"]},
    [
        "multi",
        {"name": "field_helpers", "fields": ["last_error"], "kinds": ["r"]},
    ],
    {
        directives: Vec<Directive>,
        last_error: String,
    },
    {
        "directive": {
            "args": [
                {
                    "name": "pattern",
                    "type": "array",
                    "element": {
                        "choices": [
                            {
                                "name": "byte",
                                "desc": "match equal",
                            },
                            {
                                "name": "null",
                                "desc": "match anything",
                            },
                            {
                                "name": "object",
                                "values": {
                                    "nibble": "nibble",
                                },
                                "desc": "match based on provided values",
                            },
                        ],
                    },
                },
                {
                    "name": "component",
                    "type": "unsigned",
                    "desc": "index into connected components to send the command to",
                },
                {
                    "name": "format",
                    "type": "string",
                    "desc": "command text to have MIDI bytes subbed in; %1*2+3 becomes MIDI byte 1 multiplied by 2 plus 3"
                },
            ],
        },
    },
);
impl ComponentTrait for Component {
    /// Run every directive against the incoming MIDI message and forward the
    /// substituted command to the directive's target output component.
    fn midi(&mut self, msg: &[u8]) {
        for directive in &self.directives {
            if directive.matches(msg) {
                if directive.component >= self.outputs.len() {
                    if std::option_env!("DLAL_SNOOP_MIDMAN").is_some() {
                        println!("component index {} out of range", directive.component);
                    }
                    continue;
                }
                if let Some(result) = self.outputs[directive.component].command(&directive.sub(msg))
                {
                    if let Some(error) = result.get("error") {
                        // Use the raw string when the error is a JSON string;
                        // otherwise fall back to its JSON rendering. (The old
                        // `unwrap_or(&error.to_string())` built the rendering
                        // eagerly even when `as_str` succeeded.)
                        self.last_error = match error.as_str() {
                            Some(s) => s.into(),
                            None => error.to_string(),
                        };
                        if std::option_env!("DLAL_SNOOP_MIDMAN").is_some() {
                            println!("command error: {}", self.last_error);
                        }
                    }
                }
            }
        }
    }
    /// Serialize component state: just the directive list.
    fn to_json_cmd(&mut self, _body: serde_json::Value) -> CmdResult {
        Ok(Some(json!({
            "directives": self.directives,
        })))
    }
    /// Restore component state from the "directives" field of arg 0.
    fn from_json_cmd(&mut self, body: serde_json::Value) -> CmdResult {
        let j = body.arg::<serde_json::Value>(0)?;
        self.directives =
            serde_json::from_str(&j.at::<serde_json::Value>("directives")?.to_string())?;
        Ok(None)
    }
}
impl Component {
    /// Handler for the "directive" command: parse the pattern, target
    /// component index and format string, then store a new Directive.
    fn directive_cmd(&mut self, body: serde_json::Value) -> CmdResult {
        // Arg 0 is the pattern array; each element maps to one Piece.
        let pattern = body.arg::<Vec<_>>(0)?.vec_map(|i| match i {
            serde_json::Value::Null => Ok(Piece::Null),
            serde_json::Value::Number(_) => Ok(Piece::Byte(i.to()?)),
            serde_json::Value::Object(map) => {
                if let Some(nibble) = map.get("nibble") {
                    let nibble: u8 = nibble.to()?;
                    // Values >= 0x10 are taken as a high nibble (e.g. 0x90)
                    // and must have a zero low nibble; smaller values are
                    // matched against the low nibble.
                    if nibble >= 0x10 {
                        if nibble & 0xf != 0 {
                            return Err(err!("expected one nibble to be 0").into());
                        }
                        Ok(Piece::MostSignificantNibble(nibble))
                    } else {
                        Ok(Piece::LeastSignificantNibble(nibble))
                    }
                } else {
                    Err(err!("unknown pattern object").into())
                }
            }
            v => Err(err!("pattern element {:?} is invalid", v).into()),
        })?;
        // Args 1 and 2: target output index and the command template.
        self.directives.push(Directive {
            pattern,
            component: body.arg(1)?,
            format: body.arg(2)?,
        });
        Ok(None)
    }
}
|
// Copyright 2019, The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use crate::transactions::{
fee::Fee,
tari_amount::*,
transaction::*,
types::{BlindingFactor, Commitment, CommitmentFactory, CryptoFactories, PrivateKey, RangeProofService},
};
use log::*;
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Error, Formatter};
use tari_crypto::{commitment::HomomorphicCommitmentFactory, ristretto::pedersen::PedersenCommitment};
pub const LOG_TARGET: &str = "c::tx::aggregated_body";
/// The components of the block or transaction. The same struct can be used for either, since in Mimblewimble,
/// cut-through means that blocks and transactions have the same structure.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct AggregateBody {
    // True when all three lists are in sorted order; cleared by the add_*
    // mutators and set again by sort().
    sorted: bool,
    /// List of inputs spent by the transaction.
    inputs: Vec<TransactionInput>,
    /// List of outputs the transaction produces.
    outputs: Vec<TransactionOutput>,
    /// Kernels contain the excesses and their signatures for transaction
    kernels: Vec<TransactionKernel>,
}
impl AggregateBody {
/// Create an empty aggregate body
pub fn empty() -> AggregateBody {
AggregateBody {
sorted: false,
inputs: vec![],
outputs: vec![],
kernels: vec![],
}
}
/// Create a new aggregate body from provided inputs, outputs and kernels
pub fn new(
inputs: Vec<TransactionInput>,
outputs: Vec<TransactionOutput>,
kernels: Vec<TransactionKernel>,
) -> AggregateBody
{
AggregateBody {
sorted: false,
inputs,
outputs,
kernels,
}
}
    /// Provide read-only access to the input list.
    pub fn inputs(&self) -> &Vec<TransactionInput> {
        &self.inputs
    }
    /// Provide read-only access to the output list.
    pub fn outputs(&self) -> &Vec<TransactionOutput> {
        &self.outputs
    }
    /// Provide read-only access to the kernel list.
    pub fn kernels(&self) -> &Vec<TransactionKernel> {
        &self.kernels
    }
    /// Should be used for tests only. Get a mutable reference to the inputs.
    /// Note: this does NOT clear the `sorted` flag, so mutating through it can
    /// leave the body claiming to be sorted when it no longer is.
    pub fn inputs_mut(&mut self) -> &mut Vec<TransactionInput> {
        &mut self.inputs
    }
/// Add an input to the existing aggregate body
pub fn add_input(&mut self, input: TransactionInput) {
self.inputs.push(input);
self.sorted = false;
}
/// Add a series of inputs to the existing aggregate body
pub fn add_inputs(&mut self, inputs: &mut Vec<TransactionInput>) {
self.inputs.append(inputs);
self.sorted = false;
}
/// Add an output to the existing aggregate body
pub fn add_output(&mut self, output: TransactionOutput) {
self.outputs.push(output);
self.sorted = false;
}
    /// Add a series of outputs to the existing aggregate body.
    pub fn add_outputs(&mut self, outputs: &mut Vec<TransactionOutput>) {
        self.outputs.append(outputs);
        self.sorted = false;
    }
/// Add a kernel to the existing aggregate body
pub fn add_kernel(&mut self, kernel: TransactionKernel) {
self.kernels.push(kernel);
}
/// Add a kernels to the existing aggregate body
pub fn add_kernels(&mut self, new_kernels: &mut Vec<TransactionKernel>) {
self.kernels.append(new_kernels);
self.sorted = false;
}
/// Set the kernel of the aggregate body, replacing any previous kernels
pub fn set_kernel(&mut self, kernel: TransactionKernel) {
self.kernels = vec![kernel];
}
    /// This will perform cut-through on the aggregate body. It will remove all outputs (and inputs) that are being
    /// spent as inputs.
    pub fn do_cut_through(&mut self) {
        // Clone the inputs that also appear as outputs (spent within this
        // same body); cloning first avoids borrowing self while mutating.
        let double_inputs: Vec<TransactionInput> = self
            .inputs
            .iter()
            .filter(|input| self.outputs.iter().any(|o| o.is_equal_to(input)))
            .cloned()
            .collect();
        // Drop each such entry from both the output and the input side.
        for input in double_inputs {
            trace!(
                target: LOG_TARGET,
                "removing following utxo for cut-through: {:?}",
                input
            );
            self.outputs.retain(|x| !input.is_equal_to(x));
            self.inputs.retain(|x| *x != input);
        }
    }
/// This will perform a check that cut-through was performed on the aggregate body. It will return true if there are
/// no outputs that are being spent as inputs.
pub fn cut_through_check(&self) -> bool {
!self
.inputs
.iter()
.any(|input| self.outputs.iter().any(|o| o.is_equal_to(input)))
}
/// Sort the component lists of the aggregate body
pub fn sort(&mut self) {
if self.sorted {
return;
}
self.inputs.sort();
self.outputs.sort();
self.kernels.sort();
self.sorted = true;
}
/// Verify the signatures in all kernels contained in this aggregate body. Clients must provide an offset that
/// will be added to the public key used in the signature verification.
pub fn verify_kernel_signatures(&self) -> Result<(), TransactionError> {
trace!(target: LOG_TARGET, "Checking kernel signatures",);
for kernel in self.kernels.iter() {
kernel.verify_signature().or_else(|e| {
warn!(target: LOG_TARGET, "Kernel ({}) signature failed {:?}.", kernel, e);
Err(e)
})?;
}
Ok(())
}
pub fn get_total_fee(&self) -> MicroTari {
let mut fee = MicroTari::from(0);
for kernel in &self.kernels {
fee += kernel.fee;
}
fee
}
    /// Validate this transaction by checking the following:
    /// 1. The sum of inputs, outputs and fees equal the (public excess value + offset)
    /// 1. The signature signs the canonical message with the private excess
    /// 1. Range proofs of the outputs are valid
    ///
    /// This function does NOT check that inputs come from the UTXO set
    /// The reward is the amount of Tari rewarded for this block, this should be 0 for a transaction
    pub fn validate_internal_consistency(
        &self,
        offset: &BlindingFactor,
        reward: MicroTari,
        factories: &CryptoFactories,
    ) -> Result<(), TransactionError>
    {
        // Commit to the reward under the offset so it folds into the kernel
        // balance check below.
        let total_offset = factories.commitment.commit_value(&offset, reward.0);
        self.verify_kernel_signatures()?;
        self.validate_kernel_sum(total_offset, &factories.commitment)?;
        self.validate_range_proofs(&factories.range_proof)
    }
    /// Consume the body and return its inputs, outputs and kernels.
    pub fn dissolve(self) -> (Vec<TransactionInput>, Vec<TransactionOutput>, Vec<TransactionKernel>) {
        (self.inputs, self.outputs, self.kernels)
    }
    /// Calculate the sum of the inputs and outputs including fees
    fn sum_commitments(&self, fees: u64, factory: &CommitmentFactory) -> Commitment {
        // Commit to the fee amount with a zero (default) blinding factor.
        let fee_commitment = factory.commit_value(&PrivateKey::default(), fees);
        let sum_inputs = &self.inputs.iter().map(|i| &i.commitment).sum::<Commitment>();
        let sum_outputs = &self.outputs.iter().map(|o| &o.commitment).sum::<Commitment>();
        // (outputs - inputs) + fees, evaluated homomorphically.
        &(sum_outputs - sum_inputs) + &fee_commitment
    }
    /// Calculate the sum of the kernels, taking into account the provided offset, and their constituent fees
    fn sum_kernels(&self, offset: PedersenCommitment) -> KernelSum {
        // Sum all kernel excesses and fees; the offset seeds the excess
        // accumulator so the final sum already includes it.
        self.kernels.iter().fold(
            KernelSum {
                fees: MicroTari(0),
                sum: offset,
            },
            |acc, val| KernelSum {
                fees: acc.fees + val.fee,
                sum: &acc.sum + &val.excess,
            },
        )
    }
    /// Confirm that the (sum of the outputs) - (sum of inputs) = Kernel excess
    fn validate_kernel_sum(&self, offset: Commitment, factory: &CommitmentFactory) -> Result<(), TransactionError> {
        trace!(target: LOG_TARGET, "Checking kernel total");
        // Both sides include the fees: the kernel side accumulates them while
        // folding, and sum_commitments commits to the same total.
        let kernel_sum = self.sum_kernels(offset);
        let sum_io = self.sum_commitments(kernel_sum.fees.into(), factory);
        if kernel_sum.sum != sum_io {
            return Err(TransactionError::ValidationError(
                "Sum of inputs and outputs did not equal sum of kernels with fees".into(),
            ));
        }
        Ok(())
    }
    /// Verify the range proof of every output, failing on the first output
    /// whose proof does not verify.
    fn validate_range_proofs(&self, range_proof_service: &RangeProofService) -> Result<(), TransactionError> {
        trace!(target: LOG_TARGET, "Checking range proofs");
        for o in &self.outputs {
            if !o.verify_range_proof(&range_proof_service)? {
                return Err(TransactionError::ValidationError(
                    "Range proof could not be verified".into(),
                ));
            }
        }
        Ok(())
    }
    /// Returns the byte size or weight of a body
    pub fn calculate_weight(&self) -> u64 {
        // Weight is fully determined by the component counts via the
        // consensus Fee rules.
        Fee::calculate_weight(self.kernels().len(), self.inputs().len(), self.outputs().len())
    }
}
/// This will strip away the offset of the transaction returning a pure aggregate body
impl From<Transaction> for AggregateBody {
    fn from(transaction: Transaction) -> Self {
        // Only the body survives; the transaction's offset is discarded.
        transaction.body
    }
}
impl Display for AggregateBody {
    /// Render the body as a human-readable report: kernels, inputs, outputs,
    /// preceded by a warning when the body is unsorted.
    fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), Error> {
        if !self.sorted {
            fmt.write_str("WARNING: Block body is not sorted.\n")?;
        }
        fmt.write_str("--- Transaction Kernels ---\n")?;
        // write!/writeln! stream straight into the formatter; the previous
        // write_str(&format!(...)) allocated an intermediate String per line.
        for (i, kernel) in self.kernels.iter().enumerate() {
            writeln!(fmt, "Kernel {}:", i)?;
            writeln!(fmt, "{}", kernel)?;
        }
        writeln!(fmt, "--- Inputs ({}) ---", self.inputs.len())?;
        for input in self.inputs.iter() {
            write!(fmt, "{}", input)?;
        }
        writeln!(fmt, "--- Outputs ({}) ---", self.outputs.len())?;
        for output in self.outputs.iter() {
            write!(fmt, "{}", output)?;
        }
        Ok(())
    }
}
|
use std::iter;
use std::str;
use lexical::tokens::Tokens;
use lexical::tokens::Tokens::*;
use lexical::types::Types;
use lexical::keywords::Keywords;
use lexical::symbols::Symbols;
// A Lexer that keeps track of the current line and column position
// as well as the position in the char input stream.
pub struct Lexer<'a> {
    // 1-based line of the next unread char.
    line_number: usize,
    // 1-based column of the next unread char; reset on every newline.
    column_number: usize,
    // Peekable (byte_offset, char) stream over the source text.
    input: iter::Peekable<str::CharIndices<'a>>,
}
impl<'a> Lexer<'a> {
// Create a new lexer instance
pub fn new(input: &'a str) -> Lexer<'a> {
Lexer {
line_number: 1,
column_number: 1,
input: input.char_indices().peekable(),
}
}
// Gets the next char and sets the position forward in the buffer
fn consume_char(&mut self) -> Option<char> {
if let Some(result) = self.input.next() {
let (_, chr) = result;
self.column_number += 1;
if chr == '\n' {
self.line_number += 1;
self.column_number = 1;
}
return Some(chr);
}
None
}
fn next_char(&mut self) -> Option<char> {
if let Some(result) = self.input.peek() {
let &(_, chr) = result;
return Some(chr);
}
None
}
fn consume_while<F: FnMut(char) -> bool>(&mut self, test: &mut F) -> String {
let mut result = String::new();
// Always unwrapping as the loop checks eof.
while self.next_char().is_some() && test(self.next_char().unwrap()) {
match self.consume_char().unwrap() {
// Ignore any carriage returns
'\r' => continue,
ch => result.push(ch)
};
};
result
}
// Single and multi char symbols: *, -, +=, -=, ...
fn symbols_token(&self, punc: &str) -> Tokens {
Symbol(match punc.parse::<Symbols>() {
Ok(p) => p,
Err(e) => unreachable!(e)
})
}
// Consume non newline whitespace
fn consume_whitespace(&mut self) {
self.consume_while(&mut |ch| match ch {
'\n' | '\t' => false,
w if w.is_whitespace() => true,
_ => false
});
}
// Identifiers: [a-zA-Z_][a-zA-z0-9_]*
// Keywords are subsets of identifiers.
fn consume_identifier(&mut self) -> Tokens {
// Lexer will only let you start with alpha or undescore,
// so there is no need to check for numeric start
let ident = self.consume_while(&mut |ch| match ch {
a if a.is_alphanumeric() => true,
'_' => true,
_ => false
});
match &ident[..] {
"True" => return BoolLiteral(true),
"False" => return BoolLiteral(false),
_ => ()
};
if let Ok(key) = ident.parse::<Keywords>() {
return Keyword(key);
}
Identifier(ident)
}
    // Find a sequence of 32 or 64
    // Consumes the width digits of a numeric suffix (e.g. the "32" of "u32")
    // and returns the full suffix text, or a descriptive error. The error
    // strings use positional format args ({0}, {1}) so the already-consumed
    // part of the suffix can be respliced into the suggestion.
    fn consume_32_64(&mut self, prefix: char) -> Result<String, String> {
        // prefix is the starting character, ie i, u, f
        let mut string = String::new();
        string.push(prefix);
        match self.next_char() {
            Some('3') => {
                string.push(self.consume_char().unwrap());
                match self.next_char() {
                    Some('2') => {
                        string.push(self.consume_char().unwrap());
                        Ok(string)
                    },
                    Some('\n') | // NL & CR have pesky visual effects.
                    Some('\r') => Err(format!("Invalid suffix {}. Did you mean {}32?", string, prefix)),
                    Some(_) => Err(format!("Invalid suffix {}{}. Did you mean {0}2?", string, self.consume_char().unwrap())),
                    None => Err(format!("Hit EOF when looking for suffix {}32.", prefix))
                }
            },
            Some('6') => {
                string.push(self.consume_char().unwrap());
                match self.next_char() {
                    Some('4') => {
                        string.push(self.consume_char().unwrap());
                        Ok(string)
                    },
                    Some('\n') | // NL & CR have pesky visual effects.
                    Some('\r') => Err(format!("Invalid suffix {}. Did you mean {}64?", string, prefix)),
                    Some(_) => Err(format!("Invalid suffix {}{}. Did you mean {0}4?", string, self.consume_char().unwrap())),
                    None => Err(format!("Hit EOF when looking for suffix {}64.", prefix))
                }
            },
            Some('\n') | // NL & CR have pesky visual effects.
            Some('\r') => Err(format!("Invalid suffix {}. Did you mean {1}32 or {1}64?", string, prefix)),
            Some(_) => Err(format!("Invalid suffix {}{}. Did you mean {0}32 or {0}64?", string, self.consume_char().unwrap())),
            None => Err(format!("Hit EOF when looking for a suffix {0}32 or {0}64.", prefix))
        }
    }
    // Determines what type of number it is and consume it
    // Entry condition: next_char() is an ASCII digit (guaranteed by the
    // dispatch in Iterator::next). Handles hex (0x...), binary (0b...),
    // ints and floats, each with optional width suffixes.
    fn consume_numeric(&mut self) -> Tokens {
        let mut number = String::new();
        let mut suffix = String::new();
        match self.next_char() {
            Some('0') => {
                self.consume_char();
                match self.next_char() {
                    Some('x') => {
                        // Found hexadecimal: 0x[0-9a-fA-F_]+
                        self.consume_char();
                        number.push_str("0x");
                        // Cant do += for String, and push_str looks better than
                        // number = number + self.consume...
                        number.push_str(&self.consume_while(&mut |ch| match ch {
                            '0'...'9' |
                            'a'...'f' |
                            'A'...'F' |
                            '_' => true,
                            _ => false
                        }));
                        if &number[..] == "0x" {
                            return Error("No hexadecimal value was found.".to_string());
                        }
                        // Attempt to find a suffix if one exists
                        match self.next_char() {
                            Some('u') |
                            Some('i') => {
                                let ch = self.consume_char().unwrap();
                                match self.consume_32_64(ch) {
                                    Ok(s) => suffix.push_str(&s),
                                    Err(err) => return Error(err)
                                };
                            },
                            // Found some other suffix, ie 0x42o
                            Some(c) if c.is_alphanumeric() => {
                                let ch = self.consume_char().unwrap();
                                let err = format!("Invalid suffix {}. Did you mean u32, u64, i32, or i64?", ch);
                                return Error(err);
                            },
                            // If eof or other just return the numeric token without a suffix
                            _ => ()
                        };
                    },
                    Some('b') => {
                        // Found binary: 0b[01_]+
                        self.consume_char();
                        number.push_str("0b");
                        // Formatting the same as the hex case above.
                        number.push_str(&self.consume_while(&mut |ch| match ch {
                            '0' |
                            '1' |
                            '_' => true,
                            _ => false
                        }));
                        if &number[..] == "0b" {
                            return Error("No binary value was found.".to_string());
                        }
                        // Attempt to find a suffix if one exists
                        match self.next_char() {
                            Some('u') |
                            Some('i') => {
                                let ch = self.consume_char().unwrap();
                                match self.consume_32_64(ch) {
                                    Ok(s) => suffix.push_str(&s),
                                    Err(err) => return Error(err)
                                };
                            },
                            // Found some other suffix, ie 0b1o
                            Some(c) if c.is_alphabetic() => {
                                let ch = self.consume_char().unwrap();
                                let err = format!("Invalid suffix {}. Did you mean u32, u64, i32, or i64?", ch);
                                return Error(err);
                            },
                            // If eof or other just return the numeric token without a suffix
                            _ => ()
                        };
                    },
                    _ => return Error(format!("Invalid number type {:?}", self.next_char())),
                }
            },
            _ => {
                // Found int: [0-9]+ or float: [0-9]+.[0-9]+
                number.push_str(&self.consume_while(&mut |ch| match ch {
                    '0'...'9' |
                    '_' => true,
                    _ => false
                }));
                match self.next_char() {
                    // Float decimal point:
                    Some('.') => {
                        number.push(self.consume_char().unwrap());
                        let fractional = self.consume_while(&mut |ch| match ch {
                            '0'...'9' |
                            '_' => true,
                            _ => false
                        });
                        // Check if no decimal values were found
                        match &fractional[..] {
                            "" => return Error("No numbers found after the decimal point.".to_string()),
                            _ => number.push_str(&fractional)
                        }
                        // Find float suffixes
                        match self.next_char() {
                            Some('f') => {
                                let ch = self.consume_char().unwrap();
                                match self.consume_32_64(ch) {
                                    Ok(s) => suffix.push_str(&s),
                                    Err(err) => return Error(err)
                                };
                            },
                            // Found some other suffix, ie 1.5x
                            Some(c) if c.is_alphabetic() => {
                                let ch = self.consume_char().unwrap();
                                let err = format!("Invalid suffix {}. Did you mean f32, f64?", ch);
                                return Error(err);
                            },
                            // No suffix found, can hit symbols or other
                            _ => ()
                        }
                    },
                    // Int suffixes:
                    Some('u') |
                    Some('i') => {
                        let ch = self.consume_char().unwrap();
                        match self.consume_32_64(ch) {
                            Ok(s) => suffix.push_str(&s),
                            Err(err) => return Error(err)
                        };
                    },
                    // Found some other suffix, ie 42o
                    Some(c) if c.is_alphabetic() => {
                        let ch = self.consume_char().unwrap();
                        let err = format!("Invalid suffix {}. Did you mean u32, u64, i32, or i64?", ch);
                        return Error(err);
                    },
                    // Presumably any other remaining char is valid, ie symbols {,[ etc
                    _ => ()
                };
            },
        }
        Numeric(number, suffix.parse::<Types>().ok())
    }
    // Lex a comment. The caller has consumed the first '>' and seen a second:
    // ">>" starts a single-line comment, ">>>" a multi-line comment that must
    // be terminated by "<<<".
    fn consume_comment(&mut self) -> Tokens {
        let mut result = String::new();
        // Consume 2nd '>'
        self.consume_char();
        match self.next_char() {
            // Multiline comments must end in <<< else error
            Some('>') => {
                // Counts consecutive '<' chars; mutated by the closure below.
                let mut sequence = 0u8;
                // Consume 3rd '>'
                self.consume_char();
                result.push_str(&self.consume_while(&mut |ch| match ch {
                    '<' => {
                        sequence += 1;
                        if sequence == 3 {
                            return false;
                        }
                        true
                    },
                    _ => {
                        sequence = 0;
                        true
                    }
                }));
                // Should be able to consume the last <
                if self.consume_char() != Some('<') {
                    return Error("Hit eof before end of multi-line comment.".to_string())
                }
                // Remove << from end of the comment string
                result.pop();
                result.pop();
            },
            // Single line comments eat up anything until newline or eof
            Some(_) => {
                result.push_str(&self.consume_while(&mut |ch| match ch {
                    '\n' => false,
                    _ => true
                }));
            },
            // Single line comment w/ EOF at start should be valid:
            None => ()
        }
        Comment(result)
    }
fn consume_tabs(&mut self) -> Tokens {
let mut count = 0u64;
// Consume the newline token, count tabs
self.consume_char();
self.consume_while(&mut |ch| match ch {
'\t' => {
count += 1;
true
},
_ => false
});
Indent(count)
}
fn escape_char(ch: char) -> Result<char, String> {
// Does not include unicode escapes
match ch {
'\''=> Ok('\''),
'\"'=> Ok('\"'),
'\\'=> Ok('\\'),
'n' => Ok('\n'),
'r' => Ok('\r'),
't' => Ok('\t'),
'\n'=> Ok(' '), // Escape newline?
_ => Err(format!("Unknown character escape: \\{}", ch))
}
}
    // Lex a character literal 'c' (including escapes such as '\n').
    // The caller has peeked the opening quote but not consumed it.
    fn consume_char_literal(&mut self) -> Tokens {
        let ch: char;
        // Consume first '
        self.consume_char();
        // Get the character or two if escaped
        match self.consume_char() {
            Some('\\') => {
                match self.consume_char() {
                    Some(ch2) => {
                        match Lexer::escape_char(ch2) {
                            Ok(esc) => ch = esc,
                            Err(msg) => return Error(msg)
                        };
                    },
                    None => return Error("Hit eof before end of character literal.".to_string())
                };
            },
            Some('\'') => return Error("Empty character literal is invalid.".to_string()),
            Some(c) => ch = c,
            None => return Error("Hit eof before end of character literal.".to_string())
        };
        // Get the final '
        match self.consume_char() {
            Some('\'') => CharLiteral(ch),
            _ => Error("Char literal was not closed with a '".to_string())
        }
    }
    // This is currently set up to accept multi line strings
    // A backslash at EOF falls through to the final EOF error below.
    fn consume_string_literal(&mut self) -> Tokens {
        let mut result = String::new();
        // Consume first "
        self.consume_char();
        // Consume until closing "
        loop {
            match self.consume_char() {
                // Ignore any carriage returns
                Some('\r') => continue,
                // Handle Escape chars
                Some('\\') => {
                    if let Some(ch) = self.consume_char() {
                        match Lexer::escape_char(ch) {
                            Ok(ch) => result.push(ch),
                            Err(e) => return Error(e)
                        }
                    }
                },
                // End at a closing "
                Some('\"') => return StrLiteral(result),
                Some(ch) => result.push(ch),
                None => break
            };
        };
        Error("Hit EOF before end of string literal.".to_string())
    }
}
impl<'a> Iterator for Lexer<'a> {
    type Item = Tokens;
    // Parse the file where it left off and return the next token
    // Dispatch is on the peeked char; each helper consumes what it needs.
    fn next(&mut self) -> Option<Self::Item> {
        self.consume_whitespace();
        let tok = match self.next_char() {
            // Find Keywords and Identifiers
            Some(a) if a.is_alphabetic() || a == '_' => self.consume_identifier(),
            // Find ints, floats, hex, and bin numeric values
            Some(n) if n.is_digit(10) => self.consume_numeric(),
            // Count tabs: \n\t*
            Some('\n') => self.consume_tabs(),
            // Error: Found tabs without preceeding newline
            Some('\t') => {
                self.consume_char().unwrap();
                Error("Found an out of place tab.".to_string())
            },
            // Find single-char symbols
            Some('(') | Some(')') |
            Some('[') | Some(']') |
            Some('{') | Some('}') |
            Some('.') |
            Some(',') |
            Some(':') |
            Some('^') |
            Some('~') |
            Some('=') => {
                let punc = self.consume_char().unwrap().to_string();
                self.symbols_token(&punc)
            },
            // Find multi-char(+=, -=, ..) or the single-char version
            Some('+') |
            Some('*') |
            Some('/') |
            Some('%') => {
                let mut punc = self.consume_char().unwrap().to_string();
                if let Some('=') = self.next_char() {
                    punc.push(self.consume_char().unwrap());
                }
                self.symbols_token(&punc)
            },
            // Find -, -=, -> symbols
            Some('-') => {
                let mut punc = self.consume_char().unwrap().to_string();
                // = and > are adjacent chars, provides a nice if let:
                if let Some('='...'>') = self.next_char() {
                    punc.push(self.consume_char().unwrap())
                }
                self.symbols_token(&punc)
            },
            // Find >> and >>> comments, otherwise > or >= symbols
            Some('>') => {
                self.consume_char();
                match self.next_char() {
                    Some('>') => self.consume_comment(),
                    Some('=') => {
                        self.consume_char();
                        self.symbols_token(">=")
                    },
                    _ => self.symbols_token(">")
                }
            },
            // Find < and <= symbols
            Some('<') => {
                self.consume_char();
                match self.next_char() {
                    Some('=') => {
                        self.consume_char();
                        self.symbols_token("<=")
                    },
                    _ => self.symbols_token("<")
                }
            },
            // Find character literals, 'c', including ascii escape chars
            Some('\'') => self.consume_char_literal(),
            // Find string literals, "String"
            Some('\"') => self.consume_string_literal(),
            Some(ch) => Error(format!("Unknown character ({}).", ch)),
            None => EOF
        };
        // NOTE(review): `expect` presumably tests the token against EOF so the
        // iterator terminates at end of input — confirm Tokens::expect semantics.
        if tok.expect(EOF) {
            return None;
        }
        Some(tok)
    }
}
/// Marker trait for types that can produce a token stream.
pub trait Tokenizer {}
impl<'a> Tokenizer for Lexer<'a> {}
|
use crate::name::{Name, COMPRESS_POINTER_MARK16, COMPRESS_POINTER_MARK8, MAX_LABEL_COUNT};
use crate::util::{InputBuffer, OutputBuffer};
use anyhow::Result;
// Highest buffer position that fits in a 14-bit compression pointer.
const MAX_COMPRESS_POINTER: usize = 0x3fff;
// Seed mixed into the per-label hash.
const HASH_SEED: u32 = 0x9e37_79b9;

/// A name-table entry packed into one u64:
/// bits 32..64 = hash, bits 16..32 = buffer position, bits 0..8 = length.
#[derive(Clone, Copy, Default)]
struct OffSetItem(u64);

impl OffSetItem {
    /// Pack length, position, and hash into a single word.
    pub fn new(len: u8, pos: u16, hash: u32) -> Self {
        let packed = (u64::from(hash) << 32) | (u64::from(pos) << 16) | u64::from(len);
        OffSetItem(packed)
    }

    /// Name length stored in the low 8 bits.
    #[inline]
    pub fn len(&self) -> u8 {
        self.0 as u8
    }

    /// Buffer position stored in bits 16..32.
    #[inline]
    pub fn pos(&self) -> u16 {
        (self.0 >> 16) as u16
    }

    /// Name hash stored in the high 32 bits.
    #[inline]
    pub fn hash(&self) -> u32 {
        (self.0 >> 32) as u32
    }
}
/// Compares a name already rendered into the output buffer (following
/// compression pointers) against a candidate name with a precomputed hash.
#[derive(Clone, Copy)]
struct NameComparator<'a, 'b> {
    // Rendered message bytes to read labels from.
    buffer: &'a OutputBuffer<'b>,
    // Hash of the name being matched, for a cheap early reject.
    hash: u32,
}
/// A view into a `Name` starting at a given label depth, so the same name
/// can be examined suffix by suffix.
struct NameRef<'a> {
    // Number of leading labels stripped from the view.
    parent_level: u8,
    // The underlying name.
    name: &'a Name,
}
impl<'a> NameRef<'a> {
    /// View `name` starting at its first label.
    fn from_name(name: &'a Name) -> Self {
        NameRef {
            parent_level: 0,
            name,
        }
    }
    /// Advance the view to the parent (strip one leading label).
    fn parent(&mut self) {
        self.parent_level += 1;
    }
    /// True when only the final (root) label remains.
    fn is_root(&self) -> bool {
        self.parent_level + 1 == self.name.label_count() as u8
    }
    /// Raw wire bytes of the current suffix.
    fn raw_data(&self) -> &[u8] {
        let offset = self.name.offsets()[self.parent_level as usize] as usize;
        &self.name.raw_data()[offset..]
    }
    /// Shift-add-xor style hash over the current suffix bytes.
    fn hash(&self) -> u32 {
        self.raw_data().iter().fold(0, |hash, c| {
            hash ^ (u32::from(*c)
                .wrapping_add(HASH_SEED)
                .wrapping_add(hash << 6)
                .wrapping_add(hash >> 2))
        })
    }
}
impl<'a, 'b> NameComparator<'a, 'b> {
    /// Compare the name stored at `item`'s buffer position against the name
    /// in `name_buffer`, following compression pointers in the buffer.
    /// Returns `Ok(false)` at the first mismatch, `Ok(true)` on full match.
    pub fn compare(self, item: OffSetItem, name_buffer: &mut InputBuffer) -> Result<bool> {
        // Cheap rejects first: the hash and total length must match.
        if item.hash() != self.hash || item.len() != (name_buffer.len() as u8) {
            return Ok(false);
        }
        let mut item_pos = item.pos();
        loop {
            // (label length, position just past the length byte) after any
            // pointer chasing.
            let label = self.next_label(item_pos)?;
            let mut name_label_len = name_buffer.read_u8()?;
            if name_label_len != label.0 {
                return Ok(false);
            } else if name_label_len == 0 {
                // Zero-length (root) label: both names ended together.
                break;
            }
            item_pos = label.1;
            // Compare the label bytes one at a time.
            while name_label_len > 0 {
                let ch1 = self.buffer.at(item_pos as usize)?;
                let ch2 = name_buffer.read_u8()?;
                if ch1 != ch2 {
                    return Ok(false);
                }
                item_pos += 1;
                name_label_len -= 1;
            }
        }
        Ok(true)
    }
    /// Resolve the label at `pos`, following any chain of compression
    /// pointers (bytes with the pointer mark set). Returns the label length
    /// and the position of its first data byte.
    fn next_label(&self, pos: u16) -> Result<(u8, u16)> {
        let mut next_pos = pos as usize;
        let mut b = self.buffer.at(next_pos)?;
        while b & COMPRESS_POINTER_MARK8 == COMPRESS_POINTER_MARK8 {
            // Pointer target = low 6 bits of this byte * 256 + next byte.
            let nb = u16::from(self.buffer.at(next_pos + 1)?);
            next_pos = (u16::from(b & !(COMPRESS_POINTER_MARK8 as u8)) * 256 + nb) as usize;
            b = self.buffer.at(next_pos)?;
        }
        Ok((b, (next_pos + 1) as u16))
    }
}
// Number of hash buckets in the compression table.
const BUCKETS: usize = 64;
// Pre-allocated entries per bucket; once a bucket is (nearly) full,
// further names are simply not registered for compression.
const RESERVED_ITEMS: usize = 16;
// Sentinel meaning "name not found in the table".
const NO_OFFSET: u16 = 65535;
/// Serializes a message into a wire-format buffer, compressing repeated
/// names via a hash table of previously written name suffixes.
pub struct MessageRender<'a> {
    // Destination wire buffer.
    buffer: OutputBuffer<'a>,
    // Hash-bucketed table of names already written.
    table: Vec<Vec<OffSetItem>>,
    // Number of live entries per bucket.
    item_counts: [usize; BUCKETS],
    // Scratch space for per-label hashes while writing one name.
    label_hashes: [u32; MAX_LABEL_COUNT as usize],
}
impl<'a> MessageRender<'a> {
    /// Create a renderer that writes into `buf`, with an empty
    /// compression table.
    pub fn new(buf: &'a mut [u8]) -> Self {
        MessageRender {
            buffer: OutputBuffer::new(buf),
            table: vec![vec![OffSetItem::default(); RESERVED_ITEMS]; BUCKETS],
            item_counts: [0; BUCKETS],
            label_hashes: [0; MAX_LABEL_COUNT as usize],
        }
    }
    /// Look up a previously written name with the given hash; returns its
    /// buffer position, or `NO_OFFSET` when it was never registered.
    fn find_offset(&self, name_buffer: &mut InputBuffer, hash: u32) -> Result<u16> {
        let bucket_id = hash % (BUCKETS as u32);
        let comparator = NameComparator {
            buffer: &self.buffer,
            hash,
        };
        let items = &self.table[bucket_id as usize];
        let item_count = self.item_counts[bucket_id as usize];
        for item in &items[0..item_count] {
            if comparator.compare(*item, name_buffer)? {
                return Ok(item.pos());
            }
        }
        Ok(NO_OFFSET)
    }
    /// Register a freshly written name so later writes can point at it.
    fn add_offset(&mut self, hash: u32, offset: u16, len: u8) {
        let bucket_id = hash % (BUCKETS as u32);
        let item_count = self.item_counts[bucket_id as usize];
        // Bucket (almost) full: give up on compressing this name.
        if item_count + 1 == RESERVED_ITEMS {
            return;
        }
        self.table[bucket_id as usize][item_count] = OffSetItem::new(len, offset, hash);
        self.item_counts[bucket_id as usize] = item_count + 1;
    }
    /// Write `name` into the buffer. When `compress` is set, any suffix that
    /// was written before is replaced by a compression pointer, and newly
    /// written suffixes are registered for future reuse.
    pub fn write_name(&mut self, name: &Name, compress: bool) -> Result<()> {
        let label_count = name.label_count();
        let mut label_uncompressed = 0;
        let mut offset = NO_OFFSET;
        let mut parent = NameRef::from_name(name);
        // Walk suffixes from the full name toward the root, stopping at the
        // first suffix already present in the table (when compressing).
        while label_uncompressed < label_count {
            if label_uncompressed > 0 {
                parent.parent();
            }
            if parent.is_root() {
                label_uncompressed += 1;
                break;
            }
            self.label_hashes[label_uncompressed] = parent.hash();
            if compress {
                offset = self.find_offset(
                    &mut InputBuffer::new(parent.raw_data()),
                    self.label_hashes[label_uncompressed],
                )?;
                if offset != NO_OFFSET {
                    break;
                }
            }
            label_uncompressed += 1;
        }
        let mut name_pos = self.buffer.len();
        if !compress || label_uncompressed == label_count {
            // No shared suffix found: emit the whole name.
            self.buffer.write_bytes(name.raw_data())?;
        } else if label_uncompressed > 0 {
            // Emit only the labels preceding the shared suffix.
            let pos = name.offsets()[label_uncompressed as usize];
            self.buffer
                .write_bytes(&name.raw_data()[0..(pos as usize)])?;
        }
        if compress && (offset != NO_OFFSET) {
            // Reference the shared suffix with a compression pointer.
            offset |= COMPRESS_POINTER_MARK16;
            self.buffer.write_u16(offset)?;
        }
        // Register each freshly written suffix for future compression.
        let mut name_len = name.len();
        for i in 0..label_uncompressed {
            let label_len = self.buffer.at(name_pos)?;
            if label_len == 0 {
                break;
            }
            // Positions beyond 14 bits cannot be referenced by a pointer.
            if name_pos > MAX_COMPRESS_POINTER {
                break;
            }
            let hash = self.label_hashes[i];
            self.add_offset(hash, name_pos as u16, name_len as u8);
            name_pos += (label_len + 1) as usize;
            name_len -= (label_len + 1) as usize;
        }
        Ok(())
    }
    /// Number of bytes written so far.
    pub fn len(&self) -> usize {
        self.buffer.len()
    }
    /// True when nothing has been written yet.
    pub fn is_empty(&self) -> bool {
        self.buffer.len() == 0
    }
    /// Advance the write position by `len` bytes without writing.
    pub fn skip(&mut self, len: usize) -> Result<()> {
        self.buffer.skip(len)
    }
    /// Append one byte.
    pub fn write_u8(&mut self, d: u8) -> Result<()> {
        self.buffer.write_u8(d)
    }
    /// Overwrite the byte at `pos`.
    pub fn write_u8_at(&mut self, pos: usize, d: u8) -> Result<()> {
        self.buffer.write_u8_at(pos, d)
    }
    /// Append a u16.
    pub fn write_u16(&mut self, d: u16) -> Result<()> {
        self.buffer.write_u16(d)
    }
    /// Overwrite the u16 at `pos`.
    pub fn write_u16_at(&mut self, pos: usize, d: u16) -> Result<()> {
        self.buffer.write_u16_at(pos, d)
    }
    /// Append a u32.
    pub fn write_u32(&mut self, d: u32) -> Result<()> {
        self.buffer.write_u32(d)
    }
    /// Append raw bytes.
    pub fn write_bytes(&mut self, data: &[u8]) -> Result<()> {
        self.buffer.write_bytes(data)
    }
    /// Truncate the rendered output back to `pos` bytes.
    pub fn truncate(&mut self, pos: usize) -> Result<()> {
        self.buffer.truncate(pos)
    }
    // Reset the buffer and forget every registered compression offset.
    pub fn reset(&mut self) {
        self.buffer.reset();
        self.table.iter_mut().for_each(|v| v.fill(OffSetItem(0)));
        self.item_counts.fill(0);
        self.label_hashes.fill(0);
    }
    /// The bytes written so far.
    pub fn data(&self) -> &[u8] {
        self.buffer.data()
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::name::Name;
    use crate::response::Response;
    use crate::util::hex::from_hex;
    #[test]
    fn test_write_name() {
        let a_example_com = Name::new("a.example.com").unwrap();
        let b_example_com = Name::new("b.example.com").unwrap();
        let a_example_org = Name::new("a.example.org").unwrap();
        let mut buf = [0; 0x3fff + 512];
        let mut render = MessageRender::new(&mut buf);
        // b.example.com shares "example.com" with the first name (pointer
        // c002); a.example.org shares no compressible suffix.
        let raw = from_hex("0161076578616d706c6503636f6d000162c0020161076578616d706c65036f726700")
            .unwrap();
        render.write_name(&a_example_com, true).unwrap();
        render.write_name(&b_example_com, true).unwrap();
        render.write_name(&a_example_org, true).unwrap();
        assert_eq!(raw.as_slice(), &buf[0..raw.len()]);
        // Writing at offset 0x3fff: the first name is still referencable
        // (pointer bytes ffff), but labels past the 14-bit limit are not
        // registered, so b.example.com is written out in full.
        let raw =
            from_hex("0161076578616d706c6503636f6d00ffff0162076578616d706c6503636f6d00").unwrap();
        let mut render = MessageRender::new(&mut buf);
        let offset: usize = 0x3fff;
        render.skip(offset).unwrap();
        render.write_name(&a_example_com, true).unwrap();
        render.write_name(&a_example_com, true).unwrap();
        render.write_name(&b_example_com, true).unwrap();
        assert_eq!(raw.as_slice(), &render.data()[offset..]);
        // An uncompressed write still registers its labels for later reuse
        // (the third write compresses to pointer c00f).
        let raw =
            from_hex("0161076578616d706c6503636f6d000162076578616d706c6503636f6d00c00f").unwrap();
        render.reset();
        render.write_name(&a_example_com, true).unwrap();
        render.write_name(&b_example_com, false).unwrap();
        render.write_name(&b_example_com, true).unwrap();
        assert_eq!(raw.as_slice(), render.data());
        // A whole-name match compresses down to a single pointer.
        let raw = from_hex("0161076578616d706c6503636f6d000162c002c00f").unwrap();
        render.reset();
        render.write_name(&a_example_com, true).unwrap();
        render.write_name(&b_example_com, true).unwrap();
        render.write_name(&b_example_com, true).unwrap();
        assert_eq!(raw.as_slice(), render.data());
        // Round-trip: parse a captured wire response and re-render it
        // byte-identically.
        let raw =
            from_hex("e3808583000100000001000001320131033136380331393207696e2d61646472046172706100000c0001033136380331393207494e2d4144445204415250410000060001000151800017c02a00000000000000708000001c2000093a8000015180").unwrap();
        let mut render = MessageRender::new(&mut buf);
        let msg = Response::from_wire(raw.as_slice()).unwrap();
        msg.to_wire(&mut render).unwrap();
        assert_eq!(raw.as_slice(), render.data());
    }
}
|
use input::Input;
use input::evaluator::InputEvaluatorRef;
use parser::{ Evaluator, Node };
use util;
// Panic
//
// Ramps up a fan to full speed once a certain
// condition is met. Otherwise, returns zero so
// that this can be used with the accumulators.
pub struct Panic {
    // Upstream input producing the temperature reading.
    input : Box<Input>,
    // Once panicked, stay panicked until the reading drops below this.
    temp_target : f64,
    // Reaching this temperature triggers a panic.
    temp_critical : f64,
    // Current hysteresis state.
    is_panicked : bool,
}
impl Panic {
    /// Wrap `input_v` in a panic controller with the given target and
    /// critical temperatures.
    pub fn create(temp_target_v: f64, temp_critical_v: f64, input_v: Box<Input>) -> Box<Input> {
        let panic = Panic {
            input : input_v,
            temp_target : temp_target_v,
            temp_critical : temp_critical_v,
            is_panicked : false,
        };
        Box::new(panic)
    }
}
impl Input for Panic {
    /// Return 1.0 while panicked and 0.0 otherwise. Panic begins when the
    /// upstream reading reaches `temp_critical` and only ends once it drops
    /// below `temp_target` (hysteresis).
    fn compute(&mut self) -> f64 {
        let reading = self.input.compute();
        self.is_panicked = if self.is_panicked {
            reading >= self.temp_target
        } else {
            reading >= self.temp_critical
        };
        if self.is_panicked { 1.0 } else { 0.0 }
    }
}
///////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////
/// Parser-side evaluator that builds `Panic` inputs from config nodes.
pub struct EvalPanic {
    // Evaluator used to parse the wrapped input (the third node).
    input : InputEvaluatorRef,
}
impl EvalPanic {
    /// Create an evaluator that delegates input parsing to `input_v`.
    pub fn new(input_v : InputEvaluatorRef) -> EvalPanic {
        EvalPanic { input : input_v }
    }
}
impl Evaluator<Box<Input>> for EvalPanic {
    /// Build a `Panic` from a `panic` node list:
    /// node 0 = target temperature, node 1 = critical temperature,
    /// node 2 = the wrapped input expression.
    fn parse_nodes(&self, nodes: &[Node]) -> Result<Box<Input>, String> {
        Ok(Panic::create(
            try!(util::get_num_node::<f64>("panic", nodes, 0)),
            try!(util::get_num_node::<f64>("panic", nodes, 1)),
            try!(self.input.borrow().parse_node(
                try!(util::get_node("panic", nodes, 2))))))
    }
}
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use failure::Error;
use futures::channel::mpsc;
use rouille::{self, router, Request, Response};
use std::thread;
// Listen on the IPv6 "any" address.
const SERVER_IP: &str = "::";
const SERVER_PORT: &str = "8880";
/// Events the setup HTTP server reports to the async side.
pub enum SetupEvent {
    // The root document ("/") was requested.
    Root,
}
/// Handle one HTTP request: GET / notifies the async side through
/// `rouille_sender` and serves the root document; anything else is a 404.
fn serve(request: &Request, rouille_sender: mpsc::UnboundedSender<SetupEvent>) -> Response {
    router!(request,
        (GET) (/) => {
            rouille_sender.unbounded_send(SetupEvent::Root).expect("Async thread closed the channel.");
            rouille::Response::text("Root document\n")
        },
        _ => {
            rouille::Response::text("Unknown command\n").with_status_code(404)
        }
    )
}
/// Spawn the blocking rouille HTTP server on its own thread and return the
/// receiver on which `SetupEvent`s arrive.
///
/// Errors only if the server thread cannot be spawned.
pub fn start_server() -> Result<mpsc::UnboundedReceiver<SetupEvent>, Error> {
    println!("recovery: start_server");
    let address = format!("{}:{}", SERVER_IP, SERVER_PORT);
    let (rouille_sender, async_receiver) = mpsc::unbounded();
    // rouille::start_server blocks forever, so it gets a dedicated thread.
    thread::Builder::new().name("setup-server".into()).spawn(move || {
        rouille::start_server(address, move |request| serve(&request, rouille_sender.clone()));
    })?;
    Ok(async_receiver)
}
|
use crate::{
field::{BillingMethod, Currency},
structures::Structure,
};
use retriever::traits::record::Record;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
#[derive(Debug, Deserialize, Serialize, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Client {
    // Client name; also serves as the record's item key.
    pub name: String,
    pub address: String,
    // Currency this client is billed in.
    pub currency: Currency,
    // How this client is billed.
    pub billing: BillingMethod,
}
/// Storage layout: all clients live in one "clients" store, keyed by name,
/// with no chunking (unit chunk key).
impl Structure for Client {
    const STORE: &'static str = "clients";
    type ChunkKeys = ();
    type ItemKeys = String;
}
impl Record<<Self as Structure>::ChunkKeys, <Self as Structure>::ItemKeys> for Client {
    /// Every client lives in the single unit chunk.
    fn chunk_key(&self) -> Cow<()> {
        Cow::Owned(())
    }
    /// Clients are keyed by their name.
    fn item_key(&self) -> Cow<String> {
        Cow::Borrowed(&self.name)
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/// This module tests the property that pkg_resolver does not enter a bad
/// state (successfully handles retries) when the TUF server errors while
/// servicing fuchsia.pkg.PackageResolver.Resolve FIDL requests.
use {
super::*,
fuchsia_merkle::MerkleTree,
fuchsia_pkg_testing::{serve::UriPathHandler, RepositoryBuilder},
futures::future::{ready, BoxFuture},
hyper::{header::CONTENT_LENGTH, Body, Response},
matches::assert_matches,
parking_lot::Mutex,
std::{
path::{Path, PathBuf},
sync::Arc,
},
};
/// Wraps a `UriPathHandler` so it is applied to `path_to_override` only the
/// first time that path is requested; later requests pass through untouched.
struct OverrideOnceUriPathHandler {
    // Set once the override has fired.
    already_overridden: Mutex<bool>,
    // The URI path whose first response gets rewritten.
    path_to_override: PathBuf,
    // Handler that rewrites that first response.
    path_handler: Box<dyn UriPathHandler>,
}
impl OverrideOnceUriPathHandler {
    /// Build a handler that overrides `path_to_override` exactly once
    /// using `path_handler`.
    fn new(path_to_override: impl Into<PathBuf>, path_handler: impl UriPathHandler) -> Self {
        Self {
            already_overridden: Mutex::new(false),
            path_to_override: path_to_override.into(),
            path_handler: Box::new(path_handler),
        }
    }
}
impl UriPathHandler for OverrideOnceUriPathHandler {
    fn handle(&self, uri_path: &Path, response: Response<Body>) -> BoxFuture<Response<Body>> {
        // Requests for other paths pass through unchanged.
        if uri_path != self.path_to_override {
            return ready(response).boxed();
        }
        // Only the first matching request gets rewritten.
        let mut already_overridden = self.already_overridden.lock();
        if *already_overridden {
            return ready(response).boxed();
        }
        *already_overridden = true;
        self.path_handler.handle(uri_path, response)
    }
}
/// Serve `pkg` from a repo whose responses are tampered with once by
/// `uri_handler`; assert the first resolve fails with `failure_status` and
/// that the retry succeeds with correct package contents.
async fn verify_resolve_fails_then_succeeds(
    pkg: Package,
    uri_handler: OverrideOnceUriPathHandler,
    failure_status: Status,
) {
    let env = TestEnv::new();
    let repo = Arc::new(
        RepositoryBuilder::from_template_dir(EMPTY_REPO_PATH)
            .add_package(&pkg)
            .build()
            .await
            .unwrap(),
    );
    let served_repository =
        repo.build_server().uri_path_override_handler(uri_handler).start().unwrap();
    let repo_url = "fuchsia-pkg://test".parse().unwrap();
    let repo_config = served_repository.make_repo_config(repo_url);
    env.proxies.repo_manager.add(repo_config.into()).await.unwrap();
    env.set_experiment_state(Experiment::DownloadBlob, true).await;
    let pkg_url = format!("fuchsia-pkg://test/{}", pkg.name());
    // First resolve hits the tampered response and must fail as expected.
    assert_matches!(env.resolve_package(&pkg_url).await, Err(status) if status == failure_status);
    // The one-shot override is consumed, so the retry must succeed.
    let package_dir = env.resolve_package(&pkg_url).await.expect("package to resolve");
    pkg.verify_contents(&package_dir).await.expect("correct package contents");
    env.stop().await;
}
/// Replaces any response with an empty 404.
struct NotFoundUriPathHandler;
impl UriPathHandler for NotFoundUriPathHandler {
    fn handle(&self, _uri_path: &Path, _response: Response<Body>) -> BoxFuture<Response<Body>> {
        ready(
            Response::builder()
                .status(hyper::StatusCode::NOT_FOUND)
                .body(Body::empty())
                .expect("valid response"),
        )
        .boxed()
    }
}
#[fasync::run_singlethreaded(test)]
async fn second_resolve_succeeds_when_far_404() {
    let pkg = make_rolldice_pkg_with_extra_blobs(1).await;
    // 404 the meta.far blob on its first request only.
    let path_to_override = format!("/blobs/{}", pkg.meta_far_merkle_root());
    verify_resolve_fails_then_succeeds(
        pkg,
        OverrideOnceUriPathHandler::new(path_to_override, NotFoundUriPathHandler),
        Status::UNAVAILABLE,
    )
    .await
}
#[fasync::run_singlethreaded(test)]
async fn second_resolve_succeeds_when_blob_404() {
    let pkg = make_rolldice_pkg_with_extra_blobs(1).await;
    // 404 a content blob (identified by its merkle root) on its first request.
    let path_to_override = format!(
        "/blobs/{}",
        MerkleTree::from_reader(extra_blob_contents(0).as_slice()).expect("merkle slice").root()
    );
    verify_resolve_fails_then_succeeds(
        pkg,
        OverrideOnceUriPathHandler::new(path_to_override, NotFoundUriPathHandler),
        Status::UNAVAILABLE,
    )
    .await
}
/// Serves all but the final byte of the body and then surfaces a stream
/// error, while Content-Length still advertises the full size.
struct OneByteShortThenErrorUriPathHandler;
impl UriPathHandler for OneByteShortThenErrorUriPathHandler {
    fn handle(&self, _uri_path: &Path, response: Response<Body>) -> BoxFuture<Response<Body>> {
        async {
            let mut bytes = body_to_bytes(response.into_body()).await;
            // An empty body cannot be shortened by one byte.
            if bytes.pop().is_none() {
                panic!("can't short 0 bytes");
            }
            Response::builder()
                .status(hyper::StatusCode::OK)
                // Advertise one more byte than will actually be sent.
                .header(CONTENT_LENGTH, bytes.len() + 1)
                .body(Body::wrap_stream(
                    futures::stream::iter(vec![
                        Ok(bytes),
                        Err("all_but_one_byte_then_error has sent all but one bytes".to_string()),
                    ])
                    .compat(),
                ))
                .expect("valid response")
        }
        .boxed()
    }
}
// If the body of an https response is not large enough, hyper will download the body
// along with the header in the initial fuchsia_hyper::HttpsClient.request(). This means
// that even if the body is implemented with a stream that fails before the transfer is
// complete, the failure will occur during the initial request and before the batch loop
// that writes to pkgfs/blobfs. Value was found experimentally.
const FILE_SIZE_LARGE_ENOUGH_TO_TRIGGER_HYPER_BATCHING: usize = 1_000_000;
#[fasync::run_singlethreaded(test)]
async fn second_resolve_succeeds_when_far_errors_mid_download() {
    // meta.far must be large enough that the error surfaces mid-transfer.
    let pkg = PackageBuilder::new("large_meta_far")
        .add_resource_at(
            "meta/large_file",
            vec![0; FILE_SIZE_LARGE_ENOUGH_TO_TRIGGER_HYPER_BATCHING].as_slice(),
        )
        .build()
        .await
        .unwrap();
    let path_to_override = format!("/blobs/{}", pkg.meta_far_merkle_root());
    verify_resolve_fails_then_succeeds(
        pkg,
        OverrideOnceUriPathHandler::new(path_to_override, OneByteShortThenErrorUriPathHandler),
        Status::UNAVAILABLE,
    )
    .await
}
#[fasync::run_singlethreaded(test)]
async fn second_resolve_succeeds_when_blob_errors_mid_download() {
    // Same as above, but the failing transfer is a content blob.
    let blob = vec![0; FILE_SIZE_LARGE_ENOUGH_TO_TRIGGER_HYPER_BATCHING];
    let pkg = PackageBuilder::new("large_blob")
        .add_resource_at("blobbity/blob", blob.as_slice())
        .build()
        .await
        .unwrap();
    let path_to_override = format!(
        "/blobs/{}",
        MerkleTree::from_reader(blob.as_slice()).expect("merkle slice").root()
    );
    verify_resolve_fails_then_succeeds(
        pkg,
        OverrideOnceUriPathHandler::new(path_to_override, OneByteShortThenErrorUriPathHandler),
        Status::UNAVAILABLE,
    )
    .await
}
/// Serves all but the final byte of the body and then ends the stream,
/// while Content-Length still advertises the full size — the client sees a
/// premature disconnect.
struct OneByteShortThenDisconnectUriPathHandler;
impl UriPathHandler for OneByteShortThenDisconnectUriPathHandler {
    fn handle(&self, _uri_path: &Path, response: Response<Body>) -> BoxFuture<Response<Body>> {
        async {
            let mut bytes = body_to_bytes(response.into_body()).await;
            // An empty body cannot be shortened by one byte.
            if bytes.pop().is_none() {
                panic!("can't short 0 bytes");
            }
            Response::builder()
                .status(hyper::StatusCode::OK)
                // Advertise one more byte than will actually be sent.
                .header(CONTENT_LENGTH, bytes.len() + 1)
                .body(Body::wrap_stream(
                    futures::stream::iter(vec![Result::<Vec<u8>, String>::Ok(bytes)]).compat(),
                ))
                .expect("valid response")
        }
        .boxed()
    }
}
#[fasync::run_singlethreaded(test)]
async fn second_resolve_succeeds_disconnect_before_far_complete() {
    // meta.far must be large enough that the disconnect happens mid-transfer.
    let pkg = PackageBuilder::new("large_meta_far")
        .add_resource_at(
            "meta/large_file",
            vec![0; FILE_SIZE_LARGE_ENOUGH_TO_TRIGGER_HYPER_BATCHING].as_slice(),
        )
        .build()
        .await
        .unwrap();
    let path_to_override = format!("/blobs/{}", pkg.meta_far_merkle_root());
    verify_resolve_fails_then_succeeds(
        pkg,
        OverrideOnceUriPathHandler::new(path_to_override, OneByteShortThenDisconnectUriPathHandler),
        Status::UNAVAILABLE,
    )
    .await
}
#[fasync::run_singlethreaded(test)]
async fn second_resolve_succeeds_disconnect_before_blob_complete() {
    // Same as above, but the truncated transfer is a content blob.
    let blob = vec![0; FILE_SIZE_LARGE_ENOUGH_TO_TRIGGER_HYPER_BATCHING];
    let pkg = PackageBuilder::new("large_blob")
        .add_resource_at("blobbity/blob", blob.as_slice())
        .build()
        .await
        .unwrap();
    let path_to_override = format!(
        "/blobs/{}",
        MerkleTree::from_reader(blob.as_slice()).expect("merkle slice").root()
    );
    verify_resolve_fails_then_succeeds(
        pkg,
        OverrideOnceUriPathHandler::new(path_to_override, OneByteShortThenDisconnectUriPathHandler),
        Status::UNAVAILABLE,
    )
    .await
}
/// Inverts the first byte of the response body, corrupting the content
/// while keeping its length intact.
struct OneByteFlippedUriPathHandler;
impl UriPathHandler for OneByteFlippedUriPathHandler {
    fn handle(&self, _uri_path: &Path, response: Response<Body>) -> BoxFuture<Response<Body>> {
        async {
            let mut bytes = body_to_bytes(response.into_body()).await;
            if bytes.is_empty() {
                panic!("can't flip 0 bytes");
            }
            // Flip every bit of the first byte so the content no longer
            // matches its merkle root.
            bytes[0] = !bytes[0];
            Response::builder()
                .status(hyper::StatusCode::OK)
                .body(bytes.into())
                .expect("valid response")
        }
        .boxed()
    }
}
#[fasync::run_singlethreaded(test)]
async fn second_resolve_succeeds_when_far_corrupted() {
    let pkg = make_rolldice_pkg_with_extra_blobs(1).await;
    // Corrupt the meta.far once; the first resolve must fail with IO.
    let path_to_override = format!("/blobs/{}", pkg.meta_far_merkle_root());
    verify_resolve_fails_then_succeeds(
        pkg,
        OverrideOnceUriPathHandler::new(path_to_override, OneByteFlippedUriPathHandler),
        Status::IO,
    )
    .await
}
#[fasync::run_singlethreaded(test)]
async fn second_resolve_succeeds_when_blob_corrupted() {
    let pkg = make_rolldice_pkg_with_extra_blobs(1).await;
    let blob = extra_blob_contents(0);
    // Corrupt a content blob once; the first resolve must fail with IO.
    let path_to_override = format!(
        "/blobs/{}",
        MerkleTree::from_reader(blob.as_slice()).expect("merkle slice").root()
    );
    verify_resolve_fails_then_succeeds(
        pkg,
        OverrideOnceUriPathHandler::new(path_to_override, OneByteFlippedUriPathHandler),
        Status::IO,
    )
    .await
}
|
/// Types that can be constructed from a raw line of user input.
trait Inputable {
    fn input(arg: impl Into<String>) -> Self;
}

impl Inputable for i32 {
    /// Parse the line as an integer after trimming surrounding whitespace
    /// (including the trailing newline). Panics on invalid input.
    fn input(arg: impl Into<String>) -> Self {
        let raw: String = arg.into();
        raw.trim().parse().unwrap()
    }
}
// Read one line from stdin into `$arg`.
// `leer_teclado!(x)` binds the raw line as a `String`;
// `leer_teclado!(x: T)` parses the line through the `Inputable` trait.
macro_rules! leer_teclado {
    ($arg: tt: $type: ty) => {
        let mut input = String::new();
        let _ = std::io::stdin().read_line(&mut input);
        let $arg: $type = Inputable::input(input);
    };
    ($arg: tt) => {
        let mut input = String::new();
        let _ = std::io::stdin().read_line(&mut input);
        let $arg = input;
    };
}
fn main() {
    // Echo a raw line, then a parsed integer, both read from stdin.
    leer_teclado!(a);
    println!("{}", a);
    leer_teclado!(a: i32);
    println!("{}", a);
}
|
use std::env;
use std::fs;
use std::path::Path;
fn main() {
let src = env::args()
.nth(1)
.expect("src harus diisi!.");
let dst = env::args()
.nth(2)
.expect("dst harus diisi!.");
let res = get_files(src).unwrap();
// let dst_res = get_files(dst).unwrap();
for name in res {
let fname = Path::new(&name).file_name().unwrap().to_str().unwrap();
let file_name = format!("{}/{}", dst, &fname);
if !Path::new(&file_name).exists() {
println!("{}", name);
}
}
}
/// Metadata about one directory entry.
struct FileInfo {
    // Full path of the entry as displayed.
    name: String,
    // Presumably the file's base name — not read anywhere in this file yet.
    file_name: String,
    // Presumably the containing path — not read anywhere in this file yet.
    path_name: String,
}
/// A collection of directory entries.
/// NOTE(review): the `names` field is never used in this file — confirm
/// whether it is needed at all.
struct Files {
    names: Vec<FileInfo>,
}
impl Files {
fn get_files(dirname: String) -> std::io::Result<Vec<FileInfo>> {
let mut res: Vec<FileInfo> = Vec::new();
for entry in fs::read_dir(dirname)? {
let dir = entry?;
res.push(dir.path().display().to_string());
}
Ok(res)
}
}
|
//! Code related to the sending of HMS push notifications.
//!
//! ## Authentication
//!
//! We are using OAuth 2.0-based authentication with the "Client Credentials" mode.
//!
//! Docs: https://developer.huawei.com/consumer/en/doc/development/HMSCore-Guides/open-platform-oauth-0000001053629189
//!
//! ## Message Sending
//!
//! Docs: https://developer.huawei.com/consumer/en/doc/development/HMSCore-Guides/android-server-dev-0000001050040110
//!
//! Payload format: https://developer.huawei.com/consumer/en/doc/development/HMSCore-References-V5/https-send-api-0000001050986197-V5#EN-US_TOPIC_0000001124288117__section13271045101216
use std::{
str::{from_utf8, FromStr},
time::{Duration, Instant},
};
use http::{
header::{AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE},
Request, Response,
};
use hyper::{body, Body, StatusCode, Uri};
use serde_derive::{Deserialize, Serialize};
use serde_json as json;
use tokio::sync::Mutex;
use crate::{
config::HmsConfig,
errors::SendPushError,
http_client::HttpClient,
push::{HmsToken, ThreemaPayload},
};
/// Which HMS service an endpoint URL is built for.
enum EndpointType {
    Login,
    Push,
}
// Server endpoints
#[cfg(not(test))]
fn hms_endpoint(endpoint_type: EndpointType) -> &'static str {
    match endpoint_type {
        EndpointType::Login => "https://oauth-login.cloud.huawei.com",
        EndpointType::Push => "https://push-api.cloud.huawei.com",
    }
}
/// In tests, every endpoint is redirected to the local mockito server.
#[cfg(test)]
fn hms_endpoint(_: EndpointType) -> String {
    mockito::server_url()
}
/// URL of the OAuth2 token endpoint.
fn hms_login_url() -> String {
    format!("{}/oauth2/v3/token", hms_endpoint(EndpointType::Login))
}
/// URL for sending push messages on behalf of `app_id`.
fn hms_push_url(app_id: &str) -> String {
    format!(
        "{}/v1/{}/messages:send",
        hms_endpoint(EndpointType::Push),
        app_id
    )
}
/// HMS push urgency.
#[derive(Debug, Serialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum Urgency {
    High,
    Normal,
}
/// HMS push category.
///
/// Note: To be able to use these categories, you need to apply for special
/// permission.
#[derive(Debug, Serialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum Category {
    //PlayVoice,
    Voip,
}
/// Android-specific delivery options for one message.
#[derive(Debug, Serialize)]
pub struct AndroidConfig {
    /// The urgency.
    urgency: Urgency,
    /// The push category.
    #[serde(skip_serializing_if = "Option::is_none")]
    category: Option<Category>,
    /// Time to live in seconds (serialized with an "s" suffix, e.g. "90s").
    ttl: String,
}
/// One HMS push message.
#[derive(Debug, Serialize)]
pub struct Message<'a> {
    /// The push payload.
    data: String,
    /// Android message push control.
    android: AndroidConfig,
    /// Push token(s) of the recipient(s).
    token: &'a [&'a str],
}
/// HMS request body.
#[derive(Debug, Serialize)]
struct Payload<'a> {
    /// The message.
    message: Message<'a>,
}
/// HMS auth response.
#[derive(Debug, Deserialize)]
struct AuthResponse {
    access_token: String,
    // Token lifetime in seconds.
    expires_in: i32,
    // Expected to be "Bearer".
    token_type: String,
}
/// HMS push response (the old comment said "auth response").
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct PushResponse {
    // HMS result code; interpreted via `HmsCode`.
    code: String,
    #[allow(dead_code)]
    msg: String,
    #[allow(dead_code)]
    request_id: String,
}
/// HMS service result code.
#[derive(Debug)]
enum HmsCode {
    Success,                       // 80000000
    SomeInvalidTokens,             // 80100000
    InvalidParameters,             // 80100001
    InvalidTokenCount,             // 80100002
    IncorrectMessageStructure,     // 80100003
    InvalidTtl,                    // 80100004
    InvalidCollapseKey,            // 80100013
    TooManyTopicMessages,          // 80100017
    AuthenticationError,           // 80200001
    AuthorizationExpired,          // 80200003
    PermissionDenied,              // 80300002
    InvalidTokens,                 // 80300007
    MessageTooLarge,               // 80300008
    TooManyTokens,                 // 80300010
    HighPriorityPermissionMissing, // 80300011
    InternalError,                 // 81000001
    Other(String),
}

impl From<&str> for HmsCode {
    /// Map the numeric result-code string from a `PushResponse` onto a
    /// variant; unrecognized codes are preserved verbatim in `Other`.
    fn from(code: &str) -> Self {
        match code {
            "80000000" => Self::Success,
            "80100000" => Self::SomeInvalidTokens,
            "80100001" => Self::InvalidParameters,
            "80100002" => Self::InvalidTokenCount,
            "80100003" => Self::IncorrectMessageStructure,
            "80100004" => Self::InvalidTtl,
            "80100013" => Self::InvalidCollapseKey,
            "80100017" => Self::TooManyTopicMessages,
            "80200001" => Self::AuthenticationError,
            "80200003" => Self::AuthorizationExpired,
            "80300002" => Self::PermissionDenied,
            "80300007" => Self::InvalidTokens,
            "80300008" => Self::MessageTooLarge,
            "80300010" => Self::TooManyTokens,
            "80300011" => Self::HighPriorityPermissionMissing,
            "81000001" => Self::InternalError,
            other => Self::Other(other.to_string()),
        }
    }
}
/// HMS OAuth2 credentials.
#[derive(Debug, Clone, PartialEq)]
pub struct HmsCredentials {
    /// The OAuth2 access token.
    access_token: String,
    /// Expiration of this access token.
    ///
    /// Note: We may set this to a time earlier than the actual token
    /// expiration.
    expiration: Instant,
}

impl HmsCredentials {
    /// Return true once the stored expiration time has been reached.
    pub fn expired(&self) -> bool {
        Instant::now() >= self.expiration
    }
}
impl From<AuthResponse> for HmsCredentials {
    /// Convert a successful auth response into credentials, shortening the
    /// lifetime so we renew before the server-side expiration.
    fn from(resp: AuthResponse) -> Self {
        // Renew 180 seconds before expiration timestamp (clamped at 0 for
        // tokens that are already about to expire).
        let expires_in = i32::max(resp.expires_in - 180, 0) as u64;
        Self {
            access_token: resp.access_token,
            expiration: Instant::now() + Duration::from_secs(expires_in),
        }
    }
}
/// The context object that holds state and authentication information.
#[derive(Debug)]
pub struct HmsContext {
    /// The HTTP client used to connect to HMS.
    client: HttpClient,
    /// The long-term credentials used to request temporary OAuth credentials.
    config: HmsConfig,
    /// The short-term credentials with a mutex, for exclusive access and
    /// interior mutability. `None` until the first successful login.
    credentials: Mutex<Option<HmsCredentials>>,
}
impl HmsContext {
    /// Create a context with no cached OAuth credentials.
    pub fn new(client: HttpClient, config: HmsConfig) -> Self {
        Self {
            client,
            config,
            credentials: Mutex::new(None),
        }
    }
    /// Request new OAuth2 credentials from the Huawei server.
    ///
    /// Performs the OAuth2 "Client Credentials" flow; every failure is
    /// mapped to `SendPushError::AuthError`.
    async fn request_new_credentials(&self) -> Result<HmsCredentials, SendPushError> {
        debug!("Requesting OAuth2 credentials");
        // Prepare request
        let body: String = form_urlencoded::Serializer::new(String::new())
            .append_pair("grant_type", "client_credentials")
            .append_pair("client_id", &self.config.client_id)
            .append_pair("client_secret", &self.config.client_secret)
            .finish();
        // Send request
        let request = Request::post(Uri::from_str(&hms_login_url()).unwrap())
            .header(CONTENT_TYPE, "application/x-www-form-urlencoded")
            .header(CONTENT_LENGTH, &*body.len().to_string())
            .body(Body::from(body))
            .unwrap();
        let response: Response<Body> = self
            .client
            .request(request)
            .await
            .map_err(|e| SendPushError::AuthError(e.to_string()))?;
        // Extract status
        let status = response.status();
        // Fetch body
        let body_bytes = body::to_bytes(response.into_body()).await.map_err(|e| {
            SendPushError::AuthError(format!("Could not read HMS auth response body: {}", e))
        })?;
        // Validate status code; log the body for diagnosis before bailing.
        if status != StatusCode::OK {
            match from_utf8(&body_bytes) {
                Ok(body) => warn!("OAuth2 response: HTTP {}: {}", status, body),
                Err(_) => warn!("OAuth2 response: HTTP {} (invalid UTF8 body)", status),
            }
            return Err(SendPushError::AuthError(format!(
                "Could not request HMS credentials: HTTP {}",
                status
            )));
        }
        trace!("OAuth2 response: HTTP {}", status);
        // Decode UTF8 bytes
        let json_body = from_utf8(&body_bytes).map_err(|_| {
            SendPushError::AuthError("Could not decode response JSON: Invalid UTF-8".into())
        })?;
        // Parse JSON
        let data: AuthResponse = json::from_str(json_body).map_err(|e| {
            SendPushError::AuthError(format!(
                "Could not decode response JSON: `{}` (Reason: {})",
                json_body, e
            ))
        })?;
        // Validate type: only Bearer tokens are expected; log but proceed
        // with anything else.
        if data.token_type != "Bearer" {
            warn!(
                "Returned OAuth2 token is of type '{}', not 'Bearer'",
                data.token_type
            );
        }
        Ok(data.into())
    }
    /// Return a copy of the HMS credentials.
    ///
    /// If there are no credentials so far, fetch and store them.
    /// If the credentials are outdated, refresh them.
    /// Otherwise, just return a copy directly.
    pub async fn get_active_credentials(&self) -> Result<HmsCredentials, SendPushError> {
        // Lock mutex; holding it across the refresh keeps concurrent
        // callers from fetching twice.
        let mut credentials = self.credentials.lock().await;
        match *credentials {
            // No credentials found, fetch initial credentials
            None => {
                let new_credentials = self.request_new_credentials().await?;
                *credentials = Some(new_credentials.clone());
                info!("Fetched initial OAuth credentials");
                Ok(new_credentials)
            }
            // Valid credentials found
            Some(ref credentials) if !credentials.expired() => {
                debug!(
                    "Credentials are still valid, expiration in {} seconds",
                    (credentials.expiration - Instant::now()).as_secs()
                );
                Ok(credentials.clone())
            }
            // Credentials must be renewed
            Some(_) => {
                let new_credentials = self.request_new_credentials().await?;
                *credentials = Some(new_credentials.clone());
                info!("Refreshed OAuth credentials");
                Ok(new_credentials)
            }
        }
    }
    /// Clear credentials, forcing a full re-login on the next call to
    /// `get_active_credentials`.
    pub async fn clear_credentials(&self) {
        info!("Clearing credentials");
        let mut credentials = self.credentials.lock().await;
        *credentials = None;
    }
}
/// Send a HMS push notification.
///
/// Builds a `ThreemaPayload` for the given session / affiliation / version,
/// wraps it in the HMS message envelope (urgency and category derive from the
/// `high_priority` config flag), POSTs it to the HMS push endpoint using
/// OAuth credentials from `context`, then validates both the HTTP status and
/// the HMS result code carried in the response body.
///
/// On an HMS-level authentication error the cached credentials are cleared so
/// the next call fetches fresh ones.
pub async fn send_push(
    context: &HmsContext,
    push_token: &HmsToken,
    version: u16,
    session: &str,
    affiliation: Option<&str>,
    ttl: u32,
) -> Result<(), SendPushError> {
    let threema_payload = ThreemaPayload::new(session, affiliation, version);
    let high_priority = context.config.high_priority.unwrap_or(false);
    let payload = Payload {
        message: Message {
            data: json::to_string(&threema_payload).expect("Could not encode JSON threema payload"),
            android: AndroidConfig {
                // High-priority pushes are delivered immediately and tagged
                // as VoIP category.
                urgency: if high_priority {
                    Urgency::High
                } else {
                    Urgency::Normal
                },
                category: if high_priority {
                    Some(Category::Voip)
                } else {
                    None
                },
                // HMS expects the TTL as a string with a seconds suffix.
                ttl: format!("{}s", ttl),
            },
            token: &[&push_token.0],
        },
    };
    trace!("Sending payload: {:#?}", payload);

    // Encode payload
    let payload_string = json::to_string(&payload).expect("Could not encode JSON payload");
    debug!("Payload: {}", payload_string);

    // Get or refresh credentials
    let credentials = context.get_active_credentials().await?;

    // Prepare request
    let request = Request::post(Uri::from_str(&hms_push_url(&context.config.client_id)).unwrap())
        .header(CONTENT_TYPE, "application/json; charset=UTF-8")
        .header(CONTENT_LENGTH, &*payload_string.len().to_string())
        .header(
            AUTHORIZATION,
            &format!("Bearer {}", credentials.access_token),
        )
        .body(Body::from(payload_string))
        .unwrap();

    // Send request
    let response: Response<Body> = context
        .client
        .request(request)
        .await
        .map_err(|e| SendPushError::SendError(e.to_string()))?;

    // Extract status
    let status = response.status();

    // Fetch body.
    // Bug fix: this error was previously reported as `AuthError` with the
    // message "Could not read HMS auth response body", copy-pasted from the
    // credentials code — but this is the push response, not the auth
    // response.
    let body_bytes = body::to_bytes(response.into_body()).await.map_err(|e| {
        SendPushError::Other(format!("Could not read HMS response body: {}", e))
    })?;

    // Decode UTF8 bytes
    let body = match from_utf8(&body_bytes) {
        Ok(string) => string,
        Err(_) => "[Non-UTF8 Body]", // This will fail to parse as JSON, but it's helpful for error logging
    };

    // Validate status code
    match status {
        StatusCode::OK => {
            trace!("HMS push request returned HTTP 200: {}", body);
        }
        StatusCode::BAD_REQUEST => {
            return Err(SendPushError::ProcessingClientError(format!(
                "Bad request: {}",
                body
            )));
        }
        StatusCode::INTERNAL_SERVER_ERROR | StatusCode::BAD_GATEWAY => {
            return Err(SendPushError::ProcessingRemoteError(format!(
                "HMS server error: {}",
                body
            )));
        }
        StatusCode::SERVICE_UNAVAILABLE => {
            return Err(SendPushError::ProcessingRemoteError(format!(
                "HMS quota reached: {}",
                body
            )));
        }
        _other => {
            return Err(SendPushError::Other(format!(
                "Unexpected status code: HTTP {}: {}",
                status, body
            )));
        }
    }

    // Parse JSON. Even an HTTP 200 response can carry an HMS-level error
    // code, so the body must still be inspected.
    let data: PushResponse = json::from_str(body).map_err(|e| {
        SendPushError::Other(format!(
            "Could not decode response JSON: `{}` (Reason: {})",
            body, e
        ))
    })?;

    // Validate HMS code
    let code = HmsCode::from(&*data.code);
    match code {
        // Success
        HmsCode::Success => Ok(()),
        // Client errors
        HmsCode::SomeInvalidTokens | HmsCode::InvalidTokens => Err(
            SendPushError::ProcessingClientError("Invalid push token(s)".to_string()),
        ),
        // Potentially temporary errors
        HmsCode::InternalError => Err(SendPushError::ProcessingRemoteError(
            "HMS internal server error".to_string(),
        )),
        // Auth errors
        HmsCode::AuthenticationError | HmsCode::AuthorizationExpired => {
            // Clear credentials, since token may be invalid
            context.clear_credentials().await;
            Err(SendPushError::ProcessingRemoteError(format!(
                "Authentication error: {:?}",
                code
            )))
        }
        // Other errors
        other => Err(SendPushError::Other(format!(
            "HMS push failed: {:?}",
            other
        ))),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use mockito::{mock, Matcher};
    use crate::http_client;

    mod context {
        use super::*;

        /// Exercise the credential lifecycle: initial fetch, cache hit, and
        /// refresh after expiration.
        #[tokio::test]
        async fn get_credentials() {
            const CLIENT_ID: &str = "klient";
            const CLIENT_SECRET: &str = "sehr-sekur";

            // Set up context
            let client = http_client::make_client(10);
            let context = HmsContext::new(
                client,
                HmsConfig {
                    client_id: CLIENT_ID.into(),
                    client_secret: CLIENT_SECRET.into(),
                    high_priority: None,
                },
            );

            // Mock HMS auth endpoint.
            //
            // Bug fix: the original chained two `.with_body(...)` calls, so
            // the first one (the expected request body) was silently
            // overwritten by the second (the mocked response). Use
            // `.match_body(...)` so the outgoing form-encoded request is
            // actually verified.
            let m = mock("POST", "/oauth2/v3/token")
                .match_body(Matcher::Exact(format!(
                    "grant_type=client_credentials&client_id={}&client_secret={}",
                    CLIENT_ID, CLIENT_SECRET
                )))
                .with_status(200)
                .with_body(
                    r#"{
                "access_token": "akssess",
                "expires_in": 3600,
                "token_type": "Bearer"
            }"#,
                )
                .create();

            // No credentials yet
            assert!(context.credentials.lock().await.is_none());

            // Get new credentials
            let credentials = context.get_active_credentials().await.unwrap();
            m.assert();
            assert!(context.credentials.lock().await.is_some());
            assert_eq!(credentials.access_token, "akssess");
            // Bounds below imply a 180 s safety margin below `expires_in`.
            let remaining_validity = (credentials.expiration - Instant::now()).as_secs();
            assert!(remaining_validity <= (3600 - 180));
            assert!(remaining_validity > (3600 - 180 - 10));

            // Get cached credentials
            let credentials2 = context.get_active_credentials().await.unwrap();
            m.assert(); // This fails if the endpoint is called twice
            assert_eq!(credentials, credentials2);

            // Refresh credentials: force expiration, then expect a second
            // request against the mock.
            let m = m.expect(2);
            context
                .credentials
                .lock()
                .await
                .as_mut()
                .unwrap()
                .expiration = Instant::now() - Duration::from_secs(3);
            let credentials3 = context.get_active_credentials().await.unwrap();
            m.assert();
            let remaining_validity = (credentials3.expiration - Instant::now()).as_secs();
            assert!(remaining_validity > (3600 - 180 - 10));
        }
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::{
constants::{EMPTY_STORY_TITLE, GRAPH_KEY, STATE_KEY, TIME_KEY, TITLE_KEY},
models::{AddModInfo, OutputConsumer, StoryMetadata},
story_context_store::Contributor,
story_graph::{Module, StoryGraph},
story_storage::{StoryName, StoryStorage},
},
failure::{format_err, Error},
fidl_fuchsia_app_discover::StoryDiscoverError,
std::{
collections::HashMap,
time::{SystemTime, UNIX_EPOCH},
},
};
/// Manage multiple story graphs to support restoring stories.
pub struct StoryManager {
    // Save stories to Ledger.
    //
    // Boxed trait object so an alternative backend can be substituted
    // (the tests below use an in-memory `MemoryStorage`).
    story_storage: Box<dyn StoryStorage>,
}
impl StoryManager {
    /// Create a manager backed by the given storage implementation.
    pub fn new(story_storage: Box<dyn StoryStorage>) -> Self {
        StoryManager { story_storage }
    }

    /// Load the story graph saved under `story_name`.
    ///
    /// Returns `None` when no graph is stored or the stored JSON fails to
    /// deserialize.
    pub async fn get_story_graph(&self, story_name: &str) -> Option<StoryGraph> {
        self.story_storage
            .get_property(story_name, GRAPH_KEY)
            .await
            .ok()
            .and_then(|s| serde_json::from_str(&s).ok())
    }

    /// Set a property of the given story.
    ///
    /// The story graph and instance state are managed internally and may not
    /// be written through this method.
    pub async fn set_property(
        &mut self,
        story_name: &str,
        key: &str,
        value: impl Into<String>,
    ) -> Result<(), StoryDiscoverError> {
        match key {
            // Writing to story graph and instance state is not allowed.
            GRAPH_KEY | STATE_KEY => Err(StoryDiscoverError::InvalidKey),
            _ => self.story_storage.set_property(story_name, key, value.into()).await,
        }
    }

    /// Get a property of the given story.
    pub async fn get_property(
        &self,
        story_name: &str,
        key: &str,
    ) -> Result<String, StoryDiscoverError> {
        // Cleanup: `key` was previously passed as `&key` (a needless
        // `&&str`); deref coercion made both forms equivalent.
        self.story_storage.get_property(story_name, key).await
    }

    /// Set instance state of a mod, addressed by story, module and state name.
    pub async fn set_instance_state(
        &mut self,
        story_name: &str,
        module_name: &str,
        state_name: &str,
        value: impl Into<String>,
    ) -> Result<(), StoryDiscoverError> {
        let identity_path = format!("{}/{}/{}", story_name, module_name, state_name);
        self.story_storage.set_property(&identity_path, STATE_KEY, value.into()).await
    }

    /// Get instance state of a mod, addressed by story, module and state name.
    pub async fn get_instance_state(
        &self,
        story_name: &str,
        module_name: &str,
        state_name: &str,
    ) -> Result<String, StoryDiscoverError> {
        let identity_path = format!("{}/{}/{}", story_name, module_name, state_name);
        self.story_storage.get_property(&identity_path, STATE_KEY).await
    }

    /// Record "now" (nanoseconds since the Unix epoch) as the time the story
    /// was last executed.
    pub async fn update_timestamp(&mut self, story_name: &str) -> Result<(), Error> {
        let timestamp =
            SystemTime::now().duration_since(UNIX_EPOCH).expect("time went backwards").as_nanos();
        self.story_storage
            .set_property(story_name, TIME_KEY, timestamp.to_string())
            .await
            .map_err(StoryManager::error_mapping)
    }

    /// Restore a story by returning the modules of its saved graph.
    ///
    /// Restoring counts as an execution, so the story's timestamp is
    /// refreshed.
    pub async fn restore_story_graph(
        &mut self,
        target_story_name: StoryName,
    ) -> Result<Vec<Module>, Error> {
        // A corrupt graph falls back to an empty one rather than failing the
        // restore. (`unwrap_or_else` avoids eagerly building the fallback.)
        let story_graph = serde_json::from_str(
            &self
                .story_storage
                .get_property(&target_story_name, GRAPH_KEY)
                .await
                .map_err(StoryManager::error_mapping)?,
        )
        .unwrap_or_else(|_| StoryGraph::new());
        self.update_timestamp(&target_story_name).await?;
        Ok(story_graph.get_all_modules().map(|(k, v)| Module::new(k.clone(), v.clone())).collect())
    }

    /// Update the graph when a contributor changes one of its output
    /// references, rewriting the intents of all consumer modules.
    pub async fn update_graph_for_replace(
        &mut self,
        old_reference: &str,
        new_reference: &str,
        contributor: Contributor,
    ) -> Result<(), Error> {
        match contributor {
            Contributor::ModuleContributor { story_id, module_id, parameter_name } => {
                // Missing or unparseable graphs degrade to an empty graph.
                let mut story_graph = self
                    .story_storage
                    .get_property(&story_id, GRAPH_KEY)
                    .await
                    .map(|s| serde_json::from_str(&s).unwrap_or_else(|_| StoryGraph::new()))
                    .unwrap_or_else(|_| StoryGraph::new());
                let consumer_ids = match story_graph.get_module_data_mut(&module_id) {
                    Some(module_data) => {
                        // Update the provider's output to the new reference.
                        module_data.update_output(&parameter_name, Some(new_reference.to_string()));
                        // NOTE(review): indexing assumes `update_output`
                        // keeps the `parameter_name` entry present — confirm
                        // against `ModuleData::update_output`.
                        module_data.outputs[&parameter_name]
                            .consumers
                            .iter()
                            .map(|(id, _)| id.to_string())
                            .collect()
                    }
                    None => vec![],
                };
                // Update the intent of each consumer module.
                for consumer_module_id in consumer_ids {
                    if let Some(consumer_module_data) =
                        story_graph.get_module_data_mut(&consumer_module_id)
                    {
                        let new_intent = consumer_module_data
                            .last_intent
                            .clone_with_new_reference(old_reference, new_reference);
                        consumer_module_data.update_intent(new_intent);
                    }
                }
                // Persist the updated graph; serialization failures are
                // skipped silently (unchanged from the original behavior).
                if let Ok(string_content) = serde_json::to_string(&story_graph) {
                    self.story_storage
                        .set_property(&story_id, GRAPH_KEY, string_content)
                        .await
                        .map_err(StoryManager::error_mapping)?;
                }
                Ok(())
            }
        }
    }

    /// Add the mod to the story graph by loading it from storage, updating
    /// it and saving it back to storage.
    pub async fn add_to_story_graph(
        &mut self,
        action: &AddModInfo,
        output_consumers: Vec<OutputConsumer>,
    ) -> Result<(), Error> {
        let mut story_graph = self
            .story_storage
            .get_property(action.story_name(), GRAPH_KEY)
            .await
            .map(|s| serde_json::from_str(&s).unwrap_or_else(|_| StoryGraph::new()))
            .unwrap_or_else(|_| StoryGraph::new());
        let mut intent = action.intent().clone();
        if intent.action.is_none() {
            intent.action = Some("NONE".to_string());
        }
        story_graph.add_module(action.mod_name(), intent);
        // Wire the new module up as a consumer of each contributing output.
        // (`if let` replaces the original `match` with an empty `None` arm.)
        for output_consumer in output_consumers {
            if let Some(module_data) = story_graph.get_module_data_mut(&output_consumer.module_id) {
                module_data.add_child(action.mod_name());
                module_data.add_output_consumer(
                    &output_consumer.output_name,
                    output_consumer.entity_reference,
                    action.mod_name(),
                    output_consumer.consume_type,
                );
            }
        }
        if let Ok(string_content) = serde_json::to_string(&story_graph) {
            self.story_storage
                .set_property(action.story_name(), GRAPH_KEY, string_content)
                .await
                .map_err(StoryManager::error_mapping)?;
        }
        // First time this story is seen: give it an empty title and an
        // initial timestamp. Stories that already have a title are left
        // untouched.
        let story_title = self.story_storage.get_property(action.story_name(), TITLE_KEY).await;
        if story_title.is_ok() {
            return Ok(());
        }
        self.story_storage
            .set_property(action.story_name(), TITLE_KEY, EMPTY_STORY_TITLE.to_string())
            .await
            .map_err(StoryManager::error_mapping)?;
        self.update_timestamp(action.story_name()).await
    }

    /// Return saved story metadata (name, title, last-executed timestamp) to
    /// generate suggestions.
    pub async fn get_story_metadata(&self) -> Result<Vec<StoryMetadata>, Error> {
        // NOTE(review): `split_at(TIME_KEY.len() + 1)` assumes every entry
        // name is "<TIME_KEY><separator><story_name>" — confirm against the
        // `StoryStorage::get_entries` key layout.
        let mut time_map = self
            .story_storage
            .get_entries(TIME_KEY)
            .await
            .map_err(StoryManager::error_mapping)?
            .into_iter()
            .map(|(name, time)| {
                (name.split_at(TIME_KEY.len() + 1).1.to_string(), time.parse::<u128>().unwrap_or(0))
            })
            .collect::<HashMap<String, u128>>();
        // Stories without a recorded timestamp default to 0 (oldest).
        Ok(self
            .story_storage
            .get_name_titles()
            .await
            .map_err(StoryManager::error_mapping)?
            .iter()
            .map(|(name, title)| {
                StoryMetadata::new(name, title, time_map.remove(name).unwrap_or(0))
            })
            .collect())
    }

    /// Convert a [`StoryDiscoverError`] into a generic `failure::Error`.
    pub fn error_mapping(error: StoryDiscoverError) -> Error {
        // Bug fix: the original messages contained a typo
        // ("StoryDicoverError") and inconsistent spacing; they are
        // normalized here.
        match error {
            StoryDiscoverError::Storage => format_err!("StoryDiscoverError: Storage"),
            StoryDiscoverError::VmoStringConversion => {
                format_err!("StoryDiscoverError: VmoStringConversion")
            }
            StoryDiscoverError::InvalidKey => format_err!("StoryDiscoverError: InvalidKey"),
        }
    }
}
#[cfg(test)]
mod tests {
    use {
        super::*,
        crate::{
            models::{DisplayInfo, Intent, SuggestedAction, Suggestion},
            story_storage::MemoryStorage,
        },
        failure::Error,
        fuchsia_async as fasync,
    };

    // Adding mods to two stories persists both graphs (and an empty title
    // for each new story), and a story can be restored as its module list.
    #[fasync::run_singlethreaded(test)]
    async fn add_to_story_graph_and_restore() -> Result<(), Error> {
        let mut story_manager = StoryManager::new(Box::new(MemoryStorage::new()));
        let suggestion_1 = suggestion!(
            action = "PLAY_MUSIC",
            title = "Play music",
            parameters = [(name = "artist", entity_reference = "peridot-ref")],
            story = "story_name_1"
        );
        let suggestion_2 = suggestion!(
            action = "PLAY_MUSIC",
            title = "Play music",
            parameters = [(name = "artist", entity_reference = "peridot-ref")],
            story = "story_name_2"
        );
        match suggestion_1.action() {
            SuggestedAction::AddMod(action) => {
                story_manager.add_to_story_graph(&action, vec![]).await?;
            }
            _ => assert!(false),
        }
        // The graph for story 1 should now be in storage and deserializable.
        let story_graph = serde_json::from_str(
            &story_manager
                .story_storage
                .get_property("story_name_1", GRAPH_KEY)
                .await
                .map_err(StoryManager::error_mapping)?,
        )
        .unwrap_or(StoryGraph::new());
        assert_eq!(story_graph.get_module_count(), 1);
        // New stories get an empty placeholder title.
        assert_eq!(
            story_manager.story_storage.get_property("story_name_1", TITLE_KEY).await.unwrap(),
            EMPTY_STORY_TITLE
        );
        // story_name_1 already saved
        assert_eq!(
            story_manager
                .story_storage
                .get_story_count()
                .await
                .map_err(StoryManager::error_mapping)?,
            1
        );
        // changed to a new story_name_2
        match suggestion_2.action() {
            SuggestedAction::AddMod(action) => {
                story_manager.add_to_story_graph(&action, vec![]).await?;
            }
            _ => assert!(false),
        }
        // story_name_1 & 2 already saved
        assert_eq!(
            story_manager
                .story_storage
                .get_story_count()
                .await
                .map_err(StoryManager::error_mapping)?,
            2
        );
        // restore the story_name_1
        let modules = story_manager.restore_story_graph("story_name_1".to_string()).await?;
        assert_eq!(modules.len(), 1);
        Ok(())
    }

    // A second mod consuming the first mod's output is recorded in the
    // provider's consumer list with its entity reference and type.
    #[fasync::run_singlethreaded(test)]
    async fn save_links() -> Result<(), Error> {
        let mut story_manager = StoryManager::new(Box::new(MemoryStorage::new()));
        let suggestion_1 = suggestion!(
            action = "NOUNS_OF_WORLD",
            title = "Nouns of world",
            parameters = [],
            story = "story_name_1"
        );
        let mod_name_1 = match suggestion_1.action() {
            SuggestedAction::AddMod(action) => {
                story_manager.add_to_story_graph(&action, vec![]).await?;
                Some(action.mod_name())
            }
            _ => None,
        }
        .unwrap();
        let contributors = vec![OutputConsumer::new(
            "peridot-ref",
            mod_name_1,
            "selected",
            "https://schema.org/MusicGroup",
        )];
        let suggestion_2 = suggestion!(
            action = "PLAY_MUSIC",
            title = "Play music",
            parameters = [(name = "artist", entity_reference = "peridot-ref")],
            story = "story_name_1"
        );
        let mod_name_2 = match suggestion_2.action() {
            SuggestedAction::AddMod(action) => {
                story_manager.add_to_story_graph(&action, contributors).await?;
                Some(action.mod_name())
            }
            _ => None,
        }
        .unwrap();
        let story_graph = story_manager
            .story_storage
            .get_property("story_name_1", GRAPH_KEY)
            .await
            .map(|s| serde_json::from_str(&s).unwrap_or(StoryGraph::new()))
            .unwrap_or(StoryGraph::new());
        // The provider module should expose exactly one output with one
        // consumer: the second mod, typed as a MusicGroup.
        let module_data_1 = story_graph.get_module_data(mod_name_1).unwrap();
        assert_eq!(module_data_1.outputs.len(), 1);
        let module_output = &module_data_1.outputs["selected"];
        assert_eq!(&module_output.entity_reference, "peridot-ref");
        assert_eq!(module_output.consumers.len(), 1);
        assert_eq!(
            module_output
                .consumers
                .iter()
                .filter(|(module_name, type_name)| module_name == mod_name_2
                    && type_name == "https://schema.org/MusicGroup")
                .count(),
            1
        );
        Ok(())
    }

    // Instance state round-trips through set/get.
    #[fasync::run_singlethreaded(test)]
    async fn set_and_get_instance_state() -> Result<(), Error> {
        let mut story_manager = StoryManager::new(Box::new(MemoryStorage::new()));
        story_manager
            .set_instance_state("some-story", "some-mod", "some-state", "value")
            .await
            .map_err(StoryManager::error_mapping)?;
        let instance_state = story_manager
            .get_instance_state("some-story", "some-mod", "some-state")
            .await
            .map_err(StoryManager::error_mapping)?;
        assert_eq!(&instance_state, "value");
        Ok(())
    }

    // Consecutive timestamp updates are strictly increasing.
    #[fasync::run_singlethreaded(test)]
    async fn update_timestamp() -> Result<(), Error> {
        let mut story_manager = StoryManager::new(Box::new(MemoryStorage::new()));
        let story_name = "story_1".to_string();
        story_manager.update_timestamp(&story_name).await?;
        let timestamp_1 = story_manager
            .get_property(&story_name, TIME_KEY)
            .await
            .map_err(StoryManager::error_mapping)?
            .parse::<u128>()
            .unwrap_or(0);
        story_manager.update_timestamp(&story_name).await?;
        let timestamp_2 = story_manager
            .get_property(&story_name, TIME_KEY)
            .await
            .map_err(StoryManager::error_mapping)?
            .parse::<u128>()
            .unwrap_or(0);
        assert!(timestamp_2 > timestamp_1);
        Ok(())
    }

    // Timestamps order stories by recency, and restoring a story bumps it
    // to the front.
    #[fasync::run_singlethreaded(test)]
    async fn maintain_story_recency() -> Result<(), Error> {
        let mut story_manager = StoryManager::new(Box::new(MemoryStorage::new()));
        let suggestion_1 = suggestion!(
            action = "PLAY_MUSIC",
            title = "Play music",
            parameters = [(name = "artist", entity_reference = "peridot-ref")],
            story = "story_name_1"
        );
        let suggestion_2 = suggestion!(
            action = "PLAY_MUSIC",
            title = "Play music",
            parameters = [(name = "artist", entity_reference = "peridot-ref")],
            story = "story_name_2"
        );
        // Execute two addmod suggestions one by one.
        match suggestion_1.action() {
            SuggestedAction::AddMod(action) => {
                story_manager.add_to_story_graph(&action, vec![]).await?;
            }
            SuggestedAction::RestoreStory(_) => {
                assert!(false);
            }
        }
        match suggestion_2.action() {
            SuggestedAction::AddMod(action) => {
                story_manager.add_to_story_graph(&action, vec![]).await?;
            }
            SuggestedAction::RestoreStory(_) => {
                assert!(false);
            }
        }
        // Ensure that the most recent story is ranked first.
        let mut stories = story_manager.get_story_metadata().await?;
        assert_eq!(stories.len(), 2);
        stories.sort_by(|a, b| b.last_executed_timestamp.cmp(&a.last_executed_timestamp));
        assert_eq!(&stories[0].story_name, "story_name_2");
        assert_eq!(&stories[1].story_name, "story_name_1");
        // Restore one story and see if the recency ranking results change.
        let _ = story_manager.restore_story_graph("story_name_1".to_string()).await?;
        let mut stories = story_manager.get_story_metadata().await?;
        assert_eq!(stories.len(), 2);
        stories.sort_by(|a, b| b.last_executed_timestamp.cmp(&a.last_executed_timestamp));
        assert_eq!(&stories[0].story_name, "story_name_1");
        assert_eq!(&stories[1].story_name, "story_name_2");
        Ok(())
    }
}
|
#[cfg(all(not(target_arch = "wasm32"), test))]
mod test;
use anyhow::*;
use liblumen_alloc::erts::exception::{self, error};
use liblumen_alloc::erts::process::trace::Trace;
use liblumen_alloc::erts::process::Process;
use liblumen_alloc::erts::term::prelude::*;
use crate::runtime::registry::pid_to_process;
/// Native implementation of `erlang:link/1`: link the calling `process` to
/// the local process identified by `pid_or_port`.
///
/// Returns `true` on success. Raises a `noproc` error when the pid does not
/// refer to a live local process, and a type error when the argument is not
/// a pid or port. Ports and external (remote) pids/ports are not implemented.
#[native_implemented::function(erlang:link/1)]
fn result(process: &Process, pid_or_port: Term) -> exception::Result<Term> {
    match pid_or_port.decode()? {
        TypedTerm::Pid(pid) => {
            if pid == process.pid() {
                // Linking a process to itself succeeds without registering
                // anything.
                Ok(true.into())
            } else {
                match pid_to_process(&pid) {
                    Some(pid_arc_process) => {
                        process.link(&pid_arc_process);
                        Ok(true.into())
                    }
                    // No live local process is registered under this pid:
                    // raise `noproc` with a captured stack trace.
                    None => Err(error(
                        Atom::str_to_term("noproc"),
                        None,
                        Trace::capture(),
                        Some(
                            anyhow!("pid ({}) doesn't refer to an alive local process", pid).into(),
                        ),
                    )
                    .into()),
                }
            }
        }
        // NOTE(review): local ports and external pids/ports panic via
        // `unimplemented!` rather than raising an Erlang error — confirm
        // this is the intended behavior until port linking is supported.
        TypedTerm::Port(_) => unimplemented!(),
        TypedTerm::ExternalPid(_) => unimplemented!(),
        TypedTerm::ExternalPort(_) => unimplemented!(),
        // Any other term shape is a type error for this BIF.
        _ => Err(TypeError)
            .context(format!(
                "pid_or_port ({}) is neither a pid nor a port",
                pid_or_port
            ))
            .map_err(From::from),
    }
}
|
::windows_targets::link ! ( "vssapi.dll""system" #[doc = "*Required features: `\"Win32_Storage_Vss\"`*"] fn CreateVssExpressWriterInternal ( ppwriter : *mut IVssExpressWriter ) -> ::windows_sys::core::HRESULT );
pub type IVssAdmin = *mut ::core::ffi::c_void;
pub type IVssAdminEx = *mut ::core::ffi::c_void;
pub type IVssAsync = *mut ::core::ffi::c_void;
pub type IVssComponent = *mut ::core::ffi::c_void;
pub type IVssComponentEx = *mut ::core::ffi::c_void;
pub type IVssComponentEx2 = *mut ::core::ffi::c_void;
pub type IVssCreateExpressWriterMetadata = *mut ::core::ffi::c_void;
pub type IVssCreateWriterMetadata = *mut ::core::ffi::c_void;
pub type IVssDifferentialSoftwareSnapshotMgmt = *mut ::core::ffi::c_void;
pub type IVssDifferentialSoftwareSnapshotMgmt2 = *mut ::core::ffi::c_void;
pub type IVssDifferentialSoftwareSnapshotMgmt3 = *mut ::core::ffi::c_void;
pub type IVssEnumMgmtObject = *mut ::core::ffi::c_void;
pub type IVssEnumObject = *mut ::core::ffi::c_void;
pub type IVssExpressWriter = *mut ::core::ffi::c_void;
pub type IVssFileShareSnapshotProvider = *mut ::core::ffi::c_void;
pub type IVssHardwareSnapshotProvider = *mut ::core::ffi::c_void;
pub type IVssHardwareSnapshotProviderEx = *mut ::core::ffi::c_void;
pub type IVssProviderCreateSnapshotSet = *mut ::core::ffi::c_void;
pub type IVssProviderNotifications = *mut ::core::ffi::c_void;
pub type IVssSnapshotMgmt = *mut ::core::ffi::c_void;
pub type IVssSnapshotMgmt2 = *mut ::core::ffi::c_void;
pub type IVssSoftwareSnapshotProvider = *mut ::core::ffi::c_void;
pub type IVssWMDependency = *mut ::core::ffi::c_void;
pub type IVssWMFiledesc = *mut ::core::ffi::c_void;
pub type IVssWriterComponents = *mut ::core::ffi::c_void;
pub type IVssWriterImpl = *mut ::core::ffi::c_void;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSSCoordinator: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xe579ab5f_1cc4_44b4_bed9_de0991ff0623);
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_ASSOC_NO_MAX_SPACE: i32 = -1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_ASSOC_REMOVE: u32 = 0u32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_CRITICAL_DISKS_TOO_SMALL: ::windows_sys::core::HRESULT = -2147212280i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_CRITICAL_DISK_CANNOT_BE_EXCLUDED: ::windows_sys::core::HRESULT = -2147212267i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_DATADISK_RDISK0: ::windows_sys::core::HRESULT = -2147212282i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_DISK_ASSIGNMENT_FAILED: ::windows_sys::core::HRESULT = -2147212287i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_DISK_RECREATION_FAILED: ::windows_sys::core::HRESULT = -2147212286i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_DYNAMIC_VHD_NOT_SUPPORTED: ::windows_sys::core::HRESULT = -2147212278i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_FIXED_PHYSICAL_DISK_AVAILABLE_AFTER_DISK_EXCLUSION: ::windows_sys::core::HRESULT = -2147212268i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_MISSING_DYNDISK: ::windows_sys::core::HRESULT = -2147212284i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_NO_ARCPATH: ::windows_sys::core::HRESULT = -2147212285i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_NO_PHYSICAL_DISK_AVAILABLE: ::windows_sys::core::HRESULT = -2147212269i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_RDISK0_TOOSMALL: ::windows_sys::core::HRESULT = -2147212281i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_RDISK_FOR_SYSTEM_DISK_NOT_FOUND: ::windows_sys::core::HRESULT = -2147212270i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_SHARED_CRIDISK: ::windows_sys::core::HRESULT = -2147212283i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_ASRERROR_SYSTEM_PARTITION_HIDDEN: ::windows_sys::core::HRESULT = -2147212266i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_AUTORECOVERY_FAILED: ::windows_sys::core::HRESULT = -2147212293i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_BAD_STATE: ::windows_sys::core::HRESULT = -2147212543i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_BREAK_REVERT_ID_FAILED: ::windows_sys::core::HRESULT = -2147212298i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_CANNOT_REVERT_DISKID: ::windows_sys::core::HRESULT = -2147212290i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_CLUSTER_ERROR: ::windows_sys::core::HRESULT = -2147212288i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_CLUSTER_TIMEOUT: ::windows_sys::core::HRESULT = -2147212498i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_CORRUPT_XML_DOCUMENT: ::windows_sys::core::HRESULT = -2147212528i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_CRITICAL_VOLUME_ON_INVALID_DISK: ::windows_sys::core::HRESULT = -2147212271i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_DYNAMIC_DISK_ERROR: ::windows_sys::core::HRESULT = -2147212292i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_FLUSH_WRITES_TIMEOUT: ::windows_sys::core::HRESULT = -2147212525i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_FSS_TIMEOUT: ::windows_sys::core::HRESULT = -2147212265i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_HOLD_WRITES_TIMEOUT: ::windows_sys::core::HRESULT = -2147212524i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_INSUFFICIENT_STORAGE: ::windows_sys::core::HRESULT = -2147212513i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_INVALID_XML_DOCUMENT: ::windows_sys::core::HRESULT = -2147212527i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_LEGACY_PROVIDER: ::windows_sys::core::HRESULT = -2147212297i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_MAXIMUM_DIFFAREA_ASSOCIATIONS_REACHED: ::windows_sys::core::HRESULT = -2147212514i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_MAXIMUM_NUMBER_OF_REMOTE_MACHINES_REACHED: ::windows_sys::core::HRESULT = -2147212510i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_MAXIMUM_NUMBER_OF_SNAPSHOTS_REACHED: ::windows_sys::core::HRESULT = -2147212521i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_MAXIMUM_NUMBER_OF_VOLUMES_REACHED: ::windows_sys::core::HRESULT = -2147212526i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_MISSING_DISK: ::windows_sys::core::HRESULT = -2147212296i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_MISSING_HIDDEN_VOLUME: ::windows_sys::core::HRESULT = -2147212295i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_MISSING_VOLUME: ::windows_sys::core::HRESULT = -2147212294i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_NESTED_VOLUME_LIMIT: ::windows_sys::core::HRESULT = -2147212500i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_NONTRANSPORTABLE_BCD: ::windows_sys::core::HRESULT = -2147212291i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_NOT_SUPPORTED: ::windows_sys::core::HRESULT = -2147212497i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_NO_SNAPSHOTS_IMPORTED: ::windows_sys::core::HRESULT = -2147212512i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_OBJECT_ALREADY_EXISTS: ::windows_sys::core::HRESULT = -2147212531i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_OBJECT_NOT_FOUND: ::windows_sys::core::HRESULT = -2147212536i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_PROVIDER_ALREADY_REGISTERED: ::windows_sys::core::HRESULT = -2147212541i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_PROVIDER_IN_USE: ::windows_sys::core::HRESULT = -2147212537i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_PROVIDER_NOT_REGISTERED: ::windows_sys::core::HRESULT = -2147212540i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_PROVIDER_VETO: ::windows_sys::core::HRESULT = -2147212538i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_REBOOT_REQUIRED: ::windows_sys::core::HRESULT = -2147212505i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_REMOTE_SERVER_UNAVAILABLE: ::windows_sys::core::HRESULT = -2147212509i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_REMOTE_SERVER_UNSUPPORTED: ::windows_sys::core::HRESULT = -2147212508i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_RESYNC_IN_PROGRESS: ::windows_sys::core::HRESULT = -2147212289i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_REVERT_IN_PROGRESS: ::windows_sys::core::HRESULT = -2147212507i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_REVERT_VOLUME_LOST: ::windows_sys::core::HRESULT = -2147212506i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_SNAPSHOT_NOT_IN_SET: ::windows_sys::core::HRESULT = -2147212501i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_SNAPSHOT_SET_IN_PROGRESS: ::windows_sys::core::HRESULT = -2147212522i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_SOME_SNAPSHOTS_NOT_IMPORTED: ::windows_sys::core::HRESULT = -2147212511i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_TRANSACTION_FREEZE_TIMEOUT: ::windows_sys::core::HRESULT = -2147212504i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_TRANSACTION_THAW_TIMEOUT: ::windows_sys::core::HRESULT = -2147212503i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_UNEXPECTED: ::windows_sys::core::HRESULT = -2147212542i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_UNEXPECTED_PROVIDER_ERROR: ::windows_sys::core::HRESULT = -2147212529i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_UNEXPECTED_WRITER_ERROR: ::windows_sys::core::HRESULT = -2147212523i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_UNSELECTED_VOLUME: ::windows_sys::core::HRESULT = -2147212502i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_UNSUPPORTED_CONTEXT: ::windows_sys::core::HRESULT = -2147212517i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_VOLUME_IN_USE: ::windows_sys::core::HRESULT = -2147212515i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_VOLUME_NOT_LOCAL: ::windows_sys::core::HRESULT = -2147212499i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_VOLUME_NOT_SUPPORTED: ::windows_sys::core::HRESULT = -2147212532i32;
// Machine-generated Windows VSS (Volume Shadow Copy Service) bindings:
// HRESULT error/status codes. The negative VSS_E_* values are failure
// HRESULTs (0x8004_xxxx range); the positive VSS_S_* values further down
// are success/informational codes.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_VOLUME_NOT_SUPPORTED_BY_PROVIDER: ::windows_sys::core::HRESULT = -2147212530i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITERERROR_INCONSISTENTSNAPSHOT: ::windows_sys::core::HRESULT = -2147212304i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITERERROR_NONRETRYABLE: ::windows_sys::core::HRESULT = -2147212300i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITERERROR_OUTOFRESOURCES: ::windows_sys::core::HRESULT = -2147212303i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITERERROR_PARTIAL_FAILURE: ::windows_sys::core::HRESULT = -2147212490i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITERERROR_RECOVERY_FAILED: ::windows_sys::core::HRESULT = -2147212299i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITERERROR_RETRYABLE: ::windows_sys::core::HRESULT = -2147212301i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITERERROR_TIMEOUT: ::windows_sys::core::HRESULT = -2147212302i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITER_ALREADY_SUBSCRIBED: ::windows_sys::core::HRESULT = -2147212518i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITER_INFRASTRUCTURE: ::windows_sys::core::HRESULT = -2147212520i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITER_NOT_RESPONDING: ::windows_sys::core::HRESULT = -2147212519i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_E_WRITER_STATUS_NOT_AVAILABLE: ::windows_sys::core::HRESULT = -2147212279i32;
// Success codes reporting the outcome of an asynchronous VSS operation.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_S_ASYNC_CANCELLED: ::windows_sys::core::HRESULT = 271115i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_S_ASYNC_FINISHED: ::windows_sys::core::HRESULT = 271114i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_S_ASYNC_PENDING: ::windows_sys::core::HRESULT = 271113i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_S_SOME_SNAPSHOTS_NOT_IMPORTED: ::windows_sys::core::HRESULT = 271137i32;
// COM class identifier (CLSID) for instantiating the snapshot-management
// coclass — presumably the VssSnapshotMgmt object from vsmgmt.h; confirm
// against the Windows SDK headers.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VssSnapshotMgmt: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0x0b5a2c52_3eb9_470a_96e2_6c6d4570e40f);
// C enums are projected as i32 type aliases plus one `pub const` per
// variant (windows-sys style), keeping the exact ABI representation.
//
// VSS_ALTERNATE_WRITER_STATE: whether a writer has/is an alternate writer.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_ALTERNATE_WRITER_STATE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_AWS_UNDEFINED: VSS_ALTERNATE_WRITER_STATE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_AWS_NO_ALTERNATE_WRITER: VSS_ALTERNATE_WRITER_STATE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_AWS_ALTERNATE_WRITER_EXISTS: VSS_ALTERNATE_WRITER_STATE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_AWS_THIS_IS_ALTERNATE_WRITER: VSS_ALTERNATE_WRITER_STATE = 3i32;
// VSS_APPLICATION_LEVEL: application tier a writer runs at
// (-1 = auto-select, see VSS_APP_AUTO below).
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_APPLICATION_LEVEL = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_APP_UNKNOWN: VSS_APPLICATION_LEVEL = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_APP_SYSTEM: VSS_APPLICATION_LEVEL = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_APP_BACK_END: VSS_APPLICATION_LEVEL = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_APP_FRONT_END: VSS_APPLICATION_LEVEL = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_APP_SYSTEM_RM: VSS_APPLICATION_LEVEL = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_APP_AUTO: VSS_APPLICATION_LEVEL = -1i32;
// VSS_BACKUP_SCHEMA: bit flags (powers of two) describing which backup
// schemes a writer supports; values may be OR-ed together.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_BACKUP_SCHEMA = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_UNDEFINED: VSS_BACKUP_SCHEMA = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_DIFFERENTIAL: VSS_BACKUP_SCHEMA = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_INCREMENTAL: VSS_BACKUP_SCHEMA = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_EXCLUSIVE_INCREMENTAL_DIFFERENTIAL: VSS_BACKUP_SCHEMA = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_LOG: VSS_BACKUP_SCHEMA = 8i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_COPY: VSS_BACKUP_SCHEMA = 16i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_TIMESTAMPED: VSS_BACKUP_SCHEMA = 32i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_LAST_MODIFY: VSS_BACKUP_SCHEMA = 64i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_LSN: VSS_BACKUP_SCHEMA = 128i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_WRITER_SUPPORTS_NEW_TARGET: VSS_BACKUP_SCHEMA = 256i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_WRITER_SUPPORTS_RESTORE_WITH_MOVE: VSS_BACKUP_SCHEMA = 512i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_INDEPENDENT_SYSTEM_STATE: VSS_BACKUP_SCHEMA = 1024i32;
// NOTE(review): bit 2048 (0x800) is intentionally absent here; the jump
// from 1024 to 4096 matches the upstream header, not a transcription gap.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_ROLLFORWARD_RESTORE: VSS_BACKUP_SCHEMA = 4096i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_RESTORE_RENAME: VSS_BACKUP_SCHEMA = 8192i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_AUTHORITATIVE_RESTORE: VSS_BACKUP_SCHEMA = 16384i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BS_WRITER_SUPPORTS_PARALLEL_RESTORES: VSS_BACKUP_SCHEMA = 32768i32;
// VSS_BACKUP_TYPE: sequential (non-flag) values selecting the kind of
// backup being performed.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_BACKUP_TYPE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BT_UNDEFINED: VSS_BACKUP_TYPE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BT_FULL: VSS_BACKUP_TYPE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BT_INCREMENTAL: VSS_BACKUP_TYPE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BT_DIFFERENTIAL: VSS_BACKUP_TYPE = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BT_LOG: VSS_BACKUP_TYPE = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BT_COPY: VSS_BACKUP_TYPE = 5i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BT_OTHER: VSS_BACKUP_TYPE = 6i32;
// VSS_COMPONENT_FLAGS: bit flags qualifying a writer component.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_COMPONENT_FLAGS = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CF_BACKUP_RECOVERY: VSS_COMPONENT_FLAGS = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CF_APP_ROLLBACK_RECOVERY: VSS_COMPONENT_FLAGS = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CF_NOT_SYSTEM_STATE: VSS_COMPONENT_FLAGS = 4i32;
// VSS_COMPONENT_TYPE: kind of component a writer exposes.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_COMPONENT_TYPE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CT_UNDEFINED: VSS_COMPONENT_TYPE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CT_DATABASE: VSS_COMPONENT_TYPE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CT_FILEGROUP: VSS_COMPONENT_TYPE = 2i32;
// VSS_FILE_RESTORE_STATUS: outcome of a file-restore attempt.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_FILE_RESTORE_STATUS = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RS_UNDEFINED: VSS_FILE_RESTORE_STATUS = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RS_NONE: VSS_FILE_RESTORE_STATUS = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RS_ALL: VSS_FILE_RESTORE_STATUS = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RS_FAILED: VSS_FILE_RESTORE_STATUS = 3i32;
// VSS_FILE_SPEC_BACKUP_TYPE: bit flags. The *_BACKUP_REQUIRED group uses
// bits 0-3, the *_SNAPSHOT_REQUIRED group bits 8-11; the two ALL_*
// constants below are the masks of the respective groups (15 and 3840).
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_FILE_SPEC_BACKUP_TYPE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_FULL_BACKUP_REQUIRED: VSS_FILE_SPEC_BACKUP_TYPE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_DIFFERENTIAL_BACKUP_REQUIRED: VSS_FILE_SPEC_BACKUP_TYPE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_INCREMENTAL_BACKUP_REQUIRED: VSS_FILE_SPEC_BACKUP_TYPE = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_LOG_BACKUP_REQUIRED: VSS_FILE_SPEC_BACKUP_TYPE = 8i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_FULL_SNAPSHOT_REQUIRED: VSS_FILE_SPEC_BACKUP_TYPE = 256i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_DIFFERENTIAL_SNAPSHOT_REQUIRED: VSS_FILE_SPEC_BACKUP_TYPE = 512i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_INCREMENTAL_SNAPSHOT_REQUIRED: VSS_FILE_SPEC_BACKUP_TYPE = 1024i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_LOG_SNAPSHOT_REQUIRED: VSS_FILE_SPEC_BACKUP_TYPE = 2048i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_CREATED_DURING_BACKUP: VSS_FILE_SPEC_BACKUP_TYPE = 65536i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_ALL_BACKUP_REQUIRED: VSS_FILE_SPEC_BACKUP_TYPE = 15i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_FSBT_ALL_SNAPSHOT_REQUIRED: VSS_FILE_SPEC_BACKUP_TYPE = 3840i32;
// VSS_HARDWARE_OPTIONS: bit flags controlling BreakSnapshotSetEx and
// LUN-state-change notifications (low bits = break options, high bits
// = OnLunStateChange options).
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_HARDWARE_OPTIONS = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BREAKEX_FLAG_MASK_LUNS: VSS_HARDWARE_OPTIONS = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BREAKEX_FLAG_MAKE_READ_WRITE: VSS_HARDWARE_OPTIONS = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BREAKEX_FLAG_REVERT_IDENTITY_ALL: VSS_HARDWARE_OPTIONS = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_BREAKEX_FLAG_REVERT_IDENTITY_NONE: VSS_HARDWARE_OPTIONS = 8i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_ONLUNSTATECHANGE_NOTIFY_READ_WRITE: VSS_HARDWARE_OPTIONS = 256i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_ONLUNSTATECHANGE_NOTIFY_LUN_PRE_RECOVERY: VSS_HARDWARE_OPTIONS = 512i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_ONLUNSTATECHANGE_NOTIFY_LUN_POST_RECOVERY: VSS_HARDWARE_OPTIONS = 1024i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_ONLUNSTATECHANGE_DO_MASK_LUNS: VSS_HARDWARE_OPTIONS = 2048i32;
// VSS_MGMT_OBJECT_TYPE: discriminant for VSS_MGMT_OBJECT_PROP /
// VSS_MGMT_OBJECT_UNION (defined later in this file).
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_MGMT_OBJECT_TYPE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_MGMT_OBJECT_UNKNOWN: VSS_MGMT_OBJECT_TYPE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_MGMT_OBJECT_VOLUME: VSS_MGMT_OBJECT_TYPE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_MGMT_OBJECT_DIFF_VOLUME: VSS_MGMT_OBJECT_TYPE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_MGMT_OBJECT_DIFF_AREA: VSS_MGMT_OBJECT_TYPE = 3i32;
// VSS_OBJECT_TYPE: discriminant for VSS_OBJECT_PROP / VSS_OBJECT_UNION
// (defined later in this file); COUNT is the number of real variants.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_OBJECT_TYPE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_OBJECT_UNKNOWN: VSS_OBJECT_TYPE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_OBJECT_NONE: VSS_OBJECT_TYPE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_OBJECT_SNAPSHOT_SET: VSS_OBJECT_TYPE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_OBJECT_SNAPSHOT: VSS_OBJECT_TYPE = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_OBJECT_PROVIDER: VSS_OBJECT_TYPE = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_OBJECT_TYPE_COUNT: VSS_OBJECT_TYPE = 5i32;
// VSS_PROTECTION_FAULT: sequential codes identifying why shadow-copy
// protection failed on a volume.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_PROTECTION_FAULT = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_NONE: VSS_PROTECTION_FAULT = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_DIFF_AREA_MISSING: VSS_PROTECTION_FAULT = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_IO_FAILURE_DURING_ONLINE: VSS_PROTECTION_FAULT = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_META_DATA_CORRUPTION: VSS_PROTECTION_FAULT = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_MEMORY_ALLOCATION_FAILURE: VSS_PROTECTION_FAULT = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_MAPPED_MEMORY_FAILURE: VSS_PROTECTION_FAULT = 5i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_COW_READ_FAILURE: VSS_PROTECTION_FAULT = 6i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_COW_WRITE_FAILURE: VSS_PROTECTION_FAULT = 7i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_DIFF_AREA_FULL: VSS_PROTECTION_FAULT = 8i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_GROW_TOO_SLOW: VSS_PROTECTION_FAULT = 9i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_GROW_FAILED: VSS_PROTECTION_FAULT = 10i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_DESTROY_ALL_SNAPSHOTS: VSS_PROTECTION_FAULT = 11i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_FILE_SYSTEM_FAILURE: VSS_PROTECTION_FAULT = 12i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_IO_FAILURE: VSS_PROTECTION_FAULT = 13i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_DIFF_AREA_REMOVED: VSS_PROTECTION_FAULT = 14i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_EXTERNAL_WRITER_TO_DIFF_AREA: VSS_PROTECTION_FAULT = 15i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_FAULT_MOUNT_DURING_CLUSTER_OFFLINE: VSS_PROTECTION_FAULT = 16i32;
// VSS_PROTECTION_LEVEL: what shadow-copy protection prioritizes.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_PROTECTION_LEVEL = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_LEVEL_ORIGINAL_VOLUME: VSS_PROTECTION_LEVEL = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROTECTION_LEVEL_SNAPSHOT: VSS_PROTECTION_LEVEL = 1i32;
// VSS_PROVIDER_CAPABILITIES: bit flags advertising provider features.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_PROVIDER_CAPABILITIES = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PRV_CAPABILITY_LEGACY: VSS_PROVIDER_CAPABILITIES = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PRV_CAPABILITY_COMPLIANT: VSS_PROVIDER_CAPABILITIES = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PRV_CAPABILITY_LUN_REPOINT: VSS_PROVIDER_CAPABILITIES = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PRV_CAPABILITY_LUN_RESYNC: VSS_PROVIDER_CAPABILITIES = 8i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PRV_CAPABILITY_OFFLINE_CREATION: VSS_PROVIDER_CAPABILITIES = 16i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PRV_CAPABILITY_MULTIPLE_IMPORT: VSS_PROVIDER_CAPABILITIES = 32i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PRV_CAPABILITY_RECYCLING: VSS_PROVIDER_CAPABILITIES = 64i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PRV_CAPABILITY_PLEX: VSS_PROVIDER_CAPABILITIES = 128i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PRV_CAPABILITY_DIFFERENTIAL: VSS_PROVIDER_CAPABILITIES = 256i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PRV_CAPABILITY_CLUSTERED: VSS_PROVIDER_CAPABILITIES = 512i32;
// VSS_PROVIDER_TYPE: category of snapshot provider.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_PROVIDER_TYPE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROV_UNKNOWN: VSS_PROVIDER_TYPE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROV_SYSTEM: VSS_PROVIDER_TYPE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROV_SOFTWARE: VSS_PROVIDER_TYPE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROV_HARDWARE: VSS_PROVIDER_TYPE = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_PROV_FILESHARE: VSS_PROVIDER_TYPE = 4i32;
// VSS_RECOVERY_OPTIONS: bit flags for LUN-resync recovery (note values
// start at 256; low bits are unused here).
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_RECOVERY_OPTIONS = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RECOVERY_REVERT_IDENTITY_ALL: VSS_RECOVERY_OPTIONS = 256i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RECOVERY_NO_VOLUME_CHECK: VSS_RECOVERY_OPTIONS = 512i32;
// VSS_RESTOREMETHOD_ENUM: how a writer's files should be restored.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_RESTOREMETHOD_ENUM = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RME_UNDEFINED: VSS_RESTOREMETHOD_ENUM = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RME_RESTORE_IF_NOT_THERE: VSS_RESTOREMETHOD_ENUM = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RME_RESTORE_IF_CAN_REPLACE: VSS_RESTOREMETHOD_ENUM = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RME_STOP_RESTORE_START: VSS_RESTOREMETHOD_ENUM = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RME_RESTORE_TO_ALTERNATE_LOCATION: VSS_RESTOREMETHOD_ENUM = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RME_RESTORE_AT_REBOOT: VSS_RESTOREMETHOD_ENUM = 5i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RME_RESTORE_AT_REBOOT_IF_CANNOT_REPLACE: VSS_RESTOREMETHOD_ENUM = 6i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RME_CUSTOM: VSS_RESTOREMETHOD_ENUM = 7i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RME_RESTORE_STOP_START: VSS_RESTOREMETHOD_ENUM = 8i32;
// VSS_RESTORE_TARGET: where restored files should be placed.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_RESTORE_TARGET = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RT_UNDEFINED: VSS_RESTORE_TARGET = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RT_ORIGINAL: VSS_RESTORE_TARGET = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RT_ALTERNATE: VSS_RESTORE_TARGET = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RT_DIRECTED: VSS_RESTORE_TARGET = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RT_ORIGINAL_LOCATION: VSS_RESTORE_TARGET = 4i32;
// VSS_RESTORE_TYPE: kind of restore operation in progress.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_RESTORE_TYPE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RTYPE_UNDEFINED: VSS_RESTORE_TYPE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RTYPE_BY_COPY: VSS_RESTORE_TYPE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RTYPE_IMPORT: VSS_RESTORE_TYPE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RTYPE_OTHER: VSS_RESTORE_TYPE = 3i32;
// VSS_ROLLFORWARD_TYPE: roll-forward behavior during restore.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_ROLLFORWARD_TYPE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RF_UNDEFINED: VSS_ROLLFORWARD_TYPE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RF_NONE: VSS_ROLLFORWARD_TYPE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RF_ALL: VSS_ROLLFORWARD_TYPE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_RF_PARTIAL: VSS_ROLLFORWARD_TYPE = 3i32;
// VSS_SNAPSHOT_COMPATIBILITY: bit flags restricting what can run on a
// volume while it is shadow-copied.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_SNAPSHOT_COMPATIBILITY = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SC_DISABLE_DEFRAG: VSS_SNAPSHOT_COMPATIBILITY = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SC_DISABLE_CONTENTINDEX: VSS_SNAPSHOT_COMPATIBILITY = 2i32;
// VSS_SNAPSHOT_CONTEXT: predefined combinations of snapshot attributes.
// The non-round values (9, 13, 25, 29) are attribute-bit combinations,
// not sequential enum values; -1 (VSS_CTX_ALL) matches every context.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_SNAPSHOT_CONTEXT = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CTX_BACKUP: VSS_SNAPSHOT_CONTEXT = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CTX_FILE_SHARE_BACKUP: VSS_SNAPSHOT_CONTEXT = 16i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CTX_NAS_ROLLBACK: VSS_SNAPSHOT_CONTEXT = 25i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CTX_APP_ROLLBACK: VSS_SNAPSHOT_CONTEXT = 9i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CTX_CLIENT_ACCESSIBLE: VSS_SNAPSHOT_CONTEXT = 29i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CTX_CLIENT_ACCESSIBLE_WRITERS: VSS_SNAPSHOT_CONTEXT = 13i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_CTX_ALL: VSS_SNAPSHOT_CONTEXT = -1i32;
// VSS_SNAPSHOT_PROPERTY_ID: selects a field of VSS_SNAPSHOT_PROP
// (the struct defined later in this file; ids follow field order).
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_SNAPSHOT_PROPERTY_ID = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_UNKNOWN: VSS_SNAPSHOT_PROPERTY_ID = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_SNAPSHOT_ID: VSS_SNAPSHOT_PROPERTY_ID = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_SNAPSHOT_SET_ID: VSS_SNAPSHOT_PROPERTY_ID = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_SNAPSHOTS_COUNT: VSS_SNAPSHOT_PROPERTY_ID = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_SNAPSHOT_DEVICE: VSS_SNAPSHOT_PROPERTY_ID = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_ORIGINAL_VOLUME: VSS_SNAPSHOT_PROPERTY_ID = 5i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_ORIGINATING_MACHINE: VSS_SNAPSHOT_PROPERTY_ID = 6i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_SERVICE_MACHINE: VSS_SNAPSHOT_PROPERTY_ID = 7i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_EXPOSED_NAME: VSS_SNAPSHOT_PROPERTY_ID = 8i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_EXPOSED_PATH: VSS_SNAPSHOT_PROPERTY_ID = 9i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_PROVIDER_ID: VSS_SNAPSHOT_PROPERTY_ID = 10i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_SNAPSHOT_ATTRIBUTES: VSS_SNAPSHOT_PROPERTY_ID = 11i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_CREATION_TIMESTAMP: VSS_SNAPSHOT_PROPERTY_ID = 12i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SPROPID_STATUS: VSS_SNAPSHOT_PROPERTY_ID = 13i32;
// VSS_SNAPSHOT_STATE: lifecycle state machine of a shadow copy, from
// preparation through commit phases to created/aborted/deleted;
// VSS_SS_COUNT is the number of states, not a state itself.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_SNAPSHOT_STATE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_UNKNOWN: VSS_SNAPSHOT_STATE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_PREPARING: VSS_SNAPSHOT_STATE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_PROCESSING_PREPARE: VSS_SNAPSHOT_STATE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_PREPARED: VSS_SNAPSHOT_STATE = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_PROCESSING_PRECOMMIT: VSS_SNAPSHOT_STATE = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_PRECOMMITTED: VSS_SNAPSHOT_STATE = 5i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_PROCESSING_COMMIT: VSS_SNAPSHOT_STATE = 6i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_COMMITTED: VSS_SNAPSHOT_STATE = 7i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_PROCESSING_POSTCOMMIT: VSS_SNAPSHOT_STATE = 8i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_PROCESSING_PREFINALCOMMIT: VSS_SNAPSHOT_STATE = 9i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_PREFINALCOMMITTED: VSS_SNAPSHOT_STATE = 10i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_PROCESSING_POSTFINALCOMMIT: VSS_SNAPSHOT_STATE = 11i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_CREATED: VSS_SNAPSHOT_STATE = 12i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_ABORTED: VSS_SNAPSHOT_STATE = 13i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_DELETED: VSS_SNAPSHOT_STATE = 14i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_POSTCOMMITTED: VSS_SNAPSHOT_STATE = 15i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SS_COUNT: VSS_SNAPSHOT_STATE = 16i32;
// VSS_SOURCE_TYPE: kind of data a writer manages.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_SOURCE_TYPE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_ST_UNDEFINED: VSS_SOURCE_TYPE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_ST_TRANSACTEDDB: VSS_SOURCE_TYPE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_ST_NONTRANSACTEDDB: VSS_SOURCE_TYPE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_ST_OTHER: VSS_SOURCE_TYPE = 3i32;
// VSS_SUBSCRIBE_MASK: bit flags selecting which event groups a writer
// subscribes to; -1 (all bits set) subscribes to everything.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_SUBSCRIBE_MASK = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SM_POST_SNAPSHOT_FLAG: VSS_SUBSCRIBE_MASK = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SM_BACKUP_EVENTS_FLAG: VSS_SUBSCRIBE_MASK = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SM_RESTORE_EVENTS_FLAG: VSS_SUBSCRIBE_MASK = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SM_IO_THROTTLING_FLAG: VSS_SUBSCRIBE_MASK = 8i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_SM_ALL_FLAGS: VSS_SUBSCRIBE_MASK = -1i32;
// VSS_USAGE_TYPE: how a writer's data is used by the system.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_USAGE_TYPE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_UT_UNDEFINED: VSS_USAGE_TYPE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_UT_BOOTABLESYSTEMSTATE: VSS_USAGE_TYPE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_UT_SYSTEMSERVICE: VSS_USAGE_TYPE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_UT_USERDATA: VSS_USAGE_TYPE = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_UT_OTHER: VSS_USAGE_TYPE = 4i32;
// VSS_VOLUME_SNAPSHOT_ATTRIBUTES: bit flags describing a shadow copy;
// stored in VSS_SNAPSHOT_PROP.m_lSnapshotAttributes (struct below).
// Bits 8-15 are unused here: values jump from 128 to 65536.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_VOLUME_SNAPSHOT_ATTRIBUTES = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_PERSISTENT: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_NO_AUTORECOVERY: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_CLIENT_ACCESSIBLE: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_NO_AUTO_RELEASE: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 8i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_NO_WRITERS: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 16i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_TRANSPORTABLE: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 32i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_NOT_SURFACED: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 64i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_NOT_TRANSACTED: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 128i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_HARDWARE_ASSISTED: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 65536i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_DIFFERENTIAL: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 131072i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_PLEX: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 262144i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_IMPORTED: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 524288i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_EXPOSED_LOCALLY: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 1048576i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_EXPOSED_REMOTELY: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 2097152i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_AUTORECOVER: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 4194304i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_ROLLBACK_RECOVERY: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 8388608i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_DELAYED_POSTSNAPSHOT: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 16777216i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_TXF_RECOVERY: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 33554432i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_VOLSNAP_ATTR_FILE_SHARE: VSS_VOLUME_SNAPSHOT_ATTRIBUTES = 67108864i32;
// VSS_WRITERRESTORE_ENUM: whether a writer participates in restore.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_WRITERRESTORE_ENUM = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WRE_UNDEFINED: VSS_WRITERRESTORE_ENUM = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WRE_NEVER: VSS_WRITERRESTORE_ENUM = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WRE_IF_REPLACE_FAILS: VSS_WRITERRESTORE_ENUM = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WRE_ALWAYS: VSS_WRITERRESTORE_ENUM = 3i32;
// VSS_WRITER_STATE: writer state machine (stable/waiting states 1-5,
// FAILED_AT_* states 6-15); VSS_WS_COUNT is the number of states.
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub type VSS_WRITER_STATE = i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_UNKNOWN: VSS_WRITER_STATE = 0i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_STABLE: VSS_WRITER_STATE = 1i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_WAITING_FOR_FREEZE: VSS_WRITER_STATE = 2i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_WAITING_FOR_THAW: VSS_WRITER_STATE = 3i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_WAITING_FOR_POST_SNAPSHOT: VSS_WRITER_STATE = 4i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_WAITING_FOR_BACKUP_COMPLETE: VSS_WRITER_STATE = 5i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_FAILED_AT_IDENTIFY: VSS_WRITER_STATE = 6i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_FAILED_AT_PREPARE_BACKUP: VSS_WRITER_STATE = 7i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_FAILED_AT_PREPARE_SNAPSHOT: VSS_WRITER_STATE = 8i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_FAILED_AT_FREEZE: VSS_WRITER_STATE = 9i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_FAILED_AT_THAW: VSS_WRITER_STATE = 10i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_FAILED_AT_POST_SNAPSHOT: VSS_WRITER_STATE = 11i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_FAILED_AT_BACKUP_COMPLETE: VSS_WRITER_STATE = 12i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_FAILED_AT_PRE_RESTORE: VSS_WRITER_STATE = 13i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_FAILED_AT_POST_RESTORE: VSS_WRITER_STATE = 14i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_FAILED_AT_BACKUPSHUTDOWN: VSS_WRITER_STATE = 15i32;
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub const VSS_WS_COUNT: VSS_WRITER_STATE = 16i32;
// Opaque stub for the IVssExamineWriterMetadata COM interface: a one-byte
// repr(C) newtype used only behind pointers, never constructed or
// dereferenced directly.
#[repr(C)]
pub struct IVssExamineWriterMetadata(pub u8);
// C-layout mirror of the native VSS_DIFF_AREA_PROP structure (shadow-copy
// diff-area association and space usage). The m_pwsz* fields are raw
// pointers to NUL-terminated UTF-16 strings owned by the API (PWSTR
// convention implied by the Hungarian prefix — confirm against vsmgmt.h).
#[repr(C)]
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub struct VSS_DIFF_AREA_PROP {
pub m_pwszVolumeName: *mut u16,
pub m_pwszDiffAreaVolumeName: *mut u16,
pub m_llMaximumDiffSpace: i64,
pub m_llAllocatedDiffSpace: i64,
pub m_llUsedDiffSpace: i64,
}
// Manual Copy/Clone (windows-sys avoids derives on generated types);
// Clone is a plain bitwise copy since the struct is Copy.
impl ::core::marker::Copy for VSS_DIFF_AREA_PROP {}
impl ::core::clone::Clone for VSS_DIFF_AREA_PROP {
fn clone(&self) -> Self {
*self
}
}
// C-layout mirror of the native VSS_DIFF_VOLUME_PROP structure (a volume
// usable as a diff area, with free/total space in bytes). m_pwsz* fields
// are raw pointers to NUL-terminated UTF-16 strings owned by the API.
#[repr(C)]
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub struct VSS_DIFF_VOLUME_PROP {
pub m_pwszVolumeName: *mut u16,
pub m_pwszVolumeDisplayName: *mut u16,
pub m_llVolumeFreeSpace: i64,
pub m_llVolumeTotalSpace: i64,
}
// Manual Copy/Clone to match the generator's no-derive convention.
impl ::core::marker::Copy for VSS_DIFF_VOLUME_PROP {}
impl ::core::clone::Clone for VSS_DIFF_VOLUME_PROP {
fn clone(&self) -> Self {
*self
}
}
// Tagged union (C-style): `Type` is the VSS_MGMT_OBJECT_TYPE discriminant
// telling which VSS_MGMT_OBJECT_UNION member of `Obj` is valid
// (VSS_MGMT_OBJECT_VOLUME -> Vol, _DIFF_VOLUME -> DiffVol,
// _DIFF_AREA -> DiffArea).
#[repr(C)]
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub struct VSS_MGMT_OBJECT_PROP {
pub Type: VSS_MGMT_OBJECT_TYPE,
pub Obj: VSS_MGMT_OBJECT_UNION,
}
// Manual Copy/Clone to match the generator's no-derive convention.
impl ::core::marker::Copy for VSS_MGMT_OBJECT_PROP {}
impl ::core::clone::Clone for VSS_MGMT_OBJECT_PROP {
fn clone(&self) -> Self {
*self
}
}
// Untagged C union; the active member is selected by the `Type` field of
// the enclosing VSS_MGMT_OBJECT_PROP. Reading any member is `unsafe`.
// NOTE(review): VSS_VOLUME_PROP is referenced here but declared elsewhere
// in this file.
#[repr(C)]
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub union VSS_MGMT_OBJECT_UNION {
pub Vol: VSS_VOLUME_PROP,
pub DiffVol: VSS_DIFF_VOLUME_PROP,
pub DiffArea: VSS_DIFF_AREA_PROP,
}
// Manual Copy/Clone: unions cannot derive Clone unless Copy, and the
// generator emits the impls explicitly.
impl ::core::marker::Copy for VSS_MGMT_OBJECT_UNION {}
impl ::core::clone::Clone for VSS_MGMT_OBJECT_UNION {
fn clone(&self) -> Self {
*self
}
}
#[repr(C)]
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub struct VSS_OBJECT_PROP {
pub Type: VSS_OBJECT_TYPE,
pub Obj: VSS_OBJECT_UNION,
}
impl ::core::marker::Copy for VSS_OBJECT_PROP {}
impl ::core::clone::Clone for VSS_OBJECT_PROP {
fn clone(&self) -> Self {
*self
}
}
#[repr(C)]
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub union VSS_OBJECT_UNION {
pub Snap: VSS_SNAPSHOT_PROP,
pub Prov: VSS_PROVIDER_PROP,
}
impl ::core::marker::Copy for VSS_OBJECT_UNION {}
impl ::core::clone::Clone for VSS_OBJECT_UNION {
fn clone(&self) -> Self {
*self
}
}
#[repr(C)]
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub struct VSS_PROVIDER_PROP {
pub m_ProviderId: ::windows_sys::core::GUID,
pub m_pwszProviderName: *mut u16,
pub m_eProviderType: VSS_PROVIDER_TYPE,
pub m_pwszProviderVersion: *mut u16,
pub m_ProviderVersionId: ::windows_sys::core::GUID,
pub m_ClassId: ::windows_sys::core::GUID,
}
impl ::core::marker::Copy for VSS_PROVIDER_PROP {}
impl ::core::clone::Clone for VSS_PROVIDER_PROP {
fn clone(&self) -> Self {
*self
}
}
#[repr(C)]
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub struct VSS_SNAPSHOT_PROP {
pub m_SnapshotId: ::windows_sys::core::GUID,
pub m_SnapshotSetId: ::windows_sys::core::GUID,
pub m_lSnapshotsCount: i32,
pub m_pwszSnapshotDeviceObject: *mut u16,
pub m_pwszOriginalVolumeName: *mut u16,
pub m_pwszOriginatingMachine: *mut u16,
pub m_pwszServiceMachine: *mut u16,
pub m_pwszExposedName: *mut u16,
pub m_pwszExposedPath: *mut u16,
pub m_ProviderId: ::windows_sys::core::GUID,
pub m_lSnapshotAttributes: i32,
pub m_tsCreationTimestamp: i64,
pub m_eStatus: VSS_SNAPSHOT_STATE,
}
impl ::core::marker::Copy for VSS_SNAPSHOT_PROP {}
impl ::core::clone::Clone for VSS_SNAPSHOT_PROP {
fn clone(&self) -> Self {
*self
}
}
#[repr(C)]
#[doc = "*Required features: `\"Win32_Storage_Vss\"`*"]
pub struct VSS_VOLUME_PROP {
pub m_pwszVolumeName: *mut u16,
pub m_pwszVolumeDisplayName: *mut u16,
}
impl ::core::marker::Copy for VSS_VOLUME_PROP {}
impl ::core::clone::Clone for VSS_VOLUME_PROP {
fn clone(&self) -> Self {
*self
}
}
#[repr(C)]
#[doc = "*Required features: `\"Win32_Storage_Vss\"`, `\"Win32_Foundation\"`*"]
#[cfg(feature = "Win32_Foundation")]
pub struct VSS_VOLUME_PROTECTION_INFO {
pub m_protectionLevel: VSS_PROTECTION_LEVEL,
pub m_volumeIsOfflineForProtection: super::super::Foundation::BOOL,
pub m_protectionFault: VSS_PROTECTION_FAULT,
pub m_failureStatus: i32,
pub m_volumeHasUnusedDiffArea: super::super::Foundation::BOOL,
pub m_reserved: u32,
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::marker::Copy for VSS_VOLUME_PROTECTION_INFO {}
#[cfg(feature = "Win32_Foundation")]
impl ::core::clone::Clone for VSS_VOLUME_PROTECTION_INFO {
fn clone(&self) -> Self {
*self
}
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::any::Any;
use std::collections::HashMap;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::mem;
use std::sync::Arc;
use common_arrow::arrow::array::Array;
use common_arrow::arrow::chunk::Chunk as ArrowChunk;
use common_arrow::arrow::datatypes::Field;
use common_arrow::arrow::io::parquet::read::infer_schema;
use common_arrow::arrow::io::parquet::read::read_columns;
use common_arrow::arrow::io::parquet::read::read_metadata_async;
use common_arrow::arrow::io::parquet::read::to_deserializer;
use common_arrow::arrow::io::parquet::read::RowGroupDeserializer;
use common_arrow::parquet::metadata::ColumnChunkMetaData;
use common_arrow::parquet::metadata::FileMetaData;
use common_arrow::parquet::metadata::RowGroupMetaData;
use common_arrow::parquet::read::read_metadata;
use common_exception::ErrorCode;
use common_exception::Result;
use common_expression::DataBlock;
use common_expression::DataField;
use common_expression::DataSchema;
use common_expression::TableSchema;
use common_expression::TableSchemaRef;
use common_meta_app::principal::StageInfo;
use common_pipeline_core::Pipeline;
use common_settings::Settings;
use common_storage::read_parquet_metas_in_parallel;
use common_storage::StageFileInfo;
use futures::AsyncRead;
use futures::AsyncReadExt;
use futures::AsyncSeek;
use futures::AsyncSeekExt;
use opendal::Operator;
use serde::Deserializer;
use serde::Serializer;
use crate::input_formats::input_pipeline::AligningStateTrait;
use crate::input_formats::input_pipeline::BlockBuilderTrait;
use crate::input_formats::input_pipeline::InputFormatPipe;
use crate::input_formats::input_pipeline::ReadBatchTrait;
use crate::input_formats::input_pipeline::RowBatchTrait;
use crate::input_formats::input_split::DynData;
use crate::input_formats::input_split::FileInfo;
use crate::input_formats::InputContext;
use crate::input_formats::InputFormat;
use crate::input_formats::SplitInfo;
/// Parquet implementation of the stage input-format machinery.
pub struct InputFormatParquet;
impl InputFormatParquet {
    /// Build one `SplitInfo` per non-empty row group of each input file.
    ///
    /// `file_infos` and `metas` are zipped positionally, so they must be in
    /// the same order and of the same length.
    fn make_splits(
        file_infos: Vec<StageFileInfo>,
        metas: Vec<FileMetaData>,
    ) -> Result<Vec<Arc<SplitInfo>>> {
        let mut infos = vec![];
        // The schema is inferred once, from the first file only; later files
        // are assumed to share it.
        // NOTE(review): confirm mixed-schema inputs are rejected upstream.
        let mut schema = None;
        for (info, mut file_meta) in file_infos.into_iter().zip(metas.into_iter()) {
            let size = info.size as usize;
            let path = info.path.clone();
            let row_groups = mem::take(&mut file_meta.row_groups);
            if schema.is_none() {
                schema = Some(infer_schema(&file_meta)?);
            }
            let fields = Arc::new(schema.clone().unwrap().fields);
            let read_file_meta = Arc::new(FileMeta { fields });
            let file_info = Arc::new(FileInfo {
                path,
                size,
                num_splits: row_groups.len(),
                compress_alg: None,
            });
            let num_file_splits = row_groups.len();
            for (i, rg) in row_groups.into_iter().enumerate() {
                // Row groups without columns carry no data; skip them.
                if !rg.columns().is_empty() {
                    // Split offset = earliest column-chunk offset in the row group.
                    let offset = rg
                        .columns()
                        .iter()
                        .map(col_offset)
                        .min()
                        .expect("must success") as usize;
                    let size = rg.total_byte_size();
                    let meta = Arc::new(SplitMeta {
                        file: read_file_meta.clone(),
                        meta: rg,
                    });
                    let info = Arc::new(SplitInfo {
                        file: file_info.clone(),
                        seq_in_file: i,
                        offset,
                        size,
                        num_file_splits,
                        format_info: Some(meta),
                    });
                    infos.push(info);
                }
            }
        }
        Ok(infos)
    }
}
/// Starting byte offset of a column chunk within the parquet file.
///
/// Dictionary pages, when present, are written *before* the first data page
/// of a chunk, so using `data_page_offset()` alone would overstate where the
/// chunk (and therefore the split) actually begins.
fn col_offset(meta: &ColumnChunkMetaData) -> i64 {
    meta.dictionary_page_offset()
        .unwrap_or_else(|| meta.data_page_offset())
}
#[async_trait::async_trait]
impl InputFormat for InputFormatParquet {
    /// Read every file's parquet metadata and turn each row group into a split.
    async fn get_splits(
        &self,
        file_infos: Vec<StageFileInfo>,
        _stage_info: &StageInfo,
        op: &Operator,
        _settings: &Arc<Settings>,
    ) -> Result<Vec<Arc<SplitInfo>>> {
        let files = file_infos
            .iter()
            .map(|f| (f.path.clone(), f.size))
            .collect::<Vec<_>>();
        // NOTE(review): 16 and 64 look like parallelism/batch limits of
        // read_parquet_metas_in_parallel — confirm against its signature.
        let metas = read_parquet_metas_in_parallel(op.clone(), files, 16, 64).await?;
        Self::make_splits(file_infos, metas)
    }
    /// Infer the table schema from a single parquet file's footer metadata.
    async fn infer_schema(&self, path: &str, op: &Operator) -> Result<TableSchemaRef> {
        let mut reader = op.reader(path).await?;
        let file_meta = read_metadata_async(&mut reader).await?;
        let arrow_schema = infer_schema(&file_meta)?;
        Ok(Arc::new(TableSchema::from(&arrow_schema)))
    }
    /// COPY pipeline, built from the pre-computed (aligned) splits.
    fn exec_copy(&self, ctx: Arc<InputContext>, pipeline: &mut Pipeline) -> Result<()> {
        ParquetFormatPipe::execute_copy_aligned(ctx, pipeline)
    }
    /// Streaming-load pipeline: bytes arrive in chunks and go through align().
    fn exec_stream(&self, ctx: Arc<InputContext>, pipeline: &mut Pipeline) -> Result<()> {
        ParquetFormatPipe::execute_stream(ctx, pipeline)
    }
}
/// Pipe wiring the parquet reader into the generic input-format pipeline.
pub struct ParquetFormatPipe;
#[async_trait::async_trait]
impl InputFormatPipe for ParquetFormatPipe {
    type SplitMeta = SplitMeta;
    type ReadBatch = ReadBatch;
    type RowBatch = RowGroupInMemory;
    type AligningState = AligningState;
    type BlockBuilder = ParquetBlockBuilder;
    /// Read one split (a single row group) fully into memory.
    async fn read_split(
        ctx: Arc<InputContext>,
        split_info: Arc<SplitInfo>,
    ) -> Result<Self::RowBatch> {
        let meta = Self::get_split_meta(&split_info).expect("must success");
        let op = ctx.source.get_operator()?;
        // Restrict reading to the fields the query schema actually needs.
        let input_fields = Arc::new(get_used_fields(&meta.file.fields, &ctx.schema)?);
        RowGroupInMemory::read_async(split_info.clone(), op, meta.meta.clone(), input_fields).await
    }
}
/// Per-file metadata shared by all splits of that file.
pub struct FileMeta {
    // all fields in the parquet file
    pub fields: Arc<Vec<Field>>,
}
/// Format-specific metadata attached to a split: one parquet row group.
#[derive(Clone)]
pub struct SplitMeta {
    pub file: Arc<FileMeta>,
    pub meta: RowGroupMetaData,
}
impl Debug for SplitMeta {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "parquet split meta")
    }
}
// Serialize/Deserialize are required by the `DynData` typetag machinery, but
// parquet split metadata is not meant to cross a serialization boundary:
// both impls deliberately panic if ever exercised.
impl serde::Serialize for SplitMeta {
    fn serialize<S>(&self, _serializer: S) -> Result<S::Ok, S::Error>
    where S: Serializer {
        unimplemented!()
    }
}
impl<'a> serde::Deserialize<'a> for SplitMeta {
    fn deserialize<D: Deserializer<'a>>(_deserializer: D) -> Result<Self, D::Error> {
        unimplemented!()
    }
}
#[typetag::serde(name = "parquet_split")]
impl DynData for SplitMeta {
    fn as_any(&self) -> &dyn Any {
        self
    }
}
/// A row group materialized in memory as the raw bytes of its column chunks.
pub struct RowGroupInMemory {
    // Human-readable split description, used in log/error messages.
    pub split_info: String,
    pub meta: RowGroupMetaData,
    // for input, they are in the order of schema.
    // for select, they are the fields used in query.
    // used both in read and deserialize.
    pub fields_to_read: Arc<Vec<Field>>,
    // field_meta_indexes[f] lists the indexes into `meta.columns()` of the
    // column chunks belonging to field f.
    pub field_meta_indexes: Vec<Vec<usize>>,
    // field_arrays[f] holds the raw bytes of each column chunk of field f.
    pub field_arrays: Vec<Vec<Vec<u8>>>,
}
impl RowBatchTrait for RowGroupInMemory {
    /// Reported batch size: the row group's compressed byte size.
    fn size(&self) -> usize {
        self.meta.compressed_size()
    }
    fn rows(&self) -> usize {
        self.meta.num_rows()
    }
}
impl RowGroupInMemory {
    /// Synchronously read all requested fields of `meta` from `reader`.
    ///
    /// Used on the aligning path, where the whole file is already buffered in
    /// memory behind a `Cursor`.
    fn read<R: Read + Seek>(
        split_info: String,
        reader: &mut R,
        meta: RowGroupMetaData,
        fields: Arc<Vec<Field>>,
    ) -> Result<Self> {
        let field_names = fields.iter().map(|x| x.name.as_str()).collect::<Vec<_>>();
        let field_meta_indexes = split_column_metas_by_field(meta.columns(), &field_names);
        // One entry per field, each holding the raw bytes of that field's
        // column chunks. (Renamed from the original `filed_arrays` typo.)
        let mut field_arrays = vec![];
        for field_name in field_names {
            let meta_data = read_columns(reader, meta.columns(), field_name)?;
            let data = meta_data.into_iter().map(|t| t.1).collect::<Vec<_>>();
            field_arrays.push(data)
        }
        Ok(Self {
            split_info,
            meta,
            field_meta_indexes,
            field_arrays,
            fields_to_read: fields,
        })
    }
    /// Read all column chunks of one field; returns `(index, chunks)` so the
    /// caller can restore field order after concurrent execution.
    async fn read_field_async(
        op: Operator,
        path: String,
        col_metas: Vec<ColumnChunkMetaData>,
        index: usize,
    ) -> Result<(usize, Vec<Vec<u8>>)> {
        let mut cols = Vec::with_capacity(col_metas.len());
        let mut reader = op.reader(&path).await?;
        for meta in &col_metas {
            cols.push(read_single_column_async(&mut reader, meta).await?)
        }
        Ok((index, cols))
    }
    /// Asynchronously read one row group, fetching all fields concurrently.
    async fn read_async(
        split_info: Arc<SplitInfo>,
        operator: Operator,
        meta: RowGroupMetaData,
        fields: Arc<Vec<Field>>,
    ) -> Result<Self> {
        let field_names = fields.iter().map(|x| x.name.as_str()).collect::<Vec<_>>();
        let field_meta_indexes = split_column_metas_by_field(meta.columns(), &field_names);
        let mut join_handlers = Vec::with_capacity(field_names.len());
        for (i, field_name) in field_names.iter().enumerate() {
            let col_metas = get_field_columns(meta.columns(), field_name)
                .into_iter()
                .cloned()
                .collect::<Vec<_>>();
            let op = operator.clone();
            let path = split_info.file.path.clone();
            join_handlers.push(async move { Self::read_field_async(op, path, col_metas, i).await });
        }
        let mut field_arrays = futures::future::try_join_all(join_handlers).await?;
        // Order by the embedded index to guarantee field order, then drop the
        // index component.
        field_arrays.sort();
        let field_arrays = field_arrays.into_iter().map(|t| t.1).collect::<Vec<_>>();
        Ok(Self {
            split_info: split_info.to_string(),
            meta,
            field_meta_indexes,
            field_arrays,
            fields_to_read: fields,
        })
    }
    /// Deserialize the buffered bytes into one arrow chunk.
    ///
    /// Consumes `self.field_arrays` (via `mem::take`), so this must only be
    /// called once per row group.
    fn get_arrow_chunk(&mut self) -> Result<ArrowChunk<Box<dyn Array>>> {
        let mut column_chunks = vec![];
        let field_arrays = mem::take(&mut self.field_arrays);
        for (f, data) in field_arrays.into_iter().enumerate() {
            let meta_iters = self.field_meta_indexes[f]
                .iter()
                .map(|c| &self.meta.columns()[*c]);
            let meta_data = meta_iters.zip(data.into_iter()).collect::<Vec<_>>();
            let array_iters = to_deserializer(
                meta_data,
                self.fields_to_read[f].clone(),
                self.meta.num_rows(),
                None,
                None,
            )?;
            column_chunks.push(array_iters);
        }
        match RowGroupDeserializer::new(column_chunks, self.meta.num_rows(), None).next() {
            None => Err(ErrorCode::Internal(format!(
                "no chunk when deserialize row group {}",
                self.split_info
            ))),
            Some(Ok(chunk)) => Ok(chunk),
            Some(Err(e)) => Err(e.into()),
        }
    }
}
impl Debug for RowGroupInMemory {
    /// Opaque representation; row-group contents are too large to print.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str("RowGroupInMemory")
    }
}
/// A batch flowing through the pipeline: either raw bytes from the source or
/// an already-materialized row group.
#[derive(Debug)]
pub enum ReadBatch {
    Buffer(Vec<u8>),
    #[allow(unused)]
    RowGroup(RowGroupInMemory),
}
impl From<Vec<u8>> for ReadBatch {
    fn from(v: Vec<u8>) -> Self {
        Self::Buffer(v)
    }
}
impl ReadBatchTrait for ReadBatch {
    /// Batch size in bytes: buffer length, or the row group's reported size.
    fn size(&self) -> usize {
        match self {
            ReadBatch::Buffer(v) => v.len(),
            ReadBatch::RowGroup(g) => g.size(),
        }
    }
}
/// Builds `DataBlock`s from in-memory parquet row groups.
pub struct ParquetBlockBuilder {
    ctx: Arc<InputContext>,
}
impl BlockBuilderTrait for ParquetBlockBuilder {
    type Pipe = ParquetFormatPipe;
    fn create(ctx: Arc<InputContext>) -> Self {
        ParquetBlockBuilder { ctx }
    }
    /// Deserialize one row group into blocks, re-chunked so each block holds
    /// at most `max_rows_per_block` rows. `None` (end of stream) yields no
    /// blocks.
    fn deserialize(&mut self, mut batch: Option<RowGroupInMemory>) -> Result<Vec<DataBlock>> {
        if let Some(rg) = batch.as_mut() {
            let chunk = rg.get_arrow_chunk()?;
            let fields: Vec<DataField> = rg
                .fields_to_read
                .iter()
                .map(DataField::from)
                .collect::<Vec<_>>();
            let input_schema = DataSchema::new(fields);
            let block = DataBlock::from_arrow_chunk(&chunk, &input_schema)?;
            let block_total_rows = block.num_rows();
            let num_rows_per_block = self.ctx.block_compact_thresholds.max_rows_per_block;
            // Slice into num_rows_per_block sized pieces; the last piece takes
            // whatever rows remain.
            let blocks: Vec<DataBlock> = (0..block_total_rows)
                .step_by(num_rows_per_block)
                .map(|idx| {
                    if idx + num_rows_per_block < block_total_rows {
                        block.slice(idx..idx + num_rows_per_block)
                    } else {
                        block.slice(idx..block_total_rows)
                    }
                })
                .collect();
            Ok(blocks)
        } else {
            Ok(vec![])
        }
    }
}
/// Aligning state for parquet: buffers raw bytes until end of stream, then
/// parses the whole file into row groups (the parquet footer lives at the end
/// of the file, so nothing can be decoded before all bytes have arrived).
pub struct AligningState {
    ctx: Arc<InputContext>,
    split_info: Arc<SplitInfo>,
    buffers: Vec<Vec<u8>>,
}
impl AligningStateTrait for AligningState {
    type Pipe = ParquetFormatPipe;
    fn try_create(ctx: &Arc<InputContext>, split_info: &Arc<SplitInfo>) -> Result<Self> {
        Ok(AligningState {
            ctx: ctx.clone(),
            split_info: split_info.clone(),
            buffers: vec![],
        })
    }
    /// Accumulate buffers while data keeps arriving (`Some`); on end of
    /// stream (`None`), parse the concatenated bytes into row groups.
    fn align(&mut self, read_batch: Option<ReadBatch>) -> Result<Vec<RowGroupInMemory>> {
        let split_info = self.split_info.to_string();
        if let Some(rb) = read_batch {
            if let ReadBatch::Buffer(b) = rb {
                self.buffers.push(b)
            } else {
                return Err(ErrorCode::Internal(
                    "Bug: should not see ReadBatch::RowGroup in align().",
                ));
            };
            Ok(vec![])
        } else {
            // End of stream: the whole file is now in memory.
            let file_in_memory = self.buffers.concat();
            let size = file_in_memory.len();
            tracing::debug!(
                "aligning parquet file {} of {} bytes",
                self.split_info.file.path,
                size,
            );
            let mut cursor = Cursor::new(file_in_memory);
            let file_meta = read_metadata(&mut cursor)?;
            let infer_schema = infer_schema(&file_meta)?;
            // Only materialize the fields the query schema needs.
            let fields = Arc::new(get_used_fields(&infer_schema.fields, &self.ctx.schema)?);
            let mut row_batches = Vec::with_capacity(file_meta.row_groups.len());
            for row_group in file_meta.row_groups.into_iter() {
                row_batches.push(RowGroupInMemory::read(
                    split_info.clone(),
                    &mut cursor,
                    row_group,
                    fields.clone(),
                )?)
            }
            tracing::info!(
                "align parquet file {} of {} bytes to {} row groups",
                self.split_info.file.path,
                size,
                row_batches.len()
            );
            Ok(row_batches)
        }
    }
}
/// Project the parquet file's `fields` onto the columns required by `schema`.
///
/// For every column of the table schema, finds the matching parquet field by
/// case-insensitive name (the last match wins) and returns the matches in
/// schema order.
///
/// Takes `&[Field]` instead of `&Vec<Field>` (deref coercion keeps existing
/// callers working); the output capacity is sized to the schema, whose length
/// the result actually has.
///
/// # Errors
/// `TableSchemaMismatch` if any schema column has no counterpart in the file.
fn get_used_fields(fields: &[Field], schema: &TableSchemaRef) -> Result<Vec<Field>> {
    let mut read_fields = Vec::with_capacity(schema.fields().len());
    for f in schema.fields().iter() {
        if let Some(m) = fields
            .iter()
            .filter(|c| c.name.eq_ignore_ascii_case(f.name()))
            .last()
        {
            read_fields.push(m.clone());
        } else {
            return Err(ErrorCode::TableSchemaMismatch(format!(
                "schema field size mismatch, expected to find column: {}",
                f.name()
            )));
        }
    }
    Ok(read_fields)
}
/// Groups column-chunk indexes by field.
///
/// Returns one `Vec<usize>` per entry of `field_names`; entry `f` lists the
/// positions in `columns` whose root schema path equals `field_names[f]`.
/// Columns matching no requested field are dropped.
pub fn split_column_metas_by_field(
    columns: &[ColumnChunkMetaData],
    field_names: &[&str],
) -> Vec<Vec<usize>> {
    // Field name -> its position in `field_names`.
    let name_to_index = field_names
        .iter()
        .enumerate()
        .map(|(index, name)| (name, index))
        .collect::<HashMap<_, _>>();
    let mut grouped: Vec<Vec<usize>> = vec![Vec::new(); field_names.len()];
    for (column_index, column) in columns.iter().enumerate() {
        let leaf_name = column.descriptor().path_in_schema[0].as_str();
        if let Some(&field_index) = name_to_index.get(&leaf_name) {
            grouped[field_index].push(column_index);
        }
    }
    grouped
}
/// Returns references to the column chunks whose root schema path matches
/// `field_name`.
fn get_field_columns<'a>(
    columns: &'a [ColumnChunkMetaData],
    field_name: &str,
) -> Vec<&'a ColumnChunkMetaData> {
    let mut matching = Vec::new();
    for column in columns {
        if column.descriptor().path_in_schema[0] == field_name {
            matching.push(column);
        }
    }
    matching
}
/// Reads exactly one column chunk's byte range from `reader`.
async fn read_single_column_async<R>(
    reader: &mut R,
    meta: &ColumnChunkMetaData,
) -> Result<Vec<u8>>
where
    R: AsyncRead + AsyncSeek + Send + Unpin,
{
    // Position at the chunk start, then read its exact length.
    let (offset, length) = meta.byte_range();
    reader.seek(std::io::SeekFrom::Start(offset)).await?;
    let mut buffer = vec![0u8; length as usize];
    reader.read_exact(&mut buffer).await?;
    Ok(buffer)
}
|
//! Tests the fields related to reflection generated in the StableAbi derive macro.
use crate::{
abi_stability::{PrefixStableAbi, StableAbi},
reflection::ModReflMode,
std_types::*,
type_layout::{FieldAccessor, TLData, TLField},
};
// Public enum: its fields are expected to get Direct accessors under
// ModReflMode::Module (see test_pub_enum).
#[repr(u8)]
#[derive(StableAbi)]
pub enum PubEnum {
    Variant0,
    Variant1(RString),
    Variant2 { field0: u32 },
}
// Private enum: reflection is expected to degrade to Opaque (see
// test_priv_enum).
#[repr(C)]
#[derive(StableAbi)]
#[allow(dead_code)]
enum PrivEnum {
    Variant0,
    Variant1(RString),
    Variant2 { field0: u32 },
}
// Public struct; field2 overrides its accessor with a named getter.
#[repr(C)]
#[derive(StableAbi)]
//#[sabi(debug_print)]
pub struct RegularPubFields {
    pub field0: u8,
    pub field1: u8,
    #[sabi(refl(pub_getter = what_the))]
    pub field2: u8,
}
// module_reflection(Opaque) forces Opaque accessors despite pub fields.
#[repr(C)]
#[derive(StableAbi)]
#[sabi(module_reflection(Opaque))]
pub struct RegularPubFieldsOpaque {
    pub field0: u8,
    pub field1: u8,
}
// One field per visibility level: pub / pub(super) / private.
#[repr(C)]
#[derive(StableAbi)]
pub struct RegularMostPrivacies {
    pub field0: u8,
    pub(super) field1: u16,
    field2: u32,
}
// All-private struct; field2 still exposes a `hello` getter.
#[repr(C)]
#[derive(StableAbi)]
pub struct RegularPriv {
    field0: u8,
    field1: u16,
    #[sabi(refl(pub_getter = hello))]
    field2: u32,
}
// Prefix-kind struct: fields are accessed through generated methods; the
// per-field missing_field attributes select Method/MethodOption accessors.
#[repr(C)]
#[derive(StableAbi)]
#[sabi(kind(Prefix), missing_field(panic))]
pub struct PrefixPubFields {
    #[sabi(last_prefix_field)]
    pub field0: u8,
    pub field1: u8,
    #[sabi(missing_field(option))]
    pub field2: u8,
    #[sabi(refl(pub_getter = hello))]
    #[sabi(missing_field(default))]
    pub field3: u8,
    #[sabi(missing_field(panic))]
    pub field4: u8,
}
// Prefix struct with Opaque module reflection.
#[repr(C)]
#[derive(StableAbi)]
#[sabi(kind(Prefix), missing_field(panic), module_reflection(Opaque))]
pub struct PrefixPubFieldsOpaque {
    #[sabi(last_prefix_field)]
    pub field0: u8,
    #[sabi(missing_field(panic))]
    pub field1: u8,
}
#[allow(dead_code)]
mod some_prefixes {
    // Prefix struct mixing all visibility levels.
    #[repr(C)]
    #[derive(StableAbi)]
    #[sabi(kind(Prefix))]
    pub struct PrefMostPrivacies {
        pub field0: u8,
        #[sabi(last_prefix_field)]
        pub field1: u8,
        pub field2: u8,
        pub field3: u8,
        pub(super) field4: u16,
        #[sabi(missing_field(default))]
        field5: u32,
    }
    // Mostly-private prefix struct.
    #[repr(C)]
    #[derive(StableAbi)]
    #[sabi(kind(Prefix))]
    pub struct PrefPriv {
        field0: u8,
        pub(super) field1: u16,
        #[sabi(last_prefix_field)]
        #[sabi(missing_field(default))]
        pub(crate) field2: u32,
    }
}
pub use self::some_prefixes::*;
///////////////////////////////////////////////////////////////////////////////
/// Asserts that each field's accessor matches the expected one, pairwise.
/// Pairs beyond the shorter of the two slices are not checked.
fn check_fields(fields: &[TLField], accessors: &[FieldAccessor]) {
    for (expected, field) in accessors.iter().zip(fields) {
        assert_eq!(
            field.field_accessor(),
            *expected,
            "field:{}\nfields:{:#?}",
            field.name(),
            fields
        );
    }
}
/// Checks an enum's field accessors variant by variant, plus the layout's
/// module reflection mode.
fn check_enum_accessors<T>(mod_refl_mode: ModReflMode, accessors: &[&[FieldAccessor]])
where
    T: StableAbi,
{
    let layout = T::LAYOUT;
    let mut fields = match layout.data() {
        TLData::Enum(enum_) => enum_.fields.iter(),
        // Fixed: the message used to say "TLData::Struct", copy-pasted from
        // the struct checker; this function matches Enum.
        x => panic!("layout.data must be TLData::Enum{{..}}:\n{:#?}", x),
    };
    assert_eq!(layout.mod_refl_mode(), mod_refl_mode);
    for expec_vari in accessors {
        // Consume exactly this variant's fields from the flat iterator.
        let subfields = fields.by_ref().take(expec_vari.len()).collect::<Vec<_>>();
        check_fields(&subfields, expec_vari)
    }
}
/// Checks a struct's field accessors and the layout's module reflection mode.
fn check_struct_accessors<T>(mod_refl_mode: ModReflMode, accessors: &[FieldAccessor])
where
    T: StableAbi,
{
    let layout = T::LAYOUT;
    let data = layout.data();
    let fields = match data {
        TLData::Struct { fields } => fields.to_vec(),
        other => panic!("layout.data must be TLData::Struct{{..}}:\n{:#?}", other),
    };
    assert_eq!(layout.mod_refl_mode(), mod_refl_mode);
    check_fields(&fields, accessors);
}
/// Checks a prefix type's field accessors and module reflection mode.
fn check_prefix_accessors<T>(mod_refl_mode: ModReflMode, accessors: &[FieldAccessor])
where
    T: PrefixStableAbi,
{
    let layout = T::LAYOUT;
    let fields = match layout.data() {
        TLData::PrefixType(prefix) => prefix.fields.to_vec(),
        // Fixed: the message used to say "TLData::Struct", copy-pasted from
        // the struct checker; this function matches PrefixType.
        x => panic!("layout.data must be TLData::PrefixType{{..}}:\n{:#?}", x),
    };
    assert_eq!(layout.mod_refl_mode(), mod_refl_mode);
    check_fields(&fields, accessors);
}
///////////////////////////////////////////////////////////////////////////////
#[test]
fn test_pub_enum() {
    // Public enum → Module reflection; the unit variant contributes no fields.
    check_enum_accessors::<PubEnum>(
        ModReflMode::Module,
        &[&[], &[FieldAccessor::Direct], &[FieldAccessor::Direct]],
    );
}
#[test]
fn test_priv_enum() {
    // Private enum → Opaque reflection for every field.
    check_enum_accessors::<PrivEnum>(
        ModReflMode::Opaque,
        &[&[], &[FieldAccessor::Opaque], &[FieldAccessor::Opaque]],
    );
}
///////////////////////////////////////////////////////////////////////////////
#[test]
fn test_regular_pub_fields() {
    // pub fields are Direct; field2 uses its custom `what_the` getter.
    check_struct_accessors::<RegularPubFields>(
        ModReflMode::Module,
        &[FieldAccessor::Direct, FieldAccessor::Direct, {
            const FA: FieldAccessor = FieldAccessor::method_named(rstr!("what_the"));
            FA
        }],
    );
}
#[test]
fn test_regular_pub_fields_opaque() {
    // module_reflection(Opaque) hides even pub fields.
    check_struct_accessors::<RegularPubFieldsOpaque>(
        ModReflMode::Opaque,
        &[FieldAccessor::Opaque, FieldAccessor::Opaque],
    );
}
#[test]
fn test_regular_most_privacies() {
    // Only the fully-public field stays Direct.
    check_struct_accessors::<RegularMostPrivacies>(
        ModReflMode::Module,
        &[
            FieldAccessor::Direct,
            FieldAccessor::Opaque,
            FieldAccessor::Opaque,
        ],
    );
}
#[test]
fn test_regular_priv() {
    // Private fields are Opaque unless a pub_getter is declared.
    check_struct_accessors::<RegularPriv>(
        ModReflMode::Opaque,
        &[FieldAccessor::Opaque, FieldAccessor::Opaque, {
            const FA: FieldAccessor = FieldAccessor::method_named(rstr!("hello"));
            FA
        }],
    );
}
///////////////////////////////////////////////////////////////////////////////
#[test]
fn test_prefix_pub_fields() {
    // Prefix types read fields through methods; missing_field(option) yields
    // MethodOption and pub_getter names the generated method.
    check_prefix_accessors::<PrefixPubFields_Prefix>(
        ModReflMode::Module,
        &[
            FieldAccessor::Method,
            FieldAccessor::Method,
            FieldAccessor::MethodOption,
            {
                const FA: FieldAccessor = FieldAccessor::method_named(rstr!("hello"));
                FA
            },
            FieldAccessor::Method,
        ],
    );
}
#[test]
fn test_prefix_pub_fields_opaque() {
    check_prefix_accessors::<PrefixPubFieldsOpaque_Prefix>(
        ModReflMode::Opaque,
        &[FieldAccessor::Opaque, FieldAccessor::Opaque],
    );
}
#[test]
fn test_prefix_most_privacies() {
    // Fields after the last_prefix_field become MethodOption; non-pub fields
    // are Opaque.
    check_prefix_accessors::<PrefMostPrivacies_Prefix>(
        ModReflMode::Module,
        &[
            FieldAccessor::Method,
            FieldAccessor::Method,
            FieldAccessor::MethodOption,
            FieldAccessor::MethodOption,
            FieldAccessor::Opaque,
            FieldAccessor::Opaque,
        ],
    );
}
#[test]
fn test_prefix_priv() {
    check_prefix_accessors::<PrefPriv_Prefix>(
        ModReflMode::Opaque,
        &[
            FieldAccessor::Opaque,
            FieldAccessor::Opaque,
            FieldAccessor::Opaque,
        ],
    );
}
|
use petgraph::visit::{IntoNeighbors, IntoNodeIdentifiers};
use petgraph_drawing::{Drawing, DrawingIndex, DrawingValue};
use std::collections::HashMap;
/// ForceAtlas2-style layout force.
///
/// Holds the per-node degree counts and adjacency lists extracted from the
/// graph, plus a repulsion constant and a minimum-distance clamp.
pub struct ForceAtlas2<S>
where
    S: DrawingValue,
{
    // degree[i]: neighbor visits counted for node i during construction.
    degree: Vec<usize>,
    // links[i]: node indices adjacent to i, as yielded by the graph.
    links: Vec<Vec<usize>>,
    // Repulsion scaling constant (initialized to one).
    k: S,
    // Lower bound on pairwise distance, to avoid dividing by ~zero.
    min_distance: S,
}
impl<S> ForceAtlas2<S>
where
    S: DrawingValue,
{
    /// Precomputes per-node degrees and adjacency lists from `graph`.
    pub fn new<G>(graph: G) -> ForceAtlas2<S>
    where
        G: IntoNodeIdentifiers + IntoNeighbors,
        G::NodeId: DrawingIndex,
    {
        // Dense index for each node id, in identifier order.
        let node_indices = graph
            .node_identifiers()
            .enumerate()
            .map(|(i, u)| (u, i))
            .collect::<HashMap<_, _>>();
        let mut degree = vec![0; node_indices.len()];
        let mut links = vec![vec![]; node_indices.len()];
        for u in graph.node_identifiers() {
            for v in graph.neighbors(u) {
                // Both endpoints' degrees are bumped on every neighbor visit.
                // NOTE(review): if `neighbors` yields each undirected edge
                // from both endpoints, every edge is counted twice per node —
                // confirm this is intended.
                degree[node_indices[&u]] += 1;
                degree[node_indices[&v]] += 1;
                links[node_indices[&u]].push(node_indices[&v]);
            }
        }
        ForceAtlas2 {
            degree,
            links,
            k: S::one(),
            min_distance: S::one(),
        }
    }
    /// Applies one step to node `u`: repulsion from every other node, then
    /// attraction along `u`'s recorded links. `alpha` scales both displacements.
    pub fn apply_to_node<N>(&self, u: usize, drawing: &mut Drawing<N, S>, alpha: S)
    where
        N: DrawingIndex,
    {
        let n = drawing.len();
        for v in 0..n {
            if u == v {
                continue;
            }
            let dx = drawing.coordinates[[v, 0]] - drawing.coordinates[[u, 0]];
            let dy = drawing.coordinates[[v, 1]] - drawing.coordinates[[u, 1]];
            // Clamp to min_distance so coincident nodes don't blow up 1/d.
            let d = (dx * dx + dy * dy).sqrt().max(self.min_distance);
            // NOTE(review): repulsion scales with (deg(u)+1)^2; classic
            // ForceAtlas2 uses (deg(u)+1)*(deg(v)+1) — confirm the
            // simplification is intended.
            let du = S::from(self.degree[u] + 1).unwrap();
            let c = self.k * du * du / d;
            drawing.coordinates[[u, 0]] -= alpha * c * dx;
            drawing.coordinates[[u, 1]] -= alpha * c * dy;
        }
        for &v in self.links[u].iter() {
            let dx = drawing.coordinates[[v, 0]] - drawing.coordinates[[u, 0]];
            let dy = drawing.coordinates[[v, 1]] - drawing.coordinates[[u, 1]];
            let d = (dx * dx + dy * dy).sqrt();
            // NOTE(review): attraction displacement is d * (dx, dy), i.e.
            // quadratic in distance; FA2's attraction is linear — confirm the
            // extra `d` factor.
            drawing.coordinates[[u, 0]] += alpha * d * dx;
            drawing.coordinates[[u, 1]] += alpha * d * dy;
        }
    }
    /// Applies one step to every node in index order.
    pub fn apply<N>(&self, drawing: &mut Drawing<N, S>, alpha: S)
    where
        N: DrawingIndex,
    {
        let n = drawing.len();
        for u in 0..n {
            self.apply_to_node(u, drawing, alpha);
        }
    }
}
|
//! This module represents a tree connect request.
//! The SMB2 TREE_CONNECT Request packet is sent by a client to request access to a particular share on the server.
//! This request is composed of an SMB2 Packet Header that is followed by this request structure.
use rand::{
distributions::{Distribution, Standard},
Rng,
};
use crate::smb2::helper_functions::tree_connect_context::TreeConnectContext;
/// tree connect request size of 9 bytes
/// (the fixed StructureSize value, already encoded little-endian: 0x0009)
const STRUCTURE_SIZE: &[u8; 2] = b"\x09\x00";
/// A struct that represents a tree connect request
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TreeConnect {
    /// StructureSize (2 bytes): The client MUST set this field to 9,
    /// indicating the size of the request structure, not including the header.
    /// The client MUST set it to this value regardless of how long Buffer[]
    /// actually is in the request being sent.
    pub structure_size: Vec<u8>,
    /// Flags/Reserved (2 bytes): This field is interpreted in different ways
    /// depending on the SMB2 dialect. In the SMB 3.1.1 dialect,
    /// this field is interpreted as the Flags field, which indicates how to process the operation.
    pub flags: Vec<u8>,
    /// PathOffset (2 bytes): The offset, in bytes, of the full share path name from the beginning
    /// of the packet header. The full share pathname is Unicode in the form "\\server\share"
    /// for the request. The server component of the path MUST be less than 256 characters in length,
    /// and it MUST be a NetBIOS name, a fully qualified domain name (FQDN), or a textual IPv4 or IPv6 address.
    /// The share component of the path MUST be less than or equal to 80 characters in length.
    /// The share name MUST NOT contain any invalid characters.
    pub path_offset: Vec<u8>,
    /// PathLength (2 bytes): The length, in bytes, of the full share path name.
    pub path_length: Vec<u8>,
    /// Buffer (variable): If SMB2_TREE_CONNECT_FLAG_EXTENSION_PRESENT is not set in the Flags field of this structure,
    /// this field is a variable-length buffer that contains the full share path name.
    /// If SMB2_TREE_CONNECT_FLAG_EXTENSION_PRESENT is set in the Flags field in this structure,
    /// this field is a variable-length buffer that contains the tree connect request extension
    pub buffer: Vec<u8>,
}
impl Default for TreeConnect {
    /// Creates a new tree connect request with the mandatory StructureSize
    /// preset and all variable-length fields empty.
    ///
    /// Implemented via the standard [`Default`] trait (rather than an
    /// inherent `default` method) so it composes with
    /// `..Default::default()`; existing `TreeConnect::default()` call sites
    /// resolve unchanged through the trait.
    fn default() -> Self {
        TreeConnect {
            structure_size: STRUCTURE_SIZE.to_vec(),
            flags: Vec::new(),
            path_offset: Vec::new(),
            path_length: Vec::new(),
            buffer: Vec::new(),
        }
    }
}
/// *Cluster Reconnect*:
/// - When set, indicates that the client has previously connected
/// to the specified cluster share using the SMB dialect of the
/// connection on which the request is received.
///
/// *Redirect To Owner*:
/// - When set, indicates that the client can handle synchronous share
/// redirects via a Share Redirect error context response.
///
/// *Extension Present*:
/// - When set, indicates that a tree connect request extension is present,
/// starting at the Buffer field of this tree connect request.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Flags {
    /// Bit 0x0001 (see `unpack_byte_code`).
    ClusterReconnect,
    /// Bit 0x0002.
    RedirectToOwner,
    /// Bit 0x0004.
    ExtensionPresent,
}
impl Flags {
    /// Unpacks the byte code (bitmask value) of a tree connect request flag.
    pub fn unpack_byte_code(&self) -> u16 {
        match self {
            Flags::ClusterReconnect => 0x0001,
            Flags::RedirectToOwner => 0x0002,
            Flags::ExtensionPresent => 0x0004,
        }
    }
    /// Returns the combined bitmask of the chosen flags, encoded as two
    /// little-endian bytes.
    ///
    /// Flags are OR-ed together (instead of summed) so that a flag listed
    /// more than once cannot carry into neighbouring bits; for distinct
    /// flags the result is identical to the previous addition.
    pub fn return_sum_of_chosen_flags(flags: Vec<Flags>) -> Vec<u8> {
        let combined_flags: u16 = flags
            .iter()
            .fold(0u16, |acc, flag| acc | flag.unpack_byte_code());
        combined_flags.to_le_bytes().to_vec()
    }
}
// Lets `rand` sample one of the three flags uniformly (used to build
// randomized requests).
impl Distribution<Flags> for Standard {
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Flags {
        match rng.gen_range(0..=2) {
            0 => Flags::ClusterReconnect,
            1 => Flags::RedirectToOwner,
            _ => Flags::ExtensionPresent,
        }
    }
}
/// If the Flags field of the SMB2 TREE_CONNECT request has the
/// SMB2_TREE_CONNECT_FLAG_EXTENSION_PRESENT bit set,
/// the following structure MUST be added at the beginning of the Buffer field.
///
/// All fields are private; instances start from the `default` constructor.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TreeConnectExtension {
    /// TreeConnectContextOffset (4 bytes): The offset from the start
    /// of the SMB2 TREE_CONNECT request of an array of tree connect contexts.
    tree_connect_context_offset: Vec<u8>,
    /// TreeConnectContextCount (2 bytes): The count of elements in the tree
    /// connect context array.
    tree_connect_context_count: Vec<u8>,
    /// Reserved (10 bytes): MUST be set to zero.
    reserved: Vec<u8>,
    /// PathName (variable): This field is a variable-length buffer that contains
    /// the full share path name.
    /// The full share pathname is Unicode in the form "\\server\share" for the request.
    path_name: Vec<u8>,
    /// TreeConnectContexts (variable): A variable length array of
    /// SMB2_TREE_CONNECT_CONTEXT structures.
    tree_connect_contexts: Vec<TreeConnectContext>,
}
impl Default for TreeConnectExtension {
    /// Creates a new tree connect extension with the 10 reserved bytes
    /// zeroed and all variable-length fields empty.
    ///
    /// Implemented via the standard [`Default`] trait rather than an inherent
    /// `default` method; `TreeConnectExtension::default()` call sites resolve
    /// unchanged through the trait.
    fn default() -> Self {
        TreeConnectExtension {
            tree_connect_context_offset: Vec::new(),
            tree_connect_context_count: Vec::new(),
            reserved: b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00".to_vec(),
            path_name: Vec::new(),
            tree_connect_contexts: Vec::new(),
        }
    }
}
|
// Copyright 2019-2020 PolkaX. Licensed under MIT or Apache-2.0.
use std::cmp::Ordering;
use std::fmt;
use super::Entry;
use std::ops::DerefMut;
/// Comparator for query entries; implementors define an ordering between two
/// `Entry` values.
pub trait Order: fmt::Debug + Sync + Send {
    fn cmp(&self, a: &Entry, b: &Entry) -> Ordering;
}
// Any Debug + Sync + Send closure over two entries is itself an Order.
impl<F> Order for F
where
    F: Fn(&Entry, &Entry) -> Ordering + fmt::Debug + Sync + Send,
{
    fn cmp(&self, a: &Entry, b: &Entry) -> Ordering {
        self(a, b)
    }
}
/// OrderByValue is used to signal to datastores they should apply internal
/// orderings. Orders entries ascending by value.
pub struct OrderByValue;
impl Order for OrderByValue {
    fn cmp(&self, a: &Entry, b: &Entry) -> Ordering {
        a.value.cmp(&b.value)
    }
}
impl fmt::Debug for OrderByValue {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "VALUE")
    }
}
/// OrderByValueDescending is used to signal to datastores they
/// should apply internal orderings. Orders entries descending by value.
pub struct OrderByValueDescending;
impl Order for OrderByValueDescending {
    fn cmp(&self, a: &Entry, b: &Entry) -> Ordering {
        a.value.cmp(&b.value).reverse()
    }
}
impl fmt::Debug for OrderByValueDescending {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "desc(VALUE)")
    }
}
/// OrderByKey orders entries ascending by key.
pub struct OrderByKey;
impl Order for OrderByKey {
    fn cmp(&self, a: &Entry, b: &Entry) -> Ordering {
        a.key.cmp(&b.key)
    }
}
impl fmt::Debug for OrderByKey {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "KEY")
    }
}
/// OrderByKeyDescending orders entries descending by key.
pub struct OrderByKeyDescending;
impl Order for OrderByKeyDescending {
    fn cmp(&self, a: &Entry, b: &Entry) -> Ordering {
        a.key.cmp(&b.key).reverse()
    }
}
impl fmt::Debug for OrderByKeyDescending {
    /// Debug label for this ordering.
    ///
    /// Was "KEY", which was indistinguishable from `OrderByKey`; now mirrors
    /// `OrderByValueDescending`'s "desc(...)" convention.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "desc(KEY)")
    }
}
/// Compares two entries by the given orders in sequence, falling back to key
/// order when every order reports equality.
///
/// The key fallback gives callers a *stable* total order for free; the order
/// coming from the underlying datastore is undefined anyway.
pub fn less(orders: &[Box<dyn Order>], a: &Entry, b: &Entry) -> Ordering {
    orders
        .iter()
        .map(|order| order.cmp(a, b))
        .find(|ordering| *ordering != Ordering::Equal)
        .unwrap_or_else(|| a.key.cmp(&b.key))
}
/// Sort sorts the given entries using the given orders.
///
/// Accepts anything that derefs to a mutable slice of entry-like items
/// (e.g. `&mut Vec<Entry>` or `&mut [Entry]`).
pub fn sort<E: AsRef<Entry>, L: DerefMut<Target = [E]>>(orders: &[Box<dyn Order>], mut entries: L) {
    entries
        .deref_mut()
        .sort_by(|a, b| less(orders, a.as_ref(), b.as_ref()));
}
|
// Copyright 2023 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::string::FromUtf8Error;
use crate::kvapi::KeyError;
/// Function that escapes special characters in a string.
///
/// All characters except digit, alphabet and '_' are treated as special characters.
/// A special character will be converted into "%num" where num is the hexadecimal form of the character.
///
/// # Example
/// ```
/// let key = "data_bend!!";
/// let new_key = escape(&key);
/// assert_eq!("data_bend%21%21".to_string(), new_key);
/// ```
pub(crate) fn escape(key: &str) -> String {
    // Lowercase hex digit for a nibble in 0..=15; anything else is a bug.
    fn hex(nibble: u8) -> u8 {
        match nibble {
            0..=9 => b'0' + nibble,
            10..=15 => b'a' + (nibble - 10),
            unreachable => unreachable!("Unreachable branch num = {}", unreachable),
        }
    }
    let mut escaped = Vec::with_capacity(key.len());
    for &byte in key.as_bytes() {
        // Digits, ASCII letters and '_' pass through unchanged.
        if byte == b'_' || byte.is_ascii_alphanumeric() {
            escaped.push(byte);
        } else {
            // Encode every other byte as "%xy" with lowercase hex digits.
            escaped.push(b'%');
            escaped.push(hex(byte >> 4));
            escaped.push(hex(byte & 0x0f));
        }
    }
    // Safe unwrap(): the output consists solely of ASCII bytes.
    String::from_utf8(escaped).unwrap()
}
/// The reverse function of escape_for_key.
///
/// # Example
/// ```
/// let key = "data_bend%21%21";
/// let original_key = unescape(&key);
/// assert_eq!(Ok("data_bend!!".to_string()), original_key);
/// ```
pub(crate) fn unescape(key: &str) -> Result<String, FromUtf8Error> {
    // Value of a lowercase hex digit; `escape` never emits anything else.
    fn unhex(digit: u8) -> u8 {
        match digit {
            b'0'..=b'9' => digit - b'0',
            b'a'..=b'f' => digit - b'a' + 10,
            unreachable => unreachable!("Unreachable branch num = {}", unreachable),
        }
    }
    let bytes = key.as_bytes();
    let mut decoded = Vec::with_capacity(key.len());
    let mut i = 0;
    while i < bytes.len() {
        if bytes[i] == b'%' {
            // A '%' produced by `escape` is always followed by two hex
            // digits, so the two look-ahead reads are in bounds for
            // well-formed input. The last byte of the string won't be '%'.
            decoded.push(unhex(bytes[i + 1]) * 16 + unhex(bytes[i + 2]));
            i += 3;
        } else {
            decoded.push(bytes[i]);
            i += 1;
        }
    }
    String::from_utf8(decoded)
}
/// Decode a string into u64 id.
/// Decodes a decimal string into a u64 id, mapping parse failures to
/// `KeyError::InvalidId` with the offending input and reason attached.
pub(crate) fn decode_id(s: &str) -> Result<u64, KeyError> {
    s.parse::<u64>().map_err(|err| KeyError::InvalidId {
        s: s.to_string(),
        reason: err.to_string(),
    })
}
|
use std::{thread, time::Duration};
use backend_embedded_graphics::{
themes::Theme,
widgets::{background::BackgroundStyle, border::BorderStyle, label::ascii::LabelConstructor},
EgCanvas,
};
use embedded_graphics::{
draw_target::DrawTarget, pixelcolor::BinaryColor, prelude::Size as EgSize,
};
use embedded_graphics_simulator::{
sdl2::MouseButton, BinaryColorTheme, OutputSettingsBuilder, SimulatorDisplay, SimulatorEvent,
Window as SimWindow,
};
use embedded_gui::{
data::BoundData,
geometry::Position,
input::event::{InputEvent, PointerEvent},
prelude::*,
state::WidgetState,
widgets::{
background::Background,
border::Border,
button::Button,
fill::{Center, FillParent, HorizontalAndVertical},
label::Label,
layouts::linear::{Column, Row},
spacing::Spacing,
},
};
/// Translates a simulator event into a GUI input event.
///
/// Returns `Err(true)` when the window was closed (caller should quit) and
/// `Err(false)` for events the GUI does not handle.
fn convert_input(event: SimulatorEvent) -> Result<InputEvent, bool> {
    use std::sync::atomic::{AtomicBool, Ordering};
    // Tracks whether the left mouse button is currently held so that mouse
    // moves can be classified as Drag vs Hover. An atomic replaces the
    // previous `static mut` + `unsafe`, whose direct reads/writes are
    // UB-prone (and rejected by newer compilers) even in demo code.
    static MOUSE_DOWN: AtomicBool = AtomicBool::new(false);
    match event {
        SimulatorEvent::MouseButtonUp {
            mouse_btn: MouseButton::Left,
            point,
        } => {
            MOUSE_DOWN.store(false, Ordering::Relaxed);
            Ok(InputEvent::PointerEvent(
                Position {
                    x: point.x,
                    y: point.y,
                },
                PointerEvent::Up,
            ))
        }
        SimulatorEvent::MouseButtonDown {
            mouse_btn: MouseButton::Left,
            point,
        } => {
            MOUSE_DOWN.store(true, Ordering::Relaxed);
            Ok(InputEvent::PointerEvent(
                Position {
                    x: point.x,
                    y: point.y,
                },
                PointerEvent::Down,
            ))
        }
        SimulatorEvent::MouseMove { point } => Ok(InputEvent::PointerEvent(
            Position {
                x: point.x,
                y: point.y,
            },
            if MOUSE_DOWN.load(Ordering::Relaxed) {
                PointerEvent::Drag
            } else {
                PointerEvent::Hover
            },
        )),
        SimulatorEvent::Quit => Err(true),
        _ => Err(false),
    }
}
/// Recolors a button's background when its widget state changes.
fn update_button_background<W: Widget>(
    widget: &mut Background<W, BackgroundStyle<BinaryColor>>,
    state: WidgetState,
) {
    // Hover takes precedence over pressed; only a pressed (non-hovered)
    // button gets a filled background.
    let fill = if state.has_state(Button::STATE_HOVERED) {
        BinaryColor::Off
    } else if state.has_state(Button::STATE_PRESSED) {
        BinaryColor::On
    } else {
        BinaryColor::Off
    };
    widget.set_background_color(fill);
}
/// Recolors a button's border when its widget state changes.
fn update_button_border<W: Widget>(
    widget: &mut Border<W, BorderStyle<BinaryColor>>,
    state: WidgetState,
) {
    // Only a hovered button shows its border; pressed and idle are hidden.
    let outline = if state.has_state(Button::STATE_HOVERED) {
        BinaryColor::On
    } else if state.has_state(Button::STATE_PRESSED) {
        BinaryColor::Off
    } else {
        BinaryColor::Off
    };
    widget.set_border_color(outline);
}
// While this return type is ugly, it can be recovered from the compiler:
// writing `-> _` makes the compiler report the concrete type in a compile error.
fn button_with_style<W: Widget>(
    inner: W,
) -> Button<
    Background<
        Border<FillParent<W, HorizontalAndVertical, Center, Center>, BorderStyle<BinaryColor>>,
        BackgroundStyle<BinaryColor>,
    >,
> {
    // Build the widget tree inside-out: fill -> border -> background -> button.
    let filled = FillParent::both(inner)
        .align_horizontal(Center)
        .align_vertical(Center);
    let bordered = Border::new(filled)
        .border_color(BinaryColor::Off)
        .on_state_changed(update_button_border);
    let decorated = Background::new(bordered)
        .background_color(BinaryColor::Off)
        .on_state_changed(update_button_background);
    Button::new(decorated)
}
fn main() {
    // 128x64 monochrome display driven by the embedded-graphics simulator.
    let display = SimulatorDisplay::new(EgSize::new(128, 64));
    // Shared boolean bound to the button below; the callback runs on change.
    let flag = BoundData::new(true, |data| println!("Data changed to {:?}", data));
    // Widget tree: a column with a two-label row on top and a padded,
    // data-bound button underneath.
    let mut gui = Window::new(
        EgCanvas::new(display),
        Column::new()
            .add(
                Row::new()
                    .add(FillParent::horizontal(Label::new("Hello,")).align_horizontal(Center))
                    .weight(1)
                    .add(FillParent::horizontal(Label::new("World!")).align_horizontal(Center))
                    .weight(1),
            )
            .add(
                Spacing::new(
                    // The label's text tracks `flag`; clicking toggles it.
                    button_with_style(Label::new("Click me").bind(&flag).on_data_changed(
                        |widget, data| widget.text = if *data { "on" } else { "off" },
                    ))
                    .bind(&flag)
                    .on_clicked(|data| {
                        *data = !*data;
                        println!("Clicked!");
                    }),
                )
                .all(4),
            ),
    );
    let output_settings = OutputSettingsBuilder::new()
        .theme(BinaryColorTheme::OledBlue)
        .build();
    let mut window = SimWindow::new("GUI demonstration", &output_settings);
    loop {
        gui.canvas
            .target
            .clear(BinaryColor::BACKGROUND_COLOR)
            .unwrap();
        // Frame pipeline: sync bound data, then measure/arrange layout, then draw.
        gui.update();
        gui.measure();
        gui.arrange();
        gui.draw().unwrap();
        // Update the window.
        window.update(&gui.canvas.target);
        // Handle key and mouse events.
        for event in window.events() {
            match convert_input(event) {
                Ok(input) => {
                    gui.input_event(input);
                }
                // Err(true) means the simulator window was closed.
                Err(true) => return,
                _ => {}
            }
        }
        // Wait for a little while.
        thread::sleep(Duration::from_millis(10));
    }
}
|
//! Runtime lifecycle related functionality.
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
#[cfg(not(feature = "std"))]
use alloc::rc::Rc;
#[cfg(feature = "std")]
use std::rc::Rc;
use super::cost::code_deposit_gas;
use super::util::copy_into_memory_apply;
use super::{GasUsage, Machine, MachineStatus};
use crate::{
commit::AccountState,
errors::{OnChainError, RequireError},
AccountPatch, Memory, Patch,
};
use bigint::{Address, Gas, M256, U256};
/// # Lifecycle of a Machine
///
/// When a new non-invoked transaction is created, `initialize_call`
/// or `initialize_create` should be called. After this, the machine
/// can be stepped as normal. When the machine meets a CALL/CALLCODE
/// or CREATE instruction, a sub-machine will be created. This
/// submachine should first call `invoke_call` or
/// `invoke_create`. After the submachine is finished, it should call
/// `apply_sub`. When the non-invoked transaction is finished, it
/// should first call `code_deposit` if it is a contract creation
/// transaction. After that, it should call `finalize`.
impl<'a, M: Memory, P: Patch> Machine<'a, M, P> {
    /// Initialize a MessageCall transaction.
    ///
    /// Debits the caller for `preclaimed_value` (the up-front gas claim) and
    /// the transferred value, then credits the value to the callee.
    ///
    /// ### Panic
    /// Requires caller of the transaction to be committed.
    pub fn initialize_call(&mut self, preclaimed_value: U256) -> Result<(), RequireError> {
        self.state.account_state.require(self.state.context.address)?;
        // System calls move no funds out of the caller.
        if !self.state.context.is_system {
            self.state
                .account_state
                .decrease_balance(self.state.context.caller, preclaimed_value);
            self.state
                .account_state
                .decrease_balance(self.state.context.caller, self.state.context.value);
        }
        self.state
            .account_state
            .increase_balance(self.state.context.address, self.state.context.value);
        Ok(())
    }
    /// Initialize the runtime as a call from a CALL or CALLCODE opcode.
    ///
    /// Unlike `initialize_call`, no preclaimed value is deducted — gas for a
    /// sub-call is accounted for by the parent machine.
    ///
    /// ### Panic
    /// Requires caller of the CALL/CALLCODE opcode to be committed.
    pub fn invoke_call(&mut self) -> Result<(), RequireError> {
        self.state.account_state.require(self.state.context.address)?;
        if !self.state.context.is_system {
            self.state
                .account_state
                .decrease_balance(self.state.context.caller, self.state.context.value);
        }
        self.state
            .account_state
            .increase_balance(self.state.context.address, self.state.context.value);
        Ok(())
    }
    /// Initialize a ContractCreation transaction.
    ///
    /// ### Panic
    /// Requires caller of the transaction to be committed.
    pub fn initialize_create(&mut self, preclaimed_value: U256) -> Result<(), RequireError> {
        self.state.account_state.require(self.state.context.address)?;
        if !self.state.context.is_system {
            self.state
                .account_state
                .decrease_balance(self.state.context.caller, preclaimed_value);
            self.state
                .account_state
                .decrease_balance(self.state.context.caller, self.state.context.value);
        }
        // Create the new contract account endowed with the transferred value.
        self.state
            .account_state
            .create(self.state.context.address, self.state.context.value)
            .unwrap();
        Ok(())
    }
    /// Initialize the runtime as a call from a CREATE opcode.
    ///
    /// ### Panic
    /// Requires caller of the CREATE opcode to be committed.
    pub fn invoke_create(&mut self) -> Result<(), RequireError> {
        self.state.account_state.require(self.state.context.address)?;
        if !self.state.context.is_system {
            self.state
                .account_state
                .decrease_balance(self.state.context.caller, self.state.context.value);
        }
        self.state
            .account_state
            .create(self.state.context.address, self.state.context.value)
            .unwrap();
        Ok(())
    }
    /// Deposit code for a ContractCreation transaction or a CREATE opcode.
    ///
    /// Must only be called once the machine has exited (Ok or Err).
    #[allow(clippy::collapsible_if)]
    pub fn code_deposit(&mut self) {
        match self.status() {
            MachineStatus::ExitedOk | MachineStatus::ExitedErr(_) => (),
            _ => panic!(),
        }
        // Reject output larger than the patch's optional code-size limit.
        if self.state.patch.code_deposit_limit().is_some() {
            if self.state.out.len() > self.state.patch.code_deposit_limit().unwrap() {
                reset_error_hard!(self, OnChainError::EmptyGas);
                return;
            }
        }
        let deposit_cost = code_deposit_gas(self.state.out.len());
        if deposit_cost > self.state.available_gas() {
            if !self.state.patch.force_code_deposit() {
                reset_error_hard!(self, OnChainError::EmptyGas);
            } else {
                // Patch forces the deposit even when gas is insufficient:
                // the account is kept, but with empty code.
                self.state
                    .account_state
                    .code_deposit(self.state.context.address, Rc::new(Vec::new()));
            }
        } else {
            // Charge the deposit cost and store the returned bytecode.
            self.state.used_gas += deposit_cost;
            self.state
                .account_state
                .code_deposit(self.state.context.address, self.state.out.clone());
        }
    }
    /// Finalize a transaction. This should not be used when invoked
    /// by an opcode and should only be used in the top level.
    ///
    /// Settles gas refunds with the caller, pays the miner (`beneficiary`),
    /// and removes self-destructed accounts.
    ///
    /// ### Panic
    /// Requires caller of the transaction to be committed.
    pub fn finalize_transaction(
        &mut self,
        beneficiary: Address,
        real_used_gas: Gas,
        preclaimed_value: U256,
        fresh_account_state: &AccountState<'a, P::Account>,
    ) -> Result<(), RequireError> {
        self.state.account_state.require(self.state.context.address)?;
        if !self.state.patch.account_patch().allow_partial_change() {
            self.state.account_state.require(beneficiary)?;
        }
        match self.status() {
            MachineStatus::ExitedOk => {
                // Requires removed accounts to exist.
                for address in &self.state.removed {
                    self.state.account_state.require(*address)?;
                }
            }
            MachineStatus::ExitedErr(_) => {
                // If exited with error, reset all changes.
                self.state.account_state = fresh_account_state.clone();
                self.state.removed = Vec::new();
                // Re-apply the gas pre-claim that the reset just discarded,
                // so the refund below balances out correctly.
                if !self.state.context.is_system {
                    self.state
                        .account_state
                        .decrease_balance(self.state.context.caller, preclaimed_value);
                }
            }
            _ => panic!(),
        }
        // Total wei actually consumed by this transaction's gas.
        let gas_dec = real_used_gas * self.state.context.gas_price;
        if !self.state.context.is_system {
            // Return the pre-claim, then charge only the gas really used.
            self.state
                .account_state
                .increase_balance(self.state.context.caller, preclaimed_value);
            self.state
                .account_state
                .decrease_balance(self.state.context.caller, gas_dec.into());
            // Apply miner rewards
            self.state.account_state.increase_balance(beneficiary, gas_dec.into());
        }
        // Purge accounts scheduled for removal (e.g. by SUICIDE/SELFDESTRUCT).
        for address in &self.state.removed {
            self.state.account_state.remove(*address).unwrap();
        }
        match self.status() {
            MachineStatus::ExitedOk => Ok(()),
            MachineStatus::ExitedErr(_) => Ok(()),
            _ => panic!(),
        }
    }
    /// Finalize a context execution. This should not be used when
    /// invoked by an opcode and should only be used in the top level.
    ///
    /// On error exit, all state changes are rolled back to
    /// `fresh_account_state`.
    ///
    /// ### Panic
    /// Requires caller of the transaction to be committed.
    pub fn finalize_context(&mut self, fresh_account_state: &AccountState<'a, P::Account>) {
        match self.status() {
            MachineStatus::ExitedOk => (),
            MachineStatus::ExitedErr(_) => {
                self.state.account_state = fresh_account_state.clone();
                self.state.removed = Vec::new();
            }
            _ => panic!(),
        }
    }
    /// Apply a sub runtime into the current runtime. This sub runtime
    /// should have been created by the current runtime's `derive`
    /// function. Depending whether the current runtime is invoking a
    /// ContractCreation or MessageCall instruction, it will apply
    /// various states back.
    pub fn apply_sub(&mut self, sub: Machine<'a, M, P>) {
        #[cfg(feature = "std")]
        use std::mem::swap;
        #[cfg(not(feature = "std"))]
        use core::mem::swap;
        // Move the status out of `self` (leaving Running in its place) so we
        // can match on it by value while mutably borrowing `self` below.
        let mut status = MachineStatus::Running;
        swap(&mut status, &mut self.status);
        match status {
            MachineStatus::InvokeCreate(_) => {
                self.apply_create(sub);
            }
            MachineStatus::InvokeCall(_, (out_start, out_len)) => {
                self.apply_call(sub, out_start, out_len);
            }
            _ => panic!(),
        }
    }
    /// Merge the result of a CREATE sub-machine back into this machine.
    fn apply_create(&mut self, mut sub: Machine<'a, M, P>) {
        sub.code_deposit();
        let sub_total_used_gas = sub.state.total_used_gas();
        // Logs, gas usage and refunds propagate regardless of the outcome.
        self.state.logs.append(&mut sub.state.logs);
        self.state.used_gas += sub_total_used_gas;
        self.state.refunded_gas = self.state.refunded_gas + sub.state.refunded_gas;
        self.state.ret = sub.state.out.clone();
        match sub.status() {
            MachineStatus::ExitedOk => {
                // Adopt the sub-machine's account state and removal list.
                self.state.account_state = sub.state.account_state;
                self.state.removed = sub.state.removed;
            }
            MachineStatus::ExitedErr(_) => {
                // Replace the top stack word with zero to signal failure.
                self.state.stack.pop().unwrap();
                self.state.stack.push(M256::zero()).unwrap();
            }
            _ => panic!(),
        }
    }
    /// Merge the result of a CALL/CALLCODE sub-machine back into this
    /// machine, copying the return data into memory at `out_start..out_len`.
    fn apply_call(&mut self, mut sub: Machine<'a, M, P>, out_start: U256, out_len: U256) {
        let sub_total_used_gas = sub.state.total_used_gas();
        self.state.logs.append(&mut sub.state.logs);
        self.state.used_gas += sub_total_used_gas;
        self.state.refunded_gas = self.state.refunded_gas + sub.state.refunded_gas;
        // The return data is written into the caller's designated out-region
        // whether the call succeeded or not.
        copy_into_memory_apply(&mut self.state.memory, sub.state.out.as_slice(), out_start, out_len);
        match sub.status() {
            MachineStatus::ExitedOk => {
                self.state.account_state = sub.state.account_state;
                self.state.removed = sub.state.removed;
                self.state.ret = Rc::new(Vec::new());
            }
            MachineStatus::ExitedErr(_) => {
                // Replace the top stack word with zero to signal failure and
                // expose the sub-machine's output as return data.
                self.state.stack.pop().unwrap();
                self.state.stack.push(M256::zero()).unwrap();
                self.state.ret = sub.state.out.clone();
            }
            _ => panic!(),
        }
    }
}
|
use std::time;
fn main() {
    // Entry point: report the current Unix time in milliseconds on stdout.
    print_time_in_millis();
}
/// Prints the current Unix time (milliseconds since 1970-01-01 UTC) to stdout.
fn print_time_in_millis() {
    let now = time::SystemTime::now();
    // `duration_since` only fails when the system clock reads earlier than
    // the epoch; say so instead of the previous unhelpful "WTF" message.
    let now_in_ms = now
        .duration_since(time::UNIX_EPOCH)
        .expect("system clock is set before the Unix epoch")
        .as_millis();
    println!("{}", now_in_ms);
}
//! The module contains widely used functions for more
//! comfortable work with Scheme's object from Rust.
use crate::errors::EvalErr;
use crate::object::Object;
use std::rc::Rc;
/// Converts lists to Vec of references to its elements.
pub fn list_to_vec(obj: &Object) -> Result<Vec<Rc<Object>>, EvalErr> {
    let mut elems = Vec::new();
    let mut rest = obj;
    // Walk the pair chain, collecting shared references to each car.
    while let Object::Pair(head, tail) = rest {
        elems.push(Rc::clone(head));
        rest = tail;
    }
    // A proper list terminates in nil; anything else is an improper list.
    if rest.is_nil() {
        Ok(elems)
    } else {
        Err(EvalErr::ListRequired(obj.to_string()))
    }
}
/// Converts a slice of references into a list object.
///
/// This function always succeeds.
pub fn vec_to_list(vec: &[Rc<Object>]) -> Object {
    // Fold from the right so the first element ends up at the list's head.
    // `iter()` replaces the misleading `into_iter()` (on `&[T]` they are the
    // same, but `iter()` states the by-reference intent); `Rc::clone` makes
    // the cheap refcount bump explicit.
    vec.iter().rfold(Object::Nil, |tail, elem| {
        Object::Pair(Rc::clone(elem), Rc::new(tail))
    })
}
/// Ensures the argument list has exactly `n` elements, passing it through
/// unchanged on success.
pub fn expect_args(vec: Vec<Rc<Object>>, func: &str, n: usize) -> Result<Vec<Rc<Object>>, EvalErr> {
    if vec.len() == n {
        Ok(vec)
    } else {
        Err(EvalErr::WrongAgrsNum(func.to_string(), n, vec.len()))
    }
}
/// Ensures the argument list has exactly one element and returns it.
pub fn expect_1_arg(vec: Vec<Rc<Object>>, func: &str) -> Result<Rc<Object>, EvalErr> {
    let args = expect_args(vec, func, 1)?;
    Ok(Rc::clone(&args[0]))
}
/// Ensures the argument list has exactly two elements and returns them as a
/// tuple.
pub fn expect_2_args(
    vec: Vec<Rc<Object>>, func: &str,
) -> Result<(Rc<Object>, Rc<Object>), EvalErr> {
    let mut args = expect_args(vec, func, 2)?.into_iter();
    // Arity was verified above, so both `next()` calls must succeed.
    let first = args.next().unwrap();
    let second = args.next().unwrap();
    Ok((first, second))
}
/// Ensures that the given object is a pair, returning borrowed references to
/// its car and cdr. Since Rust doesn't support types for enum variants, a
/// tuple stands in for the Ok value.
pub fn check_pair(obj: &Object) -> Result<(&Rc<Object>, &Rc<Object>), EvalErr> {
    if let Object::Pair(car, cdr) = obj {
        Ok((car, cdr))
    } else {
        Err(EvalErr::PairRequired(obj.to_string()))
    }
}
/// The symbol of undefined value, used when an expression has no exact
/// result (e.g. `cond` with all clauses false).
pub fn undef() -> Rc<Object> {
    let symbol = Object::Symbol(String::from("#<undef>"));
    Rc::new(symbol)
}
|
// Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use serde_json::Value;
use super::super::VmmAction;
use logger::{Metric, METRICS};
use request::{Body, Error, ParsedRequest, StatusCode};
// The names of the members from this enum must precisely correspond (as a string) to the possible
// values of "action_type" from the json request body. This is useful to get a strongly typed
// struct from the Serde deserialization process.
#[derive(Debug, Deserialize, Serialize)]
enum ActionType {
    // Re-scan a block device; requires a drive_id string payload.
    BlockDeviceRescan,
    // Flush the metrics; takes no payload.
    FlushMetrics,
    // Boot the microVM; takes no payload.
    InstanceStart,
    // Inject Ctrl+Alt+Del into the guest; x86_64 only, no payload.
    SendCtrlAltDel,
}
// The model of the json body from a sync request. We use Serde to transform each associated
// json body into this.
#[derive(Debug, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
struct ActionBody {
    // Discriminant deserialized from the "action_type" json field.
    action_type: ActionType,
    // Action-specific payload; only BlockDeviceRescan uses (and requires) one.
    #[serde(skip_serializing_if = "Option::is_none")]
    payload: Option<Value>,
}
fn validate_payload(action_body: &ActionBody) -> Result<(), Error> {
match action_body.action_type {
ActionType::BlockDeviceRescan => {
match action_body.payload {
Some(ref payload) => {
// Expecting to have drive_id as a String in the payload.
if !payload.is_string() {
return Err(Error::Generic(
StatusCode::BadRequest,
"Invalid payload type. Expected a string representing the drive_id"
.to_string(),
));
}
Ok(())
}
None => Err(Error::Generic(
StatusCode::BadRequest,
"Payload is required for block device rescan.".to_string(),
)),
}
}
ActionType::FlushMetrics | ActionType::InstanceStart | ActionType::SendCtrlAltDel => {
// Neither FlushMetrics nor InstanceStart should have a payload.
if action_body.payload.is_some() {
return Err(Error::Generic(
StatusCode::BadRequest,
format!("{:?} does not support a payload.", action_body.action_type),
));
}
Ok(())
}
}
}
/// Parses a PUT /actions request body into a `ParsedRequest`, validating the
/// payload against the action type first.
pub fn parse_put_actions(body: &Body) -> Result<ParsedRequest, Error> {
    METRICS.put_api_requests.actions_count.inc();
    let action_body = serde_json::from_slice::<ActionBody>(body.raw()).map_err(|e| {
        METRICS.put_api_requests.actions_fails.inc();
        Error::SerdeJson(e)
    })?;
    validate_payload(&action_body)?;
    match action_body.action_type {
        ActionType::BlockDeviceRescan => {
            // Safe to unwrap because we validated the payload in the validate_payload func.
            let block_device_id = action_body.payload.unwrap().as_str().unwrap().to_string();
            Ok(ParsedRequest::Sync(VmmAction::RescanBlockDevice(
                block_device_id,
            )))
        }
        ActionType::FlushMetrics => Ok(ParsedRequest::Sync(VmmAction::FlushMetrics)),
        ActionType::InstanceStart => Ok(ParsedRequest::Sync(VmmAction::StartMicroVm)),
        ActionType::SendCtrlAltDel => {
            // SendCtrlAltDel not supported on aarch64.
            // (Error message fixed: previously the ungrammatical
            // "does not supported".)
            #[cfg(target_arch = "aarch64")]
            return Err(Error::Generic(
                StatusCode::BadRequest,
                "SendCtrlAltDel is not supported on aarch64.".to_string(),
            ));
            #[cfg(target_arch = "x86_64")]
            Ok(ParsedRequest::Sync(VmmAction::SendCtrlAltDel))
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises every (action_type, payload) combination accepted or
    // rejected by validate_payload.
    #[test]
    fn test_validate_payload() {
        // Test InstanceStart.
        let action_body = ActionBody {
            action_type: ActionType::InstanceStart,
            payload: None,
        };
        assert!(validate_payload(&action_body).is_ok());
        // Error case: InstanceStart with payload.
        let action_body = ActionBody {
            action_type: ActionType::InstanceStart,
            payload: Some(Value::String("dummy-payload".to_string())),
        };
        assert!(validate_payload(&action_body).is_err());
        // Test BlockDeviceRescan
        let action_body = ActionBody {
            action_type: ActionType::BlockDeviceRescan,
            payload: Some(Value::String(String::from("dummy_id"))),
        };
        assert!(validate_payload(&action_body).is_ok());
        // Error case: no payload.
        let action_body = ActionBody {
            action_type: ActionType::BlockDeviceRescan,
            payload: None,
        };
        assert!(validate_payload(&action_body).is_err());
        // Error case: payload is not String.
        let action_body = ActionBody {
            action_type: ActionType::BlockDeviceRescan,
            payload: Some(Value::Bool(false)),
        };
        assert!(validate_payload(&action_body).is_err());
        // Test FlushMetrics.
        let action_body = ActionBody {
            action_type: ActionType::FlushMetrics,
            payload: None,
        };
        assert!(validate_payload(&action_body).is_ok());
        // Error case: FlushMetrics with payload.
        let action_body = ActionBody {
            action_type: ActionType::FlushMetrics,
            payload: Some(Value::String("metrics-payload".to_string())),
        };
        let res = validate_payload(&action_body);
        assert!(res.is_err());
        // Test SendCtrlAltDel.
        let action_body = ActionBody {
            action_type: ActionType::SendCtrlAltDel,
            payload: None,
        };
        assert!(validate_payload(&action_body).is_ok());
        // Error case: SendCtrlAltDel with payload.
        let action_body = ActionBody {
            action_type: ActionType::SendCtrlAltDel,
            payload: Some(Value::String("dummy-payload".to_string())),
        };
        assert!(validate_payload(&action_body).is_err());
    }
    // End-to-end parsing tests over raw JSON bodies; SendCtrlAltDel has
    // per-arch expectations matching the cfg split in parse_put_actions.
    #[test]
    fn test_parse_put_actions_request() {
        {
            assert!(parse_put_actions(&Body::new("invalid_body")).is_err());
            let json = r#"{
                "action_type": "BlockDeviceRescan",
                "payload": "dummy_id"
            }"#;
            let req = ParsedRequest::Sync(VmmAction::RescanBlockDevice("dummy_id".to_string()));
            let result = parse_put_actions(&Body::new(json));
            assert!(result.is_ok());
            assert!(result.unwrap().eq(&req));
        }
        {
            let json = r#"{
                "action_type": "InstanceStart"
            }"#;
            let req: ParsedRequest = ParsedRequest::Sync(VmmAction::StartMicroVm);
            let result = parse_put_actions(&Body::new(json));
            assert!(result.is_ok());
            assert!(result.unwrap().eq(&req));
        }
        #[cfg(target_arch = "x86_64")]
        {
            let json = r#"{
                "action_type": "SendCtrlAltDel"
            }"#;
            let req: ParsedRequest = ParsedRequest::Sync(VmmAction::SendCtrlAltDel);
            let result = parse_put_actions(&Body::new(json));
            assert!(result.is_ok());
            assert!(result.unwrap().eq(&req));
        }
        #[cfg(target_arch = "aarch64")]
        {
            let json = r#"{
                "action_type": "SendCtrlAltDel"
            }"#;
            let result = parse_put_actions(&Body::new(json));
            assert!(result.is_err());
        }
        {
            let json = r#"{
                "action_type": "FlushMetrics"
            }"#;
            let req: ParsedRequest = ParsedRequest::Sync(VmmAction::FlushMetrics);
            let result = parse_put_actions(&Body::new(json));
            assert!(result.is_ok());
            assert!(result.unwrap().eq(&req));
            let json = r#"{
                "action_type": "FlushMetrics",
                "payload": "metrics-payload"
            }"#;
            let result = parse_put_actions(&Body::new(json));
            assert!(result.is_err());
        }
    }
}
|
use engine::{self, EngineMessage};
use scheduler::{self, Async};
use polygon::geometry::mesh::{BuildMeshError, MeshBuilder};
use polygon::math::Vector2;
use obj::{self, Obj};
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::Path;
use std::string::FromUtf8Error;
use std::sync::atomic::{AtomicUsize, Ordering};
use stopwatch::Stopwatch;
pub mod collada;
// Global id sources; `fetch_add` hands out unique, monotonically increasing
// ids. They start at 1, so the value 0 is never used as an id.
static MESH_ID_COUNTER: AtomicUsize = AtomicUsize::new(1);
static MATERIAL_ID_COUNTER: AtomicUsize = AtomicUsize::new(1);
/// Load all data from the specified file as an array of bytes.
pub fn load_file_bytes<'a, P>(path: P) -> Async<'a, Result<Vec<u8>, io::Error>>
where
    P: 'a,
    P: AsRef<Path> + Send,
{
    scheduler::start(move || {
        let _s = Stopwatch::new("Load file bytes");
        let mut file = File::open(path)?;
        // Pre-size the buffer from file metadata when it's available; a
        // zero capacity costs nothing if metadata can't be read.
        let capacity = file.metadata().map(|m| m.len() as usize).unwrap_or(0);
        let mut bytes = Vec::with_capacity(capacity);
        file.read_to_end(&mut bytes)?;
        Ok(bytes)
    })
}
/// Load all data from the specified file as a `String`.
pub fn load_file_text<'a, P>(path: P) -> Async<'a, Result<String, LoadTextError>>
where
P: 'a,
P: AsRef<Path> + Send,
{
scheduler::start(move || {
let _s = Stopwatch::new("Load file text");
let bytes = load_file_bytes(path).await()?;
let result = String::from_utf8(bytes).map_err(|utf8_err| utf8_err.into());
result
})
}
/// Errors that can occur while loading a file as text.
#[derive(Debug)]
pub enum LoadTextError {
    // The underlying file I/O failed.
    Io(io::Error),
    // The file's bytes were not valid UTF-8.
    Utf8(FromUtf8Error),
}
impl From<io::Error> for LoadTextError {
    fn from(from: io::Error) -> LoadTextError {
        LoadTextError::Io(from)
    }
}
impl From<FromUtf8Error> for LoadTextError {
    fn from(from: FromUtf8Error) -> LoadTextError {
        LoadTextError::Utf8(from)
    }
}
/// Loads a mesh from disk.
///
/// Loads a mesh data from the specified path and performs any necessary processing to prepare it
/// to be used in rendering. The file extension selects the parser:
/// "dae" -> COLLADA, "obj" -> Wavefront OBJ; anything else is rejected.
pub fn load_mesh<'a, P>(path: P) -> Async<'a, Result<Mesh, LoadMeshError>>
where
    P: 'a,
    P: AsRef<Path> + Send + Into<String>
{
    scheduler::start(move || {
        let _s = Stopwatch::new("Load mesh");
        let extension: Option<String> = path.as_ref().extension().map(|ext| ext.to_string_lossy().into_owned());
        // Load mesh source and parse mesh data based on file type.
        let mesh_data = match extension {
            Some(ref ext) if ext == "dae" => {
                let text = load_file_text(path).await()?;
                collada::load_resources(text)?
            },
            Some(ref ext) if ext == "obj" => {
                let text = load_file_text(path).await()?;
                // Load mesh file and normalize indices for OpenGL.
                let obj = Obj::from_str(&*text)?;
                // Gather vertex data so that OpenGL can use them.
                let mut positions = Vec::new();
                let mut normals = Vec::new();
                let mut texcoords = Vec::new();
                // Iterate over each of the faces in the mesh.
                for face in obj.faces() {
                    // Iterate over each of the vertices in the face to combine the position and normal into
                    // a single vertex.
                    for (position, maybe_tex, maybe_normal) in face {
                        positions.push(position.into());
                        // NOTE: The w texcoord is provided according to the bitmap spec but we don't need to
                        // use it here, so we simply ignore it.
                        if let Some((u, v, _w)) = maybe_tex {
                            texcoords.push(Vector2::new(u, v));
                        }
                        if let Some(normal) = maybe_normal {
                            normals.push(normal.into());
                        }
                    }
                }
                // Create indices list.
                // NOTE(review): this assumes 3 indices per position-index
                // entry (i.e. triangulated faces) — confirm against the obj
                // parser before relying on it for quads/ngons.
                let indices_count = obj.position_indices().len() as u32 * 3;
                let indices: Vec<u32> = (0..indices_count).collect();
                MeshBuilder::new()
                    .set_position_data(&*positions)
                    .set_normal_data(&*normals)
                    .set_texcoord_data(&*texcoords)
                    .set_indices(&*indices)
                    .build()?
            },
            _ => {
                return Err(LoadMeshError::UnsupportedFileType(path.into()));
            },
        };
        // Create handle for mesh data and asynchronously register it with the renderer.
        let mesh_id = MESH_ID_COUNTER.fetch_add(1, Ordering::Relaxed);
        engine::send_message(EngineMessage::Mesh(mesh_id, mesh_data));
        Ok(Mesh(mesh_id))
    })
}
/// Identifier assigned to a mesh when it is registered with the renderer.
pub type MeshId = usize;
/// Handle to a mesh that has been sent to the renderer for registration.
#[derive(Debug)]
pub struct Mesh(MeshId);
impl Mesh {
    // TODO: Make this private to the crate.
    /// Returns the renderer-side id of this mesh.
    pub fn id(&self) -> MeshId {
        self.0
    }
}
impl Drop for Mesh {
    fn drop(&mut self) {
        // TODO: How do we cleanup a mesh?
    }
}
/// Errors that can occur while loading and registering a mesh.
#[derive(Debug)]
pub enum LoadMeshError {
    // Parsed mesh data could not be assembled by `MeshBuilder`.
    BuildMeshError(BuildMeshError),
    // Reading the source file failed (I/O or UTF-8).
    LoadTextError(LoadTextError),
    // The COLLADA (".dae") parser rejected the file.
    ParseColladaError(collada::Error),
    // The OBJ parser rejected the file.
    ParseObjError(obj::Error),
    // The file extension matched none of the supported formats.
    UnsupportedFileType(String),
}
impl From<BuildMeshError> for LoadMeshError {
    fn from(from: BuildMeshError) -> LoadMeshError {
        LoadMeshError::BuildMeshError(from)
    }
}
impl From<LoadTextError> for LoadMeshError {
    fn from(from: LoadTextError) -> LoadMeshError {
        LoadMeshError::LoadTextError(from)
    }
}
impl From<collada::Error> for LoadMeshError {
    fn from(from: collada::Error) -> LoadMeshError {
        LoadMeshError::ParseColladaError(from)
    }
}
impl From<obj::Error> for LoadMeshError {
    fn from(from: obj::Error) -> LoadMeshError {
        LoadMeshError::ParseObjError(from)
    }
}
/// Loads and parses a material from disk, then registers it with the
/// renderer asynchronously.
pub fn load_material<'a, P>(path: P) -> Async<'a, Result<Material, LoadMaterialError>>
where
    P: 'a,
    P: AsRef<Path> + Send
{
    scheduler::start(move || {
        let _s = Stopwatch::new("Load material");
        // Load and parse material data.
        let source_text = load_file_text(path).await()?;
        let parsed = ::polygon::material::MaterialSource::from_str(source_text)?;
        // Reserve an id and hand the parsed source to the renderer.
        let id = MATERIAL_ID_COUNTER.fetch_add(1, Ordering::Relaxed);
        engine::send_message(EngineMessage::Material(id, parsed));
        Ok(Material(id))
    })
}
/// Identifier assigned to a material when it is registered with the renderer.
pub type MaterialId = usize;
/// Handle to a material that has been sent to the renderer for registration.
#[derive(Debug)]
pub struct Material(MaterialId);
/// Errors that can occur while loading a material.
#[derive(Debug)]
pub enum LoadMaterialError {
    // Reading the source file failed (I/O or UTF-8).
    LoadTextError(LoadTextError),
    // The renderer could not build the material.
    BuildMaterialError(::polygon::BuildMaterialError),
    // The material source text failed to parse.
    ParseMaterialError(::polygon::material::MaterialSourceError),
}
// Conversions enabling `?` on the underlying error types in load_material.
impl From<LoadTextError> for LoadMaterialError {
    fn from(from: LoadTextError) -> LoadMaterialError {
        LoadMaterialError::LoadTextError(from)
    }
}
impl From<::polygon::material::MaterialSourceError> for LoadMaterialError {
    fn from(from: ::polygon::material::MaterialSourceError) -> LoadMaterialError {
        LoadMaterialError::ParseMaterialError(from)
    }
}
impl From<::polygon::BuildMaterialError> for LoadMaterialError {
    fn from(from: ::polygon::BuildMaterialError) -> LoadMaterialError {
        LoadMaterialError::BuildMaterialError(from)
    }
}
|
#![allow(unused)]
use ert::current;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
/// Test helper that remembers, per key, the value `ert::current()` returned
/// when the key was added, so `check` can later assert it is unchanged.
#[derive(Clone)]
pub struct Checker {
    // key -> value of `ert::current()` recorded by `add`. Shared via Arc so
    // every clone observes the same map.
    map: Arc<Mutex<HashMap<u64, u64>>>,
}
impl Checker {
    /// Creates an empty checker with its own shared map.
    pub fn new() -> Self {
        let map = Arc::new(Mutex::new(HashMap::new()));
        Self { map }
    }
    /// Records the current `ert::current()` value under key `i`.
    pub fn add(&self, i: u64) {
        let mut entries = self.map.lock().unwrap();
        entries.insert(i, current());
    }
    /// Panics unless `ert::current()` now matches what `add` recorded for `i`.
    pub fn check(&self, i: u64) {
        let entries = self.map.lock().unwrap();
        assert_eq!(current(), *entries.get(&i).unwrap())
    }
}
|
// NOTE(review): this file appears to be machine-generated (svd2rust-style
// register API). Prefer regenerating from the SVD over hand-editing.
#[doc = "Reader of register FTSR2"]
pub type R = crate::R<u32, super::FTSR2>;
#[doc = "Writer for register FTSR2"]
pub type W = crate::W<u32, super::FTSR2>;
#[doc = "Register FTSR2 `reset()`'s with value 0"]
impl crate::ResetValue for super::FTSR2 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // All falling-trigger selections disabled after reset.
        0
    }
}
#[doc = "Reader of field `FT35`"]
pub type FT35_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FT35`"]
pub struct FT35_W<'a> {
    // Borrow of the whole register writer; released when the proxy is used.
    w: &'a mut W,
}
impl<'a> FT35_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 3, then set it from `value` (FT35 occupies bit offset 3).
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `FT36`"]
pub type FT36_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FT36`"]
pub struct FT36_W<'a> {
    w: &'a mut W,
}
impl<'a> FT36_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 4, then set it from `value` (FT36 occupies bit offset 4).
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `FT37`"]
pub type FT37_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FT37`"]
pub struct FT37_W<'a> {
    w: &'a mut W,
}
impl<'a> FT37_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 5, then set it from `value` (FT37 occupies bit offset 5).
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `FT38`"]
pub type FT38_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FT38`"]
pub struct FT38_W<'a> {
    w: &'a mut W,
}
impl<'a> FT38_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 6, then set it from `value` (FT38 occupies bit offset 6).
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
// Field readers: each extracts one bit of the register value as a bool.
impl R {
    #[doc = "Bit 3 - FT35"]
    #[inline(always)]
    pub fn ft35(&self) -> FT35_R {
        FT35_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - FT36"]
    #[inline(always)]
    pub fn ft36(&self) -> FT36_R {
        FT36_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - FT37"]
    #[inline(always)]
    pub fn ft37(&self) -> FT37_R {
        FT37_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - FT38"]
    #[inline(always)]
    pub fn ft38(&self) -> FT38_R {
        FT38_R::new(((self.bits >> 6) & 0x01) != 0)
    }
}
// Field writers: each hands out a proxy borrowing the whole writer, so
// modifications can be chained before the register is committed.
impl W {
    #[doc = "Bit 3 - FT35"]
    #[inline(always)]
    pub fn ft35(&mut self) -> FT35_W {
        FT35_W { w: self }
    }
    #[doc = "Bit 4 - FT36"]
    #[inline(always)]
    pub fn ft36(&mut self) -> FT36_W {
        FT36_W { w: self }
    }
    #[doc = "Bit 5 - FT37"]
    #[inline(always)]
    pub fn ft37(&mut self) -> FT37_W {
        FT37_W { w: self }
    }
    #[doc = "Bit 6 - FT38"]
    #[inline(always)]
    pub fn ft38(&mut self) -> FT38_W {
        FT38_W { w: self }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.