text stringlengths 8 4.13M |
|---|
use super::common;
use serde_json::Value;
use veloci::*;
// #[macro_use]
// mod common;
/// Builds the in-memory JSON corpus for the stopword tests: four documents
/// whose titles all contain the token "erbin", the last of which also
/// carries a matching subtitle.
pub fn get_test_data() -> Value {
    let documents = json!([
        { "title": "die erbin" },
        { "title": "erbin" },
        { "title": "der die erbin" },
        { "title": "asdf die erbin", "subtitles": ["die erbin"] }
    ]);
    documents
}
// Folder name under which this suite's persistence artifacts are created.
static TEST_FOLDER: &str = "test_stopwords";
lazy_static! {
    // Shared, lazily-built search index over `get_test_data()`.
    // Both `title` and the `subtitles[]` array are indexed with full-text
    // tokenization and the Search/PhraseBoost/BoostTextLocality features.
    static ref TEST_PERSISTENCE: persistence::Persistence = {
        let indices = r#"{ "title":{"features": ["Search","PhraseBoost","BoostTextLocality"], "fulltext":{"tokenize":true}}, "subtitles[]":{"features": ["Search","PhraseBoost","BoostTextLocality"], "fulltext":{"tokenize":true} }} "#;
        common::create_test_persistence(TEST_FOLDER, indices, &get_test_data().to_string().as_bytes(), None)
    };
}
// TODO add tests ..
|
use std::fs::File;
use std::io::{BufRead, BufReader};
use itertools::Itertools;
/// Resolves an Intcode parameter at `ip` according to its addressing `mode`.
///
/// * mode 0 (position): `commands[ip]` is an address; return the value stored there.
/// * mode 1 (immediate): return `commands[ip]` itself.
///
/// Any other mode is reported on stdout and resolves to 0 so the caller can
/// keep running (matching the interpreter's other best-effort diagnostics).
fn get_parameter(commands: &[i32], ip: usize, mode: i32) -> i32 {
    match mode {
        0 => commands[commands[ip] as usize],
        1 => commands[ip],
        _ => {
            println!("Invalid mode: {}, ip: {}, command: {}, commands: {:?}", mode, ip, commands[ip], commands);
            0
        }
    }
}
/// Runs an Intcode program until it emits an output, halts, or faults.
///
/// `commands` is the mutable program memory; execution resumes at
/// `instruction_pointer` so a previously paused amplifier can continue.
/// The first input instruction executed consumes `phase_setting` when
/// `phase_setting > 0`; every subsequent input reads `input_signal`.
///
/// Returns `(output_value, next_ip)`:
/// * opcode 4 (output): the produced value and the ip to resume from;
/// * opcode 99 (halt): the last remembered output and `-1` as halt marker;
/// * memory fault / fall-through: `(-1, -1)`.
fn emulate(commands: &mut Vec<i32>, instruction_pointer: i32, phase_setting: i32, input_signal: i32) -> (i32, i32) {
    let mut ip = instruction_pointer as usize;
    // Executed-instruction counter; only used by the commented-out tracing.
    let mut count = 0;
    // 0 => the next input instruction reads `phase_setting`; anything else
    // reads `input_signal`. A non-positive phase (the caller's -1 sentinel on
    // resumed runs) skips the phase input entirely.
    let mut input_commands = if phase_setting > 0 { 0 } else { 1 };
    // Remembered so a later halt (opcode 99) can report the final output.
    let mut output_value = 0;
    loop {
        if ip >= commands.len() {
            println!("Outside memory");
            break;
        }
        count += 1;
        // Decode: the two low digits are the opcode; higher digits are the
        // per-parameter addressing modes (0 = position, 1 = immediate).
        let full_opcode = commands[ip];
        let opcode = full_opcode % 100;
        let mode1 = (full_opcode / 100) % 10;
        let mode2 = (full_opcode / 1000) % 10;
        let mode3 = (full_opcode / 10000) % 10;
        match opcode {
            // add: [dst] = param1 + param2
            1 => {
                if ip + 4 > commands.len() {
                    println!("Outside memory");
                    break;
                }
                let param1 = get_parameter(commands, ip + 1, mode1);
                let param2 = get_parameter(commands, ip + 2, mode2);
                let new_value = param1 + param2;
                // Destination operands must be positional.
                assert_eq!(mode3, 0);
                let dst = commands[ip + 3] as usize;
                commands[dst] = new_value;
                ip += 4;
            },
            // multiply: [dst] = param1 * param2
            2 => {
                if ip + 4 > commands.len() {
                    println!("Outside memory");
                    break;
                }
                let param1 = get_parameter(commands, ip + 1, mode1);
                let param2 = get_parameter(commands, ip + 2, mode2);
                let new_value = param1 * param2;
                assert_eq!(mode3, 0);
                let dst = commands[ip + 3] as usize;
                commands[dst] = new_value;
                ip += 4;
            },
            // input: store the next input value at [dst]
            3 => {
                if ip + 2 > commands.len() {
                    println!("Outside memory");
                    break;
                }
                let value = if input_commands == 0 { phase_setting } else { input_signal };
                // println!("Process input {} -> {}", input_commands, value);
                input_commands += 1;
                let input_idx = commands[ip + 1] as usize;
                commands[input_idx] = value;
                ip += 2;
            },
            // output: yield param1 to the caller and pause after this instruction
            4 => {
                if ip + 2 > commands.len() {
                    println!("Outside memory");
                    break;
                }
                let value = get_parameter(commands, ip + 1, mode1);
                output_value = value;
                // println!("{} - Output: {}", count, value);
                ip += 2;
                return (value, ip as i32);
            },
            // jump-if-true: ip = param2 when param1 != 0
            5 => {
                if ip + 3 > commands.len() {
                    println!("Outside memory");
                    break;
                }
                let param1 = get_parameter(commands, ip + 1, mode1);
                if param1 != 0 {
                    let value = get_parameter(commands, ip + 2, mode2);
                    if value < 0 {
                        println!("Invalid jump address {}", value);
                    }
                    ip = value as usize;
                } else {
                    ip += 3;
                }
            },
            // jump-if-false: ip = param2 when param1 == 0
            6 => {
                if ip + 3 > commands.len() {
                    println!("Outside memory");
                    break;
                }
                let param1 = get_parameter(commands, ip + 1, mode1);
                if param1 == 0 {
                    let value = get_parameter(commands, ip + 2, mode2);
                    if value < 0 {
                        println!("Invalid jump address {}", value);
                    }
                    ip = value as usize;
                } else {
                    ip += 3;
                }
            },
            // less-than: [dst] = (param1 < param2) as 1/0
            7 => {
                if ip + 4 > commands.len() {
                    println!("Outside memory");
                    break;
                }
                let param1 = get_parameter(commands, ip + 1, mode1);
                let param2 = get_parameter(commands, ip + 2, mode2);
                let new_value = if param1 < param2 { 1 } else { 0 };
                assert_eq!(mode3, 0);
                let dst = commands[ip + 3] as usize;
                commands[dst] = new_value;
                ip += 4;
            },
            // equals: [dst] = (param1 == param2) as 1/0
            8 => {
                if ip + 4 > commands.len() {
                    println!("Outside memory");
                    break;
                }
                let param1 = get_parameter(commands, ip + 1, mode1);
                let param2 = get_parameter(commands, ip + 2, mode2);
                let new_value = if param1 == param2 { 1 } else { 0 };
                assert_eq!(mode3, 0);
                let dst = commands[ip + 3] as usize;
                commands[dst] = new_value;
                ip += 4;
            },
            // halt: report the last output and -1 so the caller stops resuming us
            99 => {
                // println!("{} - Halting", count);
                return (output_value, -1);
            },
            _ => println!("Invalid command: {}, ip: {}, commands: {:?}", commands[ip], ip, commands),
        }
    }
    // Reached only via a memory fault or an invalid opcode falling out of the loop.
    return (-1, -1);
}
/// AoC 2019 day 7 part 2: run five Intcode amplifiers in a feedback loop for
/// every permutation of the phase settings 5..=9 and report the highest final
/// output signal together with the permutation that produced it.
fn main() {
    // let filename = "src/input0";
    let filename = "../part1/src/input";
    // Open the file in read-only mode (ignoring errors).
    let file = File::open(filename).unwrap();
    let reader = BufReader::new(file);
    let mut commands = Vec::new();
    // Read the file line by line using the lines() iterator from std::io::BufRead.
    for (_, line) in reader.lines().enumerate() {
        let line = line.unwrap(); // Ignore errors.
        // The whole program is one comma-separated line of integers.
        let commands_str = line.split(",");
        for comm_str in commands_str {
            let comm: i32 = comm_str.parse().unwrap();
            commands.push(comm);
        }
        // Only the first line holds the program.
        break;
    }
    let mut max_output_amplifier = 0;
    let phase_setting: Vec<i32> = vec![5, 6, 7, 8, 9];
    let mut best_perm: Vec<&i32> = Vec::new();
    for perm in phase_setting.iter().permutations(phase_setting.len()) {
        let mut input_signal = 0;
        let mut first_execution = true;
        // Each amplifier keeps its own memory image and its own resume pointer.
        let mut commands_list: Vec<Vec<i32>> = vec![commands.clone(); 5];
        let mut ip_list: Vec<i32> = vec![0; 5];
        loop {
            let mut ip_idx = 0;
            for ps in &perm {
                // The phase is only fed on the first pass; afterwards -1 tells
                // emulate() to read the chained input signal instead.
                let output = emulate(&mut commands_list[ip_idx], ip_list[ip_idx], if first_execution { **ps } else { -1 }, input_signal);
                // println!("Output for amp {} and phase setting {} -> ({}, {})", ip_idx, **ps, output.0, output.1);
                input_signal = output.0;
                ip_list[ip_idx] = output.1;
                ip_idx += 1;
                if output.1 == -1 {
                    // println!("Program halt in amplifier {}", ip_idx);
                    break;
                }
            }
            first_execution = false;
            if input_signal > max_output_amplifier {
                best_perm = perm.clone();
                max_output_amplifier = input_signal;
            }
            // NOTE(review): with five amplifiers ip_idx can never be 6, so the
            // inner `!= 6` test is always true and this breaks whenever the
            // most recently run amplifier halted — confirm that guard is
            // intentional and not a leftover off-by-one.
            if ip_list[ip_idx - 1] == -1 {
                if ip_idx != 6 {
                    break;
                }
            }
        }
    }
    println!("Solution: {}", max_output_amplifier);
    println!("Best Phase Setting permutation: {:?}", best_perm);
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
// GDB Remote Serial Protocol packet handlers, one module per packet family.
// Module names mirror the packet's leading character(s) (`_upper` marks an
// upper-case letter); commented-out entries are not implemented yet.
mod _QuestionMark;
//mod _c;
mod _d_upper;
mod _g;
mod _g_upper;
mod _h_upper;
//mod _k;
mod _m;
mod _m_upper;
//mod _p;
//mod _p_upper;
mod _QStartNoAckMode;
mod _QThreadEvents;
mod _qAttached;
mod _qC;
mod _qSupported;
mod _qXfer;
mod _qfThreadInfo;
mod _qsThreadInfo;
//mod _s;
//mod _t_upper;
mod _vCont;
mod _vFile;
mod _vKill;
mod _x_upper;
mod _z;
mod _z_upper;
// Re-export every implemented handler at this module's root.
//pub use _p::*;
//pub use _p_upper::*;
pub use _QStartNoAckMode::*;
pub use _QThreadEvents::*;
pub use _QuestionMark::*;
//pub use _c::*;
pub use _d_upper::*;
pub use _g::*;
pub use _g_upper::*;
pub use _h_upper::*;
//pub use _k::*;
pub use _m::*;
pub use _m_upper::*;
pub use _qAttached::*;
pub use _qC::*;
pub use _qSupported::*;
pub use _qXfer::*;
pub use _qfThreadInfo::*;
pub use _qsThreadInfo::*;
//pub use _s::*;
//pub use _t_upper::*;
pub use _vCont::*;
pub use _vFile::*;
pub use _vKill::*;
pub use _x_upper::*;
pub use _z::*;
pub use _z_upper::*;
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
/// Operations on backup protection intents (validate / get / create / delete).
pub mod protection_intent {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Pre-validates enabling backup protection for a resource in `azure_region`.
    ///
    /// POSTs `parameters` to the regional `backupPreValidateProtection` endpoint
    /// and deserializes a 200 response; any other status is returned as
    /// [`validate::Error::UnexpectedResponse`] with the raw body attached.
    pub async fn validate(
        operation_config: &crate::OperationConfig,
        azure_region: &str,
        subscription_id: &str,
        parameters: &PreValidateEnableBackupRequest,
    ) -> std::result::Result<PreValidateEnableBackupResponse, validate::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/Subscriptions/{}/providers/Microsoft.RecoveryServices/locations/{}/backupPreValidateProtection",
            &operation_config.base_path, subscription_id, azure_region
        );
        let mut req_builder = client.post(uri_str);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(validate::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(validate::BuildRequestError)?;
        let rsp = client.execute(req).await.context(validate::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(validate::ResponseBytesError)?;
                let rsp_value: PreValidateEnableBackupResponse =
                    serde_json::from_slice(&body).context(validate::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(validate::ResponseBytesError)?;
                validate::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`validate`].
    pub mod validate {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// Fetches a single protection intent resource by fabric and object name.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        vault_name: &str,
        resource_group_name: &str,
        subscription_id: &str,
        fabric_name: &str,
        intent_object_name: &str,
    ) -> std::result::Result<ProtectionIntentResource, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/Subscriptions/{}/resourceGroups/{}/providers/Microsoft.RecoveryServices/vaults/{}/backupFabrics/{}/backupProtectionIntent/{}",
            &operation_config.base_path, subscription_id, resource_group_name, vault_name, fabric_name, intent_object_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ProtectionIntentResource = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                get::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// Creates or updates a protection intent resource (HTTP PUT).
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        vault_name: &str,
        resource_group_name: &str,
        subscription_id: &str,
        fabric_name: &str,
        intent_object_name: &str,
        parameters: &ProtectionIntentResource,
    ) -> std::result::Result<ProtectionIntentResource, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/Subscriptions/{}/resourceGroups/{}/providers/Microsoft.RecoveryServices/vaults/{}/backupFabrics/{}/backupProtectionIntent/{}",
            &operation_config.base_path, subscription_id, resource_group_name, vault_name, fabric_name, intent_object_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ProtectionIntentResource =
                    serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                create_or_update::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`create_or_update`].
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// Deletes a protection intent resource; success is a 204 No Content.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        vault_name: &str,
        resource_group_name: &str,
        subscription_id: &str,
        fabric_name: &str,
        intent_object_name: &str,
    ) -> std::result::Result<(), delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/Subscriptions/{}/resourceGroups/{}/providers/Microsoft.RecoveryServices/vaults/{}/backupFabrics/{}/backupProtectionIntent/{}",
            &operation_config.base_path, subscription_id, resource_group_name, vault_name, fabric_name, intent_object_name
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::NO_CONTENT => Ok(()),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                delete::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
/// Query the backup status of a resource in a given Azure region.
pub mod backup_status {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// POSTs `parameters` to the regional `backupStatus` endpoint and returns
    /// the deserialized 200 response; any other status becomes
    /// [`get::Error::UnexpectedResponse`] carrying the raw body.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        azure_region: &str,
        subscription_id: &str,
        parameters: &BackupStatusRequest,
    ) -> std::result::Result<BackupStatusResponse, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/Subscriptions/{}/providers/Microsoft.RecoveryServices/locations/{}/backupStatus",
            &operation_config.base_path, subscription_id, azure_region
        );
        let mut req_builder = client.post(uri_str);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: BackupStatusResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                get::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
/// Validate whether backup features are supported for an Azure VM resource.
pub mod feature_support {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// POSTs `parameters` to the regional `backupValidateFeatures` endpoint and
    /// returns the deserialized 200 response; any other status becomes
    /// [`validate::Error::UnexpectedResponse`] carrying the raw body.
    pub async fn validate(
        operation_config: &crate::OperationConfig,
        azure_region: &str,
        subscription_id: &str,
        parameters: &FeatureSupportRequest,
    ) -> std::result::Result<AzureVmResourceFeatureSupportResponse, validate::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/Subscriptions/{}/providers/Microsoft.RecoveryServices/locations/{}/backupValidateFeatures",
            &operation_config.base_path, subscription_id, azure_region
        );
        let mut req_builder = client.post(uri_str);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(validate::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(validate::BuildRequestError)?;
        let rsp = client.execute(req).await.context(validate::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(validate::ResponseBytesError)?;
                let rsp_value: AzureVmResourceFeatureSupportResponse =
                    serde_json::from_slice(&body).context(validate::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(validate::ResponseBytesError)?;
                validate::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`validate`].
    pub mod validate {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
/// List the backup protection intents registered in a vault.
pub mod backup_protection_intent {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// GETs the vault's `backupProtectionIntents` collection.
    ///
    /// `filter` and `skip_token` are optional OData query parameters and are
    /// only appended when supplied.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        vault_name: &str,
        resource_group_name: &str,
        subscription_id: &str,
        filter: Option<&str>,
        skip_token: Option<&str>,
    ) -> std::result::Result<ProtectionIntentResourceList, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/Subscriptions/{}/resourceGroups/{}/providers/Microsoft.RecoveryServices/vaults/{}/backupProtectionIntents",
            &operation_config.base_path, subscription_id, resource_group_name, vault_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(filter) = filter {
            req_builder = req_builder.query(&[("$filter", filter)]);
        }
        if let Some(skip_token) = skip_token {
            req_builder = req_builder.query(&[("$skipToken", skip_token)]);
        }
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ProtectionIntentResourceList = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                list::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`list`].
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
/// List backup usage summaries for a Recovery Services vault.
pub mod backup_usage_summaries {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// GETs the vault's `backupUsageSummaries` collection.
    ///
    /// `filter` and `skip_token` are optional OData query parameters and are
    /// only appended when supplied.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        vault_name: &str,
        resource_group_name: &str,
        subscription_id: &str,
        filter: Option<&str>,
        skip_token: Option<&str>,
    ) -> std::result::Result<BackupManagementUsageList, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/Subscriptions/{}/resourceGroups/{}/providers/Microsoft.RecoveryServices/vaults/{}/backupUsageSummaries",
            &operation_config.base_path, subscription_id, resource_group_name, vault_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(filter) = filter {
            req_builder = req_builder.query(&[("$filter", filter)]);
        }
        if let Some(skip_token) = skip_token {
            req_builder = req_builder.query(&[("$skipToken", skip_token)]);
        }
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: BackupManagementUsageList = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                list::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`list`].
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
|
/// Tiny demo module exposing two greeting helpers that print to stdout.
mod robot {
    /// Prints the "hi" greeting.
    pub fn say_hi() {
        println!("Saying hi!!!");
    }

    /// Prints the "hello" greeting.
    pub fn say_hello() {
        println!("Saying hello!!!");
    }
}
fn main() {
robot::say_hi();
use robot::say_hello;
say_hello();
} |
// Copyright 2020 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Decimal Fixed Point implementations for Substrate runtime.
use crate::{
helpers_128bit::multiply_by_rational,
traits::{
Bounded, CheckedAdd, CheckedDiv, CheckedMul, CheckedNeg, CheckedSub, One,
SaturatedConversion, Saturating, UniqueSaturatedInto, Zero,
},
PerThing,
};
use codec::{Decode, Encode};
use sp_std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::{self, Add, Div, Mul, Sub},
prelude::*,
};
#[cfg(feature = "std")]
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
/// Integer types that can be used to interact with `FixedPointNumber` implementations.
///
/// This is a marker trait: the supertrait bounds (bounded, zero, saturating
/// arithmetic, lossless-saturating conversion through `u128`, and checked
/// negation) are everything the fixed-point algorithms below need, so the
/// trait body is empty.
pub trait FixedPointOperand:
    Copy
    + Clone
    + Bounded
    + Zero
    + Saturating
    + PartialOrd
    + UniqueSaturatedInto<u128>
    + TryFrom<u128>
    + CheckedNeg
{
}
// Every primitive signed and unsigned integer type qualifies as an operand.
impl FixedPointOperand for i128 {}
impl FixedPointOperand for u128 {}
impl FixedPointOperand for i64 {}
impl FixedPointOperand for u64 {}
impl FixedPointOperand for i32 {}
impl FixedPointOperand for u32 {}
impl FixedPointOperand for i16 {}
impl FixedPointOperand for u16 {}
impl FixedPointOperand for i8 {}
impl FixedPointOperand for u8 {}
/// Something that implements a decimal fixed point number.
///
/// The precision is given by `Self::DIV`, i.e. `1 / DIV` can be represented.
///
/// Each type can store numbers from `Self::Inner::min_value() / Self::DIV`
/// to `Self::Inner::max_value() / Self::DIV`.
/// This is also referred to as the _accuracy_ of the type in the documentation.
pub trait FixedPointNumber:
Sized
+ Copy
+ Default
+ Debug
+ Saturating
+ Bounded
+ Eq
+ PartialEq
+ Ord
+ PartialOrd
+ CheckedSub
+ CheckedAdd
+ CheckedMul
+ CheckedDiv
+ Add
+ Sub
+ Div
+ Mul
{
/// The underlying data type used for this fixed point number.
type Inner: Debug + One + CheckedMul + CheckedDiv + FixedPointOperand;
/// Precision of this fixed point implementation. It should be a power of `10`.
const DIV: Self::Inner;
/// Indicates if this fixed point implementation is signed or not.
const SIGNED: bool;
/// Precision of this fixed point implementation.
fn accuracy() -> Self::Inner {
Self::DIV
}
/// Builds this type from an integer number.
fn from_inner(int: Self::Inner) -> Self;
/// Consumes `self` and returns the inner raw value.
fn into_inner(self) -> Self::Inner;
/// Creates self from an integer number `int`.
///
/// Returns `Self::max` or `Self::min` if `int` exceeds accuracy.
fn saturating_from_integer<N: FixedPointOperand>(int: N) -> Self {
let mut n: I129 = int.into();
n.value = n.value.saturating_mul(Self::DIV.saturated_into());
Self::from_inner(from_i129(n).unwrap_or_else(|| to_bound(int, 0)))
}
/// Creates `self` from an integer number `int`.
///
/// Returns `None` if `int` exceeds accuracy.
fn checked_from_integer(int: Self::Inner) -> Option<Self> {
int.checked_mul(&Self::DIV).map(|inner| Self::from_inner(inner))
}
/// Creates `self` from a rational number. Equal to `n / d`.
///
/// Panics if `d = 0`. Returns `Self::max` or `Self::min` if `n / d` exceeds accuracy.
fn saturating_from_rational<N: FixedPointOperand, D: FixedPointOperand>(n: N, d: D) -> Self {
if d == D::zero() {
panic!("attempt to divide by zero")
}
Self::checked_from_rational(n, d).unwrap_or_else(|| to_bound(n, d))
}
/// Creates `self` from a rational number. Equal to `n / d`.
///
/// Returns `None` if `d == 0` or `n / d` exceeds accuracy.
fn checked_from_rational<N: FixedPointOperand, D: FixedPointOperand>(
n: N,
d: D,
) -> Option<Self> {
if d == D::zero() {
return None
}
let n: I129 = n.into();
let d: I129 = d.into();
let negative = n.negative != d.negative;
multiply_by_rational(n.value, Self::DIV.unique_saturated_into(), d.value)
.ok()
.and_then(|value| from_i129(I129 { value, negative }))
.map(|inner| Self::from_inner(inner))
}
/// Checked multiplication for integer type `N`. Equal to `self * n`.
///
/// Returns `None` if the result does not fit in `N`.
fn checked_mul_int<N: FixedPointOperand>(self, n: N) -> Option<N> {
let lhs: I129 = self.into_inner().into();
let rhs: I129 = n.into();
let negative = lhs.negative != rhs.negative;
multiply_by_rational(lhs.value, rhs.value, Self::DIV.unique_saturated_into())
.ok()
.and_then(|value| from_i129(I129 { value, negative }))
}
/// Saturating multiplication for integer type `N`. Equal to `self * n`.
///
/// Returns `N::min` or `N::max` if the result does not fit in `N`.
fn saturating_mul_int<N: FixedPointOperand>(self, n: N) -> N {
self.checked_mul_int(n).unwrap_or_else(|| to_bound(self.into_inner(), n))
}
/// Checked division for integer type `N`. Equal to `self / d`.
///
/// Returns `None` if the result does not fit in `N` or `d == 0`.
fn checked_div_int<N: FixedPointOperand>(self, d: N) -> Option<N> {
let lhs: I129 = self.into_inner().into();
let rhs: I129 = d.into();
let negative = lhs.negative != rhs.negative;
lhs.value
.checked_div(rhs.value)
.and_then(|n| n.checked_div(Self::DIV.unique_saturated_into()))
.and_then(|value| from_i129(I129 { value, negative }))
}
/// Saturating division for integer type `N`. Equal to `self / d`.
///
/// Panics if `d == 0`. Returns `N::min` or `N::max` if the result does not fit in `N`.
fn saturating_div_int<N: FixedPointOperand>(self, d: N) -> N {
if d == N::zero() {
panic!("attempt to divide by zero")
}
self.checked_div_int(d).unwrap_or_else(|| to_bound(self.into_inner(), d))
}
/// Saturating multiplication for integer type `N`, adding the result back.
/// Equal to `self * n + n`.
///
/// Returns `N::min` or `N::max` if the multiplication or final result does not fit in `N`.
fn saturating_mul_acc_int<N: FixedPointOperand>(self, n: N) -> N {
if self.is_negative() && n > N::zero() {
n.saturating_sub(Self::zero().saturating_sub(self).saturating_mul_int(n))
} else {
self.saturating_mul_int(n).saturating_add(n)
}
}
/// Saturating absolute value.
///
/// Returns `Self::max` if `self == Self::min`.
fn saturating_abs(self) -> Self {
let inner = self.into_inner();
if inner >= Self::Inner::zero() {
self
} else {
Self::from_inner(inner.checked_neg().unwrap_or_else(|| Self::Inner::max_value()))
}
}
/// Takes the reciprocal (inverse). Equal to `1 / self`.
///
/// Returns `None` if `self = 0`.
fn reciprocal(self) -> Option<Self> {
Self::one().checked_div(&self)
}
/// Returns zero.
fn zero() -> Self {
Self::from_inner(Self::Inner::zero())
}
/// Checks if the number is zero.
fn is_zero(&self) -> bool {
self.into_inner() == Self::Inner::zero()
}
/// Returns one.
fn one() -> Self {
Self::from_inner(Self::DIV)
}
/// Checks if the number is one.
fn is_one(&self) -> bool {
self.into_inner() == Self::Inner::one()
}
/// Returns `true` if `self` is positive and `false` if the number is zero or negative.
fn is_positive(self) -> bool {
self.into_inner() > Self::Inner::zero()
}
/// Returns `true` if `self` is negative and `false` if the number is zero or positive.
fn is_negative(self) -> bool {
self.into_inner() < Self::Inner::zero()
}
/// Returns the integer part.
fn trunc(self) -> Self {
self.into_inner()
.checked_div(&Self::DIV)
.expect("panics only if DIV is zero, DIV is not zero; qed")
.checked_mul(&Self::DIV)
.map(|inner| Self::from_inner(inner))
.expect("can not overflow since fixed number is >= integer part")
}
/// Returns the fractional part.
///
/// Note: the returned fraction is non-negative for negative numbers,
/// except in the case where the integer part is zero.
fn frac(self) -> Self {
    let whole = self.trunc();
    let part = self.saturating_sub(whole);
    match whole == Self::zero() {
        // Keep the sign when there is no integer part.
        true => part,
        false => part.saturating_abs(),
    }
}
/// Returns the smallest integer greater than or equal to a number.
///
/// Saturates to `Self::max` (truncated) if the result does not fit.
fn ceil(self) -> Self {
    if self.is_negative() {
        // Truncation already rounds negative values toward zero, i.e. upward.
        self.trunc()
    } else if self.frac() == Self::zero() {
        // Already an integer.
        self
    } else {
        self.saturating_add(Self::one()).trunc()
    }
}
/// Returns the largest integer less than or equal to a number.
///
/// Saturates to `Self::min` (truncated) if the result does not fit.
fn floor(self) -> Self {
    match self.is_negative() {
        // Truncation rounds negatives toward zero (upward); step down first.
        true => self.saturating_sub(Self::one()).trunc(),
        false => self.trunc(),
    }
}
/// Returns the number rounded to the nearest integer. Rounds half-way cases away from 0.0.
///
/// Saturates to `Self::min` or `Self::max` (truncated) if the result does not fit.
fn round(self) -> Self {
    // First decimal digit of the fractional part decides the direction.
    let first_digit = self.frac().saturating_mul(Self::saturating_from_integer(10));
    if first_digit < Self::saturating_from_integer(5) {
        self.trunc()
    } else if self.is_positive() {
        self.saturating_add(Self::one()).trunc()
    } else {
        self.saturating_sub(Self::one()).trunc()
    }
}
}
/// Data type used as intermediate storage in some computations to avoid overflow.
///
/// Sign-magnitude representation: `value` holds the magnitude and `negative`
/// the sign, so every `u128` and every `i128` (including `i128::min_value()`,
/// whose magnitude does not fit in `i128`) is representable.
struct I129 {
// Magnitude (absolute value).
value: u128,
// Sign: `true` for negative numbers, `false` for zero or positive.
negative: bool,
}
impl<N: FixedPointOperand> From<N> for I129 {
    /// Converts `n` into its sign-magnitude form.
    fn from(n: N) -> I129 {
        let negative = n < N::zero();
        let value: u128 = if negative {
            // `checked_neg` fails only for `N::min_value()`, whose magnitude
            // is `N::max_value() + 1`.
            n.checked_neg()
                .map(|positive| positive.unique_saturated_into())
                .unwrap_or_else(|| N::max_value().unique_saturated_into().saturating_add(1))
        } else {
            n.unique_saturated_into()
        };
        I129 { value, negative }
    }
}
/// Transforms an `I129` to `N` if it is possible.
fn from_i129<N: FixedPointOperand>(n: I129) -> Option<N> {
    let max_plus_one: u128 = N::max_value().unique_saturated_into().saturating_add(1);
    // For signed `N`, `N::min_value()` has magnitude `N::max_value() + 1`,
    // which does not fit in `N` directly — handle it up front.
    if n.negative && N::min_value() < N::zero() && n.value == max_plus_one {
        return Some(N::min_value());
    }
    let magnitude: N = n.value.try_into().ok()?;
    if n.negative {
        magnitude.checked_neg()
    } else {
        Some(magnitude)
    }
}
/// Returns `R::max` if the sign of `n * m` is positive, `R::min` otherwise.
///
/// Zero counts as non-negative for the purpose of the sign comparison.
fn to_bound<N: FixedPointOperand, D: FixedPointOperand, R: Bounded>(n: N, m: D) -> R {
    let same_sign = (n < N::zero()) == (m < D::zero());
    if same_sign {
        R::max_value()
    } else {
        R::min_value()
    }
}
macro_rules! implement_fixed {
(
$name:ident,
$test_mod:ident,
$inner_type:ty,
$signed:tt,
$div:tt,
$title:expr $(,)?
) => {
/// A fixed point number representation in the range.
///
#[doc = $title]
#[derive(Encode, Decode, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct $name($inner_type);
impl From<$inner_type> for $name {
fn from(int: $inner_type) -> Self {
$name::saturating_from_integer(int)
}
}
impl<N: FixedPointOperand, D: FixedPointOperand> From<(N, D)> for $name {
fn from(r: (N, D)) -> Self {
$name::saturating_from_rational(r.0, r.1)
}
}
impl FixedPointNumber for $name {
type Inner = $inner_type;
const DIV: Self::Inner = $div;
const SIGNED: bool = $signed;
fn from_inner(inner: Self::Inner) -> Self {
Self(inner)
}
fn into_inner(self) -> Self::Inner {
self.0
}
}
impl $name {
/// const version of `FixedPointNumber::from_inner`.
pub const fn from_inner(inner: $inner_type) -> Self {
Self(inner)
}
#[cfg(any(feature = "std", test))]
pub fn from_fraction(x: f64) -> Self {
Self((x * (<Self as FixedPointNumber>::DIV as f64)) as $inner_type)
}
#[cfg(any(feature = "std", test))]
pub fn to_fraction(self) -> f64 {
self.0 as f64 / <Self as FixedPointNumber>::DIV as f64
}
}
impl Saturating for $name {
fn saturating_add(self, rhs: Self) -> Self {
Self(self.0.saturating_add(rhs.0))
}
fn saturating_sub(self, rhs: Self) -> Self {
Self(self.0.saturating_sub(rhs.0))
}
fn saturating_mul(self, rhs: Self) -> Self {
self.checked_mul(&rhs).unwrap_or_else(|| to_bound(self.0, rhs.0))
}
fn saturating_pow(self, exp: usize) -> Self {
if exp == 0 {
return Self::saturating_from_integer(1);
}
let exp = exp as u32;
let msb_pos = 32 - exp.leading_zeros();
let mut result = Self::saturating_from_integer(1);
let mut pow_val = self;
for i in 0..msb_pos {
if ((1 << i) & exp) > 0 {
result = result.saturating_mul(pow_val);
}
pow_val = pow_val.saturating_mul(pow_val);
}
result
}
}
impl ops::Neg for $name {
type Output = Self;
fn neg(self) -> Self::Output {
Self(<Self as FixedPointNumber>::Inner::zero() - self.0)
}
}
impl ops::Add for $name {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self(self.0 + rhs.0)
}
}
impl ops::Sub for $name {
type Output = Self;
fn sub(self, rhs: Self) -> Self::Output {
Self(self.0 - rhs.0)
}
}
impl ops::Mul for $name {
type Output = Self;
fn mul(self, rhs: Self) -> Self::Output {
self.checked_mul(&rhs)
.unwrap_or_else(|| panic!("attempt to multiply with overflow"))
}
}
impl ops::Div for $name {
type Output = Self;
fn div(self, rhs: Self) -> Self::Output {
if rhs.0 == 0 {
panic!("attempt to divide by zero")
}
self.checked_div(&rhs)
.unwrap_or_else(|| panic!("attempt to divide with overflow"))
}
}
impl CheckedSub for $name {
fn checked_sub(&self, rhs: &Self) -> Option<Self> {
self.0.checked_sub(rhs.0).map(Self)
}
}
impl CheckedAdd for $name {
fn checked_add(&self, rhs: &Self) -> Option<Self> {
self.0.checked_add(rhs.0).map(Self)
}
}
impl CheckedDiv for $name {
fn checked_div(&self, other: &Self) -> Option<Self> {
if other.0 == 0 {
return None
}
let lhs: I129 = self.0.into();
let rhs: I129 = other.0.into();
let negative = lhs.negative != rhs.negative;
multiply_by_rational(lhs.value, Self::DIV as u128, rhs.value).ok()
.and_then(|value| from_i129(I129 { value, negative }))
.map(Self)
}
}
impl CheckedMul for $name {
fn checked_mul(&self, other: &Self) -> Option<Self> {
let lhs: I129 = self.0.into();
let rhs: I129 = other.0.into();
let negative = lhs.negative != rhs.negative;
multiply_by_rational(lhs.value, rhs.value, Self::DIV as u128).ok()
.and_then(|value| from_i129(I129 { value, negative }))
.map(Self)
}
}
impl Bounded for $name {
fn min_value() -> Self {
Self(<Self as FixedPointNumber>::Inner::min_value())
}
fn max_value() -> Self {
Self(<Self as FixedPointNumber>::Inner::max_value())
}
}
impl sp_std::fmt::Debug for $name {
#[cfg(feature = "std")]
fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result {
let integral = {
let int = self.0 / Self::accuracy();
let signum_for_zero = if int == 0 && self.is_negative() { "-" } else { "" };
format!("{}{}", signum_for_zero, int)
};
let precision = (Self::accuracy() as f64).log10() as usize;
let fractional = format!("{:0>weight$}", ((self.0 % Self::accuracy()) as i128).abs(), weight=precision);
write!(f, "{}({}.{})", stringify!($name), integral, fractional)
}
#[cfg(not(feature = "std"))]
fn fmt(&self, _: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result {
Ok(())
}
}
impl<P: PerThing> From<P> for $name {
fn from(p: P) -> Self {
let accuracy = P::ACCURACY.saturated_into();
let value = p.deconstruct().saturated_into();
$name::saturating_from_rational(value, accuracy)
}
}
#[cfg(feature = "std")]
impl sp_std::fmt::Display for $name {
fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result {
write!(f, "{}", self.0)
}
}
#[cfg(feature = "std")]
impl sp_std::str::FromStr for $name {
type Err = &'static str;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let inner: <Self as FixedPointNumber>::Inner = s.parse()
.map_err(|_| "invalid string input for fixed point number")?;
Ok(Self::from_inner(inner))
}
}
// Manual impl `Serialize` as serde_json does not support i128.
// TODO: remove impl if issue https://github.com/serde-rs/json/issues/548 fixed.
#[cfg(feature = "std")]
impl Serialize for $name {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
// Manual impl `Deserialize` as serde_json does not support i128.
// TODO: remove impl if issue https://github.com/serde-rs/json/issues/548 fixed.
#[cfg(feature = "std")]
impl<'de> Deserialize<'de> for $name {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
use sp_std::str::FromStr;
let s = String::deserialize(deserializer)?;
$name::from_str(&s).map_err(|err_str| de::Error::custom(err_str))
}
}
#[cfg(test)]
mod $test_mod {
use super::*;
use crate::{Perbill, Percent, Permill, Perquintill};
fn max() -> $name {
$name::max_value()
}
fn min() -> $name {
$name::min_value()
}
fn precision() -> usize {
($name::accuracy() as f64).log10() as usize
}
#[test]
fn macro_preconditions() {
assert!($name::DIV > 0);
}
#[test]
fn from_i129_works() {
let a = I129 {
value: 1,
negative: true,
};
// Can't convert negative number to unsigned.
assert_eq!(from_i129::<u128>(a), None);
let a = I129 {
value: u128::max_value() - 1,
negative: false,
};
// Max - 1 value fits.
assert_eq!(from_i129::<u128>(a), Some(u128::max_value() - 1));
let a = I129 {
value: u128::max_value(),
negative: false,
};
// Max value fits.
assert_eq!(from_i129::<u128>(a), Some(u128::max_value()));
let a = I129 {
value: i128::max_value() as u128 + 1,
negative: true,
};
// Min value fits.
assert_eq!(from_i129::<i128>(a), Some(i128::min_value()));
let a = I129 {
value: i128::max_value() as u128 + 1,
negative: false,
};
// Max + 1 does not fit.
assert_eq!(from_i129::<i128>(a), None);
let a = I129 {
value: i128::max_value() as u128,
negative: false,
};
// Max value fits.
assert_eq!(from_i129::<i128>(a), Some(i128::max_value()));
}
#[test]
fn to_bound_works() {
let a = 1i32;
let b = 1i32;
// Pos + Pos => Max.
assert_eq!(to_bound::<_, _, i32>(a, b), i32::max_value());
let a = -1i32;
let b = -1i32;
// Neg + Neg => Max.
assert_eq!(to_bound::<_, _, i32>(a, b), i32::max_value());
let a = 1i32;
let b = -1i32;
// Pos + Neg => Min.
assert_eq!(to_bound::<_, _, i32>(a, b), i32::min_value());
let a = -1i32;
let b = 1i32;
// Neg + Pos => Min.
assert_eq!(to_bound::<_, _, i32>(a, b), i32::min_value());
let a = 1i32;
let b = -1i32;
// Pos + Neg => Min (unsigned).
assert_eq!(to_bound::<_, _, u32>(a, b), 0);
}
#[test]
fn op_neg_works() {
let a = $name::zero();
let b = -a;
// Zero.
assert_eq!(a, b);
if $name::SIGNED {
let a = $name::saturating_from_integer(5);
let b = -a;
// Positive.
assert_eq!($name::saturating_from_integer(-5), b);
let a = $name::saturating_from_integer(-5);
let b = -a;
// Negative
assert_eq!($name::saturating_from_integer(5), b);
let a = $name::max_value();
let b = -a;
// Max.
assert_eq!($name::min_value() + $name::from_inner(1), b);
let a = $name::min_value() + $name::from_inner(1);
let b = -a;
// Min.
assert_eq!($name::max_value(), b);
}
}
#[test]
fn op_checked_add_overflow_works() {
let a = $name::max_value();
let b = 1.into();
assert!(a.checked_add(&b).is_none());
}
#[test]
fn op_add_works() {
let a = $name::saturating_from_rational(5, 2);
let b = $name::saturating_from_rational(1, 2);
// Positive case: 6/2 = 3.
assert_eq!($name::saturating_from_integer(3), a + b);
if $name::SIGNED {
// Negative case: 4/2 = 2.
let b = $name::saturating_from_rational(1, -2);
assert_eq!($name::saturating_from_integer(2), a + b);
}
}
#[test]
fn op_checked_sub_underflow_works() {
let a = $name::min_value();
let b = 1.into();
assert!(a.checked_sub(&b).is_none());
}
#[test]
fn op_sub_works() {
let a = $name::saturating_from_rational(5, 2);
let b = $name::saturating_from_rational(1, 2);
assert_eq!($name::saturating_from_integer(2), a - b);
assert_eq!($name::saturating_from_integer(-2), b.saturating_sub(a));
}
#[test]
fn op_checked_mul_overflow_works() {
let a = $name::max_value();
let b = 2.into();
assert!(a.checked_mul(&b).is_none());
}
#[test]
fn op_mul_works() {
let a = $name::saturating_from_integer(42);
let b = $name::saturating_from_integer(2);
assert_eq!($name::saturating_from_integer(84), a * b);
let a = $name::saturating_from_integer(42);
let b = $name::saturating_from_integer(-2);
assert_eq!($name::saturating_from_integer(-84), a * b);
}
#[test]
#[should_panic(expected = "attempt to divide by zero")]
fn op_div_panics_on_zero_divisor() {
let a = $name::saturating_from_integer(1);
let b = 0.into();
let _c = a / b;
}
#[test]
fn op_checked_div_overflow_works() {
if $name::SIGNED {
let a = $name::min_value();
let b = $name::zero().saturating_sub($name::one());
assert!(a.checked_div(&b).is_none());
}
}
#[test]
fn op_div_works() {
let a = $name::saturating_from_integer(42);
let b = $name::saturating_from_integer(2);
assert_eq!($name::saturating_from_integer(21), a / b);
if $name::SIGNED {
let a = $name::saturating_from_integer(42);
let b = $name::saturating_from_integer(-2);
assert_eq!($name::saturating_from_integer(-21), a / b);
}
}
#[test]
fn saturating_from_integer_works() {
let inner_max = <$name as FixedPointNumber>::Inner::max_value();
let inner_min = <$name as FixedPointNumber>::Inner::min_value();
let accuracy = $name::accuracy();
// Cases where integer fits.
let a = $name::saturating_from_integer(42);
assert_eq!(a.into_inner(), 42 * accuracy);
let a = $name::saturating_from_integer(-42);
assert_eq!(a.into_inner(), 0.saturating_sub(42 * accuracy));
// Max/min integers that fit.
let a = $name::saturating_from_integer(inner_max / accuracy);
assert_eq!(a.into_inner(), (inner_max / accuracy) * accuracy);
let a = $name::saturating_from_integer(inner_min / accuracy);
assert_eq!(a.into_inner(), (inner_min / accuracy) * accuracy);
// Cases where integer doesn't fit, so it saturates.
let a = $name::saturating_from_integer(inner_max / accuracy + 1);
assert_eq!(a.into_inner(), inner_max);
let a = $name::saturating_from_integer((inner_min / accuracy).saturating_sub(1));
assert_eq!(a.into_inner(), inner_min);
}
#[test]
fn checked_from_integer_works() {
let inner_max = <$name as FixedPointNumber>::Inner::max_value();
let inner_min = <$name as FixedPointNumber>::Inner::min_value();
let accuracy = $name::accuracy();
// Case where integer fits.
let a = $name::checked_from_integer(42)
.expect("42 * accuracy <= inner_max; qed");
assert_eq!(a.into_inner(), 42 * accuracy);
// Max integer that fit.
let a = $name::checked_from_integer(inner_max / accuracy)
.expect("(inner_max / accuracy) * accuracy <= inner_max; qed");
assert_eq!(a.into_inner(), (inner_max / accuracy) * accuracy);
// Case where integer doesn't fit, so it returns `None`.
let a = $name::checked_from_integer(inner_max / accuracy + 1);
assert_eq!(a, None);
if $name::SIGNED {
// Case where integer fits.
let a = $name::checked_from_integer(0.saturating_sub(42))
.expect("-42 * accuracy >= inner_min; qed");
assert_eq!(a.into_inner(), 0 - 42 * accuracy);
// Min integer that fit.
let a = $name::checked_from_integer(inner_min / accuracy)
.expect("(inner_min / accuracy) * accuracy <= inner_min; qed");
assert_eq!(a.into_inner(), (inner_min / accuracy) * accuracy);
// Case where integer doesn't fit, so it returns `None`.
let a = $name::checked_from_integer(inner_min / accuracy - 1);
assert_eq!(a, None);
}
}
#[test]
fn from_inner_works() {
let inner_max = <$name as FixedPointNumber>::Inner::max_value();
let inner_min = <$name as FixedPointNumber>::Inner::min_value();
assert_eq!(max(), $name::from_inner(inner_max));
assert_eq!(min(), $name::from_inner(inner_min));
}
#[test]
#[should_panic(expected = "attempt to divide by zero")]
fn saturating_from_rational_panics_on_zero_divisor() {
let _ = $name::saturating_from_rational(1, 0);
}
#[test]
fn saturating_from_rational_works() {
let inner_max = <$name as FixedPointNumber>::Inner::max_value();
let inner_min = <$name as FixedPointNumber>::Inner::min_value();
let accuracy = $name::accuracy();
let a = $name::saturating_from_rational(5, 2);
// Positive case: 2.5
assert_eq!(a.into_inner(), 25 * accuracy / 10);
// Max - 1.
let a = $name::saturating_from_rational(inner_max - 1, accuracy);
assert_eq!(a.into_inner(), inner_max - 1);
// Min + 1.
let a = $name::saturating_from_rational(inner_min + 1, accuracy);
assert_eq!(a.into_inner(), inner_min + 1);
// Max.
let a = $name::saturating_from_rational(inner_max, accuracy);
assert_eq!(a.into_inner(), inner_max);
// Min.
let a = $name::saturating_from_rational(inner_min, accuracy);
assert_eq!(a.into_inner(), inner_min);
// Zero.
let a = $name::saturating_from_rational(0, 1);
assert_eq!(a.into_inner(), 0);
if $name::SIGNED {
// Negative case: -2.5
let a = $name::saturating_from_rational(-5, 2);
assert_eq!(a.into_inner(), 0 - 25 * accuracy / 10);
// Other negative case: -2.5
let a = $name::saturating_from_rational(5, -2);
assert_eq!(a.into_inner(), 0 - 25 * accuracy / 10);
// Other positive case: 2.5
let a = $name::saturating_from_rational(-5, -2);
assert_eq!(a.into_inner(), 25 * accuracy / 10);
// Max + 1, saturates.
let a = $name::saturating_from_rational(inner_max as u128 + 1, accuracy);
assert_eq!(a.into_inner(), inner_max);
// Min - 1, saturates.
let a = $name::saturating_from_rational(inner_max as u128 + 2, 0 - accuracy);
assert_eq!(a.into_inner(), inner_min);
let a = $name::saturating_from_rational(inner_max, 0 - accuracy);
assert_eq!(a.into_inner(), 0 - inner_max);
let a = $name::saturating_from_rational(inner_min, 0 - accuracy);
assert_eq!(a.into_inner(), inner_max);
let a = $name::saturating_from_rational(inner_min + 1, 0 - accuracy);
assert_eq!(a.into_inner(), inner_max);
let a = $name::saturating_from_rational(inner_min, 0 - 1);
assert_eq!(a.into_inner(), inner_max);
let a = $name::saturating_from_rational(inner_max, 0 - 1);
assert_eq!(a.into_inner(), inner_min);
let a = $name::saturating_from_rational(inner_max, 0 - inner_max);
assert_eq!(a.into_inner(), 0 - accuracy);
let a = $name::saturating_from_rational(0 - inner_max, inner_max);
assert_eq!(a.into_inner(), 0 - accuracy);
let a = $name::saturating_from_rational(inner_max, 0 - 3 * accuracy);
assert_eq!(a.into_inner(), 0 - inner_max / 3);
let a = $name::saturating_from_rational(inner_min, 0 - accuracy / 3);
assert_eq!(a.into_inner(), inner_max);
let a = $name::saturating_from_rational(1, 0 - accuracy);
assert_eq!(a.into_inner(), 0.saturating_sub(1));
let a = $name::saturating_from_rational(inner_min, inner_min);
assert_eq!(a.into_inner(), accuracy);
// Out of accuracy.
let a = $name::saturating_from_rational(1, 0 - accuracy - 1);
assert_eq!(a.into_inner(), 0);
}
let a = $name::saturating_from_rational(inner_max - 1, accuracy);
assert_eq!(a.into_inner(), inner_max - 1);
let a = $name::saturating_from_rational(inner_min + 1, accuracy);
assert_eq!(a.into_inner(), inner_min + 1);
let a = $name::saturating_from_rational(inner_max, 1);
assert_eq!(a.into_inner(), inner_max);
let a = $name::saturating_from_rational(inner_min, 1);
assert_eq!(a.into_inner(), inner_min);
let a = $name::saturating_from_rational(inner_max, inner_max);
assert_eq!(a.into_inner(), accuracy);
let a = $name::saturating_from_rational(inner_max, 3 * accuracy);
assert_eq!(a.into_inner(), inner_max / 3);
let a = $name::saturating_from_rational(inner_min, 2 * accuracy);
assert_eq!(a.into_inner(), inner_min / 2);
let a = $name::saturating_from_rational(inner_min, accuracy / 3);
assert_eq!(a.into_inner(), inner_min);
let a = $name::saturating_from_rational(1, accuracy);
assert_eq!(a.into_inner(), 1);
// Out of accuracy.
let a = $name::saturating_from_rational(1, accuracy + 1);
assert_eq!(a.into_inner(), 0);
}
#[test]
fn checked_from_rational_works() {
let inner_max = <$name as FixedPointNumber>::Inner::max_value();
let inner_min = <$name as FixedPointNumber>::Inner::min_value();
let accuracy = $name::accuracy();
// Divide by zero => None.
let a = $name::checked_from_rational(1, 0);
assert_eq!(a, None);
// Max - 1.
let a = $name::checked_from_rational(inner_max - 1, accuracy).unwrap();
assert_eq!(a.into_inner(), inner_max - 1);
// Min + 1.
let a = $name::checked_from_rational(inner_min + 1, accuracy).unwrap();
assert_eq!(a.into_inner(), inner_min + 1);
// Max.
let a = $name::checked_from_rational(inner_max, accuracy).unwrap();
assert_eq!(a.into_inner(), inner_max);
// Min.
let a = $name::checked_from_rational(inner_min, accuracy).unwrap();
assert_eq!(a.into_inner(), inner_min);
// Max + 1 => Overflow => None.
let a = $name::checked_from_rational(inner_min, 0.saturating_sub(accuracy));
assert_eq!(a, None);
if $name::SIGNED {
// Min - 1 => Underflow => None.
let a = $name::checked_from_rational(inner_max as u128 + 2, 0.saturating_sub(accuracy));
assert_eq!(a, None);
let a = $name::checked_from_rational(inner_max, 0 - 3 * accuracy).unwrap();
assert_eq!(a.into_inner(), 0 - inner_max / 3);
let a = $name::checked_from_rational(inner_min, 0 - accuracy / 3);
assert_eq!(a, None);
let a = $name::checked_from_rational(1, 0 - accuracy).unwrap();
assert_eq!(a.into_inner(), 0.saturating_sub(1));
let a = $name::checked_from_rational(1, 0 - accuracy - 1).unwrap();
assert_eq!(a.into_inner(), 0);
let a = $name::checked_from_rational(inner_min, accuracy / 3);
assert_eq!(a, None);
}
let a = $name::checked_from_rational(inner_max, 3 * accuracy).unwrap();
assert_eq!(a.into_inner(), inner_max / 3);
let a = $name::checked_from_rational(inner_min, 2 * accuracy).unwrap();
assert_eq!(a.into_inner(), inner_min / 2);
let a = $name::checked_from_rational(1, accuracy).unwrap();
assert_eq!(a.into_inner(), 1);
let a = $name::checked_from_rational(1, accuracy + 1).unwrap();
assert_eq!(a.into_inner(), 0);
}
#[test]
fn checked_mul_int_works() {
let a = $name::saturating_from_integer(2);
// Max - 1.
assert_eq!(a.checked_mul_int((i128::max_value() - 1) / 2), Some(i128::max_value() - 1));
// Max.
assert_eq!(a.checked_mul_int(i128::max_value() / 2), Some(i128::max_value() - 1));
// Max + 1 => None.
assert_eq!(a.checked_mul_int(i128::max_value() / 2 + 1), None);
if $name::SIGNED {
// Min - 1.
assert_eq!(a.checked_mul_int((i128::min_value() + 1) / 2), Some(i128::min_value() + 2));
// Min.
assert_eq!(a.checked_mul_int(i128::min_value() / 2), Some(i128::min_value()));
// Min + 1 => None.
assert_eq!(a.checked_mul_int(i128::min_value() / 2 - 1), None);
let b = $name::saturating_from_rational(1, -2);
assert_eq!(b.checked_mul_int(42i128), Some(-21));
assert_eq!(b.checked_mul_int(u128::max_value()), None);
assert_eq!(b.checked_mul_int(i128::max_value()), Some(i128::max_value() / -2));
assert_eq!(b.checked_mul_int(i128::min_value()), Some(i128::min_value() / -2));
}
let a = $name::saturating_from_rational(1, 2);
assert_eq!(a.checked_mul_int(42i128), Some(21));
assert_eq!(a.checked_mul_int(i128::max_value()), Some(i128::max_value() / 2));
assert_eq!(a.checked_mul_int(i128::min_value()), Some(i128::min_value() / 2));
let c = $name::saturating_from_integer(255);
assert_eq!(c.checked_mul_int(2i8), None);
assert_eq!(c.checked_mul_int(2i128), Some(510));
assert_eq!(c.checked_mul_int(i128::max_value()), None);
assert_eq!(c.checked_mul_int(i128::min_value()), None);
}
#[test]
fn saturating_mul_int_works() {
let a = $name::saturating_from_integer(2);
// Max - 1.
assert_eq!(a.saturating_mul_int((i128::max_value() - 1) / 2), i128::max_value() - 1);
// Max.
assert_eq!(a.saturating_mul_int(i128::max_value() / 2), i128::max_value() - 1);
// Max + 1 => saturates to max.
assert_eq!(a.saturating_mul_int(i128::max_value() / 2 + 1), i128::max_value());
// Min - 1.
assert_eq!(a.saturating_mul_int((i128::min_value() + 1) / 2), i128::min_value() + 2);
// Min.
assert_eq!(a.saturating_mul_int(i128::min_value() / 2), i128::min_value());
// Min + 1 => saturates to min.
assert_eq!(a.saturating_mul_int(i128::min_value() / 2 - 1), i128::min_value());
if $name::SIGNED {
let b = $name::saturating_from_rational(1, -2);
assert_eq!(b.saturating_mul_int(42i32), -21);
assert_eq!(b.saturating_mul_int(i128::max_value()), i128::max_value() / -2);
assert_eq!(b.saturating_mul_int(i128::min_value()), i128::min_value() / -2);
assert_eq!(b.saturating_mul_int(u128::max_value()), u128::min_value());
}
let a = $name::saturating_from_rational(1, 2);
assert_eq!(a.saturating_mul_int(42i32), 21);
assert_eq!(a.saturating_mul_int(i128::max_value()), i128::max_value() / 2);
assert_eq!(a.saturating_mul_int(i128::min_value()), i128::min_value() / 2);
let c = $name::saturating_from_integer(255);
assert_eq!(c.saturating_mul_int(2i8), i8::max_value());
assert_eq!(c.saturating_mul_int(-2i8), i8::min_value());
assert_eq!(c.saturating_mul_int(i128::max_value()), i128::max_value());
assert_eq!(c.saturating_mul_int(i128::min_value()), i128::min_value());
}
#[test]
fn checked_mul_works() {
let inner_max = <$name as FixedPointNumber>::Inner::max_value();
let inner_min = <$name as FixedPointNumber>::Inner::min_value();
let a = $name::saturating_from_integer(2);
// Max - 1.
let b = $name::from_inner(inner_max - 1);
assert_eq!(a.checked_mul(&(b/2.into())), Some(b));
// Max.
let c = $name::from_inner(inner_max);
assert_eq!(a.checked_mul(&(c/2.into())), Some(b));
// Max + 1 => None.
let e = $name::from_inner(1);
assert_eq!(a.checked_mul(&(c/2.into()+e)), None);
if $name::SIGNED {
// Min + 1.
let b = $name::from_inner(inner_min + 1) / 2.into();
let c = $name::from_inner(inner_min + 2);
assert_eq!(a.checked_mul(&b), Some(c));
// Min.
let b = $name::from_inner(inner_min) / 2.into();
let c = $name::from_inner(inner_min);
assert_eq!(a.checked_mul(&b), Some(c));
// Min - 1 => None.
let b = $name::from_inner(inner_min) / 2.into() - $name::from_inner(1);
assert_eq!(a.checked_mul(&b), None);
let c = $name::saturating_from_integer(255);
let b = $name::saturating_from_rational(1, -2);
assert_eq!(b.checked_mul(&42.into()), Some(0.saturating_sub(21).into()));
assert_eq!(b.checked_mul(&$name::max_value()), $name::max_value().checked_div(&0.saturating_sub(2).into()));
assert_eq!(b.checked_mul(&$name::min_value()), $name::min_value().checked_div(&0.saturating_sub(2).into()));
assert_eq!(c.checked_mul(&$name::min_value()), None);
}
let a = $name::saturating_from_rational(1, 2);
let c = $name::saturating_from_integer(255);
assert_eq!(a.checked_mul(&42.into()), Some(21.into()));
assert_eq!(c.checked_mul(&2.into()), Some(510.into()));
assert_eq!(c.checked_mul(&$name::max_value()), None);
assert_eq!(a.checked_mul(&$name::max_value()), $name::max_value().checked_div(&2.into()));
assert_eq!(a.checked_mul(&$name::min_value()), $name::min_value().checked_div(&2.into()));
}
#[test]
fn checked_div_int_works() {
let inner_max = <$name as FixedPointNumber>::Inner::max_value();
let inner_min = <$name as FixedPointNumber>::Inner::min_value();
let accuracy = $name::accuracy();
let a = $name::from_inner(inner_max);
let b = $name::from_inner(inner_min);
let c = $name::zero();
let d = $name::one();
let e = $name::saturating_from_integer(6);
let f = $name::saturating_from_integer(5);
assert_eq!(e.checked_div_int(2.into()), Some(3));
assert_eq!(f.checked_div_int(2.into()), Some(2));
assert_eq!(a.checked_div_int(i128::max_value()), Some(0));
assert_eq!(a.checked_div_int(2), Some(inner_max / (2 * accuracy)));
assert_eq!(a.checked_div_int(inner_max / accuracy), Some(1));
assert_eq!(a.checked_div_int(1i8), None);
if b < c {
// Not executed by unsigned inners.
assert_eq!(a.checked_div_int(0.saturating_sub(2)), Some(0.saturating_sub(inner_max / (2 * accuracy))));
assert_eq!(a.checked_div_int(0.saturating_sub(inner_max / accuracy)), Some(0.saturating_sub(1)));
assert_eq!(b.checked_div_int(i128::min_value()), Some(0));
assert_eq!(b.checked_div_int(inner_min / accuracy), Some(1));
assert_eq!(b.checked_div_int(1i8), None);
assert_eq!(b.checked_div_int(0.saturating_sub(2)), Some(0.saturating_sub(inner_min / (2 * accuracy))));
assert_eq!(b.checked_div_int(0.saturating_sub(inner_min / accuracy)), Some(0.saturating_sub(1)));
assert_eq!(c.checked_div_int(i128::min_value()), Some(0));
assert_eq!(d.checked_div_int(i32::min_value()), Some(0));
}
assert_eq!(b.checked_div_int(2), Some(inner_min / (2 * accuracy)));
assert_eq!(c.checked_div_int(1), Some(0));
assert_eq!(c.checked_div_int(i128::max_value()), Some(0));
assert_eq!(c.checked_div_int(1i8), Some(0));
assert_eq!(d.checked_div_int(1), Some(1));
assert_eq!(d.checked_div_int(i32::max_value()), Some(0));
assert_eq!(d.checked_div_int(1i8), Some(1));
assert_eq!(a.checked_div_int(0), None);
assert_eq!(b.checked_div_int(0), None);
assert_eq!(c.checked_div_int(0), None);
assert_eq!(d.checked_div_int(0), None);
}
#[test]
#[should_panic(expected = "attempt to divide by zero")]
fn saturating_div_int_panics_when_divisor_is_zero() {
let _ = $name::one().saturating_div_int(0);
}
#[test]
fn saturating_div_int_works() {
let inner_max = <$name as FixedPointNumber>::Inner::max_value();
let inner_min = <$name as FixedPointNumber>::Inner::min_value();
let accuracy = $name::accuracy();
let a = $name::saturating_from_integer(5);
assert_eq!(a.saturating_div_int(2), 2);
let a = $name::min_value();
assert_eq!(a.saturating_div_int(1i128), (inner_min / accuracy) as i128);
if $name::SIGNED {
let a = $name::saturating_from_integer(5);
assert_eq!(a.saturating_div_int(-2), -2);
let a = $name::min_value();
assert_eq!(a.saturating_div_int(-1i128), (inner_max / accuracy) as i128);
}
}
#[test]
fn saturating_abs_works() {
let inner_max = <$name as FixedPointNumber>::Inner::max_value();
let inner_min = <$name as FixedPointNumber>::Inner::min_value();
assert_eq!($name::from_inner(inner_max).saturating_abs(), $name::max_value());
assert_eq!($name::zero().saturating_abs(), 0.into());
if $name::SIGNED {
assert_eq!($name::from_inner(inner_min).saturating_abs(), $name::max_value());
assert_eq!($name::saturating_from_rational(-1, 2).saturating_abs(), (1, 2).into());
}
}
#[test]
fn saturating_mul_acc_int_works() {
assert_eq!($name::zero().saturating_mul_acc_int(42i8), 42i8);
assert_eq!($name::one().saturating_mul_acc_int(42i8), 2 * 42i8);
assert_eq!($name::one().saturating_mul_acc_int(i128::max_value()), i128::max_value());
assert_eq!($name::one().saturating_mul_acc_int(i128::min_value()), i128::min_value());
assert_eq!($name::one().saturating_mul_acc_int(u128::max_value() / 2), u128::max_value() - 1);
assert_eq!($name::one().saturating_mul_acc_int(u128::min_value()), u128::min_value());
if $name::SIGNED {
let a = $name::saturating_from_rational(-1, 2);
assert_eq!(a.saturating_mul_acc_int(42i8), 21i8);
assert_eq!(a.saturating_mul_acc_int(42u8), 21u8);
assert_eq!(a.saturating_mul_acc_int(u128::max_value() - 1), u128::max_value() / 2);
}
}
#[test]
fn saturating_pow_should_work() {
assert_eq!($name::saturating_from_integer(2).saturating_pow(0), $name::saturating_from_integer(1));
assert_eq!($name::saturating_from_integer(2).saturating_pow(1), $name::saturating_from_integer(2));
assert_eq!($name::saturating_from_integer(2).saturating_pow(2), $name::saturating_from_integer(4));
assert_eq!($name::saturating_from_integer(2).saturating_pow(3), $name::saturating_from_integer(8));
assert_eq!($name::saturating_from_integer(2).saturating_pow(50),
$name::saturating_from_integer(1125899906842624i64));
assert_eq!($name::saturating_from_integer(1).saturating_pow(1000), (1).into());
assert_eq!($name::saturating_from_integer(1).saturating_pow(usize::max_value()), (1).into());
if $name::SIGNED {
// Saturating.
assert_eq!($name::saturating_from_integer(2).saturating_pow(68), $name::max_value());
assert_eq!($name::saturating_from_integer(-1).saturating_pow(1000), (1).into());
assert_eq!($name::saturating_from_integer(-1).saturating_pow(1001), 0.saturating_sub(1).into());
assert_eq!($name::saturating_from_integer(-1).saturating_pow(usize::max_value()), 0.saturating_sub(1).into());
assert_eq!($name::saturating_from_integer(-1).saturating_pow(usize::max_value() - 1), (1).into());
}
assert_eq!($name::saturating_from_integer(114209).saturating_pow(5), $name::max_value());
assert_eq!($name::saturating_from_integer(1).saturating_pow(usize::max_value()), (1).into());
assert_eq!($name::saturating_from_integer(0).saturating_pow(usize::max_value()), (0).into());
assert_eq!($name::saturating_from_integer(2).saturating_pow(usize::max_value()), $name::max_value());
}
#[test]
fn checked_div_works() {
let inner_max = <$name as FixedPointNumber>::Inner::max_value();
let inner_min = <$name as FixedPointNumber>::Inner::min_value();
// a = largest representable value, b = smallest, c = 0, d = 1.
let a = $name::from_inner(inner_max);
let b = $name::from_inner(inner_min);
let c = $name::zero();
let d = $name::one();
let e = $name::saturating_from_integer(6);
let f = $name::saturating_from_integer(5);
// Exact and fractional quotients ((5, 2) is the rational 5/2).
assert_eq!(e.checked_div(&2.into()), Some(3.into()));
assert_eq!(f.checked_div(&2.into()), Some((5, 2).into()));
// Division at the representable limits.
assert_eq!(a.checked_div(&inner_max.into()), Some(1.into()));
assert_eq!(a.checked_div(&2.into()), Some($name::from_inner(inner_max / 2)));
assert_eq!(a.checked_div(&$name::max_value()), Some(1.into()));
assert_eq!(a.checked_div(&d), Some(a));
if b < c {
// Not executed by unsigned inners.
assert_eq!(a.checked_div(&0.saturating_sub(2).into()), Some($name::from_inner(0.saturating_sub(inner_max / 2))));
assert_eq!(a.checked_div(&-$name::max_value()), Some(0.saturating_sub(1).into()));
assert_eq!(b.checked_div(&0.saturating_sub(2).into()), Some($name::from_inner(0.saturating_sub(inner_min / 2))));
assert_eq!(c.checked_div(&$name::max_value()), Some(0.into()));
assert_eq!(b.checked_div(&b), Some($name::one()));
}
assert_eq!(b.checked_div(&2.into()), Some($name::from_inner(inner_min / 2)));
assert_eq!(b.checked_div(&a), Some(0.saturating_sub(1).into()));
assert_eq!(c.checked_div(&1.into()), Some(0.into()));
assert_eq!(d.checked_div(&1.into()), Some(1.into()));
// Dividing by one is the identity.
assert_eq!(a.checked_div(&$name::one()), Some(a));
assert_eq!(b.checked_div(&$name::one()), Some(b));
assert_eq!(c.checked_div(&$name::one()), Some(c));
assert_eq!(d.checked_div(&$name::one()), Some(d));
// Division by zero yields None rather than panicking.
assert_eq!(a.checked_div(&$name::zero()), None);
assert_eq!(b.checked_div(&$name::zero()), None);
assert_eq!(c.checked_div(&$name::zero()), None);
assert_eq!(d.checked_div(&$name::zero()), None);
}
#[test]
fn is_positive_negative_works() {
// One is positive and not negative.
let one = $name::one();
assert!(one.is_positive());
assert!(!one.is_negative());
// Zero is neither positive nor negative.
let zero = $name::zero();
assert!(!zero.is_positive());
assert!(!zero.is_negative());
// NOTE(review): sibling tests gate signed-only cases on `$name::SIGNED`;
// this one uses the `$signed` macro argument — presumably equivalent, confirm.
if $signed {
let minus_one = $name::saturating_from_integer(-1);
assert!(minus_one.is_negative());
assert!(!minus_one.is_positive());
}
}
#[test]
fn trunc_works() {
// trunc() drops the fractional part: 2.5 -> 2.
let n = $name::saturating_from_rational(5, 2).trunc();
assert_eq!(n, $name::saturating_from_integer(2));
if $name::SIGNED {
// Truncation goes toward zero: -2.5 -> -2.
let n = $name::saturating_from_rational(-5, 2).trunc();
assert_eq!(n, $name::saturating_from_integer(-2));
}
}
#[test]
fn frac_works() {
// For non-negative values, n == trunc(n) + frac(n).
let n = $name::saturating_from_rational(5, 2);
let i = n.trunc();
let f = n.frac();
assert_eq!(n, i + f);
// frac(2.5) * 10 == 5.
let n = $name::saturating_from_rational(5, 2)
.frac()
.saturating_mul(10.into());
assert_eq!(n, 5.into());
// frac(0.5) * 10 == 5.
let n = $name::saturating_from_rational(1, 2)
.frac()
.saturating_mul(10.into());
assert_eq!(n, 5.into());
if $name::SIGNED {
// For negative values the identity flips: n == trunc(n) - frac(n).
let n = $name::saturating_from_rational(-5, 2);
let i = n.trunc();
let f = n.frac();
assert_eq!(n, i - f);
// The sign is attached to the integer part unless it is zero.
let n = $name::saturating_from_rational(-5, 2)
.frac()
.saturating_mul(10.into());
assert_eq!(n, 5.into());
// ... so when the integer part is zero, frac carries the sign: -0.5 * 10 == -5.
let n = $name::saturating_from_rational(-1, 2)
.frac()
.saturating_mul(10.into());
assert_eq!(n, 0.saturating_sub(5).into());
}
}
#[test]
fn ceil_works() {
// ceil rounds toward positive infinity: 2.5 -> 3.
let n = $name::saturating_from_rational(5, 2);
assert_eq!(n.ceil(), 3.into());
// -2.5 -> -2 (for unsigned inners both sides presumably saturate to zero,
// which is why this line is not gated on SIGNED — confirm).
let n = $name::saturating_from_rational(-5, 2);
assert_eq!(n.ceil(), 0.saturating_sub(2).into());
// On the limits:
let n = $name::max_value();
assert_eq!(n.ceil(), n.trunc());
let n = $name::min_value();
assert_eq!(n.ceil(), n.trunc());
}
#[test]
fn floor_works() {
// floor rounds toward negative infinity: 2.5 -> 2.
let n = $name::saturating_from_rational(5, 2);
assert_eq!(n.floor(), 2.into());
// -2.5 -> -3.
let n = $name::saturating_from_rational(-5, 2);
assert_eq!(n.floor(), 0.saturating_sub(3).into());
// On the limits:
let n = $name::max_value();
assert_eq!(n.floor(), n.trunc());
let n = $name::min_value();
assert_eq!(n.floor(), n.trunc());
}
#[test]
fn round_works() {
// Whole numbers round to themselves.
let n = $name::zero();
assert_eq!(n.round(), n);
let n = $name::one();
assert_eq!(n.round(), n);
// Halves round away from zero: 2.5 -> 3, -2.5 -> -3.
let n = $name::saturating_from_rational(5, 2);
assert_eq!(n.round(), 3.into());
let n = $name::saturating_from_rational(-5, 2);
assert_eq!(n.round(), 0.saturating_sub(3).into());
// Saturating:
let n = $name::max_value();
assert_eq!(n.round(), n.trunc());
let n = $name::min_value();
assert_eq!(n.round(), n.trunc());
// On the limit:
// floor(max - 1) + 0.33..  (fraction < 0.5 rounds down)
let n = $name::max_value()
.saturating_sub(1.into())
.trunc()
.saturating_add((1, 3).into());
assert_eq!(n.round(), ($name::max_value() - 1.into()).trunc());
// floor(max - 1) + 0.5  (fraction == 0.5 rounds up)
let n = $name::max_value()
.saturating_sub(1.into())
.trunc()
.saturating_add((1, 2).into());
assert_eq!(n.round(), $name::max_value().trunc());
if $name::SIGNED {
// floor(min + 1) - 0.33..
let n = $name::min_value()
.saturating_add(1.into())
.trunc()
.saturating_sub((1, 3).into());
assert_eq!(n.round(), ($name::min_value() + 1.into()).trunc());
// floor(min + 1) - 0.5
let n = $name::min_value()
.saturating_add(1.into())
.trunc()
.saturating_sub((1, 2).into());
assert_eq!(n.round(), $name::min_value().trunc());
}
}
#[test]
fn perthing_into_works() {
// Converting 10% from each per-thing type must yield an inner value of
// accuracy() / 10, i.e. exactly 0.1 in this fixed-point representation,
// regardless of the per-thing type's own precision.
let ten_percent_percent: $name = Percent::from_percent(10).into();
assert_eq!(ten_percent_percent.into_inner(), $name::accuracy() / 10);
let ten_percent_permill: $name = Permill::from_percent(10).into();
assert_eq!(ten_percent_permill.into_inner(), $name::accuracy() / 10);
let ten_percent_perbill: $name = Perbill::from_percent(10).into();
assert_eq!(ten_percent_perbill.into_inner(), $name::accuracy() / 10);
let ten_percent_perquintill: $name = Perquintill::from_percent(10).into();
assert_eq!(ten_percent_perquintill.into_inner(), $name::accuracy() / 10);
}
#[test]
fn fmt_should_work() {
// Debug output is "TypeName(int.frac)" with precision() fractional digits.
// `{:0>width$}` zero-pads on the left (an all-zero fraction); `{:0<width$}`
// zero-pads on the right (e.g. .5 prints as 5 followed by zeros).
let zero = $name::zero();
assert_eq!(format!("{:?}", zero), format!("{}(0.{:0>weight$})", stringify!($name), 0, weight=precision()));
let one = $name::one();
assert_eq!(format!("{:?}", one), format!("{}(1.{:0>weight$})", stringify!($name), 0, weight=precision()));
let frac = $name::saturating_from_rational(1, 2);
assert_eq!(format!("{:?}", frac), format!("{}(0.{:0<weight$})", stringify!($name), 5, weight=precision()));
let frac = $name::saturating_from_rational(5, 2);
assert_eq!(format!("{:?}", frac), format!("{}(2.{:0<weight$})", stringify!($name), 5, weight=precision()));
let frac = $name::saturating_from_rational(314, 100);
assert_eq!(format!("{:?}", frac), format!("{}(3.{:0<weight$})", stringify!($name), 14, weight=precision()));
if $name::SIGNED {
// Negative values carry the minus sign on the integer part.
let neg = -$name::one();
assert_eq!(format!("{:?}", neg), format!("{}(-1.{:0>weight$})", stringify!($name), 0, weight=precision()));
let frac = $name::saturating_from_rational(-314, 100);
assert_eq!(format!("{:?}", frac), format!("{}(-3.{:0<weight$})", stringify!($name), 14, weight=precision()));
}
}
}
}
}
// Instantiate the fixed-point types. Arguments (as used inside the macro):
// type name, test module name, inner integer type, signedness flag,
// accuracy (the inner value representing 1.0), and the rustdoc string.
//
// 64-bit signed, 9 decimal digits of fractional precision (accuracy 10^9).
implement_fixed!(
FixedI64,
test_fixed_i64,
i64,
true,
1_000_000_000,
"_Fixed Point 64 bits signed, range = [-9223372036.854775808, 9223372036.854775807]_",
);
// 128-bit signed, 18 decimal digits of fractional precision (accuracy 10^18).
implement_fixed!(
FixedI128,
test_fixed_i128,
i128,
true,
1_000_000_000_000_000_000,
"_Fixed Point 128 bits signed, range = \
[-170141183460469231731.687303715884105728, 170141183460469231731.687303715884105727]_",
);
// 128-bit unsigned, 18 decimal digits of fractional precision (accuracy 10^18).
implement_fixed!(
FixedU128,
test_fixed_u128,
u128,
false,
1_000_000_000_000_000_000,
"_Fixed Point 128 bits unsigned, range = \
[0.000000000000000000, 340282366920938463463.374607431768211455]_",
);
|
use crossbeam_channel as channel;
/// Anything that can accept values convertible into `T` for later processing.
pub trait MessageReceiver<T> {
/// Queue `data` for later consumption; conversion into `T` happens here.
fn queue(&self, data: impl Into<T>);
}
/// Message queue backed by an unbounded crossbeam channel:
/// `queue` is the send half, `reader` the receive half.
pub struct MessageQueue<T> {
// send side — enqueue() pushes through this
queue: channel::Sender<T>,
// receive side — read_all()/clear() drain through this
reader: channel::Receiver<T>,
}
impl<T> Default for MessageQueue<T> {
/// Create a queue over a fresh unbounded channel; both halves stay
/// inside the struct, so the channel lives as long as the queue.
fn default() -> Self {
let (queue, reader) = channel::unbounded(); // TODO this should probably be bounded
Self { queue, reader }
}
}
impl<T> MessageQueue<T> {
    /// Construct an empty queue; identical to `Default::default()`.
    pub fn new() -> Self {
        Self::default()
    }

    /// Push a message onto the queue.
    pub fn enqueue(&self, req: T) {
        self.queue.send(req);
    }

    /// Drain everything currently queued into a `Vec`, oldest first.
    /// Does not block: stops as soon as the channel is momentarily empty.
    pub fn read_all(&self) -> Vec<T> {
        let mut drained = Vec::with_capacity(self.reader.len());
        loop {
            match self.reader.try_recv() {
                Some(msg) => drained.push(msg),
                None => break,
            }
        }
        drained
    }

    /// Number of messages currently sitting in the channel.
    pub fn len(&self) -> usize {
        self.queue.len()
    }

    /// True when no messages are queued.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Discard every currently-queued message without returning it.
    pub fn clear(&self) {
        while self.reader.try_recv().is_some() {}
    }
}
|
use std::collections::HashMap;
impl Solution {
pub fn min_distance(word1: String, word2: String) -> i32 {
let xs = word1.as_bytes();
let ys = word2.as_bytes();
let mut rc = HashMap::new();
fn min_dis<'a, 'b>(
xs: &'a [u8], ys: &'b [u8],
rc: &mut HashMap<(&'a [u8], &'b [u8]), i32>
) -> i32 {
if xs.len() == 0 || ys.len() == 0 {
return xs.len().max(ys.len()) as i32;
}
if let Some(cnt) = rc.get(&(xs, ys)) {
return *cnt;
}
let ans = if xs[0] == ys[0] {
min_dis(&xs[1..], &ys[1..], rc)
}
else {
min_dis(&xs[1..], &ys[1..], rc)
.min(min_dis(&xs[1..], ys, rc))
.min(min_dis(xs, &ys[1..], rc)) + 1
};
rc.insert((xs, ys), ans);
ans
}
min_dis(&xs, &ys, &mut rc)
}
}
|
use chrono::{Local, Utc, DateTime};
use mustache::{MapBuilder, VecBuilder, Data};
use postgres::rows::Row;
/// A model that maps to a single Postgres table.
pub trait DBTable {
/// Build one value from a query result row. Column order must match the
/// table layout in `init_query`.
fn from_row(row: Row) -> Self;
/// SQL that drops the table if it exists.
fn drop_query() -> &'static str;
/// SQL that creates the table.
fn init_query() -> &'static str;
/// SQL that seeds the table with fixture rows for tests.
fn test_init_query() -> &'static str;
}
/// A model that can render itself into a mustache `MapBuilder`.
pub trait TemplateData {
/// Template key used for a single value (e.g. "site").
fn name() -> &'static str;
/// Template key used for a list of values (e.g. "sites").
fn names() -> &'static str;
/// Insert this value's fields into `builder` and return it.
fn to_data(&self, builder: MapBuilder) -> MapBuilder;
}
/// Fold every item's template data into a single mustache `Data` value.
pub fn build_data(items: Vec<&AddToData>) -> Data {
    items
        .iter()
        .fold(MapBuilder::new(), |acc, item| item.add_to_data(acc))
        .build()
}
/// Anything that can merge itself into a mustache `MapBuilder`.
pub trait AddToData {
fn add_to_data(&self, builder: MapBuilder) -> MapBuilder;
}
/// A single `TemplateData` value is inserted as a nested map under `name()`.
impl<C : TemplateData> AddToData for C {
fn add_to_data(&self, builder: MapBuilder) -> MapBuilder {
builder.insert_map(Self::name(), |m| self.to_data(m))
}
}
/// A list of `TemplateData` values is inserted as a vec under `names()`.
impl<C : TemplateData> AddToData for Vec<C> {
fn add_to_data(&self, builder: MapBuilder) -> MapBuilder {
builder.insert_vec(C::names(), |b| vec_to_data(self, b))
}
}
/// Push each item as a nested map onto the mustache `VecBuilder`.
fn vec_to_data<C : TemplateData>(items: &Vec<C>, builder: VecBuilder) -> VecBuilder {
    items
        .iter()
        .fold(builder, |acc, item| acc.push_map(|map| item.to_data(map)))
}
////// Site //////
/// Site-wide configuration: the admin owner name and a shared secret.
#[derive(Debug, Clone)]
pub struct Site {
pub owner: String,
pub secret: String
}
/// Renders under the template key "site".
impl TemplateData for Site {
fn name() -> &'static str { "site" }
fn names() -> &'static str { "sites" }
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("owner", self.owner.clone())
.insert_str("secret", self.secret.clone())
}
}
/// Postgres persistence for `Site` (table has no primary key).
impl DBTable for Site {
fn from_row(row: Row) -> Site {
// Column order: owner, secret — must match init_query below.
Site{
owner: row.get(0),
secret: row.get(1)
}
}
fn drop_query() -> &'static str {
"drop table if exists Site;"
}
fn init_query() -> &'static str {
"create table Site (
owner varchar NOT NULL,
secret varchar NOT NULL
);"
}
fn test_init_query() -> &'static str {
"insert into Site (owner, secret)
values ('me', 'secret');"
}
}
////// Hunts //////
/// A puzzle hunt: the top-level container that waves, puzzles and teams
/// all reference via its serial `id`.
#[derive(Debug, Clone)]
pub struct Hunt {
pub id: i32,
pub name: String,
// URL-friendly identifier
pub key: String,
pub team_size: i32,
pub init_guesses: i32,
pub password: String,
pub closed: bool,
pub visible: bool
}
/// Renders under the template key "hunt". Note: `password` is not exposed
/// to templates — presumably intentional; confirm before adding it.
impl TemplateData for Hunt {
fn name() -> &'static str { "hunt" }
fn names() -> &'static str { "hunts" }
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("id", format!("{}", self.id))
.insert_str("name", self.name.clone())
.insert_str("key", self.key.clone())
.insert_str("team_size", format!("{}", self.team_size))
.insert_str("init_guesses", format!("{}", self.init_guesses))
.insert_bool("closed", self.closed)
.insert_bool("visible", self.visible)
}
}
/// Postgres persistence for `Hunt`; `id` is a serial primary key.
impl DBTable for Hunt {
fn from_row(row: Row) -> Hunt {
// Column order must match init_query below.
Hunt{
id: row.get(0),
name: row.get(1),
key: row.get(2),
team_size: row.get(3),
init_guesses: row.get(4),
password: row.get(5),
closed: row.get(6),
visible: row.get(7)
}
}
fn drop_query() -> &'static str {
"drop table if exists Hunt;"
}
fn init_query() -> &'static str {
"create table Hunt (
id serial primary key NOT NULL,
name varchar NOT NULL,
key varchar NOT NULL,
team_size int NOT NULL,
init_guesses int NOT NULL,
password varchar NOT NULL,
closed boolean NOT NULL,
visible boolean NOT NULL
);"
}
fn test_init_query() -> &'static str {
"insert into Hunt (name, key, team_size, init_guesses, password, closed, visible)
values ('Best Hunt Ever', 'besthuntever', 4, 100, 'pass', true, true);"
}
}
////// Waves //////
/// A wave together with its released puzzles, ready for template rendering.
/// Not a `DBTable` itself — assembled from `Wave` + puzzles elsewhere.
#[derive(Debug, Clone)]
pub struct ReleasedWave {
pub name: String,
pub time: DateTime<Local>,
pub guesses: i32,
pub released: bool,
pub puzzles: Vec<ReleasedPuzzle>
}
/// Renders under the template key "wave", including the nested puzzle list.
impl TemplateData for ReleasedWave {
fn name() -> &'static str { "wave" }
fn names() -> &'static str { "waves" }
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("name", self.name.clone())
.insert_str("time", self.time.to_rfc3339())
.insert_str("guesses", format!("{}", self.guesses))
.insert_bool("released", self.released)
.insert_vec("puzzles", |b| vec_to_data(&self.puzzles, b))
}
}
/// A timed release wave of puzzles within a hunt.
#[derive(Debug, Clone)]
pub struct Wave {
pub name: String,
pub time: DateTime<Local>,
pub guesses: i32
}
impl Wave {
/// A wave is released once its scheduled time has passed.
pub fn is_released(&self) -> bool {
Local::now() > self.time
}
}
/// Renders under the template key "wave" (no puzzle list — see ReleasedWave).
impl TemplateData for Wave {
fn name() -> &'static str { "wave" }
fn names() -> &'static str { "waves" }
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("name", self.name.clone())
.insert_str("time", self.time.to_rfc3339())
.insert_str("guesses", format!("{}", self.guesses))
}
}
impl DBTable for Wave {
fn from_row(row: Row) -> Wave {
// Columns: 0 = name, 1 = hunt (not stored on the struct, hence skipped),
// 2 = time, 3 = guesses. Timestamps are stored UTC, converted to Local.
let time: DateTime<Utc> = row.get(2);
Wave{
name: row.get(0),
time: time.with_timezone(&Local),
guesses: row.get(3)
}
}
fn drop_query() -> &'static str {
"drop table if exists Wave;"
}
fn init_query() -> &'static str {
"create table Wave (
name varchar NOT NULL,
hunt int NOT NULL,
time timestamp with time zone NOT NULL,
guesses int NOT NULL
);
"
}
fn test_init_query() -> &'static str {
"insert into Wave (name, hunt, time, guesses)
values ('Wave One', 1, '2004-10-19 10:23:54', 10);"
}
}
////// Puzzles //////
/// A puzzle visible to players, with its hints and (if solved) the team's
/// answer. Not a `DBTable` — assembled from `Puzzle` + hints elsewhere.
#[derive(Debug, Clone)]
pub struct ReleasedPuzzle {
pub name: String,
pub number: i32,
pub hunt: i32,
pub wave: String,
pub time: DateTime<Local>,
pub key: String,
pub hints: Vec<Hint>,
// the team's accepted answer; empty string means unsolved (see "solved" below)
pub answer: String,
}
/// Renders under the template key "puzzle"; "solved" is derived from
/// `answer` being non-empty.
impl TemplateData for ReleasedPuzzle {
fn name() -> &'static str { "puzzle" }
fn names() -> &'static str { "puzzles" }
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("name", self.name.clone())
.insert_str("number", format!("{}", self.number))
.insert_str("wave", self.wave.clone())
.insert_str("key", self.key.clone())
.insert_vec("hints", |b| vec_to_data(&self.hints, b))
.insert_str("answer", self.answer.clone())
.insert_bool("solved", !self.answer.is_empty())
}
}
/// A puzzle as stored in the database, including its canonical answer.
#[derive(Debug, Clone)]
pub struct Puzzle {
pub name: String,
pub number: i32,
pub hunt: i32,
pub answer: String,
pub wave: String,
pub key: String
}
/// Renders under the template key "puzzle". Unlike `ReleasedPuzzle`, this
/// exposes the canonical `answer` — admin-side rendering, presumably.
impl TemplateData for Puzzle {
fn name() -> &'static str { "puzzle" }
fn names() -> &'static str { "puzzles" }
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("name", self.name.clone())
.insert_str("number", format!("{}", self.number))
.insert_str("hunt", format!("{}", self.hunt))
.insert_str("answer", self.answer.clone())
.insert_str("wave", self.wave.clone())
.insert_str("key", self.key.clone())
}
}
/// Postgres persistence for `Puzzle`; `name` is the primary key.
impl DBTable for Puzzle {
fn from_row(row: Row) -> Puzzle {
// Column order must match init_query below.
Puzzle{
name: row.get(0),
number: row.get(1),
hunt: row.get(2),
answer: row.get(3),
wave: row.get(4),
key: row.get(5)
}
}
fn drop_query() -> &'static str {
"drop table if exists Puzzle;"
}
fn init_query() -> &'static str {
"create table Puzzle (
name varchar primary key NOT NULL,
number int NOT NULL,
hunt int NOT NULL,
answer varchar NOT NULL,
wave varchar NOT NULL,
key varchar NOT NULL
);
"
}
fn test_init_query() -> &'static str {
"insert into Puzzle (name, number, hunt, answer, wave, key)
values ('Puzzle Two', 3, 1, 'answer2', 'Wave One', 'QQQ'),
('Puzzle One', 1, 1, 'answer1', 'Wave One', 'PPP'),
('Puzzle Three', 5, 1, 'answer3', 'Wave One', 'RRR');"
}
}
////// Hints //////
/// A hint attached to a puzzle.
#[derive(Debug, Clone)]
pub struct Hint {
pub hint: String,
pub puzzle_name: String,
pub number: i32,
pub hunt: i32,
pub wave: String,
pub key: String
}
/// Renders under the template key "hint". Note: `hunt` is not exposed to
/// templates — presumably intentional; confirm if it is ever needed there.
impl TemplateData for Hint {
fn name() -> &'static str { "hint" }
fn names() -> &'static str { "hints" }
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("hint", self.hint.clone())
.insert_str("puzzle_name",self.puzzle_name.clone())
.insert_str("number", format!("{}", self.number))
.insert_str("wave", self.wave.clone())
.insert_str("key", self.key.clone())
}
}
/// Postgres persistence for `Hint`; the hint text itself is the primary key.
impl DBTable for Hint {
fn from_row(row: Row) -> Hint {
// Column order must match init_query below.
Hint{
hint: row.get(0),
puzzle_name:row.get(1),
number: row.get(2),
hunt: row.get(3),
wave: row.get(4),
key: row.get(5)
}
}
fn drop_query() -> &'static str {
"drop table if exists Hint;"
}
fn init_query() -> &'static str {
"create table Hint (
hint varchar NOT NULL primary key,
puzzle_name varchar NOT NULL,
number int NOT NULL,
hunt int NOT NULL,
wave varchar NOT NULL,
key varchar NOT NULL
);
"
}
fn test_init_query() -> &'static str {
"insert into Hint (hint, puzzle_name, number, hunt, wave, key)
values ('Really, it is just \"answer\".', 'Puzzle One', 2, 1, 'Wave One', 'HHH'),
('The answer is \"answer\"', 'Puzzle One', 1, 1, 'Wave One', 'III');"
}
}
////// Teams //////
/// A team competing in a hunt, with its member list.
#[derive(Debug, Clone)]
pub struct Team {
pub team_id: i32,
pub hunt: i32,
pub password: String,
pub name: String,
pub guesses: i32,
// populated separately from the Member table; not part of the Team row
pub members: Vec<Member>
}
/// Renders under the template key "team", including the nested member list.
/// Note: the team `password` IS exposed to templates here.
impl TemplateData for Team {
fn name() -> &'static str { "team" }
fn names() -> &'static str { "teams" }
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("team_id", format!("{}", self.team_id))
.insert_str("hunt", format!("{}", self.hunt))
.insert_str("password", self.password.clone())
.insert_str("name", self.name.clone())
.insert_str("guesses", format!("{}", self.guesses))
.insert_vec("members", |b| vec_to_data(&self.members, b))
}
}
/// Postgres persistence for `Team`; `team_id` is a serial primary key.
impl DBTable for Team {
fn from_row(row: Row) -> Team {
// members are not stored on this table; callers fill them in later.
Team{
team_id: row.get(0),
hunt: row.get(1),
password: row.get(2),
name: row.get(3),
guesses: row.get(4),
members: vec!()
}
}
fn drop_query() -> &'static str {
"drop table if exists Team;"
}
fn init_query() -> &'static str {
"create table Team (
team_id serial primary key NOT NULL,
hunt int NOT NULL,
password varchar NOT NULL,
name varchar NOT NULL,
guesses int NOT NULL
);
"
}
fn test_init_query() -> &'static str {
"insert into Team (hunt, password, name, guesses)
values (1, 'pass', 'SecondBestTeam', 99),
(1, 'pass', 'BestTeamEver', 5);"
}
}
////// Members //////
/// A member of a team (name + contact email).
#[derive(Debug, Clone)]
pub struct Member {
pub team_id: i32,
pub hunt: i32,
pub name: String,
pub email: String
}
/// Renders under the template key "member".
impl TemplateData for Member {
fn name() -> &'static str { "member" }
fn names() -> &'static str { "members" }
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("team_id", format!("{}", self.team_id))
.insert_str("hunt", format!("{}", self.hunt))
.insert_str("name", self.name.clone())
.insert_str("email", self.email.clone())
}
}
/// Postgres persistence for `Member` (no primary key on this table).
impl DBTable for Member {
fn from_row(row: Row) -> Member {
// Column order must match init_query below.
Member{
team_id: row.get(0),
hunt: row.get(1),
name: row.get(2),
email: row.get(3)
}
}
fn drop_query() -> &'static str {
"drop table if exists Member;"
}
fn init_query() -> &'static str {
"create table Member (
team_id int NOT NULL,
hunt int NOT NULL,
name varchar NOT NULL,
email varchar NOT NULL
);
"
}
fn test_init_query() -> &'static str {
"insert into Member (team_id, hunt, name, email)
values (1, 1, 'BestPersonEver', 'person@email.com');"
}
}
////// Guesses //////
/// One answer guess submitted by a team for a puzzle.
#[derive(Debug, Clone)]
pub struct Guess {
pub team_id: i32,
pub hunt: i32,
pub puzzle_key: String,
pub guess: String,
pub time: DateTime<Utc>
}
/// Renders under the template key "guess".
impl TemplateData for Guess {
fn name() -> &'static str { "guess" }
// NOTE(review): "guesss" looks like a typo for "guesses", but it is a
// runtime template key — confirm against the templates before changing it.
fn names() -> &'static str { "guesss" }
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("team_id", format!("{}", self.team_id))
.insert_str("hunt", format!("{}", self.hunt))
.insert_str("puzzle_key", self.puzzle_key.clone())
.insert_str("guess", self.guess.clone())
.insert_str("time", format!("{}", self.time))
}
}
impl Guess {
/// Extra index to speed up per-team, per-puzzle guess lookups.
pub fn index_query() -> &'static str {
"create index guess_index on Guess (hunt, team_id, puzzle_key);"
}
}
/// Postgres persistence for `Guess` (no primary key; see index_query above).
impl DBTable for Guess {
fn from_row(row: Row) -> Guess {
// Column order must match init_query below.
Guess{
team_id: row.get(0),
hunt: row.get(1),
puzzle_key: row.get(2),
guess: row.get(3),
time: row.get(4)
}
}
fn drop_query() -> &'static str {
"drop table if exists Guess;"
}
fn init_query() -> &'static str {
"create table Guess (
team_id int NOT NULL,
hunt int NOT NULL,
puzzle_key varchar NOT NULL,
guess varchar NOT NULL,
time timestamp with time zone NOT NULL
);
"
}
fn test_init_query() -> &'static str {
"insert into Guess (team_id, hunt, puzzle_key, guess, time)
values (1, 1, 'PPP', 'answer?', '2004-10-19 10:23:54');"
}
}
/// Record of a team solving a puzzle. No `TemplateData` impl — solves are
/// only aggregated into stats, not rendered directly.
#[derive(Debug, Clone)]
pub struct Solve {
pub team_id: i32,
pub hunt: i32,
pub puzzle_key: String,
pub solved_at: DateTime<Utc>,
pub solve_time: i32, // in seconds
}
/// Postgres persistence for `Solve`; at most one solve per
/// (hunt, team, puzzle) thanks to the composite primary key.
impl DBTable for Solve {
fn from_row(row: Row) -> Solve {
// Column order must match init_query below.
Solve {
team_id: row.get(0),
hunt: row.get(1),
puzzle_key: row.get(2),
solved_at: row.get(3),
solve_time: row.get(4),
}
}
fn drop_query() -> &'static str {
"drop table if exists Solve;"
}
fn init_query() -> &'static str {
"create table Solve (
team_id int NOT NULL,
hunt int NOT NULL,
puzzle_key varchar NOT NULL,
solved_at timestamp with time zone NOT NULL,
solve_time int NOT NULL,
primary key (hunt, team_id, puzzle_key)
);
"
}
fn test_init_query() -> &'static str {
"insert into Solve (team_id, hunt, puzzle_key, solved_at, solve_time)
values (2, 1, 'PPP', '2004-10-19 10:23:54', 385),
(2, 1, 'QQQ', '2004-10-19 10:23:54', 385),
(1, 1, 'PPP', '2004-10-19 10:23:55', 386);"
}
}
////// Answer Submission //////
/// Outcome of submitting an answer guess.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Correctness {
// the guess matched the puzzle's answer
Right,
// the guess did not match
Wrong,
// the team had already solved this puzzle
AlreadySolved,
// the team had already submitted this exact guess
AlreadyGuessedThat,
// the team has no guesses remaining
OutOfGuesses
}
/// Newtype wrapping a single guess string for template rendering.
pub struct AGuess(pub String);
/// Renders under the template key "guess" (list key "guesses", unlike
/// `Guess::names()` which returns "guesss").
impl TemplateData for AGuess {
fn name() -> &'static str {
"guess"
}
fn names() -> &'static str {
"guesses"
}
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
builder
.insert_str("guess", self.0.clone())
}
}
////// Stats //////
/// Aggregated per-team statistics for the stats page.
#[derive(Debug, Clone)]
pub struct TeamStats {
pub team_name: String,
pub guesses: i32,
pub solves: i32,
pub total_solve_time: i32, // in seconds
}
/// Renders under the template key "stat".
impl TemplateData for TeamStats {
fn name() -> &'static str {
"stat"
}
fn names() -> &'static str {
"stats"
}
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
// Average solve time in whole minutes (integer division); "-" when the
// team has no solves, which also avoids dividing by zero.
let avg_solve_time = if self.solves == 0 {
"-".to_string()
} else {
format!("{} mins", self.total_solve_time / self.solves / 60)
};
builder
.insert_str("team", self.team_name.clone())
.insert_str("solves", format!("{}", self.solves))
.insert_str("guesses", format!("{}", self.guesses))
.insert_str("avg_solve_time", avg_solve_time)
}
}
/// Aggregated per-puzzle statistics for the stats page.
#[derive(Debug, Clone)]
pub struct PuzzleStats {
pub wave_name: String,
pub puzzle_name: String,
pub puzzle_key: String,
pub guesses: i32,
pub solves: i32,
pub total_solve_time: i32, // in seconds
}
/// Renders under the template key "stat" (same keys as TeamStats).
impl TemplateData for PuzzleStats {
fn name() -> &'static str {
"stat"
}
fn names() -> &'static str {
"stats"
}
fn to_data(&self, builder: MapBuilder) -> MapBuilder {
// Average solve time in whole minutes (integer division); "-" when
// nobody solved the puzzle, which also avoids dividing by zero.
let avg_solve_time = if self.solves == 0 {
"-".to_string()
} else {
format!("{} mins", self.total_solve_time / self.solves / 60)
};
builder
.insert_str("wave_name", self.wave_name.clone())
.insert_str("puzzle_name", self.puzzle_name.clone())
.insert_str("puzzle_key", self.puzzle_key.clone())
.insert_str("guesses", format!("{}", self.guesses))
.insert_str("solves", format!("{}", self.solves))
.insert_str("avg_solve_time", avg_solve_time)
}
}
|
use specs::prelude::*;
use specs::storage::BTreeStorage;
use criterion::*;
use criterion::measurement::WallTime;
use super::super::utils::{Cold, Warm, CustomBencher};
use std::time::Instant;
use crate::suits::{A, B, C, D, E, F, G, H, I, J, K};
use rand::prelude::SliceRandom;
use crate::utils::bencher_max_size;
// Benchmark component types: the same small u32 payload declared once per
// specs storage strategy, plus 64-byte "huge" ([u32; 16]) variants.
// NOTE(review): `HashMap`/`HashMapHuge` shadow std::collections::HashMap in
// this module — fine here since std's map is never used by name.
#[derive(Component, Copy, Clone, Debug, Default)]
#[storage(VecStorage)]
pub struct VecStore(u32);
#[derive(Component, Copy, Clone, Debug, Default)]
#[storage(DenseVecStorage)]
pub struct DenseVec(u32);
#[derive(Component, Copy, Clone, Debug, Default)]
#[storage(BTreeStorage)]
pub struct BTree(u32);
#[derive(Component, Copy, Clone, Debug, Default)]
#[storage(HashMapStorage)]
pub struct HashMap(u32);
#[derive(Component, Copy, Clone, Debug, Default)]
#[storage(VecStorage)]
pub struct VecStoreHuge([u32; 16]);
#[derive(Component, Copy, Clone, Debug, Default)]
#[storage(DenseVecStorage)]
pub struct DenseVecHuge([u32; 16]);
#[derive(Component, Copy, Clone, Debug, Default)]
#[storage(BTreeStorage)]
pub struct BTreeHuge([u32; 16]);
#[derive(Component, Copy, Clone, Debug, Default)]
#[storage(HashMapStorage)]
pub struct HashMapHuge([u32; 16]);
/// Build a fresh specs `World` with every benchmark component type
/// registered (shared suite components A–K plus the storage variants above).
fn specs_world_create() -> specs::World {
let mut world = specs::World::new();
world.register::<A>();
world.register::<B>();
world.register::<C>();
world.register::<D>();
world.register::<E>();
world.register::<F>();
world.register::<G>();
world.register::<H>();
world.register::<I>();
world.register::<J>();
world.register::<K>();
world.register::<VecStore>();
world.register::<DenseVec>();
world.register::<BTree>();
world.register::<HashMap>();
world.register::<VecStoreHuge>();
world.register::<DenseVecHuge>();
world.register::<BTreeHuge>();
world.register::<HashMapHuge>();
world
}
// SystemData tuple used by the "specs-system" benches below: the entity
// allocator plus write access to component storages A through H.
type Args<'a> = (Entities<'a>,
WriteStorage<'a, A>, WriteStorage<'a, B>, WriteStorage<'a, C>, WriteStorage<'a, D>,
WriteStorage<'a, E>, WriteStorage<'a, F>, WriteStorage<'a, G>, WriteStorage<'a, H>);
/// Entity-creation benchmarks, parameterized by the number of components
/// attached (0, 1, 2, 4, 6, 8). Two variants:
/// - "specs-external": the world-level builder API
///   (`create_entity().with(...).build()`),
/// - "specs-system": in-system creation via `entities.create_iter()` plus
///   direct storage inserts.
pub fn specs_create(group: &mut BenchmarkGroup<WallTime>) {
// Upper bound on entities created per measurement batch (see bencher_max_size).
const BATCH_SIZE: u64 = 64 * 1024;
group.bench_with_input(BenchmarkId::new("specs-external", 0), &0, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
for _ in 0..iters {
world.create_entity().build();
}
});
});
group.bench_with_input(BenchmarkId::new("specs-external", 1), &1, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
for _ in 0..iters {
world.create_entity()
.with(A::default())
.build();
}
});
});
group.bench_with_input(BenchmarkId::new("specs-external", 2), &2, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
for _ in 0..iters {
world.create_entity()
.with(A::default())
.with(B::default())
.build();
}
});
});
group.bench_with_input(BenchmarkId::new("specs-external", 4), &4, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
for _ in 0..iters {
world.create_entity()
.with(A::default())
.with(B::default())
.with(C::default())
.with(D::default())
.build();
};
});
});
group.bench_with_input(BenchmarkId::new("specs-external", 6), &6, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
for _ in 0..iters {
world.create_entity()
.with(A::default())
.with(B::default())
.with(C::default())
.with(D::default())
.with(E::default())
.with(F::default())
.build();
}
});
});
group.bench_with_input(BenchmarkId::new("specs-external", 8), &8, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
for _ in 0..iters {
world.create_entity()
.with(A::default())
.with(B::default())
.with(C::default())
.with(D::default())
.with(E::default())
.with(F::default())
.with(G::default())
.with(H::default())
.build();
}
});
});
// In-system variants: allocate entities through the Entities resource and
// insert each component into its storage individually.
group.bench_with_input(BenchmarkId::new("specs-system", 0), &0, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
world.exec(|(entities, _, _, _, _, _, _, _, _): Args| {
entities.create_iter().take(iters as usize).for_each(|_| {});
})
});
});
group.bench_with_input(BenchmarkId::new("specs-system", 1), &1, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
world.exec(|(entities, mut a, _, _, _, _, _, _, _): Args| {
for entity in entities.create_iter().take(iters as usize) {
a.insert(entity, Default::default()).unwrap();
}
})
});
});
group.bench_with_input(BenchmarkId::new("specs-system", 2), &2, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
world.exec(|(entities, mut a, mut b, _, _, _, _, _, _): Args| {
for entity in entities.create_iter().take(iters as usize) {
a.insert(entity, Default::default()).unwrap();
b.insert(entity, Default::default()).unwrap();
}
})
});
});
group.bench_with_input(BenchmarkId::new("specs-system", 4), &4, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
world.exec(|(entities, mut a, mut b, mut c, mut d, _, _, _, _): Args| {
for entity in entities.create_iter().take(iters as usize) {
a.insert(entity, Default::default()).unwrap();
b.insert(entity, Default::default()).unwrap();
c.insert(entity, Default::default()).unwrap();
d.insert(entity, Default::default()).unwrap();
}
})
});
});
group.bench_with_input(BenchmarkId::new("specs-system", 6), &6, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
world.exec(|(entities, mut a, mut b, mut c, mut d, mut e, mut f, _, _): Args| {
for entity in entities.create_iter().take(iters as usize) {
a.insert(entity, Default::default()).unwrap();
b.insert(entity, Default::default()).unwrap();
c.insert(entity, Default::default()).unwrap();
d.insert(entity, Default::default()).unwrap();
e.insert(entity, Default::default()).unwrap();
f.insert(entity, Default::default()).unwrap();
}
})
});
});
group.bench_with_input(BenchmarkId::new("specs-system", 8), &8, |bencher, _| {
bencher_max_size(bencher, BATCH_SIZE, specs_world_create, |mut world, iters| {
world.exec(|(entities, mut a, mut b, mut c, mut d, mut e, mut f, mut g, mut h): Args| {
for entity in entities.create_iter().take(iters as usize) {
a.insert(entity, Default::default()).unwrap();
b.insert(entity, Default::default()).unwrap();
c.insert(entity, Default::default()).unwrap();
d.insert(entity, Default::default()).unwrap();
e.insert(entity, Default::default()).unwrap();
f.insert(entity, Default::default()).unwrap();
g.insert(entity, Default::default()).unwrap();
h.insert(entity, Default::default()).unwrap();
}
})
});
});
}
/// Component-removal benchmarks. Entities are built with components A–H
/// outside the timed region; only the `remove` calls are measured
/// (manual Instant timing via iter_custom).
/// NOTE(review): the local `enitites` is a typo for `entities` — harmless.
pub fn specs_delete(group: &mut BenchmarkGroup<WallTime>) {
// Removes only component A from each 8-component entity.
group.bench_function("specs-delete-1-of-8", |bencher| {
bencher.iter_custom(|iters| {
let mut world = specs_world_create();
let enitites: Vec<_> = (0..iters).map(|_| {
world.create_entity()
.with(A::default())
.with(B::default())
.with(C::default())
.with(D::default())
.with(E::default())
.with(F::default())
.with(G::default())
.with(H::default())
.build()
}).collect();
let mut a= world.system_data::<WriteStorage<A>>();
let start = Instant::now();
for entity in enitites {
a.remove(entity);
}
let end = Instant::now();
end.duration_since(start)
});
});
// Removes all eight components from each entity.
group.bench_function("specs-delete-8", |bencher| {
bencher.iter_custom(|iters| {
let mut world = specs_world_create();
let enitites: Vec<_> = (0..iters).map(|_| {
world.create_entity()
.with(A::default())
.with(B::default())
.with(C::default())
.with(D::default())
.with(E::default())
.with(F::default())
.with(G::default())
.with(H::default())
.build()
}).collect();
let (mut a, mut b, mut c, mut d,
mut e, mut f, mut g, mut h) = world.system_data::<
(WriteStorage<A>,
WriteStorage<B>,
WriteStorage<C>,
WriteStorage<D>,
WriteStorage<E>,
WriteStorage<F>,
WriteStorage<G>,
WriteStorage<H>)>();
let start = Instant::now();
for entity in enitites {
a.remove(entity);
b.remove(entity);
c.remove(entity);
d.remove(entity);
e.remove(entity);
f.remove(entity);
g.remove(entity);
h.remove(entity);
}
let end = Instant::now();
end.duration_since(start)
});
});
}
/// Fetch read access to component storages A–F from `world` and hand them
/// to `inner`, keeping the `system_data` plumbing out of each benchmark body.
fn wrap_world<INNER>(world: World, mut inner: INNER)
where INNER: FnMut(ReadStorage<A>, ReadStorage<B>, ReadStorage<C>,
ReadStorage<D>, ReadStorage<E>, ReadStorage<F>)
{
let (a, b, c, d, e, f) = world.system_data::<
(ReadStorage<A>,
ReadStorage<B>,
ReadStorage<C>,
ReadStorage<D>,
ReadStorage<E>,
ReadStorage<F>)>();
inner(a, b, c, d, e, f);
}
/// Build a world pre-populated with `dataset_size` entities, each carrying
/// components A–F, then run `inner` with read access to those six storages.
fn with_world<INNER>(dataset_size: u32, inner: INNER)
where INNER: FnMut(ReadStorage<A>, ReadStorage<B>, ReadStorage<C>,
ReadStorage<D>, ReadStorage<E>, ReadStorage<F>)
{
    let mut world = specs_world_create();
    for _ in 0..dataset_size {
        world.create_entity()
            .with(A::default())
            .with(B::default())
            .with(C::default())
            .with(D::default())
            .with(E::default())
            .with(F::default())
            .build();
    }
    wrap_world(world, inner)
}
/// Benchmarks iterating joins of 1 through 6 component storages, in both
/// cache-cold and cache-warm variants (supplied via the `Cold` and `Warm`
/// `CustomBencher` implementations).
pub fn iteration(group: &mut BenchmarkGroup<WallTime>, dataset_size: usize) {
    bench_with::<Cold>(&mut *group, "specs-cold", dataset_size as u32);
    bench_with::<Warm>(&mut *group, "specs-warm", dataset_size as u32);
    // One benchmark per join arity; the `BenchmarkId` parameter is the number
    // of storages taking part in the join.
    fn bench_with<BENCH>(group: &mut BenchmarkGroup<WallTime>, name: &str, dataset_size: u32)
        where BENCH: CustomBencher
    {
        group.bench_with_input(BenchmarkId::new(name, 1), &1, |bencher, _| {
            with_world(dataset_size, |a, _, _, _, _, _| {
                BENCH::run(bencher, dataset_size, |iters| {
                    for (a, ) in (&a, ).join().take(iters as usize) {
                        black_box(*a);
                    }
                })
            });
        });
        group.bench_with_input(BenchmarkId::new(name, 2), &2, |bencher, _| {
            with_world(dataset_size, |a, b, _, _, _, _| {
                BENCH::run(bencher, dataset_size, |iters| {
                    for (a, b) in (&a, &b).join().take(iters as usize) {
                        black_box((*a, *b));
                    }
                })
            });
        });
        group.bench_with_input(BenchmarkId::new(name, 3), &3, |bencher, _| {
            with_world(dataset_size, |a, b, c, _, _, _| {
                BENCH::run(bencher, dataset_size, |iters| {
                    for (a, b, c) in (&a, &b, &c).join().take(iters as usize) {
                        black_box((*a, *b, *c));
                    }
                })
            });
        });
        group.bench_with_input(BenchmarkId::new(name, 4), &4, |bencher, _| {
            with_world(dataset_size, |a, b, c, d, _, _| {
                BENCH::run(bencher, dataset_size, |iters| {
                    for (a, b, c, d) in (&a, &b, &c, &d).join().take(iters as usize) {
                        black_box((*a, *b, *c, *d));
                    }
                })
            });
        });
        group.bench_with_input(BenchmarkId::new(name, 5), &5, |bencher, _| {
            with_world(dataset_size, |a, b, c, d, e, _| {
                BENCH::run(bencher, dataset_size, |iters| {
                    for (a, b, c, d, e) in (&a, &b, &c, &d, &e).join().take(iters as usize) {
                        black_box((*a, *b, *c, *d, *e));
                    }
                })
            });
        });
        group.bench_with_input(BenchmarkId::new(name, 6), &6, |bencher, _| {
            with_world(dataset_size, |a, b, c, d, e, f| {
                BENCH::run(bencher, dataset_size, |iters| {
                    for (a, b, c, d, e, f) in (&a, &b, &c, &d, &e, &f).join().take(iters as usize) {
                        black_box((*a, *b, *c, *d, *e, *f));
                    }
                })
            });
        });
    }
}
/// Benchmarks joining on `A` while the population is spread across multiple
/// archetypes (entities carrying differing combinations of extra components).
pub fn iteration_by_archetypes(group: &mut BenchmarkGroup<WallTime>, per_archtype: usize, dataset_size: usize) {
    // Spawns `per_archtype * dataset_size` entities; every entity has `A`,
    // and the bits of `i % (per_archtype - 1)` select which of B..K it also
    // gets. NOTE(review): the bitmask checks up to bit 512 (10 optional
    // components, 1024 combinations) and the modulus is `per_archtype - 1`,
    // not `per_archtype` — confirm this matches the intended archetype count.
    fn build_with_archetypes(per_archtype: usize, dataset_size: usize) -> World {
        let mut world = specs_world_create();
        for i in 0..(per_archtype*dataset_size) {
            let i = i as u32;
            let mut builder = world.create_entity()
                .with(A(i));
            // A single archetype means no optional components at all.
            if per_archtype == 1 {
                builder.build();
                continue;
            }
            let n = i % (per_archtype - 1) as u32;
            if n & 1 != 0 { builder = builder.with(B(i)); }
            if n & 2 != 0 { builder = builder.with(C(i)); }
            if n & 4 != 0 { builder = builder.with(D(i)); }
            if n & 8 != 0 { builder = builder.with(E(i)); }
            if n & 16 != 0 { builder = builder.with(F(i)); }
            if n & 32 != 0 { builder = builder.with(G(i)); }
            if n & 64 != 0 { builder = builder.with(H(i)); }
            if n & 128 != 0 { builder = builder.with(I(i)); }
            if n & 256 != 0 { builder = builder.with(J(i)); }
            if n & 512 != 0 { builder = builder.with(K(i)); }
            builder.build();
        }
        world
    }
    bench_with::<Warm>(group, "specs-warm", per_archtype, dataset_size);
    bench_with::<Cold>(group, "specs-cold", per_archtype, dataset_size);
    // Iterates only over `A`; the archetype spread affects storage layout.
    fn bench_with<BENCH>(group: &mut BenchmarkGroup<WallTime>, name: &str, per_archtype: usize, dataset_size: usize)
        where BENCH: CustomBencher
    {
        group.bench_with_input(BenchmarkId::new(name, dataset_size), &dataset_size, |bencher, &_| {
            let world = build_with_archetypes(per_archtype, dataset_size);
            wrap_world(world, |a, _, _, _, _, _| {
                BENCH::run(bencher, (dataset_size * per_archtype) as u32, |iters| {
                    for (a, ) in (&a, ).join().take(iters as usize) {
                        black_box(*a);
                    }
                })
            });
        });
    }
}
/// Benchmarks joining `A` with a second component held in different storage
/// back-ends (vec / dense-vec / btree / hash), where only `with_alt` of the
/// `dataset_size` entities carry the second component.
pub fn iteration_by_saturation(group: &mut BenchmarkGroup<WallTime>, dataset_size: usize, with_alt: usize, reorder: bool) {
    // Builds a world with `dataset_size` entities all holding `A`; a random
    // subset of `with_alt` of them additionally holds `value`. When `reorder`
    // is set, the subset is inserted in ascending entity order.
    fn build_with_archetypes<Alt>(dataset_size: usize, with_alt: usize, value: Alt, reorder: bool) -> World
        where Alt: Copy + Component
    {
        let mut world = specs_world_create();
        let mut entities: Vec<_> = world.create_iter().take(dataset_size).collect();
        let mut component_a = world.write_component::<A>();
        let mut component_alt = world.write_component::<Alt>();
        for &entity in &entities {
            component_a.insert(entity, A(0)).unwrap();
        }
        // Pick a random subset of `with_alt` entities.
        entities.shuffle(&mut rand::thread_rng());
        entities.truncate(with_alt);
        if reorder {
            entities.sort();
        }
        // `entities` was already truncated to `with_alt` elements, so iterate
        // it whole (the former `.take(with_alt)` here was redundant).
        for &entity in &entities {
            component_alt.insert(entity, value).unwrap();
        }
        drop((component_a, component_alt));
        world
    }
    bench_with::<Warm, VecStore>(group, "specs-warm-vecmap", dataset_size, with_alt, VecStore(0), reorder);
    bench_with::<Cold, VecStore>(group, "specs-cold-vecmap", dataset_size, with_alt, VecStore(0), reorder);
    bench_with::<Warm, DenseVec>(group, "specs-warm-densevecmap", dataset_size, with_alt, DenseVec(0), reorder);
    bench_with::<Cold, DenseVec>(group, "specs-cold-densevecmap", dataset_size, with_alt, DenseVec(0), reorder);
    bench_with::<Warm, BTree>(group, "specs-warm-btreemap", dataset_size, with_alt, BTree(0), reorder);
    bench_with::<Cold, BTree>(group, "specs-cold-btreemap", dataset_size, with_alt, BTree(0), reorder);
    bench_with::<Warm, HashMap>(group, "specs-warm-hashmap", dataset_size, with_alt, HashMap(0), reorder);
    bench_with::<Cold, HashMap>(group, "specs-cold-hashmap", dataset_size, with_alt, HashMap(0), reorder);
    // Joins `A` with `ALT`; only the `with_alt` matching entities are visited.
    fn bench_with<BENCH, ALT>(
        group: &mut BenchmarkGroup<WallTime>,
        name: &str,
        dataset_size: usize,
        with_alt: usize,
        value: ALT,
        reorder: bool
    )
        where BENCH: CustomBencher,
              ALT: Copy + Component
    {
        group.bench_with_input(BenchmarkId::new(name, with_alt), &with_alt, |bencher, &_| {
            let world = build_with_archetypes(dataset_size, with_alt, value, reorder);
            let (a, b) = world.system_data::<(ReadStorage<A>, ReadStorage<ALT>)>();
            BENCH::run(bencher, with_alt as u32, |iters| {
                for (a, b) in (&a, &b).join().take(iters as usize) {
                    black_box((*a, *b));
                }
            })
        });
    }
}
/// Same benchmark as `iteration_by_saturation`, but the secondary component
/// types are the "huge" variants, stressing large per-component payloads.
pub fn iteration_by_saturation_huge(group: &mut BenchmarkGroup<WallTime>, dataset_size: usize, with_alt: usize, reorder: bool) {
    // Builds a world with `dataset_size` entities all holding `A`; a random
    // subset of `with_alt` of them additionally holds `value`. When `reorder`
    // is set, the subset is inserted in ascending entity order.
    fn build_with_archetypes<Alt>(dataset_size: usize, with_alt: usize, value: Alt, reorder: bool) -> World
        where Alt: Copy + Component
    {
        let mut world = specs_world_create();
        let mut entities: Vec<_> = world.create_iter().take(dataset_size).collect();
        let mut component_a = world.write_component::<A>();
        let mut component_alt = world.write_component::<Alt>();
        for &entity in &entities {
            component_a.insert(entity, A(0)).unwrap();
        }
        // Pick a random subset of `with_alt` entities.
        entities.shuffle(&mut rand::thread_rng());
        entities.truncate(with_alt);
        if reorder {
            entities.sort();
        }
        // `entities` was already truncated to `with_alt` elements, so iterate
        // it whole (the former `.take(with_alt)` here was redundant).
        for &entity in &entities {
            component_alt.insert(entity, value).unwrap();
        }
        drop((component_a, component_alt));
        world
    }
    bench_with::<Warm, VecStoreHuge>(group, "specs-warm-vecmap", dataset_size, with_alt, VecStoreHuge::default(), reorder);
    bench_with::<Cold, VecStoreHuge>(group, "specs-cold-vecmap", dataset_size, with_alt, VecStoreHuge::default(), reorder);
    bench_with::<Warm, DenseVecHuge>(group, "specs-warm-densevecmap", dataset_size, with_alt, DenseVecHuge::default(), reorder);
    bench_with::<Cold, DenseVecHuge>(group, "specs-cold-densevecmap", dataset_size, with_alt, DenseVecHuge::default(), reorder);
    bench_with::<Warm, BTreeHuge>(group, "specs-warm-btreemap", dataset_size, with_alt, BTreeHuge::default(), reorder);
    bench_with::<Cold, BTreeHuge>(group, "specs-cold-btreemap", dataset_size, with_alt, BTreeHuge::default(), reorder);
    bench_with::<Warm, HashMapHuge>(group, "specs-warm-hashmap", dataset_size, with_alt, HashMapHuge::default(), reorder);
    bench_with::<Cold, HashMapHuge>(group, "specs-cold-hashmap", dataset_size, with_alt, HashMapHuge::default(), reorder);
    // Joins `A` with `ALT`; only the `with_alt` matching entities are visited.
    fn bench_with<BENCH, ALT>(
        group: &mut BenchmarkGroup<WallTime>,
        name: &str,
        dataset_size: usize,
        with_alt: usize,
        value: ALT,
        reorder: bool
    )
        where BENCH: CustomBencher,
              ALT: Copy + Component
    {
        group.bench_with_input(BenchmarkId::new(name, with_alt), &with_alt, |bencher, &_| {
            let world = build_with_archetypes(dataset_size, with_alt, value, reorder);
            let (a, b) = world.system_data::<(ReadStorage<A>, ReadStorage<ALT>)>();
            BENCH::run(bencher, with_alt as u32, |iters| {
                for (a, b) in (&a, &b).join().take(iters as usize) {
                    black_box((*a, *b));
                }
            })
        });
    }
}
|
extern crate pretty_env_logger;
use chrono::prelude::*;
use hmac::{Hmac, Mac, NewMac};
use log::debug;
use reqwest::StatusCode;
use serde_json::Value;
use sha2::Sha256;
use std::collections::BTreeMap;
use std::error::Error;
type HmacSha256 = Hmac<Sha256>;
/// Minimal client for the Kylin Network data API.
pub struct KylinNetworkAPI {
    api_key: String,    // sent in the `APIKEY` request header
    api_secret: String, // HMAC-SHA256 signing key
    base: String,       // API base URL
    client: reqwest::Client,
}
impl KylinNetworkAPI {
    /// Creates a client pointed at the production Kylin Network endpoint.
    pub fn new(api_key: String, api_secret: String) -> KylinNetworkAPI {
        KylinNetworkAPI {
            api_key,
            api_secret,
            base: String::from("https://api.kylin.network"),
            client: reqwest::Client::new(),
        }
    }

    /// Signs the request parameters with HMAC-SHA256 and returns the
    /// lowercase hex digest. `BTreeMap` iterates in key order, which
    /// provides the name-sorted `k=v&k=v` string the API expects.
    fn signature(&self, params: &BTreeMap<String, String>) -> String {
        let sorted_params: Vec<String> =
            params.iter().map(|(k, v)| format!("{}={}", k, v)).collect();
        let str_to_be_signed = sorted_params.join("&");
        debug!("String to be signed: {}", str_to_be_signed);
        debug!("Secret: {}", self.api_secret);
        // HMAC accepts keys of any length, so `new_varkey` cannot fail here.
        let mut mac = HmacSha256::new_varkey(self.api_secret.as_bytes()).unwrap();
        mac.update(str_to_be_signed.as_bytes());
        hex::encode(mac.finalize().into_bytes())
    }

    /// Sends a signed request and returns the parsed JSON response body.
    ///
    /// A `timestamp` and a `signature` field are added to a copy of
    /// `api_params` before sending. Only `"POST"` is supported; any other
    /// `api_method` is a programming error and panics. Non-2xx responses and
    /// transport/parse failures are returned as `Err`.
    async fn call_api(
        &self,
        api_method: &str,
        api_url: &str,
        api_params: &BTreeMap<String, String>,
    ) -> Result<serde_json::Value, Box<dyn Error>> {
        // TODO: Find how to add log to actix_web
        println!("============================================================");
        println!("API path: {}, api_params: {:?}", api_url, api_params);
        let mut signed_params = api_params.clone();
        // Insert the required timestamp before computing the signature.
        signed_params.insert(
            String::from("timestamp"),
            Utc::now().timestamp_millis().to_string(),
        );
        let signature = self.signature(&signed_params);
        signed_params.insert(String::from("signature"), signature);
        let resp = match api_method {
            "POST" => {
                self.client
                    .post(api_url)
                    .header("APIKEY", self.api_key.as_str())
                    .json(&signed_params)
                    .send()
                    .await?
            }
            _ => panic!("Unknown api_method: {}", api_method),
        };
        match resp.status() {
            StatusCode::OK => {
                let raw_content = resp.text().await?;
                let json_content: Value = serde_json::from_str(raw_content.as_str())?;
                println!("Response: {:?}", raw_content);
                println!("============================================================");
                Ok(json_content)
            }
            // Previously a panic; surface non-OK responses as an error so the
            // caller decides how to handle them. (All in-crate callers used
            // to panic on `Err` anyway, so behaviour at the public API level
            // is unchanged.)
            status => Err(format!("Got error response ({}): {:?}", status, resp).into()),
        }
    }

    /// POST /data/liquidation — contract liquidation order list.
    ///
    /// Required params: `exchCode` and `type` (panics when absent). Defaults
    /// are filled in for `coinName` ("BTC"), `pageNum` ("1") and `pageSize`
    /// ("10"). Returns the `data` field of the response as a JSON string.
    pub async fn contract_liquidation_order_list(
        &self,
        invoke_params: BTreeMap<String, String>,
    ) -> String {
        let api_path = String::from("/data/liquidation");
        // `invoke_params` is owned, so no clone is needed.
        let mut api_params = invoke_params;
        // Validate required params are present.
        if !api_params.contains_key("exchCode") || !api_params.contains_key("type") {
            panic!("exchCode or type are required");
        }
        // Fill in defaults without overwriting caller-supplied values.
        api_params
            .entry(String::from("coinName"))
            .or_insert_with(|| String::from("BTC"));
        api_params
            .entry(String::from("pageNum"))
            .or_insert_with(|| String::from("1"));
        api_params
            .entry(String::from("pageSize"))
            .or_insert_with(|| String::from("10"));
        let full_api_endpoint = format!("{}{}", self.base, api_path);
        let resp_content = match self.call_api("POST", &full_api_endpoint, &api_params).await {
            Ok(r) => r,
            Err(e) => panic!("Call API error: {:?}", e),
        };
        resp_content["data"].to_string()
    }

    /// POST /data/getContractRate/XBTUSD — BitMEX perpetual contract rate.
    /// Returns the `data` field of the response as a JSON string.
    pub async fn contract_bitmex_perpetual_contract_rate(&self) -> String {
        let api_path = String::from("/data/getContractRate/XBTUSD");
        let full_api_endpoint = format!("{}{}", self.base, api_path);
        let api_params: BTreeMap<String, String> = BTreeMap::new();
        let resp_content = match self.call_api("POST", &full_api_endpoint, &api_params).await {
            Ok(r) => r,
            Err(e) => panic!("Call API error: {:?}", e),
        };
        resp_content["data"].to_string()
    }

    /// POST /data/largeDeal — BitMEX large order list.
    /// Returns the `data` field of the response as a JSON string.
    pub async fn contract_bitmex_large_order_list(&self) -> String {
        let api_path = String::from("/data/largeDeal");
        let full_api_endpoint = format!("{}{}", self.base, api_path);
        let api_params: BTreeMap<String, String> = BTreeMap::new();
        let resp_content = match self.call_api("POST", &full_api_endpoint, &api_params).await {
            Ok(r) => r,
            Err(e) => panic!("Call API error: {:?}", e),
        };
        resp_content["data"].to_string()
    }

    /// POST /data/getBitfinexPositionRatio/minute — Bitfinex holdings by
    /// minute. Returns the `data` field of the response as a JSON string.
    pub async fn contract_bitfinex_holdings_minutes(&self) -> String {
        let api_path = String::from("/data/getBitfinexPositionRatio/minute");
        let full_api_endpoint = format!("{}{}", self.base, api_path);
        let api_params: BTreeMap<String, String> = BTreeMap::new();
        let resp_content = match self.call_api("POST", &full_api_endpoint, &api_params).await {
            Ok(r) => r,
            Err(e) => panic!("Call API error: {:?}", e),
        };
        resp_content["data"].to_string()
    }
}
#[cfg(test)]
mod tests {
    // Note this useful idiom: importing names from outer (for mod tests) scope.
    use super::*;

    /// Known-answer signature test taken from the API documentation
    /// (2020-11-11): https://docs-api.kylin.network/#example
    #[test]
    fn signature() {
        pretty_env_logger::init();
        let api = KylinNetworkAPI::new(
            String::from(""),
            String::from("CHK5kxIQtd4WWkK8th8mBwctKF55vIEBztJ7KMnI6oniR9Rhlb1JB2WyWOhLG2GQ"),
        );
        let mut api_params: BTreeMap<String, String> = BTreeMap::new();
        for &(k, v) in &[
            ("coinName", "BTC"),
            ("timestamp", "1603271977470"),
            ("exchCode", "okex"),
            ("pageSize", "10"),
            ("pageNum", "1"),
            ("futureType", "0"),
            ("type", "0"),
        ] {
            api_params.insert(k.to_string(), v.to_string());
        }
        assert_eq!(
            api.signature(&api_params),
            "7e22854ab87ee121ec5d0675f5f0b9a4c74135f021d60771c37006c60e0674c6"
        );
    }
}
|
use shorthand::ShortHand;
// NOTE(review): this looks like a test case for the `ShortHand` derive —
// `copy` is enabled on a `String` field, and `String` is not `Copy`, so this
// presumably exercises the derive's error reporting. Confirm against the
// crate's test harness before changing it.
#[derive(ShortHand)]
pub struct Command {
    #[shorthand(enable(copy))]
    value: String,
}
fn main() {}
|
// svd2rust-generated reader/writer type aliases for the HASH `STR` (start)
// register and its NBLW / DCAL fields. Do not hand-edit; regenerate instead.
#[doc = "Register `STR` reader"]
pub type R = crate::R<STR_SPEC>;
#[doc = "Register `STR` writer"]
pub type W = crate::W<STR_SPEC>;
#[doc = "Field `NBLW` reader - Number of valid bits in the last word When the last word of the message bit string is written to HASH_DIN register, the hash processor takes only the valid bits, specified as below, after internal data swapping: ... The above mechanism is valid only if DCAL = 0. If NBLW bits are written while DCAL is set to 1, the NBLW bitfield remains unchanged. In other words it is not possible to configure NBLW and set DCAL at the same time. Reading NBLW bits returns the last value written to NBLW."]
pub type NBLW_R = crate::FieldReader;
#[doc = "Field `NBLW` writer - Number of valid bits in the last word When the last word of the message bit string is written to HASH_DIN register, the hash processor takes only the valid bits, specified as below, after internal data swapping: ... The above mechanism is valid only if DCAL = 0. If NBLW bits are written while DCAL is set to 1, the NBLW bitfield remains unchanged. In other words it is not possible to configure NBLW and set DCAL at the same time. Reading NBLW bits returns the last value written to NBLW."]
pub type NBLW_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
#[doc = "Field `DCAL` reader - Digest calculation Writing this bit to 1 starts the message padding using the previously written value of NBLW, and starts the calculation of the final message digest with all the data words written to the input FIFO since the INIT bit was last written to 1. Reading this bit returns 0."]
pub type DCAL_R = crate::BitReader;
#[doc = "Field `DCAL` writer - Digest calculation Writing this bit to 1 starts the message padding using the previously written value of NBLW, and starts the calculation of the final message digest with all the data words written to the input FIFO since the INIT bit was last written to 1. Reading this bit returns 0."]
pub type DCAL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Generated field accessors for reading the STR register.
impl R {
    #[doc = "Bits 0:4 - Number of valid bits in the last word When the last word of the message bit string is written to HASH_DIN register, the hash processor takes only the valid bits, specified as below, after internal data swapping: ... The above mechanism is valid only if DCAL = 0. If NBLW bits are written while DCAL is set to 1, the NBLW bitfield remains unchanged. In other words it is not possible to configure NBLW and set DCAL at the same time. Reading NBLW bits returns the last value written to NBLW."]
    #[inline(always)]
    pub fn nblw(&self) -> NBLW_R {
        // NBLW occupies the low 5 bits of the register value.
        NBLW_R::new((self.bits & 0x1f) as u8)
    }
    #[doc = "Bit 8 - Digest calculation Writing this bit to 1 starts the message padding using the previously written value of NBLW, and starts the calculation of the final message digest with all the data words written to the input FIFO since the INIT bit was last written to 1. Reading this bit returns 0."]
    #[inline(always)]
    pub fn dcal(&self) -> DCAL_R {
        // DCAL is the single flag at bit 8.
        DCAL_R::new(((self.bits >> 8) & 1) != 0)
    }
}
// Generated field accessors for writing the STR register.
impl W {
    #[doc = "Bits 0:4 - Number of valid bits in the last word When the last word of the message bit string is written to HASH_DIN register, the hash processor takes only the valid bits, specified as below, after internal data swapping: ... The above mechanism is valid only if DCAL = 0. If NBLW bits are written while DCAL is set to 1, the NBLW bitfield remains unchanged. In other words it is not possible to configure NBLW and set DCAL at the same time. Reading NBLW bits returns the last value written to NBLW."]
    #[inline(always)]
    #[must_use]
    pub fn nblw(&mut self) -> NBLW_W<STR_SPEC, 0> {
        NBLW_W::new(self)
    }
    #[doc = "Bit 8 - Digest calculation Writing this bit to 1 starts the message padding using the previously written value of NBLW, and starts the calculation of the final message digest with all the data words written to the input FIFO since the INIT bit was last written to 1. Reading this bit returns 0."]
    #[inline(always)]
    #[must_use]
    pub fn dcal(&mut self) -> DCAL_W<STR_SPEC, 8> {
        DCAL_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Unsafe per the generated register API: the caller is responsible
        // for writing a bit pattern that is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "HASH start register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`str::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`str::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type describing the STR register layout for the generic register API.
pub struct STR_SPEC;
impl crate::RegisterSpec for STR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`str::R`](R) reader structure"]
impl crate::Readable for STR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`str::W`](W) writer structure"]
impl crate::Writable for STR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets STR to value 0"]
impl crate::Resettable for STR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
pub struct TabBar; // TODO
|
use std::convert::TryFrom;
use crate::generated::pahkat as pahkat_fbs;
use types::DependencyKey;
/// Convenience accessors for the localised maps of a flatbuffers descriptor.
pub(crate) trait DescriptorExt {
    // Localised display names, keyed by language tag — `None` if absent.
    fn name(&self) -> Option<Map<'_, &'_ str, &'_ str>>;
    // Localised descriptions, keyed by language tag — `None` if absent.
    fn description(&self) -> Option<Map<'_, &'_ str, &'_ str>>;
}
/// Convenience accessor for the dependency map of a flatbuffers target.
pub(crate) trait TargetExt {
    // Dependency key → version-requirement string — `None` if absent.
    fn dependencies(&self) -> Option<Map<'_, &'_ str, &'_ str>>;
}
/// Convenience accessor for the package index of a flatbuffers repository.
// NOTE(review): the generic `B` is not used in the method signature; it
// presumably mirrors the buffer parameter of the implementing type — confirm
// before simplifying the trait.
pub(crate) trait PackagesExt<B: AsRef<[u8]>> {
    // Package id → descriptor map — `None` if absent.
    fn packages(&self) -> Option<Map<'_, &'_ str, pahkat_fbs::Descriptor<&'_ [u8]>>>;
}
impl PackagesExt<&'_ [u8]> for pahkat_fbs::Packages<&'_ [u8]> {
    /// Returns the id → descriptor map, or `None` when either parallel
    /// vector is absent or fails to decode.
    fn packages(&self) -> Option<Map<'_, &'_ str, pahkat_fbs::Descriptor<&'_ [u8]>>> {
        let keys = self.packages_keys().ok()??;
        let values = self.packages_values().ok()??;
        Some(Map::new(keys, values))
    }
}
impl<B: AsRef<[u8]>> DescriptorExt for pahkat_fbs::Descriptor<B> {
    /// Returns the localised name map, or `None` when either parallel
    /// vector is absent or fails to decode.
    fn name(&self) -> Option<Map<'_, &'_ str, &'_ str>> {
        let keys = self.name_keys().ok()??;
        let values = self.name_values().ok()??;
        Some(Map::new(keys, values))
    }
    /// Returns the localised description map, or `None` when either parallel
    /// vector is absent or fails to decode.
    fn description(&self) -> Option<Map<'_, &'_ str, &'_ str>> {
        let keys = self.description_keys().ok()??;
        let values = self.description_values().ok()??;
        Some(Map::new(keys, values))
    }
}
impl<B: AsRef<[u8]>> TargetExt for pahkat_fbs::Target<B> {
    /// Returns the dependency map, or `None` when either parallel vector is
    /// absent or fails to decode.
    fn dependencies(&self) -> Option<Map<'_, &'_ str, &'_ str>> {
        let keys = self.dependencies_keys().ok()??;
        let values = self.dependencies_values().ok()??;
        Some(Map::new(keys, values))
    }
}
/// Converts a flatbuffers `Target` into the owned `pahkat_types` payload
/// target. Decode failures surface as `fbs::Error` via `?`.
///
/// NOTE(review): URL parse failures and absent payload size fields panic via
/// `unwrap()` — the flatbuffer is presumably produced by a trusted generator;
/// confirm before feeding untrusted data through here.
fn build_target<B: AsRef<[u8]>>(
    t: &pahkat_fbs::Target<B>,
) -> Result<pahkat_types::payload::Target, fbs::Error> {
    let platform = t.platform()?.to_string();
    let arch = t.arch()?.map(str::to_string);
    // Collect the optional dependency map into an owned BTreeMap (empty when
    // the target declares no dependencies).
    let dependencies = t
        .dependencies()
        .map(|x| {
            let mut out = std::collections::BTreeMap::new();
            for (k, v) in x.iter() {
                out.insert(DependencyKey::from(k), v.to_string());
            }
            out
        })
        .unwrap_or_default();
    // Each payload variant is rebuilt through its typed builder.
    let payload = match t.payload()? {
        pahkat_fbs::Payload::WindowsExecutable(x) => {
            pahkat_types::payload::Payload::WindowsExecutable(
                pahkat_types::payload::windows::Executable::builder()
                    .url(x.url()?.parse::<url::Url>().unwrap())
                    .product_code(x.product_code()?.to_string())
                    .kind(match x.kind()?.unwrap() {
                        pahkat_fbs::WindowsExecutableKind::NONE => None,
                        x => Some(
                            pahkat_fbs::enum_name_windows_executable_kind(x)
                                .to_lowercase()
                                .to_string(),
                        ),
                    })
                    .size(x.size()?.unwrap())
                    .installed_size(x.installed_size()?.unwrap())
                    .build(),
            )
        }
        pahkat_fbs::Payload::MacOSPackage(x) => pahkat_types::payload::Payload::MacOSPackage(
            pahkat_types::payload::macos::Package::builder()
                .url(x.url()?.parse::<url::Url>().unwrap())
                .pkg_id(x.pkg_id()?.to_string())
                .size(x.size()?.unwrap())
                .installed_size(x.installed_size()?.unwrap())
                .build(),
        ),
        pahkat_fbs::Payload::TarballPackage(x) => pahkat_types::payload::Payload::TarballPackage(
            pahkat_types::payload::tarball::Package::builder()
                .url(x.url()?.parse::<url::Url>().unwrap())
                .size(x.size()?.unwrap())
                .installed_size(x.installed_size()?.unwrap())
                .build(),
        ),
    };
    Ok(pahkat_types::payload::Target::builder()
        .platform(platform)
        .arch(arch)
        .dependencies(dependencies)
        .payload(payload)
        .build())
}
impl<'a> TryFrom<&'a pahkat_fbs::Descriptor<&'a [u8]>> for pahkat_types::package::Descriptor {
type Error = fbs::Error;
fn try_from(pkg: &'a pahkat_fbs::Descriptor<&'a [u8]>) -> Result<Self, Self::Error> {
use std::collections::BTreeMap;
let descriptor = pahkat_types::package::Descriptor::builder()
.package(
pahkat_types::package::DescriptorData::builder()
.id(pkg.id()?.into())
.tags(
pkg.tags()?
.map(|tags| tags.iter().map(|x| x.unwrap_or("").to_string()).collect())
.unwrap_or(vec![]),
)
.build(),
)
.name(
pkg.name()
.map(|x| {
let mut out = BTreeMap::new();
for (k, v) in x.iter() {
out.insert(k.to_string(), v.to_string());
}
out
})
.unwrap_or_else(|| Default::default()),
)
.description(
pkg.description()
.map(|x| {
let mut out = BTreeMap::new();
for (k, v) in x.iter() {
out.insert(k.to_string(), v.to_string());
}
out
})
.unwrap_or_else(|| Default::default()),
)
.release(
pkg.release()?
.unwrap()
.iter()
.filter_map(Result::ok)
.map(|x| {
let release = pahkat_types::package::Release::builder()
.version(
pahkat_types::package::version::Version::new(x.version()?).unwrap(),
)
.channel(x.channel()?.map(|x| x.to_string()))
.target(
x.target()?
.unwrap()
.iter()
.filter_map(Result::ok)
.map(|t| build_target(&t))
.collect::<Result<Vec<_>, _>>()?,
)
.build();
Ok(release)
})
.collect::<Result<Vec<_>, _>>()?,
)
.build();
Ok(descriptor)
}
}
/// A read-only map view over two parallel flatbuffers vectors, where
/// `keys[i]` corresponds to `values[i]`. Lookup is a linear scan of `keys`.
pub struct Map<'a, K, V> {
    keys: fbs::Vector<'a, fbs::ForwardsUOffset<K>>,
    values: fbs::Vector<'a, fbs::ForwardsUOffset<V>>,
    // Cached element count of `keys`.
    len: usize,
}
impl<'a, K, V> From<Map<'a, K, V>>
    for std::collections::BTreeMap<
        <<fbs::ForwardsUOffset<K> as fbs::Follow<'a>>::Inner as ToOwned>::Owned,
        <<fbs::ForwardsUOffset<V> as fbs::Follow<'a>>::Inner as ToOwned>::Owned,
    >
where
    K: PartialEq,
    fbs::ForwardsUOffset<K>: fbs::Follow<'a>,
    fbs::ForwardsUOffset<V>: fbs::Follow<'a>,
    <fbs::ForwardsUOffset<K> as fbs::Follow<'a>>::Inner: PartialEq + ToOwned,
    <<fbs::ForwardsUOffset<K> as fbs::Follow<'a>>::Inner as ToOwned>::Owned: PartialEq + Ord,
    <fbs::ForwardsUOffset<V> as fbs::Follow<'a>>::Inner: ToOwned,
{
    /// Collects every key/value pair into an owned `BTreeMap`.
    fn from(
        value: Map<'a, K, V>,
    ) -> std::collections::BTreeMap<
        <<fbs::ForwardsUOffset<K> as fbs::Follow<'a>>::Inner as ToOwned>::Owned,
        <<fbs::ForwardsUOffset<V> as fbs::Follow<'a>>::Inner as ToOwned>::Owned,
    > {
        value
            .iter()
            .map(|(k, v)| (k.to_owned(), v.to_owned()))
            .collect()
    }
}
impl<'a, K, V> Map<'a, K, V>
where
    K: PartialEq,
    fbs::ForwardsUOffset<K>: fbs::Follow<'a>,
    fbs::ForwardsUOffset<V>: fbs::Follow<'a>,
    <fbs::ForwardsUOffset<K> as fbs::Follow<'a>>::Inner: PartialEq,
{
    /// Builds a map view over two parallel vectors, caching the key count.
    // NOTE(review): this fbs fork appears to return `Result` from `len()` and
    // `get()`; a failed `len()` is treated as an empty map here — confirm
    // failures can only be decode errors.
    #[inline]
    fn new(
        keys: fbs::Vector<'a, fbs::ForwardsUOffset<K>>,
        values: fbs::Vector<'a, fbs::ForwardsUOffset<V>>,
    ) -> Map<'a, K, V> {
        Map {
            keys,
            values,
            len: keys.len().unwrap_or(0),
        }
    }
    /// Iterates over `(key, value)` pairs, silently skipping entries that
    /// fail to decode on either side.
    #[inline]
    pub fn iter(
        &self,
    ) -> impl Iterator<
        Item = (
            <fbs::ForwardsUOffset<K> as fbs::Follow<'a>>::Inner,
            <fbs::ForwardsUOffset<V> as fbs::Follow<'a>>::Inner,
        ),
    > {
        self.keys
            .iter()
            .filter_map(Result::ok)
            .zip(self.values.iter().filter_map(Result::ok))
    }
    /// Linear-scans the keys for `key` and returns the value at the same
    /// index, if any.
    #[inline]
    pub fn get(
        &self,
        key: <fbs::ForwardsUOffset<K> as fbs::Follow<'a>>::Inner,
    ) -> Option<<fbs::ForwardsUOffset<V> as fbs::Follow<'a>>::Inner> {
        self.keys
            .iter()
            .filter_map(Result::ok)
            .position(|x| x == key)
            .map(|i| self.values.get(i).unwrap())
    }
    /// Iterates over the keys, skipping entries that fail to decode.
    #[inline]
    pub fn keys(
        &self,
    ) -> impl Iterator<Item = <fbs::ForwardsUOffset<K> as fbs::Follow<'a>>::Inner> {
        self.keys.iter().filter_map(Result::ok)
    }
    /// Returns the key at `index`, or `None` when out of bounds.
    #[inline]
    pub fn key(&self, index: usize) -> Option<<fbs::ForwardsUOffset<K> as fbs::Follow<'a>>::Inner> {
        if index >= self.len {
            None
        } else {
            Some(self.keys.get(index).unwrap())
        }
    }
    /// Returns the value at `index`, or `None` when out of bounds.
    #[inline]
    pub fn value(
        &self,
        index: usize,
    ) -> Option<<fbs::ForwardsUOffset<V> as fbs::Follow<'a>>::Inner> {
        if index >= self.len {
            None
        } else {
            Some(self.values.get(index).unwrap())
        }
    }
}
|
// Copyright 2015-2016 Joe Neeman.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use dfa::Dfa;
use error::Error;
use itertools::Itertools;
use look::Look;
use nfa::{Accept, Nfa, NoLooks, State, StateIdx, StateSet};
use num_traits::PrimInt;
use range_map::{Range, RangeMap, RangeMultiMap};
use std::{char, u8, usize};
use std::cmp::max;
use std::collections::{HashMap, HashSet};
use std::fmt::Debug;
use std::marker::PhantomData;
use std::mem::swap;
use utf8_ranges::{Utf8Range, Utf8Sequence, Utf8Sequences};
// This provides a more compact way of representing UTF-8 sequences.
//
// A sequence of bytes belongs to this set if its first byte is in `head[0]`, its second byte is
// in `head[1]`, etc., and its last byte belongs to one of the ranges in `last_byte`.
//
// This representation is handy for making NFAs because compared to the representation in
// `Utf8Sequences`, it adds many fewer states. Basically, we are doing some crude minimization
// before creating the states.
struct MergedUtf8Sequences {
    pub head: Vec<Utf8Range>,      // per-byte ranges for all but the last byte
    pub last_byte: Vec<Utf8Range>, // alternative ranges for the final byte
}
// Returns this range as a pair of chars, or none if this is an empty range.
fn to_char_pair(r: Range<u32>) -> Option<(char, char)> {
    // Surrogate code points (0xD800..=0xDFFF) are not valid `char`s, so move
    // a start inside the gap up past it, and an end inside the gap down below
    // it; the end is also clamped to the last Unicode code point.
    let start = if r.start >= 0xD800 && r.start <= 0xDFFF {
        0xE000
    } else {
        r.start
    };
    let end = if r.end > 0x10FFFF {
        0x10FFFF
    } else if r.end >= 0xD800 && r.end <= 0xDFFF {
        0xD7FF
    } else {
        r.end
    };
    if start > end {
        return None;
    }
    // Both endpoints are now guaranteed-valid scalar values.
    Some((char::from_u32(start).unwrap(), char::from_u32(end).unwrap()))
}
impl MergedUtf8Sequences {
    // Merges sequences sharing the same leading bytes into one entry whose
    // `last_byte` holds all the final-byte alternatives.
    // Panics if not all the input sequences have the same leading byte ranges.
    fn merge<I>(iter: I) -> MergedUtf8Sequences where I: Iterator<Item=Utf8Sequence> {
        let mut head = Vec::new();
        let mut last_byte = Vec::new();
        for seq in iter {
            let len = seq.len();
            // All bytes except the final one must match the first sequence.
            let h = &seq.as_slice()[..len-1];
            if head.is_empty() {
                head.extend_from_slice(h);
            } else if &head[..] != h {
                panic!("invalid sequences to merge");
            }
            last_byte.push(seq.as_slice()[len-1]);
        }
        MergedUtf8Sequences {
            head: head,
            last_byte: last_byte,
        }
    }
    // Groups consecutive sequences by their leading bytes and merges each
    // group. Assumes `Utf8Sequences` yields groupable sequences adjacently.
    fn from_sequences<'a, I>(iter: I) -> Box<Iterator<Item=MergedUtf8Sequences> + 'a>
    where I: Iterator<Item=Utf8Sequence> + 'a {
        // Key function: everything except the final byte.
        fn head(u: &Utf8Sequence) -> Vec<Utf8Range> {
            let len = u.len();
            u.as_slice()[..len-1].to_owned()
        }
        Box::new(iter
            .group_by(head)
            .into_iter()
            .map(|(_, seqs)| MergedUtf8Sequences::merge(seqs.into_iter())))
    }
    // Expands code point ranges to UTF-8 byte sequences (dropping empty or
    // surrogate-only ranges via `to_char_pair`) and merges them.
    fn from_ranges<'a, I>(iter: I) -> Box<Iterator<Item=MergedUtf8Sequences> + 'a>
    where I: Iterator<Item=Range<u32>> + 'a {
        MergedUtf8Sequences::from_sequences(
            iter.filter_map(to_char_pair)
                .flat_map(|r| Utf8Sequences::new(r.0, r.1)))
    }
    // Total encoded length in bytes: the shared head plus the final byte.
    fn num_bytes(&self) -> u8 {
        (self.head.len() + 1) as u8
    }
}
// Creates a byte-based Dfa that matches all the chars in `look.as_set()`.
fn make_char_dfa(look: Look) -> Dfa<(Look, u8)> {
    // Two-state NFA: state 0 consumes one char from the look's set and moves
    // to the accepting look-ahead state 1.
    let mut nfa: Nfa<u32, NoLooks> = Nfa::with_capacity(2);
    nfa.add_state(Accept::Never);
    nfa.add_look_ahead_state(look, 1, 0);
    // TODO: shouldn't adding both Full and Boundary be redundant?
    nfa.init.push((Look::Full, 0));
    nfa.init.push((Look::Boundary, 0));
    nfa.states[0].consuming
        = RangeMultiMap::from_vec(look.as_set().ranges().map(|x| (x, 1)).collect());
    // These unwraps are OK because the only failures are caused by having too many states.
    nfa.byte_me(usize::MAX).unwrap()
        .determinize(usize::MAX).unwrap()
        .optimize()
}
// Creates a byte-based Dfa that matches backwards all the chars in `look.as_set()`.
fn make_rev_char_dfa(look: Look) -> Dfa<(Look, u8)> {
    let mut nfa: Nfa<u8, NoLooks> = Nfa::with_capacity(0); // TODO: better capacity
    nfa.add_state(Accept::Never);
    nfa.init.push((Look::Full, 0));
    nfa.init.push((Look::Boundary, 0));
    // This is more-or-less C&P from add_utf8_sequence.
    // Each merged UTF-8 sequence becomes a chain of states consumed in
    // reverse byte order: last byte first (out of state 0), then the head
    // bytes from last to first; the chain's final state accepts.
    for seq in MergedUtf8Sequences::from_ranges(look.as_set().ranges()) {
        let mut last_state = nfa.add_state(Accept::Never);
        for range in &seq.last_byte {
            nfa.add_transition(0, last_state, Range::new(range.start, range.end));
        }
        for range in seq.head.iter().rev() {
            let cur_state = nfa.add_state(Accept::Never);
            nfa.add_transition(last_state, cur_state, Range::new(range.start, range.end));
            last_state = cur_state;
        }
        // Accepting state records the look and how many bytes were consumed.
        nfa.states[last_state].accept = Accept::Always;
        nfa.states[last_state].accept_look = look;
        nfa.states[last_state].accept_state = 0;
        nfa.states[last_state].accept_tokens = seq.num_bytes();
    }
    // This unwrap is OK because the only failures are caused by having too many states.
    nfa.determinize(usize::MAX).unwrap()
        .optimize()
}
// We cache optimized Dfas for the expensive looks. See `Nfa<u8, NoLooks>::add_min_utf8_sequences`
// for an explanation. Each static is built lazily on first access.
lazy_static! {
    static ref WORD_CHAR_DFA: Dfa<(Look, u8)> = make_char_dfa(Look::WordChar);
    static ref NOT_WORD_CHAR_DFA: Dfa<(Look, u8)> = make_char_dfa(Look::NotWordChar);
    static ref REV_WORD_CHAR_DFA: Dfa<(Look, u8)> = make_rev_char_dfa(Look::WordChar);
    static ref REV_NOT_WORD_CHAR_DFA: Dfa<(Look, u8)> = make_rev_char_dfa(Look::NotWordChar);
}
impl<Tok: Debug + PrimInt> Nfa<Tok, NoLooks> {
    // Returns the set of all states that can be reached from some initial state.
    fn reachable_from<I>(&self, states: I) -> HashSet<StateIdx> where I: Iterator<Item=StateIdx> {
        // Breadth-first search over consuming transitions: `active` is the
        // current frontier, `next_active` the frontier being built, `ret`
        // everything visited so far.
        let mut active: HashSet<StateIdx> = states.collect();
        let mut next_active: HashSet<StateIdx> = HashSet::new();
        let mut ret = active.clone();
        while !active.is_empty() {
            for &s in &active {
                for &(_, t) in self.states[s].consuming.ranges_values() {
                    if !ret.contains(&t) {
                        ret.insert(t);
                        next_active.insert(t);
                    }
                }
            }
            swap(&mut active, &mut next_active);
            next_active.clear();
        }
        ret
    }
    // Reverses this Nfa, but only the transitions (i.e. doesn't do anything about initial and
    // final states).
    fn reversed_simple(&self) -> Nfa<Tok, NoLooks> {
        let rev_transitions = self.reversed_transitions();
        let mut ret: Nfa<Tok, NoLooks> = Nfa::with_capacity(self.states.len());
        // All states are created as non-accepting; only `consuming` is filled.
        for trans in rev_transitions {
            let idx = ret.add_state(Accept::Never);
            ret.states[idx].consuming = trans;
        }
        ret
    }
    // Returns the set of all states that can be reached from an initial state and that can reach
    // some accepting state.
    fn reachable_states(&self) -> HashSet<StateIdx> {
        let init_states = self.init.iter().map(|pair| pair.1);
        let final_states = self.states.iter().enumerate()
            .filter(|&(_, state)| state.accept != Accept::Never)
            .map(|(idx, _)| idx);
        // Forward reachability from the initial states, intersected with
        // backward reachability (forward in the reversed Nfa) from the
        // accepting states.
        let forward = self.reachable_from(init_states);
        let backward = self.reversed_simple().reachable_from(final_states);
        forward.intersection(&backward).cloned().collect()
    }
    /// Optimizes this Nfa by removing all states that cannot be reached from an initial state
    /// and all states that cannot lead to an accepting state.
    pub fn trim_unreachable(&mut self) {
        let reachable = self.reachable_states();
        let mut old_states = Vec::new();
        swap(&mut self.states, &mut old_states);
        // Keep only reachable states, remembering old index -> new index so
        // that every surviving transition can be remapped afterwards.
        let mut old_to_new = vec![None; old_states.len()];
        let (new_to_old, new_states): (Vec<_>, Vec<State<Tok>>) = old_states.into_iter()
            .enumerate()
            .filter(|&(i, _)| reachable.contains(&i))
            .unzip();
        self.states = new_states;
        for (new, &old) in new_to_old.iter().enumerate() {
            old_to_new[old] = Some(new);
        }
        self.map_states(|s| old_to_new[s]);
    }
    // Returns an `Accept` that will accept whenever anything in `states` would accept.
    // Relies on `Accept`'s ordering: `max` picks the strongest accept kind.
    fn accept_union(&self, states: &StateSet) -> Accept {
        states.iter().map(|s| self.states[*s].accept).max().unwrap_or(Accept::Never)
    }
}
impl Nfa<u32, NoLooks> {
    /// Converts this `Nfa` into one that consumes the input byte-by-byte.
    ///
    /// Transitions over `u32` values are expanded into chains of byte-range transitions
    /// (UTF-8 sequences). Returns `Error::TooManyStates` if the expansion would exceed
    /// `max_states`.
    pub fn byte_me(self, max_states: usize) -> ::Result<Nfa<u8, NoLooks>> {
        let mut ret = Nfa::<u8, NoLooks> {
            // Copy each state's accept metadata but start with empty transitions;
            // the byte-level transitions are filled in below.
            states: self.states.iter().map(|s| State {
                accept: s.accept,
                accept_look: s.accept_look,
                accept_state: s.accept_state,
                accept_tokens: s.accept_tokens,
                consuming: RangeMultiMap::new(),
                looking: Vec::new(),
            }).collect(),
            init: self.init,
            phantom: PhantomData,
        };
        for (i, state) in self.states.into_iter().enumerate() {
            // Group transitions by the target state, and add them in batches. Most of the time, we
            // can merge a bunch of Utf8Sequences before adding them, which saves a bunch of
            // states.
            for (tgt, transitions) in state.consuming.ranges_values().group_by(|x| x.1) {
                try!(ret.add_utf8_sequences(i, transitions.into_iter().map(|x| x.0), tgt, max_states));
            }
        }
        Ok(ret)
    }
}
impl Nfa<u8, NoLooks> {
    /// Converts this `Nfa` into a `Dfa`, resolving ambiguity in transition (priority) order.
    pub fn determinize(&self, max_states: usize) -> ::Result<Dfa<(Look, u8)>> {
        Determinizer::determinize(self, max_states, MatchChoice::TransitionOrder, self.init.clone())
    }

    /// Converts this `Nfa` into a `Dfa`.
    ///
    /// Whenever this `Nfa` matches some text, the `Dfa` also will. But if this `Nfa` has multiple
    /// possible endpoints for a match then the returned `Dfa` is only guaranteed to match the
    /// longest one.
    pub fn determinize_longest(&self, max_states: usize) -> ::Result<Dfa<(Look, u8)>> {
        Determinizer::determinize(self, max_states, MatchChoice::LongestMatch, self.init.clone())
    }

    /// Returns the reversal of this `Nfa`.
    ///
    /// If `self` matches some string of bytes, then the return value of this method will match
    /// the same strings of bytes reversed.
    ///
    /// Note that this loses information about match priorities.
    pub fn reverse(&self, max_states: usize) -> ::Result<Nfa<u8, NoLooks>> {
        let mut ret = self.reversed_simple();
        // Turn our initial states into ret's accepting states.
        for &(look, i) in &self.init {
            match look {
                Look::Full => {
                    ret.states[i].accept = Accept::Always;
                    ret.states[i].accept_look = Look::Full;
                },
                Look::Boundary => {
                    // Reversed, "start of input" becomes "end of input".
                    ret.states[i].accept = max(ret.states[i].accept, Accept::AtEoi);
                    ret.states[i].accept_look = max(ret.states[i].accept_look, Look::Boundary);
                },
                Look::NewLine => {
                    // Reversed, "after a newline" becomes "accept after reading '\n'",
                    // and the start of input additionally becomes accept-at-EOI.
                    let accept_state = ret.add_look_ahead_state(Look::NewLine, 1, i);
                    ret.add_transition(i, accept_state, Range::new(b'\n', b'\n'));
                    ret.states[i].accept = max(ret.states[i].accept, Accept::AtEoi);
                    ret.states[i].accept_look = max(ret.states[i].accept_look, Look::Boundary);
                },
                Look::WordChar | Look::NotWordChar => {
                    // It would make more sense to put this outside the loop, but having it inside
                    // prevents a deadlock: constructing REV_*_DFA ends up calling reverse(), but
                    // with no look-ahead so it never gets inside this loop.
                    let dfa: &Dfa<_> = if look == Look::WordChar {
                        &REV_WORD_CHAR_DFA
                    } else {
                        // The NotWordChar case also accepts at EOI — presumably because
                        // "no character at all" satisfies a non-word-char look; TODO confirm.
                        ret.states[i].accept = max(ret.states[i].accept, Accept::AtEoi);
                        ret.states[i].accept_look = max(ret.states[i].accept_look, Look::Boundary);
                        &REV_NOT_WORD_CHAR_DFA
                    };
                    let accept_state = ret.add_look_ahead_state(look, 1, i);
                    try!(ret.add_min_utf8_sequences(i, dfa, accept_state, max_states));
                },
                Look::Empty => {
                    panic!("Empty cannot be an init look");
                },
            }
        }
        // Turn our accepting states into ret's initial states.
        ret.init.clear();
        for st in &self.states {
            if st.accept != Accept::Never {
                ret.init.push((st.accept_look, st.accept_state));
            }
        }
        Ok(ret)
    }

    /// Can we accept immediately if the beginning of the input matches `look`?
    fn init_accept(&self, look: Look) -> Accept {
        let set = self.init.iter()
            .filter(|pair| look <= pair.0)
            .map(|pair| pair.1)
            .collect::<Vec<_>>();
        self.accept_union(&set)
    }

    /// This essentially modifies `self` by adding a `^.*` at the beginning.
    ///
    /// The result is actually a little bit different, because `.` matches a whole code point,
    /// whereas the `^.*` that we add works at the byte level.
    pub fn anchor(mut self, max_states: usize) -> ::Result<Nfa<u8, NoLooks>> {
        // `loop_state` plays the role of the `.*` loop; `init_state` is the single new
        // entry point (reachable only at the start of input).
        let loop_accept = self.init_accept(Look::Full);
        let loop_state = self.add_state(loop_accept);
        let init_accept = self.init_accept(Look::Boundary);
        let init_state = self.add_state(init_accept);
        // Swap out init so that we can iterate over it while modifying `self`.
        let mut init = Vec::new();
        swap(&mut init, &mut self.init);
        for &(look, st_idx) in &init {
            if look.allows_eoi() {
                // TODO: shouldn't need to clone here.
                for &(range, target) in self.states[st_idx].consuming.clone().ranges_values() {
                    self.add_transition(init_state, target, range);
                }
            }
            match look {
                Look::Boundary => {},
                Look::Full => {
                    // A Full-look init state is reachable anywhere, so wire it from the loop.
                    for &(range, target) in self.states[st_idx].consuming.clone().ranges_values() {
                        self.add_transition(loop_state, target, range);
                    }
                },
                Look::NewLine => {
                    self.add_transition(init_state, st_idx, Range::new(b'\n', b'\n'));
                    self.add_transition(loop_state, st_idx, Range::new(b'\n', b'\n'));
                },
                Look::WordChar | Look::NotWordChar => {
                    let dfa: &Dfa<_> =
                        if look == Look::WordChar { &WORD_CHAR_DFA } else { &NOT_WORD_CHAR_DFA };
                    try!(self.add_min_utf8_sequences(loop_state, dfa, st_idx, max_states));
                    try!(self.add_min_utf8_sequences(init_state, dfa, st_idx, max_states));
                },
                Look::Empty => {
                    panic!("Cannot start with an empty look");
                },
            }
            // Once we've found an init state that accepts immediately, don't look for any others
            // (since any matches that we find starting from them are lower priority that the one
            // we've found already). This check is *almost* unnecessary, since similar pruning
            // happens when we turn the NFA into a DFA. The important case that needs to be handled
            // here is the case that a high-priority init state has no transitions out of it. Such
            // a state will be completely removed by this function, and so we need to acknowledge
            // its existence here.
            if self.states[st_idx].accept == Accept::Always {
                break;
            }
        }
        // Wire up the initial and loop states, but only if they aren't accepting. That's because
        // if they are accepting then the accept should take priority over the transition (since
        // making the transition means that we are searching for a match that starts later).
        if init_accept != Accept::Always {
            self.add_transition(init_state, loop_state, Range::full());
        }
        if loop_accept != Accept::Always {
            self.add_transition(loop_state, loop_state, Range::full());
        }
        // The new Nfa is only allowed to start at the beginning of the input, and only at the new
        // initial state.
        self.init.push((Look::Boundary, init_state));
        self.trim_unreachable();
        Ok(self)
    }

    // This does the same thing as add_utf8_sequences, but it gets the transitions from a dfa,
    // which should have zero as its only starting state, and for which every accepting state
    // should be Accept::Always.
    //
    // This is probably used in conjunction with make_char_dfa, which ends up having the same
    // effect as add_utf8_sequences, but adds fewer states.
    fn add_min_utf8_sequences(
        &mut self,
        start_state: StateIdx,
        dfa: &Dfa<(Look, u8)>,
        end_state: StateIdx,
        max_states: usize,
    ) -> ::Result<()> {
        // New states are appended after the existing ones; `offset` maps DFA indices to ours.
        let offset = self.states.len();
        // If end_accept is true, then it isn't actually important that we end in state
        // `end_state`: we can create a new look_ahead state to end in.
        let end_accept = self.states[end_state].accept_tokens > 0;
        if self.states.len() + dfa.num_states() > max_states {
            return Err(Error::TooManyStates);
        }
        for _ in 0..dfa.num_states() {
            self.add_state(Accept::Never);
        }
        for d_idx in 0..dfa.num_states() {
            // DFA state 0 is its start state, which is identified with `start_state` here.
            let n_src = if d_idx == 0 { start_state } else { d_idx + offset };
            for &(range, d_tgt) in dfa.transitions(d_idx).ranges_values() {
                let n_tgt = if dfa.accept(d_tgt) == &Accept::Always && !end_accept {
                    end_state
                } else {
                    // Copy the DFA state's accept metadata onto the corresponding new state.
                    let n_tgt = d_tgt + offset;
                    self.states[n_tgt].accept = *dfa.accept(d_tgt);
                    if let Some(&(look, bytes)) = dfa.ret(d_tgt) {
                        self.states[n_tgt].accept_look = look;
                        self.states[n_tgt].accept_state = start_state;
                        self.states[n_tgt].accept_tokens = bytes;
                    }
                    n_tgt
                };
                self.add_transition(n_src, n_tgt, range);
            }
        }
        Ok(())
    }

    // Adds a path from `start_state` to `end_state` for all byte sequences matching `seq`.
    //
    // If `end_state` is a look-ahead state, makes a new accepting state instead (so that we know
    // how many bytes of look-ahead we used).
    fn add_utf8_sequence(
        &mut self,
        start_state: StateIdx,
        mut end_state: StateIdx,
        seq: MergedUtf8Sequences
    ) {
        // Build a chain of fresh states for all head bytes of the sequence.
        let mut last_state = start_state;
        for range in &seq.head {
            let cur_state = self.add_state(Accept::Never);
            self.add_transition(last_state, cur_state, Range::new(range.start, range.end));
            last_state = cur_state;
        }
        if self.states[end_state].accept_tokens > 0 {
            let look = self.states[end_state].accept_look;
            let acc_state = self.states[end_state].accept_state;
            end_state = self.add_look_ahead_state(look, seq.num_bytes(), acc_state);
        }
        // The last byte may be any of several ranges; they all lead to `end_state`.
        for range in &seq.last_byte {
            self.add_transition(last_state, end_state, Range::new(range.start, range.end));
        }
    }

    // Adds a byte path from `start_state` to `end_state` for every char in `ranges`.
    fn add_utf8_sequences<I>(
        &mut self,
        start_state: StateIdx,
        ranges: I,
        end_state: StateIdx,
        max_states: usize
    ) -> ::Result<()>
    where I: Iterator<Item=Range<u32>> {
        for m in MergedUtf8Sequences::from_ranges(ranges) {
            self.add_utf8_sequence(start_state, end_state, m);
            if self.states.len() > max_states {
                return Err(Error::TooManyStates);
            }
        }
        Ok(())
    }

    // Finds the transitions out of the given set of states, as a RangeMap.
    fn transition_map(&self, states: &[StateIdx]) -> RangeMap<u8, Vec<usize>> {
        let mut transitions = states.into_iter()
            .flat_map(|s| self.states[*s].consuming.ranges_values().cloned())
            .collect::<RangeMultiMap<u8, StateIdx>>()
            .group();
        // Removes duplicate state indices from `elts`, keeping first occurrences (and
        // therefore preserving relative order, which encodes match priority).
        // `scratch` is large enough to be indexed by anything in `elts`. It is full of `false`.
        fn uniquify(elts: &mut Vec<StateIdx>, scratch: &mut Vec<bool>) {
            elts.retain(|&e| {
                let ret = !scratch[e];
                scratch[e] = true;
                ret
            });
            // Clean up scratch, so that it is full of `false` again.
            for e in elts {
                scratch[*e] = false;
            }
        }
        let mut scratch = vec![false; self.num_states()];
        for pair in transitions.as_mut_slice() {
            uniquify(&mut pair.1, &mut scratch);
        }
        transitions
    }
}
/// How the determinizer resolves ambiguity when several NFA states can accept.
///
/// Derives extended beyond the original `PartialEq`: the enum is a trivial
/// two-variant tag, so `Debug`/`Clone`/`Copy`/`Eq` are free and idiomatic.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MatchChoice {
    /// Prefer matches in transition (priority) order, like a backtracking engine.
    TransitionOrder,
    /// Prefer the longest possible match, POSIX-style.
    LongestMatch,
}
// This contains all the intermediate data structures that we need when turning an `Nfa` into a
// `Dfa`.
struct Determinizer<'a> {
    // The NFA being determinized; never mutated.
    nfa: &'a Nfa<u8, NoLooks>,
    // The DFA under construction; extracted by `determinize` when done.
    dfa: Dfa<(Look, u8)>,
    // Maps each set of NFA states to the DFA state that represents it.
    state_map: HashMap<StateSet, StateIdx>,
    // Work list of NFA state sets whose outgoing transitions still need processing.
    active_states: Vec<StateSet>,
    // Fail with `Error::TooManyStates` once the DFA grows past this bound.
    max_states: usize,
    // Whether ambiguity is resolved by transition order or by longest match.
    match_choice: MatchChoice,
}
impl<'a> Determinizer<'a> {
    // Turns an Nfa into an almost-equivalent (up to the difference between shortest and longest
    // matches) Dfa.
    //
    // `init` is a vector of length Look::num(). Each entry gives a set of initial states that
    // will be turned into the initial states of the dfa.
    fn determinize(nfa: &Nfa<u8, NoLooks>,
                   max_states: usize,
                   match_choice: MatchChoice,
                   init: Vec<(Look, StateIdx)>) -> ::Result<Dfa<(Look, u8)>> {
        let mut det = Determinizer::new(nfa, max_states, match_choice);
        try!(det.run(init));
        Ok(det.dfa)
    }

    fn new(nfa: &'a Nfa<u8, NoLooks>,
           max_states: usize,
           match_choice: MatchChoice) -> Determinizer<'a> {
        Determinizer {
            nfa: nfa,
            dfa: Dfa::new(),
            state_map: HashMap::new(),
            active_states: Vec::new(),
            max_states: max_states,
            match_choice: match_choice,
        }
    }

    // Checks whether we should accept in the given set of states.
    //
    // Returns a tuple: the first element says when we accept, the second says what look-ahead (if
    // any) led to us accepting, and the third says how many bytes of look-ahead we needed before
    // knowing that we can accept.
    //
    // There is one annoying corner case: there could be two states in the set `s` with different
    // values of `accept_tokens`, where the higher priority state says `Accept::AtEoi` and the
    // lower priority state says `Accept::Always`. In this case, we return `(AtEoi, look, bytes)`
    // where `look` and `bytes` come from the lower priority state. This doesn't lose any
    // information, since if a state says `Accept::AtEoi` then its `accept_look` and
    // `accept_tokens` are guaranteed to be `Boundary` and `0`.
    fn accept(&self, s: &[StateIdx]) -> (Accept, Look, u8) {
        let mut accept_states = s.iter().cloned()
            .filter(|i| self.nfa.states[*i].accept != Accept::Never);
        let mut accept_always_states = s.iter().cloned()
            .filter(|i| self.nfa.states[*i].accept == Accept::Always);
        // Transition order: the first (highest-priority) accepting state wins.
        // Longest match: prefer the state that needs the fewest look-ahead bytes.
        let (first_accept, other_accept) = if self.match_choice == MatchChoice::TransitionOrder {
            (accept_states.next(), accept_always_states.next())
        } else {
            (accept_states.min_by_key(|i| self.nfa.states[*i].accept_tokens),
             accept_always_states.min_by_key(|i| self.nfa.states[*i].accept_tokens))
        };
        // Returns the intersection of state.accept_look over all states in s that accept
        // unconditionally and have the given number of look-ahead bytes.
        let look_intersection = |toks: u8| {
            s.iter().cloned()
                .filter(|i| self.nfa.states[*i].accept == Accept::Always)
                .filter(|i| self.nfa.states[*i].accept_tokens == toks)
                .fold(Look::Full, |x, y| x.intersection(&self.nfa.states[y].accept_look))
        };
        if let Some(first_accept) = first_accept {
            let st = &self.nfa.states[first_accept];
            if st.accept == Accept::AtEoi {
                // Check if there is a lower-priority Accept::Always.
                if let Some(other_accept) = other_accept {
                    let other_st = &self.nfa.states[other_accept];
                    if other_st.accept_tokens > 0 {
                        let look = look_intersection(other_st.accept_tokens);
                        return (Accept::AtEoi, look, other_st.accept_tokens);
                    }
                }
                (Accept::AtEoi, Look::Boundary, 0)
            } else {
                (Accept::Always, look_intersection(st.accept_tokens), st.accept_tokens)
            }
        } else {
            // There are no accepting states.
            (Accept::Never, Look::Empty, 0)
        }
    }

    // Tries to add a new state to the Dfa.
    //
    // If the state already exists, returns the index of the old one. If there are too many states,
    // returns an error.
    fn add_state(&mut self, mut s: StateSet) -> ::Result<StateIdx> {
        // When we choose our matches by transition order, discard any states that have lower
        // priority than the best match we've found.
        if self.match_choice == MatchChoice::TransitionOrder {
            if let Some(accept_idx) = s.iter().position(|&i| self.nfa.states[i].accept == Accept::Always) {
                s.truncate(accept_idx + 1);
            }
        }
        if self.state_map.contains_key(&s) {
            Ok(*self.state_map.get(&s).unwrap())
        } else if self.dfa.num_states() >= self.max_states {
            Err(Error::TooManyStates)
        } else {
            let (acc, look, bytes_ago) = self.accept(&s);
            // The DFA's "return value" records which look-ahead matched and how many
            // bytes ago, but only for accepting states.
            let ret = if acc != Accept::Never { Some ((look, bytes_ago)) } else { None };
            let new_state = self.dfa.add_state(acc, ret);
            self.active_states.push(s.clone());
            self.state_map.insert(s, new_state);
            Ok(new_state)
        }
    }

    // Creates a deterministic automaton representing the same language as our `nfa`.
    // Puts the new Dfa in self.dfa.
    fn run(&mut self, init: Vec<(Look, StateIdx)>) -> ::Result<()> {
        if self.nfa.states.is_empty() {
            return Ok(());
        }
        // One DFA initial state per look that has any NFA initial states.
        for &look in Look::all() {
            let init_states: StateSet = init.iter().cloned()
                .filter(|&(x, _)| look == x)
                .map(|(_, y)| y)
                .collect();
            if !init_states.is_empty() {
                let new_state_idx = try!(self.add_state(init_states));
                self.dfa.init[look.as_usize()] = Some(new_state_idx);
            }
        }
        // Standard subset construction: process each pending state set once.
        while !self.active_states.is_empty() {
            let state = self.active_states.pop().unwrap();
            // This unwrap is ok because anything in active_states must also be in state_map.
            let state_idx = *self.state_map.get(&state).unwrap();
            let trans = self.nfa.transition_map(&state);
            let mut dfa_trans = Vec::new();
            for &(range, ref target) in trans.ranges_values() {
                let target_idx = try!(self.add_state(target.clone()));
                dfa_trans.push((range, target_idx));
            }
            self.dfa.set_transitions(state_idx, dfa_trans.into_iter().collect());
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use look::Look;
    use dfa::Dfa;
    use nfa::{Accept, Nfa, NoLooks};
    use nfa::tests::{re_nfa, trans_nfa, trans_range_nfa};
    use range_map::Range;
    use std::usize;

    // Compiles `re` to a byte-level NFA and anchors it (adds the `^.*`-style prefix).
    fn re_nfa_anchored(re: &str) -> Nfa<u8, NoLooks> {
        re_nfa(re).byte_me(usize::MAX).unwrap().anchor(usize::MAX).unwrap()
    }

    // Compiles `re` all the way to a transition-order DFA.
    fn re_dfa(re: &str) -> Dfa<(Look, u8)> {
        re_nfa(re).byte_me(usize::MAX).unwrap().determinize(usize::MAX).unwrap()
    }

    #[test]
    fn anchor_simple() {
        let nfa = re_nfa_anchored("a");
        // State 2 is the new init state, state 1 the `.*` loop, state 0 the accept.
        let mut target = trans_range_nfa(3, &[(2, 0, Range::new(b'a', b'a')),
                                              (2, 1, Range::full()),
                                              (1, 0, Range::new(b'a', b'a')),
                                              (1, 1, Range::full())]);
        target.init.push((Look::Boundary, 2));
        target.states[0].accept = Accept::Always;
        assert_eq!(nfa, target);
    }

    #[test]
    fn anchor_nl() {
        let nfa = re_nfa_anchored(r"(?m)^a");
        let mut target = trans_nfa(4, &[(3, 1, 'a'),
                                        (0, 1, 'a'),
                                        (2, 0, '\n'),
                                        (3, 0, '\n')]);
        target.init.push((Look::Boundary, 3));
        target.states[1].accept = Accept::Always;
        // The `.*` loop (state 2) accepts any byte, which trans_nfa can't express.
        let mut target = target.byte_me(usize::MAX).unwrap();
        target.states[2].consuming.insert(Range::full(), 2);
        target.states[3].consuming.insert(Range::full(), 2);
        assert_eq!(nfa, target);
    }

    #[test]
    fn anchor_already_anchored() {
        // An already-anchored regex gains no `.*` loop.
        let nfa = re_nfa_anchored("^a");
        let mut target = trans_nfa(2, &[(1, 0, 'a')]);
        target.init.push((Look::Boundary, 1));
        target.states[0].accept = Accept::Always;
        assert_eq!(nfa, target);
    }

    #[test]
    fn determinize_pruning() {
        // Transition-order determinization prunes the lower-priority longer alternative.
        assert_eq!(re_dfa("a|aa"), re_dfa("a"));
    }

    // Asserts that reversing the NFA for `$re` yields exactly the init looks in `$inits`.
    macro_rules! check_rev_inits {
        ($name:ident, $re:expr, $inits:expr) => {
            #[test]
            fn $name() {
                let rev = re_nfa($re).byte_me(usize::MAX).unwrap().reverse(usize::MAX).unwrap();
                println!("{:?}", rev.init);
                for &look in Look::all() {
                    println!("checking look {:?}", look);
                    if $inits.contains(&look) {
                        assert!(rev.init.iter().any(|pair| pair.0 == look));
                    } else {
                        assert!(!rev.init.iter().any(|pair| pair.0 == look));
                    }
                }
            }
        };
    }

    check_rev_inits!(rev_init_simple, "abc", [Look::Full]);
    check_rev_inits!(rev_init_boundary, "abc$", [Look::Boundary]);
    check_rev_inits!(rev_init_simple_and_boundary, "(abc$|abc)", [Look::Full, Look::Boundary]);
    check_rev_inits!(rev_init_new_line, "(?m)abc$", [Look::Boundary, Look::NewLine]);
    check_rev_inits!(rev_init_word, r" \b", [Look::WordChar]);
    check_rev_inits!(rev_init_not_word, r"abc\b", [Look::Boundary, Look::NotWordChar]);
    check_rev_inits!(rev_init_word_or_not_word, r".\b", [Look::Boundary, Look::NotWordChar, Look::WordChar]);
}
|
mod utils;
use js_sys::{Array, Number, Reflect};
use wasm_bindgen::prelude::*;
// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
// allocator.
// wee_alloc trades allocation speed for a much smaller wasm binary.
#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
// macro_rules! console_log {
// ($($t:tt)*) => (log(&format_args!($($t)*).to_string()))
// }
// JavaScript functions imported into the wasm module.
#[wasm_bindgen]
extern "C" {
    /// Browser `console.log`.
    #[wasm_bindgen(js_namespace = console)]
    fn log(s: &str);

    /// Browser `window.alert`. Fix: `greet` below calls `alert`, but it was never
    /// declared in this file, so the call could not resolve.
    fn alert(s: &str);
}
/// Pops up a browser alert — a smoke test that the wasm module is wired up.
#[wasm_bindgen]
pub fn greet() {
    alert("Hello, wasm-html!");
}
/// Returns the n-th Fibonacci number (fib(0) = 0, fib(1) = 1).
///
/// Rewritten iteratively: the previous doubly-recursive version was O(2^n) and
/// would stall the browser for moderate `n`; this version is O(n) and produces
/// identical results (including overflowing for the same `n` in debug builds).
#[wasm_bindgen]
pub fn fibonacci(n: u32) -> u32 {
    utils::set_panic_hook();
    // Invariant: pair = (fib(i), fib(i + 1)) after i loop iterations.
    let mut pair = (0u32, 1u32);
    for _ in 0..n {
        pair = (pair.1, pair.0 + pair.1);
    }
    pair.0
}
// Empty placeholder type; nothing in this file uses it — presumably reserved for
// a future template abstraction (TODO confirm or remove).
pub struct Template {}
/// Tagged-template helper: interleaves the literal chunks of a JS template call
/// site (`call_site.raw`) with up to ten substitution values.
///
/// `undefined`/`null` substitutions are skipped, so missing trailing arguments
/// contribute nothing to the output.
#[wasm_bindgen]
pub fn html(
    call_site: JsValue,
    subst0: JsValue,
    subst1: JsValue,
    subst2: JsValue,
    subst3: JsValue,
    subst4: JsValue,
    subst5: JsValue,
    subst6: JsValue,
    subst7: JsValue,
    subst8: JsValue,
    subst9: JsValue,
) -> Vec<JsValue> {
    // Stored in reverse so `pop()` yields subst0 first, subst1 second, and so on.
    let mut substitutions = Vec::from([
        subst9, subst8, subst7, subst6, subst5, subst4, subst3, subst2, subst1, subst0,
    ]);
    // `raw` is the array of literal string chunks on the tagged-template call site.
    let raw = Array::from(&Reflect::get(&call_site, &JsValue::from("raw")).unwrap());
    // Worst case: one substitution interleaved after every raw chunk.
    let mut result = Vec::with_capacity((raw.length() * 2) as usize);
    for x in 0..raw.length() {
        let test = raw.get(x);
        result.push(test);
        if let Some(substitution) = substitutions.pop() {
            if substitution != JsValue::UNDEFINED && substitution != JsValue::NULL {
                result.push(substitution);
            }
        }
    }
    return result;
}
/// Flattens the interleaved template parts produced by `html` into one string.
///
/// Strings are appended verbatim, numbers via their `f64` `Display` form, and
/// nested arrays are rendered recursively; any other JS value is silently skipped.
#[wasm_bindgen]
pub fn render(input: Vec<JsValue>) -> String {
    let mut result = String::new();
    for item in input {
        if let Some(text) = item.as_string() {
            result.push_str(&text);
        } else if let Some(num) = item.as_f64() {
            result.push_str(&num.to_string());
        } else if Array::is_array(&item) {
            // Recurse into nested arrays (e.g. a list of sub-templates).
            let arr = Array::from(&item);
            result.push_str(&render(arr.to_vec()));
        }
    }
    return result;
}
|
use actix_web::http::Method;
use maplit::hashmap;
use maplit::hashset;
use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::collections::HashSet;
/// Maps (path, HTTP method) pairs to the set of auth scopes required to call them.
/// This file is a Handlebars template: one entry is rendered per operation
/// (get/head/post/put/delete/options/trace/patch) found under `paths` —
/// presumably an OpenAPI spec context; confirm against the generator's input.
pub static SECURITY_MATRIX: Lazy<HashMap<(&str, Method), HashSet<&str>>> = Lazy::new(|| {
    hashmap! {
        {{~#each paths as | _ path |}}
        {{~#with get}}
        ("{{path}}", Method::GET) => hashset![
            {{~#each security}}
            {{~#each auth as | scope |}}
            "{{scope}}",
            {{~/each}}
            {{~/each}}
        ],
        {{~/with}}
        {{~#with head}}
        ("{{path}}", Method::HEAD) => hashset![
            {{~#each security}}
            {{~#each auth as | scope |}}
            "{{scope}}",
            {{~/each}}
            {{~/each}}
        ],
        {{~/with}}
        {{~#with post}}
        ("{{path}}", Method::POST) => hashset![
            {{~#each security}}
            {{~#each auth as | scope |}}
            "{{scope}}",
            {{~/each}}
            {{~/each}}
        ],
        {{~/with}}
        {{~#with put}}
        ("{{path}}", Method::PUT) => hashset![
            {{~#each security}}
            {{~#each auth as | scope |}}
            "{{scope}}",
            {{~/each}}
            {{~/each}}
        ],
        {{~/with}}
        {{~#with delete}}
        ("{{path}}", Method::DELETE) => hashset![
            {{~#each security}}
            {{~#each auth as | scope |}}
            "{{scope}}",
            {{~/each}}
            {{~/each}}
        ],
        {{~/with}}
        {{~#with options}}
        ("{{path}}", Method::OPTIONS) => hashset![
            {{~#each security}}
            {{~#each auth as | scope |}}
            "{{scope}}",
            {{~/each}}
            {{~/each}}
        ],
        {{~/with}}
        {{~#with trace}}
        ("{{path}}", Method::TRACE) => hashset![
            {{~#each security}}
            {{~#each auth as | scope |}}
            "{{scope}}",
            {{~/each}}
            {{~/each}}
        ],
        {{~/with}}
        {{~#with patch}}
        ("{{path}}", Method::PATCH) => hashset![
            {{~#each security}}
            {{~#each auth as | scope |}}
            "{{scope}}",
            {{~/each}}
            {{~/each}}
        ],
        {{~/with}}
        {{~/each}}
    }
});
|
#[cfg(feature = "rustls-tls")]
pub mod rustls_tls {
    use hyper_rustls::ConfigBuilderExt;
    use rustls::{
        self,
        client::{HandshakeSignatureValid, ServerCertVerified, ServerCertVerifier},
        Certificate, ClientConfig, DigitallySignedStruct, PrivateKey,
    };
    use thiserror::Error;

    /// Errors from Rustls
    #[derive(Debug, Error)]
    pub enum Error {
        /// Identity PEM is invalid
        #[error("identity PEM is invalid: {0}")]
        InvalidIdentityPem(#[source] std::io::Error),
        /// Identity PEM is missing a private key: the key must be PKCS8 or RSA/PKCS1
        #[error("identity PEM is missing a private key: the key must be PKCS8 or RSA/PKCS1")]
        MissingPrivateKey,
        /// Identity PEM is missing certificate
        #[error("identity PEM is missing certificate")]
        MissingCertificate,
        /// Invalid private key
        #[error("invalid private key: {0}")]
        InvalidPrivateKey(#[source] rustls::Error),
        /// Unknown private key format
        #[error("unknown private key format")]
        UnknownPrivateKeyFormat,
        // Using type-erased error to avoid depending on webpki
        /// Failed to add a root certificate
        #[error("failed to add a root certificate: {0}")]
        AddRootCertificate(#[source] Box<dyn std::error::Error + Send + Sync>),
    }

    /// Create `rustls::ClientConfig`.
    ///
    /// * `identity_pem` — optional PEM bundle with a client certificate chain and private key.
    /// * `root_certs` — optional DER-encoded roots; when absent, native platform roots are used.
    /// * `accept_invalid` — when true, installs a verifier that accepts ANY server
    ///   certificate (dangerous; intended for dev/self-signed setups only).
    pub fn rustls_client_config(
        identity_pem: Option<&[u8]>,
        root_certs: Option<&[Vec<u8>]>,
        accept_invalid: bool,
    ) -> Result<ClientConfig, Error> {
        let config_builder = if let Some(certs) = root_certs {
            ClientConfig::builder()
                .with_safe_defaults()
                .with_root_certificates(root_store(certs)?)
        } else {
            ClientConfig::builder().with_safe_defaults().with_native_roots()
        };
        let mut client_config = if let Some((chain, pkey)) = identity_pem.map(client_auth).transpose()? {
            config_builder
                .with_single_cert(chain, pkey)
                .map_err(Error::InvalidPrivateKey)?
        } else {
            config_builder.with_no_client_auth()
        };
        if accept_invalid {
            // Replaces the default verifier AFTER the config is built, overriding the
            // root store configured above.
            client_config
                .dangerous()
                .set_certificate_verifier(std::sync::Arc::new(NoCertificateVerification {}));
        }
        Ok(client_config)
    }

    // Builds a root store from DER-encoded certificates, failing on the first bad cert.
    fn root_store(root_certs: &[Vec<u8>]) -> Result<rustls::RootCertStore, Error> {
        let mut root_store = rustls::RootCertStore::empty();
        for der in root_certs {
            root_store
                .add(&Certificate(der.clone()))
                .map_err(|e| Error::AddRootCertificate(Box::new(e)))?;
        }
        Ok(root_store)
    }

    // Parses a PEM bundle into (certificate chain, private key).
    // If several keys are present, preference order is PKCS8, then RSA, then EC.
    fn client_auth(data: &[u8]) -> Result<(Vec<Certificate>, PrivateKey), Error> {
        use rustls_pemfile::Item;
        let mut cert_chain = Vec::new();
        let mut pkcs8_key = None;
        let mut rsa_key = None;
        let mut ec_key = None;
        let mut reader = std::io::Cursor::new(data);
        for item in rustls_pemfile::read_all(&mut reader).map_err(Error::InvalidIdentityPem)? {
            match item {
                Item::X509Certificate(cert) => cert_chain.push(Certificate(cert)),
                Item::PKCS8Key(key) => pkcs8_key = Some(PrivateKey(key)),
                Item::RSAKey(key) => rsa_key = Some(PrivateKey(key)),
                Item::ECKey(key) => ec_key = Some(PrivateKey(key)),
                // Any other PEM item (e.g. a CRL) is treated as an unsupported key format.
                _ => return Err(Error::UnknownPrivateKeyFormat),
            }
        }
        let private_key = pkcs8_key.or(rsa_key).or(ec_key).ok_or(Error::MissingPrivateKey)?;
        if cert_chain.is_empty() {
            return Err(Error::MissingCertificate);
        }
        Ok((cert_chain, private_key))
    }

    // A verifier that accepts every server certificate and signature.
    // Only installed when the caller explicitly passes `accept_invalid = true`.
    struct NoCertificateVerification {}

    impl ServerCertVerifier for NoCertificateVerification {
        fn verify_server_cert(
            &self,
            _end_entity: &Certificate,
            _intermediates: &[Certificate],
            _server_name: &rustls::client::ServerName,
            _scts: &mut dyn Iterator<Item = &[u8]>,
            _ocsp_response: &[u8],
            _now: std::time::SystemTime,
        ) -> Result<ServerCertVerified, rustls::Error> {
            tracing::warn!("Server cert bypassed");
            Ok(ServerCertVerified::assertion())
        }

        fn verify_tls13_signature(
            &self,
            _message: &[u8],
            _cert: &Certificate,
            _dss: &DigitallySignedStruct,
        ) -> Result<HandshakeSignatureValid, rustls::Error> {
            Ok(HandshakeSignatureValid::assertion())
        }

        fn verify_tls12_signature(
            &self,
            _message: &[u8],
            _cert: &Certificate,
            _dss: &DigitallySignedStruct,
        ) -> Result<HandshakeSignatureValid, rustls::Error> {
            Ok(HandshakeSignatureValid::assertion())
        }
    }
}
#[cfg(feature = "openssl-tls")]
pub mod openssl_tls {
    use openssl::{
        pkey::PKey,
        ssl::{SslConnector, SslConnectorBuilder, SslMethod},
        x509::X509,
    };
    use thiserror::Error;

    /// Errors from OpenSSL TLS
    #[derive(Debug, Error)]
    pub enum Error {
        /// Failed to create OpenSSL HTTPS connector
        #[error("failed to create OpenSSL HTTPS connector: {0}")]
        CreateHttpsConnector(#[source] openssl::error::ErrorStack),
        /// Failed to create OpenSSL SSL connector
        #[error("failed to create OpenSSL SSL connector: {0}")]
        CreateSslConnector(#[source] SslConnectorError),
    }

    /// Errors from creating a `SslConnectorBuilder`
    #[derive(Debug, Error)]
    pub enum SslConnectorError {
        /// Failed to build SslConnectorBuilder
        #[error("failed to build SslConnectorBuilder: {0}")]
        CreateBuilder(#[source] openssl::error::ErrorStack),
        /// Failed to deserialize PEM-encoded chain of certificates
        #[error("failed to deserialize PEM-encoded chain of certificates: {0}")]
        DeserializeCertificateChain(#[source] openssl::error::ErrorStack),
        /// Failed to deserialize PEM-encoded private key
        #[error("failed to deserialize PEM-encoded private key: {0}")]
        DeserializePrivateKey(#[source] openssl::error::ErrorStack),
        /// Failed to set private key
        #[error("failed to set private key: {0}")]
        SetPrivateKey(#[source] openssl::error::ErrorStack),
        /// Failed to get a leaf certificate, the certificate chain is empty
        #[error("failed to get a leaf certificate, the certificate chain is empty")]
        GetLeafCertificate,
        /// Failed to set the leaf certificate
        #[error("failed to set the leaf certificate: {0}")]
        SetLeafCertificate(#[source] openssl::error::ErrorStack),
        /// Failed to append a certificate to the chain
        #[error("failed to append a certificate to the chain: {0}")]
        AppendCertificate(#[source] openssl::error::ErrorStack),
        /// Failed to deserialize DER-encoded root certificate
        #[error("failed to deserialize DER-encoded root certificate: {0}")]
        DeserializeRootCertificate(#[source] openssl::error::ErrorStack),
        /// Failed to add a root certificate
        #[error("failed to add a root certificate: {0}")]
        AddRootCertificate(#[source] openssl::error::ErrorStack),
    }

    /// Create `openssl::ssl::SslConnectorBuilder` required for `hyper_openssl::HttpsConnector`.
    ///
    /// * `identity_pem` — optional PEM bundle: the first certificate is used as the leaf,
    ///   the rest as the extra chain, and the private key is read from the same bundle.
    /// * `root_certs` — optional DER-encoded roots added to the builder's cert store.
    pub fn ssl_connector_builder(
        identity_pem: Option<&Vec<u8>>,
        root_certs: Option<&Vec<Vec<u8>>>,
    ) -> Result<SslConnectorBuilder, SslConnectorError> {
        let mut builder =
            SslConnector::builder(SslMethod::tls()).map_err(SslConnectorError::CreateBuilder)?;
        if let Some(pem) = identity_pem {
            let mut chain = X509::stack_from_pem(pem)
                .map_err(SslConnectorError::DeserializeCertificateChain)?
                .into_iter();
            // First certificate in the bundle is the client's own (leaf) certificate.
            let leaf_cert = chain.next().ok_or(SslConnectorError::GetLeafCertificate)?;
            builder
                .set_certificate(&leaf_cert)
                .map_err(SslConnectorError::SetLeafCertificate)?;
            for cert in chain {
                builder
                    .add_extra_chain_cert(cert)
                    .map_err(SslConnectorError::AppendCertificate)?;
            }
            let pkey = PKey::private_key_from_pem(pem).map_err(SslConnectorError::DeserializePrivateKey)?;
            builder
                .set_private_key(&pkey)
                .map_err(SslConnectorError::SetPrivateKey)?;
        }
        if let Some(ders) = root_certs {
            for der in ders {
                let cert = X509::from_der(der).map_err(SslConnectorError::DeserializeRootCertificate)?;
                builder
                    .cert_store_mut()
                    .add_cert(cert)
                    .map_err(SslConnectorError::AddRootCertificate)?;
            }
        }
        Ok(builder)
    }
}
|
use core::fmt::Display;
use x86_64::instructions::port::*;
use crate::time;
// Status-register bit masks (stored as `usize`, cast to `u8` at use sites).
// Status A bit 7: an RTC update is in progress, so register reads may be torn.
const UPDATE_IN_PROGRESS_BIT : usize = 1 << 7;
// NOTE(review): per the OSDev wiki, Status B bit 1 (1 << 1) enables 24-hour mode and
// bit 2 (1 << 2) enables binary (non-BCD) mode — the opposite of these two names.
// Confirm which bit is which before relying on `bcd_mode`/`hour_mode` below.
const BCD_MODE : usize = 1 << 1;
const HOUR_24 : usize = 1 << 2;
/// The CMOS/RTC register pair: write a register index to port 0x70, then
/// read or write its value through port 0x71.
pub struct Cmos {
    index_reg : Port<u8>, // 0x70
    data_reg : Port<u8>, // 0x71
}
/// A raw snapshot of the RTC date/time registers. Values may be BCD or binary
/// depending on Status B, and the hour may be 12- or 24-hour format.
#[allow(unused)]
pub struct Rtc {
    second : u8, // 0..59
    minute : u8, // 0..59
    hour : u8, // 24Hr: 0..23, 12Hr: 1..12 (msb set if pm)
    weekday : u8, // Note: OSDev Wiki states that this shouldn't be used. Should be set to zero.
    day : u8, // 1..31
    month : u8, // 1..12
    year : u8, // 0..99
    // Note: Maybe add support for the Century register?
}
/// CMOS register indices for the RTC; a variant's value is written to the index
/// port (0x70) to select that register.
#[repr(C)]
pub enum RtcIndexes {
    Seconds = 0x00,
    Minutes = 0x02,
    Hours = 0x04,
    Weekday = 0x06,
    DayOfMonth = 0x07,
    Month = 0x08,
    Year = 0x09,
    StatusA = 0x0A,
    StatusB = 0x0B,
}
impl Cmos {
    /// Returns a handle to the CMOS register pair (index port 0x70, data port 0x71).
    pub fn get() -> Cmos {
        Cmos {
            index_reg : Port::new(0x70),
            data_reg : Port::new(0x71),
        }
    }

    /// Reads one RTC register: select it via the index port, then read the data port.
    ///
    /// NOTE(review): bit 7 of port 0x70 also controls NMI masking on real hardware;
    /// this write always leaves it clear — confirm that is intended.
    pub fn read_rtc(&mut self, index : RtcIndexes) -> u8 {
        unsafe {
            self.index_reg.write(index as u8);
            self.data_reg.read()
        }
    }

    // True while the RTC is mid-update (Status A bit 7), when date/time reads may be torn.
    fn update_in_progress(&mut self) -> bool {
        (self.read_rtc(RtcIndexes::StatusA) & UPDATE_IN_PROGRESS_BIT as u8) > 0
    }

    /// Whether the RTC reports values in BCD.
    ///
    /// NOTE(review): this treats the bit being CLEAR (`< 1`) as BCD, whereas
    /// `Rtc::bcd_mode` treats the bit being SET as BCD — the two disagree; confirm
    /// which polarity is correct.
    pub fn bcd_mode(&mut self) -> bool {
        (self.read_rtc(RtcIndexes::StatusB) & BCD_MODE as u8) < 1
    }

    /// Whether the RTC is in 24-hour mode (bit set in Status B).
    pub fn hour_mode(&mut self) -> bool {
        (self.read_rtc(RtcIndexes::StatusB) & HOUR_24 as u8) > 0
    }

    /// Snapshots the current date/time registers into an `Rtc`.
    ///
    /// Waits for any in-progress update first, but does not re-read afterwards to
    /// verify consistency (an update could begin mid-snapshot) — TODO confirm
    /// this is acceptable for the callers.
    pub fn rtc(&mut self) -> Rtc {
        self.wait_for_update_completion();
        let second = self.read_rtc(RtcIndexes::Seconds);
        let minute = self.read_rtc(RtcIndexes::Minutes);
        let hour = self.read_rtc(RtcIndexes::Hours);
        let day = self.read_rtc(RtcIndexes::DayOfMonth);
        let year = self.read_rtc(RtcIndexes::Year);
        let month = self.read_rtc(RtcIndexes::Month);
        Rtc {
            day,
            hour,
            minute,
            month,
            second,
            weekday : 0, // OSDev advises against using the hardware weekday register.
            year
        }
    }

    /// Sleeps one tick at a time until no RTC update is in progress.
    pub fn wait_for_update_completion(&mut self) {
        while self.update_in_progress() {
            time::sleep_ticks(1); //Sleep for 1ms
        }
    }
}
impl Rtc {
    /// True when the RTC reports its registers in BCD.
    ///
    /// Mirrors `Cmos::bcd_mode`: the data-mode bit being *clear* means BCD.
    /// (The previous `> 1` test here contradicted `Cmos::bcd_mode` and
    /// reported the opposite answer.)
    pub fn bcd_mode(&self) -> bool {
        (Cmos::get().read_rtc(RtcIndexes::StatusB) & BCD_MODE as u8) < 1
    }
    /// True when the clock runs in 24-hour format, matching `Cmos::hour_mode`.
    pub fn hour_mode(&self) -> bool {
        (Cmos::get().read_rtc(RtcIndexes::StatusB) & HOUR_24 as u8) > 0
    }
    /// Raw value of Status Register A.
    pub fn status_a(&self) -> u8 {
        Cmos::get().read_rtc(RtcIndexes::StatusA)
    }
    /// Raw value of Status Register B.
    /// (Bug fix: this previously read `StatusA` — a copy-paste error.)
    pub fn status_b(&self) -> u8 {
        Cmos::get().read_rtc(RtcIndexes::StatusB)
    }
}
impl Display for Rtc {
    /// Formats as "DD/MM/20YY HH:MM:SS" followed by the mode flags and the
    /// two raw status registers in binary.
    ///
    /// NOTE(review): `self.second` is printed with `{:x}` while every other
    /// field uses decimal — presumably because the register may hold BCD,
    /// but then the remaining fields would need the same treatment; confirm.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{}/{}/20{} {}:{}:{:x}, 24Hr: {}, BCD: {}, StatusA: {:08b}, StatusB: {:08b}",
            self.day,
            self.month,
            self.year,
            self.hour,
            self.minute,
            self.second, self.bcd_mode(), self.hour_mode(),
            self.status_a(), self.status_b())
    }
}
use super::config;
use crate::errors::{Error, ErrorKind, Result};
use crate::protos;
use crate::{session, wallet};
use std::prelude::v1::*;
extern crate sgx_types;
use crate::protos::xchain;
use sgx_types::*;
use std::collections::HashMap;
//use std::path::PathBuf;
use std::slice;
/// Invokes `method_name` with `args` on the wasm contract bound to
/// `account` on chain `chain_name`, then builds, signs and posts the
/// resulting transaction. Returns the posted transaction id.
///
/// The gas reported by the pre-exec step is used as the fee, and the
/// compliance-check endorse service is appended to `auth_require`.
/// (The previous doc comment described a transfer; this function performs a
/// contract invoke with a zero transfer amount.)
pub fn invoke_contract(
    account: &wallet::Account,
    chain_name: &String,
    method_name: &String,
    args: HashMap<String, Vec<u8>>,
) -> Result<String> {
    // Build the single invoke request (wasm module, zero transfer amount).
    let mut invoke_req = protos::xchain::InvokeRequest::new();
    invoke_req.set_module_name(String::from("wasm"));
    invoke_req.set_contract_name(account.contract_name.to_owned());
    invoke_req.set_method_name(method_name.to_owned());
    invoke_req.set_args(args);
    invoke_req.set_amount(String::from("0"));
    // `vec![x]` rather than `vec![x; 1]`: same value, no Clone bound needed.
    let invoke_requests = vec![invoke_req];
    // auth_require entries: "<contract_account>/<address>" (when a contract
    // account is configured) plus the endorse service address.
    let mut auth_requires = vec![];
    if !account.contract_account.is_empty() {
        let mut s = account.contract_account.to_owned();
        s.push('/');
        s.push_str(account.address.as_str());
        auth_requires.push(s);
    };
    auth_requires.push(
        config::CONFIG
            .read()
            .unwrap()
            .compliance_check
            .compliance_check_endorse_service_addr
            .to_owned(),
    );
    let mut invoke_rpc_request = protos::xchain::InvokeRPCRequest::new();
    invoke_rpc_request.set_bcname(chain_name.to_owned());
    invoke_rpc_request.set_requests(protobuf::RepeatedField::from_vec(invoke_requests));
    invoke_rpc_request.set_initiator(account.address.to_owned());
    invoke_rpc_request.set_auth_require(protobuf::RepeatedField::from_vec(auth_requires.clone()));
    // The endorse-service fee is the minimum the UTXO selection must cover.
    let total_amount = config::CONFIG
        .read()
        .unwrap()
        .compliance_check
        .compliance_check_endorse_service_fee;
    let mut pre_sel_utxo_req = protos::xchain::PreExecWithSelectUTXORequest::new();
    pre_sel_utxo_req.set_bcname(chain_name.to_owned());
    pre_sel_utxo_req.set_address(account.address.to_owned());
    pre_sel_utxo_req.set_totalAmount(total_amount as i64);
    pre_sel_utxo_req.set_request(invoke_rpc_request.clone());
    // First session: pre-execute and select UTXOs (fee not yet known).
    let msg = session::Message {
        to: Default::default(),
        fee: Default::default(),
        desc: String::from("call from contract"),
        auth_require: auth_requires.clone(),
        amount: Default::default(),
        frozen_height: 0,
        initiator: account.address.to_owned(),
    };
    let sess = session::Session::new(chain_name, account, &msg);
    let mut resp = sess.pre_exec_with_select_utxo(pre_sel_utxo_req)?;
    // Second session: pay the gas reported by pre-exec as the fee, then
    // build, sign and post the complete transaction.
    let msg = session::Message {
        to: String::from(""),
        fee: resp.get_response().get_gas_used().to_string(),
        desc: String::from("call from contract"),
        auth_require: auth_requires,
        amount: Default::default(),
        frozen_height: 0,
        initiator: account.address.to_owned(),
    };
    let sess = session::Session::new(chain_name, account, &msg);
    sess.gen_complete_tx_and_post(&mut resp)
}
/// Read-only contract call: pre-executes `method_name` with `args` against
/// the contract bound to `account` on `chain_name` through the
/// `ocall_xchain_pre_exec` ocall and returns the raw pre-exec response.
/// Nothing is signed or posted on-chain.
pub fn query_contract(
    account: &wallet::Account,
    chain_name: &String,
    method_name: &String,
    args: HashMap<String, Vec<u8>>,
) -> Result<xchain::InvokeRPCResponse> {
    // Build the invoke request; unlike `invoke_contract` no amount is set.
    let mut invoke_req = protos::xchain::InvokeRequest::new();
    invoke_req.set_module_name(String::from("wasm"));
    invoke_req.set_contract_name(account.contract_name.to_owned());
    invoke_req.set_method_name(method_name.to_owned());
    invoke_req.set_args(args);
    let invoke_requests = vec![invoke_req; 1];
    // auth_require: "<contract_account>/<address>" plus the endorse service.
    let mut auth_requires = vec![];
    if !account.contract_account.is_empty() {
        let mut s = account.contract_account.to_owned();
        s.push_str("/");
        s.push_str(account.address.to_owned().as_str());
        auth_requires.push(s);
    };
    auth_requires.push(
        config::CONFIG
            .read()
            .unwrap()
            .compliance_check
            .compliance_check_endorse_service_addr
            .to_owned(),
    );
    let mut invoke_rpc_request = protos::xchain::InvokeRPCRequest::new();
    invoke_rpc_request.set_bcname(chain_name.to_owned());
    invoke_rpc_request.set_requests(protobuf::RepeatedField::from_vec(invoke_requests));
    invoke_rpc_request.set_initiator(account.address.to_owned());
    invoke_rpc_request.set_auth_require(protobuf::RepeatedField::from_vec(auth_requires.clone()));
    // Out-parameters filled by the untrusted host: status, buffer pointer, length.
    let mut rt: sgx_status_t = sgx_status_t::SGX_ERROR_UNEXPECTED;
    let req = serde_json::to_string(&invoke_rpc_request)?;
    let mut output = 0 as *mut sgx_libc::c_void;
    let mut out_len: usize = 0;
    let resp = unsafe {
        // The request is passed to the host as a JSON byte buffer.
        crate::ocall_xchain_pre_exec(
            &mut rt,
            req.as_ptr() as *const u8,
            req.len(),
            &mut output,
            &mut out_len,
        )
    };
    // Both the ocall transport (`resp`) and the host-side operation (`rt`)
    // must report success.
    if resp != sgx_status_t::SGX_SUCCESS || rt != sgx_status_t::SGX_SUCCESS {
        println!(
            "[-] query_contract ocall_xchain_pre_exec failed: {}, {}!",
            resp.as_str(),
            rt.as_str()
        );
        return Err(Error::from(ErrorKind::InvalidArguments));
    }
    // The returned buffer must live outside the enclave; otherwise a
    // malicious host could make us read enclave memory.
    unsafe {
        if sgx_types::sgx_is_outside_enclave(output, out_len) == 0 {
            println!("[-] alloc error");
            return Err(Error::from(ErrorKind::InvalidArguments));
        }
    }
    // SAFETY: pointer/length were validated just above; the buffer is owned
    // by the host and released via `ocall_free` below.
    let resp_slice = unsafe { slice::from_raw_parts(output as *mut u8, out_len) };
    // NOTE(review): `unwrap` panics if the host returns malformed JSON —
    // consider propagating an error instead.
    let invoke_rpc_resp: xchain::InvokeRPCResponse = serde_json::from_slice(&resp_slice).unwrap();
    unsafe {
        crate::ocall_free(output);
    }
    Ok(invoke_rpc_resp)
}
/*
pub fn test_contract() {
let bcname = String::from("xuper");
let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
d.push("key/private.key");
let acc = super::wallet::Account::new(
d.to_str().unwrap(),
"counter327861",
"XC1111111111000000@xuper",
);
let mn = String::from("increase");
let mut args = HashMap::new();
args.insert(String::from("key"), String::from("counter").into_bytes());
let txid = invoke_contract(&acc, &bcname, &mn, args);
println!("contract txid: {:?}", txid);
assert_eq!(txid.is_ok(), true);
let txid = txid.unwrap();
let mut rt: sgx_status_t = sgx_status_t::SGX_ERROR_UNEXPECTED;
let mut output = 0 as *mut sgx_libc::c_void;
let mut out_len: usize = 0;
let res = unsafe {
crate::ocall_xchain_query_tx(
&mut rt,
txid.as_ptr() as *const u8,
txid.len(),
&mut output,
&mut out_len,
)
};
assert_eq!(res, sgx_status_t::SGX_SUCCESS);
assert_eq!(rt, sgx_status_t::SGX_SUCCESS);
unsafe {
assert_ne!(sgx_types::sgx_is_outside_enclave(output, out_len), 0);
}
let resp_slice = unsafe { slice::from_raw_parts(output as *mut u8, out_len) };
let result: xchain::TxStatus = serde_json::from_slice(resp_slice).unwrap();
unsafe {
crate::ocall_free(output);
}
println!("{:?}", result);
println!("invoke contract test passed");
}
pub fn test_query() {
let bcname = String::from("xuper");
let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
d.push("key/private.key");
let acc = super::wallet::Account::new(
d.to_str().unwrap(),
"counter327861",
"XC1111111111000000@xuper",
);
let mn = String::from("get");
let mut args = HashMap::new();
args.insert(String::from("key"), String::from("counter").into_bytes());
let resp = query_contract(&acc, &bcname, &mn, args);
assert_eq!(resp.is_ok(), true);
println!("contract query response: {:?}", resp);
println!("contract query test passed");
}
*/
|
use crate::lib::core::{Star, StarSecret};
/// Unit-struct `Star` implementation that serves a fixed display code.
pub struct StaticStarSecret {}
// Empty impl: `StarSecret` evidently requires no items of this type here.
impl StarSecret for StaticStarSecret {}
impl Star for StaticStarSecret {
    // Always returns the same static code; usable as a stub/default star.
    fn star_display_code(&self) -> &str { "star-display-code" }
}
use tcod::input::{self, Event, };
use super::rendering;
use super::data::{ Object, Game, Tcod, };
use crate::{PLAYER, MAP_HEIGHT, MAP_WIDTH};
/// Let the player pick a map tile with the mouse.
///
/// Returns the tile coordinates on left-click (when the tile is visible and
/// within `max_range`), or `None` on right-click / Escape.
pub fn target_tile(
    max_range: Option<f32>,
    objects: &[Object] ,
    game: &mut Game,
    tcod: &mut Tcod,
) -> Option<(i32, i32)> {
    use tcod::input::KeyCode::Escape;
    // Run a mini event loop of our own: re-render each frame, track the
    // mouse, and resolve on left-click (select) or right-click/Escape (cancel).
    loop {
        tcod.root.flush();
        let mut pressed_key = None;
        match input::check_for_event(input::KEY_PRESS | input::MOUSE).map(|e| e.1) {
            Some(Event::Mouse(m)) => tcod.mouse = m,
            Some(Event::Key(k)) => pressed_key = Some(k),
            None => {}
        }
        rendering::render_all(false, objects, game, tcod, );
        let (cursor_x, cursor_y) = (tcod.mouse.cx as i32, tcod.mouse.cy as i32);
        // A tile is selectable only while on the map, inside the player's
        // field of view, and within the optional range limit.
        let visible =
            (cursor_x < MAP_WIDTH) && (cursor_y < MAP_HEIGHT) && tcod.fov.is_in_fov(cursor_x, cursor_y);
        let within_range =
            max_range.map_or(true, |range| objects[PLAYER].distance(cursor_x, cursor_y) <= range);
        if tcod.mouse.lbutton_pressed && visible && within_range {
            return Some((cursor_x, cursor_y));
        }
        let cancelled = pressed_key.map_or(false, |k| k.code == Escape);
        if tcod.mouse.rbutton_pressed || cancelled {
            return None;
        }
    }
}
/// Let the player pick a living monster with the mouse.
///
/// Repeats tile selection until the chosen tile holds a non-player object
/// with a `fighter` component; returns its index, or `None` if the player
/// cancels the targeting.
pub fn target_monster(
    max_range: Option<f32>,
    objects: &[Object],
    game: &mut Game,
    tcod: &mut Tcod,
) -> Option<usize> {
    loop {
        // `?` propagates a cancelled selection straight out as `None`.
        let (x, y) = target_tile(max_range, objects, game, tcod)?;
        let hit = objects
            .iter()
            .enumerate()
            .find(|(id, obj)| *id != PLAYER && obj.pos() == (x, y) && obj.fighter.is_some());
        if let Some((id, _)) = hit {
            return Some(id);
        }
        // No monster on that tile — loop and let the player try again.
    }
}
/// Find the visible monster nearest to the player within `max_range` tiles.
///
/// Returns the index of the closest object that is not the player, has both
/// `fighter` and `ai` components, and sits inside the field of view.
pub fn closest_monster(max_range: i32, objects: &mut [Object], tcod: &Tcod) -> Option<usize> {
    // Start the best distance just past the allowed range so nothing farther
    // than `max_range` can ever be selected.
    let mut best: Option<usize> = None;
    let mut best_dist = (max_range + 1) as f32;
    for (id, candidate) in objects.iter().enumerate() {
        let is_visible_enemy = (id != PLAYER)
            && candidate.fighter.is_some()
            && candidate.ai.is_some()
            && tcod.fov.is_in_fov(candidate.x, candidate.y);
        if !is_visible_enemy {
            continue;
        }
        let dist = objects[PLAYER].distance_to(candidate);
        if dist < best_dist {
            best = Some(id);
            best_dist = dist;
        }
    }
    best
}
|
mod reverse;
mod max_of_2;
mod small_to_high;
/// Exercises the small practice algorithms from the sibling modules.
fn main() {
    // reverse.rs — reverse the characters of a string.
    // e.g. reverse_string("hello rust") -> "tsur olleh"
    let greeting = String::from ("hello rust");
    let reversed = reverse::reverse_string(&greeting);
    println! ("{}", reversed);
    assert_eq!("tsur olleh", reversed);
    // max_of_2.rs — pick the larger of two numbers.
    // e.g. max_number(-122387, 124) -> 124
    let larger = max_of_2::max_number(-122387,124);
    println! ("The greater number is {}", larger);
    assert_eq!(124, larger);
    // small_to_high.rs — sort a numeric array ascending.
    // e.g. smt([8,12,53,1,2,-6,2]) -> [-6,1,2,2,8,12,53]
    let numbers = [8,12,53,1,2,-6,2].to_vec();
    let sorted = small_to_high::smt(numbers);
    println!("{:?}", sorted);
    // (no assertion here: comparing an array literal to the returned Vec was
    // left unresolved upstream)
    // Remove a given character from a string.
    // e.g. "Hi, how are you doing?" without 'i' -> "H, how are you dong?"
    let question = String::from("Hi, how are you doing?");
    let cleaned = question.replace('i', "");
    println!("{}", cleaned);
}
|
//! Generate high level room layout
//!
mod params;
pub use params::*;
use crate::components::{RoomComponent, RoomConnection, RoomConnections};
use crate::geometry::{Axial, Hexagon};
use crate::prelude::hex_round;
use crate::storage::views::UnsafeView;
use crate::tables::morton_table::{ExtendFailure, MortonTable};
use rand::Rng;
use thiserror::Error;
use tracing::{debug, error};
/// Errors produced while laying out the overworld room grid.
#[derive(Debug, Clone, Error)]
pub enum OverworldGenerationError {
    #[error("Can not place {number_of_rooms} rooms in an area with radius of {radius}")]
    BadRadius { number_of_rooms: u32, radius: u32 },
    #[error("Failed to build Room table: {0:?}")]
    ExtendFail(ExtendFailure),
    #[error("Failed to build Room weight table: {0:?}")]
    WeightMapInitFail(ExtendFailure),
}
/// Transform the room_id into absolute 'room tile' space
fn room_id_to_axial(room_id: Axial, grid_size: i32) -> Axial {
    const SQRT3: f64 = 1.73205080756887; // √3, used by the axial<->world formulas
    // +1.0 presumably leaves a one-tile gap between neighbouring rooms —
    // NOTE(review): inferred from the formula, confirm.
    let size = (grid_size as f64 + 1.0) * SQRT3;
    // 1) convert room_id to world space based on flat top hexes with size `grid_size`
    let x = size * (3. / 2. * room_id.q as f64);
    let y = size * (SQRT3 / 2. * room_id.q as f64 + SQRT3 * room_id.r as f64);
    // 2) convert the world space coordinates back to axial, but pointy-top with radius of 1
    let q = SQRT3 / 3. * x - 1. / 3. * y; // /1 omitted
    let r = 2. / 3. * y;
    hex_round([q as f32, r as f32])
}
/// Insert the given number of rooms in the given radius (where the unit is a room).
///
/// Fills `rooms` with one `RoomComponent` per hex inside the bounds, then
/// rolls random connections between neighbouring rooms.
///
/// [ ] TODO: remove some nodes to produce less dense maps?
/// [ ] TODO: resource map?
/// [ ] TODO: political map?
/// [ ] TODO: parallellism?
/// [ ] TODO: non-uniform room sizes
pub fn generate_room_layout(
    OverworldGenerationParams {
        radius,
        room_radius,
        min_bridge_len,
        max_bridge_len,
    }: &OverworldGenerationParams,
    rng: &mut impl Rng,
    (mut rooms, mut room_connections): (
        UnsafeView<Axial, RoomComponent>,
        UnsafeView<Axial, RoomConnections>,
    ),
) -> Result<(), OverworldGenerationError> {
    let radius = *radius as i32;
    let room_radius = *room_radius as i32;
    // Centre the hexagonal bounds so every room id stays non-negative.
    let center = Axial::new(radius, radius);
    let bounds = Hexagon { center, radius };
    // Init the grid: one room per hex with a fresh RNG seed and its offset
    // in absolute room-tile space.
    rooms.clear();
    rooms
        .extend(bounds.iter_points().map(|room_id| {
            (
                room_id,
                RoomComponent {
                    offset: room_id_to_axial(room_id, room_radius),
                    seed: rng.gen(),
                },
            )
        }))
        .map_err(OverworldGenerationError::ExtendFail)?;
    room_connections.clear();
    room_connections
        .extend(bounds.iter_points().map(|p| (p, Default::default())))
        .map_err(OverworldGenerationError::ExtendFail)?;
    debug!("Building room_connections");
    // loosely running the Erdos - Renyi model: each room draws a uniform
    // weight in [-4, 6] which the sigmoid squashes into (0, 1), acting as
    // that room's connection probability.
    let connection_weights = MortonTable::from_iterator(bounds.iter_points().map(|p| {
        let weight = rng.gen_range(-4.0..=6.0);
        let weight = sigmoid(weight);
        (p, weight)
    }))
    .map_err(OverworldGenerationError::WeightMapInitFail)?;
    for point in bounds.iter_points() {
        update_room_connections(
            room_radius as u32,
            *min_bridge_len,
            *max_bridge_len,
            point,
            &connection_weights,
            rng,
            room_connections,
        );
    }
    debug!("Building room_connections done");
    // TODO: insert more room_connections if the graph is not fully connected
    Ok(())
}
/// Logistic sigmoid: maps any real to (0, 1); used to turn a room's random
/// weight into a connection probability.
fn sigmoid(f: f32) -> f32 {
    // 1 / (1 + e^-f); `(-f).exp()` is the idiomatic (and identical)
    // spelling of `std::f32::consts::E.powf(-f)`.
    1.0 / (1.0 + (-f).exp())
}
/// Roll connections for `point` against its potential neighbours and mirror
/// every newly created connection onto the neighbouring room.
///
/// NOTE(review): `room_radius - bridge_len` underflows (u32) when a bridge
/// can be longer than the room radius, and `gen_range(0..padding)` panics
/// when `padding` is 0 — the params are assumed to guarantee
/// `max_bridge_len < room_radius`; confirm in `OverworldGenerationParams`.
fn update_room_connections(
    room_radius: u32,
    min_bridge_len: u32,
    max_bridge_len: u32,
    point: Axial,
    connection_weights: &MortonTable<f32>,
    rng: &mut impl Rng,
    mut room_connections: UnsafeView<Axial, RoomConnections>,
) {
    // One roll per room: a neighbour is connected when its weight beats it.
    let w = rng.gen_range(0.0..1.0);
    let mut to_connect = [None; 6];
    connection_weights.query_range(point, 3, &mut |p, weight| {
        if w <= *weight {
            let n = p - point;
            if let Some(i) = Axial::neighbour_index(n) {
                to_connect[i] = Some(n);
            }
        }
    });
    if !to_connect.iter().any(|c| c.is_some()) {
        // if this room has no room_connections insert 1 at random
        let mut weights = [0.0; 6];
        connection_weights.query_range(point, 3, &mut |p, _| {
            let n = p - point;
            if let Some(i) = Axial::neighbour_index(n) {
                weights[i] = rng.gen_range(0.5..=1.0);
            }
        });
        let (i, _) = weights
            .iter()
            .enumerate()
            .max_by(|(_, w1), (_, w2)| w1.partial_cmp(w2).expect("Expected non-nan values"))
            .expect("Expected all rooms to have at least 1 neighbour");
        to_connect[i] = Some(point.hex_neighbours()[i] - point);
    }
    let current_connections = {
        let to_connect = &mut to_connect[..];
        room_connections.update_with(point, |RoomConnections(ref mut conn)| {
            for (i, c) in to_connect.iter_mut().enumerate() {
                if conn[i].is_none() && c.is_some() {
                    // Pick a bridge length, then split the leftover padding
                    // randomly between the two ends of the connection.
                    let bridge_len = rng.gen_range(min_bridge_len..=max_bridge_len);
                    let padding = room_radius - bridge_len;
                    let offset_start = rng.gen_range(0..padding);
                    let offset_end = padding - offset_start;
                    // this is a new connection
                    conn[i] = c.map(|c| RoomConnection {
                        direction: c,
                        offset_start,
                        offset_end,
                    });
                } else {
                    // if we don't have to update this posision then set it to None so we don't
                    // attempt to update the neighbour later.
                    *c = None;
                }
            }
        })
    }
    .expect("expected the current room to have connection")
    .clone();
    // Mirror each new connection onto the neighbour it points at, with the
    // direction inverted and the start/end offsets swapped.
    for neighbour in current_connections
        .0
        .iter()
        .filter_map(|n| n.as_ref())
        .cloned()
    {
        room_connections.update_with(point + neighbour.direction, |conn| {
            let inverse = neighbour.direction * -1;
            let i = Axial::neighbour_index(inverse)
                .expect("expected neighbour inverse to be a valid neighbour posision");
            // this one's offsets are the current room's inverse
            let offset_end = neighbour.offset_start;
            let offset_end = offset_end.max(1) - 1; // offset_end - 1 or 0
            let offset_start = neighbour.offset_end + 1;
            conn.0[i] = Some(RoomConnection {
                direction: inverse,
                offset_start,
                offset_end,
            });
        });
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Every connection recorded on a room must have a matching inverse
    /// connection on the neighbouring room it points at.
    #[test]
    fn overworld_connections_are_valid() {
        let mut rooms = MortonTable::new();
        let mut room_connections = MortonTable::new();
        let params = OverworldGenerationParams::builder()
            .with_radius(12)
            .with_room_radius(16)
            .with_min_bridge_len(3)
            .with_max_bridge_len(12)
            .build()
            .unwrap();
        // Fixed: `&params` had been garbled into `¶ms` (HTML-entity
        // mangling of "&para"), which does not compile.
        generate_room_layout(
            &params,
            &mut rand::thread_rng(),
            (
                UnsafeView::from_table(&mut rooms),
                UnsafeView::from_table(&mut room_connections),
            ),
        )
        .unwrap();
        assert_eq!(rooms.len(), room_connections.len());
        // for each connection of the room test if the corresponding connection of the neighbour
        // is valid.
        for (room, RoomConnections(ref room_conn)) in room_connections.iter() {
            for conn in room_conn.iter().filter_map(|x| x.as_ref()) {
                let RoomConnections(ref conn_pairs) = room_connections
                    .at(room + conn.direction)
                    .expect("Expected the neighbour to be in the room_connections table");
                let i = Axial::neighbour_index(conn.direction * -1).unwrap();
                let conn_pair = conn_pairs[i]
                    .as_ref()
                    .expect("The pair connection was not found");
                assert_eq!(conn_pair.direction, conn.direction * -1);
            }
        }
    }
}
|
#[cfg(test)]
mod tests {
    use crate::xor::encrypt_decrypt_repeating_key_xor;
    // Cryptopals set 1, challenge 5: repeating-key XOR.
    // https://cryptopals.com/sets/1/challenges/5
    #[test]
    fn challenge5() {
        let key: &str = "ICE";
        let plaintext =
            "Burning 'em, if you ain't quick and nimble\nI go crazy when I hear a cymbal";
        // Expected ciphertext, hex-encoded (line continuation strips the
        // leading whitespace of the second line).
        let expected =
            "0b3637272a2b2e63622c2e69692a23693a2a3c6324202d623d63343c2a26226324272765272\
             a282b2f20430a652e2c652a3124333a653e2b2027630c692b20283165286326302e27282f";
        let key_bytes: Vec<u8> = key.bytes().collect();
        let message_bytes: Vec<u8> = plaintext.bytes().collect();
        let actual = hex::encode(&encrypt_decrypt_repeating_key_xor(&key_bytes, &message_bytes));
        assert_eq!(expected, actual);
    }
}
|
//
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
extern crate test;
use location_utils::{
cell_id_to_bytes, find_cell, location_from_degrees, location_to_bytes, S2_DEFAULT_LEVEL,
};
use lookup_data_generator::data::generate_and_serialize_sparse_weather_entries;
use maplit::hashmap;
use oak_functions_abi::proto::{Request, StatusCode};
use oak_functions_loader::{
grpc::{create_and_start_grpc_server, create_wasm_handler},
logger::Logger,
lookup::{parse_lookup_entries, LookupData, LookupDataAuth, LookupDataSource},
server::{Policy, WasmHandler},
};
use std::{
net::{Ipv6Addr, SocketAddr},
sync::Arc,
time::Duration,
};
use test::Bencher;
use test_utils::make_request;
/// End-to-end test of the weather-lookup example: compiles the Wasm module
/// from this crate, serves lookup data from a mock static HTTP server,
/// starts the gRPC server, and probes it with several request shapes.
///
/// The service answers application-level failures (unknown location,
/// malformed JSON) with `StatusCode::Success` and a human-readable message
/// in the body, so every case below asserts `Success` and matches the body.
#[tokio::test]
async fn test_server() {
    let server_port = test_utils::free_port();
    let address = SocketAddr::from((Ipv6Addr::UNSPECIFIED, server_port));
    // Compile the example Wasm module from this crate's own manifest.
    let mut manifest_path = std::env::current_dir().unwrap();
    manifest_path.push("Cargo.toml");
    let wasm_module_bytes =
        test_utils::compile_rust_wasm(manifest_path.to_str().expect("Invalid target dir"), false)
            .expect("Couldn't read Wasm module");
    // Mock HTTP server that hands out the serialized lookup entries.
    let mock_static_server = Arc::new(test_utils::MockStaticServer::default());
    let mock_static_server_clone = mock_static_server.clone();
    let static_server_port = test_utils::free_port();
    let mock_static_server_background = test_utils::background(|term| async move {
        mock_static_server_clone
            .serve(static_server_port, term)
            .await
    });
    let location_0 = location_from_degrees(52., -0.01);
    let location_1 = location_from_degrees(14., -12.);
    // Find the cells for the locations and generate the associated lookup index entries.
    // These are purposely large so that each location is covered by a single cell.
    let level = S2_DEFAULT_LEVEL;
    let cell_0 = find_cell(&location_0, level).unwrap();
    let cell_1 = find_cell(&location_1, level).unwrap();
    // Two kinds of entries: location -> weather JSON, and cell -> location
    // (the index used to find the nearest data point within a cell).
    let lookup_data = hashmap! {
        location_to_bytes(&location_0).to_vec() => br#"{"temperature_degrees_celsius":10}"#.to_vec(),
        location_to_bytes(&location_1).to_vec() => br#"{"temperature_degrees_celsius":42}"#.to_vec(),
        cell_id_to_bytes(&cell_0) => location_to_bytes(&location_0).to_vec(),
        cell_id_to_bytes(&cell_1) => location_to_bytes(&location_1).to_vec(),
    };
    mock_static_server.set_response_body(test_utils::serialize_entries(lookup_data));
    let logger = Logger::for_test();
    // LookupData pulls its entries over HTTP from the mock server above.
    let lookup_data = Arc::new(LookupData::new_empty(
        Some(LookupDataSource::Http {
            url: format!("http://localhost:{}", static_server_port),
            auth: LookupDataAuth::default(),
        }),
        logger.clone(),
    ));
    lookup_data.refresh().await.unwrap();
    // Constant-size / constant-time response policy applied to all replies.
    let policy = Policy {
        constant_response_size_bytes: 100,
        constant_processing_time: Duration::from_millis(200),
    };
    let tee_certificate = vec![];
    let wasm_handler = create_wasm_handler(&wasm_module_bytes, lookup_data, vec![], logger.clone())
        .expect("could not create wasm_handler");
    let server_background = test_utils::background(|term| async move {
        create_and_start_grpc_server(
            &address,
            wasm_handler,
            tee_certificate,
            policy,
            term,
            logger,
        )
        .await
    });
    {
        // Exact key_0.
        let response = make_request(server_port, br#"{"lat":52.0,"lng":-0.01}"#)
            .await
            .response;
        assert_eq!(StatusCode::Success as i32, response.status);
        assert_eq!(
            r#"{"temperature_degrees_celsius":10}"#,
            std::str::from_utf8(response.body().unwrap()).unwrap()
        );
    }
    {
        // Close to key_0.
        let response = make_request(server_port, br#"{"lat":51.9,"lng":-0.1}"#)
            .await
            .response;
        assert_eq!(StatusCode::Success as i32, response.status);
        assert_eq!(
            r#"{"temperature_degrees_celsius":10}"#,
            std::str::from_utf8(response.body().unwrap()).unwrap()
        );
    }
    {
        // A bit further from key_0: inside the cell but beyond the cutoff,
        // so the module reports an application-level error in the body.
        let response = make_request(server_port, br#"{"lat":51.4,"lng":-0.6}"#)
            .await
            .response;
        assert_eq!(StatusCode::Success as i32, response.status);
        assert_eq!(
            r#"could not find location within cutoff"#,
            std::str::from_utf8(response.body().unwrap()).unwrap()
        );
    }
    {
        // Close to key_1.
        let response = make_request(server_port, br#"{"lat":14.1,"lng":-11.9}"#)
            .await
            .response;
        assert_eq!(StatusCode::Success as i32, response.status);
        assert_eq!(
            r#"{"temperature_degrees_celsius":42}"#,
            std::str::from_utf8(response.body().unwrap()).unwrap()
        );
    }
    {
        // Far from both keys: no index entry for the request's cell at all.
        let response = make_request(server_port, br#"{"lat":-10.0,"lng":10.0}"#)
            .await
            .response;
        assert_eq!(StatusCode::Success as i32, response.status);
        assert_eq!(
            r#"could not find index item for cell"#,
            std::str::from_utf8(response.body().unwrap()).unwrap()
        );
    }
    {
        // Malformed request.
        let response = make_request(server_port, b"invalid - JSON").await.response;
        assert_eq!(StatusCode::Success as i32, response.status);
        assert_eq!(
            "could not deserialize request as JSON: Error(\"expected value\", line: 1, column: 1)",
            std::str::from_utf8(response.body().unwrap()).unwrap()
        );
    }
    // Shut both background servers down; the gRPC one must exit cleanly.
    let res = server_background.terminate_and_join().await;
    assert!(res.is_ok());
    mock_static_server_background.terminate_and_join().await;
}
/// Benchmarks a single lookup through the Wasm handler; also acts as a perf
/// regression test by bounding the mean iteration time.
#[bench]
fn bench_wasm_handler(bencher: &mut Bencher) {
    // This benchmark test takes quite a long time when running with a realistic amount of lookup
    // data. By default it uses a smaller number of entries. To run the bench with realistic data
    // size, use `cargo bench --features large-bench`.
    #[cfg(not(feature = "large-bench"))]
    let (entry_count, elapsed_limit_millis) = (10_000, 20);
    #[cfg(feature = "large-bench")]
    let (entry_count, elapsed_limit_millis) = (200_000, 20);
    let mut manifest_path = std::env::current_dir().unwrap();
    manifest_path.push("Cargo.toml");
    // NOTE(review): the final `true` differs from `test_server`'s `false` —
    // presumably a release-mode flag; confirm `compile_rust_wasm`'s signature.
    let wasm_module_bytes =
        test_utils::compile_rust_wasm(manifest_path.to_str().expect("Invalid target dir"), true)
            .expect("Couldn't read Wasm module");
    // Generate `entry_count` random sparse weather entries as lookup data.
    let mut rng = rand::thread_rng();
    let buf = generate_and_serialize_sparse_weather_entries(&mut rng, entry_count).unwrap();
    let entries = parse_lookup_entries(buf).unwrap();
    let lookup_data = Arc::new(LookupData::for_test(entries));
    let logger = Logger::for_test();
    let wasm_handler = WasmHandler::create(&wasm_module_bytes, lookup_data, vec![], logger)
        .expect("Couldn't create the server");
    let rt = tokio::runtime::Runtime::new().unwrap();
    let summary = bencher.bench(|bencher| {
        bencher.iter(|| {
            let request = Request {
                body: br#"{"lat":-60.1,"lng":120.1}"#.to_vec(),
            };
            let resp = rt
                .block_on(wasm_handler.clone().handle_invoke(request))
                .unwrap();
            assert_eq!(resp.status, StatusCode::Success as i32);
        });
    });
    // When running `cargo test` this benchmark test gets executed too, but `summary` will be `None`
    // in that case. So, here we first check that `summary` is not empty.
    if let Some(summary) = summary {
        // `summary.mean` is in nanoseconds, even though it is not explicitly documented in
        // https://doc.rust-lang.org/test/stats/struct.Summary.html.
        let elapsed = Duration::from_nanos(summary.mean as u64);
        // We expect the `mean` time for loading the test Wasm module and running its main function
        // to be less than a fixed threshold.
        assert!(
            elapsed < Duration::from_millis(elapsed_limit_millis),
            "elapsed time: {:.0?}",
            elapsed
        );
    }
}
|
use hacspec_lib::*;
use hacspec_ristretto::*;
// === Positive Tests === //
#[test]
fn test_unit_add_zero() {
    // Adding the identity point must leave the base point unchanged.
    let base = BASE_POINT();
    let identity = IDENTITY_POINT();
    assert!(equals(base, add(base, identity)));
}
#[test]
fn test_unit_inverse_sub() {
    // Subtracting a point from itself must land on the identity.
    let base = BASE_POINT();
    assert!(equals(sub(base, base), IDENTITY_POINT()))
}
#[test]
fn test_unit_scalar_mul_one() {
    // Multiplying by the scalar 1 must be the identity operation.
    let base = BASE_POINT();
    let product = mul(Scalar::from_literal(1), base);
    assert!(equals(base, product))
}
#[test]
fn test_unit_add_to_self_double() {
    // Doubling must agree with adding a point to itself.
    let base = BASE_POINT();
    assert!(equals(double(base), add(base, base)))
}
#[test]
fn test_unit_add_encode() {
    // Valid encodings that appear to be successive multiples of the base
    // point (index 0 is the identity): each must decode, and adding the
    // point at index 1 to the i-th entry must give the (i+1)-th entry.
    // (The previous "Non-canonical field encodings" label was copy-pasted
    // from the negative decode test — these encodings all decode fine.)
    let hexs = Seq::<&str>::from_vec(vec![
        "0000000000000000000000000000000000000000000000000000000000000000",
        "e2f2ae0a6abc4e71a884a961c500515f58e30b6aa582dd8db6a65945e08d2d76",
        "6a493210f7499cd17fecb510ae0cea23a110e8d5b901f8acadd3095c73a3b919",
        "94741f5d5d52755ece4f23f044ee27d5d1ea1e2bd196b462166b16152a9d0259",
        "da80862773358b466ffadfe0b3293ab3d9fd53c5ea6c955358f568322daf6a57",
        "e882b131016b52c1d3337080187cf768423efccbb517bb495ab812c4160ff44e",
        "f64746d3c92b13050ed8d80236a7f0007c3b3f962f5ba793d19a601ebb1df403",
        "44f53520926ec81fbd5a387845beb7df85a96a24ece18738bdcfa6a7822a176d",
        "903293d8f2287ebe10e2374dc1a53e0bc887e592699f02d077d5263cdd55601c",
        "02622ace8f7303a31cafc63f8fc48fdc16e1c8c8d234b2f0d6685282a9076031",
        "20706fd788b2720a1ed2a5dad4952b01f413bcf0e7564de8cdc816689e2db95f",
        "bce83f8ba5dd2fa572864c24ba1810f9522bc6004afe95877ac73241cafdab42",
        "e4549ee16b9aa03099ca208c67adafcafa4c3f3e4e5303de6026e3ca8ff84460",
        "aa52e000df2e16f55fb1032fc33bc42742dad6bd5a8fc0be0167436c5948501f",
        "46376b80f409b29dc2b5f6f0c52591990896e5716f41477cd30085ab7f10301e",
        "e0c418f7c8d9c4cdd7395b93ea124f3ad99021bb681dfc3302a9d99a2e53e64e",
    ]);
    let points: Vec<RistrettoPoint> = hexs
        .iter()
        .map(|x| decode(RistrettoPointEncoded::from_hex(x)).unwrap())
        .collect();
    // points[i] + points[1] == points[i + 1] for every consecutive pair.
    for i in 0..15 {
        let add_res = add(points[1usize], points[i]);
        assert!(equals(add_res, points[i + 1usize]));
    }
}
#[test]
fn test_unit_one_way_map() {
    // Known-answer test for the one-way map (uniform bytes -> point): each
    // 64-byte input must map to the matching expected encoded point.
    // NOTE(review): these look like the standard ristretto255 hash-to-group
    // test vectors — confirm against the ristretto255 specification.
    let input_hexs = Seq::<&str>::from_vec(vec![
        "5d1be09e3d0c82fc538112490e35701979d99e06ca3e2b5b54bffe8b4dc772c14d98b696a1bbfb5ca32c436cc61c16563790306c79eaca7705668b47dffe5bb6",
        "f116b34b8f17ceb56e8732a60d913dd10cce47a6d53bee9204be8b44f6678b270102a56902e2488c46120e9276cfe54638286b9e4b3cdb470b542d46c2068d38",
        "8422e1bbdaab52938b81fd602effb6f89110e1e57208ad12d9ad767e2e25510c27140775f9337088b982d83d7fcf0b2fa1edffe51952cbe7365e95c86eaf325c",
        "ac22415129b61427bf464e17baee8db65940c233b98afce8d17c57beeb7876c2150d15af1cb1fb824bbd14955f2b57d08d388aab431a391cfc33d5bafb5dbbaf",
        "165d697a1ef3d5cf3c38565beefcf88c0f282b8e7dbd28544c483432f1cec7675debea8ebb4e5fe7d6f6e5db15f15587ac4d4d4a1de7191e0c1ca6664abcc413",
        "a836e6c9a9ca9f1e8d486273ad56a78c70cf18f0ce10abb1c7172ddd605d7fd2979854f47ae1ccf204a33102095b4200e5befc0465accc263175485f0e17ea5c",
        "2cdc11eaeb95daf01189417cdddbf95952993aa9cb9c640eb5058d09702c74622c9965a697a3b345ec24ee56335b556e677b30e6f90ac77d781064f866a3c982",
    ]);
    let result_hexs = Seq::<&str>::from_vec(vec![
        "3066f82a1a747d45120d1740f14358531a8f04bbffe6a819f86dfe50f44a0a46",
        "f26e5b6f7d362d2d2a94c5d0e7602cb4773c95a2e5c31a64f133189fa76ed61b",
        "006ccd2a9e6867e6a2c5cea83d3302cc9de128dd2a9a57dd8ee7b9d7ffe02826",
        "f8f0c87cf237953c5890aec3998169005dae3eca1fbb04548c635953c817f92a",
        "ae81e7dedf20a497e10c304a765c1767a42d6e06029758d2d7e8ef7cc4c41179",
        "e2705652ff9f5e44d3e841bf1c251cf7dddb77d140870d1ab2ed64f1a9ce8628",
        "80bd07262511cdde4863f8a7434cef696750681cb9510eea557088f76d9e5065",
    ]);
    let input_bytes: Vec<ByteString> = input_hexs.iter().map(|x| ByteString::from_hex(x)).collect();
    let res_hacspec: Vec<RistrettoPoint> = input_bytes.iter().map(|x| one_way_map(*x)).collect();
    let exp_res: Vec<RistrettoPoint> = result_hexs
        .iter()
        .map(|x| decode(RistrettoPointEncoded::from_hex(x)).unwrap())
        .collect();
    // Pairwise compare mapped points against the expected decoded points.
    for i in 0..res_hacspec.len() {
        assert!(equals(res_hacspec[i], exp_res[i]));
    }
}
// === Negative Tests === //
#[test]
fn test_unit_neg_decode() {
    // Every encoding below is invalid in a distinct way (see the category
    // comments); `decode` must reject each one of them.
    let hexs = Seq::<&str>::from_vec(vec![
        //Non-canonical field encodings.
        "00ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f",
        "f3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f",
        "edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f",
        //Negative field elements.
        "0100000000000000000000000000000000000000000000000000000000000000",
        "01ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f",
        "ed57ffd8c914fb201471d1c3d245ce3c746fcbe63a3679d51b6a516ebebe0e20",
        "c34c4e1826e5d403b78e246e88aa051c36ccf0aafebffe137d148a2bf9104562",
        "c940e5a4404157cfb1628b108db051a8d439e1a421394ec4ebccb9ec92a8ac78",
        "47cfc5497c53dc8e61c91d17fd626ffb1c49e2bca94eed052281b510b1117a24",
        "f1c6165d33367351b0da8f6e4511010c68174a03b6581212c71c0e1d026c3c72",
        "87260f7a2f12495118360f02c26a470f450dadf34a413d21042b43b9d93e1309",
        //Non-square x^2.
        "26948d35ca62e643e26a83177332e6b6afeb9d08e4268b650f1f5bbd8d81d371",
        "4eac077a713c57b4f4397629a4145982c661f48044dd3f96427d40b147d9742f",
        "de6a7b00deadc788eb6b6c8d20c0ae96c2f2019078fa604fee5b87d6e989ad7b",
        "bcab477be20861e01e4a0e295284146a510150d9817763caf1a6f4b422d67042",
        "2a292df7e32cababbd9de088d1d1abec9fc0440f637ed2fba145094dc14bea08",
        "f4a9e534fc0d216c44b218fa0c42d99635a0127ee2e53c712f70609649fdff22",
        "8268436f8c4126196cf64b3c7ddbda90746a378625f9813dd9b8457077256731",
        "2810e5cbc2cc4d4eece54f61c6f69758e289aa7ab440b3cbeaa21995c2f4232b",
        //Negative xy value.
        "3eb858e78f5a7254d8c9731174a94f76755fd3941c0ac93735c07ba14579630e",
        "a45fdc55c76448c049a1ab33f17023edfb2be3581e9c7aade8a6125215e04220",
        "d483fe813c6ba647ebbfd3ec41adca1c6130c2beeee9d9bf065c8d151c5f396e",
        "8a2e1d30050198c65a54483123960ccc38aef6848e1ec8f5f780e8523769ba32",
        "32888462f8b486c68ad7dd9610be5192bbeaf3b443951ac1a8118419d9fa097b",
        "227142501b9d4355ccba290404bde41575b037693cef1f438c47f8fbf35d1165",
        "5c37cc491da847cfeb9281d407efc41e15144c876e0170b499a96a22ed31e01e",
        "445425117cb8c90edcbc7c1cc0e74f747f2c1efa5630a967c64f287792a48a4b",
        //s = -1, which causes y = 0.
        "ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f",
    ]);
    hexs.iter()
        .for_each(|x| assert!(decode(RistrettoPointEncoded::from_hex(x)).is_err()));
}
|
extern crate ffmpeg_dev;
extern crate libc;
use ffmpeg_dev::sys;
use ffmpeg_dev::sys::av_register_all;
use ffmpeg_dev::sys::avformat_open_input;
use ffmpeg_dev::sys::AVCodecParameters;
use ffmpeg_dev::sys::AVFormatContext;
use ffmpeg_dev::sys::AVMediaType_AVMEDIA_TYPE_VIDEO;
use ffmpeg_dev::sys::AV_TIME_BASE;
use std::ffi::CString;
use std::fs;
use std::ptr::null_mut;
use std::slice::from_raw_parts;
extern "C" {
fn double_input(input: libc::c_int) -> libc::c_int;
fn test(input: *const ::std::os::raw::c_char) -> libc::c_void;
}
// Output directory for the extracted JPEG frames.
const PROJECT_PATH: &str = "/Users/zhushijie/Desktop/github/rust-ffmepg/assets";

/// Ensure PROJECT_PATH exists and is empty: wipe any previous run's frames,
/// then recreate the directory. Panics on filesystem errors.
fn mange_project_path() {
    let dir = std::path::Path::new(PROJECT_PATH);
    if dir.exists() {
        fs::remove_dir_all(dir).unwrap();
    }
    fs::create_dir(dir).unwrap();
}
/// Encode one decoded video frame to `<PROJECT_PATH>/<index>.jpg` using
/// ffmpeg's mjpeg muxer and encoder, then tear the output context down.
/// NOTE(review): the `CString::into_raw()` results are never reclaimed with
/// `CString::from_raw`, so each call leaks those allocations — confirm this
/// is acceptable for a demo.
fn saveframe(frame: *mut sys::AVFrame, index: i32) {
    unsafe {
        let filepath = format!("{}/{}.jpg", PROJECT_PATH, index.to_string());
        println!("pic name is {}", filepath);
        let c_filepath = CString::new(filepath).unwrap().into_raw();
        // Fresh output context; the muxer is guessed from the "mjpeg" name.
        let p_format_ctx: *mut sys::AVFormatContext = sys::avformat_alloc_context();
        (*p_format_ctx).oformat = sys::av_guess_format(
            CString::new("mjpeg").unwrap().into_raw(),
            null_mut(),
            null_mut(),
        );
        println!("codeid is {}", (*(*p_format_ctx).oformat).video_codec);
        // Open the output file for the muxer to write into.
        let write_res = sys::avio_open(
            &mut (*p_format_ctx).pb,
            c_filepath,
            sys::AVIO_FLAG_READ_WRITE as i32,
        );
        if write_res < 0 {
            println!("Couldn't open output file");
            return;
        }
        // NOTE(review): this looks up a *decoder* for the output codec id and
        // only uses it to create the stream; the actual encoder is found below.
        let codec: *mut sys::AVCodec = sys::avcodec_find_decoder((*(*p_format_ctx).oformat).video_codec);
        let p_avstream = sys::avformat_new_stream(p_format_ctx, codec);
        if p_avstream == null_mut() {
            return;
        }
        // Describe the single mjpeg video stream from the incoming frame.
        let parameters = (*p_avstream).codecpar;
        (*parameters).codec_id = (*(*p_format_ctx).oformat).video_codec;
        (*parameters).codec_type = sys::AVMediaType_AVMEDIA_TYPE_VIDEO;
        (*parameters).format = sys::AVPixelFormat_AV_PIX_FMT_YUVJ420P;
        (*parameters).width = (*frame).width;
        (*parameters).height = (*frame).height;
        sys::av_dump_format(p_format_ctx, 0, c_filepath, 1);
        let p_codec = sys::avcodec_find_encoder((*(*p_avstream).codecpar).codec_id);
        if p_codec == null_mut() {
            println!("Could not find encoder.");
            return;
        }
        let p_codectx = sys::avcodec_alloc_context3(p_codec);
        if p_codectx == null_mut() {
            println!("Could not allocate video codec context");
            return;
        }
        if sys::avcodec_parameters_to_context(p_codectx, (*p_avstream).codecpar) < 0 {
            println!("Failed to copy,codec parameters to decoder context!");
            return;
        }
        (*p_codectx).time_base = sys::AVRational { num: 1, den: 25 };
        let code_res = sys::avcodec_open2(p_codectx, p_codec, null_mut());
        if code_res < 0 {
            println!("Could not open codec.");
            return;
        }
        sys::avformat_write_header(p_format_ctx, null_mut());
        // Packet sized generously for one YUV frame (3 * luma plane bytes).
        let y_size = (*frame).width * (*frame).height;
        let pkt: *mut sys::AVPacket = sys::av_packet_alloc();
        sys::av_new_packet(pkt, y_size * 3);
        let mut got_picture = 0;
        // Deprecated one-shot encode API; writes the packet when a full
        // picture was produced.
        let pic_decode_res = sys::avcodec_encode_video2(p_codectx, pkt, frame, &mut got_picture);
        if pic_decode_res < 0 {
            println!("Encode Error");
            return;
        }
        if got_picture > 0 {
            sys::av_write_frame(p_format_ctx, pkt);
        }
        sys::av_free_packet(pkt);
        sys::av_write_trailer(p_format_ctx);
        println!("Encode Successful");
        // Cleanup. NOTE(review): early `return`s above skip this teardown and
        // leak the contexts/avio handle — confirm whether that matters here.
        if p_avstream != null_mut() {
            sys::avcodec_close((*p_avstream).codec);
        }
        sys::avio_close((*p_format_ctx).pb);
        sys::avformat_free_context(p_format_ctx);
        return;
    }
}
/// Demo driver: exercises the linked C helpers, then opens a hard-coded MP4,
/// finds its video stream, decodes up to ~20 keyframes, converts each to
/// YUVJ420P via swscale and saves it as a JPEG through `saveframe`.
fn main() {
    // Sanity-call into the linked C helper.
    let input = 4;
    let output = unsafe { double_input(input) };
    println!("{} * 2 = {}", input, output);
    let path = "/Users/zhushijie/Desktop/m3u8-demo/test.mp4";
    // let path = "/Users/zhushijie/Desktop/demo/m3u8-demo/a.mp4";
    let c_path = CString::new(path).expect("CString::new failed").into_raw();
    unsafe {
        test(c_path);
    };
    unsafe {
        mange_project_path();
        av_register_all();
        sys::avdevice_register_all();
        let mut video_stream_idx: Vec<usize> = Vec::new();
        // NOTE(review): "video4linux2" is a capture-device input format; for a
        // plain file it is presumably ignored by avformat_open_input — confirm.
        let txt_inputformat = CString::new("video4linux2")
            .expect("CString::new failed")
            .into_raw();
        let input_fmt = sys::av_find_input_format(txt_inputformat);
        // let audio_stream_idx = -1;
        let mut ifmt_ctx: *mut AVFormatContext = sys::avformat_alloc_context();
        let code = avformat_open_input(&mut ifmt_ctx, c_path, input_fmt, null_mut());
        if code < 0 {
            println!("找不到视频文件");
        } else {
            println!("视频打开成功");
        }
        // Duration is reported in AV_TIME_BASE units; convert to seconds.
        let value = (*ifmt_ctx).duration;
        let trade = AV_TIME_BASE as i64;
        let durtime = value / trade;
        let ctx = *ifmt_ctx;
        println!("res is {},time is {}", code, durtime);
        println!("stream is {}", ctx.nb_streams);
        let stream_count = ctx.nb_streams;
        // View the C array of stream pointers as a safe slice of references.
        let streams = from_raw_parts((*ifmt_ctx).streams, stream_count as usize)
            .iter()
            .map(|x| (*x).as_ref().expect("not null"))
            .collect::<Vec<&sys::AVStream>>();
        for (index, stream_ptr) in streams.iter().enumerate() {
            let acc: *mut AVCodecParameters = stream_ptr.codecpar;
            println!("codec_type is {},index is {}", (*acc).codec_type, index);
            if (*acc).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
                video_stream_idx.push(index);
                let codec: *mut sys::AVCodec = sys::avcodec_find_decoder((*acc).codec_id);
                if codec == null_mut() {
                    println!("没有该类型的解码器!");
                    break;
                }
                let codec_ctx: *mut sys::AVCodecContext = sys::avcodec_alloc_context3(codec);
                sys::avcodec_parameters_to_context(codec_ctx, acc);
                let res = sys::avcodec_open2(codec_ctx, codec, null_mut());
                // NOTE(review): open failure is silently ignored here.
                if res != 0 {}
                println!("解码器打开成功");
                let packet: *mut sys::AVPacket = sys::av_packet_alloc();
                let pframe: *mut sys::AVFrame = sys::av_frame_alloc();
                let tr_frame: *mut sys::AVFrame = sys::av_frame_alloc();
                // Allocate a pixel buffer for the converted YUVJ420P frame and
                // wire it into tr_frame's data/linesize pointers.
                let picturesize = sys::avpicture_get_size(
                    sys::AVPixelFormat_AV_PIX_FMT_YUVJ420P,
                    (*codec_ctx).width,
                    (*codec_ctx).height,
                ) as usize;
                let buffer = sys::av_malloc(picturesize) as *mut u8;
                sys::avpicture_fill(
                    tr_frame as *mut sys::AVPicture,
                    buffer,
                    sys::AVPixelFormat_AV_PIX_FMT_YUVJ420P,
                    (*codec_ctx).width,
                    (*codec_ctx).height,
                );
                println!(
                    "width: {} height: {} pix_fmt: {}",
                    (*codec_ctx).width,
                    (*codec_ctx).height,
                    (*codec_ctx).pix_fmt
                );
                let mut pic_index = 1;
                // Demux packets; decode only those belonging to a video stream.
                while sys::av_read_frame(ifmt_ctx, packet) >= 0 {
                    let stream_index = (*packet).stream_index as usize;
                    if video_stream_idx.contains(&stream_index) {
                        let ret_send = sys::avcodec_send_packet(codec_ctx, packet);
                        if ret_send < 0 {
                            println!("发送视频帧失败,跳过");
                            continue;
                        }
                        let receive_ret = sys::avcodec_receive_frame(codec_ctx, pframe);
                        if receive_ret < 0 {
                            println!("解码获取 视频帧失败,跳过");
                            continue;
                        }
                        // flags == 1 presumably marks keyframe packets
                        // (AV_PKT_FLAG_KEY) — only those get saved.
                        if (*packet).flags == 1 {
                            pic_index += 1;
                            if pic_index < 20 {
                                // NOTE(review): a new SwsContext per frame is
                                // never freed (sws_freeContext) — leak.
                                let img_convert_ctx: *mut sys::SwsContext = sys::sws_getContext(
                                    (*pframe).width,
                                    (*pframe).height,
                                    (*pframe).format,
                                    (*pframe).width,
                                    (*pframe).height,
                                    sys::AVPixelFormat_AV_PIX_FMT_YUVJ420P,
                                    sys::SWS_BICUBIC as i32,
                                    null_mut(),
                                    null_mut(),
                                    null_mut(),
                                );
                                let h = sys::sws_scale(
                                    img_convert_ctx,
                                    (*pframe).data.as_ptr() as *mut *const u8,
                                    (*pframe).linesize.as_ptr(),
                                    0,
                                    (*codec_ctx).height,
                                    (*tr_frame).data.as_ptr(),
                                    (*tr_frame).linesize.as_ptr(),
                                );
                                println!("重新计算的高端:{}", h);
                                saveframe(tr_frame, pic_index);
                            } else {
                                break;
                            }
                        }
                    }
                }
                sys::av_frame_unref(pframe);
                sys::av_frame_unref(tr_frame);
                sys::avcodec_close(codec_ctx);
                sys::avformat_close_input(&mut ifmt_ctx);
            }
        }
    }
    return;
}
|
// Natural integers type
use std::ops::*;
use num_bigint::{BigInt};
use num_traits::{Pow, One, identities::Zero};
/// Arbitrary-precision integer: a newtype over `BigInt` whose partial
/// operations (`div`, `rem`, `pow`) check their preconditions via `verif_pre!`.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord)]
pub struct Int(BigInt);
impl Add for Int {
type Output = Int;
fn add(self, rhs:Int) -> Int {
Int(self.0 + rhs.0)
}
}
impl Sub for Int {
type Output = Int;
fn sub(self, rhs:Int) -> Int {
Int(self.0 - rhs.0)
}
}
impl Mul for Int {
type Output = Int;
fn mul(self, rhs:Int) -> Int {
Int(self.0 * rhs.0)
}
}
impl Div for Int {
    type Output = Int;
    //#[requires(| self, rhs | => rhs != 0.into())]
    /// Truncated division. Precondition, checked via `verif_pre!`: the
    /// divisor is non-zero.
    fn div(self, rhs:Int) -> Int {
        verif_pre!(rhs != 0.into());
        Int(self.0 / rhs.0)
    }
}
impl Rem for Int {
    type Output = Int;
    //#[requires(| self, rhs | => rhs != 0.into())]
    /// Remainder of truncated division. Precondition, checked via
    /// `verif_pre!`: the divisor is non-zero.
    fn rem(self, rhs:Int) -> Int {
        verif_pre!(rhs != 0.into());
        Int(self.0 % rhs.0)
    }
}
impl Pow<Int> for Int {
    type Output = Int;
    //#[requires (|self, exp| => exp >= 0)]
    /// Exponentiation by squaring. Precondition, checked via `verif_pre!`:
    /// the exponent is non-negative. Note `0.pow(0)` yields 1 here.
    fn pow(self, exp: Int) -> Int {
        verif_pre!(exp >= 0.into());
        let mut exp = exp.0;
        // x^0 == 1 for every base.
        if exp == BigInt::zero() {
            return Int(BigInt::one());
        }
        let mut base = self.0.clone();
        // Strip trailing zero bits of the exponent, squaring once per bit:
        // x^(2k) == (x^2)^k.
        while exp.clone() & BigInt::one() == BigInt::zero() {
            base = &base * &base;
            exp >>= 1;
        }
        // Exponent collapsed to 1: the squared-up base is the answer.
        if exp == BigInt::one() {
            return Int(base);
        }
        // Binary exponentiation over the remaining (odd) exponent: `acc`
        // multiplies in base^(2^i) for every set bit i.
        let mut acc = base.clone();
        while exp.clone() > BigInt::one() {
            exp >>= 1;
            base = &base * &base;
            if exp.clone() & BigInt::one() == BigInt::one() {
                acc = &acc * &base;
            }
        }
        Int(acc)
    }
}
impl From<u128> for Int {
fn from(x:u128) -> Int {
Int(BigInt::from(x))
}
}
impl From<u64> for Int {
fn from(x:u64) -> Int {
Int(BigInt::from(x))
}
}
impl From<u32> for Int {
fn from(x:u32) -> Int {
Int(BigInt::from(x))
}
}
impl From<u8> for Int {
fn from(x:u8) -> Int {
Int(BigInt::from(x))
}
}
impl From<usize> for Int {
fn from(x:usize) -> Int {
Int(BigInt::from(x))
}
}
impl From<i128> for Int {
fn from(x:i128) -> Int {
Int(BigInt::from(x))
}
}
impl From<i64> for Int {
fn from(x:i64) -> Int {
Int(BigInt::from(x))
}
}
impl From<i32> for Int {
fn from(x:i32) -> Int {
Int(BigInt::from(x))
}
}
impl From<i8> for Int {
fn from(x:i8) -> Int {
Int(BigInt::from(x))
}
}
impl From<isize> for Int {
fn from(x:isize) -> Int {
Int(BigInt::from(x))
}
}
|
pub mod job;
pub mod util;
pub mod signal;
pub mod queue;
pub mod fs;
use async_process::{Command, Stdio};
use futures_lite::{io::BufReader, prelude::*};
/// Spawn `ls` on a hard-coded directory and stream its stdout to our stdout
/// line by line. NOTE(review): the path is developer-local; presumably this
/// is demo/scratch code — confirm before relying on it.
pub async fn cmd() -> std::io::Result<()> {
    let mut child = Command::new("ls")
        .arg("/home/chrisp/dvsa/dito/")
        .stdout(Stdio::piped())
        .spawn()?;
    // `take()` moves the piped stdout out of the child; BufReader::lines()
    // yields an async stream of lines.
    let mut lines = BufReader::new(child.stdout.take().unwrap()).lines();
    while let Some(line) = lines.next().await {
        println!("{}", line?);
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    /// Smoke test: confirms the test harness itself runs.
    #[test]
    fn it_works() {
        let sum = 2 + 2;
        assert_eq!(sum, 4);
    }
}
|
#![allow(non_snake_case)]
use gl;
use gl::types::*;
use std::ffi::{CStr, CString};
use std::fs::File;
use std::io::Read;
use std::{ptr, str};
use cgmath::prelude::*;
use crate::types::*;
/// Wrapper around a linked OpenGL shader program.
pub struct Shader {
    // OpenGL program object handle, as returned by `gl::CreateProgram`.
    pub ID: u32
}
#[allow(dead_code)]
impl Shader {
pub fn new(vertexPath: &str, fragmentPath: &str) -> Shader {
let mut shader = Shader { ID: 0 };
let mut vShaderFile = File::open(vertexPath).unwrap_or_else(|_| panic!("Failed to open {}", vertexPath));
let mut fShaderFile = File::open(fragmentPath).unwrap_or_else(|_| panic!("Failed to open {}", fragmentPath));
let mut vertexCode = String::new();
let mut fragmentCode = String::new();
vShaderFile
.read_to_string(&mut vertexCode)
.expect("Failed to read vertex shader");
fShaderFile
.read_to_string(&mut fragmentCode)
.expect("Failed to read fragment shader");
let vShaderCode = CString::new(vertexCode.as_bytes()).unwrap();
let fShaderCode = CString::new(fragmentCode.as_bytes()).unwrap();
unsafe {
let vertex = gl::CreateShader(gl::VERTEX_SHADER);
gl::ShaderSource(vertex, 1, &vShaderCode.as_ptr(), ptr::null());
gl::CompileShader(vertex);
shader.checkCompileErrors(vertex, "VERTEX");
let fragment = gl::CreateShader(gl::FRAGMENT_SHADER);
gl::ShaderSource(fragment, 1, &fShaderCode.as_ptr(), ptr::null());
gl::CompileShader(fragment);
shader.checkCompileErrors(fragment, "FRAGMENT");
let ID = gl::CreateProgram();
gl::AttachShader(ID, vertex);
gl::AttachShader(ID, fragment);
gl::LinkProgram(ID);
shader.checkCompileErrors(ID, "PROGRAM");
gl::DeleteShader(vertex);
gl::DeleteShader(fragment);
shader.ID = ID;
}
shader
}
pub fn initShader(&self, model: &Matrix4, view: &Matrix4, projection: &Matrix4) {
unsafe {
self.useProgram();
self.setMat4(c_str!("model"), model);
self.setMat4(c_str!("view"), view);
self.setMat4(c_str!("projection"), projection);
}
}
pub fn updateModel(&self, model: &Matrix4) {
unsafe {
self.useProgram();
self.setMat4(c_str!("model"), model);
}
}
pub fn loadLight(&self, position: &Vector3, colour: &Vector3, attenuation: &Vector3) {
unsafe {
self.useProgram();
self.setVector3(c_str!("lightPosition"), position);
self.setVector3(c_str!("lightColour"), colour);
self.setVector3(c_str!("attenuation"), attenuation);
}
}
pub fn loadShine(&self, damper: f32, reflectivity: f32) {
unsafe {
self.useProgram();
self.setFloat(c_str!("shineDamper"), damper);
self.setFloat(c_str!("reflectivity"), reflectivity);
}
}
pub unsafe fn useProgram(&self) {
gl::UseProgram(self.ID)
}
pub unsafe fn setBool(&self, name: &CStr, value: bool) {
gl::Uniform1i(gl::GetUniformLocation(self.ID, name.as_ptr()), value as i32);
}
pub unsafe fn setInt(&self, name: &CStr, value: i32) {
gl::Uniform1i(gl::GetUniformLocation(self.ID, name.as_ptr()), value);
}
pub unsafe fn setFloat(&self, name: &CStr, value: f32) {
gl::Uniform1f(gl::GetUniformLocation(self.ID, name.as_ptr()), value);
}
pub unsafe fn setVector3(&self, name: &CStr, value: &Vector3) {
gl::Uniform3fv(gl::GetUniformLocation(self.ID, name.as_ptr()), 1, value.as_ptr());
}
pub unsafe fn setVec3(&self, name: &CStr, x: f32, y: f32, z: f32) {
gl::Uniform3f(gl::GetUniformLocation(self.ID, name.as_ptr()), x, y, z);
}
pub unsafe fn setMat4(&self, name: &CStr, mat: &Matrix4) {
gl::UniformMatrix4fv(
gl::GetUniformLocation(self.ID, name.as_ptr()),
1,
gl::FALSE,
mat.as_ptr(),
);
}
unsafe fn checkCompileErrors(&self, shader: u32, type_: &str) {
let mut success = gl::FALSE as GLint;
let mut infoLog = Vec::with_capacity(1024);
infoLog.set_len(1024 - 1); // subtract 1 to skip the trailing null character
if type_ != "PROGRAM" {
gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut success);
if success != gl::TRUE as GLint {
gl::GetShaderInfoLog(shader, 1024, ptr::null_mut(), infoLog.as_mut_ptr() as *mut GLchar);
println!(
"ERROR::SHADER_COMPILATION_ERROR of type: {}\n{}\n \
-- --------------------------------------------------- -- ",
type_,
str::from_utf8(&infoLog).unwrap()
);
}
} else {
gl::GetProgramiv(shader, gl::LINK_STATUS, &mut success);
if success != gl::TRUE as GLint {
gl::GetProgramInfoLog(shader, 1024, ptr::null_mut(), infoLog.as_mut_ptr() as *mut GLchar);
println!(
"ERROR::PROGRAM_LINKING_ERROR of type: {}\n{}\n \
-- --------------------------------------------------- -- ",
type_,
str::from_utf8(&infoLog).unwrap()
);
}
}
}
}
|
#[cfg(test)]
mod test_jump_to {
    use diar::{
        command::CommandError,
        commands::jump::{jump_to, JumpTo},
        domain::model::Favorite,
    };
    use crate::infrastructure::inmemory::repository::Repository;

    /// Jumping by an existing key resolves to that favourite's path.
    #[test]
    fn with_key() {
        let favorite = Favorite::new("name1", "/");
        let repository = Repository::new(vec![favorite.clone()]);
        let path = jump_to(&repository, JumpTo::Key(favorite.name())).unwrap();
        assert_eq!(path, "/")
    }

    /// An unknown key surfaces `CommandError::GivenKeyNotFound`.
    #[test]
    fn with_key_but_not_exists() {
        let repository = Repository::new(Vec::new());
        let error = jump_to(&repository, JumpTo::Key("".to_string()))
            .err()
            .unwrap();
        assert_eq!(error.to_string(), CommandError::GivenKeyNotFound.to_string())
    }

    //TODO: add tests for jump_to given JumpTo::FuzzyFinder and JumpTo::ProjectRoot
}
|
use core::f64;
use std::{fs::File, str::FromStr};
/// Parse the string `s` as a coordinate pair, like `"400x600"` or `"1.0,0.5"`,
/// split on the first occurrence of `separator`. Returns `None` when the
/// separator is absent or either half fails to parse as `T`.
fn parse_pair<T: FromStr>(s: &str, separator: char) -> Option<(T, T)> {
    let index = s.find(separator)?;
    let left = T::from_str(&s[..index]).ok()?;
    let right = T::from_str(&s[index + 1..]).ok()?;
    Some((left, right))
}
/// `parse_pair` rejects empty, one-sided and trailing-junk input, and accepts
/// well-formed integer and float pairs.
#[test]
fn test_parse_pair() {
    assert_eq!(parse_pair::<i32>("", ','), None);
    assert_eq!(parse_pair::<i32>("10,", ','), None);
    assert_eq!(parse_pair::<i32>(",10", ','), None);
    assert_eq!(parse_pair::<i32>("10,20", ','), Some((10, 20)));
    // Trailing "xy" makes the second half fail to parse as i32.
    assert_eq!(parse_pair::<i32>("10,20xy", ','), None);
    assert_eq!(parse_pair::<f64>("0.5x", 'x'), None);
    assert_eq!(parse_pair::<f64>("0.5x1.5", 'x'), Some((0.5, 1.50)));
}
use num::Complex;
/// Parse a pair of floating-point numbers separated by a comma as a complex number.
fn parse_complex(s: &str) -> Option<Complex<f64>> {
    parse_pair(s, ',').map(|(re, im)| Complex { re, im })
}
/// A well-formed "re,im" pair parses; a lone number does not.
#[test]
fn test_parse_complex() {
    assert_eq!(parse_complex("1.25,-0.0625"), Some(Complex { re: 1.25, im: -0.0625 }));
    assert_eq!(parse_complex("-0.0625"), None);
}
/// Given the row and column of a pixel in the output image, return the corresponding point on the complex plane.
/// `bounds` is a pair giving the width and height of the image in pixels.
/// `pixel` is a (column, row) pair indicating a particular pixel in that image.
/// The `upper_left` and `lower_right` parameters are points on the complex plane designating the area our image covers.
fn pixel_to_point(bounds: (usize, usize),
                  pixel: (usize, usize),
                  upper_left: Complex<f64>,
                  lower_right: Complex<f64>) -> Complex<f64> {
    // Span of the viewed rectangle along each axis of the complex plane.
    let width = lower_right.re - upper_left.re;
    let height = upper_left.im - lower_right.im;
    // Fraction of the way across/down the image this pixel sits.
    let frac_x = pixel.0 as f64 / bounds.0 as f64;
    let frac_y = pixel.1 as f64 / bounds.1 as f64;
    Complex {
        re: upper_left.re + (frac_x * width),
        // Subtraction: pixel rows grow downward while the imaginary axis grows upward.
        im: upper_left.im - (frac_y * height),
    }
}
/// Pixel (25, 175) in a 100x200 image over [-1,1]x[-1,1] maps to -0.5 - 0.75i.
#[test]
fn test_pixel_to_point() {
    assert_eq!(pixel_to_point((100,200),
                              (25,175),
                              Complex { re: -1.0, im: 1.0},
                              Complex { re: 1.0, im: -1.0}),
               Complex { re: -0.5, im: -0.75});
}
/// Try to determine if complex point `c` is in the Mandelbrot set, using at
/// most `limit` iterations to decide. Returns `Some(i)` with the iteration at
/// which `z` left the radius-2 disc, or `None` if it never escaped.
fn escape_time(c: Complex<f64>, limit: usize) -> Option<usize> {
    let mut z = Complex { re: 0.0, im: 0.0 };
    (0..limit).find(|_| {
        // Escape radius 2 <=> squared norm 4 (avoids a square root).
        if z.norm_sqr() > 4.0 {
            return true;
        }
        z = z * z + c;
        false
    })
}
/// Render a rectangle of the Mandelbrot set into a buffer of pixels.
///
/// The `bounds` argument gives the width and height of the buffer `pixels`,
/// which holds one grayscale pixel per byte. The `upper_left` and
/// `lower_right` arguments specify points on the complex plane corresponding
/// to the upper-left and lower-right corners of the pixel buffer.
fn render(pixels: &mut [u8],
          bounds: (usize, usize),
          upper_left: Complex<f64>,
          lower_right: Complex<f64>) {
    assert!(pixels.len() == bounds.0 * bounds.1);
    // One chunk per image row; enumerate supplies the row/column indices.
    for (row, band) in pixels.chunks_mut(bounds.0).enumerate() {
        for (column, pixel) in band.iter_mut().enumerate() {
            let point = pixel_to_point(bounds, (column, row), upper_left, lower_right);
            // Members of the set are black (0); escapees fade with escape speed.
            *pixel = escape_time(point, 255).map_or(0, |count| 255 - count as u8);
        }
    }
}
use image::{ColorType, bmp::BmpEncoder};
/// Write the buffer `pixels`, whose dimensions are given by `bounds`, as a
/// grayscale BMP image to the file named `filename`.
///
/// # Errors
/// Returns any file-creation error, and any encoder error wrapped into an
/// `std::io::Error` (the previous version `expect`ed on encode failure,
/// panicking inside a function whose signature promises a `Result`).
fn write_image(pixels: &[u8], bounds: (usize,usize), filename: &str) -> Result<(), std::io::Error> {
    let mut output = File::create(filename)?;
    let mut encoder = BmpEncoder::new(&mut output);
    encoder
        .encode(pixels, bounds.0 as u32, bounds.1 as u32, ColorType::L8)
        .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
    Ok(())
}
use rayon::prelude::*;
use std::env;
/// CLI entry point: `PROGRAM FILE PIXELS UPPERLEFT LOWERRIGHT`, e.g.
/// `mandel out.bmp 1000x750 -1.20,0.35 -1,0.20`. Renders the requested view
/// of the Mandelbrot set in parallel (one rayon task per pixel row) and
/// writes it to FILE.
/// NOTE(review): the usage example and the final expect message say "PNG",
/// but `write_image` encodes BMP — confirm which format is intended.
fn main() {
    let args: Vec<String> = env::args().collect();
    println!("args len:{}\targs={:?}", args.len(), args);
    if args.len() != 5 {
        eprintln!("Usage: {} FILE PIXELS UPPERLEFT LOWERRIGHT", args[0]);
        eprintln!("Example: {} mandel.png 1000x750 -1.20,0.35 -1,0.20", args[0]);
        std::process::exit(1);
    }
    let bounds = parse_pair(&args[2], 'x').expect("error parsing image dimensions");
    let upper_left = parse_complex(&args[3]).expect("error parsing upper-left corner point");
    let lower_right = parse_complex(&args[4]).expect("error parsing lower-right corner point");
    let mut pixels = vec![0; bounds.0 * bounds.1];
    // slice up `pixels` into horizontal bands
    {
        let bands: Vec<(usize, &mut [u8])> = pixels
            .chunks_mut(bounds.0)
            .enumerate()
            .collect();
        // Each band is a single pixel row; rows are independent, so rayon can
        // render them concurrently.
        bands.into_par_iter()
            .for_each(|(i,band)| {
                let top = i;
                let band_bounds = (bounds.0, 1);
                // Map this row's own corners back onto the complex plane.
                let band_upper_left = pixel_to_point(bounds, (0, top), upper_left, lower_right);
                let band_lower_right = pixel_to_point(bounds, (bounds.0, top+1), upper_left, lower_right);
                render(band, band_bounds, band_upper_left, band_lower_right);
            });
    }
    write_image(&pixels, bounds, &args[1]).expect("error writing PNG file");
}
|
#[doc = "Register `CFGR1` reader"]
pub type R = crate::R<CFGR1_SPEC>;
#[doc = "Register `CFGR1` writer"]
pub type W = crate::W<CFGR1_SPEC>;
#[doc = "Field `FWDIS` reader - Firewall disable"]
pub type FWDIS_R = crate::BitReader<FWDIS_A>;
#[doc = "Firewall disable\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FWDIS_A {
#[doc = "0: Firewall protection enabled"]
Enabled = 0,
#[doc = "1: Firewall protection disabled"]
Disabled = 1,
}
impl From<FWDIS_A> for bool {
#[inline(always)]
fn from(variant: FWDIS_A) -> Self {
variant as u8 != 0
}
}
impl FWDIS_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> FWDIS_A {
match self.bits {
false => FWDIS_A::Enabled,
true => FWDIS_A::Disabled,
}
}
#[doc = "Firewall protection enabled"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == FWDIS_A::Enabled
}
#[doc = "Firewall protection disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == FWDIS_A::Disabled
}
}
#[doc = "Field `FWDIS` writer - Firewall disable"]
pub type FWDIS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, FWDIS_A>;
impl<'a, REG, const O: u8> FWDIS_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Firewall protection enabled"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(FWDIS_A::Enabled)
}
#[doc = "Firewall protection disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(FWDIS_A::Disabled)
}
}
#[doc = "Field `BOOSTEN` reader - I/O analog switch voltage booster enable"]
pub type BOOSTEN_R = crate::BitReader<BOOSTEN_A>;
#[doc = "I/O analog switch voltage booster enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BOOSTEN_A {
#[doc = "0: I/O analog switches are supplied by VDDA voltage. This is the recommended configuration when using the ADC in high VDDA voltage operation"]
Disabled = 0,
#[doc = "1: I/O analog switches are supplied by a dedicated voltage booster (supplied by VDD). This is the recommended configuration when using the ADC in low VDDA voltage operation"]
Enabled = 1,
}
impl From<BOOSTEN_A> for bool {
#[inline(always)]
fn from(variant: BOOSTEN_A) -> Self {
variant as u8 != 0
}
}
impl BOOSTEN_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BOOSTEN_A {
match self.bits {
false => BOOSTEN_A::Disabled,
true => BOOSTEN_A::Enabled,
}
}
#[doc = "I/O analog switches are supplied by VDDA voltage. This is the recommended configuration when using the ADC in high VDDA voltage operation"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == BOOSTEN_A::Disabled
}
#[doc = "I/O analog switches are supplied by a dedicated voltage booster (supplied by VDD). This is the recommended configuration when using the ADC in low VDDA voltage operation"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == BOOSTEN_A::Enabled
}
}
#[doc = "Field `BOOSTEN` writer - I/O analog switch voltage booster enable"]
pub type BOOSTEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, BOOSTEN_A>;
impl<'a, REG, const O: u8> BOOSTEN_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "I/O analog switches are supplied by VDDA voltage. This is the recommended configuration when using the ADC in high VDDA voltage operation"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(BOOSTEN_A::Disabled)
}
#[doc = "I/O analog switches are supplied by a dedicated voltage booster (supplied by VDD). This is the recommended configuration when using the ADC in low VDDA voltage operation"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(BOOSTEN_A::Enabled)
}
}
#[doc = "Field `ANASWVDD` reader - GPIO analog switch control voltage selection when at least one analog peripheral supplied by VDDA is enabled (COMP, OPAMP, VREFBUF, ADC,...)"]
pub type ANASWVDD_R = crate::BitReader<ANASWVDD_A>;
#[doc = "GPIO analog switch control voltage selection when at least one analog peripheral supplied by VDDA is enabled (COMP, OPAMP, VREFBUF, ADC,...)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ANASWVDD_A {
#[doc = "0: I/O analog switches supplied by VDDA or booster when booster is ON"]
Vdda = 0,
#[doc = "1: I/O analog switches supplied by VDD"]
Vdd = 1,
}
impl From<ANASWVDD_A> for bool {
#[inline(always)]
fn from(variant: ANASWVDD_A) -> Self {
variant as u8 != 0
}
}
impl ANASWVDD_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ANASWVDD_A {
match self.bits {
false => ANASWVDD_A::Vdda,
true => ANASWVDD_A::Vdd,
}
}
#[doc = "I/O analog switches supplied by VDDA or booster when booster is ON"]
#[inline(always)]
pub fn is_vdda(&self) -> bool {
*self == ANASWVDD_A::Vdda
}
#[doc = "I/O analog switches supplied by VDD"]
#[inline(always)]
pub fn is_vdd(&self) -> bool {
*self == ANASWVDD_A::Vdd
}
}
#[doc = "Field `ANASWVDD` writer - GPIO analog switch control voltage selection when at least one analog peripheral supplied by VDDA is enabled (COMP, OPAMP, VREFBUF, ADC,...)"]
pub type ANASWVDD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ANASWVDD_A>;
impl<'a, REG, const O: u8> ANASWVDD_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "I/O analog switches supplied by VDDA or booster when booster is ON"]
#[inline(always)]
pub fn vdda(self) -> &'a mut crate::W<REG> {
self.variant(ANASWVDD_A::Vdda)
}
#[doc = "I/O analog switches supplied by VDD"]
#[inline(always)]
pub fn vdd(self) -> &'a mut crate::W<REG> {
self.variant(ANASWVDD_A::Vdd)
}
}
#[doc = "Field `I2C_PB6_FMP` reader - Fast-mode Plus (Fm+) driving capability activation on PB6"]
pub type I2C_PB6_FMP_R = crate::BitReader<I2C_PB6_FMP_A>;
#[doc = "Fast-mode Plus (Fm+) driving capability activation on PB6\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C_PB6_FMP_A {
#[doc = "0: PBx pin operates in standard mode"]
Disabled = 0,
#[doc = "1: Fm+ mode enabled on PB7 pin, and the Speed control is bypassed"]
Enabled = 1,
}
impl From<I2C_PB6_FMP_A> for bool {
#[inline(always)]
fn from(variant: I2C_PB6_FMP_A) -> Self {
variant as u8 != 0
}
}
impl I2C_PB6_FMP_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> I2C_PB6_FMP_A {
match self.bits {
false => I2C_PB6_FMP_A::Disabled,
true => I2C_PB6_FMP_A::Enabled,
}
}
#[doc = "PBx pin operates in standard mode"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == I2C_PB6_FMP_A::Disabled
}
#[doc = "Fm+ mode enabled on PB7 pin, and the Speed control is bypassed"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == I2C_PB6_FMP_A::Enabled
}
}
#[doc = "Field `I2C_PB6_FMP` writer - Fast-mode Plus (Fm+) driving capability activation on PB6"]
pub type I2C_PB6_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C_PB6_FMP_A>;
impl<'a, REG, const O: u8> I2C_PB6_FMP_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "PBx pin operates in standard mode"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(I2C_PB6_FMP_A::Disabled)
}
#[doc = "Fm+ mode enabled on PB7 pin, and the Speed control is bypassed"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(I2C_PB6_FMP_A::Enabled)
}
}
#[doc = "Field `I2C_PB7_FMP` reader - Fast-mode Plus (Fm+) driving capability activation on PB7"]
pub use I2C_PB6_FMP_R as I2C_PB7_FMP_R;
#[doc = "Field `I2C_PB8_FMP` reader - Fast-mode Plus (Fm+) driving capability activation on PB8"]
pub use I2C_PB6_FMP_R as I2C_PB8_FMP_R;
#[doc = "Field `I2C_PB9_FMP` reader - Fast-mode Plus (Fm+) driving capability activation on PB9"]
pub use I2C_PB6_FMP_R as I2C_PB9_FMP_R;
#[doc = "Field `I2C_PB7_FMP` writer - Fast-mode Plus (Fm+) driving capability activation on PB7"]
pub use I2C_PB6_FMP_W as I2C_PB7_FMP_W;
#[doc = "Field `I2C_PB8_FMP` writer - Fast-mode Plus (Fm+) driving capability activation on PB8"]
pub use I2C_PB6_FMP_W as I2C_PB8_FMP_W;
#[doc = "Field `I2C_PB9_FMP` writer - Fast-mode Plus (Fm+) driving capability activation on PB9"]
pub use I2C_PB6_FMP_W as I2C_PB9_FMP_W;
#[doc = "Field `I2C1_FMP` reader - I2C1 Fast-mode Plus driving capability activation"]
pub type I2C1_FMP_R = crate::BitReader<I2C1_FMP_A>;
#[doc = "I2C1 Fast-mode Plus driving capability activation\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C1_FMP_A {
#[doc = "0: Fm+ mode is not enabled on I2Cx pins selected through AF selection bits"]
Disabled = 0,
#[doc = "1: Fm+ mode is enabled on I2Cx pins selected through AF selection bits"]
Enabled = 1,
}
impl From<I2C1_FMP_A> for bool {
#[inline(always)]
fn from(variant: I2C1_FMP_A) -> Self {
variant as u8 != 0
}
}
impl I2C1_FMP_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> I2C1_FMP_A {
match self.bits {
false => I2C1_FMP_A::Disabled,
true => I2C1_FMP_A::Enabled,
}
}
#[doc = "Fm+ mode is not enabled on I2Cx pins selected through AF selection bits"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == I2C1_FMP_A::Disabled
}
#[doc = "Fm+ mode is enabled on I2Cx pins selected through AF selection bits"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == I2C1_FMP_A::Enabled
}
}
#[doc = "Field `I2C1_FMP` writer - I2C1 Fast-mode Plus driving capability activation"]
pub type I2C1_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C1_FMP_A>;
impl<'a, REG, const O: u8> I2C1_FMP_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Fm+ mode is not enabled on I2Cx pins selected through AF selection bits"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(I2C1_FMP_A::Disabled)
}
#[doc = "Fm+ mode is enabled on I2Cx pins selected through AF selection bits"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(I2C1_FMP_A::Enabled)
}
}
#[doc = "Field `I2C2_FMP` reader - I2C2 Fast-mode Plus driving capability activation"]
pub use I2C1_FMP_R as I2C2_FMP_R;
#[doc = "Field `I2C3_FMP` reader - I2C3 Fast-mode Plus driving capability activation"]
pub use I2C1_FMP_R as I2C3_FMP_R;
#[doc = "Field `I2C4_FMP` reader - I2C3 Fast-mode Plus driving capability activation"]
pub use I2C1_FMP_R as I2C4_FMP_R;
#[doc = "Field `I2C2_FMP` writer - I2C2 Fast-mode Plus driving capability activation"]
pub use I2C1_FMP_W as I2C2_FMP_W;
#[doc = "Field `I2C3_FMP` writer - I2C3 Fast-mode Plus driving capability activation"]
pub use I2C1_FMP_W as I2C3_FMP_W;
#[doc = "Field `I2C4_FMP` writer - I2C3 Fast-mode Plus driving capability activation"]
pub use I2C1_FMP_W as I2C4_FMP_W;
#[doc = "Field `FPU_IE0` reader - Invalid operation interrupt enable"]
pub type FPU_IE0_R = crate::BitReader<FPU_IE0_A>;
#[doc = "Invalid operation interrupt enable\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FPU_IE0_A {
#[doc = "0: Interrupt disabled"]
Disabled = 0,
#[doc = "1: Interrupt enabled"]
Enabled = 1,
}
impl From<FPU_IE0_A> for bool {
#[inline(always)]
fn from(variant: FPU_IE0_A) -> Self {
variant as u8 != 0
}
}
impl FPU_IE0_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> FPU_IE0_A {
match self.bits {
false => FPU_IE0_A::Disabled,
true => FPU_IE0_A::Enabled,
}
}
#[doc = "Interrupt disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == FPU_IE0_A::Disabled
}
#[doc = "Interrupt enabled"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == FPU_IE0_A::Enabled
}
}
#[doc = "Field `FPU_IE0` writer - Invalid operation interrupt enable"]
pub type FPU_IE0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, FPU_IE0_A>;
impl<'a, REG, const O: u8> FPU_IE0_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Interrupt disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(FPU_IE0_A::Disabled)
}
#[doc = "Interrupt enabled"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(FPU_IE0_A::Enabled)
}
}
#[doc = "Field `FPU_IE1` reader - Divide-by-zero interrupt enable"]
pub use FPU_IE0_R as FPU_IE1_R;
#[doc = "Field `FPU_IE2` reader - Underflow interrupt enable"]
pub use FPU_IE0_R as FPU_IE2_R;
#[doc = "Field `FPU_IE3` reader - Overflow interrupt enable"]
pub use FPU_IE0_R as FPU_IE3_R;
#[doc = "Field `FPU_IE4` reader - Input denormal interrupt enable"]
pub use FPU_IE0_R as FPU_IE4_R;
#[doc = "Field `FPU_IE5` reader - Inexact interrupt enable"]
pub use FPU_IE0_R as FPU_IE5_R;
#[doc = "Field `FPU_IE1` writer - Divide-by-zero interrupt enable"]
pub use FPU_IE0_W as FPU_IE1_W;
#[doc = "Field `FPU_IE2` writer - Underflow interrupt enable"]
pub use FPU_IE0_W as FPU_IE2_W;
#[doc = "Field `FPU_IE3` writer - Overflow interrupt enable"]
pub use FPU_IE0_W as FPU_IE3_W;
#[doc = "Field `FPU_IE4` writer - Input denormal interrupt enable"]
pub use FPU_IE0_W as FPU_IE4_W;
#[doc = "Field `FPU_IE5` writer - Inexact interrupt enable"]
pub use FPU_IE0_W as FPU_IE5_W;
// Field readers for CFGR1; each accessor extracts its field's bit from the
// cached register value in `self.bits`.
// Fix: the doc for `i2c4_fmp` (bit 23) previously said "I2C3" — a
// copy-paste of the bit-22 description.
impl R {
    #[doc = "Bit 0 - Firewall disable"]
    #[inline(always)]
    pub fn fwdis(&self) -> FWDIS_R {
        FWDIS_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 8 - I/O analog switch voltage booster enable"]
    #[inline(always)]
    pub fn boosten(&self) -> BOOSTEN_R {
        BOOSTEN_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - GPIO analog switch control voltage selection when at least one analog peripheral supplied by VDDA is enabled (COMP, OPAMP, VREFBUF, ADC,...)"]
    #[inline(always)]
    pub fn anaswvdd(&self) -> ANASWVDD_R {
        ANASWVDD_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 16 - Fast-mode Plus (Fm+) driving capability activation on PB6"]
    #[inline(always)]
    pub fn i2c_pb6_fmp(&self) -> I2C_PB6_FMP_R {
        I2C_PB6_FMP_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - Fast-mode Plus (Fm+) driving capability activation on PB7"]
    #[inline(always)]
    pub fn i2c_pb7_fmp(&self) -> I2C_PB7_FMP_R {
        I2C_PB7_FMP_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - Fast-mode Plus (Fm+) driving capability activation on PB8"]
    #[inline(always)]
    pub fn i2c_pb8_fmp(&self) -> I2C_PB8_FMP_R {
        I2C_PB8_FMP_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - Fast-mode Plus (Fm+) driving capability activation on PB9"]
    #[inline(always)]
    pub fn i2c_pb9_fmp(&self) -> I2C_PB9_FMP_R {
        I2C_PB9_FMP_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - I2C1 Fast-mode Plus driving capability activation"]
    #[inline(always)]
    pub fn i2c1_fmp(&self) -> I2C1_FMP_R {
        I2C1_FMP_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - I2C2 Fast-mode Plus driving capability activation"]
    #[inline(always)]
    pub fn i2c2_fmp(&self) -> I2C2_FMP_R {
        I2C2_FMP_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - I2C3 Fast-mode Plus driving capability activation"]
    #[inline(always)]
    pub fn i2c3_fmp(&self) -> I2C3_FMP_R {
        I2C3_FMP_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - I2C4 Fast-mode Plus driving capability activation"]
    #[inline(always)]
    pub fn i2c4_fmp(&self) -> I2C4_FMP_R {
        I2C4_FMP_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 26 - Invalid operation interrupt enable"]
    #[inline(always)]
    pub fn fpu_ie0(&self) -> FPU_IE0_R {
        FPU_IE0_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - Divide-by-zero interrupt enable"]
    #[inline(always)]
    pub fn fpu_ie1(&self) -> FPU_IE1_R {
        FPU_IE1_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - Underflow interrupt enable"]
    #[inline(always)]
    pub fn fpu_ie2(&self) -> FPU_IE2_R {
        FPU_IE2_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - Overflow interrupt enable"]
    #[inline(always)]
    pub fn fpu_ie3(&self) -> FPU_IE3_R {
        FPU_IE3_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - Input denormal interrupt enable"]
    #[inline(always)]
    pub fn fpu_ie4(&self) -> FPU_IE4_R {
        FPU_IE4_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - Inexact interrupt enable"]
    #[inline(always)]
    pub fn fpu_ie5(&self) -> FPU_IE5_R {
        FPU_IE5_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Field writers for CFGR1; each method returns a typed writer proxy bound
// to its field's bit offset (the const generic parameter).
// Fix: the doc for `i2c4_fmp` (bit 23) previously said "I2C3" — a
// copy-paste of the bit-22 description.
impl W {
    #[doc = "Bit 0 - Firewall disable"]
    #[inline(always)]
    #[must_use]
    pub fn fwdis(&mut self) -> FWDIS_W<CFGR1_SPEC, 0> {
        FWDIS_W::new(self)
    }
    #[doc = "Bit 8 - I/O analog switch voltage booster enable"]
    #[inline(always)]
    #[must_use]
    pub fn boosten(&mut self) -> BOOSTEN_W<CFGR1_SPEC, 8> {
        BOOSTEN_W::new(self)
    }
    #[doc = "Bit 9 - GPIO analog switch control voltage selection when at least one analog peripheral supplied by VDDA is enabled (COMP, OPAMP, VREFBUF, ADC,...)"]
    #[inline(always)]
    #[must_use]
    pub fn anaswvdd(&mut self) -> ANASWVDD_W<CFGR1_SPEC, 9> {
        ANASWVDD_W::new(self)
    }
    #[doc = "Bit 16 - Fast-mode Plus (Fm+) driving capability activation on PB6"]
    #[inline(always)]
    #[must_use]
    pub fn i2c_pb6_fmp(&mut self) -> I2C_PB6_FMP_W<CFGR1_SPEC, 16> {
        I2C_PB6_FMP_W::new(self)
    }
    #[doc = "Bit 17 - Fast-mode Plus (Fm+) driving capability activation on PB7"]
    #[inline(always)]
    #[must_use]
    pub fn i2c_pb7_fmp(&mut self) -> I2C_PB7_FMP_W<CFGR1_SPEC, 17> {
        I2C_PB7_FMP_W::new(self)
    }
    #[doc = "Bit 18 - Fast-mode Plus (Fm+) driving capability activation on PB8"]
    #[inline(always)]
    #[must_use]
    pub fn i2c_pb8_fmp(&mut self) -> I2C_PB8_FMP_W<CFGR1_SPEC, 18> {
        I2C_PB8_FMP_W::new(self)
    }
    #[doc = "Bit 19 - Fast-mode Plus (Fm+) driving capability activation on PB9"]
    #[inline(always)]
    #[must_use]
    pub fn i2c_pb9_fmp(&mut self) -> I2C_PB9_FMP_W<CFGR1_SPEC, 19> {
        I2C_PB9_FMP_W::new(self)
    }
    #[doc = "Bit 20 - I2C1 Fast-mode Plus driving capability activation"]
    #[inline(always)]
    #[must_use]
    pub fn i2c1_fmp(&mut self) -> I2C1_FMP_W<CFGR1_SPEC, 20> {
        I2C1_FMP_W::new(self)
    }
    #[doc = "Bit 21 - I2C2 Fast-mode Plus driving capability activation"]
    #[inline(always)]
    #[must_use]
    pub fn i2c2_fmp(&mut self) -> I2C2_FMP_W<CFGR1_SPEC, 21> {
        I2C2_FMP_W::new(self)
    }
    #[doc = "Bit 22 - I2C3 Fast-mode Plus driving capability activation"]
    #[inline(always)]
    #[must_use]
    pub fn i2c3_fmp(&mut self) -> I2C3_FMP_W<CFGR1_SPEC, 22> {
        I2C3_FMP_W::new(self)
    }
    #[doc = "Bit 23 - I2C4 Fast-mode Plus driving capability activation"]
    #[inline(always)]
    #[must_use]
    pub fn i2c4_fmp(&mut self) -> I2C4_FMP_W<CFGR1_SPEC, 23> {
        I2C4_FMP_W::new(self)
    }
    #[doc = "Bit 26 - Invalid operation interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn fpu_ie0(&mut self) -> FPU_IE0_W<CFGR1_SPEC, 26> {
        FPU_IE0_W::new(self)
    }
    #[doc = "Bit 27 - Divide-by-zero interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn fpu_ie1(&mut self) -> FPU_IE1_W<CFGR1_SPEC, 27> {
        FPU_IE1_W::new(self)
    }
    #[doc = "Bit 28 - Underflow interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn fpu_ie2(&mut self) -> FPU_IE2_W<CFGR1_SPEC, 28> {
        FPU_IE2_W::new(self)
    }
    #[doc = "Bit 29 - Overflow interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn fpu_ie3(&mut self) -> FPU_IE3_W<CFGR1_SPEC, 29> {
        FPU_IE3_W::new(self)
    }
    #[doc = "Bit 30 - Input denormal interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn fpu_ie4(&mut self) -> FPU_IE4_W<CFGR1_SPEC, 30> {
        FPU_IE4_W::new(self)
    }
    #[doc = "Bit 31 - Inexact interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn fpu_ie5(&mut self) -> FPU_IE5_W<CFGR1_SPEC, 31> {
        FPU_IE5_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "configuration register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CFGR1_SPEC;
// CFGR1 is a 32-bit register.
impl crate::RegisterSpec for CFGR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cfgr1::R`](R) reader structure"]
impl crate::Readable for CFGR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cfgr1::W`](W) writer structure"]
impl crate::Writable for CFGR1_SPEC {
    // svd2rust write-side bitmaps; both zero for this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CFGR1 to value 0x7c00_0001"]
impl crate::Resettable for CFGR1_SPEC {
    const RESET_VALUE: Self::Ux = 0x7c00_0001;
}
|
use crate::Span;
use serde::{Deserialize, Serialize};
use std::fmt::Display;
/// Binary (infix) operators of the AST; see the `Display` impl below for
/// each operator's textual form.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum Operator {
    Equal,
    NotEqual,
    LessThan,
    GreaterThan,
    LessThanOrEqual,
    GreaterThanOrEqual,
    RegexMatch,
    NotRegexMatch,
    Plus,
    Minus,
    Multiply,
    Divide,
    In,
    NotIn,
    Modulo,
    FloorDivision,
    And,
    Or,
    Pow,
    StartsWith,
    EndsWith,
    BitOr,
    BitXor,
    BitAnd,
    ShiftLeft,
    ShiftRight,
}
impl Display for Operator {
    /// Writes the operator's source token (arms listed in declaration
    /// order of the enum).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let token = match self {
            Operator::Equal => "==",
            Operator::NotEqual => "!=",
            Operator::LessThan => "<",
            Operator::GreaterThan => ">",
            Operator::LessThanOrEqual => "<=",
            Operator::GreaterThanOrEqual => ">=",
            Operator::RegexMatch => "=~",
            Operator::NotRegexMatch => "!~",
            Operator::Plus => "+",
            Operator::Minus => "-",
            Operator::Multiply => "*",
            Operator::Divide => "/",
            Operator::In => "in",
            Operator::NotIn => "not-in",
            Operator::Modulo => "mod",
            Operator::FloorDivision => "fdiv",
            Operator::And => "&&",
            Operator::Or => "||",
            Operator::Pow => "**",
            Operator::StartsWith => "starts-with",
            Operator::EndsWith => "ends-with",
            Operator::BitOr => "bit-or",
            Operator::BitXor => "bit-xor",
            Operator::BitAnd => "bit-and",
            Operator::ShiftLeft => "bit-shl",
            Operator::ShiftRight => "bit-shr",
        };
        write!(f, "{}", token)
    }
}
/// Whether a range's right endpoint is part of the range.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Serialize, Deserialize)]
pub enum RangeInclusion {
    Inclusive,
    RightExclusive,
}
/// A range operator occurrence: its inclusivity plus source spans.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RangeOperator {
    pub inclusion: RangeInclusion,
    pub span: Span,
    // NOTE(review): presumably the span of the token following the
    // operator — confirm at the construction site.
    pub next_op_span: Span,
}
impl Display for RangeOperator {
    /// ".." for an inclusive range, "..<" when the right end is excluded.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let token = if self.inclusion == RangeInclusion::Inclusive {
            ".."
        } else {
            "..<"
        };
        f.write_str(token)
    }
}
|
use std::cmp;
use super::data::{ Transition, Object, Game, MessageLog, PlayerAction };
use crate::PLAYER;
/// Returns mutable references to two distinct elements of `items`,
/// preserving the (first_index, second_index) order in the result.
///
/// # Panics
///
/// Panics if the two indices are equal (aliasing two `&mut` to the same
/// element is impossible) or if either index is out of bounds.
pub fn mut_two<T>(first_index: usize, second_index: usize, items: &mut [T]) -> (&mut T, &mut T) {
    assert_ne!(first_index, second_index);
    // Split at the larger index so each reference lives in its own half.
    if first_index < second_index {
        let (head, tail) = items.split_at_mut(second_index);
        (&mut head[first_index], &mut tail[0])
    } else {
        let (head, tail) = items.split_at_mut(first_index);
        (&mut tail[0], &mut head[second_index])
    }
}
/// Looks up the value for `level` in a transition table: the last entry
/// whose `level` threshold is <= the given level wins; 0 if none matches.
pub fn from_dungeon_level(table: &[Transition], level: u32) -> u32 {
    // Walk backwards so the highest matching threshold is found first.
    for transition in table.iter().rev() {
        if level >= transition.level {
            return transition.value;
        }
    }
    0
}
pub fn vec_from_dungeon_level(table: Vec<Transition>, level: u32) -> u32 {
table
.iter()
.rev()
.find(|transition| level >= transition.level)
.map_or(0, |transition| transition.value)
}
// DEATH
/// Handles the player's death: logs the game over and turns the player
/// object into an inert corpse.
pub fn player_death(player: &mut Object, game: &mut Game) {
    game.log.add("You died!", tcod::colors::DARK_RED);
    // Replace the player with a corpse glyph and strip combat state.
    player.alive = false;
    player.fighter = None;
    player.char = '%';
    player.color = tcod::colors::DARK_RED;
    player.name = format!("The remains of {}. R.I.P.", player.name);
}
/// Handles a monster's death: logs the kill (with its XP reward) and turns
/// the monster into a harmless, walkable corpse.
pub fn monster_death(monster: &mut Object, game: &mut Game) {
    // Read the XP before the fighter component is removed below.
    let xp = monster.fighter.unwrap().xp;
    game.log.add(
        format!("{} dies in agony! You gain {} XP.", monster.name, xp),
        tcod::colors::DARK_ORANGE,
    );
    // Corpse: no blocking, no combat, no AI.
    monster.char = '%';
    monster.color = tcod::colors::DARK_RED;
    monster.blocks = false;
    monster.fighter = None;
    monster.ai = None;
    monster.alive = false;
    monster.name = format!("The stinking remains of a {}", monster.name);
}
/// Logs a growl message with the player's current power, if the player is
/// still alive.
pub fn growl(objects: &mut Vec<Object>, game: &mut Game) {
    // Dead players don't growl.
    if !objects[PLAYER].alive {
        return;
    }
    game.log.add(
        format!("{} power is {} and he growls.", objects[PLAYER].name, objects[PLAYER].power(game)),
        tcod::colors::WHITE,
    );
}
//
//pub fn growl(objects: &mut Vec<Object>, game: &mut Game) {
//// if objects[PLAYER].alive && objects[PLAYER]. != PlayerAction::DidntTakeTurn {
// if objects[PLAYER].alive {
// for object in objects {
// if object.name == "Player".to_string() {
// if object.alive {
// game.log.add(
// format!("Player power is {} and he growls.", object.power(game)),
// tcod::colors::WHITE
// );
// }
// }
// }
// }
//}
|
use clap::{App, Arg, ArgMatches};
use std::{sync::{mpsc::{Sender, Receiver}}, thread, time};
use my_tcp::{core::manager::{self, TaskMsg, TaskRet}, core::socket::Socket};
/// Entry point: parses the command line, starts the socket manager on a
/// background thread, then dispatches to the exec mode selected by `--ex`
/// (defaults to "window_test").
fn main() {
    println!("Starting a Transport Node!");
    // Parse command line arguments
    let arg_matches = App::new("My Node Program")
        .version("0.1.0")
        .author("Ruichun Ma <ruichun.ma@yale.edu>")
        .about("A test program for my transport protocol")
        .arg(
            Arg::with_name("exec_type")
                .long("ex")
                .takes_value(true)
                .help("the type of node execution"),
        )
        // for client transfer mode
        .arg(
            Arg::with_name("dest_addr")
                .long("da")
                .takes_value(true)
                .help("address of destination node"),
        )
        .arg(
            Arg::with_name("dest_port")
                .long("dp")
                .takes_value(true)
                .help("port of destination node"),
        )
        .arg(
            Arg::with_name("local_port")
                .long("lp")
                .takes_value(true)
                .help("Port of local node"),
        )
        .arg(
            Arg::with_name("local_addr")
                .long("la")
                .takes_value(true)
                .help("Address of local node"),
        )
        .arg(
            Arg::with_name("byte_num")
                .long("bn")
                .takes_value(true)
                .help("number of bytes to transfer"),
        )
        .arg(
            Arg::with_name("interval")
                .long("int")
                .takes_value(true)
                .help("execution interval of the transfer client, default 1 second"),
        )
        .arg(
            Arg::with_name("buf_size")
                .long("bs")
                .takes_value(true)
                .help("buffer size of the transfer client, default 65536"),
        )
        // args only for server
        .arg(
            Arg::with_name("backlog")
                .long("bl")
                .takes_value(true)
                // Fix: the help text was a copy-paste of the buf_size help.
                .help("maximum length of the pending connection queue, default 16"),
        )
        .get_matches();
    // get exec type (value_of yields &str, so a plain to_string suffices —
    // the previous parse::<String>() could never fail)
    let command: String = arg_matches
        .value_of("exec_type")
        .unwrap_or("window_test")
        .to_string();
    let args = parse_args(arg_matches);
    // ===== start socket manager ====
    let (mut socket_manager, task_sender, ret_channel_recv) = manager::SocketManager::new(args.local_addr.clone());
    thread::spawn(move || {
        socket_manager.start();
    });
    match command.as_str() {
        "transfer" => {
            exec_transfer(args, task_sender, ret_channel_recv);
        },
        "server" => {
            exec_server(args, task_sender, ret_channel_recv);
        },
        "local_test" => {
            exec_local_test(args, task_sender, ret_channel_recv);
        },
        "window_test" => {
            exec_window_test(args, task_sender, ret_channel_recv);
        }
        _ => {println!("Undefined exec command!");}
    }
}
// transfer command syntax:
// Synopsis:
//     Connect to a transfer server listening on port <port> at node
//     <dest>, using local port <localPort>, and transfer <amount> bytes.
// Required arguments:
//     dest: address of destination node
//     port: destination port
//     localPort: local port
//     amount: number of bytes to transfer
// Optional arguments:
//     interval: execution interval of the transfer client, default 1 second
//     buf_size: buffer size of the transfer client, default 65536
//
// NOTE(review): the current implementation ignores byte_num/interval/
// buf_size and instead streams a fixed 20 x 2000-byte test pattern over
// two concurrent connections — confirm whether that is intentional.
fn exec_transfer (args: NodeArgs, task_sender: Sender<TaskMsg>, ret_channel_recv: Receiver<Receiver<TaskRet>>) {
    println!("transfer args parsed.");
    // === example Java code ====
    // TCPSock sock = this.tcpMan.socket();
    // sock.bind(localPort);
    // sock.connect(destAddr, port);
    // TransferClient client = new
    // TransferClient(manager, this, sock, amount, interval, sz);
    // client.start();
    // Two client sockets on local_port and local_port + 1, both connecting
    // to the same destination.
    let mut sock1 = Socket::new(args.local_addr.clone(), task_sender.clone(), &ret_channel_recv);
    sock1.bind(args.local_port).expect("Can not bind local port!");
    sock1.connect(args.dest_addr.clone(), args.dest_port).expect("Can not establish connection.");
    // wait
    thread::sleep(time::Duration::from_millis(1000));
    let mut sock2 = Socket::new(args.local_addr, task_sender.clone(), &ret_channel_recv);
    sock2.bind(args.local_port + 1).expect("Can not bind local port!");
    sock2.connect(args.dest_addr, args.dest_port).expect("Can not establish connection.");
    thread::sleep(time::Duration::from_millis(1000));
    // Each sender thread writes 20 rounds of a 2000-byte repeating pattern
    // (bytes 0..200 cycling), then closes its socket.
    let handle1 = thread::spawn(move || {
        for i in 0..20 {
            println!("Thread 0: Sending loop {} / 20 starts ...", i);
            let mut test_data = Vec::new();
            for i in 0..2000 {
                test_data.push((i % 200) as u8);
            }
            sock1.write_all(&(test_data.clone())).unwrap();
            thread::sleep(time::Duration::from_millis(500));
        }
        println!("Thread 0: All data sent !!");
        thread::sleep(time::Duration::from_secs(5));
        sock1.close();
    });
    let handle2 = thread::spawn(move || {
        for i in 0..20 {
            println!("Thread 1: Sending loop {} / 20 starts ...", i);
            let mut test_data = Vec::new();
            for i in 0..2000 {
                test_data.push((i % 200) as u8);
            }
            sock2.write_all(&(test_data.clone())).unwrap();
            thread::sleep(time::Duration::from_millis(500));
        }
        println!("Thread 1: All data sent !!");
        thread::sleep(time::Duration::from_secs(5));
        sock2.close();
    });
    thread::sleep(time::Duration::from_secs(20));
    handle1.join().unwrap();
    handle2.join().unwrap();
}
// server command syntax:
// server port backlog [servint workint sz]
// Synopsis:
// Start a transfer server at the local node, listening on port
// <port>. The server has a maximum pending (incoming) connection
// queue of length <backlog>.
// Required arguments:
// port: listening port
// backlog: maximum length of pending connection queue
// Optional arguments:
// ======= TODO: Do I need this two? ======
// servint: execution interval of the transfer server, default 1 second
// workint: execution interval of the transfer worker, default 1 second
// sz: buffer size of the transfer worker, default 65536
//
// Accept loop never returns; each accepted connection is validated on its
// own thread against the fixed pattern that exec_transfer sends.
fn exec_server (args: NodeArgs, task_sender: Sender<TaskMsg>, ret_channel_recv: Receiver<Receiver<TaskRet>>) {
    println!("server args parsed.");
    // === example Java code ====
    // TCPSock sock = this.tcpMan.socket();
    // sock.bind(port);
    // sock.listen(backlog);
    // TransferServer server = new
    // TransferServer(manager, this, sock, servint, workint, sz);
    // server.start();
    let mut sock = Socket::new(args.local_addr, task_sender.clone(), &ret_channel_recv);
    sock.bind(args.local_port).expect("Can not bind local port!");
    sock.listen(args.backlog).expect("Can not listen to port!");
    loop {
        // the receiving socket at server
        let server_recv = sock.accept();
        if server_recv.is_ok() {
            let server_recv = server_recv.unwrap();
            let remote = format!("{}:{}", server_recv.id.remote_addr, server_recv.id.remote_port);
            println!("Got a new connection from {}", remote);
            thread::spawn(move || {
                println!("A new thread !");
                // Expect 20 rounds of the 2000-byte cycling pattern.
                for i in 0..20 {
                    let recv_data = server_recv.read_all(2000).unwrap();
                    for j in 0..2000 {
                        if recv_data[j] != (j % 200) as u8 {
                            println!("From {}: Wrong data received !!!", remote);
                            return;
                        }
                    }
                    println!("==========================\nFrom {}: {} / 20 data confirmed! \n", remote, i + 1);
                    // wait
                    thread::sleep(time::Duration::from_millis(10));
                }
                println!("============================\nFrom {}: All data right! \n", remote);
            });
        }
        // wait
        thread::sleep(time::Duration::from_millis(100));
    }
}
// Loopback smoke test: a server and a client socket in the same process,
// exercising interleaved write/read, then a batched send followed by a
// batched receive, and finally an orderly shutdown.
fn exec_local_test (args: NodeArgs, task_sender: Sender<TaskMsg>, ret_channel_recv: Receiver<Receiver<TaskRet>>) {
    println!("Local Test started ...");
    // start server socket
    let mut server_sock = Socket::new(args.local_addr.clone(), task_sender.clone(), &ret_channel_recv);
    server_sock.bind(args.local_port).expect("Can not bind local port!");
    server_sock.listen(args.backlog).expect("Can not listen to port!");
    let mut client_sock = Socket::new(args.local_addr.clone(), task_sender.clone(), &ret_channel_recv);
    client_sock.bind(args.local_port + 1).expect("Can not bind local port!");
    client_sock.connect(args.local_addr, args.local_port).expect("Can not establish connection.")
;
    // wait for the connection to be established
    thread::sleep(time::Duration::from_millis(100));
    // the receiving socket at server
    let server_recv = server_sock.accept().expect("Can not get connection!");
    // interleaved test: each 200-byte write is read back and verified
    // before the next write.
    for _ in 0..100 {
        let mut test_data = Vec::new();
        for i in 0..200 {
            test_data.push(i as u8);
        }
        let test_data = client_sock.write(&test_data, 0, 200).unwrap();
        println!("Len = {} wrote ", test_data);
        // wait
        thread::sleep(time::Duration::from_millis(100));
        let recv_data = server_recv.read_all(200).unwrap();
        for i in 0..200 {
            assert!(recv_data[i] == i as u8);
        }
    }
    // send and recv test: 20 writes first, then 20 reads.
    for _ in 0..20 {
        let mut test_data = Vec::new();
        for i in 0..200 {
            test_data.push(i as u8);
        }
        let test_data = client_sock.write(&test_data, 0, 200).unwrap();
        println!("Len = {} wrote ", test_data);
    }
    // wait
    thread::sleep(time::Duration::from_millis(200));
    for _ in 0..20 {
        let recv_data = server_recv.read_all(200).unwrap();
        for i in 0..200 {
            assert!(recv_data[i] == i as u8);
        }
    }
    println!("All data right! \n");
    client_sock.close();
    // server_recv.close(); // we do not support current FIN from both side.
    server_sock.close();
    // wait
    thread::sleep(time::Duration::from_millis(10));
    server_recv.release();
    // sleep to wait for other threads to do the job
    thread::sleep(time::Duration::from_secs(10));
}
// Flow-control (receive window) test: a fast loopback sender paired with a
// deliberately slow receiver, so the window must throttle the sender.
fn exec_window_test (args: NodeArgs, task_sender: Sender<TaskMsg>, ret_channel_recv: Receiver<Receiver<TaskRet>>) {
    println!("Window Test started ...");
    // start server socket
    let mut server_sock = Socket::new(args.local_addr.clone(), task_sender.clone(), &ret_channel_recv);
    server_sock.bind(args.local_port).expect("Can not bind local port!");
    server_sock.listen(args.backlog).expect("Can not listen to port!");
    let mut client_sock = Socket::new(args.local_addr.clone(), task_sender.clone(), &ret_channel_recv);
    client_sock.bind(args.local_port + 1).expect("Can not bind local port!");
    client_sock.connect(args.local_addr, args.local_port).expect("Can not establish connection.");
    // wait for the connection to be established
    thread::sleep(time::Duration::from_millis(500));
    // the receiving socket at server
    let server_recv = server_sock.accept().expect("Can not get connection!");
    // flow control test: sender thread pushes 40 x 200-byte chunks quickly.
    thread::spawn(move || {
        for idx in 0..40 {
            println!("{} over 40 ", idx);
            let mut test_data = Vec::new();
            for i in 0..200 {
                test_data.push(i as u8);
            }
            let test_data = client_sock.write(&test_data, 0, 200).unwrap();
            println!("Len = {} wrote ", test_data);
            // quick sender
            thread::sleep(time::Duration::from_millis(10));
        }
        thread::sleep(time::Duration::from_secs(10));
        client_sock.release();
    });
    // Receiver verifies each chunk but sleeps 200ms per read.
    for _ in 0..40 {
        let recv_data = server_recv.read_all(200).unwrap();
        // slow receiver!
        thread::sleep(time::Duration::from_millis(200));
        for i in 0..200 {
            assert!(recv_data[i] == i as u8);
        }
    }
    println!("All data right! \n");
    thread::sleep(time::Duration::from_millis(10));
    server_recv.close(); // we do not support current FIN from both side.
    thread::sleep(time::Duration::from_millis(10));
    server_sock.release();
    // wait
    thread::sleep(time::Duration::from_millis(10));
    // sleep to wait for other threads to do the job
    thread::sleep(time::Duration::from_secs(10));
}
/// Command-line options shared by all exec modes; defaults are filled in
/// by `parse_args`.
#[derive(Default)]
struct NodeArgs {
    // for client
    dest_addr: String,
    // NOTE(review): ports are u8 here (max 255); real TCP ports need u16.
    // Confirm against the Socket API before widening the type.
    dest_port: u8,
    local_addr: String,
    local_port: u8,
    byte_num: u32,
    interval: f32,
    /// buf size of node apps
    buf_size: u32,
    backlog: u32,
}
fn parse_args (arg_matches: ArgMatches) -> NodeArgs {
let mut args = NodeArgs::default();
args.dest_addr = arg_matches
.value_of("dest_addr")
.unwrap_or("127.0.0.1")
.parse()
.expect("can not parse dest addr");
args.dest_port = arg_matches
.value_of("dest_port")
.unwrap_or("88")
.parse()
.expect("can not parse dest port");
args.local_port = arg_matches
.value_of("local_port")
.unwrap_or("88")
.parse()
.expect("can not parse local port");
args.local_addr = arg_matches
.value_of("local_addr")
.unwrap_or("127.0.0.1")
.parse()
.expect("can not parse local addr");
args.byte_num = arg_matches
.value_of("num_byte")
.unwrap_or("1024")
.parse()
.expect("can not parse num of bytes");
args.interval = arg_matches
.value_of("interval")
.unwrap_or("1.0")
.parse()
.expect("can not parse interval");
args.buf_size = arg_matches
.value_of("buf_size")
.unwrap_or("65536")
.parse()
.expect("can not parse buffer size");
// for server
args.backlog = arg_matches
.value_of("backlog")
.unwrap_or("16")
.parse()
.expect("can not parse backlog");
return args;
} |
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::collections::HashMap;
use std::fmt;
use std::fmt::Debug;
use std::ops::Index;
use crate::Allocator;
use crate::Value;
/// Blocks with tags greater than or equal to NO_SCAN_TAG contain binary data,
/// and are not scanned by the garbage collector. Likewise, we must avoid
/// interpreting the fields of blocks with such tags as Values.
pub const NO_SCAN_TAG: u8 = 251;
pub const FORWARD_TAG: u8 = 250;
pub const INFIX_TAG: u8 = 249;
pub const OBJECT_TAG: u8 = 248;
pub const CLOSURE_TAG: u8 = 247;
pub const LAZY_TAG: u8 = 246;
pub const CONT_TAG: u8 = 245;
pub const FORCING_TAG: u8 = 244;
// ABSTRACT_TAG deliberately equals NO_SCAN_TAG (both 251), mirroring the
// OCaml runtime's mlvalues.h where Abstract_tag == No_scan_tag.
pub const ABSTRACT_TAG: u8 = 251;
pub const STRING_TAG: u8 = 252;
pub const DOUBLE_TAG: u8 = 253;
pub const DOUBLE_ARRAY_TAG: u8 = 254;
pub const CUSTOM_TAG: u8 = 255;
/// A recently-allocated, not-yet-finalized Block.
///
/// Wraps the block's field slice; the fields are not guaranteed to be
/// initialized until `build` is called (see the TODO on `build`).
#[repr(transparent)]
pub struct BlockBuilder<'a> {
    fields: &'a mut [Value<'a>],
}
impl<'a> BlockBuilder<'a> {
    /// Wraps `fields` as a block under construction.
    ///
    /// The slice's address may be a pointer or an offset (the `Allocator`
    /// which invokes `BlockBuilder::new` determines the meaning of
    /// `BlockBuilder` addresses), and its length is the number of fields
    /// in the block, which must be greater than 0.
    ///
    /// (The previous doc comment referred to `address` and `size`
    /// parameters that no longer exist in this signature.)
    ///
    /// # Panics
    ///
    /// Panics if `fields.is_empty()`.
    #[inline(always)]
    pub fn new(fields: &'a mut [Value<'a>]) -> Self {
        // Self-describing assert instead of the former bare `panic!()`.
        assert!(!fields.is_empty(), "a block must have at least one field");
        Self { fields }
    }
    /// The address of the field slice passed to `BlockBuilder::new`.
    #[inline(always)]
    pub fn address(&self) -> usize {
        self.fields.as_ptr() as _
    }
    /// The number of fields in this block.
    #[inline(always)]
    pub fn size(&self) -> usize {
        self.fields.len()
    }
    // TODO(jakebailey): This needs to be marked unsafe. The caller must
    // initialize all of the fields.
    #[inline(always)]
    pub fn build(self) -> Value<'a> {
        unsafe { Value::from_ptr(self.fields.as_ptr()) }
    }
}
/// The contents of an OCaml block, consisting of a header and one or more
/// fields of type [`Value`](struct.Value.html).
///
/// The backing slice stores the header word in slot 0 and the fields
/// starting at slot 1 (see `header`/`as_value` below).
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct Block<'arena>(pub(crate) &'arena [Value<'arena>]);
impl<'a> Block<'a> {
    /// The block header, stored in slot 0 of the backing slice.
    #[inline(always)]
    pub fn header(self) -> Header {
        Header(self.0[0].0)
    }
    /// Number of fields, as recorded in the header.
    #[inline(always)]
    pub fn size(self) -> usize {
        self.header().size()
    }
    /// The block tag, as recorded in the header.
    #[inline(always)]
    pub fn tag(self) -> u8 {
        self.header().tag()
    }
    /// A `Value` pointing at the first field (one word past the header).
    #[inline(always)]
    pub fn as_value(self) -> Value<'a> {
        unsafe { Value::from_ptr(&self.0[1]) }
    }
    /// The fields as a `Value` slice, or `None` when the tag marks the
    /// block as binary data that must not be read as values.
    #[inline(always)]
    pub fn as_values(self) -> Option<&'a [Value<'a>]> {
        if self.tag() >= NO_SCAN_TAG {
            return None;
        }
        Some(&self.0[1..])
    }
    /// The fields reinterpreted as raw machine words (no tag check).
    #[inline(always)]
    pub fn as_int_slice(self) -> &'a [usize] {
        let slice = &self.0[1..];
        unsafe { std::slice::from_raw_parts(slice.as_ptr().cast(), slice.len()) }
    }
    /// Helper for `Value::clone_with_allocator`.
    ///
    /// Deep-copies this block into `alloc`. `seen` is threaded through the
    /// per-field `clone_with` calls (presumably to preserve sharing —
    /// confirm in `Value::clone_with`).
    pub(crate) fn clone_with<'b, A: Allocator>(
        self,
        alloc: &'b A,
        seen: &mut HashMap<usize, Value<'b>>,
    ) -> Value<'b> {
        let mut block = alloc.block_with_size_and_tag(self.size(), self.tag());
        match self.as_values() {
            // Scannable block: clone each field recursively.
            Some(fields) => {
                for (i, field) in fields.iter().enumerate() {
                    let field = field.clone_with(alloc, seen);
                    alloc.set_field(&mut block, i, field)
                }
            }
            // Binary block: copy the raw words wholesale.
            None => {
                // Safety: Both pointers must be valid, aligned, and
                // non-overlapping. Both pointers are the heads of blocks which
                // came from some Allocator. Allocators are required to allocate
                // blocks with usize-aligned pointers, and those blocks are
                // required to be valid for reads and writes for the number of
                // usize-sized fields reported in the size in their header.
                // Allocators are also required to allocate non-overlapping
                // blocks.
                unsafe {
                    std::ptr::copy_nonoverlapping(
                        self.0.as_ptr().offset(1) as *const usize,
                        alloc.block_ptr_mut(&mut block) as *mut usize,
                        self.size(),
                    )
                }
            }
        }
        block.build()
    }
}
impl<'a> Index<usize> for Block<'a> {
    type Output = Value<'a>;
    // Field indexing is zero-based over the fields, so add 1 to skip the
    // header stored in slot 0.
    #[inline(always)]
    fn index(&self, index: usize) -> &Self::Output {
        &self.0[index + 1]
    }
}
impl Debug for Block<'_> {
    // Strings and boxed floats print as their payload; any other block
    // prints as its tag followed by its fields. Note the final `unwrap`
    // panics for non-scannable tags other than string/double (as_values
    // returns None for tags >= NO_SCAN_TAG).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.tag() == STRING_TAG {
            write!(f, "{:?}", self.as_value().as_str().unwrap())
        } else if self.tag() == DOUBLE_TAG {
            write!(f, "{:?}", self.as_value().as_float().unwrap())
        } else {
            write!(f, "{}{:?}", self.tag(), self.as_values().unwrap())
        }
    }
}
// values from ocaml 'gc.h'
/// GC color stored in a block header's color bits; the discriminants come
/// from the crate's CAML_* constants.
#[repr(usize)]
#[derive(Clone, Copy)]
pub enum Color {
    White = crate::CAML_WHITE,
    Gray = crate::CAML_GRAY,
    Blue = crate::CAML_BLUE,
    Black = crate::CAML_BLACK,
}
/// An OCaml block header: size, GC color, and tag packed into one word
/// (see the accessors on the impl below for the exact layout).
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct Header(usize);
impl Header {
    // Layout, as used by the shifts/masks below: tag in the low 8 bits,
    // GC color in the bits covered by the Color discriminants, size in
    // the bits from bit 10 upward.
    /// Header for a white block of `size` fields with tag `tag`.
    #[inline(always)]
    pub const fn new(size: usize, tag: u8) -> Self {
        Self::with_color(size, tag, Color::White)
    }
    /// Packs `size`, `tag` and `color` into a single header word.
    #[inline(always)]
    pub const fn with_color(size: usize, tag: u8, color: Color) -> Self {
        let bits = size << 10 | (color as usize) | (tag as usize);
        Header(bits)
    }
    /// Number of fields in the block.
    #[inline(always)]
    pub const fn size(self) -> usize {
        self.0 >> 10
    }
    /// Block tag (low 8 bits of the header word).
    #[inline(always)]
    pub const fn tag(self) -> u8 {
        self.0 as u8
    }
    /// GC color, decoded by masking with the widest color value.
    #[inline(always)]
    pub const fn color(self) -> Color {
        match self.0 & Color::Black as usize {
            crate::CAML_WHITE => Color::White,
            crate::CAML_GRAY => Color::Gray,
            crate::CAML_BLUE => Color::Blue,
            crate::CAML_BLACK => Color::Black,
            _ => unreachable!(),
        }
    }
    /// Reinterprets a raw word as a header (no validation).
    #[inline(always)]
    pub const fn from_bits(bits: usize) -> Self {
        Header(bits)
    }
    /// The raw header word.
    #[inline(always)]
    pub const fn to_bits(self) -> usize {
        self.0
    }
}
impl Debug for Header {
    /// Debug form shows the decoded size and tag (color is omitted, as in
    /// the original).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("Header");
        dbg.field("size", &self.size());
        dbg.field("tag", &self.tag());
        dbg.finish()
    }
}
|
use crate::libs::color::color_system;
use isaribi::{
style,
styled::{Style, Styled},
};
use nusa::prelude::*;
/// Properties for the button component: only the visual variant.
pub struct Props {
    pub variant: Variant,
}
/// Visual style of a button. The `*LikeMenu` and `MenuAs*` variants
/// combine a color scheme with the shared "like-menu" layout class
/// (left-aligned text, small border radius — see the Styled impl).
#[derive(Clone)]
pub enum Variant {
    Primary,
    PrimaryLikeMenu,
    Secondary,
    SecondaryLikeMenu,
    Danger,
    Disable,
    Dark,
    DarkLikeMenu,
    TransparentDark,
    Menu,
    MenuAsSecondary,
    MenuAsPrimary,
    MenuAsLight,
    Success,
    Light,
    LightLikeMenu,
    TransparentLight,
}
/// Stateless button component; all constructors are associated functions.
pub struct Btn {}
impl Variant {
    /// True only for `Variant::Disable`; used by `Btn::with_variant` to
    /// set the button's `disabled` attribute.
    fn is_disable(&self) -> bool {
        // `matches!` replaces the former two-arm match — same behavior,
        // idiomatic form.
        matches!(self, Self::Disable)
    }
}
impl Btn {
    /// CSS class list for a variant; menu-like variants get the shared
    /// "like-menu" class appended to their color class.
    pub fn class_name(variant: &Variant) -> String {
        match variant {
            Variant::Primary => Self::class("primary"),
            Variant::PrimaryLikeMenu => Self::class("primary") + " " + &Self::class("like-menu"),
            Variant::Secondary => Self::class("secondary"),
            Variant::SecondaryLikeMenu => {
                Self::class("secondary") + " " + &Self::class("like-menu")
            }
            Variant::Danger => Self::class("danger"),
            Variant::Disable => Self::class("disable"),
            Variant::Dark => Self::class("dark"),
            Variant::DarkLikeMenu => Self::class("dark") + " " + &Self::class("like-menu"),
            Variant::TransparentDark => Self::class("transparent-dark"),
            Variant::Menu => Self::class("menu") + " " + &Self::class("like-menu"),
            Variant::MenuAsSecondary => {
                Self::class("menu-secondary") + " " + &Self::class("like-menu")
            }
            Variant::MenuAsPrimary => Self::class("menu-primary") + " " + &Self::class("like-menu"),
            Variant::MenuAsLight => Self::class("menu-light") + " " + &Self::class("like-menu"),
            Variant::Success => Self::class("success"),
            Variant::Light => Self::class("light"),
            Variant::LightLikeMenu => Self::class("light") + " " + &Self::class("like-menu"),
            Variant::TransparentLight => Self::class("transparent-light"),
        }
    }
    /// Renders a styled `<button>` for `variant`; the `disabled` flag is
    /// set only for `Variant::Disable`.
    pub fn with_variant(
        variant: Variant,
        attrs: Attributes,
        events: Events,
        children: Vec<Html>,
    ) -> Html {
        Self::styled(Html::button(
            attrs
                .class("pure-button")
                .class(Self::class("base"))
                .class(Self::class_name(&variant))
                .flag("disabled", variant.is_disable()),
            events,
            children,
        ))
    }
    // Convenience constructors: one per commonly used variant, all
    // delegating to `with_variant`.
    pub fn primary(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::with_variant(Variant::Primary, attrs, events, children)
    }
    pub fn secondary(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::with_variant(Variant::Secondary, attrs, events, children)
    }
    pub fn danger(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::with_variant(Variant::Danger, attrs, events, children)
    }
    pub fn dark(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::with_variant(Variant::Dark, attrs, events, children)
    }
    pub fn menu(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::with_variant(Variant::Menu, attrs, events, children)
    }
    pub fn menu_as_secondary(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::with_variant(Variant::MenuAsSecondary, attrs, events, children)
    }
    pub fn menu_as_primary(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::with_variant(Variant::MenuAsPrimary, attrs, events, children)
    }
    pub fn menu_as_light(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::with_variant(Variant::MenuAsLight, attrs, events, children)
    }
    pub fn success(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::with_variant(Variant::Success, attrs, events, children)
    }
    pub fn light(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::with_variant(Variant::Light, attrs, events, children)
    }
    /// Wraps buttons in a pure-button-group container div.
    // NOTE(review): the attribute name "roll" looks like a typo for the
    // HTML "role" attribute — runtime string, so left unchanged; confirm.
    pub fn group(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Html::div(
            attrs.class("pure-button-group").string("roll", "group"),
            events,
            children,
        )
    }
}
// Style sheet for `Btn`: one CSS class per `Variant`, plus hover rules for
// the menu-like variants. Colors come from the shared `color_system`
// palette (hue(saturation, lightness-step)).
impl Styled for Btn {
    fn style() -> Style {
        style! {
            ".primary" {
                "line-height": "1.5";
                "background-color": color_system::blue(100, 5).to_string();
                "color": color_system::gray(100, 0).to_string();
            }
            ".secondary" {
                "line-height": "1.5";
                "background-color": color_system::gray(100, 5).to_string();
                "color": color_system::gray(100, 0).to_string();
            }
            ".danger" {
                "line-height": "1.5";
                "background-color": color_system::red(100, 5).to_string();
                "color": color_system::gray(100, 0).to_string();
            }
            ".dark" {
                "line-height": "1.5";
                "background-color": color_system::gray(100, 9).to_string();
                "color": color_system::gray(100, 0).to_string();
            }
            ".transparent-dark" {
                "line-height": "1.5";
                "background-color": "transparent";
                "color": color_system::gray(100, 0).to_string();
            }
            // Shared shape tweaks for menu-style buttons.
            ".like-menu" {
                "text-align": "left";
                "border-radius": "2px";
            }
            ".menu" {
                "line-height": "1.5";
                "background-color": color_system::gray(100, 9).to_string();
                "color": color_system::gray(100, 0).to_string();
            }
            // Menu entries highlight blue on hover.
            ".menu:hover" {
                "background-color": color_system::blue(100, 5).to_string();
            }
            ".menu-secondary" {
                "line-height": "1.5";
                "background-color": color_system::gray(100, 5).to_string();
                "color": color_system::gray(100, 0).to_string();
            }
            ".menu-primary" {
                "line-height": "1.5";
                "background-color": color_system::blue(100, 5).to_string();
                "color": color_system::gray(100, 0).to_string();
            }
            ".menu-light" {
                "line-height": "1.5";
                "background-color": color_system::gray(100, 3).to_string();
                "color": color_system::gray(100, 9).to_string();
            }
            ".menu-secondary:hover, .menu-light:hover" {
                "background-color": color_system::blue(100, 5).to_string();
            }
            ".success" {
                "line-height": "1.5";
                "background-color": color_system::green(100, 5).to_string();
                "color": color_system::gray(100, 0).to_string();
            }
            ".light" {
                "line-height": "1.5";
                "background-color": color_system::gray(100, 3).to_string();
                "color": color_system::gray(100, 9).to_string();
            }
            ".transparent-light" {
                "line-height": "1.5";
                "background-color": "transparent";
                "color": color_system::gray(100, 9).to_string();
            }
        }
    }
}
|
use super::*;
#[cfg(test)]
use crate::rusty_hook::rusty_hook_tests::utils::{build_simple_command_runner, GIT_REV_PARSE_CMD};
#[cfg(test)]
mod get_root_directory_path_tests {
    use super::*;
    /// The root lookup must shell out to `git rev-parse --show-toplevel`
    /// in the requested directory, without streaming I/O.
    #[test]
    fn uses_git_rev_parse_top_level_command() {
        let expected = "/usr/me/foo";
        let target_dir = "";
        let run_command = |cmd: &str,
                           dir: Option<&str>,
                           stream_io: bool,
                           _env: Option<&HashMap<String, String>>| {
            let is_expected_call =
                cmd == GIT_REV_PARSE_CMD && dir == Some(target_dir) && !stream_io;
            if is_expected_call {
                Ok(Some(expected.to_string()))
            } else {
                Ok(None)
            }
        };
        let actual = get_root_directory_path(run_command, Some(target_dir));
        assert_eq!(actual.unwrap(), Some(expected.to_string()));
    }
    /// Command failures must be surfaced to the caller unchanged.
    #[test]
    fn returns_error_on_command_error() {
        let expected_err = "Ah!";
        let run_command = build_simple_command_runner(Err(Some(expected_err.to_string())));
        let actual = get_root_directory_path(run_command, None);
        assert_eq!(actual, Err(Some(expected_err.to_string())));
    }
}
#[cfg(test)]
mod get_hooks_directory_tests {
    use super::*;
    /// The hooks lookup must shell out to `git rev-parse --git-path hooks`
    /// in the requested directory, without streaming I/O.
    #[test]
    fn uses_git_hooks_path_command() {
        let expected = ".git/hooks";
        let target_dir = "";
        let run_command = |cmd: &str,
                           dir: Option<&str>,
                           stream_io: bool,
                           _env: Option<&HashMap<String, String>>| {
            let is_expected_call = cmd == "git rev-parse --git-path hooks"
                && dir == Some(target_dir)
                && !stream_io;
            if is_expected_call {
                Ok(Some(expected.to_string()))
            } else {
                Ok(None)
            }
        };
        let actual = get_hooks_directory(run_command, target_dir);
        assert_eq!(actual.unwrap(), Some(expected.to_string()));
    }
    /// Command failures must be surfaced to the caller unchanged.
    #[test]
    fn returns_error_on_command_error() {
        let expected_err = "failed";
        let run_command = build_simple_command_runner(Err(Some(expected_err.to_string())));
        let actual = get_hooks_directory(run_command, "");
        assert_eq!(actual, Err(Some(expected_err.to_string())));
    }
}
#[cfg(test)]
mod setup_hooks_tests {
    use super::*;
    /// A failed hooks-directory lookup must abort setup with a descriptive error.
    #[test]
    fn errors_when_hooks_directory_unknown() {
        let exp_err = "Failure determining git hooks directory";
        let run_command = build_simple_command_runner(Err(None));
        // Writer that always succeeds; it should never be reached on this path.
        let write_file = |_path: &str, _contents: &str, _x: bool| Ok(());
        let result = setup_hooks(run_command, write_file, "", &[]);
        assert_eq!(result, Err(String::from(exp_err)));
    }
    /// A failed hook-file write must surface the canned creation error.
    #[test]
    fn errors_when_hook_write_fails() {
        let run_command =
            build_simple_command_runner(Ok(Some(String::from("/usr/repos/foo/.git/hooks"))));
        let write_file = |_path: &str, _contents: &str, _x: bool| Err(String::from(""));
        let result = setup_hooks(run_command, write_file, "", &[]);
        assert_eq!(result, Err(String::from(hooks::HOOK_CREATION_ERROR)));
    }
    /// When the directory lookup and every write succeed, setup completes cleanly.
    #[test]
    fn creates_all_hooks() {
        let root_dir = "/usr/repos/foo";
        let git_hooks = ".git/hooks";
        let run_command = build_simple_command_runner(Ok(Some(String::from(git_hooks))));
        let write_file = |_p: &str, _c: &str, _x: bool| Ok(());
        let result = setup_hooks(run_command, write_file, root_dir, &[]);
        assert_eq!(result, Ok(()));
    }
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use air::{proof::Queries, EvaluationFrame};
use crypto::{ElementHasher, Hasher, MerkleTree};
use math::StarkField;
use utils::{batch_iter_mut, collections::Vec, uninit_vector};
#[cfg(feature = "concurrent")]
use utils::iterators::*;
// TRACE TABLE
// ================================================================================================
/// Low-degree-extended execution trace stored in column-major order
/// (`data[register][step]`), together with the blowup factor that was used
/// to extend it.
pub struct TraceTable<B: StarkField> {
    data: Vec<Vec<B>>,
    blowup: usize,
}
impl<B: StarkField> TraceTable<B> {
    // CONSTRUCTOR
    // --------------------------------------------------------------------------------------------
    /// Creates a new trace table from a list of provided register traces.
    ///
    /// `data` is column-major: one inner vector per register. All columns are
    /// expected to have the same length (the LDE domain size); `blowup` is
    /// the LDE blowup factor.
    pub(super) fn new(data: Vec<Vec<B>>, blowup: usize) -> Self {
        TraceTable { data, blowup }
    }
    // PUBLIC ACCESSORS
    // --------------------------------------------------------------------------------------------
    /// Returns number of registers in the trace table.
    pub fn width(&self) -> usize {
        self.data.len()
    }
    /// Returns the number of states in this trace table.
    ///
    /// Note: this reads the first column, so it panics when the table has no
    /// registers (width 0).
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.data[0].len()
    }
    /// Returns blowup factor which was used to extend original trace into this trace.
    pub fn blowup(&self) -> usize {
        self.blowup
    }
    /// Returns value in the specified `register` at the specified `step`.
    pub fn get(&self, register: usize, step: usize) -> B {
        self.data[register][step]
    }
    /// Returns the entire register trace for the register at the specified index.
    #[cfg(test)]
    pub fn get_register(&self, idx: usize) -> &[B] {
        &self.data[idx]
    }
    /// Copies values of all registers at the specified `step` into the `destination` slice.
    ///
    /// If `row` is shorter than the table width, trailing registers are
    /// silently skipped (`zip` stops at the shorter side).
    pub fn read_row_into(&self, step: usize, row: &mut [B]) {
        for (register, value) in self.data.iter().zip(row.iter_mut()) {
            *value = register[step];
        }
    }
    /// Reads current and next rows from the execution trace table into the specified frame.
    pub fn read_frame_into(&self, lde_step: usize, frame: &mut EvaluationFrame<B>) {
        // at the end of the trace, next state wraps around and we read the first step again
        let next_lde_step = (lde_step + self.blowup()) % self.len();
        self.read_row_into(lde_step, frame.current_mut());
        self.read_row_into(next_lde_step, frame.next_mut());
    }
    // TRACE COMMITMENT
    // --------------------------------------------------------------------------------------------
    /// Builds a Merkle tree out of trace table rows (hash of each row becomes a leaf in the tree).
    pub fn build_commitment<H: ElementHasher<BaseField = B>>(&self) -> MerkleTree<H> {
        // allocate vector to store row hashes
        // SAFETY note: the vector starts uninitialized; the batch_iter_mut
        // loop below writes every element before the vector is read — TODO
        // confirm the macro covers all indices under the `concurrent` feature
        // as well.
        let mut hashed_states = unsafe { uninit_vector::<H::Digest>(self.len()) };
        // iterate though table rows, hashing each row; the hashing is done by first copying
        // the state into trace_state buffer to avoid unneeded allocations, and then by applying
        // the hash function to the buffer.
        batch_iter_mut!(
            &mut hashed_states,
            128, // min batch size
            |batch: &mut [H::Digest], batch_offset: usize| {
                let mut trace_state = vec![B::ZERO; self.width()];
                for (i, row_hash) in batch.iter_mut().enumerate() {
                    self.read_row_into(i + batch_offset, &mut trace_state);
                    *row_hash = H::hash_elements(&trace_state);
                }
            }
        );
        // build Merkle tree out of hashed rows
        MerkleTree::new(hashed_states).expect("failed to construct trace Merkle tree")
    }
    // QUERY TRACE
    // --------------------------------------------------------------------------------------------
    /// Returns trace table rows at the specified positions along with Merkle authentication paths
    /// from the `commitment` root to these rows.
    ///
    /// Note that `commitment` is consumed by value; callers that need the
    /// tree afterwards must clone it first.
    pub fn query<H: Hasher>(&self, commitment: MerkleTree<H>, positions: &[usize]) -> Queries {
        assert_eq!(
            self.len(),
            commitment.leaves().len(),
            "inconsistent trace table commitment"
        );
        // allocate memory for queried trace states
        let mut trace_states = Vec::with_capacity(positions.len());
        // copy values from the trace table at the specified positions into rows
        // and append the rows to trace_states
        for &i in positions.iter() {
            let row = self.data.iter().map(|r| r[i]).collect();
            trace_states.push(row);
        }
        // build Merkle authentication paths to the leaves specified by positions
        let trace_proof = commitment
            .prove_batch(positions)
            .expect("failed to generate a Merkle proof for trace queries");
        Queries::new(trace_proof, trace_states)
    }
}
|
mod selection_sort;
mod insertion_sort;
mod bubble_sort;
mod merge_sort;
/// Demo driver: sorts the same sample vector with each algorithm and prints
/// the results side by side.
fn main() {
    let unsorted = vec![1, 5, 2, 6, 3, 6, 0];
    let mut by_selection = unsorted.clone();
    let mut by_insertion = unsorted.clone();
    let mut by_bubble = unsorted.clone();
    let mut by_merge = unsorted.clone();
    println!("Before: \t\t {:?}", unsorted);
    // All algorithms take the same ascending comparator.
    selection_sort::sort(&mut by_selection, |x, y| x < y);
    insertion_sort::sort(&mut by_insertion, |x, y| x < y);
    bubble_sort::sort(&mut by_bubble, |x, y| x < y);
    merge_sort::sort(&mut by_merge, |x, y| x < y);
    println!("After Selection Sort: \t {:?}", by_selection);
    println!("After Insertion Sort: \t {:?}", by_insertion);
    println!("After Bubble Sort: \t {:?}", by_bubble);
    println!("After Merge Sort: \t {:?}", by_merge);
}
|
use std::ascii::AsciiExt;
use std::fmt;
use std::ops::{Add, Index, Range};
/// Represents a single nucleotide and acts as a building block
/// for the `DNA_Sequence` type.
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum Nucleotide {
    A,
    C,
    G,
    T,
}
impl Nucleotide {
    /// Returns the Watson-Crick complement of the current nucleotide
    /// (A <-> T, C <-> G).
    pub fn complement(&self) -> Nucleotide {
        use self::Nucleotide::*;
        match *self {
            A => T,
            C => G,
            G => C,
            T => A,
        }
    }
    /// Parses a single letter (case-insensitive) into a nucleotide.
    ///
    /// # Panics
    /// Panics when `letter` is not one of `A`, `C`, `G`, `T`.
    fn from_char(letter: char) -> Nucleotide {
        use self::Nucleotide::*;
        match letter.to_ascii_uppercase() {
            'A' => A,
            'C' => C,
            'G' => G,
            'T' => T,
            // Fixed typo ("lettter") and included the offending character
            // to make the failure diagnosable.
            other => panic!("Incorrect letter: {}", other),
        }
    }
}
impl fmt::Display for Nucleotide {
    /// Writes the single-letter code for this nucleotide.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::Nucleotide::*;
        let letter = match *self {
            A => 'A',
            C => 'C',
            G => 'G',
            T => 'T',
        };
        write!(f, "{}", letter)
    }
}
/// Represents a DNA sequence as a vector of Nucleotide instances.
///
/// The inner vector is public, so sequences can also be built or inspected
/// directly.
#[allow(non_camel_case_types)]
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct DNA_Sequence(pub Vec<Nucleotide>);
impl DNA_Sequence {
    /// Creates and returns a new DNA sequence representing the reverse
    /// complement of the current sequence.
    ///
    /// NOTE(review): this does not mutate `self`; it takes `&mut self` only
    /// for historical reasons — confirm callers before relaxing to `&self`.
    pub fn reverse_complement(&mut self) -> DNA_Sequence {
        let reverse_complement_vec = self.0
            .iter()
            .rev()
            .map(|x| x.complement())
            .collect::<Vec<Nucleotide>>();
        DNA_Sequence(reverse_complement_vec)
    }
    /// Provided a string representation of a DNA sequence (such as "AACGTACA"),
    /// returns a corresponding `DNA_Sequence` instance.
    ///
    /// # Panics
    /// Panics when the string contains a character other than A/C/G/T
    /// (case-insensitive).
    pub fn from_string(dna_string: &str) -> DNA_Sequence {
        let pattern_vec = dna_string.chars().map(Nucleotide::from_char).collect();
        DNA_Sequence(pattern_vec)
    }
    /// Creates an empty DNA sequence.
    pub fn new() -> DNA_Sequence {
        DNA_Sequence(vec![])
    }
    /// Returns the length of the sequence (the number of nucleotides it contains).
    pub fn len(&self) -> usize {
        self.0.len()
    }
    /// Returns true if the sequence is empty (length of 0).
    pub fn is_empty(&self) -> bool {
        self.0.len() == 0
    }
    /// Provided a DNA pattern, returns the number of (possibly overlapping)
    /// matches found in this sequence.
    ///
    /// Returns 0 when `pattern` is longer than this sequence; the previous
    /// implementation underflowed `usize` and panicked in that case.
    pub fn pattern_match_count(&self, pattern: DNA_Sequence) -> usize {
        // Delegate so the matching logic lives in exactly one place.
        self.find_pattern_matches(pattern).len()
    }
    /// Returns the start index of every (possibly overlapping) occurrence of
    /// `pattern` in this sequence.
    pub fn find_pattern_matches(&self, pattern: DNA_Sequence) -> Vec<usize> {
        let mut match_indices = Vec::new();
        // Guard against `usize` underflow in `self.len() - pattern.len()`
        // below: a pattern longer than the sequence can never match.
        if pattern.len() > self.len() {
            return match_indices;
        }
        for index in 0..(self.len() - pattern.len() + 1) {
            if pattern == DNA_Sequence(self[index..index + pattern.len()].to_vec()) {
                match_indices.push(index);
            }
        }
        match_indices
    }
    /// Returns an integer value corresponding to the current sequence for use with
    /// frequency array algorithms. **Note:** sequences longer than 32 nucleotides might
    /// result in an integer overflow.
    pub fn pattern_to_number(&self) -> u64 {
        let mut pattern_number = 0;
        // Interpret the sequence as a base-4 number with A=0, C=1, G=2, T=3,
        // most significant digit first.
        for (index, nucleotide) in self.0.iter().rev().enumerate() {
            let multiplier = 4u64.pow(index as u32);
            let nucleotide_number = match *nucleotide {
                Nucleotide::A => 0,
                Nucleotide::C => 1,
                Nucleotide::G => 2,
                Nucleotide::T => 3,
            };
            pattern_number += nucleotide_number * multiplier;
        }
        pattern_number
    }
    /// Returns a DNA sequence associated with an integer value and k-mer size
    /// (the inverse of `pattern_to_number` for sequences of length `kmer_size`).
    pub fn number_to_pattern(mut number: u64, kmer_size: u64) -> DNA_Sequence {
        let mut dna_sequence = DNA_Sequence::new();
        // Peel off base-4 digits starting from the most significant one.
        for i in 0..kmer_size {
            let divisor = 4u64.pow((kmer_size - 1 - i) as u32);
            let nucleotide_number: u64 = number / divisor;
            dna_sequence = dna_sequence +
                match nucleotide_number {
                    0 => Nucleotide::A,
                    1 => Nucleotide::C,
                    2 => Nucleotide::G,
                    3 => Nucleotide::T,
                    _ => panic!("Unexpected value in number_to_pattern()"),
                };
            number -= divisor * nucleotide_number;
        }
        dna_sequence
    }
}
impl Add<Nucleotide> for DNA_Sequence {
    type Output = DNA_Sequence;
    /// Appends `nucleotide` to the sequence (`seq + Nucleotide::A`),
    /// consuming and returning the sequence.
    fn add(mut self, nucleotide: Nucleotide) -> DNA_Sequence {
        self.0.push(nucleotide);
        self
    }
}
impl Index<usize> for DNA_Sequence {
    type Output = Nucleotide;
    /// Returns the nucleotide at `index`; panics when out of bounds.
    fn index(&self, index: usize) -> &Nucleotide {
        &self.0[index]
    }
}
impl Index<Range<usize>> for DNA_Sequence {
    type Output = [Nucleotide];
    /// Returns the sub-slice of nucleotides for `range`; panics when out of bounds.
    fn index(&self, range: Range<usize>) -> &[Nucleotide] {
        &self.0[range]
    }
}
impl fmt::Display for DNA_Sequence {
    /// Writes the sequence as a plain string of letters, e.g. "ACGT".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Write each nucleotide straight to the formatter instead of building
        // an intermediate string with repeated `format!` concatenation, which
        // was O(n^2) in the sequence length.
        for nucleotide in &self.0 {
            write!(f, "{}", nucleotide)?;
        }
        Ok(())
    }
}
|
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::sync::{mpsc, Arc};
use std::{thread, time};
#[derive(PartialEq, Debug)]
enum OpResult<'a> {
    // The result of a line operation: nothing, a sound played, or a jump
    Void,
    Sound(isize),
    Recover(&'a str),
    Jump(isize),
}
/// Executes a single duet instruction (part 1 semantics), mutating
/// `registers` and reporting any sound/recover/jump outcome.
fn execute1<'a>(line: &'a str, registers: &mut HashMap<&'a str, isize>) -> OpResult<'a> {
    let mut words = line.split_whitespace();
    let op = words.next().unwrap();
    // Operands resolve to a literal when they parse as a number, otherwise
    // to the named register's value (0 when unset).
    let x_word = words.next().unwrap_or("a");
    let x_value = x_word
        .parse::<isize>()
        .unwrap_or_else(|_| *registers.get(x_word).unwrap_or(&0));
    let y_word = words.next().unwrap_or("0");
    let y_value = y_word
        .parse::<isize>()
        .unwrap_or_else(|_| *registers.get(y_word).unwrap_or(&0));
    match op {
        "snd" => OpResult::Sound(x_value),
        "set" => {
            registers.insert(x_word, y_value);
            OpResult::Void
        }
        "add" => {
            registers.insert(x_word, x_value + y_value);
            OpResult::Void
        }
        "mul" => {
            registers.insert(x_word, x_value * y_value);
            OpResult::Void
        }
        "mod" => {
            registers.insert(x_word, x_value % y_value);
            OpResult::Void
        }
        // Part 1: "rcv" only triggers when its operand is non-zero.
        "rcv" if x_value != 0 => OpResult::Recover(x_word),
        "jgz" if x_value > 0 => OpResult::Jump(y_value),
        _ => OpResult::Void,
    }
}
/// Part 1: runs the program until the first successful `rcv` and returns the
/// frequency of the last sound played.
fn solve1(input: &str) -> isize {
    let lines: Vec<&str> = input.lines().collect();
    let mut registers = HashMap::new();
    let mut last_sound = 0;
    let mut pc: isize = 0;
    loop {
        match execute1(lines[pc as usize], &mut registers) {
            OpResult::Recover(_) => break,
            OpResult::Sound(freq) => last_sound = freq,
            // -1 compensates for the unconditional increment below.
            OpResult::Jump(offset) => pc += offset - 1,
            OpResult::Void => {}
        }
        pc += 1;
    }
    last_sound
}
/// Executes a single duet instruction (part 2 semantics). Identical to
/// `execute1` except that `rcv` always yields `Recover`: in part 2 it is a
/// blocking receive, not a conditional.
fn execute2<'a>(line: &'a str, registers: &mut HashMap<&'a str, isize>) -> OpResult<'a> {
    let mut words = line.split_whitespace();
    let op = words.next().unwrap();
    // Operands resolve to a literal when they parse as a number, otherwise
    // to the named register's value (0 when unset).
    let x_word = words.next().unwrap_or("a");
    let x_value = x_word
        .parse::<isize>()
        .unwrap_or_else(|_| *registers.get(x_word).unwrap_or(&0));
    let y_word = words.next().unwrap_or("0");
    let y_value = y_word
        .parse::<isize>()
        .unwrap_or_else(|_| *registers.get(y_word).unwrap_or(&0));
    match op {
        "snd" => OpResult::Sound(x_value),
        "set" => {
            registers.insert(x_word, y_value);
            OpResult::Void
        }
        "add" => {
            registers.insert(x_word, x_value + y_value);
            OpResult::Void
        }
        "mul" => {
            registers.insert(x_word, x_value * y_value);
            OpResult::Void
        }
        "mod" => {
            registers.insert(x_word, x_value % y_value);
            OpResult::Void
        }
        "rcv" => OpResult::Recover(x_word),
        "jgz" if x_value > 0 => OpResult::Jump(y_value),
        _ => OpResult::Void,
    }
}
/// Execution state reported by a duet program to the monitor in `solve2`:
/// `Running(n)` carries the number of values sent so far; `Waiting` means
/// the program is blocked on a receive.
#[derive(PartialEq, Debug)]
enum State {
    Running(usize),
    Waiting,
}
/// Runs one duet program (part 2). `number` seeds register `p`; `tx`/`rx`
/// carry values to/from the partner program; `ty` reports Running/Waiting
/// state (with the cumulative send count) to the monitor in `solve2`.
///
/// NOTE(review): the loop never returns — the spawned thread presumably ends
/// by panicking on an out-of-range `lines` index or stays blocked in
/// `rx.recv()` until the process exits; confirm this is intentional.
fn run(
    input: Arc<str>,
    number: isize,
    tx: mpsc::Sender<isize>,
    rx: mpsc::Receiver<isize>,
    ty: mpsc::Sender<State>,
) {
    let mut registers = HashMap::new();
    registers.insert("p", number);
    let mut i = 0;
    let mut sent = 0;
    let lines: Vec<&str> = input.lines().collect();
    loop {
        match execute2(lines[i as usize], &mut registers) {
            OpResult::Jump(x) => i += x - 1,
            OpResult::Sound(x) => {
                // "snd": pass the value to the partner and report the new count.
                tx.send(x).ok();
                sent += 1;
                ty.send(State::Running(sent)).ok();
            }
            OpResult::Recover(x) => {
                // "rcv" blocks; report Waiting first so the monitor can
                // detect a mutual deadlock.
                ty.send(State::Waiting).ok();
                registers.insert(x, rx.recv().unwrap());
                ty.send(State::Running(sent)).ok();
            }
            OpResult::Void => (),
        }
        i += 1;
    }
}
/// Part 2: runs two copies of the program (p = 0 and p = 1) on separate
/// threads wired together with channels and returns how many values program
/// 1 sent before both programs deadlocked waiting to receive.
fn solve2(input: &str) -> usize {
    let (t0, r0) = mpsc::channel(); // channel 0 -> 1
    let (t1, r1) = mpsc::channel(); // channel 1 -> 0
    let (t2, r2) = mpsc::channel(); // Channel sending infos from runner 0
    let (t3, r3) = mpsc::channel(); // Channel sending infos from runner 1
    let input_ref0 = Arc::from(input);
    let input_ref1 = Arc::clone(&input_ref0);
    thread::spawn(|| run(input_ref0, 0, t0, r1, t2));
    thread::spawn(|| run(input_ref1, 1, t1, r0, t3));
    let mut state0 = State::Running(0);
    let mut state1 = State::Running(1);
    let mut sent_by_1 = 0;
    // Poll both status channels; deadlock is detected when no new status
    // arrived in a polling round and both programs report Waiting.
    loop {
        let mut updated = false;
        for state in r2.try_iter() {
            state0 = state;
            updated = true;
        }
        for state in r3.try_iter() {
            state1 = state;
            updated = true;
            if let State::Running(n) = state1 {
                sent_by_1 = n
            }
        }
        if !updated && state0 == State::Waiting && state1 == State::Waiting {
            break;
        }
        thread::sleep(time::Duration::from_millis(1));
    }
    sent_by_1
}
/// Reads the puzzle input and prints both answers.
fn main() {
    let mut input = String::new();
    let mut file = File::open(Path::new("input/day18.txt")).unwrap();
    file.read_to_string(&mut input).ok();
    let part1 = solve1(&input);
    let part2 = solve2(&input);
    println!("Part 1: {}, Part 2: {}", part1, part2)
}
#[cfg(test)]
mod test {
    use super::*;
    // Example program from the puzzle description.
    const INPUT: &str = "set a 1
add a 2
mul a a
mod a 5
snd a
set a 0
rcv a
jgz a -1
set a 1
jgz a -2
rcv a";
    /// Steps `execute1` through INPUT one line at a time, checking register
    /// `a` and each instruction's reported result.
    #[test]
    fn execute_test() {
        let mut regs = HashMap::new();
        let mut lines = INPUT.lines();
        assert_eq!(execute1(lines.next().unwrap(), &mut regs), OpResult::Void);
        assert_eq!(regs.get("a"), Some(&1));
        assert_eq!(execute1(lines.next().unwrap(), &mut regs), OpResult::Void);
        assert_eq!(regs.get("a"), Some(&3));
        assert_eq!(execute1(lines.next().unwrap(), &mut regs), OpResult::Void);
        assert_eq!(regs.get("a"), Some(&9));
        assert_eq!(execute1(lines.next().unwrap(), &mut regs), OpResult::Void);
        assert_eq!(regs.get("a"), Some(&4));
        assert_eq!(
            execute1(lines.next().unwrap(), &mut regs),
            OpResult::Sound(4)
        );
        assert_eq!(execute1(lines.next().unwrap(), &mut regs), OpResult::Void);
        assert_eq!(execute1(lines.next().unwrap(), &mut regs), OpResult::Void);
        assert_eq!(execute1(lines.next().unwrap(), &mut regs), OpResult::Void);
        assert_eq!(execute1(lines.next().unwrap(), &mut regs), OpResult::Void);
        assert_eq!(
            execute1(lines.next().unwrap(), &mut regs),
            OpResult::Jump(-2)
        );
        assert_eq!(
            execute1(lines.next().unwrap(), &mut regs),
            OpResult::Recover("a")
        );
    }
    /// Puzzle example: the first recovered sound is 4.
    #[test]
    fn solve1_test() {
        assert_eq!(solve1(INPUT), 4);
    }
    /// Puzzle example for part 2: program 1 sends three values.
    #[test]
    fn solve2_test() {
        let input = "snd 1
snd 2
snd p
rcv a
rcv b
rcv c
rcv d";
        assert_eq!(solve2(input), 3);
    }
}
|
// Auto-generated (svd2rust-style) read accessor for the CH3_DBG_CTDREQ register.
#[doc = "Reader of register CH3_DBG_CTDREQ"]
pub type R = crate::R<u32, super::CH3_DBG_CTDREQ>;
#[doc = "Reader of field `CH3_DBG_CTDREQ`"]
pub type CH3_DBG_CTDREQ_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:5"]
    #[inline(always)]
    pub fn ch3_dbg_ctdreq(&self) -> CH3_DBG_CTDREQ_R {
        // Mask off the low 6 bits of the register value.
        CH3_DBG_CTDREQ_R::new((self.bits & 0x3f) as u8)
    }
}
|
/// Target sum for Project Euler problem 9.
const TRIPLET_SUM: u64 = 1000;
/// Finds the product `a * b * c` of a Pythagorean triplet
/// (`a <= b < c`, `a^2 + b^2 = c^2`) whose members sum to `TRIPLET_SUM`,
/// or `None` if no such triplet exists.
pub fn find() -> Option<u64> {
    find_for_sum(TRIPLET_SUM)
}
/// Searches for a Pythagorean triplet summing to `sum` and returns the
/// product of its members. Generalized out of `find` so any target sum can
/// be tested; `find` keeps its original interface.
fn find_for_sum(sum: u64) -> Option<u64> {
    for a in 1..sum {
        // `c` is determined by `a` and `b`, so only two loops are needed.
        for b in a..(sum - a) {
            let c = sum - a - b;
            if a * a + b * b == c * c {
                return Some(a * b * c);
            }
        }
    }
    None
}
|
use std::io::{stdin, Read, StdinLock};
use std::str::FromStr;
/// Minimal whitespace-token scanner over a locked stdin handle, in the style
/// common to competitive programming.
#[allow(dead_code)]
struct Scanner<'a> {
    cin: StdinLock<'a>,
}
#[allow(dead_code)]
impl<'a> Scanner<'a> {
    /// Wraps a locked stdin handle.
    fn new(cin: StdinLock<'a>) -> Scanner<'a> {
        Scanner { cin }
    }
    /// Reads the next whitespace-delimited token and parses it, returning
    /// `None` when parsing fails.
    fn read<T: FromStr>(&mut self) -> Option<T> {
        let token: String = self
            .cin
            .by_ref()
            .bytes()
            .map(|c| c.unwrap() as char)
            .skip_while(|c| c.is_whitespace())
            .take_while(|c| !c.is_whitespace())
            .collect();
        token.parse().ok()
    }
    /// Reads the next token, panicking when it cannot be parsed.
    fn input<T: FromStr>(&mut self) -> T {
        self.read().unwrap()
    }
    /// Reads `len` whitespace-separated values into a vector.
    fn vec<T: FromStr>(&mut self, len: usize) -> Vec<T> {
        (0..len).map(|_| self.input()).collect()
    }
    /// Reads a `row` x `col` matrix of values.
    fn mat<T: FromStr>(&mut self, row: usize, col: usize) -> Vec<Vec<T>> {
        (0..row).map(|_| self.vec(col)).collect()
    }
}
fn main() {
    // Count index triples i < j < k with pairwise-distinct colors, excluding
    // those forming an arithmetic progression (k - j == j - i).
    let cin = stdin();
    let cin = cin.lock();
    let mut sc = Scanner::new(cin);
    // NOTE(review): assumes n >= 3; `0..n - 2` below underflows for smaller n.
    let n: usize = sc.input();
    let s: String = sc.input();
    let chs: Vec<char> = s.chars().collect();
    // Tally occurrences of each color.
    let (red, green, blue): (usize, usize, usize) = chs
        .iter()
        .fold((0, 0, 0), |(r, g, b), &ch| {
            match ch {
                'R' => (r + 1, g, b),
                'G' => (r, g + 1, b),
                'B' => (r, g, b + 1),
                _ => unreachable!(),
            }
        });
    // All color-distinct triples, ignoring index spacing...
    let sum: usize = red * green * blue;
    // ...minus the equally-spaced, color-distinct triples (k = 2j - i) that
    // the product over-counts.
    let mut sub: usize = 0;
    for i in 0..n - 2 {
        for j in i + 1..n - 1{
            if chs[i] == chs[j] {
                continue;
            }
            let k = j * 2 - i;
            if k >= n || chs[k] == chs[i] || chs[k] == chs[j] {
                continue;
            }
            sub += 1;
        }
    }
    println!("{}", sum - sub);
}
|
use super::*;
use rust_htslib::bam;
/// Builds a `BTreeMap` from `key => value` pairs. An optional leading
/// `$b;` argument forwards a branching factor to `BTreeMap::with_b`
/// (an internal/unstable API). Trailing commas are accepted.
macro_rules! btreemap {
    ( $b:expr; $($x:expr => $y:expr),* ) => ({
        let mut temp_map = BTreeMap::with_b($b);
        $(
            temp_map.insert($x, $y);
        )*
        temp_map
    });
    ( $($x:expr => $y:expr),* ) => ({
        let mut temp_map = BTreeMap::new();
        $(
            temp_map.insert($x, $y);
        )*
        temp_map
    });
    ( $b:expr; $($x:expr => $y:expr,)* ) => (
        btreemap!{$b; $($x => $y),*}
    );
    ( $($x:expr => $y:expr,)* ) => (
        btreemap!{$($x => $y),*}
    );
}
/// Builds a `HashMap` from `key => value` pairs (at least one pair required).
macro_rules! map(
    { $($key:expr => $value:expr),+ } => {
        {
            let mut m = ::std::collections::HashMap::new();
            $(
                m.insert($key, $value);
            )+
            m
        }
    };
);
/// Builds an in-memory BAM header (mouse mm9 contigs) used to construct test
/// records via `Record::from_sam`.
///
/// NOTE(review): the SAM spec requires tab-delimited header fields; confirm
/// the literal below actually contains tabs (whitespace is easy to mangle
/// when editing this file).
fn get_header() -> bam::HeaderView {
    bam::HeaderView::from_bytes(b"@HD	VN:1.0	SO:coordinate
@SQ	SN:chr1	LN:197195432
@SQ	SN:chr10	LN:129993255
@SQ	SN:chr11	LN:121843856
@SQ	SN:chr12	LN:121257530
@SQ	SN:chr13	LN:120284312
@SQ	SN:chr14	LN:125194864
@SQ	SN:chr15	LN:103494974
@SQ	SN:chr16	LN:98319150
@SQ	SN:chr17	LN:95272651
@SQ	SN:chr18	LN:90772031
@SQ	SN:chr19	LN:61342430
@SQ	SN:chr2	LN:181748087
@SQ	SN:chr3	LN:159599783
@SQ	SN:chr4	LN:155630120
@SQ	SN:chr5	LN:152537259
@SQ	SN:chr6	LN:149517037
@SQ	SN:chr7	LN:152524553
@SQ	SN:chr8	LN:131738871
@SQ	SN:chr9	LN:124076172
@SQ	SN:chrM	LN:16299
@SQ	SN:chrX	LN:166650296
@SQ	SN:chrY	LN:15902555
@PG	ID:Bowtie	VN:1.1.2	CL:\"bowtie --wrapper basic-0 --threads 4 -v 2 -m 10 -a /ifs/mirror/genomes/bowtie/mm9 /dev/fd/63 --sam\"
")
}
/// Asserts that `grouped` contains exactly the positions, UMIs and
/// per-UMI frequencies described by `expected_group`.
fn check_readgroups(grouped: ReadMap, expected_group: ReadMap) {
    assert_eq!(grouped.keys().len(), expected_group.keys().len());
    assert_eq!(grouped.values().len(), expected_group.values().len());
    for (e_position, e_umis) in expected_group {
        let t_umis = grouped.get(&e_position).unwrap();
        assert_eq!(e_umis.keys().len(), t_umis.keys().len());
        // Compare value counts on both sides; the previous version compared
        // expected values against observed *keys* (a copy-paste slip —
        // harmless for maps, but misleading).
        assert_eq!(e_umis.values().len(), t_umis.values().len());
        for (e_umi, e_freq) in e_umis {
            let t_freq = t_umis.get(&e_umi).unwrap();
            assert_eq!(*t_freq, e_freq);
        }
    }
}
/// Asserts that `graph` and `expected` contain the same nodes in the same order.
fn check_graph(graph: Vec<Node>, expected: Vec<Node>) {
    assert_eq!(graph.len(), expected.len());
    for (actual, wanted) in graph.into_iter().zip(expected.into_iter()) {
        assert_eq!(actual, wanted);
    }
}
/// Asserts that `grouping` and `expected` reference the same nodes in the
/// same order.
fn check_umi(grouping: Vec<&Node>, expected: Vec<&Node>) {
    assert_eq!(grouping.len(), expected.len());
    for (actual, wanted) in grouping.into_iter().zip(expected.into_iter()) {
        assert_eq!(actual, wanted);
    }
}
/// Seven reads at one position carrying two UMIs (CAGTA x6, TAGTA x1) must
/// collapse into a single position entry holding both UMI groups, with the
/// first read of each UMI as the representative.
#[test]
fn test_group_reads_small() {
    let header = get_header();
    let records_raw: Vec<&[u8]> = vec![
        b"SRR2057595.142416_TAGTA	0	chr19	61240266	255	26M	*	0	0	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:TAGTA",
        b"SRR2057595.297818_CAGTA	0	chr19	61240266	255	26M	*	0	0	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.324156_CAGTA	0	chr19	61240266	255	26M	*	0	0	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.357312_CAGTA	0	chr19	61240266	255	26M	*	0	0	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.413242_CAGTA	0	chr19	61240266	255	26M	*	0	0	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.509959_CAGTA	0	chr19	61240266	255	26M	*	0	0	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.623861_CAGTA	0	chr19	61240266	255	26M	*	0	0	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
    ];
    let records: Vec<bam::record::Record> = records_raw
        .iter()
        .map(|&r| bam::record::Record::from_sam(&header, r).unwrap())
        .collect();
    // chr19 is target 10 in the header above; positions are 0-based here.
    let expected_group: ReadMap = btreemap![
        Position {pos: 61240265, is_spliced: None, is_rev: false, target: 10, tlen: None} => map![
            BaseBits::new(b"CAGTA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[1]).unwrap()),
                freq: 6,
            },
            BaseBits::new(b"TAGTA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[0]).unwrap()),
                freq: 1,
            }
        ]
    ];
    let config = Config {
        input_bam: String::from("INPUT"),
        output_bam: String::from("OUTPUT"),
        umi_tag: String::from("RX"),
        allowed_read_dist: 1,
        allowed_count_factor: 2,
        allowed_network_depth: 2,
        umi_in_read_id: false,
        group_only: false,
        ignore_splice_pos: false,
        is_paired: false,
    };
    let (grouped, _) = group_reads(records, &config);
    // Check the read_groups
    check_readgroups(grouped, expected_group);
}
// Test the following:
// - A reverse-mapped read is grouped on its own
// - An unmapped read is not included
// - Different splice sites cause different groups to form
// - Different pos causes different groups to form
// - Different tid causes different groups to form
// - First highest-mapq read is the representative read for a group
/// Exercises the grouping rules listed above: strand, contig, position and
/// splice-site differences each produce separate groups, and the first
/// highest-mapq read represents its group.
#[test]
fn test_group_reads_complex() {
    let header = get_header();
    let records_raw: Vec<&[u8]> = vec![
        b"SRR2057595.142416_TAGTA	0	chr19	61240266	255	26M	*	0	26	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:TAGTA",
        b"SRR2057595.297818_CAGTA	16	chr19	61240266	255	26M	*	0	26	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.324156_CAGTA	0	chr19	61240266	255	26M	*	0	26	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.357312_CAGTA	0	chr19	61240266	254	26M	*	0	26	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.324245_CAGTA	0	chr19	61240266	255	26M	*	0	26	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.413242_CAGTA	0	chr19	61240266	255	25M	*	0	25	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.509959_CAGTA	0	chr18	61240266	255	26M	*	0	26	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
        b"SRR2057595.623861_CAGTA	0	chr19	61240265	255	25M	*	0	25	*	*	XA:i:1	MD:Z:12C13	NM:i:1	RX:Z:CAGTA",
    ];
    let records: Vec<bam::record::Record> = records_raw
        .iter()
        .map(|&r| bam::record::Record::from_sam(&header, r).unwrap())
        .collect();
    let expected_group: ReadMap = btreemap![
        // Forward reads at the shared position group together (records 0, 2-5).
        Position {pos: 61240265, is_spliced: None, is_rev: false, target: 10, tlen: None} => map![
            BaseBits::new(b"CAGTA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[2]).unwrap()),
                freq: 4,
            },
            BaseBits::new(b"TAGTA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[0]).unwrap()),
                freq: 1,
            }
        ],
        // Reverse-strand read (flag 16) groups on its own.
        Position {pos: 61240291, is_spliced: None, is_rev: true, target: 10, tlen: None} => map![
            BaseBits::new(b"CAGTA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[1]).unwrap()),
                freq: 1,
            }
        ],
        // Different mapping position groups on its own.
        Position {pos: 61240264, is_spliced: None, is_rev: false, target: 10, tlen: None} => map![
            BaseBits::new(b"CAGTA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[7]).unwrap()),
                freq: 1,
            }
        ],
        // Different contig (chr18 = target 9) groups on its own.
        Position {pos: 61240265, is_spliced: None, is_rev: false, target: 9,tlen: None} => map![
            BaseBits::new(b"CAGTA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[6]).unwrap()),
                freq: 1,
            }
        ]
    ];
    let config = Config {
        input_bam: String::from("INPUT"),
        output_bam: String::from("OUTPUT"),
        umi_tag: String::from("RX"),
        allowed_read_dist: 1,
        allowed_count_factor: 2,
        allowed_network_depth: 2,
        umi_in_read_id: false,
        group_only: false,
        ignore_splice_pos: false,
        is_paired: false,
    };
    let (grouped, _) = group_reads(records, &config);
    // Check the read_groups
    check_readgroups(grouped, expected_group);
}
/// Reverse-strand reads ending at the same 3' coordinate (despite different
/// leftmost positions) must land in the same group — matching umi_tools
/// behavior for these records.
#[test]
fn test_read_groups_umi_tools() {
    let header = get_header();
    let records_raw: Vec<&[u8]> = vec![
        b"SRR2057595.11597812_ATAAA	16	chr19	4078297	255	38M	*	0	0	*	*	XA:i:1	MD:Z:29A8	NM:i:1	RX:Z:ATAAA	UG:i:52	BX:Z:ATAAA",
        b"SRR2057595.10788_ATAAA	16	chr19	4078298	255	37M	*	0	0	*	*	XA:i:0	MD:Z:37	NM:i:0	RX:Z:ATAAA	UG:i:52	BX:Z:ATAAA",
        b"SRR2057595.42646_ATAAA	16	chr19	4078298	255	37M	*	0	0	*	*	XA:i:0	MD:Z:37	NM:i:0	RX:Z:ATAAA	UG:i:52	BX:Z:ATAAA",
    ];
    let records: Vec<bam::record::Record> = records_raw
        .iter()
        .map(|&r| bam::record::Record::from_sam(&header, r).unwrap())
        .collect();
    // All three reads share one reverse-strand end position and one UMI.
    let expected_group: ReadMap = btreemap![
        Position {pos: 4078334, is_spliced: None, is_rev: true, target: 10, tlen: None} => map![
            BaseBits::new(b"ATAAA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[1]).unwrap()),
                freq: 3,
            }
        ]
    ];
    let config = Config {
        input_bam: String::from("INPUT"),
        output_bam: String::from("OUTPUT"),
        umi_tag: String::from("RX"),
        allowed_read_dist: 1,
        allowed_count_factor: 2,
        allowed_network_depth: 2,
        umi_in_read_id: false,
        group_only: false,
        ignore_splice_pos: false,
        is_paired: false,
    };
    let (grouped, _) = group_reads(records, &config);
    // Check the read_groups
    check_readgroups(grouped, expected_group);
}
/// Soft-clips (S) and splice gaps (N) in the CIGAR must feed into the group
/// position: clipped bases shift the effective start/end, and spliced reads
/// carry the match length in `is_spliced` so they group separately.
#[test]
fn test_read_groups_cigars() {
    let header = get_header();
    let records_raw: Vec<&[u8]> = vec![
        b"SRR2057595.11597812_ATAAA	16	chr19	4078297	255	3S35M	*	0	0	*	*	XA:i:1	MD:Z:29A8	NM:i:1	RX:Z:ATAAA	UG:i:52	BX:Z:ATAAA",
        b"SRR2057595.10788_ATAAA	16	chr19	4078294	255	33M4S	*	0	0	*	*	XA:i:0	MD:Z:37	NM:i:0	RX:Z:ATAAA	UG:i:52	BX:Z:ATAAA",
        b"SRR2057595.42646_ATAAA	16	chr19	4078298	255	15M7N15M	*	0	0	*	*	XA:i:0	MD:Z:37	NM:i:0	RX:Z:ATAAA	UG:i:52	BX:Z:ATAAA",
        b"SRR2057595.11597790_ATAAA	0	chr19	4078300	255	3S35M	*	0	0	*	*	XA:i:1	MD:Z:29A8	NM:i:1	RX:Z:ATAAA	UG:i:52	BX:Z:ATAAA",
        b"SRR2057595.10988_ATAAA	0	chr19	4078298	255	33M4S	*	0	0	*	*	XA:i:0	MD:Z:37	NM:i:0	RX:Z:ATAAA	UG:i:52	BX:Z:ATAAA",
        b"SRR2057595.4246_ATAAA	0	chr19	4078298	255	15M7N15M	*	0	0	*	*	XA:i:0	MD:Z:37	NM:i:0	RX:Z:ATAAA	UG:i:52	BX:Z:ATAAA",
    ];
    let records: Vec<bam::record::Record> = records_raw
        .iter()
        .map(|&r| bam::record::Record::from_sam(&header, r).unwrap())
        .collect();
    // Each CIGAR/strand combination yields its own single-read group.
    let expected_group: ReadMap = btreemap![
        Position {pos: 4078330, is_spliced: None, is_rev: true, target: 10, tlen: None} => map![
            BaseBits::new(b"ATAAA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[1]).unwrap()),
                freq: 1,
            }],
        Position {pos: 4078331, is_spliced: Some(35), is_rev: true, target: 10,tlen: None} => map![
            BaseBits::new(b"ATAAA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[0]).unwrap()),
                freq: 1,
            }],
        Position {pos: 4078334, is_spliced: Some(15), is_rev: true, target: 10,tlen: None} => map![
            BaseBits::new(b"ATAAA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[2]).unwrap()),
                freq: 1,
            }],
        Position {pos: 4078296, is_spliced: None, is_rev: false, target: 10,tlen: None} => map![
            BaseBits::new(b"ATAAA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[3]).unwrap()),
                freq: 1,
            }],
        Position {pos: 4078297, is_spliced: Some(33), is_rev: false, target: 10,tlen: None} => map![
            BaseBits::new(b"ATAAA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[4]).unwrap()),
                freq: 1,
            }],
        Position {pos: 4078297, is_spliced: Some(15), is_rev: false, target: 10,tlen: None} => map![
            BaseBits::new(b"ATAAA").unwrap() => ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header, records_raw[5]).unwrap()),
                freq: 1,
            }]
    ];
    let config = Config {
        input_bam: String::from("INPUT"),
        output_bam: String::from("OUTPUT"),
        umi_tag: String::from("RX"),
        allowed_read_dist: 1,
        allowed_count_factor: 2,
        allowed_network_depth: 2,
        umi_in_read_id: false,
        group_only: false,
        ignore_splice_pos: false,
        is_paired: false,
    };
    let (grouped, _) = group_reads(records, &config);
    // Check the read_groups
    check_readgroups(grouped, expected_group);
}
#[test]
fn test_graph_small() {
    // Two UMIs one mismatch apart (CAGTA freq 6, TAGTA freq 1): with
    // allowed_read_dist 1 and count factor 2, the high-frequency node absorbs
    // the low-frequency one, so the only change after connect_graph is the
    // directed edge 0 -> 1.
    let header = get_header();
    let uncon_graph = vec![
        Node {
            umi: BaseBits::new(b"CAGTA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_CAGTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:CAGTA").unwrap()),
                freq: 6,
            },
            connections: vec![],
        },
        Node {
            umi: BaseBits::new(b"TAGTA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.142416_TAGTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:TAGTA").unwrap()),
                freq: 1,
            },
            connections: vec![],
        },
    ];
    let expected = vec![
        Node {
            umi: BaseBits::new(b"CAGTA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_CAGTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:CAGTA").unwrap()),
                freq: 6,
            },
            connections: vec![1],
        },
        Node {
            umi: BaseBits::new(b"TAGTA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.142416_TAGTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:TAGTA").unwrap()),
                freq: 1,
            },
            connections: vec![],
        },
    ];
    let config = Config {
        input_bam: String::from("INPUT"),
        output_bam: String::from("OUTPUT"),
        umi_tag: String::from("RX"),
        allowed_read_dist: 1,
        allowed_count_factor: 2,
        allowed_network_depth: 2,
        umi_in_read_id: false,
        group_only: false,
        ignore_splice_pos: false,
        is_paired: false,
    };
    let graph = connect_graph(
        uncon_graph,
        config.allowed_read_dist,
        config.allowed_count_factor,
    );
    println!("{:#?}", graph);
    check_graph(graph, expected);
}
#[test]
fn test_graph_umi() {
    // Test the umis found in the umi blog post:
    // https://cgatoxford.files.wordpress.com/2015/08/schematic_25-e1443714121688.png
    // Expected adjacency after connect_graph: the dominant ATTA (456) links to
    // ATTG/ATTT/AGTA, ATTT (2) still links down to ATTG, AGTA (72) links to
    // AGTC; AGGA stays isolated (two mismatches from everything).
    let header = get_header();
    let uncon_graph = vec![
        Node {
            umi: BaseBits::new(b"ATTG").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_ATTG 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:ATTG").unwrap()),
                freq: 1,
            },
            connections: vec![],
        },
        Node {
            umi: BaseBits::new(b"ATTA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_ATTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:ATTA").unwrap()),
                freq: 456,
            },
            connections: vec![],
        },
        Node {
            umi: BaseBits::new(b"ATTT").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_ATTT 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:ATTT").unwrap()),
                freq: 2,
            },
            connections: vec![],
        },
        Node {
            umi: BaseBits::new(b"AGTA").unwrap(),
            freq: ReadFreq{
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_AGTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGTA").unwrap()),
                freq: 72,
            },
            connections: vec![],
        },
        Node {
            umi: BaseBits::new(b"AGTC").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_AGTC 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGTC").unwrap()),
                freq: 1,
            },
            connections: vec![],
        },
        Node {
            umi: BaseBits::new(b"AGGA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.142416_AGGA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGGA").unwrap()),
                freq: 90,
            },
            connections: vec![],
        },
    ];
    let expected = vec![
        Node {
            umi: BaseBits::new(b"ATTG").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_ATTG 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:ATTG").unwrap()),
                freq: 1,
            },
            connections: vec![],
        },
        Node {
            umi: BaseBits::new(b"ATTA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_ATTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:ATTA").unwrap()),
                freq: 456,
            },
            connections: vec![0, 2, 3],
        },
        Node {
            umi: BaseBits::new(b"ATTT").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_ATTT 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:ATTT").unwrap()),
                freq: 2,
            },
            connections: vec![0],
        },
        Node {
            umi: BaseBits::new(b"AGTA").unwrap(),
            freq: ReadFreq{
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_AGTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGTA").unwrap()),
                freq: 72,
            },
            connections: vec![4],
        },
        Node {
            umi: BaseBits::new(b"AGTC").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_AGTC 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGTC").unwrap()),
                freq: 1,
            },
            connections: vec![],
        },
        Node {
            umi: BaseBits::new(b"AGGA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.142416_AGGA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGGA").unwrap()),
                freq: 90,
            },
            connections: vec![],
        },
    ];
    let config = Config {
        input_bam: String::from("INPUT"),
        output_bam: String::from("OUTPUT"),
        umi_tag: String::from("RX"),
        allowed_read_dist: 1,
        allowed_count_factor: 2,
        allowed_network_depth: 2,
        umi_in_read_id: false,
        group_only: false,
        ignore_splice_pos: false,
        is_paired: false,
    };
    let graph = connect_graph(
        uncon_graph,
        config.allowed_read_dist,
        config.allowed_count_factor,
    );
    println!("{:#?}", graph);
    check_graph(graph, expected);
}
#[test]
fn test_determine_umi() {
    // Test the umis found in the umi blog post:
    // https://cgatoxford.files.wordpress.com/2015/08/schematic_25-e1443714121688.png
    // The graph here is hand-wired (connections given, not computed): node 1
    // (ATTA) reaches {0, 2, 3} and, through node 3, nodes {4, 5} — a network
    // of six nodes with ATTA as master. Node 6 only points at node 5, which is
    // already consumed, so it forms its own single-node group.
    let header = get_header();
    let graph = vec![
        Node {
            umi: BaseBits::new(b"ATTG").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_ATTG 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:ATTG").unwrap()),
                freq: 1,
            },
            connections: vec![],
        },
        Node {
            umi: BaseBits::new(b"ATTA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_ATTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:ATTA").unwrap()),
                freq: 456,
            },
            connections: vec![0, 2, 3],
        },
        Node {
            umi: BaseBits::new(b"ATTT").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_ATTT 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:ATTT").unwrap()),
                freq: 2,
            },
            connections: vec![0],
        },
        Node {
            umi: BaseBits::new(b"AGTA").unwrap(),
            freq: ReadFreq{
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_AGTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGTA").unwrap()),
                freq: 72,
            },
            connections: vec![4, 5],
        },
        Node {
            umi: BaseBits::new(b"AGTC").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_AGTC 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGTC").unwrap()),
                freq: 1,
            },
            connections: vec![],
        },
        Node {
            // NOTE(review): this node's umi is AGGA but the backing read is
            // named ..._AGTG and carries RX:Z:AGTG — looks like a fixture
            // copy/paste slip. Harmless for the assertions below (the node is
            // swallowed into group 0 by index), but worth confirming.
            umi: BaseBits::new(b"AGGA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.297818_AGTG 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGTG").unwrap()),
                freq: 5,
            },
            connections: vec![],
        },
        Node {
            umi: BaseBits::new(b"AGGA").unwrap(),
            freq: ReadFreq {
                read: ReadCollection::SingleRead(bam::record::Record::from_sam(&header,
                    b"SRR2057595.142416_AGGA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGGA").unwrap()),
                freq: 90,
            },
            connections: vec![5],
        },
    ];
    // Expected master nodes: ATTA for the big network, AGGA for the leftover.
    let node1 = Node {
        umi: BaseBits::new(b"ATTA").unwrap(),
        freq: ReadFreq {
            read: ReadCollection::SingleRead(bam::record::Record::from_sam(
                &header,
                b"SRR2057595.297818_ATTA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:ATTA",
            )
            .unwrap()),
            freq: 456,
        },
        connections: vec![0, 2, 3],
    };
    let node2 = Node {
        umi: BaseBits::new(b"AGGA").unwrap(),
        freq: ReadFreq {
            read: ReadCollection::SingleRead(bam::record::Record::from_sam(
                &header,
                b"SRR2057595.142416_AGGA 0 chr19 61240266 255 26M * 0 0 * * XA:i:1 MD:Z:12C13 NM:i:1 RX:Z:AGGA",
            )
            .unwrap()),
            freq: 90,
        },
        connections: vec![5],
    };
    let expected = vec![&node1, &node2];
    let config = Config {
        input_bam: String::from("INPUT"),
        output_bam: String::from("OUTPUT"),
        umi_tag: String::from("RX"),
        allowed_read_dist: 1,
        allowed_count_factor: 2,
        allowed_network_depth: 2,
        umi_in_read_id: false,
        group_only: false,
        ignore_splice_pos: false,
        is_paired: false,
    };
    let grouping = determine_umi(&graph, config.allowed_network_depth);
    // Test that nodes can't be double added to two different groups. Otherwise the group_only ends
    // up printing them twice
    assert_eq!(grouping[0].nodes.len(), 6);
    assert_eq!(grouping[1].nodes.len(), 1);
    let grouping: Vec<&Node> = grouping.iter().map(|n| n.nodes[n.master_node]).collect();
    check_umi(grouping, expected);
}
|
#![no_std]
#![no_main]
#![feature(abi_x86_interrupt)]
#![feature(custom_test_frameworks)]
#![test_runner(xagima::testing::runner)]
#![reexport_test_harness_main = "test_main"]
#![feature(default_alloc_error_handler)]
use bootloader::BootInfo;
use core::panic::PanicInfo;
// This test binary is a "should panic" test: the stack-overflow test below is
// expected to panic, so the panic handler reports SUCCESS, while falling
// through `_start` (no panic) reports FAILURE.
#[panic_handler]
fn panic(_: &PanicInfo) -> ! {
    // Panicking is the expected outcome — mark the run as passed and halt.
    xagima::testing::success();
    xagima::halt();
}
// Kernel entry point for this test binary (called by the bootloader).
#[no_mangle]
pub extern "C" fn _start(boot_info: &'static BootInfo) -> ! {
    xagima::init(boot_info);
    // Run the custom test harness; the stack-overflow test is expected to
    // panic, so reaching the lines below means the test did NOT panic.
    test_main();
    xagima::testing::fail();
    xagima::halt();
}
// Recurse without bound to exhaust the kernel stack. Each call pushes a
// return address, so the overflow is guaranteed before the read below runs.
#[allow(unconditional_recursion)]
fn stack_overflow() {
    stack_overflow();
    // The volatile read keeps work after the recursive call, so the compiler
    // cannot turn the recursion into a loop (tail-call optimization).
    let mut value = 0u32;
    volatile::Volatile::new(&mut value).read(); // prevent tail recursion optimizations
}
#[test_case]
fn test() {
    // Trigger the overflow; the resulting panic lands in the panic handler,
    // which reports success for this binary.
    stack_overflow();
}
|
use std::fs;
use structopt::clap::Shell;
include!("src/cli.rs");
// Binary name baked into the generated completion files.
const BIN_NAME: &str = "zellij";
fn main() {
    // Build script: generate shell completion files (bash/zsh/fish) for the
    // `zellij` binary into <crate root>/assets/completions.
    // Generate Shell Completions
    let mut clap_app = CliArgs::clap();
    // Only re-run when the CLI definition (included above) changes.
    println!("cargo:rerun-if-changed=src/cli.rs");
    // CARGO_MANIFEST_DIR is always set by cargo during a build.
    let mut out_dir = std::env::var_os("CARGO_MANIFEST_DIR").unwrap();
    out_dir.push("/assets/completions");
    // Fixed message typo: "will to added" -> "will be added".
    println!(
        "Completion files will be added to this location: {:?}",
        out_dir
    );
    fs::create_dir_all(&out_dir).unwrap();
    clap_app.gen_completions(BIN_NAME, Shell::Bash, &out_dir);
    clap_app.gen_completions(BIN_NAME, Shell::Zsh, &out_dir);
    clap_app.gen_completions(BIN_NAME, Shell::Fish, &out_dir);
}
|
use anyhow::Result;
use std::io::SeekFrom;
/// Seeking over point-indexed streams.
///
/// Mirrors [std::io::Seek](std::io::Seek), except that positions are measured
/// in points rather than bytes.
pub trait SeekToPoint {
    /// Seek to the point described by `position`.
    ///
    /// On success, returns the new point position measured from the start of
    /// the underlying stream.
    fn seek_point(&mut self, position: SeekFrom) -> Result<usize>;
    /// The index of the current point; shorthand for
    /// `seek_point(SeekFrom::Current(0))`.
    fn point_index(&mut self) -> Result<usize> {
        self.seek_point(SeekFrom::Current(0))
    }
    /// The total number of points in the underlying stream.
    ///
    /// Determined by seeking to the end and back, so the cursor position is
    /// left where it started.
    fn point_count(&mut self) -> Result<usize> {
        let saved = self.point_index()?;
        let total = self.seek_point(SeekFrom::End(0))?;
        self.seek_point(SeekFrom::Start(saved as u64))?;
        Ok(total)
    }
}
|
use crate::println;
use x86_64::instructions::port::Port;
use alloc::vec::Vec;
use alloc::fmt;
use core::fmt::Formatter;
/// One discovered PCI function: its bus/device/function address plus the
/// identification registers read from configuration space.
pub struct PCIDevice {
    bus: u8,
    device: u8,
    vendor_id: u16,
    device_id: u16,
    function: u8,
    class_code: u8,
    subclass_code: u8,
    rev_id: u8
}
impl fmt::Display for PCIDevice {
    // Renders as "bus.device.function vendor:device class subclass (rev id X)",
    // loosely mirroring `lspci`-style output.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{:02}.{:02}.{:01} {:04x}:{:04x} {:02x} {:02x} (rev id {:02x})", self.bus, self.device, self.function, self.vendor_id, self.device_id, self.class_code, self.subclass_code, self.rev_id)
    }
}
/// Brute-force PCI bus scanner using legacy configuration mechanism #1
/// (port 0xCF8 = CONFIG_ADDRESS, port 0xCFC = CONFIG_DATA).
pub struct PCIEnumerator;
impl PCIEnumerator {
    /// Read one 32-bit dword from PCI configuration space.
    ///
    /// Bug fix: the register offset is masked with `0xfc`, not `0xff` — the
    /// PCI spec requires bits 1:0 of CONFIG_ADDRESS to be zero (dword
    /// alignment). All callers in this file pass aligned offsets, so observed
    /// behavior is unchanged, but unaligned offsets no longer build an
    /// invalid address.
    fn config_read_dword(bus: u8, slot: u8, func: u8, offset: u8) -> u32 {
        let lbus = bus as u32;
        let lslot = slot as u32;
        let lfunc = func as u32;
        let loffset = offset as u32;
        // enable bit 31 | bus 23:16 | device 15:11 | function 10:8 | aligned offset 7:2
        let address =
            (lbus << 16) | (lslot << 11) | (lfunc << 8) | (loffset & 0xfc) | 0x8000_0000;
        // SAFETY: 0xCF8/0xCFC are the architecturally defined configuration
        // ports on x86; write the address, then read the selected dword.
        unsafe {
            let mut write_port = Port::<u32>::new(0xcf8);
            write_port.write(address);
            let mut read_port = Port::<u32>::new(0xcfc);
            read_port.read()
        }
    }
    /// Vendor id: low 16 bits of the dword at offset 0x00.
    fn config_get_vendor_id(bus: u8, device: u8, function: u8) -> u16 {
        (Self::config_read_dword(bus, device, function, 0x00) & 0xffff) as u16
    }
    /// Device id: high 16 bits of the dword at offset 0x00.
    fn config_get_device_id(bus: u8, device: u8, function: u8) -> u16 {
        (Self::config_read_dword(bus, device, function, 0x00) >> 16) as u16
    }
    /// Class code: byte 3 of the dword at offset 0x08.
    fn config_get_class_code(bus: u8, device: u8, function: u8) -> u8 {
        (Self::config_read_dword(bus, device, function, 0x08) >> 24) as u8
    }
    /// Subclass code: byte 2 of the dword at offset 0x08.
    fn config_get_sub_class_code(bus: u8, device: u8, function: u8) -> u8 {
        ((Self::config_read_dword(bus, device, function, 0x08) >> 16) & 0xff) as u8
    }
    /// Header type (function 0): byte 2 of the dword at offset 0x0C.
    /// Bit 7 set means the device is multi-function.
    fn config_get_header_type(bus: u8, device: u8) -> u8 {
        ((Self::config_read_dword(bus, device, 0, 0x0c) >> 16) & 0xff) as u8
    }
    /// Revision id: byte 0 of the dword at offset 0x08.
    fn config_get_revision_id(bus: u8, device: u8, function: u8) -> u8 {
        (Self::config_read_dword(bus, device, function, 0x08) & 0xff) as u8
    }
    /// Snapshot the identification registers of one function into a `PCIDevice`.
    fn get_pci_device(bus: u8, device: u8, function: u8) -> PCIDevice {
        let vendor_id = Self::config_get_vendor_id(bus, device, function);
        let device_id = Self::config_get_device_id(bus, device, function);
        let class_code = Self::config_get_class_code(bus, device, function);
        let subclass_code = Self::config_get_sub_class_code(bus, device, function);
        let rev_id = Self::config_get_revision_id(bus, device, function);
        PCIDevice {
            bus,
            device,
            vendor_id,
            device_id,
            function,
            class_code,
            subclass_code,
            rev_id,
        }
    }
    /// Scan every bus/device slot and collect the functions that respond,
    /// printing each one. Multi-function devices have all 8 functions probed.
    pub fn enumerate() -> Vec<PCIDevice> {
        let mut devices = Vec::<PCIDevice>::new();
        for bus in 0..=255 {
            for device in 0..=31 {
                // Vendor id 0xFFFF means "no device in this slot".
                let vendor_id = Self::config_get_vendor_id(bus, device, 0);
                if vendor_id == 0xffff {
                    continue
                }
                if (Self::config_get_header_type(bus, device) & 0x80) != 0 {
                    for function in 0..=7 {
                        let vendor_id = Self::config_get_vendor_id(bus, device, function);
                        if vendor_id == 0xffff {
                            continue
                        }
                        let class_code = Self::config_get_class_code(bus, device, function);
                        // NOTE(review): this skips every function whose class
                        // code is >= 0x10, while the single-function path below
                        // applies no such filter — confirm the asymmetry is
                        // intentional before relying on it.
                        if class_code & 0xf0 != 0 {
                            continue
                        }
                        let pcidevice = Self::get_pci_device(bus, device, function);
                        devices.push(pcidevice);
                    }
                } else {
                    let pcidevice = Self::get_pci_device(bus, device, 0);
                    devices.push(pcidevice);
                }
            }
        }
        for device in &devices {
            println!("{}", device);
        }
        devices
    }
}
|
use instruction::instruction::ExecuteResult;
use rtda::frame::Frame;
use util::code_reader::CodeReader;
#[allow(non_snake_case)]
fn _icmpPop(frame: Frame) -> (i32, i32, Frame) {
    // Pop the two int operands of an if_icmp<cond> instruction. The stack
    // holds [.., val1, val2], so the second operand comes off first.
    let Frame {
        operand_stack,
        local_vars,
    } = frame;
    let (second, stack) = operand_stack.pop_int();
    let (first, stack) = stack.pop_int();
    (
        first,
        second,
        Frame {
            operand_stack: stack,
            local_vars,
        },
    )
}
#[allow(non_snake_case)]
pub fn IF_ICMPGT(code_reader: CodeReader, frame: Frame) -> (ExecuteResult, CodeReader) {
    // Branch by the signed 16-bit operand when val1 > val2; otherwise fall through.
    println!("IF_ICMPGT");
    let (branch, reader) = code_reader.read_i16();
    let (val1, val2, frame) = _icmpPop(frame);
    let offset = match val1 > val2 {
        true => branch as isize,
        false => 0,
    };
    (ExecuteResult { frame, offset }, reader)
}
#[allow(non_snake_case)]
pub fn IF_ICMPGE(code_reader: CodeReader, frame: Frame) -> (ExecuteResult, CodeReader) {
    // Branch by the signed 16-bit operand when val1 >= val2; otherwise fall through.
    println!("IF_ICMPGE");
    let (branch, reader) = code_reader.read_i16();
    let (val1, val2, frame) = _icmpPop(frame);
    let offset = match val1 >= val2 {
        true => branch as isize,
        false => 0,
    };
    (ExecuteResult { frame, offset }, reader)
}
#[allow(non_snake_case)]
pub fn IF_ICMPEQ(code_reader: CodeReader, frame: Frame) -> (ExecuteResult, CodeReader) {
    // Branch by the signed 16-bit operand when val1 == val2; otherwise fall through.
    println!("IF_ICMPEQ");
    let (branch, reader) = code_reader.read_i16();
    let (val1, val2, frame) = _icmpPop(frame);
    let offset = match val1 == val2 {
        true => branch as isize,
        false => 0,
    };
    (ExecuteResult { frame, offset }, reader)
}
#[allow(non_snake_case)]
pub fn IF_ICMPNE(code_reader: CodeReader, frame: Frame) -> (ExecuteResult, CodeReader) {
    // Branch by the signed 16-bit operand when val1 != val2; otherwise fall through.
    println!("IF_ICMPNE");
    let (branch, reader) = code_reader.read_i16();
    let (val1, val2, frame) = _icmpPop(frame);
    let offset = match val1 != val2 {
        true => branch as isize,
        false => 0,
    };
    (ExecuteResult { frame, offset }, reader)
}
#[allow(non_snake_case)]
pub fn IF_ICMPLT(code_reader: CodeReader, frame: Frame) -> (ExecuteResult, CodeReader) {
    // Branch by the signed 16-bit operand when val1 < val2; otherwise fall through.
    println!("IF_ICMPLT");
    let (branch, reader) = code_reader.read_i16();
    let (val1, val2, frame) = _icmpPop(frame);
    let offset = match val1 < val2 {
        true => branch as isize,
        false => 0,
    };
    (ExecuteResult { frame, offset }, reader)
}
#[allow(non_snake_case)]
pub fn IF_ICMPLE(code_reader: CodeReader, frame: Frame) -> (ExecuteResult, CodeReader) {
    // Branch by the signed 16-bit operand when val1 <= val2; otherwise fall through.
    println!("IF_ICMPLE");
    let (branch, reader) = code_reader.read_i16();
    let (val1, val2, frame) = _icmpPop(frame);
    let offset = match val1 <= val2 {
        true => branch as isize,
        false => 0,
    };
    (ExecuteResult { frame, offset }, reader)
}
#[cfg(test)]
mod tests {
    use instruction::comparison::if_icmp::*;
    use instruction::instruction::ExecuteResult;
    use rtda::frame::Frame;
    use util::code_reader::CodeReader;
    // Each test pushes val1 then val2, so the comparison is val1 <cond> val2.
    // The bytecode operand [1, 1] decodes as the big-endian i16 0x0101 = 257,
    // hence the expected branch offset of 257 when the condition holds.
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPGT_success() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(1);
        let operand_stack = operand_stack.push_int(0);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPGT(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 257);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPGT_fail() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(1);
        let operand_stack = operand_stack.push_int(2);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPGT(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 0);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPGE_success() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(1);
        let operand_stack = operand_stack.push_int(1);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPGE(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 257);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPGE_fail() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(0);
        let operand_stack = operand_stack.push_int(2);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPGE(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 0);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPEQ_success() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(-1);
        let operand_stack = operand_stack.push_int(-1);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPEQ(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 257);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPEQ_fail() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(0);
        let operand_stack = operand_stack.push_int(1);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPEQ(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 0);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPNE_success() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(0);
        let operand_stack = operand_stack.push_int(1);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPNE(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 257);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPNE_fail() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(-1);
        let operand_stack = operand_stack.push_int(-1);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPNE(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 0);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPLT_success() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(1);
        let operand_stack = operand_stack.push_int(2);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPLT(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 257);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPLT_fail() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(0);
        let operand_stack = operand_stack.push_int(0);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPLT(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 0);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPLE_success() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(0);
        let operand_stack = operand_stack.push_int(0);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPLE(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 257);
    }
    #[test]
    #[allow(non_snake_case)]
    fn test_IF_ICMPLE_fail() {
        let frame = Frame::new(2, 2);
        let Frame {
            operand_stack,
            local_vars,
        } = frame;
        let operand_stack = operand_stack.push_int(2);
        let operand_stack = operand_stack.push_int(1);
        let frame = Frame {
            operand_stack,
            local_vars,
        };
        let (ExecuteResult { frame: _, offset }, _) = IF_ICMPLE(CodeReader::new(&vec![1, 1]), frame);
        assert_eq!(offset, 0);
    }
}
|
use shorthand::ShortHand;
// Compile test: deriving ShortHand on a unit struct (no fields) must not
// produce an error — there is simply nothing to generate accessors for.
#[derive(ShortHand)]
struct UnitStruct;
fn main() {}
|
use ckb_chain_spec::consensus::{build_genesis_epoch_ext, ConsensusBuilder};
use ckb_dao_utils::genesis_dao_data;
use ckb_types::{
core::{
capacity_bytes, BlockBuilder, BlockView, Capacity, HeaderBuilder, HeaderView,
TransactionBuilder,
},
packed::{Byte32, CellInput, Script},
prelude::*,
utilities::DIFF_TWO,
};
use criterion::{criterion_group, Criterion};
use faketime::unix_time_as_millis;
use rand::{thread_rng, Rng};
use std::collections::HashMap;
// Compact difficulty target used for the genesis header of the bench chain.
const GENESIS_TARGET: u32 = 0x2001_0000;
// Per-epoch primary reward configured on the consensus under test.
const DEFAULT_EPOCH_REWARD: Capacity = capacity_bytes!(1_250_000);
// Seconds between consecutive generated blocks.
const MIN_BLOCK_INTERVAL: u64 = 8;
// Iteration counts per measurement; trimmed down under the "ci" feature.
#[cfg(not(feature = "ci"))]
const SAMPLES: &[usize] = &[100usize, 500];
#[cfg(feature = "ci")]
const SAMPLES: &[usize] = &[1usize];
/// Minimal in-memory stand-in for the chain store: headers indexed by block
/// hash, plus the cumulative uncle count up to (and including) each block.
#[derive(Default, Clone)]
pub struct FakeStore {
    headers: HashMap<Byte32, HeaderView>,
    total_uncles_count: HashMap<Byte32, u64>,
}
impl FakeStore {
    /// Record a block: extend the parent's cumulative uncle count by this
    /// block's uncles, and index the header by the block hash.
    fn insert(&mut self, block: BlockView) {
        let parent_total = self
            .total_uncles_count
            .get(&block.parent_hash())
            .copied()
            .unwrap_or(0u64);
        let own_uncles = block.uncles().data().len() as u64;
        self.total_uncles_count
            .insert(block.hash(), parent_total + own_uncles);
        self.headers.insert(block.hash(), block.header());
    }
    /// Header for `hash`, if the block was inserted.
    pub(crate) fn get_block_header(&self, hash: &Byte32) -> Option<HeaderView> {
        self.headers.get(hash).cloned()
    }
    /// Cumulative uncle count up to `hash`, if the block was inserted.
    pub(crate) fn total_uncles_count(&self, hash: &Byte32) -> Option<u64> {
        self.total_uncles_count.get(hash).copied()
    }
}
/// Build a child of `parent`: random nonce, zero or one random uncle, number
/// advanced by one, and timestamp advanced by the minimum block interval.
fn gen_empty_block(parent: &HeaderView) -> BlockView {
    let mut rng = thread_rng();
    let nonce: u128 = rng.gen();
    let n_uncles: u32 = rng.gen_range(0, 2);
    let mut uncles = Vec::new();
    for _ in 0..n_uncles {
        uncles.push(
            BlockBuilder::default()
                .nonce(nonce.pack())
                .build()
                .as_uncle(),
        );
    }
    BlockBuilder::default()
        .parent_hash(parent.hash())
        .number((parent.number() + 1).pack())
        .uncles(uncles)
        .compact_target(parent.compact_target().pack())
        .timestamp((parent.timestamp() + MIN_BLOCK_INTERVAL * 1000).pack())
        .build()
}
// Benchmark `next_epoch_ext` for each sample size in SAMPLES. The setup
// closure (not timed) builds a genesis block plus one full epoch of empty
// blocks in a FakeStore; the timed closure recomputes the epoch transition
// `samples + 1` times against that store.
fn bench(c: &mut Criterion) {
    c.bench_function_over_inputs(
        "next_epoch_ext",
        |b, samples| {
            b.iter_with_setup(
                || {
                    // Untimed setup: genesis header/cellbase/dao, then one
                    // epoch's worth of generated blocks chained off genesis.
                    let now = unix_time_as_millis();
                    let header = HeaderBuilder::default()
                        .compact_target(GENESIS_TARGET.pack())
                        .timestamp(now.pack())
                        .build();
                    let input = CellInput::new_cellbase_input(0);
                    let witness = Script::default().into_witness();
                    let cellbase = TransactionBuilder::default()
                        .input(input)
                        .witness(witness)
                        .build();
                    let dao = genesis_dao_data(vec![&cellbase]).unwrap();
                    let genesis_block = BlockBuilder::default()
                        .compact_target(DIFF_TWO.pack())
                        .dao(dao)
                        .transaction(cellbase)
                        .header(header)
                        .build();
                    let mut parent = genesis_block.header();
                    let epoch_ext =
                        build_genesis_epoch_ext(DEFAULT_EPOCH_REWARD, DIFF_TWO, 1000, 14400);
                    let consensus = ConsensusBuilder::new(genesis_block.clone(), epoch_ext)
                        .initial_primary_epoch_reward(DEFAULT_EPOCH_REWARD)
                        .build();
                    let genesis_epoch_ext = consensus.genesis_epoch_ext().clone();
                    let mut store = FakeStore::default();
                    store.insert(genesis_block);
                    for _ in 1..genesis_epoch_ext.length() {
                        let block = gen_empty_block(&parent);
                        parent = block.header();
                        store.insert(block);
                    }
                    (consensus, genesis_epoch_ext, parent, store)
                },
                |(consensus, genesis_epoch_ext, parent, store)| {
                    // Timed section: epoch transition backed by FakeStore lookups.
                    let get_block_header = |hash: &Byte32| store.get_block_header(hash);
                    let total_uncles_count = |hash: &Byte32| store.total_uncles_count(hash);
                    for _ in 0..=**samples {
                        consensus.next_epoch_ext(
                            &genesis_epoch_ext,
                            &parent,
                            get_block_header,
                            total_uncles_count,
                        );
                    }
                },
            )
        },
        SAMPLES,
    );
}
// Register the benchmark group with criterion's harness.
criterion_group!(next_epoch_ext, bench);
|
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2016 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT.
// FFI declarations for librdmacm. The `rdma_*` symbols are the connection
// manager API exported by the library; the `rust_rdma_*` symbols are C shim
// wrappers for helpers that librdmacm defines as static inline (and which
// therefore have no linkable symbol of their own).
extern "C"
{
    pub fn rdma_accept(id: *mut rdma_cm_id, conn_param: *mut rdma_conn_param) -> c_int;
    pub fn rdma_bind_addr(id: *mut rdma_cm_id, addr: *mut sockaddr) -> c_int;
    pub fn rdma_connect(id: *mut rdma_cm_id, conn_param: *mut rdma_conn_param) -> c_int;
    pub fn rdma_create_ep(id: *mut *mut rdma_cm_id, res: *mut rdma_addrinfo, pd: *mut ibv_pd, qp_init_attr: *mut ibv_qp_init_attr) -> c_int;
    pub fn rdma_create_qp(id: *mut rdma_cm_id, pd: *mut ibv_pd, qp_init_attr: *mut ibv_qp_init_attr) -> c_int;
    pub fn rdma_create_qp_ex(id: *mut rdma_cm_id, qp_init_attr: *mut ibv_qp_init_attr_ex) -> c_int;
    pub fn rdma_create_srq(id: *mut rdma_cm_id, pd: *mut ibv_pd, attr: *mut ibv_srq_init_attr) -> c_int;
    pub fn rdma_create_srq_ex(id: *mut rdma_cm_id, attr: *mut ibv_srq_init_attr_ex) -> c_int;
    pub fn rdma_destroy_ep(id: *mut rdma_cm_id);
    pub fn rdma_destroy_id(id: *mut rdma_cm_id) -> c_int;
    pub fn rdma_destroy_qp(id: *mut rdma_cm_id);
    pub fn rdma_destroy_srq(id: *mut rdma_cm_id);
    pub fn rdma_disconnect(id: *mut rdma_cm_id) -> c_int;
    pub fn rdma_get_dst_port(id: *mut rdma_cm_id) -> __be16;
    pub fn rdma_get_request(listen: *mut rdma_cm_id, id: *mut *mut rdma_cm_id) -> c_int;
    pub fn rdma_get_src_port(id: *mut rdma_cm_id) -> __be16;
    pub fn rdma_join_multicast(id: *mut rdma_cm_id, addr: *mut sockaddr, context: *mut c_void) -> c_int;
    pub fn rdma_join_multicast_ex(id: *mut rdma_cm_id, mc_join_attr: *mut rdma_cm_join_mc_attr_ex, context: *mut c_void) -> c_int;
    pub fn rdma_leave_multicast(id: *mut rdma_cm_id, addr: *mut sockaddr) -> c_int;
    pub fn rdma_listen(id: *mut rdma_cm_id, backlog: c_int) -> c_int;
    pub fn rdma_migrate_id(id: *mut rdma_cm_id, channel: *mut rdma_event_channel) -> c_int;
    pub fn rdma_notify(id: *mut rdma_cm_id, event: ibv_event_type) -> c_int;
    pub fn rdma_reject(id: *mut rdma_cm_id, private_data: *const c_void, private_data_len: u8) -> c_int;
    pub fn rdma_resolve_addr(id: *mut rdma_cm_id, src_addr: *mut sockaddr, dst_addr: *mut sockaddr, timeout_ms: c_int) -> c_int;
    pub fn rdma_resolve_route(id: *mut rdma_cm_id, timeout_ms: c_int) -> c_int;
    pub fn rdma_set_option(id: *mut rdma_cm_id, level: c_int, optname: c_int, optval: *mut c_void, optlen: usize) -> c_int;
    // Shim wrappers around librdmacm's static-inline helpers start here.
    pub fn rust_rdma_get_local_addr(id: *mut rdma_cm_id) -> *mut sockaddr;
    pub fn rust_rdma_get_peer_addr(id: *mut rdma_cm_id) -> *mut sockaddr;
    pub fn rust_rdma_get_recv_comp(id: *mut rdma_cm_id, wc: *mut ibv_wc) -> c_int;
    pub fn rust_rdma_get_send_comp(id: *mut rdma_cm_id, wc: *mut ibv_wc) -> c_int;
    pub fn rust_rdma_post_read(id: *mut rdma_cm_id, context: *mut c_void, addr: *mut c_void, length: usize, mr: *mut ibv_mr, flags: c_int, remote_addr: u64, rkey: u32) -> c_int;
    pub fn rust_rdma_post_readv(id: *mut rdma_cm_id, context: *mut c_void, sgl: *mut ibv_sge, nsge: c_int, flags: c_int, remote_addr: u64, rkey: u32) -> c_int;
    pub fn rust_rdma_post_recv(id: *mut rdma_cm_id, context: *mut c_void, addr: *mut c_void, length: usize, mr: *mut ibv_mr) -> c_int;
    pub fn rust_rdma_post_recvv(id: *mut rdma_cm_id, context: *mut c_void, sgl: *mut ibv_sge, nsge: c_int) -> c_int;
    pub fn rust_rdma_post_send(id: *mut rdma_cm_id, context: *mut c_void, addr: *mut c_void, length: usize, mr: *mut ibv_mr, flags: c_int) -> c_int;
    pub fn rust_rdma_post_sendv(id: *mut rdma_cm_id, context: *mut c_void, sgl: *mut ibv_sge, nsge: c_int, flags: c_int) -> c_int;
    pub fn rust_rdma_post_ud_send(id: *mut rdma_cm_id, context: *mut c_void, addr: *mut c_void, length: usize, mr: *mut ibv_mr, flags: c_int, ah: *mut ibv_ah, remote_qpn: u32) -> c_int;
    pub fn rust_rdma_post_write(id: *mut rdma_cm_id, context: *mut c_void, addr: *mut c_void, length: usize, mr: *mut ibv_mr, flags: c_int, remote_addr: u64, rkey: u32) -> c_int;
    pub fn rust_rdma_post_writev(id: *mut rdma_cm_id, context: *mut c_void, sgl: *mut ibv_sge, nsge: c_int, flags: c_int, remote_addr: u64, rkey: u32) -> c_int;
    pub fn rust_rdma_reg_msgs(id: *mut rdma_cm_id, addr: *mut c_void, length: usize) -> *mut ibv_mr;
    pub fn rust_rdma_reg_read(id: *mut rdma_cm_id, addr: *mut c_void, length: usize) -> *mut ibv_mr;
    pub fn rust_rdma_reg_write(id: *mut rdma_cm_id, addr: *mut c_void, length: usize) -> *mut ibv_mr;
}
|
use std::collections::HashMap;
use std::env;
use std::fmt;
use std::fs::File;
use std::hash::{Hash, Hasher};
use std::io::{BufRead, BufReader, BufWriter, Write};
use std::path::Path;
/// Location of the glyph-list data file consumed by this build script.
static DATA_FILE: &str = "data/glyphlist-extended.txt";
/// Build script: parses the glyph list and emits a perfect-hash map
/// (`GLYPH_MAP`) from glyph-name bytes to the corresponding `char`,
/// written to `$OUT_DIR/codegen.rs`.
///
/// Panics on any I/O or parse failure — acceptable for a build script,
/// where a panic fails the build with the error message.
fn main() {
    let mut map = HashMap::new();
    let lines = BufReader::new(File::open(DATA_FILE).unwrap()).lines();
    for line_result in lines {
        let line = line_result.unwrap();
        // Skip comment and blank lines.
        if line.starts_with('#') || line.is_empty() {
            continue;
        }
        // Line format: "<name>;<hex codepoint>[ <more codepoints>]".
        // Only the first codepoint after the name is used.
        let parts = line.split(|c| c == ';' || c == ' ').collect::<Vec<_>>();
        let c32 = u32::from_str_radix(parts[1], 16).unwrap();
        // Drop entries that are not valid Unicode scalar values (e.g. surrogates).
        if let Some(c) = std::char::from_u32(c32) {
            // The value is rendered as Rust source for a char literal.
            map.insert(parts[0].to_owned(), format!("'\\u{{{:x}}}'", c as u32));
        }
    }
    let path = Path::new(&env::var("OUT_DIR").unwrap()).join("codegen.rs");
    let mut file = BufWriter::new(File::create(&path).unwrap());
    write!(
        &mut file,
        "static GLYPH_MAP: phf::Map<&'static [u8], char> = "
    )
    .unwrap();
    let mut map_builder = phf_codegen::Map::new();
    for (key, value) in map {
        // GlyphKey's Debug impl renders the key as a byte-string expression.
        map_builder.entry(GlyphKey(key), &value);
    }
    map_builder.build(&mut file).unwrap();
    // writeln! instead of write!(.., ";\n") — same bytes, idiomatic form.
    writeln!(&mut file, ";").unwrap();
}
/// Newtype over a glyph name so it can be hashed and Debug-formatted as raw
/// bytes, matching the `&'static [u8]` key type of the generated `phf::Map`.
#[derive(PartialEq, Eq)]
struct GlyphKey(String);
impl Hash for GlyphKey {
    /// Hash the glyph name via its UTF-8 bytes so this impl agrees with the
    /// byte-oriented `PhfHash` impl for the same type.
    fn hash<H>(&self, hasher: &mut H)
    where
        H: Hasher,
    {
        let bytes = self.0.as_bytes();
        bytes.hash(hasher)
    }
}
impl phf_shared::PhfHash for GlyphKey {
    /// Hash the key as its UTF-8 bytes — the same representation used by the
    /// `Hash` impl and by the `&[u8]` keys of the generated map.
    fn phf_hash<H: Hasher>(&self, state: &mut H) {
        self.0.as_bytes().phf_hash(state)
    }
}
impl fmt::Debug for GlyphKey {
    /// Render the key the way the generated source must spell it: a borrowed
    /// byte-slice expression (`&[...]`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let bytes = self.0.as_bytes();
        write!(f, "&{:?}", bytes)
    }
}
|
// Module layout plus convenience re-exports of the primary entry points.
pub mod builder;
pub mod deserializer;
pub mod serializer;
pub mod transaction;
// Re-exported so callers can write e.g. `crate::deserialize` directly.
pub use self::deserializer::deserialize;
pub use self::serializer::serialize;
pub use self::transaction::Transaction;
|
extern crate chrono;
extern crate mysql;
use self::chrono::UTC;
use self::chrono::offset::TimeZone;
use self::mysql::conn::MyOpts;
use self::mysql::conn::pool::MyPool;
use self::mysql::error::MyResult;
use self::mysql::value::from_row;
use self::mysql::value::Value;
use std::clone::Clone;
use std::default::Default;
use worker::Record;
pub trait RecordRepository {
fn store (&self, record: Record) -> Result<(), RecordRepositoryError>;
fn fetch_record (&self, id: String) -> Result<(Record), RecordRepositoryError>;
fn fetch_limit (&self, size: u32, offset: u32) -> Result<(Vec<Record>), RecordRepositoryError>;
}
/// Failure modes for `RecordRepository` operations.
#[derive(Debug)]
pub enum RecordRepositoryError {
    // INSERT could not be prepared or executed
    CannotStoreRecord,
    // SELECT could not be prepared
    CannotFetchRecord,
    // result rows could not be read/converted into Records
    CannotDenormalizeRecord,
    // fetch_record matched zero or multiple rows
    RecordNotFound
}
/// MySQL connection settings, decodable from configuration via rustc_serialize.
#[derive(Debug, Clone, RustcDecodable, RustcEncodable)]
pub struct MysqlConfig {
    // host (and optional port) of the MySQL server
    address: String,
    username: String,
    password: String,
    database: String
}
impl MysqlConfig {
    /// Build a `MysqlRepository` backed by a connection pool using this config.
    ///
    /// Panics if the pool cannot be created (e.g. the server is unreachable).
    pub fn to_connection (&self) -> MysqlRepository {
        let opts = MyOpts {
            tcp_addr: Some(self.address.clone()),
            user: Some(self.username.clone()),
            pass: Some(self.password.clone()),
            // clone() for consistency with the other fields; `.to_string()`
            // on a String re-formats it through Display for the same result.
            db_name: Some(self.database.clone()),
            ..Default::default()
        };
        MysqlRepository::new(MyPool::new(opts).unwrap())
    }
}
/// `RecordRepository` implementation backed by a pooled MySQL connection.
#[derive(Clone, Debug)]
pub struct MysqlRepository {
    pool: MyPool
}
impl MysqlRepository {
    /// Wrap an existing connection pool.
    pub fn new (pool: MyPool) -> MysqlRepository {
        MysqlRepository { pool: pool }
    }
    /// Convert a raw result row into a `Record`.
    ///
    /// Panics if the row is an error, a column has an unexpected type, or a
    /// timestamp column does not match `%Y-%m-%d %H:%M:%S`.
    fn row_to_record (&self, row: MyResult<Vec<Value>>) -> Record {
        let (id, command, cwd, status, stderr, stdout, started_at_col, finished_at_col) = from_row::<(String, String, String, i32, String, String, String, String)>(row.unwrap());
        let started_at = UTC.datetime_from_str(&started_at_col, "%Y-%m-%d %H:%M:%S").unwrap();
        let finished_at = UTC.datetime_from_str(&finished_at_col, "%Y-%m-%d %H:%M:%S").unwrap();
        // `id` comes back as HEX(id), i.e. already in the storage-optimized
        // ordering, so it is wrapped directly (no `from_uuid` reordering) and
        // `to_uuid()` restores the canonical dashed form. `id` is already a
        // String — the previous `id.to_string()` was a redundant reallocation.
        let optimized_uuid = MysqlOptimizedUuid { uuid: id };
        Record {
            id: optimized_uuid.to_uuid(),
            command: command,
            cwd: cwd,
            status: status,
            stderr: stderr,
            stdout: stdout,
            started_at: started_at,
            finished_at: finished_at
        }
    }
}
/// A UUID string stored in the index-friendly byte ordering described by the
/// Percona "store UUID in an optimized way" scheme: the character groups of
/// the canonical dashed form are reordered and the dashes dropped.
#[derive(Clone, Debug)]
pub struct MysqlOptimizedUuid {
    uuid: String
}
impl MysqlOptimizedUuid {
    /// Reorder a canonical dashed UUID into the optimized layout.
    /// See https://www.percona.com/blog/2014/12/19/store-uuid-optimized-way/
    pub fn from_uuid (uuid: String) -> MysqlOptimizedUuid {
        // Groups of the dashed form sit at fixed offsets; emit them in the
        // reordered sequence third-second-first-fourth-fifth.
        let reordered = format!(
            "{}{}{}{}{}",
            &uuid[14..18],
            &uuid[9..13],
            &uuid[0..8],
            &uuid[19..23],
            &uuid[24..],
        );
        MysqlOptimizedUuid { uuid: reordered }
    }
    /// Invert `from_uuid`, restoring the canonical dashed representation.
    pub fn to_uuid (&self) -> String {
        let s = &self.uuid;
        format!(
            "{}-{}-{}-{}-{}",
            &s[8..16],
            &s[4..8],
            &s[0..4],
            &s[16..20],
            &s[20..],
        )
    }
}
impl RecordRepository for MysqlRepository {
    /// Insert `record` into the `results` table, storing the UUID as raw
    /// bytes in its index-optimized ordering (see `MysqlOptimizedUuid`).
    fn store (&self, record: Record) -> Result<(), RecordRepositoryError> {
        let uuid_optimized = MysqlOptimizedUuid::from_uuid(record.id.clone());
        // UNHEX(?) converts the optimized hex string back into raw bytes for the id column
        let query = r"INSERT INTO results (id, command, cwd, status, stderr, stdout, started_at, finished_at) VALUES (UNHEX(?), ?, ?, ?, ?, ?, ?, ?)";
        let mut stmt = match self.pool.prepare(query) {
            Ok(s) => s,
            Err(_) => return Err(RecordRepositoryError::CannotStoreRecord)
        };
        // NOTE(review): `uuid_optimized.clone().uuid` clones the whole struct
        // just to move out the String — a plain field clone would suffice.
        let result = match stmt.execute(
            (uuid_optimized.clone().uuid, record.command, record.cwd, record.status, record.stderr,
                record.stdout, record.started_at.format("%Y-%m-%d %H:%M:%S").to_string(), record.finished_at.format("%Y-%m-%d %H:%M:%S").to_string()
            )
        ) {
            Ok(_) => Ok(()),
            Err(err) => {
                error!("[{:?}] error storing in mysql {:?}", uuid_optimized.clone().uuid, err);
                return Err(RecordRepositoryError::CannotStoreRecord);
            }
        };
        result
    }
    /// Fetch up to `size` records ordered newest-first (by started_at).
    ///
    /// NOTE(review): the second parameter is named `limit` here but `offset`
    /// in the trait, and it is bound to the OFFSET placeholder — consider
    /// renaming for clarity.
    fn fetch_limit (&self, size: u32, limit: u32) -> Result<(Vec<Record>), RecordRepositoryError> {
        let query = r"SELECT HEX(id) AS id, command, cwd, status, stderr, stdout, CAST(started_at AS char) AS started_at, CAST(finished_at AS char) AS finished_at FROM results ORDER BY started_at DESC LIMIT ? OFFSET ?";
        let mut stmt = match self.pool.prepare(query) {
            Ok(s) => s,
            Err(_) => return Err(RecordRepositoryError::CannotFetchRecord)
        };
        // Execute and eagerly convert every row; any execution error maps to
        // CannotDenormalizeRecord (row conversion itself panics on bad data).
        let results: Result<(Vec<Record>), RecordRepositoryError> = match stmt
            .execute((size, limit))
            .map(|result| {
                result.map(|row| {
                    self.row_to_record(row)
                }).collect()
            })
        {
            Ok(records) => Ok(records),
            Err(err) => {
                error!("error fetching from mysql {:?}", err);
                return Err(RecordRepositoryError::CannotDenormalizeRecord)
            }
        };
        results
    }
    /// Fetch a single record by its canonical UUID string.
    ///
    /// NOTE(review): MySQL's HEX() yields uppercase digits; the comparison
    /// only matches if the optimized uuid string produced by `from_uuid` has
    /// the same case — verify against the values actually stored.
    fn fetch_record (&self, id: String) -> Result<(Record), RecordRepositoryError> {
        let uuid_optimized = MysqlOptimizedUuid::from_uuid(id.clone());
        let query = r"SELECT HEX(id) AS id, command, cwd, status, stderr, stdout, CAST(started_at AS char) AS started_at, CAST(finished_at AS char) AS finished_at FROM results WHERE HEX(id) = ?";
        let mut stmt = match self.pool.prepare(query) {
            Ok(s) => s,
            Err(_) => return Err(RecordRepositoryError::CannotFetchRecord)
        };
        let results: Result<(Vec<Record>), RecordRepositoryError> = match stmt
            .execute((uuid_optimized.uuid, ))
            .map(|result| {
                result.map(|row| {
                    self.row_to_record(row)
                }).collect()
            })
        {
            Ok(records) => Ok(records),
            Err(err) => {
                error!("error fetching from mysql {:?}", err);
                return Err(RecordRepositoryError::CannotDenormalizeRecord)
            }
        };
        // unwrap is safe here: the Err case already returned above.
        let records: Vec<Record> = results.unwrap();
        // Exactly one row is a hit; zero (or unexpectedly many) is NotFound.
        let result: Result<(Record), RecordRepositoryError> = match records.len() {
            1 => {
                Ok(records[0].clone())
            },
            _ => return Err(RecordRepositoryError::RecordNotFound)
        };
        result
    }
}
#[cfg(test)]
mod tests {
    use super::MysqlOptimizedUuid;
    /// Round-trip check: `from_uuid` reorders the dashed groups into the
    /// optimized layout and `to_uuid` restores the canonical form exactly.
    #[test]
    fn optimized_uuid() {
        let uuid = String::from("58e0a7d7-eebc-11d8-9669-0800200c9a66");
        let optimized_uuid = MysqlOptimizedUuid::from_uuid(uuid);
        assert_eq!("11d8eebc58e0a7d796690800200c9a66", optimized_uuid.uuid);
        assert_eq!("58e0a7d7-eebc-11d8-9669-0800200c9a66", optimized_uuid.to_uuid());
    }
} |
use directory_client::metrics::MixMetric;
use directory_client::requests::metrics_mixes_post::MetricsMixPoster;
use directory_client::DirectoryClient;
use futures::channel::mpsc;
use futures::lock::Mutex;
use futures::StreamExt;
use log::{debug, error};
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
use tokio::runtime::Handle;
use tokio::task::JoinHandle;
/// Events reported by node code and consumed by the metrics receiver task.
pub(crate) enum MetricEvent {
    // something was sent; the String identifies the destination
    Sent(String),
    // something was received
    Received,
}
#[derive(Debug, Clone)]
// Note: you should NEVER create more than a single instance of this using 'new()'.
// You should always use .clone() to create additional instances
/// Cheap-to-clone handle (Arc + async Mutex) to the shared metric counters.
struct MixMetrics {
    inner: Arc<Mutex<MixMetricsInner>>,
}
/// The actual counters guarded by the mutex inside `MixMetrics`.
struct MixMetricsInner {
    // count of Received events since the last reset
    received: u64,
    // per-destination counts of Sent events since the last reset
    sent: HashMap<String, u64>,
}
impl MixMetrics {
    /// Create the single shared metrics instance (share it via `.clone()`).
    pub(crate) fn new() -> Self {
        MixMetrics {
            inner: Arc::new(Mutex::new(MixMetricsInner {
                received: 0,
                sent: HashMap::new(),
            })),
        }
    }
    /// Bump the received-event counter.
    async fn increment_received_metrics(&mut self) {
        let mut unlocked = self.inner.lock().await;
        unlocked.received += 1;
    }
    /// Bump the sent-event counter for `destination`.
    async fn increment_sent_metrics(&mut self, destination: String) {
        let mut unlocked = self.inner.lock().await;
        let receiver_count = unlocked.sent.entry(destination).or_insert(0);
        *receiver_count += 1;
    }
    /// Snapshot and zero both counters under a single lock acquisition,
    /// returning the snapshot.
    async fn acquire_and_reset_metrics(&mut self) -> (u64, HashMap<String, u64>) {
        let mut unlocked = self.inner.lock().await;
        let received = unlocked.received;
        // mem::take swaps in an empty map — the idiomatic form of
        // `mem::replace(&mut .., HashMap::new())`.
        let sent = std::mem::take(&mut unlocked.sent);
        unlocked.received = 0;
        (received, sent)
    }
}
/// Drains `MetricEvent`s from the channel into the shared counters.
struct MetricsReceiver {
    metrics: MixMetrics,
    metrics_rx: mpsc::UnboundedReceiver<MetricEvent>,
}
impl MetricsReceiver {
    /// Pair the shared counters with the channel feeding them.
    fn new(metrics: MixMetrics, metrics_rx: mpsc::UnboundedReceiver<MetricEvent>) -> Self {
        MetricsReceiver {
            metrics,
            metrics_rx,
        }
    }
    /// Spawn a task on `handle` that applies every incoming event to the
    /// shared counters. The task ends once all senders are dropped.
    fn start(mut self, handle: &Handle) -> JoinHandle<()> {
        handle.spawn(async move {
            while let Some(metrics_data) = self.metrics_rx.next().await {
                match metrics_data {
                    MetricEvent::Received => self.metrics.increment_received_metrics().await,
                    MetricEvent::Sent(destination) => {
                        self.metrics.increment_sent_metrics(destination).await
                    }
                }
            }
        })
    }
}
/// Periodically posts a snapshot of the counters to the directory server.
struct MetricsSender {
    metrics: MixMetrics,
    directory_client: directory_client::Client,
    // public key identifying this node in the posted metrics
    pub_key_str: String,
    // target interval between successive metric posts
    sending_delay: Duration,
}
impl MetricsSender {
    /// Build a sender that reports to `directory_server` under this node's
    /// public key every `sending_delay`.
    fn new(
        metrics: MixMetrics,
        directory_server: String,
        pub_key_str: String,
        sending_delay: Duration,
    ) -> Self {
        MetricsSender {
            metrics,
            directory_client: directory_client::Client::new(directory_client::Config::new(
                directory_server,
            )),
            pub_key_str,
            sending_delay,
        }
    }
    /// Spawn the periodic reporting loop on `handle`. The loop never exits.
    fn start(mut self, handle: &Handle) -> JoinHandle<()> {
        handle.spawn(async move {
            loop {
                // set the deadline in the future *before* doing the work, so the
                // loop period includes the time spent snapshotting and posting
                let sending_delay = tokio::time::delay_for(self.sending_delay);
                let (received, sent) = self.metrics.acquire_and_reset_metrics().await;
                // NOTE(review): `post` appears to be a synchronous call made
                // inside an async task — confirm it cannot stall the runtime.
                match self.directory_client.metrics_post.post(&MixMetric {
                    pub_key: self.pub_key_str.clone(),
                    received,
                    sent,
                }) {
                    Err(err) => error!("failed to send metrics - {:?}", err),
                    Ok(_) => debug!("sent metrics information"),
                }
                // wait for however much is left
                sending_delay.await;
            }
        })
    }
}
#[derive(Clone)]
/// Cloneable handle used by node code to report metric events.
pub struct MetricsReporter {
    metrics_tx: mpsc::UnboundedSender<MetricEvent>,
}
impl MetricsReporter {
    /// Wrap the sending half of the metrics channel.
    pub(crate) fn new(metrics_tx: mpsc::UnboundedSender<MetricEvent>) -> Self {
        MetricsReporter { metrics_tx }
    }
    /// Record that something was sent to `destination`.
    pub(crate) fn report_sent(&self, destination: String) {
        // if unbounded_send() failed it means that the receiver channel was disconnected
        // and hence something weird must have happened without a way of recovering
        self.metrics_tx
            .unbounded_send(MetricEvent::Sent(destination))
            .unwrap()
    }
    /// Record that something was received.
    pub(crate) fn report_received(&self) {
        // if unbounded_send() failed it means that the receiver channel was disconnected
        // and hence something weird must have happened without a way of recovering
        self.metrics_tx
            .unbounded_send(MetricEvent::Received)
            .unwrap()
    }
}
// basically an easy single entry point to start all metrics related tasks
/// Owns the receiver/sender tasks and the reporter handle until `start()`.
pub struct MetricsController {
    receiver: MetricsReceiver,
    reporter: MetricsReporter,
    sender: MetricsSender,
}
impl MetricsController {
    /// Wire up the event channel, the shared counters, and the two tasks.
    pub(crate) fn new(
        directory_server: String,
        pub_key_str: String,
        sending_delay: Duration,
    ) -> Self {
        let (metrics_tx, metrics_rx) = mpsc::unbounded();
        let shared_metrics = MixMetrics::new();
        MetricsController {
            sender: MetricsSender::new(
                shared_metrics.clone(),
                directory_server,
                pub_key_str,
                sending_delay,
            ),
            receiver: MetricsReceiver::new(shared_metrics, metrics_rx),
            reporter: MetricsReporter::new(metrics_tx),
        }
    }
    // reporter is how node is going to be accessing the metrics data
    /// Start both background tasks on `handle` and hand back the reporter.
    pub(crate) fn start(self, handle: &Handle) -> MetricsReporter {
        // TODO: should we do anything with JoinHandle(s) returned by start methods?
        self.receiver.start(handle);
        self.sender.start(handle);
        self.reporter
    }
}
|
use std::collections::HashMap;
use kite::{Document, Term, TermRef};
use kite::schema::FieldRef;
use byteorder::{BigEndian, WriteBytesExt};
use key_builder::KeyBuilder;
#[derive(Debug)]
/// Accumulates one segment's worth of indexed data in memory before it is
/// persisted. Doc ids are u16 and local to this segment.
pub struct SegmentBuilder {
    // next doc id to assign (== number of documents added so far)
    current_doc: u16,
    pub term_dictionary: HashMap<Term, TermRef>,
    // next ordinal to hand out for a newly-seen term
    current_term_ref: u32,
    // (field, term) -> doc ids containing that term in that field
    pub term_directories: HashMap<(FieldRef, TermRef), Vec<u16>>,
    // raw stat-name bytes -> counter value
    pub statistics: HashMap<Vec<u8>, i64>,
    // (field, doc, value-type key) -> encoded value bytes
    pub stored_field_values: HashMap<(FieldRef, u16, Vec<u8>), Vec<u8>>,
}
/// Reasons `SegmentBuilder::add_document` can fail.
#[derive(Debug)]
pub enum DocumentInsertError {
    /// Segment couldn't hold any more docs
    SegmentFull,
}
impl SegmentBuilder {
    /// Create an empty segment builder with no terms or documents.
    pub fn new() -> SegmentBuilder {
        SegmentBuilder {
            current_doc: 0,
            term_dictionary: HashMap::new(),
            current_term_ref: 0,
            term_directories: HashMap::new(),
            statistics: HashMap::new(),
            stored_field_values: HashMap::new(),
        }
    }
    /// Return the `TermRef` for `term`, allocating a fresh ordinal on first use.
    fn get_term_ref(&mut self, term: &Term) -> TermRef {
        if let Some(term_ref) = self.term_dictionary.get(term) {
            return *term_ref;
        }
        // Add the term to the dictionary
        let term_ref = TermRef::new(self.current_term_ref);
        self.current_term_ref += 1;
        self.term_dictionary.insert(term.clone(), term_ref);
        term_ref
    }
    // TODO: Need to translate field names to field refs and terms to term refs
    /// Add `doc` to the segment, returning its segment-local doc id.
    ///
    /// Returns `Err(DocumentInsertError::SegmentFull)` once the u16 doc-id
    /// space is exhausted — and does so *before* mutating any segment state.
    pub fn add_document(&mut self, doc: &Document) -> Result<u16, DocumentInsertError> {
        // Get document ord.
        // BUGFIX: overflow must be checked before incrementing. The previous
        // code did `self.current_doc += 1` first — panicking in debug builds
        // (or wrapping in release) on overflow — and only then evaluated the
        // discarded `checked_add` guard, after state had been mutated.
        let doc_id = self.current_doc;
        self.current_doc = try!(self.current_doc.checked_add(1).ok_or(DocumentInsertError::SegmentFull));
        // Insert indexed fields. `term_frequencies` is reused across fields
        // (it is fully drained at the end of each field).
        let mut term_frequencies = HashMap::new();
        for (field, tokens) in doc.indexed_fields.iter() {
            let mut field_token_count = 0;
            for token in tokens.iter() {
                field_token_count += 1;
                // Get term ref
                let term_ref = self.get_term_ref(&token.term);
                // Term frequency
                let term_frequency = term_frequencies.entry(term_ref).or_insert(0);
                *term_frequency += 1;
                // Write directory list
                self.term_directories.entry((*field, term_ref)).or_insert_with(Vec::new).push(doc_id);
            }
            // Term frequencies
            for (term_ref, frequency) in term_frequencies.drain() {
                // Write term frequency
                // 1 is by far the most common frequency. At search time, we interpret a missing
                // key as meaning there is a term frequency of 1
                if frequency != 1 {
                    let mut value_type = vec![b't', b'f'];
                    value_type.extend(term_ref.ord().to_string().as_bytes());
                    let mut frequency_bytes: Vec<u8> = Vec::new();
                    frequency_bytes.write_i64::<BigEndian>(frequency).unwrap();
                    self.stored_field_values.insert((*field, doc_id, value_type), frequency_bytes);
                }
                // Increment term document frequency
                let stat_name = KeyBuilder::segment_stat_term_doc_frequency_stat_name(field.ord(), term_ref.ord());
                let stat = self.statistics.entry(stat_name).or_insert(0);
                *stat += 1;
            }
            // Field length, used by the BM25 similarity model.
            // Compressed into one byte (sqrt-scaled, capped at 255); a length
            // of 0 is implicit and not stored.
            let length = ((field_token_count as f64).sqrt() - 1.0) * 3.0;
            let length = if length > 255.0 { 255.0 } else { length } as u8;
            if length != 0 {
                self.stored_field_values.insert((*field, doc_id, b"len".to_vec()), vec![length]);
            }
            // Increment total field docs
            {
                let stat_name = KeyBuilder::segment_stat_total_field_docs_stat_name(field.ord());
                let stat = self.statistics.entry(stat_name).or_insert(0);
                *stat += 1;
            }
            // Increment total field tokens
            {
                let stat_name = KeyBuilder::segment_stat_total_field_tokens_stat_name(field.ord());
                let stat = self.statistics.entry(stat_name).or_insert(0);
                *stat += field_token_count;
            }
        }
        // Insert stored fields
        for (field, value) in doc.stored_fields.iter() {
            self.stored_field_values.insert((*field, doc_id, b"val".to_vec()), value.to_bytes());
        }
        // Increment total docs
        {
            let stat = self.statistics.entry(b"total_docs".to_vec()).or_insert(0);
            *stat += 1;
        }
        Ok(doc_id)
    }
}
|
use std::cmp;
use card::Card;
use types;
use calculator::utility;
/// Detect a full house: three of a kind plus a pair of a different rank.
///
/// Returns `Combination::FullHouse(three_rank, pair_rank)` picking the
/// highest possible triple (a demoted lower triple may serve as the pair),
/// or `None` when no full house is present.
pub fn test(cards: Vec<Card>) -> Option<types::Combination> {
    if cards.len() < 4 {
        return None;
    }
    let counts = utility::get_count_hash_map(&cards[..]);
    let mut best_three: Option<types::Rank> = None;
    let mut best_pair: Option<types::Rank> = None;
    for (&rank, &count) in &counts {
        let mut candidate = rank;
        if count > 2 {
            match best_three {
                Some(existing) => {
                    // Keep the higher triple; the lower one falls through
                    // below as a candidate for the pair half.
                    best_three = Some(cmp::max(existing, candidate));
                    candidate = cmp::min(existing, candidate);
                }
                None => {
                    best_three = Some(candidate);
                    continue; // nothing left over to offer as a pair
                }
            }
        }
        if count > 1 {
            // Track the highest rank available as the pair.
            best_pair = Some(match best_pair {
                Some(existing) if existing >= candidate => existing,
                _ => candidate,
            });
        }
    }
    match (best_three, best_pair) {
        (Some(three), Some(pair)) => Some(types::Combination::FullHouse(three, pair)),
        _ => None,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Shorthand constructor for the fixtures below.
    fn card(rank: types::Rank, suit: types::Suit) -> Card {
        Card { rank: rank, suit: suit }
    }
    #[test]
    fn none_for_empty() {
        assert_eq!(None, test(vec![]));
    }
    #[test]
    fn none_for_four_cards() {
        // Three twos and one three: not enough cards for triple + pair.
        let cards = vec![
            card(types::Rank::Two, types::Suit::Spades),
            card(types::Rank::Two, types::Suit::Spades),
            card(types::Rank::Two, types::Suit::Spades),
            card(types::Rank::Three, types::Suit::Diamonds),
        ];
        assert_eq!(None, test(cards));
    }
    #[test]
    fn none_for_five_duplcate_cards() {
        // Five of a single rank: a triple but no second rank for the pair.
        let cards = vec![
            card(types::Rank::Two, types::Suit::Spades),
            card(types::Rank::Two, types::Suit::Spades),
            card(types::Rank::Two, types::Suit::Spades),
            card(types::Rank::Two, types::Suit::Spades),
            card(types::Rank::Two, types::Suit::Spades),
        ];
        assert_eq!(None, test(cards));
    }
    #[test]
    fn option_for_three_and_two_duplcate_cards() {
        let cards = vec![
            card(types::Rank::Two, types::Suit::Spades),
            card(types::Rank::Two, types::Suit::Spades),
            card(types::Rank::Two, types::Suit::Spades),
            card(types::Rank::Three, types::Suit::Diamonds),
            card(types::Rank::Three, types::Suit::Diamonds),
        ];
        assert_eq!(
            Some(types::Combination::FullHouse(
                types::Rank::Two,
                types::Rank::Three
            )),
            test(cards)
        );
    }
    #[test]
    fn option_for_fullhouse_with_three_twos_and_two_jacks() {
        // Unsorted input with a stray three mixed in.
        let cards = vec![
            card(types::Rank::Jack, types::Suit::Spades),
            card(types::Rank::Two, types::Suit::Diamonds),
            card(types::Rank::Three, types::Suit::Spades),
            card(types::Rank::Jack, types::Suit::Diamonds),
            card(types::Rank::Two, types::Suit::Hearts),
            card(types::Rank::Two, types::Suit::Spades),
        ];
        assert_eq!(
            Some(types::Combination::FullHouse(
                types::Rank::Two,
                types::Rank::Jack
            )),
            test(cards)
        );
    }
    #[test]
    fn option_for_fullhouse_with_three_jacks_and_two_twos() {
        let cards = vec![
            card(types::Rank::Jack, types::Suit::Spades),
            card(types::Rank::Jack, types::Suit::Diamonds),
            card(types::Rank::Three, types::Suit::Spades),
            card(types::Rank::Jack, types::Suit::Diamonds),
            card(types::Rank::Two, types::Suit::Hearts),
            card(types::Rank::Two, types::Suit::Spades),
        ];
        assert_eq!(
            Some(types::Combination::FullHouse(
                types::Rank::Jack,
                types::Rank::Two
            )),
            test(cards)
        );
    }
}
|
use crate::heapfile::HeapFile;
use crate::heapfileiter::HeapFileIterator;
use crate::page::Page;
use common::ids::{ContainerId, PageId, Permissions, TransactionId, ValueId};
use common::storage_trait::StorageTrait;
use common::testutil::gen_random_dir;
use common::{CrustyError, PAGE_SIZE};
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
use std::sync::atomic::Ordering;
use std::sync::{Arc, RwLock};
/// The StorageManager struct
pub struct StorageManager {
    /// Path to database metadata files.
    pub storage_path: String,
    // true for test instances; temporary files are removed on shutdown
    is_temp: bool,
    /* Lock to a vector of heapfile structs */
    heapfiles_lock: Arc<RwLock<Vec<Arc<HeapFile>>>>,
}
/// The required functions in HeapStore's StorageManager that are specific for HeapFiles
impl StorageManager {
/// Get a page if exists for a given container.
pub(crate) fn get_page(
&self,
container_id: ContainerId,
page_id: PageId,
_tid: TransactionId,
_perm: Permissions,
_pin: bool,
) -> Option<Page> {
/* Get a pointer to the heapfiles vector */
let heapfiles = self.heapfiles_lock.clone().read().unwrap().clone();
/* Try to read the page from the heapfile given the provided page_id */
match self.lookup_hf(heapfiles, container_id).clone().read_page_from_file(page_id)
{
/* read_page_from_file succeeded: return the page wrapped in an option */
Ok(page) => return Some(page),
/* read_page_from_file failed: return None */
Err(error) => return None
}
}
/// Write a page
pub(crate) fn write_page(
&self,
container_id: ContainerId,
page: Page,
_tid: TransactionId,
) -> Result<(), CrustyError> {
/* Get a pointer to the heapfiles vector */
let heapfiles = self.heapfiles_lock.clone().write().unwrap().clone();
/* Try to write the page to the heapfile */
match self.lookup_hf(heapfiles, container_id).clone().write_page_to_file(page)
{
/* write_page_to_file succeeded: return Ok(()) */
Ok(()) => return Ok(()),
/* write_page_to_file failed: return a CrustyError */
Err(err) => return Err(CrustyError::CrustyError(String::from(
"write_page: could not write page to file")))
}
}
/// Get the number of pages for a container
fn get_num_pages(&self, container_id: ContainerId) -> PageId {
/* Get a pointer to the heapfiles vector */
let heapfiles = self.heapfiles_lock.clone().read().unwrap().clone();
/* Return the number of pages for the heapfile with the proivded container_id */
return self.lookup_hf(heapfiles, container_id).clone().num_pages();
}
// Iterate through all heapfiles in the SM to find the heapfile with the container_id
// Takes in a valid container_id
// Returns an index to the heapfile
fn lookup_hf(&self, heapfiles: Vec<Arc<HeapFile>>, container_id: ContainerId) -> Arc<HeapFile> {
/* Loop through the heapfiles vector to find the heapfile with the given container_id */
for i in 0..heapfiles.len()
{
/* Clone the heapfiles[i] pointer */
let heapfile = heapfiles[i].clone();
/* Return the heapfile if has the required ContainerId */
if HeapFile::container_id(&heapfile) == container_id
{
/* Return a clone of the heapfile */
return heapfile.clone();
}
}
panic!("lookup_hf: Invalid container_id");
}
/// Test utility function for counting reads and writes served by the heap file.
/// Can return 0,0 for invalid container_ids
#[allow(dead_code)]
pub(crate) fn get_hf_read_write_count(&self, container_id: ContainerId) -> (u16, u16) {
/* Get a pointer to the heapfiles vector */
let heapfiles = self.heapfiles_lock.clone().read().unwrap().clone();
/* Get a pointer to the heapfile whose container_id matches the argument container_id */
let heapfile = self.lookup_hf(heapfiles, container_id).clone();
return (0,0); /* TODO */
/*return(
heapfiles[index].clone().read_count.load(Ordering::Relaxed),
heapfiles[index].clone().write_count.load(Ordering::Relaxed)
);*/
}
}
/// Implementation of storage trait
impl StorageTrait for StorageManager {
type ValIterator = HeapFileIterator;
/// Create a new storage manager that will use storage_path as the location to persist data
/// (if the storage manager persists records on disk)
fn new(storage_path: String) -> Self {
let heapfiles_lock = Arc::new(RwLock::new(Vec::new()));
let sm = StorageManager {
storage_path: storage_path,
is_temp: false,
heapfiles_lock: heapfiles_lock
};
return sm;
}
/// Create a new storage manager for testing. If this creates a temporary directory it should be cleaned up
/// when it leaves scope.
fn new_test_sm() -> Self {
let storage_path = gen_random_dir().to_string_lossy().to_string();
let heapfiles_lock = Arc::new(RwLock::new(Vec::new()));
let sm = StorageManager {
storage_path: storage_path,
is_temp: true,
heapfiles_lock: heapfiles_lock
};
return sm;
}
/// Insert some bytes into a container for a particular value (e.g. record).
/// Any validation will be assumed to happen before.
/// Returns the value id associated with the stored value.
/// Function will need to find the first page that can hold the value.
/// A new page may need to be created if no space on existing pages can be found.
fn insert_value(
&self,
container_id: ContainerId,
value: Vec<u8>,
tid: TransactionId,
) -> ValueId {
/* Panic if value contains more than PAGE_SIZE bytes */
if value.len() > PAGE_SIZE {
panic!("Cannot handle inserting a value larger than the page size");
}
/* Create a new value_id struct whose container_id is set to the given container_id */
let mut value_id = ValueId {
container_id: container_id,
segment_id: None,
page_id: None,
slot_id: None
};
/* Get a pointer to the heapfiles vector */
let heapfiles = self.heapfiles_lock.clone().write().unwrap().clone();
/* Get a pointer to the heapfile whose container_id matches the argument container_id */
let heapfile = self.lookup_hf(heapfiles, container_id).clone();
/* Get the number of pages in the heapfile */
let num_pages = heapfile.num_pages();
/* Try to insert the value in a page that already exists */
for page_id in 0..num_pages
{
/* Try to read the page with page_id from the heapfile */
match heapfile.read_page_from_file(page_id)
{
/* Read_page_from_file succeeded */
Ok(mut page) =>
/* Check if we can add value into the page */
match page.add_value(&value)
{
/* add_value succeeded: update value_id and write edited page to file */
Some(slot_id) => {value_id.page_id = Some(page_id);
value_id.slot_id = Some(slot_id);
match heapfile.write_page_to_file(page)
{
Ok(()) => (),
Err(error) => panic!("insert_value: could not write page to file")
};
/* break once we have have space for the value */
break},
/* add_value failed: try adding the value to the next available page */
None => ()
},
/* Read_page_from_file failed */
Err(error) => panic!("insert_value: Invalid page_id")
}
}
/* Add the value to a new page if all existing pages are full */
if value_id.slot_id.is_none()
{
/* Create a page whose page_id will be the number of existing pages in the heapfile */
let mut page = Page::new(num_pages);
/* Try adding the value into this new page */
match page.add_value(&value.clone())
{
/* add_value succeeded: update the value_id and write new page to file*/
Some(slot_id) => {value_id.page_id = Some(num_pages);
value_id.slot_id = Some(slot_id);
match heapfile.write_page_to_file(page)
{
Ok(()) => (),
Err(error) => panic!("insert_value: could not write page to file")
};
},
/* add_value failed: we should never fail when adding a value to a new page */
None => panic!("insert_value: Failed to add value to new page")
}
}
/* Return ValueId information once we have successfully inserted the new value */
return value_id;
}
/// Insert some bytes into a container for vector of values (e.g. record).
/// Any validation will be assumed to happen before.
/// Returns a vector of value ids associated with the stored values.
fn insert_values(
&self,
container_id: ContainerId,
values: Vec<Vec<u8>>,
tid: TransactionId,
) -> Vec<ValueId> {
/* Initialize a new vector that will hold the ValueIds to be returned */
let mut value_ids = Vec::new();
/* Insert each value in the argument vector into the heapfile */
for i in 0..values.len()
{
/* Insert ith value and append the returned ValueId to the result vector */
value_ids.push(self.insert_value(container_id, values[i].clone(), tid));
}
/* Return the vector of returned value_ids */
return value_ids;
}
/// Delete the data for a value. If the valueID is not found it returns Ok() still.
fn delete_value(&self, id: ValueId, tid: TransactionId) -> Result<(), CrustyError> {
/* Get the heapfile with the provided container_id */
let heapfiles = self.heapfiles_lock.clone().write().unwrap().clone();
let heapfile = self.lookup_hf(heapfiles, id.container_id).clone();
/* Make sure that neither page_id nor slot_id are None values */
if id.page_id.is_none() || id.slot_id.is_none()
{
return Err(CrustyError::CrustyError(String::from("ERROR")))
}
/* Try to read the page from file with the provided page_id */
match heapfile.read_page_from_file(id.page_id.unwrap())
{
Ok(mut page) =>
/* Try to delete the value from the page */
match page.delete_value(id.slot_id.unwrap())
{
Some(()) => return Ok(()),
None => Ok(())
},
Err(error) => Ok(())
}
}
/// Updates a value. Returns record ID on update (which may have changed). Error on failure
/// Any process that needs to determine if a value changed will need to compare the return valueId against
/// the sent value.
fn update_value(
&self,
value: Vec<u8>,
id: ValueId,
_tid: TransactionId,
) -> Result<ValueId, CrustyError> {
/* Try to delete a value */
match self.delete_value(id, _tid)
{
/* delete_value succeeded: insert the new value into the heapfile */
Ok(()) => return Ok(self.insert_value(id.container_id, value, _tid)),
/* delete_value failed: return a CrustyError */
Err(error) => return Err(error)
}
}
/// Create a new container to be stored.
fn create_container(&self, container_id: ContainerId) -> Result<(), CrustyError> {
/* Get RwLock to append a heapfile onto the heapfiles vector */
{
/* Get a pointer to the heapfiles vector */
let mut heapfiles = self.heapfiles_lock.write().unwrap();
/* Create a new file_path based on the length of the heapfiles vector */
let mut file_path = PathBuf::new();
let string = format!("{}{}", self.storage_path, heapfiles.len());
file_path.push(string);
let heapfile_result = HeapFile::new(file_path, container_id);
match heapfile_result
{
Ok(heapfile) => heapfiles.push(Arc::new(heapfile)),
Err(error) => return Err(error)
}
}
return Ok(());
}
/// Remove the container and all stored values in the container.
/// If the container is persisted remove the underlying files
fn remove_container(&self, container_id: ContainerId) -> Result<(), CrustyError> {
/* Get a pointer to the heapfiles vector */
let mut heapfiles = self.heapfiles_lock.write().unwrap();
/* Loop through the heapfiles vector */
for i in 0..heapfiles.len(){
/* Return the heapfile whose ContainerId matches container_id */
if HeapFile::container_id(&heapfiles[i]) == container_id
{
heapfiles.remove(i);
return Ok(());
}
}
return Err(CrustyError::CrustyError(String::from("ERROR")))
}
/// Get an iterator that returns all valid records
fn get_iterator(
&self,
container_id: ContainerId,
tid: TransactionId,
_perm: Permissions,
) -> Self::ValIterator {
/* Get the heapfile pointer given the provided container_id */
let heapfiles = self.heapfiles_lock.clone().write().unwrap().clone();
let heapfile = self.lookup_hf(heapfiles, container_id).clone();
/* Return a new iterator */
return HeapFileIterator::new(container_id, tid, heapfile);
}
/// Get the data for a particular ValueId. Error if does not exists
fn get_value(
&self,
id: ValueId,
tid: TransactionId,
perm: Permissions,
) -> Result<Vec<u8>, CrustyError> {
/* Get the heapfile pointer given the provided container_id */
let heapfiles = self.heapfiles_lock.clone().read().unwrap().clone();
let heapfile = self.lookup_hf(heapfiles, id.container_id).clone();
/* Make sure that neither page_id nor slot_id are None values */
if id.page_id.is_none() || id.slot_id.is_none()
{
return Err(CrustyError::CrustyError(String::from("get_value: ValidId invalid")))
}
/* Try to read a page from heapfile given the page_id */
match heapfile.read_page_from_file(id.page_id.unwrap())
{
/* read_page_from_file succeeded */
Ok(page) =>
match page.get_value(id.slot_id.unwrap())
{
Some(value) => return Ok(value),
None => return Err(CrustyError::CrustyError(String::from("get_value: could not get value from page")))
},
/* read_page_from_file failed */
Err(error) => return Err(CrustyError::CrustyError(String::from("get_value: could not read page from file")))
}
}
/// Notify the storage manager that the transaction is finished so that any held resources can be released.
fn transaction_finished(&self, tid: TransactionId) {
panic!("TODO milestone tm");
}
    /// Testing utility to reset all state associated the storage manager.
    /// If there is a buffer pool it should be reset.
    ///
    /// Not yet implemented: always panics.
    fn reset(&self) {
        panic!("TODO milestone hs");
    }
/// Shutdown the storage manager. Can call drop. Should be safe to call multiple times.
/// If temp, this should remove all stored files.
fn shutdown(&self) {
drop(self);
if self.is_temp == true
{
let heapfiles = self.heapfiles_lock.write().unwrap();
for i in 0..heapfiles.len()
{
let string = format!("{}{}", self.storage_path, i);
match fs::remove_file(string)
{
Ok(()) => (),
Err(error) => panic!("shutdown: could not remove temporary file")
}
}
}
}
}
/// Trait Impl for Drop
impl Drop for StorageManager {
    /// Shutdown the storage manager. Can call be called by shutdown. Should be safe to call multiple times.
    /// If temp, this should remove all stored files.
    fn drop(&mut self) {
        // Intentionally empty for now; cleanup happens in `shutdown`.
        //panic!("TODO milestone hs"); TODO remove this later
    }
}
#[cfg(test)]
#[allow(unused_must_use)]
mod test {
    use super::*;
    use crate::storage_manager::StorageManager;
    use common::storage_trait::StorageTrait;
    use common::testutil::*;
    /// Two small inserts land on page 0 in consecutive slots, and the page
    /// bytes differ between the two snapshots.
    #[test]
    fn hs_sm_a_insert() {
        init();
        let sm = StorageManager::new_test_sm();
        let cid = 1;
        sm.create_container(cid);
        let bytes = get_random_byte_vec(40);
        let tid = TransactionId::new();
        let val1 = sm.insert_value(cid, bytes.clone(), tid);
        assert_eq!(1, sm.get_num_pages(cid));
        assert_eq!(0, val1.page_id.unwrap());
        assert_eq!(0, val1.slot_id.unwrap());
        let p1 = sm
            .get_page(cid, 0, tid, Permissions::ReadOnly, false)
            .unwrap();
        let val2 = sm.insert_value(cid, bytes.clone(), tid);
        assert_eq!(1, sm.get_num_pages(cid));
        assert_eq!(0, val2.page_id.unwrap());
        assert_eq!(1, val2.slot_id.unwrap());
        let p2 = sm
            .get_page(cid, 0, tid, Permissions::ReadOnly, false)
            .unwrap();
        assert_ne!(p1.get_bytes()[..], p2.get_bytes()[..]);
    }
    /// The iterator yields all inserted values in insertion order as the
    /// container grows from one page to several.
    #[test]
    fn hs_sm_b_iter_small() {
        init();
        let sm = StorageManager::new_test_sm();
        let cid = 1;
        sm.create_container(cid);
        let tid = TransactionId::new();
        //Test one page
        let mut byte_vec: Vec<Vec<u8>> = Vec::new();
        byte_vec.push(get_random_byte_vec(400));
        byte_vec.push(get_random_byte_vec(400));
        byte_vec.push(get_random_byte_vec(400));
        for val in &byte_vec {
            {sm.insert_value(cid, val.clone(), tid);}
        }
        let iter = sm.get_iterator(cid, tid, Permissions::ReadOnly);
        for (i, x) in iter.enumerate() {
            assert_eq!(byte_vec[i], x);
        }
        let mut byte_vec2: Vec<Vec<u8>> = Vec::new();
        // Should be on two pages
        byte_vec2.push(get_random_byte_vec(400));
        byte_vec2.push(get_random_byte_vec(400));
        byte_vec2.push(get_random_byte_vec(400));
        byte_vec2.push(get_random_byte_vec(400));
        for val in &byte_vec2 {
            sm.insert_value(cid, val.clone(), tid);
        }
        byte_vec.append(&mut byte_vec2);
        let iter = sm.get_iterator(cid, tid, Permissions::ReadOnly);
        for (i, x) in iter.enumerate() {
            assert_eq!(byte_vec[i], x);
        }
        let mut byte_vec2: Vec<Vec<u8>> = Vec::new();
        // Should be on 3 pages
        byte_vec2.push(get_random_byte_vec(300));
        byte_vec2.push(get_random_byte_vec(500));
        byte_vec2.push(get_random_byte_vec(400));
        for val in &byte_vec2 {
            sm.insert_value(cid, val.clone(), tid);
        }
        byte_vec.append(&mut byte_vec2);
        let iter = sm.get_iterator(cid, tid, Permissions::ReadOnly);
        for (i, x) in iter.enumerate() {
            assert_eq!(byte_vec[i], x);
        }
    }
    /// Bulk-insert 1000 values and verify the iterator returns all of them.
    #[test]
    #[ignore]
    fn hs_sm_b_iter_large() {
        init();
        let sm = StorageManager::new_test_sm();
        let cid = 1;
        sm.create_container(cid).unwrap();
        let tid = TransactionId::new();
        let vals = get_random_vec_of_byte_vec(1000, 40, 400);
        sm.insert_values(cid, vals, tid);
        let mut count = 0;
        for _ in sm.get_iterator(cid, tid, Permissions::ReadOnly) {
            count += 1;
        }
        assert_eq!(1000, count);
    }
}
|
/*
 * Rust pattern (notation) demo.
 * CreatedAt: 2019-07-07
 */
fn main() {
    let numbers = (2, 4, 8, 16, 32);
    // A tuple pattern may use `..` exactly once to skip the middle elements;
    // the commented alternatives show other (and one invalid) placements.
    // (first, ..) => 2
    // (.., last) => 32
    // (.., second, ..) => error: `..` can only be used once per tuple pattern
    let (head, .., tail) = numbers;
    println!("{}, {}", head, tail); // 2, 32
}
|
mod vm;
mod worktype;
pub use vm::*;
pub use worktype::*; |
use std::env;
use std::fs;
use std::io;
use std::os::unix::fs::PermissionsExt;
use std::process;
/// Minimal `chmod`: `argv[1]` is an octal mode, remaining args are paths.
fn main() -> io::Result<()> {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        // `{}` instead of `{:?}` so the program name is not printed quoted.
        eprintln!("{}: no mode given", &args[0]);
        process::exit(1);
    }
    // Reject a malformed mode with a diagnostic instead of panicking.
    let mode: u32 = match u32::from_str_radix(&args[1], 8) {
        Ok(mode) => mode,
        Err(_) => {
            eprintln!("{}: invalid octal mode: {}", &args[0], &args[1]);
            process::exit(1);
        }
    };
    for path in &args[2..] {
        // Read the current permissions, overwrite the mode bits, write back.
        let mut perms = fs::metadata(path)?.permissions();
        perms.set_mode(mode);
        fs::set_permissions(path, perms)?;
    }
    Ok(())
}
|
use async_trait::async_trait;
use uuid::Uuid;
use common::cache::Cache;
use common::error::Error;
use common::infrastructure::cache::InMemCache;
use common::result::Result;
use crate::domain::user::{Email, User, UserId, UserRepository, Username};
use crate::mocks;
/// In-memory `UserRepository` backed by a simple cache; intended for tests.
pub struct InMemUserRepository {
    // user id -> user snapshot
    cache: InMemCache<UserId, User>,
}
impl InMemUserRepository {
    /// Create an empty repository.
    pub fn new() -> Self {
        InMemUserRepository {
            cache: InMemCache::new(),
        }
    }
    /// Create a repository pre-seeded with the standard mock users
    /// (two plain users, two validated users, one admin).
    pub async fn populated() -> Self {
        let repo = Self::new();
        repo.save(&mut mocks::user1()).await.unwrap();
        repo.save(&mut mocks::user2()).await.unwrap();
        repo.save(&mut mocks::validated_user1()).await.unwrap();
        repo.save(&mut mocks::validated_user2()).await.unwrap();
        repo.save(&mut mocks::admin1()).await.unwrap();
        repo
    }
}
impl Default for InMemUserRepository {
    /// Same as `new`: an empty repository.
    fn default() -> Self {
        Self::new()
    }
}
#[async_trait]
impl UserRepository for InMemUserRepository {
async fn next_id(&self) -> Result<UserId> {
let uuid = Uuid::new_v4();
UserId::new(&uuid.to_string())
}
async fn find_all(&self) -> Result<Vec<User>> {
Ok(self.cache.all().await)
}
async fn find_by_id(&self, id: &UserId) -> Result<User> {
self.cache
.get(id)
.await
.ok_or(Error::new("user", "not_found"))
}
async fn find_by_username(&self, username: &Username) -> Result<User> {
self.cache
.find(|(_, user)| user.identity().username().value() == username.value())
.await
.ok_or(Error::new("user", "not_found"))
}
async fn find_by_email(&self, email: &Email) -> Result<User> {
self.cache
.find(|(_, user)| user.identity().email().value() == email.value())
.await
.ok_or(Error::new("user", "not_found"))
}
async fn save(&self, user: &mut User) -> Result<()> {
self.cache.set(user.base().id().clone(), user.clone()).await
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::domain::user::*;
    use crate::mocks;
    /// Fresh ids are long enough to be UUIDs and pairwise distinct.
    #[tokio::test]
    async fn next_id() {
        let repo = InMemUserRepository::new();
        let id1 = repo.next_id().await.unwrap();
        let id2 = repo.next_id().await.unwrap();
        let id3 = repo.next_id().await.unwrap();
        assert!(id1.value().len() > 10);
        assert_ne!(id1, id2);
        assert_ne!(id2, id3);
    }
    /// Saving then looking up by id/username/email round-trips the user;
    /// unknown username/email miss. (A duplicated `assert_eq!` on
    /// `user.base()` was removed.)
    #[tokio::test]
    async fn find_by_id() {
        let repo = InMemUserRepository::new();
        let mut user = mocks::user1();
        user.set_person(Person::new(Fullname::new("Name", "Lastname").unwrap()).unwrap())
            .unwrap();
        repo.save(&mut user).await.unwrap();
        assert!(repo.find_by_id(&user.base().id()).await.is_ok());
        assert!(user.person().is_some());
        let found_user = repo.find_by_id(&user.base().id()).await.unwrap();
        assert_eq!(user.base(), found_user.base());
        let changed_user_person = found_user.person().unwrap();
        assert_eq!(changed_user_person.fullname().name(), "Name");
        assert_eq!(changed_user_person.fullname().lastname(), "Lastname");
        assert!(repo
            .find_by_username(user.identity().username())
            .await
            .is_ok());
        assert!(repo.find_by_email(user.identity().email()).await.is_ok());
        assert!(repo
            .find_by_username(&Username::new("nonexisting").unwrap())
            .await
            .is_err());
        assert!(repo
            .find_by_email(&Email::new("username@asd.com").unwrap())
            .await
            .is_err());
    }
}
|
use termion::event::Key;
use termion::input::TermRead;
use std::sync::mpsc;
use std::time::Duration;
use std::{io, thread};
/// Event delivered to the consumer: an input item or a periodic tick.
pub enum Event<T> {
    /// A key (or other input) read from stdin.
    Input(T),
    /// Periodic tick (sent every 150 ms by the tick thread).
    Continue,
}
/// Input-handling configuration.
pub struct Config {
    // Key that stops the input-reading thread (default: 'q').
    exit_key: Key,
}
impl Default for Config {
    /// Default configuration: exit key is 'q'.
    fn default() -> Config {
        Config {
            exit_key: Key::Char('q'),
        }
    }
}
/// Receiving half of the event channel fed by the background threads.
pub struct Events {
    rx: mpsc::Receiver<Event<Key>>,
}
impl Events {
    /// Build an event source with the default config (exit key 'q').
    pub fn new() -> Events {
        Events::with_config(Config::default())
    }
    /// Spawn two background threads: one forwards keystrokes from stdin,
    /// the other emits a `Continue` tick every 150 ms.
    pub fn with_config(config: Config) -> Events {
        let (tx, rx) = mpsc::channel();
        let tx2 = tx.clone();
        thread::spawn(move || {
            let stdin = io::stdin();
            for evt in stdin.keys() {
                if let Ok(key) = evt {
                    if let Err(err) = tx.send(Event::Input(key)) {
                        eprintln!("{}", err);
                        return;
                    }
                    // The exit key itself is still forwarded above; the thread
                    // just stops reading further input afterwards.
                    if key == config.exit_key {
                        return;
                    }
                }
            }
        });
        thread::spawn(move || loop {
            // Stop ticking once the receiver has been dropped.
            if tx2.send(Event::Continue).is_err() {
                break;
            }
            thread::sleep(Duration::from_millis(150));
        });
        Events { rx }
    }
    /// Block until the next event (input or tick) arrives.
    pub fn next(&self) -> Result<Event<Key>, mpsc::RecvError> {
        self.rx.recv()
    }
}
|
#![no_std]
#![crate_type="lib"]
#![feature(const_fn)]
#![feature(const_mut_refs)]
#![feature(clamp)]
#![feature(test)]
#![feature(const_fn_floating_point_arithmetic)]
pub mod util;
pub mod trig;
pub mod vector;
pub mod matrices;
pub mod prng;
pub mod hasher; |
use crate::WorldGenerator;
use feather_core::anvil::level::SuperflatGeneratorOptions;
use feather_core::biomes::Biome;
use feather_core::blocks::BlockId;
use feather_core::chunk::Chunk;
use feather_core::util::ChunkPosition;
/// World generator producing superflat terrain from superflat options
/// (a list of block layers plus a biome identifier).
pub struct SuperflatWorldGenerator {
    pub options: SuperflatGeneratorOptions,
}
impl WorldGenerator for SuperflatWorldGenerator {
    /// Build a superflat chunk: each configured layer is a slab of one block
    /// type stacked bottom-up, and the whole chunk uses one biome.
    fn generate_chunk(&self, position: ChunkPosition) -> Chunk {
        // Unknown biome identifiers fall back to plains.
        let biome = Biome::from_identifier(self.options.biome.as_str()).unwrap_or(Biome::Plains);
        let mut chunk = Chunk::new_with_default_biome(position, biome);
        let mut y_counter = 0;
        // Borrow the layers instead of cloning the entire options struct on
        // every generated chunk (the clone was pure overhead).
        for layer in &self.options.layers {
            if layer.height == 0 {
                continue;
            }
            if let Some(layer_block) = BlockId::from_identifier(layer.block.as_str()) {
                for y in y_counter..(y_counter + layer.height) {
                    for x in 0..16 {
                        for z in 0..16 {
                            chunk.set_block_at(x as usize, y as usize, z as usize, layer_block);
                        }
                    }
                }
            } else {
                // Unknown block identifier: skip the layer, but still advance
                // y_counter so layers above stay at their configured altitude.
                log::debug!("Failed to generate layer: unknown block {}", layer.block);
            }
            y_counter += layer.height;
        }
        chunk.recalculate_heightmap();
        chunk
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Default superflat layout: bedrock at y=0, dirt at y=1-2, grass at y=3,
    /// air above, with the configured biome applied to every column.
    #[test]
    pub fn test_worldgen_flat() {
        let mut options = SuperflatGeneratorOptions::default();
        dbg!(&options);
        options.biome = Biome::Mountains.identifier().to_string();
        let chunk_pos = ChunkPosition { x: 1, z: 2 };
        let generator = SuperflatWorldGenerator { options };
        let chunk = generator.generate_chunk(chunk_pos);
        assert_eq!(chunk.position(), chunk_pos);
        for x in 0usize..16 {
            for z in 0usize..16 {
                for (y, block) in &[
                    (0usize, BlockId::bedrock()),
                    (1usize, BlockId::dirt()),
                    (2usize, BlockId::dirt()),
                    (3usize, BlockId::grass_block()),
                ] {
                    assert_eq!(chunk.block_at(x, *y, z), *block);
                }
                for y in 4..256 {
                    assert_eq!(
                        chunk.block_at(x as usize, y as usize, z as usize),
                        BlockId::air()
                    );
                }
                assert_eq!(chunk.biome_at(x, z), Biome::Mountains);
            }
        }
    }
}
|
pub mod http;
pub mod utils;
pub mod renderer;
/// Entry point: placeholder that just prints a greeting.
fn main() {
    println!("Hello, world!");
}
|
use std::collections::VecDeque;
use rust_intcode::intcode;
/// End-to-end checks of the `intcode` interpreter against the published
/// Advent of Code 2019 day 2 / day 5 examples.
#[test]
fn test_intcode() {
    { // mem[7] = mem[1] + mem[2]; write(mem[7]);
        let mut memory = vec![ 1, 1, 2, 7, 4, 7, 99, 0];
        let mut itape = VecDeque::new();
        let otape = intcode(&mut memory, &mut itape);
        assert_eq!(memory[7], 3);
        assert_eq!(otape.len(), 1);
        assert_eq!(otape[0], 3);
    }
    { // https://adventofcode.com/2019/day/2 - first test case
        let mut memory = vec![1,9,10,3,2,3,11,0,99,30,40,50];
        let _otape = intcode(&mut memory, &mut VecDeque::new());
        assert_eq!(memory[0], 3500);
    }
    { // https://adventofcode.com/2019/day/2
        let mut memory = vec![1,0,0,0,99];
        let _otape = intcode(&mut memory, &mut VecDeque::new());
        assert_eq!(memory, [2,0,0,0,99]);
    }
    { // https://adventofcode.com/2019/day/2
        let mut memory = vec![2,3,0,3,99];
        let _otape = intcode(&mut memory, &mut VecDeque::new());
        assert_eq!(memory,[2,3,0,6,99] );
    }
    { // https://adventofcode.com/2019/day/2
        let mut memory = vec![2,4,4,5,99,0];
        let _otape = intcode(&mut memory, &mut VecDeque::new());
        assert_eq!(memory, [2,4,4,5,99,9801]);
    }
    { // https://adventofcode.com/2019/day/2
        let mut memory = vec![1,1,1,4,99,5,6,0,99];
        let _otape = intcode(&mut memory, &mut VecDeque::new());
        assert_eq!(memory, [30,1,1,4,2,5,6,0,99]);
    }
    { // https://adventofcode.com/2019/day/5
        let mut memory = vec![3,0,4,0,99];
        let mut itape = VecDeque::from(vec![-25]);
        let otape = intcode(&mut memory, &mut itape);
        assert_eq!(memory[0], -25);
        assert_eq!(otape[0], -25);
    }
    { // https://adventofcode.com/2019/day/5
        let mut memory = vec![1002,4,3,4,33];
        let _otape = intcode(&mut memory, &mut VecDeque::new());
        assert_eq!(memory[4], 99);
    }
    { // https://adventofcode.com/2019/day/5
        let mut memory = vec![1101,100,-1,4, 0];
        let _otape = intcode(&mut memory, &mut VecDeque::new());
        assert_eq!(memory[4], 99);
    }
    { //TODO https://adventofcode.com/2019/day/5 part 2
        let mut memory = vec![3,0,4,0,99];
        let mut itape = VecDeque::from(vec![-25]);
        let otape = intcode(&mut memory, &mut itape);
        assert_eq!(memory[0], -25);
        assert_eq!(otape[0], -25);
    }
} // test intcode
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// SyntheticsPrivateLocationCreationResponse : Object that contains the new private location, the public key for result encryption, and the configuration skeleton.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SyntheticsPrivateLocationCreationResponse {
/// Configuration skeleton for the private location. See installation instructions of the private location on how to use this configuration.
#[serde(rename = "config", skip_serializing_if = "Option::is_none")]
pub config: Option<serde_json::Value>,
#[serde(rename = "private_location", skip_serializing_if = "Option::is_none")]
pub private_location: Option<Box<crate::models::SyntheticsPrivateLocation>>,
#[serde(rename = "result_encryption", skip_serializing_if = "Option::is_none")]
pub result_encryption: Option<Box<crate::models::SyntheticsPrivateLocationCreationResponseResultEncryption>>,
}
impl SyntheticsPrivateLocationCreationResponse {
    /// Object that contains the new private location, the public key for result encryption, and the configuration skeleton.
    ///
    /// All fields start out as `None`.
    pub fn new() -> SyntheticsPrivateLocationCreationResponse {
        SyntheticsPrivateLocationCreationResponse {
            config: None,
            private_location: None,
            result_encryption: None,
        }
    }
}
|
use std::io::{BufWriter, stdin, stdout, Write};
/// Tokenized stdin reader: holds the current line's tokens in reverse order
/// so `pop` hands them out front-to-back.
#[derive(Default)]
struct Scanner {
    buffer: Vec<String>
}
impl Scanner {
    /// Return the next whitespace-separated token parsed as `T`, reading and
    /// buffering additional stdin lines on demand.
    ///
    /// Panics if stdin is closed mid-read or the token does not parse.
    fn next<T: std::str::FromStr>(&mut self) -> T {
        loop {
            if let Some(token) = self.buffer.pop() {
                // `.ok().expect(..)` rather than `.expect(..)` because
                // `T::Err` is not bound to `Debug` here.
                return token.parse().ok().expect("Failed parse");
            }
            let mut input = String::new();
            // Typo fixed in the panic message ("Faild" -> "Failed").
            stdin().read_line(&mut input).expect("Failed read");
            self.buffer = input.split_whitespace().rev().map(String::from).collect();
        }
    }
}
/// For each test case: scan the string with a step counter `d` that toggles
/// 0/1; on every second step the next two characters must be equal (and are
/// consumed together), otherwise the answer is NO.
/// NOTE(review): presumably mirrors a contest statement -- confirm against it.
fn main() {
    let mut scan = Scanner::default();
    let out = &mut BufWriter::new(stdout());
    for _ in 0..(scan.next::<usize>()) {
        let n = scan.next::<usize>();
        let s: Vec<char> = scan.next::<String>().chars().collect();
        let mut flag = true;
        let mut i = 0;
        let mut d = 0;
        while i < s.len() {
            if d == 1 {
                // Need a matching pair here; fail if out of bounds or unequal.
                if i + 1 >= n || s[i + 1] != s[i] {
                    // writeln!(out, "i= {}", i).ok();
                    flag = false;
                    break;
                }
                i += 2;
            } else {
                i += 1;
            }
            d = 1 - d;
        }
        let ans = if flag {
            "YES"
        } else {
            "NO"
        };
        writeln!(out, "{}", ans).ok();
    }
}
use std::error::Error;
use std::io;
use std::collections::HashMap;
use std::process::Command;
use std::str::from_utf8;
use serde::Deserialize;
use serde_json::Value;
use super::Authenticator;
/// One vault item as returned by `bw list items`.
#[derive(Debug, Deserialize)]
struct Item {
    name: Option<String>,
    login: Option<LoginItem>,
    // Any JSON keys not modeled explicitly.
    #[serde(flatten)]
    extra: HashMap<String, Value>
}
/// Login credentials attached to a vault item.
#[derive(Debug, Deserialize)]
struct LoginItem {
    username: Option<String>,
    password: Option<String>,
    // Any JSON keys not modeled explicitly.
    #[serde(flatten)]
    extra: HashMap<String, Value>
}
/// Authenticator backed by the Bitwarden CLI; holds the decrypted item list.
pub struct Bitwarden {
    secrets: Vec<Item>
}
impl Authenticator for Bitwarden {
// read bitwarden output, bitwarden cli must logged in before
fn new(master_password: &str) -> Result<Self, Box<dyn Error>> {
let output = Command::new("bw")
.arg("unlock")
.arg(master_password)
.arg("--raw")
.output()
.expect("bitwarden cli command bw not found!");
if !output.status.success() {
return Err(Box::new(io::Error::new(io::ErrorKind::Other,from_utf8(&output.stderr)?)));
}
let json = Command::new("bw")
.arg("list")
.arg("items")
.arg("--session")
.arg(from_utf8(&output.stdout).unwrap())
.arg("--raw")
.output()
.expect("bitwarden cli command bw not found!");
Ok(Bitwarden {
secrets: serde_json::from_slice(&json.stdout)?
})
}
// get first matching password
fn get(&self, name: &str, username: &str) -> Result<&str, Box<dyn Error>> {
// TODO: fix checks with ? synatx if possible
for item in &self.secrets {
match &item.name {
Some(item_name) => {
// check if hostname appears in bitwarden name
if item_name.to_lowercase().contains(&name.to_lowercase()) {
match &item.login {
Some(item_login) => {
match &item_login.username {
Some(item_username) => {
// check if username appears in bitwarden username
if item_username.contains(&username) {
match &item_login.password {
// password found
Some(item_password) => return Ok(item_password),
None => ()
}
}
},
None => ()
}
},
None => ()
}
}
},
None => ()
}
}
Err(Box::new(io::Error::new(io::ErrorKind::NotFound, "password in bitwarden not found!")))
}
} |
// This stub file contains items which aren't used yet; feel free to remove this module attribute
// to enable stricter warnings.
#![allow(unused)]
const CARS_PRODUCED_PER_HOUR: u32 = 221;
/// Fraction of cars produced without fault at the given line speed (0-10);
/// yield drops as the line speeds up. Panics for speeds above 10.
fn success_rate(speed: u8) -> f64 {
    if speed <= 4 {
        1.00
    } else if speed <= 8 {
        0.90
    } else if speed <= 10 {
        0.77
    } else {
        panic!("Speed must be a value from 0 to 10")
    }
}
/// Cars produced per hour at `speed`, discounted by the success rate.
pub fn production_rate_per_hour(speed: u8) -> f64 {
    let raw_rate = CARS_PRODUCED_PER_HOUR as f64 * speed as f64;
    raw_rate * success_rate(speed)
}
/// Whole working cars produced per minute at `speed`.
pub fn working_items_per_minute(speed: u8) -> u32 {
    (production_rate_per_hour(speed) / 60.0).floor() as u32
}
|
use crate::field::FieldCsv;
use crate::register::{Register, RegisterCsv};
use crate::utils;
use serde::{Deserialize, Serialize};
use std::path;
use svd_parser::svd::peripheral::{Peripheral as SvdPeripheral, PeripheralBuilder};
use svd_parser::svd::AddressBlock as SvdAddressBlock;
/// Usage category of a peripheral address block.
/// NOTE(review): `Resverd` looks like a misspelling of `Reserved`, but its
/// string form is emitted verbatim elsewhere, so renaming the variant would
/// change serialized output -- left as-is.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum Usage {
    Registers,
    Buffer,
    Resverd,
}
impl From<Usage> for String {
    /// Spell the usage variant exactly as downstream consumers expect it.
    fn from(ab: Usage) -> Self {
        let label = match ab {
            Usage::Registers => "Registers",
            Usage::Buffer => "Buffer",
            Usage::Resverd => "Resverd",
        };
        label.to_string()
    }
}
/// One address block of a peripheral: an offset/size window and its usage.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct AddressBlock {
    pub offset: u32,
    pub size: u32,
    pub usage: Usage,
}
/// Field-for-field conversion into the svd-parser address block type.
impl From<AddressBlock> for SvdAddressBlock {
    fn from(ab: AddressBlock) -> Self {
        Self {
            offset: ab.offset,
            size: ab.size,
            // Usage travels via its String form (see `From<Usage> for String`).
            usage: ab.usage.into(),
        }
    }
}
/// One peripheral row from the index CSV: name, begin/end addresses (radix
/// strings) and a description.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct PeripheralCsv {
    pub name: String,
    pub begin: String,
    pub end: String,
    pub description: String,
}
impl PeripheralCsv {
    /// Expand one CSV row into a full `Peripheral`, deriving the address
    /// block size from the inclusive `[begin, end]` range and loading the
    /// registers from `./csvs/<name>.csv`.
    ///
    /// Panics if `begin`/`end` fail to parse, and underflows when
    /// end < begin -- TODO confirm inputs are validated upstream.
    pub fn to_peripheral(self) -> Peripheral {
        let base_address = utils::from_radix_to_u32(&self.begin).unwrap();
        let end_address = utils::from_radix_to_u32(&self.end).unwrap();
        let offset = 0;
        // Inclusive range, hence the +1.
        let size = end_address - base_address + 1;
        let mut p = Peripheral {
            name: self.name,
            version: String::from("0.1"),
            description: self.description,
            base_address,
            address_block: AddressBlock {
                offset,
                size,
                usage: Usage::Registers,
            },
            registers: Vec::new(),
        };
        p.read_csv();
        p
    }
}
/// A peripheral assembled from the CSV inputs, ready for SVD generation.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Peripheral {
    pub name: String,
    pub version: String,
    pub description: String,
    pub base_address: u32,
    pub address_block: AddressBlock,
    pub registers: Vec<Register>,
}
impl Peripheral {
    /// Load this peripheral's registers from `./csvs/<name>.csv`; for each
    /// register, also load its fields from `./csvs/<name>/<REG>.csv` when
    /// that file exists. Panics on missing/invalid CSV input.
    pub fn read_csv(&mut self) {
        let path = String::from("./csvs/") + &self.name + ".csv";
        let mut rdr = csv::Reader::from_path(path).unwrap();
        for r in rdr.deserialize() {
            let record: RegisterCsv = r.unwrap();
            let mut reg = record.to_register();
            let p = format!("./csvs/{}/{}.csv", &self.name, &reg.name.to_uppercase());
            let path = path::Path::new(&p);
            if path.exists() {
                let mut rdr = csv::Reader::from_path(path).unwrap();
                for f in rdr.deserialize() {
                    let field_csv: FieldCsv = f.unwrap();
                    let field = field_csv.to_field();
                    reg.fields.push(field);
                }
            }
            self.registers.push(reg);
        }
    }
    /// Convert to the svd-parser peripheral, converting each register in turn.
    pub fn to_svd(self) -> SvdPeripheral {
        let builder = PeripheralBuilder::default();
        let mut registers = Vec::new();
        for register in self.registers {
            registers.push(register.to_svd())
        }
        builder
            .name(self.name)
            .base_address(self.base_address.into())
            .version(Some(self.version))
            .description(Some(self.description))
            .address_block(Some(self.address_block.into()))
            .registers(Some(registers))
            .build()
            .unwrap()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Smoke test: load the "timer" peripheral's registers from ./csvs and
    /// dump the result (requires the CSV fixtures to be present).
    #[test]
    fn test_csv_de() {
        let mut p = Peripheral {
            name: String::from("timer"),
            version: String::from("1.0"),
            description: String::from(""),
            base_address: 0,
            address_block: AddressBlock {
                offset: 0,
                size: 0,
                usage: Usage::Resverd,
            },
            registers: Vec::new(),
        };
        p.read_csv();
        println!("{:#?}", p);
    }
}
|
use super::{parse_redis_value, Value};
pub(crate) const SLOT_SIZE: usize = 16384;
/// Where a command should be routed in a Redis cluster.
#[derive(Debug, Clone, Copy, PartialEq)]
pub(crate) enum RoutingInfo {
    /// Send to all nodes.
    AllNodes,
    /// Send to all master nodes.
    AllMasters,
    /// Any node will do (keyless command).
    Random,
    /// Send to the node serving this hash slot.
    Slot(u16),
}
/// Borrow the raw bytes of the bulk-data element at `idx`, if present.
fn get_arg(values: &[Value], idx: usize) -> Option<&[u8]> {
    if let Some(Value::Data(ref data)) = values.get(idx) {
        Some(&data[..])
    } else {
        None
    }
}
/// Argument at `idx` upper-cased, for case-insensitive command matching.
fn get_command_arg(values: &[Value], idx: usize) -> Option<Vec<u8>> {
    let arg = get_arg(values, idx)?;
    Some(arg.to_ascii_uppercase())
}
/// Numeric argument at `idx`, parsed from its ASCII text form.
fn get_u64_arg(values: &[Value], idx: usize) -> Option<u64> {
    let bytes = get_arg(values, idx)?;
    let text = std::str::from_utf8(bytes).ok()?;
    text.parse().ok()
}
impl RoutingInfo {
    /// Parse a packed command and derive its routing target; `None` when the
    /// bytes don't parse or the command can't be routed automatically.
    pub fn for_packed_command(cmd: &[u8]) -> Option<RoutingInfo> {
        parse_redis_value(cmd).ok().and_then(RoutingInfo::for_value)
    }
    /// Map a parsed command (a bulk array of arguments) to a routing target.
    pub fn for_value(value: Value) -> Option<RoutingInfo> {
        let args = match value {
            Value::Bulk(args) => args,
            _ => return None,
        };
        match &get_command_arg(&args, 0)?[..] {
            // Must reach every master.
            b"FLUSHALL" | b"FLUSHDB" | b"SCRIPT" => Some(RoutingInfo::AllMasters),
            // Broadcast to every node.
            b"ECHO" | b"CONFIG" | b"CLIENT" | b"SLOWLOG" | b"DBSIZE" | b"LASTSAVE" | b"PING"
            | b"INFO" | b"BGREWRITEAOF" | b"BGSAVE" | b"CLIENT LIST" | b"SAVE" | b"TIME"
            | b"KEYS" => Some(RoutingInfo::AllNodes),
            // Not automatically routable in cluster mode.
            b"SCAN" | b"CLIENT SETNAME" | b"SHUTDOWN" | b"SLAVEOF" | b"REPLICAOF"
            | b"SCRIPT KILL" | b"MOVE" | b"BITOP" => None,
            // EVAL/EVALSHA: route by the first key (arg 3) unless keyless.
            b"EVALSHA" | b"EVAL" => {
                let key_count = get_u64_arg(&args, 2)?;
                if key_count == 0 {
                    Some(RoutingInfo::Random)
                } else {
                    get_arg(&args, 3).and_then(RoutingInfo::for_key)
                }
            }
            b"XGROUP" | b"XINFO" => get_arg(&args, 2).and_then(RoutingInfo::for_key),
            // XREAD[GROUP]: the first stream key follows the STREAMS keyword.
            b"XREAD" | b"XREADGROUP" => {
                let streams_position = args.iter().position(|a| match a {
                    Value::Data(a) => a.eq_ignore_ascii_case(b"STREAMS"),
                    _ => false,
                })?;
                get_arg(&args, streams_position + 1).and_then(RoutingInfo::for_key)
            }
            // Default: route by the first key; keyless commands go anywhere.
            _ => match get_arg(&args, 1) {
                Some(key) => RoutingInfo::for_key(key),
                None => Some(RoutingInfo::Random),
            },
        }
    }
    /// Hash the key (or its `{hashtag}` portion) onto a cluster slot.
    pub fn for_key(key: &[u8]) -> Option<RoutingInfo> {
        let key = match get_hashtag(&key) {
            Some(tag) => tag,
            None => &key,
        };
        Some(RoutingInfo::Slot(
            crc16::State::<crc16::XMODEM>::calculate(key) % SLOT_SIZE as u16,
        ))
    }
}
/// A contiguous range of hash slots and the nodes serving it.
#[derive(Debug)]
pub(crate) struct Slot {
    start: u16,
    end: u16,
    master: String,
    replicas: Vec<String>,
}
impl Slot {
    /// Build a slot-range record from start/end slots, master and replicas.
    pub fn new(s: u16, e: u16, m: String, r: Vec<String>) -> Self {
        Self {
            start: s,
            end: e,
            master: m,
            replicas: r,
        }
    }
    /// First slot of the range.
    pub fn start(&self) -> u16 {
        self.start
    }
    /// Last slot of the range.
    pub fn end(&self) -> u16 {
        self.end
    }
    /// Address of the master serving this range.
    pub fn master(&self) -> &str {
        &self.master
    }
    /// Addresses of the replicas serving this range.
    #[allow(dead_code)]
    pub fn replicas(&self) -> &Vec<String> {
        &self.replicas
    }
}
/// Extract the cluster hashtag from `key`: the bytes between the first `{`
/// and the first `}` that follows it, provided that span is non-empty.
fn get_hashtag(key: &[u8]) -> Option<&[u8]> {
    let open = key.iter().position(|&b| b == b'{')?;
    let close = key[open..].iter().position(|&b| b == b'}')?;
    let tag = &key[open + 1..open + close];
    if tag.is_empty() {
        None
    } else {
        Some(tag)
    }
}
#[cfg(test)]
mod tests {
    use super::{get_hashtag, RoutingInfo};
    use crate::cmd;
    /// Hashtag extraction edge cases, including empty and nested braces.
    #[test]
    fn test_get_hashtag() {
        assert_eq!(get_hashtag(&b"foo{bar}baz"[..]), Some(&b"bar"[..]));
        assert_eq!(get_hashtag(&b"foo{}{baz}"[..]), None);
        assert_eq!(get_hashtag(&b"foo{{bar}}zap"[..]), Some(&b"{bar"[..]));
    }
    /// Routing must be case-insensitive for both the command name and the
    /// STREAMS keyword position.
    #[test]
    fn test_routing_info_mixed_capatalization() {
        let mut upper = cmd("XREAD");
        upper.arg("STREAMS").arg("foo").arg(0);
        let mut lower = cmd("xread");
        lower.arg("streams").arg("foo").arg(0);
        assert_eq!(
            RoutingInfo::for_packed_command(&upper.get_packed_command()).unwrap(),
            RoutingInfo::for_packed_command(&lower.get_packed_command()).unwrap()
        );
        let mut mixed = cmd("xReAd");
        mixed.arg("StReAmS").arg("foo").arg(0);
        assert_eq!(
            RoutingInfo::for_packed_command(&lower.get_packed_command()).unwrap(),
            RoutingInfo::for_packed_command(&mixed.get_packed_command()).unwrap()
        );
    }
}
|
use std::path::{Path,PathBuf};
use std::fmt;
use std::thread;
use store::{Store, Values};
use std::sync::{Arc,Mutex, Condvar};
use std::error::Error;
use std::io::{self, Write};
use yak_client::Datum;
use rusqlite;
extern crate r2d2;
extern crate r2d2_sqlite;
type DatabaseConnection = r2d2::PooledConnection<r2d2_sqlite::SqliteConnectionManager>;
type SeqCVar = Arc<(Mutex<i64>, Condvar)>;
/// Append-only log store backed by a pooled SQLite database; writers notify
/// blocked subscribers through the shared (seq, condvar) pair.
#[derive(Clone)]
pub struct SqliteStore {
    pool: r2d2::Pool<r2d2_sqlite::SqliteConnectionManager>,
    // Latest written sequence number plus a condvar to wake tailing iterators.
    seqnotify: SeqCVar
}
/// Manual `Debug`: the connection pool has no `Debug` impl, so a placeholder
/// is printed instead of the pool contents.
///
/// The bogus `#[automatically_derived]` attribute was removed: it is meant
/// for compiler-generated impls and is misleading on a hand-written one.
impl fmt::Debug for SqliteStore {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "SqliteStore{{ pool: ??? }}")
    }
}
/// Tailing cursor over one space's log; blocks in `fetch_next` until the
/// row at `next_idx` exists.
struct SqliteIterator{
    db: DatabaseConnection,
    space: String,
    next_idx: i64,
    seqnotify: SeqCVar
}
/// Unified error type for storage, pool and I/O failures.
#[derive(Debug)]
pub enum SqliteError {
    SqliteError(rusqlite::SqliteError),
    PoolError(r2d2::GetTimeout),
    IoError(io::Error),
}
impl fmt::Display for SqliteError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self {
&SqliteError::SqliteError(ref err) => write!(fmt, "Store error:{}", err),
&SqliteError::PoolError(ref err) => write!(fmt, "Pool error:{}", err),
&SqliteError::IoError(ref err) => write!(fmt, "IO error:{}", err),
}
}
}
impl Error for SqliteError {
    /// Delegate to the wrapped error's description.
    /// NOTE(review): `description()` is deprecated in modern Rust; kept for
    /// the older toolchain this code targets.
    fn description(&self) -> &str {
        match self {
            &SqliteError::SqliteError(ref mdb) => mdb.description(),
            &SqliteError::PoolError(ref err) => err.description(),
            &SqliteError::IoError(ref err) => err.description(),
        }
    }
}
const EMPTY_SEQ_INIT : i64 = -1;
impl SqliteStore {
    /// Open (or create) `queues.sqlite` under `path`, set up the connection
    /// pool, and ensure the `logs` table exists.
    pub fn new(path: &Path) -> Result<SqliteStore, SqliteError> {
        let mut buf = path.to_path_buf();
        buf.push("queues.sqlite");
        let config = r2d2::Config::builder()
            .error_handler(Box::new(r2d2::LoggingErrorHandler))
            .build();
        let manager = r2d2_sqlite::SqliteConnectionManager::new(&buf.to_string_lossy()).unwrap();
        let pool = r2d2::Pool::new(config, manager).unwrap();
        // Sequence counter starts below 0 so the first write (seq 0) wakes waiters.
        let store = SqliteStore { pool: pool, seqnotify: Arc::new((Mutex::new(EMPTY_SEQ_INIT), Condvar::new())) };
        let mut db = try!(store.open_db());
        try!(db.execute("CREATE TABLE IF NOT EXISTS logs (
            space VARCHAR NOT NULL,
            seq INT NOT NULL,
            key BLOB NOT NULL,
            value BLOB NOT NULL,
            PRIMARY KEY (space, seq)
          )", &[]));
        Ok(store)
    }
    /// Check a pooled connection out of the pool.
    fn open_db(&self) -> Result<DatabaseConnection, SqliteError> {
        let db = try!(self.pool.get());
        Ok(db)
    }
}
impl Store for SqliteStore {
    type Iter = SqliteIterator;
    type Error = SqliteError;
    /// Return every value stored under `space`/`key`.
    fn read(&self, space: &str, key: &[u8]) -> Result<Values, SqliteError> {
        trace!("#read:{:?}", key);
        let db = try!(self.open_db());
        let res = {
            let sql = "SELECT value FROM logs WHERE space = ?1 AND key = ?2";
            let mut stmt = try!(db.prepare(sql));
            trace!("{}@[{:?}, {:?}]", sql, space, key);
            let rows = try!(stmt.query_map(&[&space, &key], |row| {
                let vec = row.get::<Vec<u8>>(0);
                trace!("Row:{:?}", vec);
                vec
            }));
            try!(rows.collect())
        };
        Ok(res)
    }
    /// Append `val` under the next sequence number for `space`, then publish
    /// the new sequence and wake any subscribers waiting on the condvar.
    fn write(&self, space: &str, key: &[u8], val: &[u8]) -> Result<(), SqliteError> {
        trace!("#write: {:?}/{:?}={:?}", space, key, val);
        let db = try!(self.open_db());
        // Next seq = highest existing seq + 1, or 0 for an empty space.
        let sql = "SELECT seq+1 FROM logs WHERE space = ? ORDER BY seq DESC LIMIT 1";
        trace!("{}@[{:?}]", sql, space);
        let mut stmt = try!(db.prepare(sql));
        let idxo = try!(stmt.query_map(&[&space], |r| r.get(0))).next();
        let idx = try!(idxo.unwrap_or(Ok(0)));
        let sql = "INSERT INTO logs (seq, space, key, value) VALUES (?, ?, ?, ?)";
        trace!("{}@[{:?}, {:?}, {:?}, {:?}]", sql, idx, space, key, val);
        try!(db.execute(sql, &[&idx, &space, &key, &val]));
        {
            debug!("Notify of new idx: {}", idx);
            let &(ref lock, ref cvar) = &*self.seqnotify;
            let mut idx_ref = lock.lock().unwrap();
            debug!("Obtained lock! {} → {}", &*idx_ref, idx);
            *idx_ref = idx;
            cvar.notify_all();
        }
        Ok(())
    }
    /// Create an iterator that streams (and tails) `space` from seq 0.
    fn subscribe(&self, space: &str) -> Result<Self::Iter, SqliteError> {
        trace!("#subscribe: {:?}", space);
        let db = try!(self.open_db());
        Ok(SqliteIterator{ db: db, space: space.to_string(), next_idx: 0, seqnotify: self.seqnotify.clone() })
    }
}
impl SqliteIterator {
    /// Fetch the datum at `next_idx` for our space, blocking (with a 1 s
    /// re-check timeout) on the shared condvar until a writer publishes a
    /// sequence number at or past the one we need.
    fn fetch_next(&mut self) -> Result<Option<Datum>, SqliteError> {
        trace!("#fetch_next: {:?}", self);
        loop {
            let sql = "SELECT seq, key, value FROM logs WHERE space = ? AND seq = ? ORDER BY seq ASC /* LIMIT 1 */";
            let mut q = try!(self.db.prepare(sql));
            trace!("{}@[{}, {}]", sql, self.space, self.next_idx);
            let mut results = try!(q.query(&[&self.space, &self.next_idx]));
            while let Some(mut rowp) = results.next() {
                let row = try!(rowp);
                let seq : i64 = row.get::<i64>(0);
                let key = row.get(1);
                let value = row.get(2);
                let datum = Datum { key: key, content: value };
                debug!("Result: @{:?} {:?}", seq, datum);
                // Advance past the row we are about to hand out.
                self.next_idx = seq+1;
                return Ok(Some(datum))
            }
            {
                let &(ref lock, ref cvar) = &*self.seqnotify;
                trace!("Nothing found: @{:?}; waiting", self);
                let mut seq = lock.lock().unwrap();
                trace!("Current seq: {:?}; db seq: {:?};", self.next_idx, *seq);
                while self.next_idx > *seq {
                    // Fixed: the arguments were swapped relative to the message
                    // text ("want next-idx > current" must print next_idx first).
                    trace!("Wait! want next-idx:{:?} > current:{:?}", self.next_idx, &*seq);
                    let (lockp, _no_timeout) = cvar.wait_timeout_ms(seq, 1000).unwrap();
                    seq = lockp;
                    trace!("Awoken! current:{:?}; expected next-idx:{:?}; timeout? {:?}", &*seq, self.next_idx, _no_timeout);
                }
            }
        }
    }
}
impl Iterator for SqliteIterator {
    type Item = Datum;
    /// Block until the next datum is available; panics on storage errors.
    fn next(&mut self) -> Option<Self::Item> {
        trace!("Iterator#next {:?}", self);
        self.fetch_next().unwrap()
    }
}
impl fmt::Debug for SqliteIterator {
    /// Manual Debug: the pooled connection has no Debug impl and is elided.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "SqliteIterator{{ db: <db>, next_idx:{:?}, space:{:?} }}",
           &self.next_idx, &self.space)
    }
}
// Conversions so `try!` can lift each underlying failure into SqliteError.
impl From<rusqlite::SqliteError> for SqliteError {
    fn from(err: rusqlite::SqliteError) -> SqliteError {
        SqliteError::SqliteError(err)
    }
}
impl From<r2d2::GetTimeout> for SqliteError {
    fn from(err: r2d2::GetTimeout) -> SqliteError {
        SqliteError::PoolError(err)
    }
}
impl From<io::Error> for SqliteError {
    fn from(err: io::Error) -> SqliteError {
        SqliteError::IoError(err)
    }
}
#[cfg(test)]
mod test {
    use super::SqliteStore;
    use store::test::TestableStore;
    use quickcheck::TestResult;
    use rand::Rng;
    use std::path::PathBuf;
    use std::fs;
    impl TestableStore for SqliteStore {
        /// Build a store rooted in a fresh random directory under target/.
        fn build() -> SqliteStore {
            let mut rng = ::rand::thread_rng();
            let p = PathBuf::from(format!("target/sqlite3_store/{}", rng.gen_ascii_chars().take(16).collect::<String>()));
            fs::create_dir_all(&p).unwrap();
            SqliteStore::new(&p).unwrap()
        }
    }
    build_store_tests!(SqliteStore);
}
|
use proconio::input;
fn main() {
    // Read four integers and print a*d - b*c (the 2x2 determinant form).
    input! {
        a: i32,
        b: i32,
        c: i32,
        d: i32,
    };
    println!("{}", a * d - b * c);
}
|
use std::io::Read;
/// Read the whole of stdin and print `solve`'s answer.
fn main() {
    let mut stdin = std::io::stdin();
    let mut buf = String::new();
    stdin.read_to_string(&mut buf).unwrap();
    let out = solve(&buf);
    println!("{out}");
}
/// Count matching pairs among the `k` listed numbers, then add
/// `(2 * n - k) % n` for the unseen ones.
///
/// NOTE(review): the `unseen % n` term looks suspect -- working the first
/// embedded test case by hand gives 2 seen pairs + (13 % 9) = 6, not the
/// asserted "2". Verify the formula against the original problem statement.
fn solve(input: &str) -> String {
    let mut lines = input
        .lines()
        .map(|l| {
            l.split_whitespace()
                .map(|x| x.parse::<i64>().unwrap())
                .collect::<Vec<_>>()
        })
        .collect::<Vec<_>>();
    // The last line holds the observed numbers; sort so equal values are adjacent.
    let mut numbers = lines.pop().unwrap();
    numbers.sort_unstable();
    let n = lines[0][0];
    let k = lines[0][1];
    // 2n items in total, k of them observed.
    let unseen = n * 2 - k;
    let mut last = i64::MIN;
    let mut pairs = 0;
    // Pair up adjacent equal values; i64::MIN marks "no unmatched value held".
    for number in numbers.into_iter() {
        if number == last {
            last = i64::MIN;
            pairs += 1;
        } else {
            last = number;
        }
    }
    pairs += unseen % n;
    format!("{pairs}")
}
#[cfg(test)]
mod test {
    use crate::*;
    /// NOTE(review): working this case by hand yields "6", not the asserted
    /// "2" -- the test or the formula should be re-verified.
    #[test]
    fn test1() {
        let test = r"9 5
        2 2 4 1 1";
        assert_eq!(solve(test), "2");
    }
    #[test]
    fn test2() {
        let test = r"2 3
        1 2 1";
        assert_eq!(solve(test), "2");
    }
}
|
use crate::registry::{MetaType, Registry};
use crate::{
do_resolve, CacheControl, Context, ContextSelectionSet, Error, ObjectType, OutputValueType,
Positioned, QueryEnv, QueryError, Result, SchemaEnv, SubscriptionType, Type,
};
use async_graphql_parser::query::Field;
use futures::Stream;
use indexmap::IndexMap;
use std::borrow::Cow;
use std::pin::Pin;
#[doc(hidden)]
// Combines two GraphQL object types into one; field resolution tries the
// first half (`A`) and falls back to the second (`B`) on FieldNotFound.
pub struct MergedObject<A, B>(pub A, pub B);
impl<A, B> Default for MergedObject<A, B>
where
    A: Default,
    B: Default,
{
    /// Default-constructs both merged halves.
    fn default() -> Self {
        Self(Default::default(), Default::default())
    }
}
impl<A: Type, B: Type> Type for MergedObject<A, B> {
    /// The merged object's GraphQL type name is `"A_B"`.
    fn type_name() -> Cow<'static, str> {
        Cow::Owned(format!("{}_{}", A::type_name(), B::type_name()))
    }
    /// Registers the merged type: registers `A` and `B`, removes their own
    /// registry entries, and folds their fields and cache-control settings
    /// into a single `MetaType::Object` entry for `Self`.
    fn create_type_info(registry: &mut Registry) -> String {
        registry.create_type::<Self, _>(|registry| {
            let mut fields = IndexMap::new();
            let mut cc = CacheControl::default();
            // Register A, then *remove* its entry so only the merged object
            // remains; non-Object entries are left untouched by the `if let`.
            A::create_type_info(registry);
            if let Some(MetaType::Object {
                fields: a_fields,
                cache_control: a_cc,
                ..
            }) = registry.types.remove(&*A::type_name())
            {
                fields.extend(a_fields);
                cc.merge(&a_cc);
            }
            // Same for B. On a field-name collision B's entry presumably
            // replaces A's (map extend semantics) — TODO confirm intended.
            B::create_type_info(registry);
            if let Some(MetaType::Object {
                fields: b_fields,
                cache_control: b_cc,
                ..
            }) = registry.types.remove(&*B::type_name())
            {
                fields.extend(b_fields);
                cc.merge(&b_cc);
            }
            // The merged entry carries the combined field map and the merged
            // cache-control of both halves.
            MetaType::Object {
                name: Self::type_name().to_string(),
                description: None,
                fields,
                cache_control: cc,
                extends: false,
                keys: None,
            }
        })
    }
}
#[async_trait::async_trait]
impl<A, B> ObjectType for MergedObject<A, B>
where
    A: ObjectType + Send + Sync,
    B: ObjectType + Send + Sync,
{
    /// Resolves a field against the first half; if (and only if) that half
    /// reports `FieldNotFound`, retries against the second half. Any other
    /// error is propagated unchanged.
    async fn resolve_field(&self, ctx: &Context<'_>) -> Result<serde_json::Value> {
        let primary = self.0.resolve_field(ctx).await;
        match primary {
            Err(Error::Query {
                err: QueryError::FieldNotFound { .. },
                ..
            }) => self.1.resolve_field(ctx).await,
            other => other,
        }
    }
}
#[async_trait::async_trait]
impl<A, B> OutputValueType for MergedObject<A, B>
where
    A: ObjectType + Send + Sync,
    B: ObjectType + Send + Sync,
{
    /// Resolves the merged object's selection set by delegating to the
    /// shared `do_resolve` driver; the `_field` argument is unused here.
    async fn resolve(
        &self,
        ctx: &ContextSelectionSet<'_>,
        _field: &Positioned<Field>,
    ) -> Result<serde_json::Value> {
        do_resolve(ctx, self).await
    }
}
#[async_trait::async_trait]
impl<A, B> SubscriptionType for MergedObject<A, B>
where
    A: SubscriptionType + Send + Sync,
    B: SubscriptionType + Send + Sync,
{
    /// Creates the field stream from the first half; on `FieldNotFound`
    /// falls back to the second half (the envs are cloned up front because
    /// the fallback needs them again). Other errors propagate unchanged.
    async fn create_field_stream(
        &self,
        idx: usize,
        ctx: &Context<'_>,
        schema_env: SchemaEnv,
        query_env: QueryEnv,
    ) -> Result<Pin<Box<dyn Stream<Item = Result<serde_json::Value>> + Send>>> {
        let primary = self
            .0
            .create_field_stream(idx, ctx, schema_env.clone(), query_env.clone())
            .await;
        match primary {
            Err(Error::Query {
                err: QueryError::FieldNotFound { .. },
                ..
            }) => {
                self.1
                    .create_field_stream(idx, ctx, schema_env, query_env)
                    .await
            }
            other => other,
        }
    }
}
#[doc(hidden)]
// Terminal element for chains of merged objects: a derived SimpleObject
// with no fields of its own.
#[async_graphql_derive::SimpleObject(internal)]
#[derive(Default)]
pub struct MergedObjectTail;
#[doc(hidden)]
#[derive(Default)]
// Terminal element for merged subscription chains; its Subscription impl
// is deliberately empty.
pub struct MergedObjectSubscriptionTail;
#[async_graphql_derive::Subscription(internal)]
impl MergedObjectSubscriptionTail {}
|
#![allow(proc_macro_derive_resolution_fallback)]
use crate::auth::Auth;
use crate::schema::users;
use chrono::{Duration, Utc};
use serde::Serialize;
use serde_derive::Deserialize;
#[derive(Debug, Queryable, Serialize, Deserialize, Identifiable, PartialEq, AsChangeset)]
#[table_name = "users"]
/// A row of the `users` table.
pub struct User {
    pub id: u32,
    pub username: String,
    pub email: String,
    // Per-user salt bytes; presumably used when hashing the password —
    // confirm against the auth/registration code.
    pub salt: Vec<u8>,
    // Stored password bytes; assumed to be a salted hash, not plaintext —
    // TODO confirm.
    pub password: Vec<u8>,
}
#[derive(Insertable, Debug)]
#[table_name = "users"]
/// Insertable form of `User`: borrows the text fields, owns the byte
/// fields; `id` is omitted so the database assigns it.
pub struct NewUser<'a> {
    pub username: &'a str,
    pub email: &'a str,
    pub salt: Vec<u8>,
    pub password: Vec<u8>,
}
#[derive(Serialize)]
/// Serialize-only payload returned after authentication: public profile
/// fields plus the signed token produced by `User::to_user_auth`.
pub struct UserAuth<'a> {
    username: &'a str,
    email: &'a str,
    token: String,
}
impl User {
    /// Builds the auth payload for this user: borrows username/email and
    /// issues a token that expires 60 days from now.
    pub fn to_user_auth(&self, secret: &[u8]) -> UserAuth {
        let exp = Utc::now() + Duration::days(60);
        // The token embeds id, username, and the expiry timestamp, signed
        // with `secret` via `Auth::token` (signing scheme not visible here).
        let token = Auth {
            id: self.id,
            username: self.username.clone(),
            exp: exp.timestamp(),
        }
        .token(secret);
        UserAuth {
            username: &self.username,
            email: &self.email,
            token,
        }
    }
}
|
/// Demonstrates swapping two `String` elements of a `Vec` in place.
fn main() {
    let mut words = vec![
        String::from("Hello"),
        String::from("Yellow"),
        String::from("Tree"),
        String::from("Rust"),
        String::from("Compiler!"),
    ];
    println!("{:?}", words);
    // `slice::swap` exchanges the two elements in place. `String` is not
    // `Copy`, so the original clone-and-reassign dance worked too, but swap
    // needs no clones and no temporary. (`std::mem::swap` is the tool when
    // the two values live in *different* containers.)
    words.swap(1, 2);
    println!("{:?}", words);
}
/// Returns the prime factorization of `n` in ascending order, with
/// multiplicity (e.g. `factors(12) == [2, 2, 3]`). Returns an empty vector
/// for `n <= 1`.
///
/// Replaces the original recursive search, which re-ran an O(m) primality
/// test for every candidate divisor, with plain trial division up to
/// sqrt(n) — same results, O(sqrt(n)) instead of roughly O(n) per factor.
pub fn factors(n: u64) -> Vec<u64> {
    let mut results = Vec::new();
    let mut remaining = n;
    // Divisors found in increasing order are necessarily prime, so no
    // separate primality check is needed.
    let mut candidate = 2;
    while candidate * candidate <= remaining {
        while remaining % candidate == 0 {
            results.push(candidate);
            remaining /= candidate;
        }
        candidate += 1;
    }
    // Whatever is left above 1 is a single prime factor larger than sqrt(n).
    if remaining > 1 {
        results.push(remaining);
    }
    results
}
/// Trial-division primality test.
///
/// Fixes two defects in the original: `is_prime(0)` returned `true`
/// (the `2..0` range never iterated and `0 != 1` held), and the loop ran
/// all the way to `n` instead of stopping at sqrt(n).
fn is_prime(n: u64) -> bool {
    if n < 2 {
        return false;
    }
    let mut i = 2;
    while i * i <= n {
        if n % i == 0 {
            return false;
        }
        i += 1;
    }
    true
}
|
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    // 0x00: backing storage for CR1, which has two alternate views exposed
    // through the `cr1_enabled`/`cr1_disabled` accessors below.
    _reserved_0_cr1: [u8; 0x04],
    #[doc = "0x04 - USART control register 2"]
    pub cr2: CR2,
    #[doc = "0x08 - USART control register 3"]
    pub cr3: CR3,
    #[doc = "0x0c - USART baud rate register"]
    pub brr: BRR,
    #[doc = "0x10 - USART guard time and prescaler register"]
    pub gtpr: GTPR,
    #[doc = "0x14 - USART receiver timeout register"]
    pub rtor: RTOR,
    #[doc = "0x18 - USART request register"]
    pub rqr: RQR,
    // 0x1c: backing storage for ISR, exposed through the
    // `isr_enabled`/`isr_disabled` accessors below.
    _reserved_7_isr: [u8; 0x04],
    #[doc = "0x20 - USART interrupt flag clear register"]
    pub icr: ICR,
    #[doc = "0x24 - USART receive data register"]
    pub rdr: RDR,
    #[doc = "0x28 - USART transmit data register"]
    pub tdr: TDR,
    #[doc = "0x2c - USART prescaler register"]
    pub presc: PRESC,
}
// Accessors for the two registers that have alternate (mode-dependent)
// views at the same offset; each cast re-interprets the reserved bytes.
impl RegisterBlock {
    #[doc = "0x00 - USART control register 1 \\[alternate\\]"]
    #[inline(always)]
    pub const fn cr1_disabled(&self) -> &CR1_DISABLED {
        // SAFETY: CR1 occupies `_reserved_0_cr1` at byte offset 0 of this
        // #[repr(C)] block; the reference borrows from `self`, so it cannot
        // outlive the peripheral block.
        unsafe { &*(self as *const Self).cast::<u8>().add(0usize).cast() }
    }
    #[doc = "0x00 - USART control register 1 \\[alternate\\]"]
    #[inline(always)]
    pub const fn cr1_enabled(&self) -> &CR1_ENABLED {
        // SAFETY: same location as `cr1_disabled`; alternate view of offset 0.
        unsafe { &*(self as *const Self).cast::<u8>().add(0usize).cast() }
    }
    #[doc = "0x1c - USART interrupt and status register \\[alternate\\]"]
    #[inline(always)]
    pub const fn isr_disabled(&self) -> &ISR_DISABLED {
        // SAFETY: ISR occupies `_reserved_7_isr` at byte offset 28 (0x1c),
        // matching the field layout of the #[repr(C)] struct above.
        unsafe { &*(self as *const Self).cast::<u8>().add(28usize).cast() }
    }
    #[doc = "0x1c - USART interrupt and status register \\[alternate\\]"]
    #[inline(always)]
    pub const fn isr_enabled(&self) -> &ISR_ENABLED {
        // SAFETY: same location as `isr_disabled`; alternate view of 0x1c.
        unsafe { &*(self as *const Self).cast::<u8>().add(28usize).cast() }
    }
}
// svd2rust-generated register accessor type aliases and per-register modules.
#[doc = "CR1_enabled (rw) register accessor: USART control register 1 \\[alternate\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr1_enabled::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr1_enabled::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr1_enabled`]
module"]
pub type CR1_ENABLED = crate::Reg<cr1_enabled::CR1_ENABLED_SPEC>;
#[doc = "USART control register 1 \\[alternate\\]"]
pub mod cr1_enabled;
#[doc = "CR1_disabled (rw) register accessor: USART control register 1 \\[alternate\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr1_disabled::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr1_disabled::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr1_disabled`]
module"]
pub type CR1_DISABLED = crate::Reg<cr1_disabled::CR1_DISABLED_SPEC>;
#[doc = "USART control register 1 \\[alternate\\]"]
pub mod cr1_disabled;
#[doc = "CR2 (rw) register accessor: USART control register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr2`]
module"]
pub type CR2 = crate::Reg<cr2::CR2_SPEC>;
#[doc = "USART control register 2"]
pub mod cr2;
#[doc = "CR3 (rw) register accessor: USART control register 3\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr3::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr3::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr3`]
module"]
pub type CR3 = crate::Reg<cr3::CR3_SPEC>;
#[doc = "USART control register 3"]
pub mod cr3;
#[doc = "BRR (rw) register accessor: USART baud rate register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`brr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`brr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`brr`]
module"]
pub type BRR = crate::Reg<brr::BRR_SPEC>;
#[doc = "USART baud rate register"]
pub mod brr;
#[doc = "GTPR (rw) register accessor: USART guard time and prescaler register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gtpr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`gtpr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`gtpr`]
module"]
pub type GTPR = crate::Reg<gtpr::GTPR_SPEC>;
#[doc = "USART guard time and prescaler register"]
pub mod gtpr;
#[doc = "RTOR (rw) register accessor: USART receiver timeout register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rtor::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rtor::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rtor`]
module"]
pub type RTOR = crate::Reg<rtor::RTOR_SPEC>;
#[doc = "USART receiver timeout register"]
pub mod rtor;
#[doc = "RQR (w) register accessor: USART request register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rqr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rqr`]
module"]
pub type RQR = crate::Reg<rqr::RQR_SPEC>;
#[doc = "USART request register"]
pub mod rqr;
#[doc = "ISR_enabled (r) register accessor: USART interrupt and status register \\[alternate\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr_enabled::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`isr_enabled`]
module"]
pub type ISR_ENABLED = crate::Reg<isr_enabled::ISR_ENABLED_SPEC>;
#[doc = "USART interrupt and status register \\[alternate\\]"]
pub mod isr_enabled;
#[doc = "ISR_disabled (r) register accessor: USART interrupt and status register \\[alternate\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr_disabled::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`isr_disabled`]
module"]
pub type ISR_DISABLED = crate::Reg<isr_disabled::ISR_DISABLED_SPEC>;
#[doc = "USART interrupt and status register \\[alternate\\]"]
pub mod isr_disabled;
#[doc = "ICR (w) register accessor: USART interrupt flag clear register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`icr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`icr`]
module"]
pub type ICR = crate::Reg<icr::ICR_SPEC>;
#[doc = "USART interrupt flag clear register"]
pub mod icr;
#[doc = "RDR (r) register accessor: USART receive data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rdr`]
module"]
pub type RDR = crate::Reg<rdr::RDR_SPEC>;
#[doc = "USART receive data register"]
pub mod rdr;
#[doc = "TDR (rw) register accessor: USART transmit data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tdr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tdr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tdr`]
module"]
pub type TDR = crate::Reg<tdr::TDR_SPEC>;
#[doc = "USART transmit data register"]
pub mod tdr;
#[doc = "PRESC (rw) register accessor: USART prescaler register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`presc::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`presc::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`presc`]
module"]
pub type PRESC = crate::Reg<presc::PRESC_SPEC>;
#[doc = "USART prescaler register"]
pub mod presc;
|
// // match分支和模式匹配
// // 一个枚举和一个以枚举成员作为模式的match表达式
// enum Coin{
// Penny,
// Nickel,
// Dime,
// Quarter,
// }
// fn value_in_cents(coin: Coin) -> u32{
// match coin {
// // 一个分支有两个部分:一个模式和一些代码
// // 第一个分支的模式是Coin::Penny , =>将模式和代码分开 , 这里的代码仅仅是1
// // 每一个分支之间用逗号分隔
// Coin::Penny => 1,
// Coin::Nickel => 5,
// Coin::Dime => 10,
// Coin::Quarter => 25,
// }
// }
// ------------------------------### 绑定值的模式
// #[derive(Debug)]
// enum UsState{
// Alabama,
// Alaska,
// }
// enum Coin{
// Penny,
// Nickel,
// Dime,
// Quarter(UsState),
// }
// fn value_in_cents(coin: Coin) -> u32 {
// match coin{
// Coin::Penny => 1,
// Coin::Nickel => 5,
// Coin::Dime => 10,
// Coin::Quarter(state) => {
// println!(
// "State quarter from {:?}!",
// state
// );
// 25
// },
// }
// }
// ------------------------------------匹配Option<T>
// fn puls_one(x: Option<i32>) -> Option<i32> {
// match x{
// None => None,
// Some(i) => Some(i + 1),
// }
// }
// let five = Some(5);
// let six = puls_one(5);
// let none = puls_one(None);
// -----------------------------------匹配是穷尽的
// 如果我们没有处理None的情况,编译的时候就会报错,Rust知道我们没有覆盖所有可能的情况甚至知道哪些模式被忘记了
// Rust中的匹配是穷尽的:也就是说必须穷举到最后的可能性来使代码有效。
// fn puls_one(x: Option<i32>) -> Option<i32>{
// match x {
// Some(i) => Some(i + 1),
// }
// }
// ----------------------------------通配符
// rust也提供了一个模式,用于不想列举出所有可能值的场景
// _会匹配所有的值,将其放置于其他分支之后,_将会匹配所有之前没有指定的可能的值。()就是uint值,所以_的情况什么也不会发生
// 因此可以说我们想要对_通配符之前没有列出的所有可能的值不做任何处理。
// 但是match在只关心一个情况的场景中就有点啰嗦了,为此Rust提供了if let。
// let some_u8_value = 0u8;
// match some_u8_value{
// 1 => println!("one"),
// 3 => println!("three"),
// 5 => println!("five"),
// 7 => println!("seven"),
// _ => (),
// } |
use super::libs::id_table::{IdColor, IdTable, IdTableBuilder, ObjectId, Surface};
use super::libs::matrix::camera::CameraMatrix;
use super::libs::matrix::model::ModelMatrix;
use super::libs::tex_table::TexTable;
use super::libs::webgl::{program, ProgramType, WebGlF32Vbo, WebGlI16Ibo, WebGlRenderingContext};
use crate::arena::{block, BlockRef};
use crate::libs::random_id::U128Id;
use ndarray::{arr1, Array2};
use ordered_float::OrderedFloat;
use std::collections::BTreeMap;
/// How `Character::render` draws: into the ID pick buffer (skipping the
/// object currently being grabbed) or as the normal textured view.
pub enum RenderingMode<'a> {
    IdMap { grabbed: &'a U128Id },
    View,
}
/// GPU buffers for the single billboard quad shared by all character draws.
pub struct Character {
    // Quad corner positions (see `new` for the literal geometry).
    vertex_buffer: WebGlF32Vbo,
    // Per-vertex color attribute (all zeros; masked off at draw time).
    v_color_buffer: WebGlF32Vbo,
    // Per-vertex id attribute (all zeros; id comes from a uniform).
    id_buffer: WebGlF32Vbo,
    // Per-vertex normals, all pointing along -y.
    normal_buffer: WebGlF32Vbo,
    // Two-triangle index list for the quad.
    index_buffer: WebGlI16Ibo,
    // UV coordinates mapping the texture onto the quad.
    texture_coord_buffer: WebGlF32Vbo,
}
impl Character {
/// Allocates all of the quad's attribute and index buffers on `gl`.
pub fn new(gl: &WebGlRenderingContext) -> Self {
    // A unit-width quad in the x-z plane, centered on x, spanning z in [0, 1].
    let positions = gl.create_vbo_with_f32array(
        &[
            [0.5, 0.0, 1.0],
            [-0.5, 0.0, 1.0],
            [0.5, 0.0, 0.0],
            [-0.5, 0.0, 0.0],
        ]
        .concat(),
    );
    let ids = gl.create_vbo_with_f32array(&[0.0, 0.0, 0.0, 0.0]);
    let vertex_colors = gl.create_vbo_with_f32array(
        &[
            [0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0],
        ]
        .concat(),
    );
    let tex_coords =
        gl.create_vbo_with_f32array(&[[1.0, 0.0], [0.0, 0.0], [1.0, 1.0], [0.0, 1.0]].concat());
    let normals = gl.create_vbo_with_f32array(
        &[
            [0.0, -1.0, 0.0],
            [0.0, -1.0, 0.0],
            [0.0, -1.0, 0.0],
            [0.0, -1.0, 0.0],
        ]
        .concat(),
    );
    let indices = gl.create_ibo_with_i16array(&[0, 1, 2, 3, 2, 1]);
    Self {
        vertex_buffer: positions,
        v_color_buffer: vertex_colors,
        id_buffer: ids,
        normal_buffer: normals,
        index_buffer: indices,
        texture_coord_buffer: tex_coords,
    }
}
/// Registers a pick-surface entry in the id table for every character.
///
/// The surface is a camera-facing plane through the character's position:
/// `s` and `t` are the world x and z axes rotated by the camera's z-axis
/// rotation.
pub fn update_id(
    &self,
    builder: &mut IdTableBuilder,
    camera_matrix: &CameraMatrix,
    characters: impl Iterator<Item = BlockRef<block::Character>>,
) {
    // Rotation-only model matrix used to turn the reference axes.
    let inv_camera: Array2<f32> = ModelMatrix::new()
        .with_z_axis_rotation(camera_matrix.z_axis_rotation())
        .into();
    let s = inv_camera.dot(&arr1(&[1.0, 0.0, 0.0, 1.0]));
    let s = [s[0] as f64, s[1] as f64, s[2] as f64];
    let t = inv_camera.dot(&arr1(&[0.0, 0.0, 1.0, 1.0]));
    let t = [t[0] as f64, t[1] as f64, t[2] as f64];
    for character in characters {
        let block_id = character.id();
        // `map` runs only when the block is still resolvable.
        character.map(|character| {
            let surface = Surface {
                p: character.position().clone(),
                r: [0.0, 0.0, 0.0],
                s: s.clone(),
                t: t.clone(),
            };
            builder.insert(
                &block_id,
                IdColor::from(0),
                ObjectId::Character(U128Id::clone(&block_id), surface),
            );
        });
    }
}
/// Draws every character as a camera-facing textured quad.
///
/// Characters are first bucketed by screen-space depth (`z`) and then by
/// screen `y`, so draws happen back-to-front; GL state is order-sensitive
/// here, so statement order is deliberate.
pub fn render(
    &self,
    gl: &mut WebGlRenderingContext,
    id_table: &IdTable,
    vp_matrix: &Array2<f32>,
    camera_position: &[f32; 3],
    camera_matrix: &CameraMatrix,
    characters: impl Iterator<Item = BlockRef<block::Character>>,
    rendering_mode: &RenderingMode,
    is_2d_mode: bool,
    tex_table: &mut TexTable,
) {
    // --- Fixed pipeline state and per-frame uniforms -----------------
    gl.use_program(ProgramType::UnshapedProgram);
    gl.depth_func(web_sys::WebGlRenderingContext::LEQUAL);
    gl.set_a_vertex(&self.vertex_buffer, 3, 0);
    gl.set_a_texture_coord(&self.texture_coord_buffer, 2, 0);
    gl.set_a_id(&self.id_buffer, 1, 0);
    gl.set_a_v_color(&self.v_color_buffer, 4, 0);
    gl.set_a_normal(&self.normal_buffer, 3, 0);
    gl.bind_buffer(
        web_sys::WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,
        Some(&self.index_buffer),
    );
    gl.set_u_expand(0.0);
    gl.set_u_v_color_mask(program::V_COLOR_MASK_NONE);
    gl.set_u_camera_position(camera_position);
    gl.set_u_vp_matrix(vp_matrix.clone().reversed_axes());
    gl.set_u_bg_color_1(program::COLOR_NONE);
    gl.set_u_bg_color_2(program::COLOR_NONE);
    gl.set_u_texture_1(program::TEXTURE_NONE);
    gl.set_u_texture_2(program::TEXTURE_NONE);
    gl.set_u_perspective(if is_2d_mode {
        program::PERSPECTIVE_PROJECTION
    } else {
        program::PERSPECTIVE_NORMAL
    });
    gl.set_u_light(program::LIGHT_NONE);
    gl.set_u_shape(program::SHAPE_2D_BOX);
    // ID pass writes ids without textures; view pass samples the texture.
    match rendering_mode {
        RenderingMode::IdMap { .. } => {
            gl.set_u_texture_0(program::TEXTURE_NONE);
            gl.set_u_id(program::ID_V_WRITE);
        }
        RenderingMode::View => {
            gl.set_u_texture_0(program::TEXTURE_NORMAL);
            gl.set_u_id(program::ID_V_READ);
        }
    }
    // --- Pass 1: compute matrices and bucket by depth, then screen y --
    // Outer key: screen-space z; inner key: screen-space y (both negated so
    // iteration order yields the intended draw order).
    let mut z_index: BTreeMap<
        OrderedFloat<f32>,
        BTreeMap<
            OrderedFloat<f32>,
            Vec<(
                Array2<f32>,
                Array2<f32>,
                Array2<f32>,
                BlockRef<block::Character>,
            )>,
        >,
    > = BTreeMap::new();
    for character in characters {
        let character_id = character.id();
        let character_block = BlockRef::clone(&character);
        character.map(|character| {
            // The grabbed object is excluded from the pick buffer so the
            // cursor can see what is underneath it.
            if let RenderingMode::IdMap { grabbed } = rendering_mode {
                if character_id == **grabbed {
                    return;
                }
            }
            if character.size() > 0.0 {
                if let Some(texture) = character
                    .selected_texture()
                    .and_then(|texture| texture.image())
                {
                    if let Some(tex_size) = texture.map(|texture| texture.size().clone()) {
                        // Quad height from character size, width from the
                        // texture's aspect ratio; small +z bias avoids
                        // z-fighting with the ground.
                        let height = character.size() * character.tex_size();
                        let width = height * tex_size[0] / tex_size[1];
                        let s = [width as f32, 0.0, height as f32];
                        let p = character.position();
                        let p = [p[0] as f32, p[1] as f32, p[2] as f32 + 1.0 / 128.0];
                        // Billboard: rotate toward the camera (tilt only in
                        // 2D mode), then translate to the character.
                        let model_matrix: Array2<f32> = ModelMatrix::new()
                            .with_scale(&s)
                            .with_x_axis_rotation(if is_2d_mode {
                                camera_matrix.x_axis_rotation() - std::f32::consts::FRAC_PI_2
                            } else {
                                0.0
                            })
                            .with_z_axis_rotation(camera_matrix.z_axis_rotation())
                            .with_movement(&p)
                            .into();
                        // Inverse built by applying the opposite transforms
                        // in reverse order.
                        let inv_model_matrix: Array2<f32> = ModelMatrix::new()
                            .with_movement(&[-p[0], -p[1], -p[2]])
                            .with_z_axis_rotation(-camera_matrix.z_axis_rotation())
                            .with_x_axis_rotation(if is_2d_mode {
                                -(camera_matrix.x_axis_rotation() - std::f32::consts::FRAC_PI_2)
                            } else {
                                0.0
                            })
                            .with_scale(&[1.0 / s[0], 1.0 / s[1], 1.0 / s[2]])
                            .into();
                        let mvp_matrix = vp_matrix.dot(&model_matrix);
                        // Project the origin to get NDC depth/height keys.
                        let sp = mvp_matrix.dot(&arr1(&[0.0, 0.0, 0.0, 1.0]));
                        let z_key = OrderedFloat(-sp[2] / sp[3]);
                        let y_key = OrderedFloat(-sp[1] / sp[3]);
                        if let Some(y_index) = z_index.get_mut(&z_key) {
                            if let Some(v) = y_index.get_mut(&y_key) {
                                v.push((
                                    model_matrix,
                                    inv_model_matrix,
                                    mvp_matrix,
                                    character_block,
                                ));
                            } else {
                                y_index.insert(
                                    y_key,
                                    vec![(
                                        model_matrix,
                                        inv_model_matrix,
                                        mvp_matrix,
                                        character_block,
                                    )],
                                );
                            }
                        } else {
                            let mut y_index = BTreeMap::new();
                            y_index.insert(
                                y_key,
                                vec![(
                                    model_matrix,
                                    inv_model_matrix,
                                    mvp_matrix,
                                    character_block,
                                )],
                            );
                            z_index.insert(z_key, y_index);
                        }
                    }
                }
            }
        });
    }
    // --- Pass 2: draw in sorted bucket order --------------------------
    for (_, y_index) in z_index {
        for (_, characters) in y_index {
            for (model_matrix, inv_model_matrix, mvp_matrix, character) in characters {
                let character_id = character.id();
                character.map(|character| {
                    if let Some(texture) = character
                        .selected_texture()
                        .and_then(|texture| texture.image())
                    {
                        // Bind (or reuse) the texture slot for this image.
                        if let Some(tex_idx) = texture.map(|image_data| {
                            tex_table.use_resource(gl, &texture.id(), image_data)
                        }) {
                            // `unwrap!` is a project macro — presumably
                            // early-returns on a missing id entry; confirm.
                            let id_offset_color = unwrap!(id_table.offset_color(&character_id));
                            gl.set_u_translate(mvp_matrix.reversed_axes());
                            gl.set_u_model_matrix(model_matrix.reversed_axes());
                            gl.set_u_inv_model_matrix(inv_model_matrix.reversed_axes());
                            gl.set_u_id_value(id_offset_color.value() as i32);
                            gl.set_u_texture_0_sampler(tex_idx);
                            gl.draw_elements_with_i32(
                                web_sys::WebGlRenderingContext::TRIANGLES,
                                6,
                                web_sys::WebGlRenderingContext::UNSIGNED_SHORT,
                                0,
                            );
                        }
                    }
                });
            }
        }
    }
}
}
|
use crate::mmtk::MMTK;
use crate::plan::global::BasePlan;
use crate::plan::global::CommonPlan;
use crate::plan::global::GcStatus;
use crate::plan::global::NoCopy;
use crate::plan::marksweep::gc_work::MSProcessEdges;
use crate::plan::marksweep::mutator::ALLOCATOR_MAPPING;
use crate::plan::AllocationSemantics;
use crate::plan::Plan;
use crate::plan::PlanConstraints;
use crate::policy::mallocspace::MallocSpace;
use crate::policy::space::Space;
use crate::scheduler::gc_work::*;
use crate::scheduler::*;
use crate::util::alloc::allocators::AllocatorSelector;
use crate::util::heap::layout::heap_layout::Mmapper;
use crate::util::heap::layout::heap_layout::VMMap;
use crate::util::heap::layout::vm_layout_constants::{HEAP_END, HEAP_START};
use crate::util::heap::HeapMeta;
use crate::util::options::UnsafeOptionsWrapper;
#[cfg(feature = "sanity")]
use crate::util::sanity::sanity_checker::*;
use crate::util::OpaquePointer;
use crate::vm::VMBinding;
use std::sync::Arc;
use enum_map::EnumMap;
/// A non-moving mark-sweep plan backed by a malloc-based space.
pub struct MarkSweep<VM: VMBinding> {
    common: CommonPlan<VM>,
    ms: MallocSpace<VM>,
}
// SAFETY-NOTE(review): this asserts the whole plan is shareable across
// threads; soundness rests on CommonPlan/MallocSpace internals — confirm.
unsafe impl<VM: VMBinding> Sync for MarkSweep<VM> {}
/// Plan constraints: mark-sweep never moves objects and reserves two GC
/// bits (no extra words) in each object header.
pub const MS_CONSTRAINTS: PlanConstraints = PlanConstraints {
    moves_objects: false,
    gc_header_bits: 2,
    gc_header_words: 0,
    num_specialized_scans: 1,
    ..PlanConstraints::default()
};
impl<VM: VMBinding> Plan for MarkSweep<VM> {
    type VM = VM;
    /// Delegates heap initialization to the common plan.
    fn gc_init(
        &mut self,
        heap_size: usize,
        vm_map: &'static VMMap,
        scheduler: &Arc<MMTkScheduler<VM>>,
    ) {
        self.common.gc_init(heap_size, vm_map, scheduler);
    }
    /// Queues the work packets for one mark-sweep collection cycle.
    fn schedule_collection(&'static self, scheduler: &MMTkScheduler<VM>) {
        self.base().set_collection_kind();
        self.base().set_gc_status(GcStatus::GcPrepare);
        // Stop & scan mutators (mutator scanning can happen before STW)
        scheduler.work_buckets[WorkBucketStage::Unconstrained]
            .add(StopMutators::<MSProcessEdges<VM>>::new());
        // Prepare global/collectors/mutators
        scheduler.work_buckets[WorkBucketStage::Prepare]
            .add(Prepare::<Self, NoCopy<VM>>::new(self));
        // Release global/collectors/mutators
        scheduler.work_buckets[WorkBucketStage::Release]
            .add(Release::<Self, NoCopy<VM>>::new(self));
        // Optional post-GC heap sanity pass (feature-gated).
        #[cfg(feature = "sanity")]
        scheduler.work_buckets[WorkBucketStage::Final]
            .add(ScheduleSanityGC::<Self, NoCopy<VM>>::new());
        // EndOfGC runs last and resumes mutators.
        scheduler.set_finalizer(Some(EndOfGC));
    }
    fn get_allocator_mapping(&self) -> &'static EnumMap<AllocationSemantics, AllocatorSelector> {
        &*ALLOCATOR_MAPPING
    }
    fn prepare(&self, tls: OpaquePointer) {
        self.common.prepare(tls, true);
        // Dont need to prepare for MallocSpace
    }
    fn release(&self, tls: OpaquePointer) {
        trace!("Marksweep: Release");
        self.common.release(tls, true);
        // SAFETY-NOTE(review): frees unmarked chunks in the malloc space;
        // presumably sound only while mutators are stopped — confirm.
        unsafe { self.ms.release_all_chunks() };
    }
    /// Mark-sweep copies nothing, so no copy reserve is needed.
    fn get_collection_reserve(&self) -> usize {
        0
    }
    fn get_pages_used(&self) -> usize {
        self.common.get_pages_used() + self.ms.reserved_pages()
    }
    fn base(&self) -> &BasePlan<VM> {
        &self.common.base
    }
    fn common(&self) -> &CommonPlan<VM> {
        &self.common
    }
    fn constraints(&self) -> &'static PlanConstraints {
        &MS_CONSTRAINTS
    }
    /// Builds the per-worker local state (NoCopy: no copying contexts).
    fn create_worker_local(
        &self,
        tls: OpaquePointer,
        mmtk: &'static MMTK<Self::VM>,
    ) -> GCWorkerLocalPtr {
        let mut c = NoCopy::new(mmtk);
        c.init(tls);
        GCWorkerLocalPtr::new(c)
    }
}
impl<VM: VMBinding> MarkSweep<VM> {
    /// Constructs the plan; the scheduler argument is currently unused.
    pub fn new(
        vm_map: &'static VMMap,
        mmapper: &'static Mmapper,
        options: Arc<UnsafeOptionsWrapper>,
        _scheduler: &'static MMTkScheduler<VM>,
    ) -> Self {
        // Heap spans the whole default VM layout range.
        let heap = HeapMeta::new(HEAP_START, HEAP_END);
        MarkSweep {
            common: CommonPlan::new(vm_map, mmapper, options, heap, &MS_CONSTRAINTS),
            ms: MallocSpace::new(),
        }
    }
    /// The malloc-backed space owned by this plan.
    pub fn ms_space(&self) -> &MallocSpace<VM> {
        &self.ms
    }
}
|
//! DNS resolver configuration
use std::io;
use std::net::SocketAddr;
use std::time::Duration;
/// Configures the behavior of DNS requests
/// (mirrors common `resolv.conf`-style options — confirm field-for-field).
#[derive(Clone, Debug)]
pub struct DnsConfig {
    /// List of name servers; must not be empty
    pub name_servers: Vec<SocketAddr>,
    /// List of search domains
    pub search: Vec<String>,
    /// Minimum number of dots in a name to trigger an initial absolute query
    pub n_dots: u32,
    /// Duration before retrying or failing an unanswered request
    pub timeout: Duration,
    /// Number of attempts made before returning an error
    pub attempts: u32,
    /// Whether to rotate through available nameservers
    pub rotate: bool,
    /// If `true`, perform `AAAA` queries first and return IPv4 addresses
    /// as IPv4-mapped IPv6 addresses.
    pub use_inet6: bool,
}
impl DnsConfig {
    /// Returns the platform's default resolver configuration.
    pub fn load_default() -> io::Result<DnsConfig> {
        default_config_impl()
    }
    /// Builds a configuration for the given name servers, with generally
    /// sensible defaults for every other field (1 dot threshold, 5 s
    /// timeout, 5 attempts, no rotation, no inet6 mapping).
    pub fn with_name_servers(name_servers: Vec<SocketAddr>) -> DnsConfig {
        Self {
            name_servers,
            search: Vec::new(),
            n_dots: 1,
            attempts: 5,
            timeout: Duration::from_secs(5),
            rotate: false,
            use_inet6: false,
        }
    }
}
#[cfg(unix)]
// Unix: parse the system resolver configuration via the crate's
// `resolv_conf` loader.
fn default_config_impl() -> io::Result<DnsConfig> {
    use crate::resolv_conf::load;
    load()
}
#[cfg(windows)]
// Windows: not implemented yet; always reports an I/O error.
fn default_config_impl() -> io::Result<DnsConfig> {
    // TODO: Get a list of nameservers from Windows API.
    let message = "Nameserver list not available on Windows";
    Err(io::Error::new(io::ErrorKind::Other, message))
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Error envelope returned by the service; the inner error is optional.
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<error_response::Error>,
}
/// Types nested under `ErrorResponse`.
pub mod error_response {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    /// Error code/message pair; both fields optional on the wire.
    pub struct Error {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub code: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub message: Option<String>,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Paged list of operations; `next_link` points at the following page.
pub struct OperationList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// A single REST operation entry; every field is optional on the wire.
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationDisplay>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<OperationProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Human-readable description of an operation.
pub struct OperationDisplay {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Extra operation metadata; currently only the service specification.
pub struct OperationProperties {
    #[serde(rename = "serviceSpecification", default, skip_serializing_if = "Option::is_none")]
    pub service_specification: Option<ServiceSpecification>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Container for the metric specifications of a service.
pub struct ServiceSpecification {
    #[serde(rename = "metricSpecifications", default, skip_serializing_if = "Vec::is_empty")]
    pub metric_specifications: Vec<MetricSpecification>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Definition of a single exposed metric.
pub struct MetricSpecification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "displayDescription", default, skip_serializing_if = "Option::is_none")]
    pub display_description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<String>,
    #[serde(rename = "aggregationType", default, skip_serializing_if = "Option::is_none")]
    pub aggregation_type: Option<metric_specification::AggregationType>,
    // NOTE(review): string-typed on the wire even though it reads boolean —
    // matches the service schema; confirm before changing to bool.
    #[serde(rename = "fillGapWithZero", default, skip_serializing_if = "Option::is_none")]
    pub fill_gap_with_zero: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub category: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dimensions: Vec<Dimension>,
}
/// Types nested under `MetricSpecification`.
pub mod metric_specification {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    /// How samples of the metric are aggregated.
    pub enum AggregationType {
        Average,
        Minimum,
        Maximum,
        Total,
        Count,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Dimension {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
pub display_name: Option<String>,
#[serde(rename = "internalName", default, skip_serializing_if = "Option::is_none")]
pub internal_name: Option<String>,
#[serde(rename = "toBeExportedForShoebox", default, skip_serializing_if = "Option::is_none")]
pub to_be_exported_for_shoebox: Option<bool>,
}
/// Request body for a name-availability check: the resource `type` and the
/// candidate `name` (both required by the wire format).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NameAvailabilityParameters {
    // `type` is a Rust keyword, hence the `type_` field name plus rename.
    #[serde(rename = "type")]
    pub type_: String,
    pub name: String,
}
/// Result of a name-availability check, with an optional reason/message
/// explaining an unavailable name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NameAvailability {
    #[serde(rename = "nameAvailable", default, skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Request body for linking a notification hub: both the hub's resource id
/// and its connection string are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkNotificationHubParameters {
    #[serde(rename = "resourceId")]
    pub resource_id: String,
    #[serde(rename = "connectionString")]
    pub connection_string: String,
}
/// A notification hub that has been linked, identified by its resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkedNotificationHub {
    #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
    pub resource_id: Option<String>,
}
/// Status of a long-running operation: id, state, timing, progress, error.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<operation_status::Status>,
    // Timestamps are kept as raw strings rather than parsed date-time types.
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(rename = "percentComplete", default, skip_serializing_if = "Option::is_none")]
    pub percent_complete: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// Namespaced enums for [`OperationStatus`].
pub mod operation_status {
    use super::*;
    /// Terminal and in-flight states of a long-running operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Succeeded,
        Failed,
        Canceled,
        Creating,
        Deleting,
        Moving,
    }
}
/// Paged list of communication service resources, with a continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CommunicationServiceResourceList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<CommunicationServiceResource>,
    /// URL of the next page, when more results exist.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A communication service resource: flattens the common resource,
/// location and tags envelopes around the service-specific properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CommunicationServiceResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(flatten)]
    pub location_resource: LocationResource,
    #[serde(flatten)]
    pub tagged_resource: TaggedResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CommunicationServiceProperties>,
}
/// Common resource envelope: id, name and type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the `type_` field name plus rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// Mixin carrying the resource location.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LocationResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
}
/// Mixin carrying the resource tags (free-form JSON object).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TaggedResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Service-specific properties; `data_location` is the only required field.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CommunicationServiceProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<communication_service_properties::ProvisioningState>,
    #[serde(rename = "hostName", default, skip_serializing_if = "Option::is_none")]
    pub host_name: Option<String>,
    #[serde(rename = "dataLocation")]
    pub data_location: String,
    #[serde(rename = "notificationHubId", default, skip_serializing_if = "Option::is_none")]
    pub notification_hub_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
    #[serde(rename = "immutableResourceId", default, skip_serializing_if = "Option::is_none")]
    pub immutable_resource_id: Option<String>,
}
/// Namespaced enums for [`CommunicationServiceProperties`].
pub mod communication_service_properties {
    use super::*;
    /// Provisioning lifecycle states of the service resource.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Unknown,
        Succeeded,
        Failed,
        Canceled,
        Running,
        Creating,
        Updating,
        Deleting,
        Moving,
    }
}
/// Access keys and ready-made connection strings for the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CommunicationServiceKeys {
    #[serde(rename = "primaryKey", default, skip_serializing_if = "Option::is_none")]
    pub primary_key: Option<String>,
    #[serde(rename = "secondaryKey", default, skip_serializing_if = "Option::is_none")]
    pub secondary_key: Option<String>,
    #[serde(rename = "primaryConnectionString", default, skip_serializing_if = "Option::is_none")]
    pub primary_connection_string: Option<String>,
    #[serde(rename = "secondaryConnectionString", default, skip_serializing_if = "Option::is_none")]
    pub secondary_connection_string: Option<String>,
}
/// Request body selecting which access key to regenerate.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegenerateKeyParameters {
    #[serde(rename = "keyType", default, skip_serializing_if = "Option::is_none")]
    pub key_type: Option<regenerate_key_parameters::KeyType>,
}
/// Namespaced enums for [`RegenerateKeyParameters`].
pub mod regenerate_key_parameters {
    use super::*;
    /// Which of the two access keys to regenerate.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum KeyType {
        Primary,
        Secondary,
    }
}
|
// Generated (svd2rust-style) accessor types for the control register (CR).
// Single-bit fields use BitReader/BitWriter; multi-bit fields use
// FieldReader/FieldWriter with the field width (3, 7 or 4 bits) as a const
// generic. `O` on the writer types is the field's bit offset in the register.
#[doc = "Register `CR` reader"]
pub type R = crate::R<CR_SPEC>;
#[doc = "Register `CR` writer"]
pub type W = crate::W<CR_SPEC>;
#[doc = "Field `TSCE` reader - Touch sensing controller enable"]
pub type TSCE_R = crate::BitReader;
#[doc = "Field `TSCE` writer - Touch sensing controller enable"]
pub type TSCE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `START` reader - Start a new acquisition"]
pub type START_R = crate::BitReader;
#[doc = "Field `START` writer - Start a new acquisition"]
pub type START_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AM` reader - Acquisition mode"]
pub type AM_R = crate::BitReader;
#[doc = "Field `AM` writer - Acquisition mode"]
pub type AM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SYNCPOL` reader - Synchronization pin polarity"]
pub type SYNCPOL_R = crate::BitReader;
#[doc = "Field `SYNCPOL` writer - Synchronization pin polarity"]
pub type SYNCPOL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IODEF` reader - I/O Default mode"]
pub type IODEF_R = crate::BitReader;
#[doc = "Field `IODEF` writer - I/O Default mode"]
pub type IODEF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MCV` reader - Max count value"]
pub type MCV_R = crate::FieldReader;
#[doc = "Field `MCV` writer - Max count value"]
pub type MCV_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `PGPSC` reader - pulse generator prescaler"]
pub type PGPSC_R = crate::FieldReader;
#[doc = "Field `PGPSC` writer - pulse generator prescaler"]
pub type PGPSC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `SSPSC` reader - Spread spectrum prescaler"]
pub type SSPSC_R = crate::BitReader;
#[doc = "Field `SSPSC` writer - Spread spectrum prescaler"]
pub type SSPSC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SSE` reader - Spread spectrum enable"]
pub type SSE_R = crate::BitReader;
#[doc = "Field `SSE` writer - Spread spectrum enable"]
pub type SSE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SSD` reader - Spread spectrum deviation"]
pub type SSD_R = crate::FieldReader;
#[doc = "Field `SSD` writer - Spread spectrum deviation"]
pub type SSD_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
#[doc = "Field `CTPL` reader - Charge transfer pulse low"]
pub type CTPL_R = crate::FieldReader;
#[doc = "Field `CTPL` writer - Charge transfer pulse low"]
pub type CTPL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `CTPH` reader - Charge transfer pulse high"]
pub type CTPH_R = crate::FieldReader;
#[doc = "Field `CTPH` writer - Charge transfer pulse high"]
pub type CTPH_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
// Read-side accessors. Each getter shifts `self.bits` to the field's offset
// and masks off the field width (mask 1 for single bits, 7 for 3-bit, 0x7f
// for 7-bit, 0x0f for 4-bit fields). Bits 8..=11 have no accessor here —
// presumably reserved; confirm against the device reference manual.
impl R {
    #[doc = "Bit 0 - Touch sensing controller enable"]
    #[inline(always)]
    pub fn tsce(&self) -> TSCE_R {
        TSCE_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Start a new acquisition"]
    #[inline(always)]
    pub fn start(&self) -> START_R {
        START_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Acquisition mode"]
    #[inline(always)]
    pub fn am(&self) -> AM_R {
        AM_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Synchronization pin polarity"]
    #[inline(always)]
    pub fn syncpol(&self) -> SYNCPOL_R {
        SYNCPOL_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - I/O Default mode"]
    #[inline(always)]
    pub fn iodef(&self) -> IODEF_R {
        IODEF_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bits 5:7 - Max count value"]
    #[inline(always)]
    pub fn mcv(&self) -> MCV_R {
        MCV_R::new(((self.bits >> 5) & 7) as u8)
    }
    #[doc = "Bits 12:14 - pulse generator prescaler"]
    #[inline(always)]
    pub fn pgpsc(&self) -> PGPSC_R {
        PGPSC_R::new(((self.bits >> 12) & 7) as u8)
    }
    #[doc = "Bit 15 - Spread spectrum prescaler"]
    #[inline(always)]
    pub fn sspsc(&self) -> SSPSC_R {
        SSPSC_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - Spread spectrum enable"]
    #[inline(always)]
    pub fn sse(&self) -> SSE_R {
        SSE_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bits 17:23 - Spread spectrum deviation"]
    #[inline(always)]
    pub fn ssd(&self) -> SSD_R {
        SSD_R::new(((self.bits >> 17) & 0x7f) as u8)
    }
    #[doc = "Bits 24:27 - Charge transfer pulse low"]
    #[inline(always)]
    pub fn ctpl(&self) -> CTPL_R {
        CTPL_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    #[doc = "Bits 28:31 - Charge transfer pulse high"]
    #[inline(always)]
    pub fn ctph(&self) -> CTPH_R {
        CTPH_R::new(((self.bits >> 28) & 0x0f) as u8)
    }
}
// Write-side accessors. Each method returns a typed writer positioned at the
// field's bit offset (the const generic on the *_W alias); the offsets match
// the shift amounts used by the corresponding readers in `impl R`. Methods
// are #[must_use] so dropping the returned writer unused is linted.
impl W {
    #[doc = "Bit 0 - Touch sensing controller enable"]
    #[inline(always)]
    #[must_use]
    pub fn tsce(&mut self) -> TSCE_W<CR_SPEC, 0> {
        TSCE_W::new(self)
    }
    #[doc = "Bit 1 - Start a new acquisition"]
    #[inline(always)]
    #[must_use]
    pub fn start(&mut self) -> START_W<CR_SPEC, 1> {
        START_W::new(self)
    }
    #[doc = "Bit 2 - Acquisition mode"]
    #[inline(always)]
    #[must_use]
    pub fn am(&mut self) -> AM_W<CR_SPEC, 2> {
        AM_W::new(self)
    }
    #[doc = "Bit 3 - Synchronization pin polarity"]
    #[inline(always)]
    #[must_use]
    pub fn syncpol(&mut self) -> SYNCPOL_W<CR_SPEC, 3> {
        SYNCPOL_W::new(self)
    }
    #[doc = "Bit 4 - I/O Default mode"]
    #[inline(always)]
    #[must_use]
    pub fn iodef(&mut self) -> IODEF_W<CR_SPEC, 4> {
        IODEF_W::new(self)
    }
    #[doc = "Bits 5:7 - Max count value"]
    #[inline(always)]
    #[must_use]
    pub fn mcv(&mut self) -> MCV_W<CR_SPEC, 5> {
        MCV_W::new(self)
    }
    #[doc = "Bits 12:14 - pulse generator prescaler"]
    #[inline(always)]
    #[must_use]
    pub fn pgpsc(&mut self) -> PGPSC_W<CR_SPEC, 12> {
        PGPSC_W::new(self)
    }
    #[doc = "Bit 15 - Spread spectrum prescaler"]
    #[inline(always)]
    #[must_use]
    pub fn sspsc(&mut self) -> SSPSC_W<CR_SPEC, 15> {
        SSPSC_W::new(self)
    }
    #[doc = "Bit 16 - Spread spectrum enable"]
    #[inline(always)]
    #[must_use]
    pub fn sse(&mut self) -> SSE_W<CR_SPEC, 16> {
        SSE_W::new(self)
    }
    #[doc = "Bits 17:23 - Spread spectrum deviation"]
    #[inline(always)]
    #[must_use]
    pub fn ssd(&mut self) -> SSD_W<CR_SPEC, 17> {
        SSD_W::new(self)
    }
    #[doc = "Bits 24:27 - Charge transfer pulse low"]
    #[inline(always)]
    #[must_use]
    pub fn ctpl(&mut self) -> CTPL_W<CR_SPEC, 24> {
        CTPL_W::new(self)
    }
    #[doc = "Bits 28:31 - Charge transfer pulse high"]
    #[inline(always)]
    #[must_use]
    pub fn ctph(&mut self) -> CTPH_W<CR_SPEC, 28> {
        CTPH_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY contract: the caller must ensure the raw value is valid
        // for this register — no checks are performed here.
        self.bits = bits;
        self
    }
}
#[doc = "control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CR_SPEC;
impl crate::RegisterSpec for CR_SPEC {
    // CR is a 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`cr::R`](R) reader structure"]
impl crate::Readable for CR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cr::W`](W) writer structure"]
impl crate::Writable for CR_SPEC {
    // Both bitmaps are empty: no bits are forcibly cleared or set during a
    // read-modify-write, so modify() preserves fields that are not written.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CR to value 0"]
impl crate::Resettable for CR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crypto::blake2s;
use hash::H256;
/// Placeholder transaction list; carries no data yet.
#[derive(Debug, Default)]
struct TransactionList;
/// The Block struct represents the compact data representing one unit of the chain.
struct Block {
    block_header: BlockHeader,
    transactions: TransactionList,
    // TODO omners (?)
}
/// The BlockHeader contains all of the relevant data to verify the chain, but no records of
/// transactions. This is useful for light clients that only want to keep up with the chain and
/// verify blocks but have no concern for keeping a record of the full transactions.
#[derive(Debug, Default)]
struct BlockHeader {
    // NOTE(review): presumably the coinbase/reward destination hash —
    // never read or written in this file; confirm intended use.
    coinbase: H256,
    // Hash derived from the previous block's header (see `from_prev`).
    prev_block_hash: H256,
    // NOTE(review): presumably this block's own hash; never populated in
    // the code visible here — confirm.
    block_hash: H256,
    // Unix time in seconds (see the `timestamp()` helper below).
    timestamp: u64,
    // Incremented by one from the parent's nonce in `from_prev`.
    nonce: u32,
}
impl Block {
fn generate(header: BlockHeader, txList: TransactionList) -> Block {
Block{
block_header: header,
transactions: txList;
}
}
fn hash(&Self) {
// TODO encode the fields and hash them like below.
let mut res = [0u8; 160];
}
fn hex_hash(&Self) -> //TODO implement hex_encode that takes [u8;160] {
hex_encode(self.hash())
}
}
impl BlockHeader {
    /// Derives the header for the block that follows `prev`.
    ///
    /// NOTE(review): the original read `prev.prev_block_hash` etc., but
    /// `Block` has no such fields — they live on `prev.block_header`.
    /// Assumes `H256: AsRef<[u8]>` and `blake2s(&[u8]) -> H256`;
    /// TODO confirm against the `hash`/`crypto` crate APIs.
    fn from_prev(prev: &Block) -> BlockHeader {
        // Concatenate (prev_hash | block_hash | H(timestamp) | H(nonce))
        // and hash the result to chain the new header to its parent.
        let header = &prev.block_header;
        let mut result = [0u8; 128];
        result[0..32].copy_from_slice(header.prev_block_hash.as_ref());
        result[32..64].copy_from_slice(header.block_hash.as_ref());
        result[64..96].copy_from_slice(blake2s(&header.timestamp.to_le_bytes()).as_ref());
        result[96..128].copy_from_slice(blake2s(&header.nonce.to_le_bytes()).as_ref());
        let mut block_header = BlockHeader::default();
        block_header.prev_block_hash = blake2s(&result);
        block_header.timestamp = timestamp();
        block_header.nonce = header.nonce + 1;
        block_header
    }
}
// TODO: implement Blake2s merkle rooting
// (moved out of the doc position: it described future work, not this fn).
/// Current Unix time in whole seconds.
///
/// Uses the standard library instead of the external `time::get_time()`;
/// both report seconds since the Unix epoch.
#[inline]
fn timestamp() -> u64 {
    use std::time::{SystemTime, UNIX_EPOCH};
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock is before the Unix epoch")
        .as_secs()
}
#[cfg(test)]
mod tests {
    // Placeholder suite: no assertions yet. TODO: cover Block::generate and
    // BlockHeader::from_prev once hashing is implemented.
    #[test]
    fn it_works() {
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// SyntheticsSslCertificateSubject : Object describing the SSL certificate used for the test.
// Field names mirror the X.509 subject attribute abbreviations carried on
// the wire (C, CN, L, O, OU, ST), hence the non-snake-case Rust names that
// match the serde renames exactly.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SyntheticsSslCertificateSubject {
    /// Country Name associated with the certificate.
    #[serde(rename = "C", skip_serializing_if = "Option::is_none")]
    pub C: Option<String>,
    /// Common Name that associated with the certificate.
    #[serde(rename = "CN", skip_serializing_if = "Option::is_none")]
    pub CN: Option<String>,
    /// Locality associated with the certificate.
    #[serde(rename = "L", skip_serializing_if = "Option::is_none")]
    pub L: Option<String>,
    /// Organization associated with the certificate.
    #[serde(rename = "O", skip_serializing_if = "Option::is_none")]
    pub O: Option<String>,
    /// Organizational Unit associated with the certificate.
    #[serde(rename = "OU", skip_serializing_if = "Option::is_none")]
    pub OU: Option<String>,
    /// State Or Province Name associated with the certificate.
    #[serde(rename = "ST", skip_serializing_if = "Option::is_none")]
    pub ST: Option<String>,
    /// Subject Alternative Name associated with the certificate.
    #[serde(rename = "altName", skip_serializing_if = "Option::is_none")]
    pub alt_name: Option<String>,
}
impl SyntheticsSslCertificateSubject {
    /// Object describing the SSL certificate used for the test.
    ///
    /// Creates an instance with every field unset (`None`).
    pub fn new() -> SyntheticsSslCertificateSubject {
        SyntheticsSslCertificateSubject {
            C: None,
            CN: None,
            L: None,
            O: None,
            OU: None,
            ST: None,
            alt_name: None,
        }
    }
}
// Added: a type with a zero-argument `new` should also implement `Default`
// (clippy::new_without_default), so it composes with `..Default::default()`.
impl Default for SyntheticsSslCertificateSubject {
    fn default() -> Self {
        Self::new()
    }
}
|
use std::io::{self, BufRead};
/// Returns true if `nums[end]` equals the sum of two distinct entries drawn
/// from `nums[start..end]` (the AoC 2020 day 9 validity check).
///
/// Takes `&[u64]` instead of `&Vec<u64>`; existing call sites coerce.
///
/// # Panics
/// Panics if `end >= nums.len()` (same precondition as the original).
fn is_sum_of_two_numbers_in_previous_n(nums: &[u64], start: usize, end: usize) -> bool {
    let target = nums[end];
    // Scan all unordered pairs (i, j) with start <= i < j < end; the
    // original's unreachable "invalid mode"-style fallback is not needed.
    (start..end).any(|i| (i + 1..end).any(|j| nums[i] + nums[j] == target))
}
/// Finds the first element (index 25 onward) that is NOT the sum of two
/// distinct entries among the 25 numbers preceding it (AoC 2020 day 9).
///
/// Returns `None` when no such element exists — including when `nums` has
/// 25 or fewer elements, where the original underflowed on
/// `nums.len() - 25` (usize subtraction) and panicked.
fn find_not_sum_of_two_numbers_in_previous_n(nums: &[u64]) -> Option<u64> {
    // Size of the sliding preamble window.
    const WINDOW: usize = 25;
    // `WINDOW..nums.len()` is simply empty for short inputs: no underflow.
    (WINDOW..nums.len())
        .find(|&end| !is_sum_of_two_numbers_in_previous_n(nums, end - WINDOW, end))
        .map(|end| nums[end])
}
/// AoC 2020 day 9: part 1 prints the first invalid number; part 2 reports
/// min/max (and their sum) of a contiguous run summing to it.
fn main() {
    // One u64 per stdin line; malformed input aborts via unwrap (CLI tool).
    let nums: Vec<u64> = io::stdin()
        .lock()
        .lines()
        .map(|line| line.unwrap().parse::<u64>().unwrap())
        .collect();
    let special_number = find_not_sum_of_two_numbers_in_previous_n(&nums).unwrap();
    println!("{}", special_number);
    // Part 2: find a contiguous run (length >= 2) summing to special_number.
    for i in 0..nums.len() {
        let mut sum = nums[i];
        for j in i + 1..nums.len() {
            sum += nums[j];
            if sum > special_number {
                break;
            }
            if sum == special_number {
                // `sum` includes nums[j], so the run is the INCLUSIVE range
                // i..=j. (Fixed: the original sliced `i..j`, silently
                // dropping the run's last element from the min/max scan.)
                let range = nums.get(i..=j).unwrap();
                let min = range.iter().min().unwrap();
                let max = range.iter().max().unwrap();
                dbg!(min, max, min + max);
            }
        }
    }
}
|
//! Relay implementation for consensus blocks.
use crate::protocol::compact_block::{CompactBlockClient, CompactBlockServer};
use crate::utils::{NetworkPeerHandle, NetworkWrapper, RequestResponseErr};
use crate::{
DownloadResult, ProtocolBackend, ProtocolClient, ProtocolServer, RelayError, LOG_TARGET,
};
use async_trait::async_trait;
use codec::{Decode, Encode};
use futures::channel::oneshot;
use futures::stream::StreamExt;
use lru::LruCache;
use parking_lot::Mutex;
use sc_client_api::{BlockBackend, HeaderBackend};
use sc_network::request_responses::{IncomingRequest, OutgoingResponse, ProtocolConfig};
use sc_network::types::ProtocolName;
use sc_network::{OutboundFailure, PeerId, RequestFailure};
use sc_network_common::sync::message::{BlockAttributes, BlockData, BlockRequest, FromBlock};
use sc_network_sync::block_relay_protocol::{
BlockDownloader, BlockRelayParams, BlockResponseError, BlockServer,
};
use sc_service::SpawnTaskHandle;
use sc_transaction_pool_api::{InPoolTransaction, TransactionPool, TxHash};
use sp_runtime::generic::BlockId;
use sp_runtime::traits::{Block as BlockT, Header};
use sp_runtime::Justifications;
use std::num::NonZeroUsize;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tracing::{debug, info, trace, warn};
// Aliases for the consensus block's associated types, to keep signatures short.
type BlockHash<Block> = <Block as BlockT>::Hash;
type BlockHeader<Block> = <Block as BlockT>::Header;
type Extrinsic<Block> = <Block as BlockT>::Extrinsic;
/// Request/response protocol name advertised for the block relay.
const SYNC_PROTOCOL: &str = "/subspace/consensus-block-relay/1";
// TODO: size these properly, or move to config
// Expected peer count hint; its use is not visible in this chunk — confirm.
const NUM_PEER_HINT: NonZeroUsize = NonZeroUsize::new(100).expect("Not zero; qed");
// Capacity of the tx-hash -> extrinsic LRU cache (see `ConsensusBackend`).
const TRANSACTION_CACHE_SIZE: NonZeroUsize = NonZeroUsize::new(512).expect("Not zero; qed");
/// Initial request to the server
/// We currently ignore the direction field and return a single block,
/// revisit if needed
#[derive(Encode, Decode)]
struct InitialRequest<Block: BlockT, ProtocolRequest> {
    /// Starting block
    from_block: BlockId<Block>,
    /// Requested block components
    block_attributes: BlockAttributes,
    /// The protocol specific part of the request
    protocol_request: ProtocolRequest,
}
/// Initial response from server
#[derive(Encode, Decode)]
struct InitialResponse<Block: BlockT, ProtocolResponse> {
    /// Hash of the block being downloaded
    block_hash: BlockHash<Block>,
    /// The partial block, without the extrinsics
    partial_block: PartialBlock<Block>,
    /// The opaque protocol specific part of the response.
    /// This is optional because BlockAttributes::BODY may not be set in
    /// the BlockRequest, in which case we don't need to fetch the
    /// extrinsics
    protocol_response: Option<ProtocolResponse>,
}
/// The partial block response from the server. It has all the fields
/// except the extrinsics. The extrinsics are handled by the protocol
#[derive(Encode, Decode)]
struct PartialBlock<Block: BlockT> {
    /// Block hash; always present.
    hash: BlockHash<Block>,
    /// Header; populated only when `BlockAttributes::HEADER` was requested.
    header: Option<BlockHeader<Block>>,
    /// Indexed body; only when `BlockAttributes::INDEXED_BODY` was requested.
    indexed_body: Option<Vec<Vec<u8>>>,
    /// Justifications; only when `BlockAttributes::JUSTIFICATION` was requested.
    justifications: Option<Justifications>,
}
/// The message to the server
#[derive(Encode, Decode)]
enum ServerMessage<Block: BlockT, ProtocolRequest> {
    /// Initial message, to be handled both by the client
    /// and the protocol
    InitialRequest(InitialRequest<Block, ProtocolRequest>),
    /// Message to be handled by the protocol
    ProtocolRequest(ProtocolRequest),
}
impl<Block: BlockT, ProtocolRequest> From<ProtocolRequest>
    for ServerMessage<Block, ProtocolRequest>
{
    /// Wraps a bare protocol request in the server message envelope.
    fn from(inner: ProtocolRequest) -> Self {
        Self::ProtocolRequest(inner)
    }
}
/// The client side of the consensus block relay
struct ConsensusRelayClient<Block, Pool, ProtoClient>
where
    Block: BlockT,
    Pool: TransactionPool,
    ProtoClient: ProtocolClient<BlockHash<Block>, TxHash<Pool>, Extrinsic<Block>>,
{
    /// Handle used to obtain per-peer request handles on the network.
    network: Arc<NetworkWrapper>,
    /// Name of the relay request/response protocol.
    protocol_name: ProtocolName,
    /// Protocol-specific client half (builds/resolves protocol messages).
    protocol_client: Arc<ProtoClient>,
    _phantom_data: std::marker::PhantomData<(Block, Pool)>,
}
impl<Block, Pool, ProtoClient> ConsensusRelayClient<Block, Pool, ProtoClient>
where
    Block: BlockT,
    Pool: TransactionPool,
    ProtoClient: ProtocolClient<BlockHash<Block>, TxHash<Pool>, Extrinsic<Block>>,
{
    /// Downloads the requested block from the peer using the relay protocol
    ///
    /// Two-phase exchange: the initial request/response fetches the partial
    /// block, then — only if a body was requested — the protocol response
    /// is resolved into the full extrinsics.
    async fn download(
        &self,
        who: PeerId,
        request: BlockRequest<Block>,
    ) -> Result<DownloadResult<BlockHash<Block>, BlockData<Block>>, RelayError> {
        let start_ts = Instant::now();
        let network_peer_handle = self
            .network
            .network_peer_handle(self.protocol_name.clone(), who)?;
        // Perform the initial request/response
        let initial_request = InitialRequest {
            from_block: match request.from {
                FromBlock::Hash(h) => BlockId::<Block>::Hash(h),
                FromBlock::Number(n) => BlockId::<Block>::Number(n),
            },
            block_attributes: request.fields,
            protocol_request: self.protocol_client.build_initial_request(),
        };
        let initial_response = network_peer_handle
            .request::<_, InitialResponse<Block, ProtoClient::Response>>(
                ServerMessage::InitialRequest(initial_request),
            )
            .await?;
        // Resolve the protocol response to get the extrinsics
        // (`protocol_response` is None when BODY was not requested).
        let (body, local_miss) = if let Some(protocol_response) = initial_response.protocol_response
        {
            let (body, local_miss) = self
                .resolve_extrinsics::<ServerMessage<Block, ProtoClient::Request>>(
                    protocol_response,
                    &network_peer_handle,
                )
                .await?;
            (Some(body), local_miss)
        } else {
            (None, 0)
        };
        // Assemble the final response
        let block_data = BlockData::<Block> {
            hash: initial_response.partial_block.hash,
            header: initial_response.partial_block.header,
            body,
            indexed_body: initial_response.partial_block.indexed_body,
            receipt: None,
            message_queue: None,
            justification: None,
            justifications: initial_response.partial_block.justifications,
        };
        Ok(DownloadResult {
            download_unit_id: initial_response.block_hash,
            downloaded: block_data,
            latency: start_ts.elapsed(),
            local_miss,
        })
    }
    /// Resolves the extrinsics from the initial response
    ///
    /// Returns the extrinsics plus `local_miss`: the total encoded size of
    /// entries the protocol could not resolve locally (i.e. bytes that had
    /// to travel over the wire).
    async fn resolve_extrinsics<Request>(
        &self,
        protocol_response: ProtoClient::Response,
        network_peer_handle: &NetworkPeerHandle,
    ) -> Result<(Vec<Extrinsic<Block>>, usize), RelayError>
    where
        Request: From<ProtoClient::Request> + Encode + Send + Sync,
    {
        let (block_hash, resolved) = self
            .protocol_client
            .resolve_initial_response::<Request>(protocol_response, network_peer_handle)
            .await?;
        let mut local_miss = 0;
        let extrinsics = resolved
            .into_iter()
            .map(|entry| {
                let encoded = entry.protocol_unit.encode();
                if !entry.locally_resolved {
                    trace!(
                        target: LOG_TARGET,
                        "relay::download: local miss: {block_hash:?}/{:?}, \
                        size = {}",
                        entry.protocol_unit_id,
                        encoded.len()
                    );
                    local_miss += encoded.len();
                }
                entry.protocol_unit
            })
            .collect();
        Ok((extrinsics, local_miss))
    }
}
#[async_trait]
impl<Block, Pool, ProtoClient> BlockDownloader<Block>
    for ConsensusRelayClient<Block, Pool, ProtoClient>
where
    Block: BlockT,
    Pool: TransactionPool,
    ProtoClient: ProtocolClient<BlockHash<Block>, TxHash<Pool>, Extrinsic<Block>>,
{
    /// Downloads one block and encodes the resulting `BlockData`, mapping
    /// relay errors onto the `RequestFailure` variants the sync engine
    /// understands.
    async fn download_block(
        &self,
        who: PeerId,
        request: BlockRequest<Block>,
    ) -> Result<Result<Vec<u8>, RequestFailure>, oneshot::Canceled> {
        let ret = self.download(who, request.clone()).await;
        match ret {
            Ok(result) => {
                let downloaded = result.downloaded.encode();
                trace!(
                    target: LOG_TARGET,
                    "relay::download_block: {:?} => {},{},{:?}",
                    result.download_unit_id,
                    downloaded.len(),
                    result.local_miss,
                    result.latency
                );
                Ok(Ok(downloaded))
            }
            Err(error) => {
                debug!(
                    target: LOG_TARGET,
                    "relay::download_block: error: {who:?}/{request:?}/{error:?}"
                );
                match error {
                    RelayError::RequestResponse(error) => match error {
                        // Decode failures are surfaced as a network timeout —
                        // the closest available RequestFailure variant.
                        RequestResponseErr::DecodeFailed { .. } => {
                            Ok(Err(RequestFailure::Network(OutboundFailure::Timeout)))
                        }
                        RequestResponseErr::RequestFailure(err) => Ok(Err(err)),
                        RequestResponseErr::NetworkUninitialized => {
                            // TODO: This is the best error found that kind of matches
                            Ok(Err(RequestFailure::NotConnected))
                        }
                        RequestResponseErr::Canceled => Err(oneshot::Canceled),
                    },
                    _ => {
                        // Why timeout???
                        // All remaining relay errors are likewise reported as
                        // a generic network timeout for lack of a better fit.
                        Ok(Err(RequestFailure::Network(OutboundFailure::Timeout)))
                    }
                }
            }
        }
    }
    /// Decodes a single SCALE-encoded `BlockData` from the raw response
    /// bytes; this relay protocol always carries exactly one block.
    fn block_response_into_blocks(
        &self,
        _request: &BlockRequest<Block>,
        response: Vec<u8>,
    ) -> Result<Vec<BlockData<Block>>, BlockResponseError> {
        match Decode::decode(&mut response.as_ref()) {
            Ok(block_data) => Ok(vec![block_data]),
            Err(err) => Err(BlockResponseError::DecodeFailed(format!(
                "Failed to decode consensus response: {err:?}"
            ))),
        }
    }
}
/// The server side of the consensus block relay
struct ConsensusRelayServer<
    Block: BlockT,
    Client,
    ProtoServer: ProtocolServer<BlockHash<Block>> + Send,
> {
    /// Substrate client, used to read headers/bodies/justifications.
    client: Arc<Client>,
    /// Protocol-specific server half.
    protocol: Box<ProtoServer>,
    /// Inbound request stream for the relay protocol.
    request_receiver: async_channel::Receiver<IncomingRequest>,
    _block: std::marker::PhantomData<Block>,
}
impl<Block, Client, ProtoServer> ConsensusRelayServer<Block, Client, ProtoServer>
where
    Block: BlockT,
    Client: HeaderBackend<Block> + BlockBackend<Block>,
    ProtoServer: ProtocolServer<BlockHash<Block>> + Send,
{
    /// Handles the received request from the client side
    async fn on_request(&mut self, request: IncomingRequest) {
        // Drop the request in case of errors and let the client time out.
        // This is the behavior of the current substrate block handler.
        let IncomingRequest {
            peer,
            payload,
            pending_response,
        } = request;
        let server_msg: ServerMessage<Block, ProtoServer::Request> =
            match Decode::decode(&mut payload.as_ref()) {
                Ok(msg) => msg,
                Err(err) => {
                    warn!(
                        target: LOG_TARGET,
                        "relay::on_request: decode incoming: {peer}: {err:?}"
                    );
                    return;
                }
            };
        // Dispatch on the message kind; both arms yield encoded bytes.
        let ret = match server_msg {
            ServerMessage::InitialRequest(req) => self.on_initial_request(req),
            ServerMessage::ProtocolRequest(req) => self.on_protocol_request(req),
        };
        match ret {
            Ok(response) => {
                self.send_response(peer, response, pending_response);
                trace!(
                    target: LOG_TARGET,
                    "relay::consensus server: request processed from: {peer}"
                );
            }
            Err(error) => {
                debug!(
                    target: LOG_TARGET,
                    "relay::consensus server: error: {peer}/{error:?}"
                );
            }
        }
    }
    /// Handles the initial request from the client
    ///
    /// Returns the SCALE-encoded `InitialResponse` for the resolved block.
    fn on_initial_request(
        &mut self,
        initial_request: InitialRequest<Block, ProtoServer::Request>,
    ) -> Result<Vec<u8>, RelayError> {
        let block_hash = self.block_hash(&initial_request.from_block)?;
        let block_attributes = initial_request.block_attributes;
        // Build the generic and the protocol specific parts of the response
        let partial_block = self.get_partial_block(&block_hash, block_attributes)?;
        // The protocol part (extrinsics) is built only when a body was requested.
        let protocol_response = if block_attributes.contains(BlockAttributes::BODY) {
            Some(
                self.protocol
                    .build_initial_response(&block_hash, initial_request.protocol_request)?,
            )
        } else {
            None
        };
        let initial_response: InitialResponse<Block, ProtoServer::Response> = InitialResponse {
            block_hash,
            partial_block,
            protocol_response,
        };
        Ok(initial_response.encode())
    }
    /// Handles the protocol request from the client
    fn on_protocol_request(
        &mut self,
        request: ProtoServer::Request,
    ) -> Result<Vec<u8>, RelayError> {
        let response = self.protocol.on_request(request)?;
        Ok(response.encode())
    }
    /// Builds the partial block response
    ///
    /// Each optional component is populated only when the corresponding
    /// `BlockAttributes` flag was requested by the client.
    fn get_partial_block(
        &self,
        block_hash: &BlockHash<Block>,
        block_attributes: BlockAttributes,
    ) -> Result<PartialBlock<Block>, RelayError> {
        let block_header = match self.client.header(*block_hash) {
            Ok(Some(header)) => header,
            Ok(None) => {
                return Err(RelayError::BlockHeader(format!(
                    "Missing header: {block_hash:?}"
                )))
            }
            Err(err) => return Err(RelayError::BlockHeader(format!("{block_hash:?}, {err:?}"))),
        };
        let hash = block_header.hash();
        let header = if block_attributes.contains(BlockAttributes::HEADER) {
            Some(block_header)
        } else {
            None
        };
        let indexed_body = if block_attributes.contains(BlockAttributes::INDEXED_BODY) {
            self.client
                .block_indexed_body(*block_hash)
                .map_err(|err| RelayError::BlockIndexedBody(format!("{block_hash:?}, {err:?}")))?
        } else {
            None
        };
        let justifications = if block_attributes.contains(BlockAttributes::JUSTIFICATION) {
            self.client.justifications(*block_hash).map_err(|err| {
                RelayError::BlockJustifications(format!("{block_hash:?}, {err:?}"))
            })?
        } else {
            None
        };
        Ok(PartialBlock {
            hash,
            header,
            indexed_body,
            justifications,
        })
    }
    /// Converts the BlockId to block hash
    fn block_hash(&self, block_id: &BlockId<Block>) -> Result<BlockHash<Block>, RelayError> {
        match self.client.block_hash_from_id(block_id) {
            Ok(Some(hash)) => Ok(hash),
            Ok(None) => Err(RelayError::BlockHash(format!("Missing: {block_id:?}"))),
            Err(err) => Err(RelayError::BlockHash(format!("{block_id:?}, {err:?}"))),
        }
    }
    /// Builds/sends the response back to the client
    ///
    /// A failed send is logged and otherwise ignored; the peer times out.
    fn send_response(
        &self,
        peer: PeerId,
        response: Vec<u8>,
        sender: oneshot::Sender<OutgoingResponse>,
    ) {
        let response = OutgoingResponse {
            result: Ok(response),
            reputation_changes: Vec::new(),
            sent_feedback: None,
        };
        if sender.send(response).is_err() {
            warn!(
                target: LOG_TARGET,
                "relay::send_response: failed to send to {peer}"
            );
        }
    }
}
#[async_trait]
impl<Block, Client, ProtoServer> BlockServer<Block>
    for ConsensusRelayServer<Block, Client, ProtoServer>
where
    Block: BlockT,
    Client: HeaderBackend<Block> + BlockBackend<Block>,
    ProtoServer: ProtocolServer<BlockHash<Block>> + Send,
{
    /// Server event loop: processes inbound relay requests one at a time
    /// until the request channel is closed.
    async fn run(&mut self) {
        info!(
            target: LOG_TARGET,
            "relay::consensus block server: starting"
        );
        while let Some(request) = self.request_receiver.next().await {
            self.on_request(request).await;
        }
    }
}
/// The backend interface for the consensus block relay
struct ConsensusBackend<Block: BlockT, Client, Pool: TransactionPool> {
    /// Substrate client, used to read block bodies.
    client: Arc<Client>,
    /// Transaction pool, used for hashing and ready-transaction lookup.
    transaction_pool: Arc<Pool>,
    /// LRU cache of tx hash -> extrinsic, shared with the import task.
    transaction_cache: Arc<Mutex<LruCache<TxHash<Pool>, Extrinsic<Block>>>>,
}
impl<Block, Client, Pool> ConsensusBackend<Block, Client, Pool>
where
    Block: BlockT,
    Client: HeaderBackend<Block> + BlockBackend<Block>,
    Pool: TransactionPool<Block = Block> + 'static,
{
    /// Creates the backend and spawns a background task that mirrors
    /// transaction-pool import notifications into the LRU cache.
    fn new(
        client: Arc<Client>,
        transaction_pool: Arc<Pool>,
        spawn_handle: SpawnTaskHandle,
    ) -> Self {
        let transaction_cache = Arc::new(Mutex::new(LruCache::new(TRANSACTION_CACHE_SIZE)));
        spawn_handle.spawn_blocking("block-relay-transaction-import", None, {
            let transaction_pool = transaction_pool.clone();
            let transaction_cache = transaction_cache.clone();
            Box::pin(async move {
                // Fixed: the original called `import_notification_stream()`
                // inside the `while let` condition, creating a brand-new
                // subscription on every iteration and potentially missing
                // notifications delivered in between. Subscribe once.
                let mut import_notifications = transaction_pool.import_notification_stream();
                while let Some(hash) = import_notifications.next().await {
                    if let Some(transaction) = transaction_pool.ready_transaction(&hash) {
                        transaction_cache
                            .lock()
                            .put(hash.clone(), transaction.data().clone());
                        trace!(
                            target: LOG_TARGET,
                            "relay::backend: received import notification: {hash:?}"
                        );
                    }
                }
            })
        });
        Self {
            client,
            transaction_pool,
            transaction_cache,
        }
    }
    /// Adds the entry to the cache
    fn update_cache(&self, tx_hash: &TxHash<Pool>, extrinsic: &Extrinsic<Block>) {
        self.transaction_cache
            .lock()
            .put(tx_hash.clone(), extrinsic.clone());
        trace!(
            target: LOG_TARGET,
            "relay::backend: updated cache: {tx_hash:?}"
        );
    }
}
impl<Block, Client, Pool> ProtocolBackend<BlockHash<Block>, TxHash<Pool>, Extrinsic<Block>>
    for ConsensusBackend<Block, Client, Pool>
where
    Block: BlockT,
    Client: HeaderBackend<Block> + BlockBackend<Block>,
    Pool: TransactionPool<Block = Block> + 'static,
{
    /// Returns every `(tx_hash, extrinsic)` pair of the given block's body.
    fn download_unit_members(
        &self,
        block_hash: &BlockHash<Block>,
    ) -> Result<Vec<(TxHash<Pool>, Extrinsic<Block>)>, RelayError> {
        let body = self
            .client
            .block_body(*block_hash)
            .map_err(|err| RelayError::BlockBody(format!("{block_hash:?}, {err:?}")))?;
        match body {
            Some(extrinsics) => Ok(extrinsics
                .into_iter()
                .map(|extrinsic| (self.transaction_pool.hash_of(&extrinsic), extrinsic))
                .collect()),
            None => Err(RelayError::BlockExtrinsicsNotFound(format!(
                "{block_hash:?}"
            ))),
        }
    }

    /// Resolves a single transaction by hash, trying the cache, then the
    /// block body, then the transaction pool's ready set.
    fn protocol_unit(
        &self,
        block_hash: &BlockHash<Block>,
        tx_hash: &TxHash<Pool>,
    ) -> Result<Option<Extrinsic<Block>>, RelayError> {
        // Cheapest lookup first: the recently-seen transaction cache.
        if let Some(extrinsic) = self.transaction_cache.lock().get(tx_hash) {
            return Ok(Some(extrinsic.clone()));
        }
        // Next, scan the extrinsics of the block itself.
        if let Ok(Some(extrinsics)) = self.client.block_body(*block_hash) {
            if !extrinsics.is_empty() {
                let len = extrinsics.len();
                let found = extrinsics
                    .into_iter()
                    .find(|extrinsic| self.transaction_pool.hash_of(extrinsic) == *tx_hash);
                match found {
                    Some(extrinsic) => {
                        // TODO: avoid adding inherents to the cache
                        self.update_cache(tx_hash, &extrinsic);
                        return Ok(Some(extrinsic));
                    }
                    None => trace!(
                        target: LOG_TARGET,
                        "relay::protocol_unit: {tx_hash:?} not found in {block_hash:?}/{len}",
                    ),
                }
            }
        }
        // Finally, fall back to the pool's ready transactions.
        if let Some(in_pool_transaction) = self.transaction_pool.ready_transaction(tx_hash) {
            let extrinsic = in_pool_transaction.data().clone();
            self.update_cache(tx_hash, &extrinsic);
            return Ok(Some(extrinsic));
        }
        Ok(None)
    }
}
/// Wires up the consensus block relay: returns the client-side downloader,
/// the server task, and the request/response protocol config to register
/// with the networking layer.
pub fn build_consensus_relay<Block, Client, Pool>(
    network: Arc<NetworkWrapper>,
    client: Arc<Client>,
    pool: Arc<Pool>,
    spawn_handle: SpawnTaskHandle,
) -> BlockRelayParams<Block>
where
    Block: BlockT,
    Client: HeaderBackend<Block> + BlockBackend<Block> + 'static,
    Pool: TransactionPool<Block = Block> + 'static,
{
    let (tx, request_receiver) = async_channel::bounded(NUM_PEER_HINT.get());
    let backend = Arc::new(ConsensusBackend::new(client.clone(), pool, spawn_handle));
    let relay_client: ConsensusRelayClient<Block, Pool, _> = ConsensusRelayClient {
        network,
        protocol_name: SYNC_PROTOCOL.into(),
        protocol_client: Arc::new(CompactBlockClient {
            backend: backend.clone(),
        }),
        _phantom_data: Default::default(),
    };
    let relay_server = ConsensusRelayServer {
        client,
        protocol: Box::new(CompactBlockServer { backend }),
        request_receiver,
        _block: Default::default(),
    };
    // Build the config in one shot; the previous code constructed it with
    // `inbound_queue: None` and immediately reassigned the field, which
    // required a needless `mut`.
    let protocol_config = ProtocolConfig {
        name: SYNC_PROTOCOL.into(),
        fallback_names: Vec::new(),
        max_request_size: 1024 * 1024,
        max_response_size: 16 * 1024 * 1024,
        request_timeout: Duration::from_secs(20),
        inbound_queue: Some(tx),
    };
    BlockRelayParams {
        server: Box::new(relay_server),
        downloader: Arc::new(relay_client),
        request_response_config: protocol_config,
    }
}
|
use crate::libs::color::color_system;
use isaribi::{
style,
styled::{Style, Styled},
};
use kagura::prelude::*;
use nusa::prelude::*;
/// Properties for [`Header`]; currently carries no data.
pub struct Props {}
/// Internal component messages; `Header` defines none.
pub enum Msg {}
/// Events emitted to the parent component; `Header` emits none.
pub enum On {}
/// A styled page-header container component.
pub struct Header {}
// Wires the (empty) Props/Msg/On types into the kagura `Component` contract.
impl Component for Header {
    type Props = Props;
    type Msg = Msg;
    type Event = On;
}
// Marker impl (no overrides) — relies on `HtmlComponent`'s defaults.
impl HtmlComponent for Header {}
impl Constructor for Header {
    /// Builds a stateless `Header`. The props are empty and unused, so the
    /// parameter is named `_props` to silence the unused-variable warning
    /// the original `props` binding produced.
    fn constructor(_props: Self::Props) -> Self {
        Self {}
    }
}
// Empty impl — `Header` has no Msg variants to handle, so the trait's
// default methods suffice.
impl Update for Header {}
impl Render<Html> for Header {
    type Children = (Attributes, Events, Vec<Html>);

    /// Renders the header as a scoped-styled `div`, tagging the caller's
    /// attributes with this component's "base" class.
    fn render(&self, (attrs, events, children): Self::Children) -> Html {
        let root = Html::div(attrs.class(Self::class("base")), events, children);
        Self::styled(root)
    }
}
impl Styled for Header {
    /// Scoped CSS for the header: a dark grid container with uniform
    /// padding and row spacing. Colors come from the app's
    /// `color_system` palette.
    fn style() -> Style {
        style! {
            ".base"{
                "background-color": color_system::gray(255, 8).to_string();
                "color": color_system::gray(255, 0).to_string();
                "padding": ".65em";
                "row-gap": ".65em";
                "display": "grid";
            }
        }
    }
}
|
use std::ops::{ Add, Sub };
use std::ops::Not;
use std::cmp::PartialEq;
use std::convert::From;
/// A complex number with `i32` real (`re`) and imaginary (`im`) parts.
///
/// Derives `Clone`/`Copy` (it is a tiny plain-old-data struct), so values
/// are copied instead of moved — backward compatible with all existing
/// by-value operator impls.
#[derive(Debug, Clone, Copy)]
struct ComplexNumber {
    re: i32,
    im: i32
}
impl Sub for ComplexNumber {
    type Output = Self;

    /// Component-wise subtraction.
    fn sub(self, other: Self) -> Self {
        let re = self.re - other.re;
        let im = self.im - other.im;
        Self { re, im }
    }
}
impl Add for ComplexNumber {
    type Output = Self;

    /// Component-wise addition.
    fn add(self, other: Self) -> Self {
        let re = other.re + self.re;
        let im = other.im + self.im;
        Self { re, im }
    }
}
impl Not for ComplexNumber {
    type Output = Self;

    /// Complex conjugate: keeps `re`, negates `im`.
    fn not(self) -> Self::Output {
        Self {
            im: -self.im,
            ..self
        }
    }
}
impl PartialEq for ComplexNumber {
    /// Two complex numbers are equal iff both components match.
    fn eq(&self, rhs: &Self) -> bool {
        (self.re, self.im) == (rhs.re, rhs.im)
    }
}
impl From<(isize, isize)> for ComplexNumber {
    /// Converts a `(re, im)` tuple into a `ComplexNumber`.
    ///
    /// NOTE(review): the `as i32` casts silently truncate values outside
    /// the `i32` range — confirm callers only pass values that fit.
    fn from(t: (isize, isize)) -> Self {
        let (re, im) = t;
        Self {
            re: re as i32,
            im: im as i32,
        }
    }
}
/// Types that have a complex modulus (absolute value).
trait CanMod {
    // Returns sqrt(re^2 + im^2) for the implementor's components.
    fn my_mod(&self) -> f64;
}
impl CanMod for ComplexNumber {
    /// Modulus of the complex number.
    ///
    /// Components are widened to `f64` before squaring: the previous
    /// `self.re * self.re + self.im * self.im` in `i32` overflows for
    /// components larger than ~46340 (panic in debug, wrap in release).
    /// Results are unchanged for values that did not overflow.
    fn my_mod(&self) -> f64 {
        let re = f64::from(self.re);
        let im = f64::from(self.im);
        (re * re + im * im).sqrt()
    }
}
impl CanMod for (isize, isize) {
    /// Modulus of a `(re, im)` tuple.
    ///
    /// Widens to `f64` before squaring so `a*a + b*b` cannot overflow
    /// `isize` for large components; results are unchanged for values
    /// that did not overflow before.
    fn my_mod(&self) -> f64 {
        let re = self.0 as f64;
        let im = self.1 as f64;
        (re * re + im * im).sqrt()
    }
}
/// Free-function convenience wrapper: modulus of any `CanMod` value.
fn mod_complex<T>(c: T) -> f64
where
    T: CanMod,
{
    c.my_mod()
}
/// Demo driver: exercises the operator overloads, the `From`/`Into`
/// conversion, and the `CanMod` trait on both `ComplexNumber` and a
/// raw `(isize, isize)` tuple.
fn main() {
    let c1 = ComplexNumber { re: 1, im: -2 };
    let c2 = ComplexNumber { re: -1, im: 3 };
    let c3 = ComplexNumber { re: 0, im: -4 };
    let c4 = ComplexNumber { re: 5, im: 7 };
    let c5 = ComplexNumber { re: 3, im: -3 };
    let c6 = ComplexNumber { re: 4, im: 0 };
    let c7 = ComplexNumber { re: 3, im: 3 };
    let c8 = ComplexNumber { re: 3, im: 3 };
    // `+` and `-` take their operands by value (they are consumed here).
    println!("{:?}", c1 + c2);
    println!("{:?}", c3 - c4);
    // `==` borrows via PartialEq::eq(&self, &Self), so c5/c6 stay usable.
    println!("{:?} == {:?} : {}", &c5, &c6, c5 == c6);
    // `!` is overloaded as complex conjugation (negates `im`).
    println!("{}", c5 == !c7);
    println!("{}", c5 != !c8);
    println!("{:?}", !c5);
    println!("{:?}", !c6);
    let t = (23, 17);
    println!("{:?}", ComplexNumber::from(t));
    // `Into` comes for free from the `From<(isize, isize)>` impl.
    let c9: ComplexNumber = t.into();
    println!("{:?}", c9);
    println!("{}", mod_complex(c9));
    println!("{}", mod_complex(t));
}
|
use std::env;
use friday_error;
use friday_error::frierr;
use friday_error::FridayError;
/// Looks up the environment variable `name`.
///
/// Returns its value on success; on failure returns a `FridayError`
/// (built via `frierr!`) that embeds the underlying `env::var` error and
/// a hint on how to resolve it.
pub fn get_environment<S: AsRef<str>>(name: S) -> Result<String, FridayError> {
    let key = name.as_ref();
    env::var(key).or_else(|err| {
        frierr!(
            "Unable to get environment variable {} - Reason {}\
            \n\nThings to try\n1. Try setting it to some value {}=..",
            key,
            err,
            key
        )
    })
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.